Half-way finished migration

This commit is contained in:
Danilo Reyes 2024-04-18 19:47:16 -06:00
parent 27c700767c
commit bad3a00e3c
56 changed files with 2441 additions and 8 deletions

147
workstation/base.nix Normal file
View File

@ -0,0 +1,147 @@
# Workstation base profile: system basics, locale, security policy,
# nix daemon behaviour, session environment variables and core services.
{ config, lib, pkgs, ... }: {
  system = {
    copySystemConfiguration = true;
    stateVersion = "23.11";
  };
  time = {
    timeZone = "America/Mexico_City";
    # Keep the hardware clock in local time (dual-boot friendly).
    hardwareClockInLocalTime = true;
  };
  i18n = {
    defaultLocale = "en_CA.UTF-8";
    extraLocaleSettings = { LC_MONETARY = "es_MX.UTF-8"; };
  };
  console = {
    font = "Lat2-Terminus16";
    keyMap = "us";
    # useXkbConfig = true; # use xkbOptions in tty.
  };
  security = {
    polkit.enable = true;
    sudo = {
      enable = true;
      # NOTE(review): passwordless sudo for the wheel group trades security
      # for convenience — confirm this is intended on this machine.
      wheelNeedsPassword = false;
    };
    # Raise the soft open-file limit for every login session.
    pam.loginLimits = [{
      domain = "*";
      type = "soft";
      item = "nofile";
      value = "8192";
    }];
  };
  nix = {
    optimise.automatic = true;
    # Garbage-collect the nix store weekly.
    gc = {
      automatic = true;
      dates = "weekly";
    };
    settings = {
      auto-optimise-store = true;
      experimental-features = [ "nix-command" "flakes" ];
      # Extra binary caches, with the public keys used to verify them.
      substituters = [
        "https://nix-gaming.cachix.org"
        "https://nixpkgs-python.cachix.org"
        "https://devenv.cachix.org"
        "https://cuda-maintainers.cachix.org"
        "https://ai.cachix.org"
      ];
      trusted-public-keys = [
        "nix-gaming.cachix.org-1:nbjlureqMbRAxR1gJ/f3hxemL9svXaZF/Ees8vCUUs4="
        "nixpkgs-python.cachix.org-1:hxjI7pFxTyuTHn2NkvWCrAUcNZLNS3ZAvfYNuYifcEU="
        "devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw="
        "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E="
        "ai.cachix.org-1:N9dzRK+alWwoKXQlnn0H6aUx0lU/mspIoz8hMvGvbbc="
      ];
    };
  };
  # Fixed GID so shared media files keep consistent group ownership.
  users.groups.piracy.gid = 985;
  environment = {
    systemPackages = with pkgs; [ wget ];
    # `rec` lets later entries reference the XDG_* variables defined here.
    variables = rec {
      # PATH
      XDG_CACHE_HOME = "\${HOME}/.cache";
      XDG_CONFIG_HOME = "\${HOME}/.config";
      XDG_BIN_HOME = "\${HOME}/.local/bin";
      XDG_DATA_HOME = "\${HOME}/.local/share";
      XDG_STATE_HOME = "\${HOME}/.local/state";
      # DEV PATH: keep language toolchains out of $HOME.
      CABAL_DIR = "${XDG_CACHE_HOME}/cabal";
      CARGO_HOME = "${XDG_DATA_HOME}/cargo";
      GEM_HOME = "${XDG_DATA_HOME}/ruby/gems";
      GEM_PATH = "${XDG_DATA_HOME}/ruby/gems";
      GEM_SPEC_CACHE = "${XDG_DATA_HOME}/ruby/specs";
      GOPATH = "${XDG_DATA_HOME}/go";
      NPM_CONFIG_USERCONFIG = "${XDG_CONFIG_HOME}/npm/npmrc";
      PNPM_HOME = "${XDG_DATA_HOME}/pnpm";
      PSQL_HISTORY = "${XDG_DATA_HOME}/psql_history";
      REDISCLI_HISTFILE = "${XDG_DATA_HOME}/redis/rediscli_history";
      WINEPREFIX = "${XDG_DATA_HOME}/wine";
      PYTHONSTARTUP = "${XDG_CONFIG_HOME}/python/pythonrc";
      STACK_ROOT = "${XDG_DATA_HOME}/stack";
      # OPTIONS
      HISTFILE = "${XDG_STATE_HOME}/bash/history";
      LESSHISTFILE = "-";
      GHCUP_USE_XDG_DIRS = "true";
      RIPGREP_CONFIG_PATH = "${XDG_CONFIG_HOME}/ripgrep/ripgreprc";
      ELECTRUMDIR = "${XDG_DATA_HOME}/electrum";
      VISUAL = "emacsclient -ca emacs";
      WGETRC = "${XDG_CONFIG_HOME}/wgetrc";
      XCOMPOSECACHE = "${XDG_CACHE_HOME}/X11/xcompose";
      "_JAVA_OPTIONS" = "-Djava.util.prefs.userRoot=${XDG_CONFIG_HOME}/java";
      DOCKER_CONFIG = "${XDG_CONFIG_HOME}/docker";
      # NVIDIA
      CUDA_CACHE_PATH = "${XDG_CACHE_HOME}/nv";
      # Themes
      # WEBKIT_DISABLE_COMPOSITING_MODE = "1";
      CALIBRE_USE_SYSTEM_THEME = "1";
      PATH = [
        "\${HOME}/.local/bin"
        "${XDG_CONFIG_HOME}/emacs/bin"
        "${XDG_DATA_HOME}/npm/bin"
        "${XDG_DATA_HOME}/pnpm"
      ];
    };
  };
  programs = {
    starship.enable = true;
    tmux.enable = true;
    fzf.fuzzyCompletion = true;
    neovim = {
      enable = true;
      vimAlias = true;
    };
    gnupg.agent = {
      enable = true;
      enableSSHSupport = true;
    };
  };
  services = {
    smartd.enable = true; # SMART disk monitoring
    fstrim.enable = true; # periodic SSD TRIM
    btrfs.autoScrub = {
      enable = true;
      fileSystems = [ "/" ];
    };
    avahi = {
      enable = true;
      nssmdns = true;
    };
    openssh = {
      enable = true;
      openFirewall = true;
      startWhenNeeded = true;
      # Key-based authentication only; root may log in with a key.
      settings = {
        PasswordAuthentication = false;
        PermitRootLogin = "prohibit-password";
        KbdInteractiveAuthentication = false;
      };
    };
  };
  fonts.fontconfig.enable = true;
  powerManagement.cpuFreqGovernor = lib.mkDefault "performance";
}

View File

@ -0,0 +1,14 @@
# Art and illustration tools for user jawz.
{ config, pkgs, ... }:
{
  users.users.jawz.packages = with pkgs; [
    gimp # raster image editor
    krita # digital painting suite
    mypaint # lightweight painting app
    mypaint-brushes # extra brush packs
    mypaint-brushes1 # legacy brush collection
    # drawpile # collaborative drawing sessions
    pureref # reference / inspiration boards
    blender # 3D modelling, sculpting and animation
  ];
}

View File

@ -0,0 +1,8 @@
# Haskell toolchain for user jawz.
{ config, pkgs, ... }:
{
  users.users.jawz.packages = with pkgs; [
    ghc # the Glasgow Haskell compiler
    haskell-language-server # IDE/LSP support
  ];
}

View File

@ -0,0 +1,10 @@
# Tooling for working with Nix itself.
{ config, pkgs, ... }:
{
  users.users.jawz.packages = with pkgs; [
    expect # preserve colors when piping through nom
    nix-output-monitor # prettier nix build output
    nixfmt # nix code formatter
    cachix # pull prebuilt artifacts instead of compiling
  ];
}

View File

@ -0,0 +1,20 @@
# Python development environment for user jawz.
{ config, pkgs, ... }:
{
  users.users.jawz.packages = with pkgs; [
    pipenv # python development workflow for humans
    (python3.withPackages (ps:
      with ps; [
        # nose # testing and running python scripts
        # poetry # dependency management made easy
        # pytest # framework for writing tests
        black # opinionated code formatter
        editorconfig # honor per-project editor settings
        flake8 # wrapper for pyflakes, pycodestyle and mccabe
        isort # sort Python imports
        pyflakes # static error checking
        pylint # bug and style checker
        speedtest-cli # measure internet speed from the terminal
      ]))
  ];
}

View File

@ -0,0 +1,11 @@
# Hunspell spell checking with the dictionaries jawz uses.
{ config, pkgs, ... }:
{
  users.users.jawz.packages = with pkgs; [
    hunspell
    hunspellDicts.it_IT # Italian
    hunspellDicts.es_MX # Spanish (Mexico)
    hunspellDicts.es_ES # Spanish (Spain)
    hunspellDicts.en_CA-large # English (Canada)
  ];
}

View File

@ -0,0 +1,10 @@
# Fonts installed for user jawz.
{ config, pkgs, ... }:
{
  users.users.jawz.packages = with pkgs; [
    # Only the nerd-font families actually used, to keep the closure small.
    (nerdfonts.override {
      fonts = [ "CascadiaCode" "ComicShannsMono" "Iosevka" ];
    })
    symbola # broad unicode symbol coverage
  ];
}

View File

@ -0,0 +1,35 @@
# Gaming: Steam, launchers and console emulators.
{ config, pkgs, ... }:
{
  programs.steam = {
    enable = true;
    remotePlay.openFirewall = true;
    dedicatedServer.openFirewall = true;
  };
  users.users.jawz.packages = with pkgs; [
    (lutris.override {
      extraPkgs = pkgs: [
        winetricks
        wine64Packages.stable
        wineWowPackages.stable
      ];
    })
    cartridges # unified game launcher
    heroic # Epic Games installer
    gamemode # runtime optimizations while gaming
    # grapejuice # roblox manager
    # minecraft # minecraft official launcher
    protonup-qt # keep proton-ge up to date
    # ns-usbloader # load games into my switch
    # emulators
    rpcs3 # ps3 emulator
    pcsx2 # ps2 emulator
    cemu # wii u emulator
    dolphin-emu # wii emulator
    citra-nightly # 3Ds emulator
    snes9x-gtk # snes emulator
  ];
}

View File

@ -0,0 +1,27 @@
# Internet clients: browsers, mail, chat and sync.
{ config, pkgs, ... }:
{
  programs.geary.enable = true;
  programs.firefox = {
    enable = true;
    languagePacks = [ "en-CA" "es-MX" "it" ];
  };
  services.psd.enable = true;
  users.users.jawz.packages = with pkgs; [
    nextcloud-client # self-hosted google-drive alternative
    fragments # beautiful torrent client
    protonmail-bridge # bridge for protonmail
    tor-browser-bundle-bin # dark web, so dark!
    chromium # web browser with spyware included
    telegram-desktop # furry chat
    nicotine-plus # remember Ares?
    vesktop
    (pkgs.discord.override {
      withOpenASAR = true;
      # withVencord = true;
    })
    # hugo # website engine
  ];
}

View File

@ -0,0 +1,13 @@
# Office and reading applications.
{ config, pkgs, ... }:
{
  users.users.jawz.packages = with pkgs; [
    libreoffice # office, but based
    calibre # ugly af eBook library manager
    newsflash # feed reader, syncs with nextcloud
    furtherance # time-tracking utility (self-packaged)
    # foliate # gtk eBook reader
    # wike # gtk wikipedia wow!
    # denaro # manage your finances
  ];
}

View File

@ -0,0 +1,5 @@
# Placeholder module: no packages assigned to jawz here yet.
{ config, pkgs, ... }:
{
  users.users.jawz.packages = with pkgs; [ ];
}

View File

@ -1,7 +1,7 @@
#+TITLE: JawZ NixOS workstation configuration #+TITLE: JawZ NixOS workstation configuration
#+AUTHOR: Danilo Reyes #+AUTHOR: Danilo Reyes
#+PROPERTY: header-args :tangle configuration.nix # #+PROPERTY: header-args :tangle configuration.nix
#+auto_tangle: t # #+auto_tangle: t
* TODO [0/6] * TODO [0/6]
- [ ] System configurations [0/8] - [ ] System configurations [0/8]
@ -69,7 +69,7 @@ passwords and other secrets.
imports = [ imports = [
# <agenix/modules/age.nix> # <agenix/modules/age.nix>
./fstab.nix ./fstab.nix
./docker.nix # ./docker.nix
(import "${home-manager}/nixos") (import "${home-manager}/nixos")
# nixGaming.nixosModules.pipewireLowLatency # nixGaming.nixosModules.pipewireLowLatency
]; ];
@ -405,9 +405,9 @@ gnome.gnome-tweaks # tweaks for the gnome desktop environment
# Fonts # Fonts
(nerdfonts.override { (nerdfonts.override {
fonts = [ "Agave" "CascadiaCode" "SourceCodePro" fonts = [ "CascadiaCode"
"ComicShannsMono" "OpenDyslexic" "ComicShannsMono"
"Ubuntu" "FiraCode" "Iosevka" ]; "Iosevka" ];
}) })
symbola symbola
(papirus-icon-theme.override { (papirus-icon-theme.override {
@ -435,7 +435,7 @@ mypaint-brushes # but it's got some
mypaint-brushes1 # nice damn brushes mypaint-brushes1 # nice damn brushes
# drawpile # arty party with friends!! # drawpile # arty party with friends!!
pureref # create inspiration/reference boards # pureref # create inspiration/reference boards
#+end_src #+end_src
*** GAMING *** GAMING
@ -922,7 +922,7 @@ environment = {
}; };
systemPackages = with pkgs; [ systemPackages = with pkgs; [
wget wget
gwe # gwe
]; ];
variables = rec { variables = rec {
# PATH # PATH

65
workstation/gnome.nix Normal file
View File

@ -0,0 +1,65 @@
# GNOME desktop: GDM + GNOME shell, trimmed stock apps, user extensions,
# and a mutter overlay with the dynamic triple-buffering patches.
{ config, pkgs, ... }:
{
  services.xserver = {
    enable = true;
    displayManager.gdm.enable = true;
    desktopManager.gnome.enable = true;
    libinput.enable = true;
  };
  # Stock GNOME applications that should NOT be installed.
  environment.gnome.excludePackages = (with pkgs; [
    gnome-photos
    gnome-tour
    gnome-text-editor
    gnome-connections
    # gnome-shell-extensions
    baobab
  ]) ++ (with pkgs.gnome; [
    # totem
    # gedit
    gnome-music
    epiphany
    gnome-characters
    yelp
    gnome-font-viewer
    cheese
  ]);
  # Make Qt apps follow the Adwaita look.
  qt = {
    enable = true;
    style = "adwaita";
  };
  users.users.jawz.packages = with pkgs;
    ([
      adw-gtk3
      gnome.gnome-tweaks # tweaks for the gnome desktop environment
      (papirus-icon-theme.override { color = "adwaita"; })
      # gradience # theme customizer, allows you to modify adw-gtk3 themes
      # lm_sensors # for extension, displays cpu temp
      libgda # for pano shell extension
    ]) ++ (with pkgs.gnomeExtensions; [
      appindicator # applets for open applications
      reading-strip # like putting a finger on every line I read
      tactile # window manager
      pano # clipboard manager
      freon # hardware temperature monitor
      # blur-my-shell # make the overview more visually appealing
      # gamemode # I guess I'm a gamer now?
      # burn-my-windows
      # forge # window manager
    ]);
  # Replace mutter's source with van Vugt's triple-buffering branch,
  # pinned by commit hash and verified by sha256.
  nixpkgs.overlays = [
    (final: prev: {
      gnome = prev.gnome.overrideScope' (gnomeFinal: gnomePrev: {
        mutter = gnomePrev.mutter.overrideAttrs (old: {
          src = pkgs.fetchgit {
            url = "https://gitlab.gnome.org/vanvugt/mutter.git";
            # GNOME 45: triple-buffering-v4-45
            rev = "0b896518b2028d9c4d6ea44806d093fd33793689";
            sha256 = "sha256-mzNy5GPlB2qkI2KEAErJQzO//uo8yO0kPQUwvGDwR4w=";
          };
        });
      });
    })
  ];
}

View File

@ -0,0 +1,145 @@
# Home-manager setup for user jawz: bash shell, XDG locations, editors,
# and user-level services (lorri, emacs daemon).
{ config, lib, pkgs, ... }:
let
  version = "23.11";
  # Pin home-manager to the release matching the system stateVersion.
  home-manager = builtins.fetchTarball
    "https://github.com/nix-community/home-manager/archive/release-${version}.tar.gz";
in {
  imports = [ (import "${home-manager}/nixos") ];
  home-manager = {
    useUserPackages = true;
    useGlobalPkgs = true;
    users.jawz = { config, pkgs, ... }: {
      home.stateVersion = version;
      xdg = {
        enable = true;
        userDirs = {
          enable = true;
          createDirectories = false;
          desktop = "${config.home.homeDirectory}";
          documents = "${config.home.homeDirectory}/Documents";
          download = "${config.home.homeDirectory}/Downloads";
          music = "${config.home.homeDirectory}/Music";
          pictures = "${config.home.homeDirectory}/Pictures";
          templates = "${config.xdg.dataHome}/Templates";
          videos = "${config.home.homeDirectory}/Videos";
        };
        # Dotfiles tracked in the repo, linked into ~/.config.
        configFile = {
          "wgetrc".source = ../dotfiles/wget/wgetrc;
          "configstore/update-notifier-npm-check.json".source =
            ../dotfiles/npm/update-notifier-npm-check.json;
          "npm/npmrc".source = ../dotfiles/npm/npmrc;
          "gallery-dl/config.json".source = ../dotfiles/gallery-dl/config.json;
          "htop/htoprc".source = ../dotfiles/htop/htoprc;
          "python/pythonrc".source = ../dotfiles/pythonrc;
        };
      };
      # Fix: the original defined `programs.bash = { ... };` and later a
      # separate `programs = { ... };` inside the same attribute set, which
      # Nix rejects as a duplicate definition of `programs`. All program
      # options now live in this single `programs` set.
      programs = {
        bash = {
          enable = true;
          historyFile = "\${XDG_STATE_HOME}/bash/history";
          historyControl = [ "erasedups" "ignorespace" ];
          shellAliases = {
            hh = "hstr";
            ls = "eza --icons --group-directories-first";
            edit = "emacsclient -t";
            comic = ''download -u jawz -i "$(cat $LC | fzf --multi --exact -i)"'';
            gallery =
              ''download -u jawz -i "$(cat $LW | fzf --multi --exact -i)"'';
            cp = "cp -i";
            mv = "mv -i";
            mkcd = ''mkdir -pv "$1" && cd "$1" || exit'';
            mkdir = "mkdir -p";
            rm = "trash";
            ".." = "cd ..";
            "..." = "cd ../..";
            ".3" = "cd ../../..";
            ".4" = "cd ../../../..";
            ".5" = "cd ../../../../..";
            dl = "download -u jawz -i";
            e = "edit";
            c = "cat";
            b = "bat";
            f = "fzf --multi --exact -i";
            sc = "systemctl --user";
            jc = "journalctl --user -xefu";
            open-gallery = ''
              cd /mnt/pool/scrapping/JawZ/gallery-dl &&
              xdg-open "$(fd . ./ Husbands -tdirectory -d 1 | fzf -i)"'';
            unique-extensions = ''
              fd -tf | rev | cut -d. -f1 | rev |
              tr '[:upper:]' '[:lower:]' | sort |
              uniq --count | sort -rn'';
          };
          enableVteIntegration = true;
          initExtra = ''
            $HOME/.local/bin/pokemon-colorscripts -r --no-title
            # Lists
            list_root="${config.xdg.configHome}"/jawz/lists/jawz
            export LW=$list_root/watch.txt
            export LI=$list_root/instant.txt
            export LC=$list_root/comic.txt
            export command_timeout=30
            if command -v fzf-share >/dev/null; then
              source "$(fzf-share)/key-bindings.bash"
              source "$(fzf-share)/completion.bash"
            fi
            nixos-reload () {
              NIXOSDIR=/home/jawz/Development/NixOS
              nix-store --add-fixed sha256 $NIXOSDIR/scripts/PureRef-1.11.1_x64.Appimage
              nixfmt $NIXOSDIR/workstation/*.nix
              sudo unbuffer nixos-rebuild switch -I \
                nixos-config=$NIXOSDIR/workstation/configuration.nix \
                |& nom
            }
          '';
        };
        emacs.enable = true;
        helix = { enable = true; };
        hstr.enable = true;
        direnv = {
          enable = true;
          enableBashIntegration = true;
          nix-direnv.enable = true;
        };
        bat = {
          enable = true;
          config = {
            pager = "less -FR";
            theme = "base16";
          };
          extraPackages = with pkgs.bat-extras; [
            batman # man pages
            batpipe # piping
            batgrep # ripgrep
            batdiff # this is getting crazy!
            batwatch # probably my next best friend
            prettybat # trans your sourcecode!
          ];
        };
        git = {
          enable = true;
          userName = "Danilo Reyes";
          userEmail = "CaptainJawZ@protonmail.com";
        };
        htop = {
          enable = true;
          package = pkgs.htop-vim;
        };
      };
      services = {
        lorri.enable = true;
        emacs = {
          enable = true;
          defaultEditor = true;
          package = with pkgs;
            ((emacsPackagesFor emacs-gtk).emacsWithPackages
              (epkgs: [ epkgs.vterm ]));
          startWithUserSession = "graphical";
        };
      };
    };
  };
}

28
workstation/jawz.nix Normal file
View File

@ -0,0 +1,28 @@
# User account definition for jawz: groups and authorized SSH keys.
{ config, lib, pkgs, ... }:
{
  users.users.jawz = {
    isNormalUser = true;
    # Supplementary groups: admin (wheel), network, printing/scanning,
    # media shares, virtualization and sync services.
    extraGroups = [
      "wheel"
      "networkmanager"
      "scanner"
      "lp"
      "piracy"
      "kavita"
      "video"
      "docker"
      "libvirt"
      "rslsync"
    ];
    # NOTE(review): initialPassword ends up world-readable in the nix store;
    # it only applies on first activation, but confirm it gets changed.
    initialPassword = "password";
    openssh = {
      # Public keys allowed to log in as jawz.
      authorizedKeys.keys = [
        "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIB5GaQM4N+yGAByibOFQOBVMV/6TjOfaGIP+NunMiK76 gpodeacerocdreyes@100CDREYES"
        "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMkpeIV9G26W2/e9PsjBx3sNwPGoicJ807ExRGh4KjhW jawz@server"
        "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGjnNIggZweJ+GJKKvFEPhpLcs+t64xXjBmeuERsLFLL jawz@miniserver"
        "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINBEblxSDhWPEo33crSjooeUg4W02ruENxHLmmBqCuIo jawz@galaxy"
      ];
    };
  };
}

View File

@ -0,0 +1 @@
CONFIG_FILE = "/home/jawz/.config/jawz/config.yaml"

View File

@ -0,0 +1 @@
use nix

View File

@ -0,0 +1,96 @@
#!/usr/bin/env python3
"""Setup the argparser"""
import argparse

# Scrapers accepted by the positional argument.
scrapper_types = (
    "push",
    "main",
    "instagram",
    "kemono",
    "comic",
    "manga",
    "webcomic",
)
# Define types of instagram stories
instagram_types = ["posts", "reels", "channel", "stories", "highlights"]


def argparser(users: list) -> argparse.Namespace:
    """Returns an argparser to evaluate user input

    :param users: valid values for --user (personal lists, plus "everyone").
    :return: the parsed command-line namespace.
    """
    # ARG PARSER
    parser = argparse.ArgumentParser(
        prog="Downloader",
        description="Download images and galleries from a wide array of websites"
        " either by using links or chosing from user define lists."
        " This program also takes care of archiving tasks,"
        " that keep the run time fast and prevents downloading duplicates.",
    )
    # Choose the type of scraper (optional positional).
    parser.add_argument(
        choices=scrapper_types,
        nargs="?",
        dest="scrapper",
        help="Select a scrapper.",
    )
    # Parse user list
    parser.add_argument(
        "-u",
        "--user",
        choices=users,
        dest="user",
        help="Selects the personal user list to process. Defaults to everyone",
        default="everyone",
        type=str,
    )
    # Parse individual links (repeatable; values collect into a list of lists).
    parser.add_argument(
        "-i",
        "--input",
        nargs="*",
        dest="link",
        action="append",
        help="Download the provided links",
        type=str,
    )
    # Set the print list flag
    parser.add_argument(
        "-l",
        "--list",
        dest="flag_list",
        action="store_true",
        help="Prints a list of all the added links and prompts for a choice",
    )
    # Set the use archiver flag (store_false: present means "disable").
    parser.add_argument(
        "-a",
        "--no-archive",
        dest="flag_archive",
        action="store_false",
        help="Disables the archiver flag",
    )
    # Set the skip flag (store_false: present means "download everything").
    parser.add_argument(
        "-s",
        "--no_skip",
        dest="flag_skip",
        action="store_false",
        help="Disables the skip function, downloads the entire gallery",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        dest="flag_verbose",
        action="store_true",
        help="Prints the generated commands instead of running them",
    )
    # Instagram-only: filter which kinds of posts to fetch.
    parser.add_argument(
        "-t",
        "--type-post",
        choices=instagram_types,
        nargs="*",
        dest="post_type",
        help="Filters posts on instagram by type",
        default=instagram_types,
        type=str,
    )
    return parser.parse_args()

View File

@ -0,0 +1,46 @@
#!/usr/bin/env python3
from classes.user import User
from functions import LOG
from functions import load_config_variables
from functions import quote
from functions import run
class Gallery:
    """Builds and runs a gallery-dl command line for a user or a comic list.

    Attributes are plain flags/fragments that generate_command() assembles
    into ``self.command``.
    """

    def __init__(self) -> None:
        self.archive: bool = True  # append --download-archive when set
        self.skip_arg: str = ""    # extra skip flags, e.g. " -o skip=true"
        self.link: str = ""        # single link to download
        self.dest: str = ""        # key into user.directories
        self.list: str = ""        # key into user.lists / comic config lists
        self.opt_args: str = ""    # extra optional arguments
        self.command: str = ""     # the generated command line

    def generate_command(self, user: "User | None" = None, is_comic: bool = False) -> None:
        """Generates a command string.

        Bug fix: the original default was ``user: User = User(1)``, which is
        evaluated once at class-definition time — it read the config file as
        a side effect of importing this module and shared a single User
        instance across every call.  The default is now a None sentinel and
        the fallback User(1) is built lazily, only when needed.
        """
        if user is None:
            user = User(1)
        if is_comic:
            # Comics are configured globally, not per user.
            configs = load_config_variables()
            directory = quote(configs["comic"]["download-dir"])
            database = quote(configs["comic"]["database"])
            queue = quote(configs["comic"][f"{self.list}-list"]) if self.list else ""
        else:
            directory = quote(str(user.directories[self.dest]))
            database = quote(str(user.dbs["gallery"]))
            queue = quote(str(user.lists[self.list])) if self.list else ""
        command = f"gallery-dl --sleep {str(user.sleep)}"
        command += self.skip_arg if self.skip_arg else ""
        command += f" --dest {directory}" if self.dest or is_comic else ""
        command += f" --download-archive {database}" if self.archive else ""
        command += self.opt_args if self.opt_args else ""
        # A link and a list are mutually exclusive inputs.
        if self.link and not self.list:
            command += f" {quote(self.link)}"
        if self.list and not self.link:
            command += f" -i {queue}"
        LOG.debug(command)
        self.command = command

    def run_command(self, verbose: bool):
        """Execute (or just print, when verbose) the generated command."""
        run(self.command, verbose)

View File

@ -0,0 +1,105 @@
#!/usr/bin/env python3
"""Define the user class to populate and setup the download environment"""
import re
from random import shuffle
from pathlib import Path
from functions import load_config_variables
from functions import validate_twitter_link
from functions import parse_link
from functions import clean_cache
from functions import LOG
class User:
    """Populate the directory for each user"""

    # pylint: disable=too-many-instance-attributes
    def __init__(self, index) -> None:
        # Merge the per-user config section with the global one.
        config = load_config_variables()
        self.config = config["users"][index] | config["global"]
        self.name = self.config["name"]
        self.sleep = self.config["sleep"]
        # Directories
        # Every "<key>-dir" config entry becomes a Path keyed by the bare name.
        self.directories = {
            str(key).replace("-dir", ""): Path(self.config[f"{key}"])
            for key in filter(lambda x: re.search("-dir", x), self.config.keys())
        }
        # cache/ and lists/ are namespaced per user.
        self.directories["cache"] = self.directories["cache"] / self.name
        self.directories["lists"] = self.directories["lists"] / self.name
        # Files
        self.dbs = {
            "gallery": self.directories["databases"] / f"{self.name}.sqlite3",
            "media": self.directories["databases"] / f"{self.name}_ytdl.txt",
        }
        # Lists
        self.lists = {
            "master": self.directories["lists"] / "watch.txt",
            "push": self.directories["lists"] / "instant.txt",
            "instagram": self.directories["cache"] / "instagram.txt",
            "kemono": self.directories["cache"] / "kemono.txt",
            "main": self.directories["cache"] / "main.txt",
        }

    def _create_directories(self) -> None:
        """Create user directories if they don't exist"""
        # Wipe the per-user cache so the sub-lists are rebuilt fresh.
        clean_cache(self.directories["cache"])
        # Create directories
        for directory in self.directories.keys():
            self.directories[directory].mkdir(parents=True, exist_ok=True)
        # Check for the existence of core files
        # NOTE(review): this branch looks unreachable — the mkdir loop above
        # just created the lists directory; confirm the intended check.
        if not self.directories["lists"].is_dir():
            LOG.error("Lists directory for user %s doesn't exist", self.name)
        # dbs stands for databases, the archives.
        for db in filter(lambda x: not self.dbs[x].is_file(), self.dbs.keys()):
            self.dbs[db].touch()
        for lst in filter(lambda x: not self.lists[x].is_file(), ["master", "push"]):
            self.lists[lst].touch()

    def append_list(self, name: str, line: str) -> None:
        """Appends a line into the given list"""
        with open(self.lists[name], "a+", encoding="utf-8") as a_file:
            a_file.write(line + "\n")

    def _append_cache_list(self, line) -> None:
        """Writes the input line into its respective list,
        depending on what website it belongs to."""
        if re.search("twitter", line):
            # Twitter links are normalized to end in /media first.
            self.append_list("main", validate_twitter_link(line))
        elif re.search(r"kemono\.party", line):
            self.append_list("kemono", line)
        elif re.search("instagram", line):
            self.append_list("instagram", line)
        else:
            self.append_list("main", line)

    def list_manager(self) -> None:
        """Manage all the user list and create sub-lists"""
        self._create_directories()  # Call the function to create necessary cache dirs
        with open(self.lists["master"], "r", encoding="utf-8") as r_file:
            master_content = list(map(lambda x: x.rstrip(), r_file))
        # Create temporary list files segmented per scrapper.
        # Shuffled so no single gallery is always scraped first.
        shuffle(master_content)
        for line in master_content:
            self._append_cache_list(line)

    def save_link(self, link: str) -> None:
        """Checks the master list against a new link
        if unmatched, appends it to the end of the list"""
        with open(self.lists["master"], "r", encoding="utf-8") as r_file:
            links = r_file.read().lower()
        if parse_link(link).lower() in links:
            LOG.info("Gallery repeated, not saving")
            return
        LOG.info("New gallery, saving")
        self.append_list("master", parse_link(link))

View File

@ -0,0 +1 @@
# Entry point: build the package described in derivation.nix with <nixpkgs>.
{ pkgs ? import <nixpkgs> { } }: pkgs.callPackage ./derivation.nix { }

View File

@ -0,0 +1,21 @@
# Packaging for the personal "download" scraper front-end (Python).
{ lib, stdenv, python3Packages }:
with python3Packages;
buildPythonApplication {
  pname = "download";
  version = "2.0";
  src = ./.;
  doCheck = false; # no test suite shipped with the sources
  buildInputs = [ setuptools ];
  # Runtime deps: config parsing via PyYAML (+ type stubs).
  propagatedBuildInputs = [ pyyaml types-pyyaml ];
  meta = with lib; {
    description =
      "Download links from many sources (X, Instagram, YouTube, etc) in an organized manner.";
    # NOTE(review): leftover homepage from another project, kept commented.
    # homepage = "https://github.com/lakoliu/Furtherance";
    license = licenses.gpl3Plus;
    platforms = platforms.linux;
    maintainers = with maintainers; [ CaptainJawZ ];
  };
}

View File

@ -0,0 +1,294 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Rewriting of the download manager script
with the intention to make it
more modular with the use of flags
in order to avoid unnecessary modifications
to the config files.
Also following in line more posix and python rules.
"""
import re
import yaml
from typing import Dict
from functions import LOG
from functions import run
from functions import quote
from functions import list_lines
from functions import load_config_variables
from functions import parse_link
from argparser import argparser
from classes.user import User
from classes.gallery import Gallery

# GLOBAL VARIABLE SECTION
CONFIGS = load_config_variables()
# Enable a default "everyone" flag for when running stuff like download gallery
USERS = ["everyone"] + [user["name"] for user in CONFIGS["users"]]
ARGS = argparser(USERS)
class Video:
    """Just a simple class to unify the Video parameters into a single one."""

    def __init__(self) -> None:
        self.use_archive: bool = True  # append --download-archive when set
        self.link: str = ""            # URL to download
        self.dest: str = ""            # destination directory
        self.database: str = ""        # path to the yt-dlp archive file
def get_index(name: str) -> int:
    """Return the position of the named user in CONFIGS["users"], or -1."""
    for position, entry in enumerate(CONFIGS["users"]):
        if entry["name"] == name:
            return position
    return -1
def parse_gallery(gdl_list: str, user: User) -> None:
    """Processes the gallery-dl command based on the selected gallery

    :param gdl_list: which per-user cache list to download (main/instagram/...).
    :param user: the user whose directories/archives are used.
    """
    gallery = Gallery()
    gallery.archive = ARGS.flag_archive
    # With --no_skip set, force gallery-dl to walk the whole gallery.
    gallery.skip_arg = " -o skip=true" if not ARGS.flag_skip else ""
    gallery.dest = "download"
    gallery.list = gdl_list
    gallery.opt_args = parse_instagram(gdl_list)
    gallery.generate_command(user)
    gallery.run_command(ARGS.flag_verbose)
def parse_instagram(link: str) -> str:
    """Fix instagram links"""
    if "instagram" not in link:
        return ""
    selected = ARGS.post_type
    if isinstance(selected, list):
        selected = ",".join(selected)
    return f" -o include={quote(selected)}"
def video_command(video: Video) -> str:
    """Filters and processes the required command to download videos

    Builds a yt-dlp invocation whose flags depend on the link: metadata
    embedding for YouTube, audio extraction for YouTube Music, a dedicated
    downloader for chaturbate, plain mp4 for anything else.
    """
    command = "yt-dlp"
    rgx_yt = re.compile(r"(https:\/\/youtube|https:\/\/www.youtube|https:\/\/youtu.be)")
    rgx_music = re.compile(r"(https:\/\/music.youtube.*)")
    if re.search(r"chaturbate", video.link):
        return f"chat-dl {video.link}"
    if rgx_yt.search(video.link):
        command += " --embed-subs --embed-thumbnail"
        command += " --embed-metadata --embed-chapters"
        command += f" -o {quote(video.dest + '/%(title)s.%(ext)s')}"
    elif rgx_music.search(video.link):
        # NOTE(review): the download archive is only honoured on this music
        # branch — confirm whether plain YouTube links should use it too.
        command += f" --download-archive {video.database}" if video.use_archive else ""
        command += " --no-playlist --newline -x"
        command += " --audio-format best --add-metadata --audio-quality 0 -o"
        command += f" {quote(video.dest + '/%(title)s.%(ext)s')}"
    else:  # Any other video link, just do it generic
        command += f" -f mp4 -o {quote(video.dest + '/%(title)s.%(ext)s')}"
    LOG.info("%s %s", command, video.link)
    return f"{command} {quote(video.link)}"
def comic_manager(skip_arg: str, category: str) -> None:
    """Process the information to download manga

    :param skip_arg: chapter-range/skip arguments forwarded to gallery-dl.
    :param category: "manga" selects manga/webtoon links, anything else
        selects readcomiconline links from the shared comic list.
    """
    re_cat = "manga|webtoon" if category == "manga" else "readcomiconline"
    with open(CONFIGS["comic"]["comic-list"], "r", encoding="utf-8") as r_file:
        links = list(filter(lambda x: re.search(re_cat, x), r_file))
    for link in links:
        gallery = Gallery()
        gallery.archive = ARGS.flag_archive
        gallery.skip_arg = skip_arg
        gallery.link = link
        gallery.generate_command(is_comic=True)
        gallery.run_command(ARGS.flag_verbose)
def print_webcomics(webcomics: Dict[str, Dict]) -> int:
    """Show every configured webcomic and prompt the user for an index."""
    for idx, comic in enumerate(webcomics["webcomics"]):
        print(list_lines(idx, comic["name"]))
    return int(input("Select a webcomic: "))
def webcomic_manager():
    """Process the information to download webcomics

    Prompts the user to pick a webcomic, then builds a `webcomix custom`
    command from its configured XPaths and runs it in the rating-specific
    destination directory.
    """
    with open(CONFIGS["comic"]["webcomic-list"], "r", encoding="utf-8") as r_file:
        webcomics = yaml.safe_load(r_file)
    usr_input = print_webcomics(webcomics)
    # Determines where the webcomic will be downloaded
    rating = webcomics["webcomics"][usr_input]["type"]
    dest = webcomics["global"][f"{rating}_directory"]
    name = webcomics["webcomics"][usr_input]["name"]
    link = webcomics["webcomics"][usr_input]["url"]
    nxt_code = webcomics["webcomics"][usr_input]["next_code"]
    img_code = webcomics["webcomics"][usr_input]["image_code"]
    LOG.info("The webcomic is %s", dest)
    # webcomix walks pages via the next-page XPath, grabbing the image XPath.
    command = f"cd {quote(dest)} && webcomix custom"
    command += f" {quote(name)}"
    command += " --start-url"
    command += f" {quote(link)}"
    command += f" --next-page-xpath={quote(nxt_code)}"
    command += f" --image-xpath={quote(img_code)}"
    command += " -y --cbz"
    run(command, ARGS.flag_verbose)
def save_comic(link: str) -> None:
    """Add comic/manga link to the list

    Case-insensitive duplicate check against the whole list file before
    appending.
    """
    list_comic = CONFIGS["comic"]["comic-list"]
    with open(list_comic, "r", encoding="utf-8") as r_file:
        links = r_file.read().lower()
    if parse_link(link).lower() in links:
        LOG.info("Graphic novel repeated, not saving")
        return
    LOG.info("New graphic novel, saving")
    with open(list_comic, "a", encoding="utf-8") as w_file:
        w_file.write(link + "\n")
def push_manager(user: User):
    """Filters out the URL to use the appropriate downloader

    Reads the user's "push" list, routes each link to gallery-dl, yt-dlp or
    the comic downloader based on regex matching, then flushes the list.
    """
    # Creates an array which will store any links that should use youtube-dl
    # Profile/gallery pages (as opposed to single posts) across many sites.
    rgx_gallery = re.compile(
        r"(twitter\.com\/\w+((?=.*media)|(?!.*status)))"
        r"|(men\.wikifeet)"
        r"|(furaffinity\.net\/user\/)"
        r"|((deviantart\.com\/\w+(?!.*\/art\/)))"
        r"|(furaffinity\.net\/gallery\/)"
        r"|(furaffinity\.net\/scraps\/)"
        r"|(furaffinity\.net\/favorites\/)"
        r"|(instagram.com(?!\/p\/)\/\w+)"
        r"|(e621\.net((?=\/post\/)|(?!\/posts\/)))"
        r"|(flickr\.com\/photos\/\w+\/(?!\d+))"
        r"|(tumblr\.com(?!\/post\/))"
        r"|(kemono\.party\/(fanbox|gumroad|patreon)(?!\/user\/\d+\/post))"
        r"|(blogspot\.com(?!\/))"
        r"|(rule34\.paheal\.net\/post\/(?!view))"
        r"|(rule34\.xxx\/index\.php\?page\=post&s=(?!view))"
        r"|(pixiv\.net\/(en\/)?((?=users)|(?!artwork)))"
        r"|(fanbox\.cc\/@\w+(?!.*posts\/\d+))"
        r"|(reddit\.com\/(user|u))"
        r"|(baraag\.net\/((@\w+)|(?!\/\d+)))"
        r"|(pinterest\.com\/(?!pin\/\d+))"
        r"|(redgifs\.com\/(users|u|(?!watch)))",
    )
    rgx_video = re.compile("youtu.be|youtube|pornhub|xtube|xvideos|chaturbate")
    rgx_comic = re.compile("readcomiconline|mangahere|mangadex|webtoons")
    with open(user.lists["push"], "r", encoding="utf-8") as r_file:
        links = list(map(lambda x: x.rstrip(), r_file))
    # Partition the links by downloader; "other" is everything unmatched.
    links_galleries = filter(rgx_gallery.search, links)
    links_videos = filter(rgx_video.search, links)
    links_comics = filter(rgx_comic.search, links)
    links_other = filter(
        lambda x: (not rgx_video.search(x))
        and (not rgx_gallery.search(x))
        and (not rgx_comic.search(x)),
        links,
    )
    for link in links_galleries:
        gallery = Gallery()
        gallery.archive = ARGS.flag_archive
        gallery.skip_arg = " -o skip=true" if not ARGS.flag_skip else ""
        gallery.link = parse_link(link)
        gallery.dest = "download"
        gallery.opt_args = parse_instagram(link)
        gallery.generate_command(user)
        gallery.run_command(ARGS.flag_verbose)
        # Remember the gallery in the user's master watch list.
        user.save_link(link)
    for link in links_comics:
        # Limit chapters on a first pass unless --no_skip was given.
        if ARGS.flag_skip and re.search(r"readcomiconline", link):
            skip_arg = " --chapter-range 1"
        elif ARGS.flag_skip and re.search(r"mangahere|webtoons", link):
            skip_arg = " --chapter-range 1-5"
        else:
            skip_arg = ""
        gallery = Gallery()
        gallery.archive = ARGS.flag_archive
        gallery.skip_arg = skip_arg
        gallery.link = link
        gallery.generate_command(is_comic=True)
        gallery.run_command(ARGS.flag_verbose)
        save_comic(link)
    for link in links_videos:
        video = Video()
        video.use_archive = ARGS.flag_archive
        video.link = link
        video.dest = f"{user.directories['media']}"
        video.database = quote(f"{user.dbs['media']}")
        run(video_command(video), ARGS.flag_verbose)
    for link in links_other:
        LOG.info("Other type of download %s", link)
        gallery = Gallery()
        gallery.archive = False
        gallery.skip_arg = " -o directory='[]'"
        gallery.link = link
        gallery.dest = "push"
        gallery.generate_command(user)
        gallery.run_command(ARGS.flag_verbose)
    # Flush the push list, cleans all the contents
    # (opening with "w" truncates; the explicit close is redundant inside
    # the with-block but harmless).
    with open(user.lists["push"], "w", encoding="utf-8") as w_file:
        w_file.close()
def scrapper_manager(user: User) -> None:
    """Analyze the user arguments and call in functions

    Dispatches ARGS.scrapper to the matching downloader for the given user.
    Fixes over the original: a `skip_arg` local computed in the first branch
    was never used (parse_gallery derives its own), and the fragile substring
    membership tests (`ARGS.scrapper in "push"`, which also matches "pu",
    "us", ...) are replaced with exact comparisons — identical behaviour for
    every valid scrapper choice.
    """
    user.list_manager()
    if re.search(r"main|instagram|kemono", ARGS.scrapper):
        parse_gallery(ARGS.scrapper, user)
    elif ARGS.scrapper == "push":
        push_manager(user)
    elif ARGS.scrapper in ("comic", "manga"):
        # readcomiconline: first chapter only; manga sites: chapters 1-5.
        skip_arg = " --chapter-range 1" if ARGS.flag_skip else ""
        skip_arg += "-5" if ARGS.scrapper == "manga" else ""
        comic_manager(skip_arg, ARGS.scrapper)
    elif ARGS.scrapper == "webcomic":
        webcomic_manager()
def scrap_everyone() -> None:
    """Iterates over every user of my scrapper"""
    for current_user in CONFIGS["users"]:
        user = User(get_index(current_user["name"]))
        LOG.info("Scrapping %s for %s", ARGS.scrapper, current_user["name"])
        scrapper_manager(user)
def main():
    """Main module to decide what to do based on the parsed arguments

    Fix: the original tested ``ARGS.user in "everyone"`` — a substring check
    that would also be true for any user whose name happens to be a substring
    of "everyone" (e.g. "one" or "very"); it is now an exact comparison.
    """
    if ARGS.scrapper:
        rgx_shared = re.compile("push|main|instagram|kemono")
        if (ARGS.user == "everyone") and (rgx_shared.search(ARGS.scrapper)):
            scrap_everyone()
        else:
            scrapper_manager(User(get_index(ARGS.user)))
    elif ARGS.link:
        # NOTE(review): substring match — a user name merely containing
        # "jawz" would also be treated as admin; confirm that is intended.
        is_admin = re.search(r"everyone|jawz", ARGS.user)
        user = User(get_index("jawz" if is_admin else ARGS.user))
        for arg_link in ARGS.link[0]:
            user.append_list("push", parse_link(arg_link))
        push_manager(user)


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,112 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Personal functions to aid on multiple scripts"""
import sys
import fileinput
import re
import os
import logging
from pathlib import Path
import yaml
# Module-wide dry-run toggle forwarded to run() by the helpers below.
VERBOSE_G = False
# Root logger wired to stderr with a compact "[file][LEVEL] func 'msg'" format.
LOG = logging.getLogger()
HANDLER = logging.StreamHandler()
FORMATTER = logging.Formatter(
    "[%(filename)s][%(levelname)s] %(funcName)s '%(message)s'"
)
HANDLER.setFormatter(FORMATTER)
LOG.addHandler(HANDLER)
LOG.setLevel(logging.INFO)
def validate_twitter_link(line: str) -> str:
    """Return the link guaranteed to end with the /media suffix."""
    # Append /media only when it is not already the final path component.
    return line if line.endswith("/media") else f"{line}/media"
def parse_link(link: str) -> str:
    """Normalize a link; twitter profile URLs get /media appended."""
    is_twitter_profile = re.search(r"(twitter\.com\/\w+(\/)?(?!.*status))", link)
    if not is_twitter_profile:
        LOG.debug("No modifications needed for the link %s", link)
        return link
    fixed = validate_twitter_link(link)
    LOG.debug("Processed link %s", fixed)
    return fixed
def load_config_variables():
    """Read and parse ~/.config/jawz/config.yaml into a dict."""
    path = Path("~/.config/jawz/config.yaml").expanduser()
    with open(path, encoding="utf-8") as handle:
        return yaml.safe_load(handle)
def clean_cache(directory: Path):
    """Recursively delete the contents of *directory*, then the directory.

    Bug fix: the original only unlinked top-level files and ``rmdir``-ed
    immediate subdirectories, so it raised OSError whenever a
    subdirectory still contained files. It also shadowed the ``dir``
    builtin. Missing paths are a silent no-op, as before.
    """
    if not directory.is_dir():
        return  # nothing to do: missing path or a plain file
    for entry in directory.iterdir():
        if entry.is_dir() and not entry.is_symlink():
            clean_cache(entry)  # depth-first so rmdir always sees empty dirs
        else:
            entry.unlink()  # regular files and symlinks
    directory.rmdir()
def run(command: str, verbose: bool):
    """Execute *command* through the shell, or just print it in verbose mode."""
    # pylint: disable=subprocess-run-check
    # Verbose acts as a dry run: show the command instead of executing it.
    if verbose:
        print(command)
        return
    os.system(command)
def list_lines(i: int, line: str) -> str:
    """Format *line* as entry number *i* of a numbered list."""
    return "{}) {}".format(i, line)
def quote(line: str) -> str:
    """Wrap *line* in double quotes (used when building shell commands)."""
    return '"' + line + '"'
def sort_txt_file(file_path: Path):
    """Sort every line alphabetically
    remove duplicated and empty lines"""
    # Rewrites the file in place by shelling out to GNU sort/sed via run();
    # honors the module-wide VERBOSE_G dry-run flag.
    file = str(file_path.resolve())
    # -u deduplicates while sorting; -o writes back to the same file.
    run(f"sort -u {quote(file)} -o {quote(file)}", VERBOSE_G)
    # Drop empty lines.
    run(f"sed -i '/^$/d' {quote(file)}", VERBOSE_G)
    # Normalize http: to https:.
    run(f'sed -i -e "s,http:,https:," {quote(file)}', VERBOSE_G)
    # fix this using strip on python
    # line.strip("/")
    run(f'sed -i -e "s,/$,," {quote(file)}', VERBOSE_G)  # trailing /
def randomize_txt_file(file_path: Path):
    """Randomize the order of the
    lines of the txt file"""
    # Shells out to GNU `sort -R`, rewriting the file in place.
    file = str(file_path.resolve())
    run(f"sort -R {quote(file)} -o {quote(file)}", VERBOSE_G)
def parse_list(file):
    """Force https on every line, strip trailing slashes, then sort the file.

    Bug fix: the original computed
    ``re.compile(r"\/$", 0).sub(r"\/$", "")`` — a substitution inside an
    EMPTY string — and then ``print``-ed that to stdout after truncating
    the file, which wiped the list instead of rewriting it.
    """
    # First pass: upgrade http:// to https:// in place.
    for line in fileinput.input(file, inplace=True):
        sys.stdout.write(str(line).replace("http://", "https://"))
    # Second pass: drop a trailing "/" from every line and write back.
    with open(file, "r+", encoding="utf-8") as open_file:
        f_content = open_file.read()
        f_content = re.sub(r"/$", "", f_content, flags=re.MULTILINE)
        open_file.seek(0)
        open_file.truncate()
        open_file.write(f_content)
    sort_txt_file(file)

View File

@ -0,0 +1,16 @@
[metadata]
name = download
version = 1.5
[options]
py_modules =
download
functions
argparser
classes.gallery
classes.user
[options.entry_points]
console_scripts =
download = download:main

View File

@ -0,0 +1,3 @@
# Minimal setuptools shim: all package metadata lives in setup.cfg.
from setuptools import setup
setup()

View File

@ -0,0 +1,27 @@
# Dev shell for the download scripts: Python with YAML tooling.
{ pkgs ? import <nixpkgs> { } }:
with pkgs;
mkShell {
  packages = [
    (python3.withPackages (ps: with ps; [ setuptools pyyaml types-pyyaml ]))
    # webcomix build kept for reference; disabled mid-migration.
    # (buildPythonApplication rec {
    #   pname = "webcomix";
    #   version = "3.9.0";
    #   src = fetchFromGitHub {
    #     inherit pname version;
    #     owner = "J-CPelletier";
    #     repo = pname;
    #     rev = "v${version}";
    #     sha256 = "sha256-hCnic8Rd81qY1R1XMrSME5ntYTSvZu4/ANp03nCmLKU=";
    #   };
    #   doCheck = false;
    #   propagatedBuildInputs =
    #     [ click scrapy scrapy-splash scrapy-fake-useragent tqdm ];
    # })
  ];
  buildInputs = [
  ];
}

View File

@ -0,0 +1,136 @@
#!/usr/bin/env python3
# Imports
import os
import math
# Function for calculating the appropriate bitrate to use during conversion
# Function for calculating the appropriate bitrate to use during conversion
def get_bitrate(duration, filesize, audio_br):
    """Return (video_bitrate, min_bitrate, max_bitrate) in kbit/s.

    filesize is the total bit budget; the audio bitrate is subtracted
    before spreading the remainder over the clip duration.
    """
    video_br = math.floor(filesize / duration - audio_br)
    lower = video_br * 0.50
    upper = video_br * 1.45
    return video_br, lower, upper
def encode(ffmpeg_string, output_name, fs):
    """Run the assembled ffmpeg command and check the result fits in *fs* MB.

    Returns True when the output is still too large and the caller must
    re-run with a smaller target, False when the file is ready.
    """
    os.system(ffmpeg_string)
    out_path = "/dev/shm/ffmpeg/out/{output_name}".format(output_name=output_name)
    end_size = os.path.getsize(out_path) * 0.00000095367432  # bytes -> MB
    if end_size < fs:
        print(
            ffmpeg_string.replace("\t", "")
            + "\nThe FFMPEG string above has yielded a file whose size is "
            + str(end_size)
            + "MB.\n{output_name} is ready for Discord.\n".format(
                output_name=output_name
            )
        )
        return False
    print(
        ffmpeg_string.replace("\t", "")
        + "\nThe FFMPEG string above has yielded a file whose size is "
        + str(end_size)
        + "MB.\n{output_name} is NOT ready for Discord, and will be re-run.\nMy bad.".format(
            output_name=output_name
        )
    )
    return True
def time_calculations(fname, length):
    """Parse an optional HHMMSS[-HHMMSS] prefix out of *fname*.

    Returns (duration_seconds, ffmpeg_section_flags):
      * "HHMMSS-HHMMSS..." -> clip between both stamps ("-ss ... -to ...")
      * "HHMMSS..."        -> clip from the stamp to the end ("-ss ...")
      * anything else      -> whole file (*length*), no flags.

    Bug fix: the original used bare ``except:`` clauses, which also
    swallow KeyboardInterrupt/SystemExit; only ValueError (raised by
    ``int()`` on a non-numeric prefix) is expected here.
    """
    startstring = fname[0:2] + ":" + fname[2:4] + ":" + fname[4:6]
    endstring = fname[7:9] + ":" + fname[9:11] + ":" + fname[11:13]
    try:
        int(fname[0:6])
        startseconds = (
            int(fname[0:2]) * 60 * 60 + int(fname[2:4]) * 60 + int(fname[4:6])
        )
        try:
            int(fname[11:13])
            endseconds = (
                int(fname[7:9]) * 60 * 60 + int(fname[9:11]) * 60 + int(fname[11:13])
            )
            duration = endseconds - startseconds
            timestamped_section = f"-ss {startstring} -to {endstring}"
        except ValueError:
            # No (numeric) end stamp: clip runs to the end of the file.
            duration = length - startseconds
            timestamped_section = f"-ss {startstring}"
    except ValueError:
        # No timestamp prefix at all: encode the whole file.
        duration = length
        timestamped_section = ""
    return duration, timestamped_section
# --- Script body: pick up the single input clip from the ramdisk ---
fname = os.listdir("/dev/shm/ffmpeg/in/")[0]
# Strip spaces from the filename so it can be spliced into shell commands.
os.rename("/dev/shm/ffmpeg/in/" + fname, "/dev/shm/ffmpeg/in/" + fname.replace(" ", ""))
fname = fname.replace(" ", "")
# ffprobe to calculate the total duration of the clip.
length = math.floor(
    float(
        os.popen(
            "ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 /dev/shm/ffmpeg/in/{fname}".format(
                fname=fname
            )
        ).read()
    )
)
duration, timestamped_section = time_calculations(fname, length)
run = True
# Encoding knobs come from the environment (set by the calling script).
reso = os.getenv("reso")
codec = os.getenv("codec")
audio_br = os.getenv("audio_br")  # NOTE(review): dead — overwritten next line
audio_br = int(str(os.getenv("audio_br")))
fs = float(str(os.getenv("fs")))  # target size in MB
target_fs = fs
# Per-codec two-pass argument templates plus output naming.
codecs = {
    "vp9": {
        "pass1": f"-vf scale={reso} -g 240 -threads 8 -speed 4 -row-mt 1 -tile-columns 2 -vsync cfr -c:v libvpx-vp9 -pass 1 -an",
        "pass2": f"-vf scale={reso} -g 240 -threads 8 -speed 2 -row-mt 1 -tile-columns 2 -c:v libvpx-vp9 -c:a libopus -pass 2",
        "output_name": "small_" + fname.replace(".mp4", ".webm"),
    },
    "x264": {
        "pass1": f"-vf scale={reso} -vsync cfr -c:v libx264 -pass 1 -an",
        "pass2": f"-vf scale={reso} -c:v libx264 -c:a aac -pass 2 ",
        "output_name": "small_" + fname,
    },
    "x265": {
        "pass1": f"-vf scale={reso} -c:v libx265 -vsync cfr -x265-params pass=1 -an",
        "pass2": f"-vf scale={reso} -x265-params pass=2 -c:v libx265 -c:a aac",
        "output_name": "small_" + fname,
    },
}
# Re-encode, shrinking the target by 0.2 MB per attempt, until it fits.
while run:
    # Conversion to KiB
    end_fs = fs * 8192
    br, minbr, maxbr = get_bitrate(
        duration=duration, filesize=end_fs, audio_br=audio_br
    )
    # Two chained ffpb (ffmpeg wrapper) invocations: analysis pass then encode.
    ffmpeg_string = f"""
    ffpb {timestamped_section} -hwaccel cuda -i /dev/shm/ffmpeg/in/{fname} -y \
    {codecs[str(codec)]['pass1']} \
    -b:v {br}k -minrate {minbr}k -maxrate {maxbr}k \
    -f null /dev/null && \
    ffpb {timestamped_section} -hwaccel cuda -i /dev/shm/ffmpeg/in/{fname} \
    {codecs[str(codec)]['pass2']} \
    -b:a {audio_br}k -b:v {br}k -minrate {minbr}k -maxrate {maxbr}k \
    /dev/shm/ffmpeg/out/{codecs[str(codec)]['output_name']} -y
    """
    run = encode(
        ffmpeg_string, output_name=codecs[str(codec)]["output_name"], fs=target_fs
    )
    if run:
        fs = fs - 0.2

View File

@ -0,0 +1,122 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p bash gum trashy fd ripgrep mediainfo
replace_extension() {
    # Print the basename of $1 with its final extension swapped for $2.
    local stem
    stem=$(basename "$1")
    echo "${stem%.*}.$2"
}
convert_gif() {
    # Re-encode $1 into a 12 fps, 480px-wide GIF in the current directory.
    # NOTE(review): file_newname is not declared local — it leaks globally.
    file_newname=$(replace_extension "$1" gif)
    ffpb -i "$(realpath "$1")" -vf fps=12,scale=480:-1,smartblur=ls=-0.5 "$file_newname"
}
convert_av1() {
    # Re-encode $1 to AV1 in /dev/shm, trash the source, then move the
    # result next to where the original lived.
    local out_name temp_file final_path
    out_name=$(replace_extension "$1" mp4)
    temp_file=/dev/shm/$out_name
    final_path=$(dirname "$(realpath "$1")")/$out_name
    ffpb -i "$1" \
        -c:v libaom-av1 \
        -threads 12 -cpu-used 7 \
        "$temp_file"
    trash "$1"
    mv -i "$temp_file" "$final_path"
}
convert_mp4() {
    # Re-encode $1 to x265/mp4 in /dev/shm, trash the source, then move
    # the result next to where the original lived.
    local file_newname
    file_newname=$(replace_extension "$1" mp4)
    local file_tempdest=/dev/shm/$file_newname
    local file_destination
    file_destination=$(dirname "$(realpath "$1")")/$file_newname
    ffpb -i "$1" \
        -c:v libx265 \
        -preset veryslow \
        "$file_tempdest"
    trash "$1"
    mv -i "$file_tempdest" "$file_destination"
}
convert_discord() {
    # Shrink $1 to a Discord-friendly file via the ffmpeg4discord helper.
    # $2 = name prefix, $3 = audio kbit/s, $4 = size limit MB, $5 = resolution.
    local file_newname
    file_newname=$2_$(replace_extension "$1" mp4)
    local dir_ram=/dev/shm/ffmpeg
    mkdir -p $dir_ram/{in,out}
    # First transcode to h264 into the ramdisk "in" slot the helper expects.
    ffpb -hwaccel cuda -i "$(realpath "$1")" \
        -c:v h264_nvenc \
        "$dir_ram"/in/discord.mp4
    cd "$dir_ram" || exit
    # ffmpeg4discord reads its parameters from the environment.
    codec=x264 audio_br=$3 fs=$4 reso=$5 ffmpeg4discord
    mv "$dir_ram"/out/small_discord.mp4 ~/"$file_newname"
    command rm -rf "$dir_ram"
}
operation=$(gum choose mp4 av1 discord nitro gif enc265)

# Fill the global to_convert array from an interactive fzf multi-pick of
# files under the current directory. The original repeated this loop
# verbatim in five of the six branches below.
select_files() {
    to_convert=()
    while IFS= read -r file; do
        to_convert+=("$file")
    done < <(fd . "$(pwd)" -tf -aL | fzf --multi -i)
}

case $operation in
1 | mp4)
    select_files
    for file in "${to_convert[@]}"; do
        convert_mp4 "$file"
    done
    ;;
2 | av1)
    select_files
    for file in "${to_convert[@]}"; do
        convert_av1 "$file"
    done
    ;;
3 | discord)
    select_files
    for file in "${to_convert[@]}"; do
        convert_discord "$file" discord 96 8.0 "1280x720"
    done
    ;;
4 | nitro)
    select_files
    for file in "${to_convert[@]}"; do
        convert_discord "$file" nitro 128 50.0 "1920x1080"
    done
    ;;
5 | gif)
    select_files
    for file in "${to_convert[@]}"; do
        convert_gif "$file"
    done
    ;;
6 | enc265)
    # Collect every video (by extension) not already encoded with x265.
    to_convert=()
    extensions=(flv m4v mpg avi mov ts mkv mp4 webm)
    for ext in "${extensions[@]}"; do
        while IFS= read -r file; do
            if ! (mediainfo "$file" | grep Writing\ library | grep -q x265); then
                to_convert+=("$file")
            fi
        done < <(fd . -e "$ext" -tf -aL)
    done
    for file in "${to_convert[@]}"; do
        convert_mp4 "$file"
    done
    ;;
*)
    echo -n "Please select a valid input"
    ;;
esac

View File

@ -0,0 +1,27 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p bash fd ripgrep
# Scan every Series/<show>/<season> folder and report episode numbers
# that appear in more than one video file.
root=/mnt/pool/multimedia/media/Series
while IFS= read -r directory; do
    while IFS= read -r season; do
        season_episodes=()
        while IFS= read -r episode; do
            # Pull the numeric episode out of the SxxEyy tag; awk '$0*=1'
            # strips leading zeros by forcing a numeric context.
            number="$(basename "$episode" |
                rg --pcre2 -o "S\d+E\d+" |
                rg --pcre2 -o "\d+$" |
                awk '$0*=1')"
            season_episodes+=($((number)))
        done < <(fd . "$season" -tf -d1 \
            -E '*.srt' \
            -E '*.jpg' \
            -E '*.nfo' \
            -E '*.json')
        # awk prints any value it has already seen -> duplicated episodes.
        dupe=$(printf '%s\n' "${season_episodes[@]}" | awk '!($0 in seen){seen[$0];next} 1')
        if [[ -z $dupe ]]; then
            continue
        fi
        echo "The episode $dupe is duplicated on $(basename "$season") of $(basename "$directory")"
        echo "$season"
        echo "_______________"
    done < <(fd . "$directory" -td -d1)
done < <(fd . "$root" -td -d 1)

View File

@ -0,0 +1,167 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p bash gum fd ripgrep eza trash-cli zip unzip
# Comic/manga library roots processed by every rename pass below.
root_directories=(
    /mnt/pool/multimedia/media/Library/Comics
    /mnt/pool/multimedia/media/Library/Manga
    /mnt/pool/multimedia/media/Library/Webtoons
    /mnt/pool/multimedia/media/Library/Espaniol/Manga
)
newname() {
    # Apply the sed -E substitution s/$2/$3/g to the name $1 and print it.
    printf '%s\n' "$1" | sed -E "s/$2/$3/g"
}
separator() {
    # Print a dim horizontal rule via gum.
    gum style --foreground 7 _________________________
}
announce_changes() {
    # Show the pending rename: $1 (old name, red) -> $2 (new name, green).
    echo "Renaming:"
    gum style --foreground 1 "$1"
    echo "Into:"
    gum style --foreground 2 "$2"
    separator
}
rename_file() {
    # For every file matching fd-pattern $1 under the library roots,
    # rename it by replacing sed-regex $2 with $3 in the basename.
    # mv -n: never clobber an existing target.
    while IFS= read -r file; do
        local original_name
        original_name=$(basename "$file")
        local new_name
        new_name=$(newname "$(basename "$file")" "$2" "$3")
        announce_changes "$original_name" "$new_name"
        command mv -n "$(dirname "$file")"/{"$original_name","$new_name"}
    done < <(fd "$1" --absolute-path -tf -s "${root_directories[@]}")
}
rename_directory() {
    # For every directory matching fd-pattern $1 under the library roots,
    # compute a new name (sed-regex $2 -> $3), create it if needed, and
    # move the old directory's contents into it (merging duplicates).
    while IFS= read -r dir; do
        local new_name
        new_name=$(newname "$(basename "$dir")" "$2" "$3")
        local new_dir
        new_dir=$(dirname "$dir")/$new_name
        announce_changes "$dir" "$new_dir"
        echo "Processing..."
        if [ ! -d "$new_dir" ]; then
            echo "$(basename "$new_dir") doesn't exist. Creating it."
            command mkdir -p "$new_dir"
        fi
        if [ -d "$new_dir" ]; then
            echo "$(basename "$new_dir") has been created!, moving the following files:"
            eza "$dir"
            # NOTE(review): the emptied source directory is left behind;
            # the trailing `rmdir` sweep at the end of the script removes it.
            fd . "$dir" -x mv -n {} "$(realpath "$new_dir")"
        fi
        separator
    done < <(fd "$1" --absolute-path -td -s "${root_directories[@]}")
}
# Check directory existence
for dir in "${root_directories[@]}"; do
    if [ -d "$dir" ]; then
        continue
    fi
    echo "directory doesn't exist... creating $dir"
    mkdir -vp "$dir"
done
# Capitalize Special words
# words/Words are parallel arrays: lowercase match -> canonical form.
words=(special tpb full annual)
Words=(Special TPB Full Annual)
counter=0
for word in "${words[@]}"; do
    while IFS= read -r file; do
        new_name=$(newname "$(basename "$file")" "$word" "${Words[$counter]}")
        echo "Inproper capitalization of the word"
        gum style --foreground 1 "$word"
        echo "adjusting it into"
        gum style --foreground 2 "${Words[$counter]}"
        announce_changes "$(basename "$file")" "$new_name"
        command mv -n "$(dirname "$file")"/{"$(basename "$file")","$new_name"}
    done < <(fd "$word" --absolute-path -tf -s "${root_directories[@]}")
    counter=$((counter + 1))
done
# Rename Year files
# set regex_year_grep "\([[:digit:]]{4}\)"
# set regex_year_string "(\()(\d{4})(\))"
# rename_directory $regex_year_grep $regex_year_string \$2
# rename_file $regex_year_grep $regex_year_string \$2
# Rename #_ downloads
regex_hashtag="#_"
rename_directory $regex_hashtag $regex_hashtag "#"
rename_file $regex_hashtag $regex_hashtag "#"
rename_keywords() {
    # Normalize keyword tags: "WORD 12" -> "WORD #12", and a stray
    # "#WORD" (no digits) back to plain "WORD", for dirs and files.
    # Followed by digit
    local regex_digit_fd="$1 \d+"
    local regex_digit="($1 )([[:digit:]]+)"
    rename_directory "$regex_digit_fd" "$regex_digit" "\1#\2"
    rename_file "$regex_digit_fd" "$regex_digit" "\1#\2"
    # Without digit
    # Bug fix: `regex` was assigned without `local`, leaking a global
    # variable into the caller's scope on every invocation.
    local regex="#$1"
    rename_directory "$regex" "$regex" "$1"
    rename_file "$regex" "$regex" "$1"
}
rename_keywords TPB
rename_keywords Special
rename_keywords Annual
# Rename #Full
rename_directory " #Full" " #Full" ""
rename_file " #Full" " #Full" ""
# Rename double space
# NOTE(review): these arguments appear collapsed to single spaces, which
# makes the call a no-op; they were presumably "  " (two spaces) -> " ".
rename_directory " " " " " "
rename_file " " " " " "
# Fix names
# wrongnames/rightname are parallel arrays: bad title -> canonical title.
wrongnames=(
    "Dr. Stone"
    i-dont-want-this-kind-of-hero
    pure-of-heart
    scoob-and-shag
    stick-n-poke
    "Houseki no Kuni"
    "Gantz E"
    "Gantz G"
)
rightname=(
    "Dr. STONE"
    "I DON'T WANT THIS KIND OF HERO"
    "Pure of Heart"
    "Scoob and Shag"
    "Stick n' Poke"
    "Land of the Lustrous"
    "Gatz:E"
    "Gantz:G"
)
# NOTE(review): "Gatz:E" above looks like a typo for "Gantz:E".
counter=0
for wrongname in "${wrongnames[@]}"; do
    rename_directory "$wrongname" "$wrongname" "${rightname[$counter]}"
    rename_file "$wrongname" "$wrongname" "${rightname[$counter]}"
    counter=$((counter + 1))
done
# Merge TPB (Part X) files
# Each "TPB (Part N).cbz" is unzipped into a shared folder and re-zipped
# into a single merged "<name> TPB.cbz"; sources go to the trash.
while IFS= read -r file; do
    new_name=$(newname "$(basename "$file" .cbz)" "TPB \(Part [[:digit:]]+\)" TPB)
    extract_dir=$(realpath "$(dirname "$file")"/"$new_name")
    if [ ! -d "$extract_dir" ]; then
        mkdir -p "$extract_dir"
    fi
    unzip "$file" -d "$extract_dir"/"$(basename "$file" .cbz)"
    cd "$extract_dir" || exit
    zip -r "$(realpath "$(dirname "$file")")"/"$new_name"\.cbz ./
    trash "$file"
    trash "$extract_dir"/"$(basename "$file" .cbz)"
done < <(fd "Part \d+" --absolute-path -tf -s "${root_directories[@]}")
# Normalize permissions and sweep empty directories left by the merges.
# sudo fd . --absolute-path -tf -td "${root_directories[@]}" -x chown jawz:kavita {}
fd . --absolute-path -tf "${root_directories[@]}" -x chmod 664 {}
fd . --absolute-path -td "${root_directories[@]}" -x chmod 775 {}
fd . --absolute-path -td -te "${root_directories[@]}" -x rmdir {}

View File

@ -0,0 +1,59 @@
#!/run/current-system/sw/bin/bash
# Cron tasks
# Nextcloud housekeeping — only on hosts where the occ wrapper exists.
if type /run/current-system/sw/bin/nextcloud-occ 2>/dev/null; then
    /run/current-system/sw/bin/nextcloud-occ preview:pre-generate
    /run/current-system/sw/bin/nextcloud-occ face:background_job -t 900
fi
# Sync GDL stuff
# Move finished gallery-dl downloads into each user's Nextcloud tree;
# anything already present at the destination lands in a "Dupe" folder
# for manual review.
root=/mnt/pool/scrapping
cd $root || exit
set -- Aqp Ghekre
for user in "$@"; do
    originDir=$root/$user
    destDir=/mnt/pool/nextcloud/$user/files/Requested
    destDirDup=/mnt/pool/nextcloud/$user/files/RequestedDupePlzCheckNDel
    if [ ! -d "$destDir" ]; then
        echo "$destDir does not exist, creating..."
        mkdir -p "$destDir"
    fi
    cd "$originDir" || exit
    # Skip in-progress .part files.
    find . -type f -not -name '*.part' | while read -r file; do
        destination=$destDir/"$(echo "$file" | sed "s/^\.\///")"
        destinationDup=$destDirDup/"$(echo "$file" | sed "s/^\.\///")"
        if [ ! -f "$destination" ]; then
            echo "Safe to move $(basename "$file")"
            if [ ! -d "$(dirname "$destination")" ]; then
                echo "Creating parent directory..."
                mkdir -p "$(dirname "$destination")"
            fi
            mv -n "$file" "$destination"
        else
            echo "Duplicated encountered $(basename "$file")"
            if [ ! -d "$(dirname "$destinationDup")" ]; then
                echo "Creating parent directory..."
                mkdir -p "$(dirname "$destinationDup")"
            fi
            mv -n "$file" "$destinationDup"
        fi
    done
    find ./ -mindepth 1 -type d -empty -delete
    # Hand ownership to the Nextcloud service account (uid 987 / gid 988).
    chown 987:988 -R "$destDir"
    find "$destDir" -type d -exec chmod -R 755 {} \;
    find "$destDir" -type f -exec chmod -R 644 {} \;
    if [ -d "$destDirDup" ]; then
        chown 987:988 -R "$destDirDup"
        find "$destDirDup" -type d -exec chmod -R 755 {} \;
        find "$destDirDup" -type f -exec chmod -R 644 {} \;
    fi
    # Make Nextcloud pick up the externally-moved files.
    if type /run/current-system/sw/bin/nextcloud-occ 2>/dev/null; then
        /run/current-system/sw/bin/nextcloud-occ files:scan --all
    fi
done

View File

@ -0,0 +1,51 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p bash fd borgbackup gum ripgrep
# Rebuild a master watch-list from every borg archive, diff it against the
# live list ($LW — expected from the environment), then optionally purge
# the pika backups.
BORG_PASSPHRASE=$(gum input --password --placeholder "Type borg password")
export BORG_PASSPHRASE
d_root=$HOME/pika
f_string=home/jawz/.config/jawz/lists/jawz/watch.txt
d_borg=/mnt/pool/backups/pika/lists
# Extract the watch list from each archive and concatenate them all.
while IFS= read -r repo; do
    IFS=" " read -r -a array <<<"$repo"
    repo_id="${array[0]}"
    mkdir -vp "$d_root/$repo_id" && cd "$d_root/$repo_id" || exit
    borg extract $d_borg::"$repo_id" $f_string
    cat "$d_root/$repo_id/$f_string" >>"$d_root/master"
done < <(borg list "$d_borg")
cd "$HOME" || exit
sort -u "$d_root/master" -o "$d_root/sorted"
sort -u "$LW" -o "$LW"
echo "Current $(wc -l <"$LW") archived $(wc -l <"$d_root/sorted")"
echo "Missing lines:"
diff "$d_root/sorted" "$LW"
# look for duped lines with different casing
echo "Duplicated lines:"
while IFS= read -r line; do
    if ! [ "$line" == "${line,,}" ]; then
        if rg "${line,,}" <"$LW"; then
            echo "$line"
        fi
    fi
done <"$LW"
# delete pika backups
if gum confirm "Limpiar pika?"; then
    command rm -rf "$d_root"
    while IFS= read -r repo; do
        IFS=" " read -r -a array <<<"$repo"
        repo_id="${array[0]}"
        gum spin --spinner dot --title "Cleaning $repo_id..." -- borg delete $d_borg::"$repo_id"
    done < <(borg list "$d_borg")
else
    echo "Canceled, no files deleted"
fi
# NOTE(review): these spinner titles reuse $repo_id left over from the
# loop above even though they compact whole repositories.
gum spin --spinner dot --title "Cleaning $repo_id..." -- borg compact "$d_borg"
gum spin --spinner dot --title "Cleaning $repo_id..." -- borg compact /mnt/pool/backups/pika/home

View File

@ -0,0 +1,45 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p bash gnome.zenity rmlint git gum xclip
# Small launcher: operation comes from $1 or an interactive gum menu.
if [ -n "$1" ]; then
    operation=$1
else
    operation=$(gum choose rmlint download git)
fi
case $operation in
# onlyfans)
#     source ~/Development/Python/onlyfans/bin/activate.fish
#     python ~/Development/Git/OnlyFans/start_ofd.py
#     deactivate
rmlint)
    # Deduplicate the pool using reflink clones.
    rmlint -g --types="duplicates" \
        --config=sh:handler=clone \
        /mnt/pool/
    ;;
download)
    # Confirm the clipboard contents, then queue the link on the remote box.
    ENTRY=$(zenity --entry --width=250 --title "Push Manager" \
        --text="Verify the following entry is correct" \
        --add-entry="Clipboard:" --entry-text "$(xclip -o -sel clip)")
    if [ -n "$ENTRY" ]; then
        # kgx -e "download -u jawz -i '$ENTRY'"
        # ssh jawz@45.33.124.254 "echo ""$ENTRY"" >> ~/.config/jawz/lists/jawz/instant.txt"
        kgx -e "ssh jawz@45.33.124.254 ""~/.local/bin/download -u jawz -i ""$ENTRY"" "" "
    else
        zenity --error --width=250 \
            --text "Please verify and try again"
    fi
    ;;
git)
    # fsck + pull every repo under ~/Development/Git.
    git_dir=$HOME/Development/Git
    while IFS= read -r repo; do
        if ! [ -d "$repo/.git" ]; then
            continue
        fi
        cd "$repo" || exit
        gum style --foreground 2 "Updating $(basename "$repo")"
        git fsck --full
        git pull
    done < <(fd . "$git_dir" -td --absolute-path -d 1)
    ;;
esac

View File

@ -0,0 +1,33 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p bash fd
# Split loose files in the CWD into numbered folders of $1 files each;
# mp4/webm files go to dedicated "videos"/"webm" folders instead.
before_count=$(fd -tf | wc -l)
i=0
# Bug fix: the original iterated `for file in $(fd ...)`, which
# word-splits on whitespace and mangles filenames containing spaces;
# read fd's output line by line instead.
while IFS= read -r file; do
    dir_name=$(basename "$(pwd)")_$(printf %03d $((i / $1 + 1)))
    mkdir -p "$dir_name"
    mv -i "$file" "$(realpath "$dir_name")"/
    i=$((i + 1))
done < <(fd -d1 -tf -E '*.mp4' -E '*.webm')
while IFS= read -r file; do
    mkdir -p webm
    mv -i "$file" "$(realpath webm)"/
done < <(fd -d1 -tf -e webm)
while IFS= read -r file; do
    mkdir -p videos
    mv -i "$file" "$(realpath videos)"/
done < <(fd -d1 -tf -e mp4)
# Sanity check: moving files around must not change the total count.
after_count=$(fd -tf | wc -l)
if [[ "$before_count" == "$after_count" ]]; then
    echo "No file count differences"
else
    echo "Before count: $before_count"
    echo "After count: $after_count"
fi
sleep 10
exit

View File

@ -0,0 +1,29 @@
#!/usr/bin/env nix-shell
#! nix-shell -i bash -p bash yt-dlp
# Poll a stream and record it with yt-dlp. $1 with a space means
# "picarto_name extra" (picarto.tv), otherwise a chaturbate room name.
minutes=10
time_alive=60
sleep_time=$((minutes * 60))
# NOTE(review): 60 / (600 / 60) = 6 iterations — confirm this matches the
# intended total recording window.
loops=$((time_alive / (sleep_time / time_alive)))
re="[[:space:]]+"
echo $1
if [[ $1 =~ $re ]]; then
    read -ra arr <<<"$1"
    url="https://picarto.tv/${arr[0]}"
else
    url="https://chaturbate.com/$1"
fi
save_dir=/mnt/pool/glue/stream-dl
if [ ! -d "$save_dir" ]; then
    mkdir -p "$save_dir"
fi
cd $save_dir || exit
# Retry loop: yt-dlp exits when the stream is offline; sleep and re-check.
for i in $(seq 1 1 "$loops"); do
    waiting_time=$(((i * sleep_time) / time_alive))
    yt-dlp --hls-use-mpegts --prefer-ffmpeg -o '%(title)s.%(ext)s' "$url"
    echo "sleeping for $sleep_time seconds… been waiting for $waiting_time minutes"
    sleep $sleep_time
done

184
workstation/scripts/sub-sync.sh Executable file
View File

@ -0,0 +1,184 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p bash fd ripgrep file alass ffmpeg gum
# Library roots scanned by every operation below.
MEDIA_ROOT=("/mnt/pool/multimedia/media/Series" "/mnt/pool/multimedia/media/Movies")
REPLACE_DIR="/mnt/pool/multimedia/media"
SUBTITLE_MIRROR="/mnt/pool/multimedia/backups/subtitles"
# Scratch copy used while alass re-times a subtitle.
RAM_SUB="/dev/shm/sub.srt"
# BACKUPS SUBTITLES
backup_subtitles() {
    # Mirror every .srt under MEDIA_ROOT into SUBTITLE_MIRROR, keeping
    # the relative directory layout.
    while IFS= read -r subtitle; do
        echo "backing up $subtitle"
        dest_dir="$(dirname "$subtitle")"
        dest_dir="${dest_dir/$REPLACE_DIR/$SUBTITLE_MIRROR}"
        mkdir -p "$dest_dir"
        cp "$subtitle" "${subtitle/$REPLACE_DIR/$SUBTITLE_MIRROR}"
    done < <(fd . -tf -e srt --absolute-path "${MEDIA_ROOT[@]}")
}
clean_up() {
    # Delete redundant sibling subtitles in directories holding 2+ .srt
    # files for the same base name.
    while IFS= read -r directory; do
        echo "cleaning up $directory"
        subtitles=()
        mapfile -d $'\0' subtitles < <(fd . "$directory" -e srt -tf -d 1 -0)
        if [ "${#subtitles[@]}" -lt 2 ]; then
            continue
        fi
        # NOTE(review): these names are never set in this function.
        unset base_subtitle
        unset subtitles_group
        for subtitle in "${subtitles[@]}"; do
            group=()
            mapfile -d $'\0' group < <(fd --fixed-strings \
                "$(basename "$subtitle" .srt)" "$directory" \
                -d 1 -tf -0 -e srt)
            for key in "${!group[@]}"; do
                # Keep only "<name>.N[.lang].srt" numbered variants.
                if ! echo "${group[$key]}" | rg -P '\.\d{1,2}(\.\w+(-\w+)?)?\.srt' -q; then
                    unset "group[$key]"
                    continue
                fi
                if [ -z "${group[$key]}" ]; then
                    continue
                fi
                # NOTE(review): this removes $subtitle (the outer-loop file),
                # once per surviving group entry — not ${group[$key]}.
                # Verify that is the intended target before relying on it.
                echo "removing $(basename "$subtitle")"
                rm "$subtitle"
            done
        done
    done < <(fd . -td --absolute-path "${MEDIA_ROOT[@]}")
}
rename_languages() {
    # Rename "*.$1.srt" subtitles to "*.$2.srt" (e.g. eng -> en) under
    # every media root.
    while IFS= read -r file; do
        # Bug fix: the stripped suffix was hard-coded to ".eng.srt", so
        # calls like `rename_languages spa es` produced names such as
        # "Movie.spa.srt.es.srt"; strip the language actually requested.
        base=$(basename "$file" ".$1.srt")
        dir=$(dirname "$file")
        echo "renaming sub $base"
        mv "$file" "$dir/$base.$2.srt"
    done < <(fd . -tf --absolute-path "${MEDIA_ROOT[@]}" -e "$1.srt")
}
sync_subtitles() {
    # Re-time every .srt newer than timestamp $1 against the sibling
    # video file using alass, working on a /dev/shm copy so a failed run
    # never leaves a half-written subtitle.
    while IFS= read -r directory; do
        echo "scanning for sync $directory"
        while IFS= read -r subtitle; do
            echo "processing $subtitle"
            video=()
            # Strip the ".N[.lang]" tail to recover the video's base name.
            extension=$(echo "$subtitle" | rg -oP "(\.\w+(-\w+)?)?\.srt")
            basename="$(basename "$subtitle" "$extension")"
            mapfile -d $'\0' video < <(fd "$basename" \
                "$directory" --fixed-strings \
                -e mkv -e mp4 -e avi -e webm -tf -d 1 -0)
            # skips directory if it contains more than 1 video file
            # should never get triggered
            if [ "${#video[@]}" -gt 1 ]; then
                basename "$(dirname "$directory")"
                echo "$(basename "$directory") has many video files: ${#video[@]}"
                continue
            fi
            # update subtitle in ram
            if [ -e "$RAM_SUB" ]; then
                rm "$RAM_SUB"
            fi
            cp "$subtitle" "$RAM_SUB"
            if [ ! -e $RAM_SUB ] && [ ! -e "${video[0]}" ]; then
                continue
            fi
            echo "processing...$subtitle"
            alass-cli "${video[0]}" "$RAM_SUB" "$subtitle"
        done < <(fd . "$directory" -tf -e srt -d 1 --newer "$1")
    done < <(fd . -td --absolute-path "${MEDIA_ROOT[@]}")
}
find_dupes() {
    # Report directories holding multiple video files for one episode,
    # extra videos in movie folders, and Season folders with no videos.
    while IFS= read -r directory; do
        videos=()
        mapfile -d $'\0' videos < <(fd . \
            "$directory" -tf -d 1 -0 \
            -e mkv -e mp4 -e avi -e webm)
        if [ "${#videos[@]}" == 0 ]; then
            if [[ "$directory" != *"Season"* ]]; then
                continue
            fi
            echo "NO FILES ERROR: $directory"
        fi
        if [ "${#videos[@]}" == 1 ]; then
            continue
        fi
        if [ "${#videos[@]}" -gt 1 ]; then
            if [[ "$directory" == *"media/Movies"* ]]; then
                echo "Movie directory has more than a movie"
                continue
            fi
            # Count files carrying each SxxEyy tag; >1 means a duplicate.
            for episode in "${videos[@]}"; do
                episode_number="$(echo "$episode" |
                    rg -oP "S\d+E\d+(-E\d+)? ")"
                episode_files="$(
                    fd "$episode_number" "$directory" --fixed-strings \
                        -tf -d 1 \
                        -e mkv -e mp4 -e avi -e webm | wc -l
                )"
                if [ "$episode_files" == 1 ]; then
                    continue
                fi
                echo ____________________________
                echo "The episode $episode_number is repeated on"
                echo "$directory"
                fd "$episode_number" "$directory" --fixed-strings \
                    -tf -d 1 \
                    -e mkv -e mp4 -e avi -e webm
            done
        fi
    done < <(fd . -td --absolute-path "${MEDIA_ROOT[@]}")
}
# Normalize ownership and permissions before doing any work.
fd . /mnt/pool/multimedia/media/Series/ --owner jawz -x chown sonarr:piracy {}
fd . /mnt/pool/multimedia/media/Movies/ --owner jawz -x chown radarr:piracy {}
fd . "${MEDIA_ROOT[@]}" -td -x chmod 775 {}
fd . "${MEDIA_ROOT[@]}" -tf -x chmod 664 {}
# Canonicalize language suffixes on every run.
rename_languages eng en
rename_languages spa es
rename_languages mx es_MX
# Operation comes from $1 or an interactive gum menu.
if [ -n "$1" ]; then
    operation=$1
else
    operation=$(gum choose backup clean sync all)
fi
# Sync cut-off timestamp: $2, or midnight today.
if [ -n "$2" ]; then
    start_time=$2
else
    start_time="$(date '+%Y-%m-%d') 00:00:00"
fi
case $operation in
backup)
    backup_subtitles
    ;;
clean)
    clean_up
    ;;
sync)
    sync_subtitles "$start_time"
    ;;
dupe)
    # NOTE(review): "dupe" is reachable only via $1 — the gum menu above
    # does not offer it.
    find_dupes
    ;;
all)
    echo "backing up"
    backup_subtitles
    echo "cleaning up"
    clean_up
    echo "syncing"
    sync_subtitles "$start_time"
    ;;
esac

162
workstation/scripts/tasks.sh Executable file
View File

@ -0,0 +1,162 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p bash trashy fd ripgrep file
# Folders whose loose files get renamed and sorted by the passes below.
directories=("$HOME/Pictures/To Organize/" "$HOME/Downloads/" "$HOME/Downloads/Windows")
replace_extension() {
    # Print $1's basename with its final extension replaced by $2.
    local base
    base=$(basename "$1")
    printf '%s.%s\n' "${base%.*}" "$2"
}
generate_random_number() {
    # Print a zero-padded 10-digit number.
    local min=0
    local max=9999999999
    # NOTE(review): bash $RANDOM only yields 0..32767, so despite `max`
    # the result is always 0000000000..0000032767.
    printf "%010d\n" $((min + RANDOM % max))
}
test_name() {
    # Print a random 10-digit filename that collides with nothing under
    # ~/Pictures or ~/Downloads.
    local random_number
    random_number=$(generate_random_number)
    while (($(fd "$random_number"* "$HOME/Pictures/" "$HOME/Downloads/" -tf | wc -l) > 0)); do
        # Bug fix: the retry diagnostics (and an extra echo of the new
        # candidate) used to go to stdout, so callers capturing
        # $(test_name) received several lines instead of one name; the
        # notice now goes to stderr and only the final name is printed.
        echo "Conflicts found, generating a new filename" >&2
        random_number=$(generate_random_number)
    done
    echo "$random_number"
}
# Rename generically-named downloads (tenor/unknown/untitled/…) to a
# random collision-free name, and park Screenshot_* files in
# ~/Pictures/Screenshots.
while IFS= read -r file; do
    regex_str='source|tenor|media|duckduckgo\.com|giphy|'
    regex_str+='(?<!app)image|^download|unknown|zoom|'
    regex_str+='new_canvas|untitled|drawpile|OIG|'
    regex_str+='imgpsh_'
    if ! basename "$file" | rg --pcre2 -q "$regex_str"; then
        continue
    fi
    new_name=$(test_name)
    echo renaming
    echo "$file"
    echo into
    echo "$(dirname "$file")"/"$new_name"
    echo ---------------
    command mv -n "$(dirname "$file")"/{"$(basename "$file")","$new_name"}
    # NOTE(review): $file was potentially renamed just above, so this
    # screenshot check/move can only apply to files the first mv skipped —
    # verify that ordering is intended.
    if ! basename "$file" | rg -q 'Screenshot_\d{8}'; then
        continue
    fi
    echo "moving screenshot $file into $HOME/Pictures/Screenshots/"
    command mv -n "$file" "$HOME/Pictures/Screenshots/"
done < <(fd . "${directories[@]}" -d 1 -tf --absolute-path)
# screenshots=$HOME/Pictures/Screenshots
# Sort screenshots/photos into YEAR/MONTH subfolders by modification time.
classify_directories=("$HOME/Pictures/Screenshots" "$HOME/Pictures/Photos/Camera")
if (($(fd . "${classify_directories[@]}" -tf -d 1 | wc -l) > 0)); then
    while IFS= read -r file; do
        date=$(stat -c "%y" "$file" | rg -o "\d{4}-\d{2}-\d{2}")
        year=$(echo "$date" | rg -o "\d{4}")
        month=$(echo "$date" | rg -o "\d{4}-\d{2}" | rg -o --pcre2 "(?<=-)\d{2}")
        parent_dir=$(dirname "$(realpath "$file")")
        dest_dir=$(realpath "$parent_dir")/$year/$month
        echo "Moving screenshot $(basename "$file") into $dest_dir"
        mkdir -vp "$dest_dir"
        command mv -n "$file" "$dest_dir/"
    done < <(fd . "${classify_directories[@]}" --absolute-path -tf -d 1)
fi
# Where steam screenshots are stored, may need to replace with ur ID
dir_steam=$XDG_DATA_HOME/Steam/userdata/107446271/760/remote
declare -A games
# Insert here new games, put between [] the ID of the game
# You can find it by visiting the $dir_steam directory
# the ID is simply the name of the folder in there.
games+=(
    [386360]=Smite
    [960090]="Bloons Tower Defense 6"
    [648800]=Raft
    [262060]="Darkest Dungeon"
    [234140]="Mad Max"
    [433340]="Slime Rancher"
    [1190460]="Death Stranding"
    [1850570]="Death Stranding"
)
for key in "${!games[@]}"; do
    # Modify this to store your screenshots somewhere else
    dir_dest=$(realpath "$HOME/Pictures/Screenshots/Games")/${games[$key]}
    dir_game=$(realpath "$dir_steam")/$key/screenshots
    # If there are not screenshots currently stored, why bother lol
    if ! [[ -d $dir_game ]]; then #
        continue
    fi
    # If screenshots exist however...
    if ! (($(fd . "$dir_game" -d 1 -tf | wc -l) > 0)); then
        continue
    fi
    # Create destination directory
    mkdir -vp "$dir_dest"
    echo "Moving ${games[$key]} screenshots..."
    fd . "$dir_game" -d 1 -tf -x mv -n {} "$dir_dest"/
    # Delete thumnnails
    echo "Deleting ${games[$key]} thumbnails..."
    rm -rf "$dir_game"/thumbnails
done
# Clearing up empty directories
fd . "$dir_steam" -td -te -x trash {}
# GOG Cyberpunk screenshots live in the wine prefix; collect them too.
cyberpunk_dir=$HOME/Games/gog/cyberpunk-2077/drive_c/users/jawz/Pictures/"Cyberpunk 2077"
if [[ -d $cyberpunk_dir ]]; then
    while IFS= read -r file; do
        echo "Moving cyberpunk screenshots $(basename "$file")"
        command mv -n "$file" "$HOME/Pictures/Screenshots/Games/Cyberpunk 2077/"
    done < <(fd . "$cyberpunk_dir" -tf)
fi
# Expose every installed Proton build to lutris as a wine runner symlink.
proton_dir=$HOME/.steam/steam/compatibilitytools.d
if [[ -d "$proton_dir" ]]; then
    while IFS= read -r protonver; do
        lutrisdir=$XDG_DATA_HOME/lutris/runners/wine/$(basename "$protonver")
        if [ -d "$lutrisdir" ] && [ -L "$lutrisdir" ]; then
            continue
        fi
        echo "Symlink $lutrisdir doesn't exist, creating link..."
        ln -s "$(realpath "$protonver")"/files "$lutrisdir"
    done < <(fd . "$proton_dir" -d 1 -td)
fi
# Drop dangling lutris wine runner symlinks.
fd . "$XDG_DATA_HOME/lutris/runners/wine" -d1 -tl -Lx trash {}
# Fix image files whose extension disagrees with their detected MIME type.
while IFS= read -r file; do
    ext=$(file --mime-type "$file" | rg -o '\w+$')
    correct_ext=${ext,,}
    filename=$(basename -- "$file")
    current_ext="${filename##*.}"
    filename="${filename%.*}"
    if ! echo "$correct_ext" | rg -q 'jpe|jpg|jpeg|png|gif'; then
        continue
    fi
    if [ "$current_ext" == "$correct_ext" ]; then
        continue
    fi
    echo "The file $(basename "$file")" \
        "will be renamed, the propper extension is $correct_ext"
    new_name="$filename".$correct_ext
    # NOTE(review): relies on mv's exit status to detect an existing
    # target; when it fails, hashes are compared and the duplicate removed.
    if command mv -n "$(dirname "$file")"/{"$(basename "$file")","$new_name"}; then
        continue
    fi
    file_hash="$(sha256sum "$file" | gawk '{ print $1 }')"
    if ! echo "$file_hash $(dirname "$file")/$new_name" | sha256sum -c; then
        continue
    fi
    echo "deleting duplicated: $file"
    rm "$file"
done < <(fd . "${directories[@]}" -d 1 -tf)
# Remove Huion driver litter from $HOME.
files_home_clean=(.pki HuionCore.pid DriverUI.pid huion.log)
for file in "${files_home_clean[@]}"; do
    file=$HOME/$file
    if [ ! -e "$file" ]; then
        continue
    fi
    rm -rf "$file"
done

View File

@ -0,0 +1,55 @@
#!/usr/bin/env nix-shell
#! nix-shell -i bash -p bash curl jq dig
# Shell script to update namecheap.com dynamic dns
# for a domain to your external IP address
# NOTE(review): this file embeds live DDNS passwords and API tokens in
# plain text; they are in git history now and should be rotated and
# loaded from the environment or a secrets store instead.
# namecheap
hostnames=(cloud @ 6fxAtnPxEeI8hN)
domain=rotehaare.art
password=60d672be5d9d4828a0f96264babe0ac1
ip=$(curl -s ipecho.net/plain)
for hostname in "${hostnames[@]}"; do
    curl "https://dynamicdns.park-your-domain.com/update?host=$hostname&domain=$domain&password=$password&ip=$ip"
done
# cloudflare
zone_id=833996ed25eb09f1a50606e0457790e4
record=servidos.lat
record_id=6b117173e53a7511ba36ceb9637ede63
cloudflare_token=VdKosfThQmOcuywLOUq9DY4-df9EmbHrDWyf_vUb
# get record_id
# curl -s -X GET "https://api.cloudflare.com/client/v4/zones/${zone_id}/dns_records?type=A&name=${record}" \
#     -H "Authorization: Bearer ${cloudflare_token}" \
#     -H "Content-Type: application/json" | jq -r '{"result"}[] | .[0] | .id'
# Only PUT when the DNS answer no longer contains the current WAN IP.
curr_ip=$(curl -s -X GET https://checkip.amazonaws.com)
curr_reg=$(dig ${record} +short @1.1.1.1)
if echo "${curr_reg}" | grep "${curr_ip}"; then
    echo "$(date --rfc-3339=seconds) - OK - Current record matches current IP (${curr_ip})"
else
    curl -s -X PUT "https://api.cloudflare.com/client/v4/zones/${zone_id}/dns_records/${record_id}" \
        -H "Authorization: Bearer ${cloudflare_token}" \
        -H "Content-Type: application/json" \
        --data "{\"type\":\"A\",\"name\":\"${record}\",\"content\":\"$curr_ip\",\"ttl\":1,\"proxied\":false}" >/dev/null
    echo "$(date --rfc-3339=seconds) - NOK - Record Updated to $curr_ip from ${curr_reg}"
fi
# godaddy
domain=danilo-reyes.com
host=@
APIKey=AEjhf24Sczj_BpoXZmSK1Zha3pvRpRYxnf
APISecret=5pumrt9iMaSxR8U4PjhRCE
WanIP=$(curl -s "https://api.ipify.org")
GDIP=$(curl -s -X GET -H "Authorization: sso-key ${APIKey}:${APISecret}" "https://api.godaddy.com/v1/domains/${domain}/records/A/${host}" | cut -d'[' -f 2 | cut -d']' -f 1)
if [ "$WanIP" != "$GDIP" ] && [ "$WanIP" != "" ]; then
    echo "Actualizando ip godaddy"
    curl -s -X PUT "https://api.godaddy.com/v1/domains/${domain}/records/A/${host}" \
        -H "Authorization: sso-key ${APIKey}:${APISecret}" \
        -H "Content-Type: application/json" \
        -d "[{\"data\": \"${WanIP}\"}]"
fi

View File

@ -0,0 +1,17 @@
{ config, pkgs, lib, ... }: {
  # LAN configuration: NetworkManager-managed, IPv4-only, firewalled,
  # with static hostnames for the local machines.
  networking.useDHCP = lib.mkDefault true;
  networking.enableIPv6 = false;
  networking.networkmanager.enable = true;
  networking.firewall.enable = true;
  # Static name resolution for the home network (appended to /etc/hosts).
  networking.extraHosts = ''
    192.168.1.64 workstation
    192.168.1.69 server
    192.168.1.100 miniserver
  '';
}

View File

@ -0,0 +1,22 @@
{ config, lib, pkgs, ... }:
{
  # NVIDIA graphics stack: proprietary X driver plus hardware
  # video-acceleration backends (VA-API / VDPAU).
  services.xserver.videoDrivers = [ "nvidia" ];
  hardware = {
    opengl = {
      enable = true;
      # Direct Rendering Infrastructure, for both 64-bit and 32-bit
      # clients (32-bit is needed by Steam/Wine-era games).
      driSupport = true;
      driSupport32Bit = true;
      # Translation layers so VA-API clients can use the NVIDIA driver.
      extraPackages = with pkgs; [
        nvidia-vaapi-driver
        vaapiVdpau
        libvdpau-va-gl
      ];
    };
    nvidia = {
      # Kernel modesetting — required for Wayland and smooth VT switching.
      modesetting.enable = true;
      powerManagement.enable = true;
      # Fine-grained power management is for Turing+ laptops with
      # offload mode; disabled here.
      powerManagement.finegrained = false;
    };
  };
}

View File

@ -0,0 +1,13 @@
{ config, lib, pkgs, ... }:
let
  # HP driver set shared by CUPS (printing) and SANE (scanning).
  hpDrivers = with pkgs; [ hplip hplipWithPlugin ];
in {
  # CUPS print server using the HP drivers.
  services.printing.enable = true;
  services.printing.drivers = hpDrivers;
  # Scanner support through the same HP backends.
  hardware.sane.enable = true;
  hardware.sane.extraBackends = hpDrivers;
  # GUI scanning front-end for the main user.
  users.users.jawz.packages = [ pkgs.gnome.simple-scan ];
}

View File

@ -0,0 +1,24 @@
{ config, lib, pkgs, ... }:
# Audio stack: PipeWire replaces PulseAudio (and provides ALSA and
# PulseAudio compatibility layers). The commented-out nix-gaming
# low-latency wiring is kept for a possible future re-enable.
# let
#   nixGaming = import (builtins.fetchTarball
#     "https://github.com/fufexan/nix-gaming/archive/master.tar.gz");
# in
{
  imports = [
    # nixGaming.nixosModules.pipewireLowLatency
  ];
  # PulseAudio daemon and the legacy sound module are disabled in
  # favor of PipeWire.
  hardware.pulseaudio.enable = false;
  # rtkit grants PipeWire realtime scheduling priority.
  security.rtkit.enable = true;
  sound.enable = false;
  services.pipewire = {
    enable = true;
    # ALSA compatibility, including for 32-bit applications.
    alsa.enable = true;
    alsa.support32Bit = true;
    # PulseAudio-protocol compatibility for Pulse clients.
    pulse.enable = true;
    # lowLatency = {
    #   enable = true;
    #   quantum = 64;
    #   rate = 48000;
    # };
  };
}

View File

@ -0,0 +1,26 @@
{ config, lib, pkgs, ... }:
let
  # Wrap the tasks shell script as a runnable package.
  jawzTasks =
    pkgs.writeScriptBin "tasks" (builtins.readFile ./scripts/tasks.sh);
in {
  systemd.user = {
    services.tasks = {
      restartIfChanged = true;
      description = "Run a tasks script which keeps a lot of things organized";
      wantedBy = [ "default.target" ];
      # Tools the script expects on PATH.
      path = [ pkgs.bash pkgs.nix jawzTasks ];
      serviceConfig = {
        Restart = "on-failure";
        RestartSec = 30;
        ExecStart = "${jawzTasks}/bin/tasks";
      };
    };
    # BUG FIX: this timer was previously nested inside services.tasks,
    # producing the invalid option path
    # systemd.user.services.tasks.timers.tasks, so the timer unit was
    # never generated. It belongs under systemd.user.timers.
    timers.tasks = {
      enable = true;
      description =
        "Run a tasks script which keeps a lot of things organized";
      wantedBy = [ "timers.target" ];
      # Fire every 10 minutes.
      timerConfig = { OnCalendar = "*:0/10"; };
    };
  };
}