#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Personal functions to aid on multiple scripts"""

import fileinput
import logging
import os
import re
import sys
from pathlib import Path

import yaml

VERBOSE_G = False

LOG = logging.getLogger()
HANDLER = logging.StreamHandler()
FORMATTER = logging.Formatter(
    "[%(filename)s][%(levelname)s] %(funcName)s '%(message)s'"
)
HANDLER.setFormatter(FORMATTER)
LOG.addHandler(HANDLER)
LOG.setLevel(logging.INFO)


def validate_twitter_link(line: str) -> str:
    """Return a fixed link that ends with /media."""
    # if the url already ends with /media, keep the line as-is
    if re.search(r"\/media$", line):
        return line
    # otherwise append /media
    return f"{line}/media"


def parse_link(link: str) -> str:
    """Fix profile links so they point at the media tab."""
    if not re.search(r"(twitter\.com\/\w+(\/)?(?!.*status))", link):
        LOG.debug("No modifications needed for the link %s", link)
        return link
    # otherwise make sure the link ends with /media
    fixed_link = validate_twitter_link(link)
    LOG.debug("Processed link %s", fixed_link)
    return fixed_link
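
# Illustrative examples (the usernames are made up, not from the original code):
#   parse_link("https://twitter.com/SomeArtist")
#       -> "https://twitter.com/SomeArtist/media"
#   parse_link("https://twitter.com/SomeArtist/status/123")
#       -> returned unchanged, since status links are excluded by the regex.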


def load_config_variables():
    """Loads all the variables from the config file"""
    config_file = Path("~/.config/jawz/config.yaml")
    with open(config_file.expanduser(), encoding="utf-8") as open_file:
        return yaml.safe_load(open_file)
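
# Minimal usage sketch; the key names below are assumptions for illustration,
# the real ~/.config/jawz/config.yaml may use a different layout:
#   config = load_config_variables()
#   download_dir = config["directories"]["download"]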


def clean_cache(directory: Path):
    """Recursively deletes all the content of a directory,
    including the directory itself."""
    if not directory.is_dir():
        return
    for file in filter(lambda x: x.is_file(), directory.iterdir()):
        file.unlink()
    # recurse into subdirectories so non-empty ones can also be removed
    for sub_dir in filter(lambda x: x.is_dir(), directory.iterdir()):
        clean_cache(sub_dir)
    directory.rmdir()
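
# Usage sketch (the cache path is hypothetical):
#   clean_cache(Path("~/.cache/jawz/thumbnails").expanduser())
# The directory and everything below it are removed; nothing happens if the
# path does not exist or is not a directory.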


def run(command: str, verbose: bool):
    """Run a shell command, or only print it when verbose is set."""
    # This toggle allows for a really easy debug when using -v:
    # in verbose mode the command is shown instead of executed (dry run)
    if verbose:
        print(command)
    else:
        os.system(command)
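
# Example of the dry-run toggle (commands are illustrative):
#   run("echo hello", verbose=True)   # only prints: echo hello
#   run("echo hello", verbose=False)  # actually executes via os.system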


def list_lines(i: int, line: str) -> str:
    """Create a numbered list entry."""
    return f"{i}) {line}"


def quote(line: str) -> str:
    """Quote the line."""
    return f'"{line}"'


def sort_txt_file(file_path: Path):
    """Sort every line alphabetically,
    remove duplicate and empty lines."""
    file = str(file_path.resolve())
    run(f"sort -u {quote(file)} -o {quote(file)}", VERBOSE_G)  # sort, drop duplicates
    run(f"sed -i '/^$/d' {quote(file)}", VERBOSE_G)  # drop empty lines
    run(f'sed -i -e "s,http:,https:," {quote(file)}', VERBOSE_G)  # force https
    # TODO: do this in Python with line.rstrip("/")
    run(f'sed -i -e "s,/$,," {quote(file)}', VERBOSE_G)  # strip trailing /
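
# Example (assumes GNU sort/sed are on PATH, as the commands above require;
# the file path is hypothetical):
#   sort_txt_file(Path("~/lists/artists.txt").expanduser())
# The file is rewritten in place: sorted, de-duplicated, blank lines removed,
# http:// upgraded to https:// and trailing slashes stripped.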


def randomize_txt_file(file_path: Path):
    """Randomize the order of the lines of the txt file."""
    file = str(file_path.resolve())
    run(f"sort -R {quote(file)} -o {quote(file)}", VERBOSE_G)


def parse_list(file):
    """Replace http with https and remove trailing slashes."""
    for line in fileinput.input(file, inplace=True):
        sys.stdout.write(str(line).replace("http://", "https://"))
    with open(file, "r+", encoding="utf-8") as open_file:
        f_content = open_file.read()
        # strip the trailing slash at the end of every line
        f_content = re.sub(r"/$", "", f_content, flags=re.MULTILINE)
        open_file.seek(0)
        open_file.truncate()
        open_file.write(f_content)
    sort_txt_file(file)
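
# Usage sketch (hypothetical path; parse_list also calls sort_txt_file, so the
# same GNU sort/sed assumption applies):
#   parse_list(Path("~/lists/galleries.txt").expanduser())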