flake setup and fragmented

2024-12-06 13:34:53 -06:00
parent 85ee243f8d
commit 3f658af00f
11 changed files with 37 additions and 26 deletions

source/tuhmayto/.envrc Normal file

@@ -0,0 +1 @@
use nix

@@ -0,0 +1,64 @@
<!doctype html>
<html>
  <head>
    <title>Activity Heatmap</title>
    <script src="https://cdn.amcharts.com/lib/5/index.js"></script>
    <script src="https://cdn.amcharts.com/lib/5/xy.js"></script>
    <script src="https://cdn.amcharts.com/lib/5/themes/Animated.js"></script>
  </head>
  <body>
    <div id="chartdiv" style="width: 100%; height: 500px"></div>
    <script>
      am5.ready(function () {
        // Create the chart root inside #chartdiv and apply the animated theme.
        var root = am5.Root.new("chartdiv");
        root.setThemes([am5themes_Animated.new(root)]);

        // XY chart: panning enabled, mouse-wheel zoom disabled.
        var chart = root.container.children.push(
          am5xy.XYChart.new(root, {
            panX: true,
            panY: true,
            wheelX: "none",
            wheelY: "none",
          }),
        );

        // One category per day on the x axis, activity count on the y axis.
        var xAxis = chart.xAxes.push(
          am5xy.CategoryAxis.new(root, {
            categoryField: "date",
            renderer: am5xy.AxisRendererX.new(root, {}),
            tooltip: am5.Tooltip.new(root, {}),
          }),
        );
        var yAxis = chart.yAxes.push(
          am5xy.ValueAxis.new(root, {
            renderer: am5xy.AxisRendererY.new(root, {}),
          }),
        );

        // Column series mapping {date, count} records onto the axes.
        var series = chart.series.push(
          am5xy.ColumnSeries.new(root, {
            name: "Activity",
            xAxis: xAxis,
            yAxis: yAxis,
            valueYField: "count",
            categoryXField: "date",
            tooltip: am5.Tooltip.new(root, {
              labelText: "{valueY}",
            }),
          }),
        );

        // Load the JSON produced by the export script and feed both
        // the category axis and the series from the same records.
        fetch("activity_data.json")
          .then((response) => response.json())
          .then((data) => {
            xAxis.data.setAll(data);
            series.data.setAll(data);
          });

        chart.appear(1000, 100);
      });
    </script>
  </body>
</html>
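
The page loads activity_data.json with fetch(), which browsers typically refuse on file:// URLs, so the directory has to be served over HTTP to preview the chart. A minimal sketch using Python's standard http.server; the port is an arbitrary choice, not something this commit specifies:

# Minimal static server for previewing the chart locally.
# Run from source/tuhmayto next to the HTML file and activity_data.json;
# the port (8000) is an arbitrary choice.
from http.server import HTTPServer, SimpleHTTPRequestHandler

if __name__ == "__main__":
    HTTPServer(("127.0.0.1", 8000), SimpleHTTPRequestHandler).serve_forever()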

source/tuhmayto/dummy.sql Normal file

@@ -0,0 +1,20 @@
-- Create the table if it doesn't exist
CREATE TABLE IF NOT EXISTS activity_log (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    timestamp TEXT NOT NULL
);

-- Insert dummy data for one year
DELETE FROM activity_log; -- Clear existing data

WITH RECURSIVE dates(date) AS (
    SELECT datetime('2023-12-01 00:00:00')
    UNION ALL
    SELECT datetime(date, '+1 hour')
    FROM dates
    WHERE date < datetime('2024-12-01 00:00:00')
)
INSERT INTO activity_log (timestamp)
SELECT date
FROM dates
WHERE random() % 4 = 0; -- Randomly select approximately 25% of hours for activity
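
The seed script can be applied with sqlite3's executescript; a sketch, assuming dummy.sql sits in the working directory and targeting activity_log.db, the filename the reader scripts below connect to:

# Apply dummy.sql to the database the reader scripts connect to.
# Assumes dummy.sql is in the working directory; the target filename
# activity_log.db is taken from fetch_logs() in the scripts below.
import sqlite3

with open("dummy.sql") as f, sqlite3.connect("activity_log.db") as conn:
    conn.executescript(f.read())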

@@ -0,0 +1,42 @@
import sqlite3
from datetime import datetime
from collections import Counter
import json


def fetch_logs() -> list[str]:
    """Return every raw timestamp stored in activity_log."""
    conn = sqlite3.connect("activity_log.db")
    cursor = conn.cursor()
    cursor.execute("SELECT timestamp FROM activity_log")
    logs = [row[0] for row in cursor.fetchall()]
    conn.close()
    return logs


def group_logs_by_day(timestamps: list[str]) -> Counter:
    """Collapse hourly timestamps into a per-day activity count."""
    days = [
        datetime.strptime(ts, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%d")
        for ts in timestamps
    ]
    return Counter(days)


def export_to_json(activity_counts: Counter, output_file: str) -> None:
    """Write [{date, count}, ...] records for the amCharts page."""
    data = [
        {
            "date": date,
            "count": count,
        }
        for date, count in activity_counts.items()
    ]
    with open(output_file, "w") as f:
        json.dump(data, f, indent=4)


if __name__ == "__main__":
    logs = fetch_logs()
    if not logs:
        print("No logs found.")
    else:
        activity_counts = group_logs_by_day(logs)
        export_to_json(activity_counts, "activity_data.json")
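
After running the exporter, a quick way to confirm the output has the {date, count} shape the chart binds to; this check is a sketch, not part of the commit:

# Sanity check of the exported file's shape; activity_data.json is the
# filename used above, and the field names come from the chart config.
import json

with open("activity_data.json") as f:
    data = json.load(f)
assert all({"date", "count"} <= record.keys() for record in data)
print(f"{len(data)} days exported; first record: {data[0] if data else None}")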

source/tuhmayto/logs.py Normal file

@@ -0,0 +1,37 @@
import sqlite3
from datetime import datetime
from typing import List, Tuple


def fetch_logs() -> List[Tuple[int, str]]:
    conn = sqlite3.connect("activity_log.db")
    cursor = conn.cursor()
    cursor.execute("SELECT id, timestamp FROM activity_log ORDER BY id ASC")
    logs = cursor.fetchall()
    conn.close()
    return logs


def filter_logs_by_hour(logs: List[Tuple[int, str]]) -> List[Tuple[int, str]]:
    """Keep only entries at least one hour apart from the last kept entry."""
    filtered_logs = []
    last_log_time = None
    for log_id, timestamp in logs:
        log_time = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")
        # total_seconds() measures the whole gap; timedelta.seconds alone
        # ignores full days and would wrongly drop entries a day apart.
        if last_log_time is None or (log_time - last_log_time).total_seconds() >= 3600:
            filtered_logs.append((log_id, timestamp))
            last_log_time = log_time
    return filtered_logs


def display_logs() -> None:
    logs = fetch_logs()
    if not logs:
        print("No logs found.")
        return
    filtered_logs = filter_logs_by_hour(logs)
    for log_id, timestamp in filtered_logs:
        print(f"ID: {log_id}, Timestamp: {timestamp}")


if __name__ == "__main__":
    display_logs()
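
A small demonstration of the hourly filter on hypothetical rows, assuming logs.py is importable from the working directory. The third row is the day-boundary case that total_seconds() handles (timedelta.seconds, which resets at day boundaries, would have dropped it):

# Hypothetical rows invented to exercise filter_logs_by_hour.
from logs import filter_logs_by_hour

rows = [
    (1, "2024-12-01 10:00:00"),
    (2, "2024-12-01 10:30:00"),  # < 1 h after the last kept row: dropped
    (3, "2024-12-02 10:30:00"),  # a full day later: kept
]
print(filter_logs_by_hour(rows))  # -> rows 1 and 3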

source/tuhmayto/setup.cfg Normal file

@@ -0,0 +1,10 @@
[metadata]
name = tuh-activity-logger

[options]
py_modules =
    tracker

[options.entry_points]
console_scripts =
    tuh-activity-logger = tracker:main

source/tuhmayto/setup.py Normal file

@@ -0,0 +1,3 @@
from setuptools import setup

setup()
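
The console_scripts entry declares a tuh-activity-logger command that resolves to tracker:main; once the package is installed (for example with pip install -e .), invoking that command is equivalent to this sketch:

# Equivalent of the tuh-activity-logger console script, assuming the
# package is installed so that the tracker module is importable.
from tracker import main

if __name__ == "__main__":
    main()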

source/tuhmayto/shell.nix Normal file

@@ -0,0 +1,14 @@
{
  pkgs ? import <nixpkgs> { },
}:
pkgs.mkShell {
  buildInputs = with pkgs; [
    sqlite
    (python3.withPackages (ps: [
      ps.beautifulsoup4
      ps.requests
      ps.matplotlib
    ]))
  ];
}

source/tuhmayto/tracker.py Normal file

@@ -0,0 +1,104 @@
import re
from datetime import datetime, timedelta
import sqlite3

import requests
from bs4 import BeautifulSoup

DB_FILE = "/home/jawz/.config/jawz/tuh_online_log.db"


def setup_database() -> None:
    conn = sqlite3.connect(DB_FILE)
    cursor = conn.cursor()
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS activity_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp TEXT UNIQUE NOT NULL
        )
        """
    )
    conn.commit()
    conn.close()


def log_activity(timestamp: str) -> None:
    conn = sqlite3.connect(DB_FILE)
    cursor = conn.cursor()
    # The UNIQUE constraint plus OR IGNORE makes repeated runs idempotent.
    cursor.execute(
        "INSERT OR IGNORE INTO activity_log (timestamp) VALUES (?)", (timestamp,)
    )
    conn.commit()
    conn.close()


def fetch_latest_log() -> datetime | None:
    conn = sqlite3.connect(DB_FILE)
    cursor = conn.cursor()
    # ISO-8601 timestamps sort correctly as text.
    cursor.execute("SELECT timestamp FROM activity_log ORDER BY timestamp DESC LIMIT 1")
    result = cursor.fetchone()
    conn.close()
    return datetime.strptime(result[0], "%Y-%m-%d %H:%M:%S") if result else None


def parse_last_seen(text: str) -> datetime | None:
    """Turn the site's Spanish "last seen" text into an absolute datetime."""
    now = datetime.now()
    if "Visto por última vez" in text:
        # Singular patterns also match their plurals ("hora" matches "horas").
        days_match = re.search(r"(\d+) día", text)
        hours_match = re.search(r"(\d+) hora", text)
        minutes_match = re.search(r"(\d+) minuto", text)
        if days_match:
            return now - timedelta(days=int(days_match.group(1)))
        if hours_match:
            return now - timedelta(hours=int(hours_match.group(1)))
        if minutes_match:
            return now - timedelta(minutes=int(minutes_match.group(1)))
    elif "online" in text.lower():
        return now
    return None


def scrape_and_log(url: str) -> None:
    response = requests.get(url, timeout=30)
    if response.status_code != 200:
        return
    soup = BeautifulSoup(response.text, "html.parser")
    details_row = soup.find("div", class_="details-row")
    if not details_row:
        return
    offline_div = details_row.find("div", class_="offline")
    if not offline_div:
        return
    last_seen_text = offline_div.text.strip()
    last_seen_time = parse_last_seen(last_seen_text)
    if not last_seen_time:
        return
    latest_log = fetch_latest_log()
    if latest_log and last_seen_time.date() <= latest_log.date():
        print(f"A log already exists for {latest_log} or later. Skipping new log.")
        return
    # Compare the full calendar hour; comparing .hour alone would also
    # match entries from different days.
    if latest_log and last_seen_time.strftime("%Y-%m-%d %H") == latest_log.strftime(
        "%Y-%m-%d %H"
    ):
        print("An entry for this hour already exists. Skipping new log.")
        return
    timestamp = last_seen_time.strftime("%Y-%m-%d %H:%M:%S")
    log_activity(timestamp)
    print(f"Logged activity: {timestamp}")


def main() -> None:
    url = "https://es.xhamsterporno.mx/users/johnneal911"
    setup_database()
    scrape_and_log(url)


if __name__ == "__main__":
    main()
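
A way to spot-check parse_last_seen without touching the network; the sample strings below are invented for illustration and may not match the site's exact wording:

# Illustrative strings only; the real page's phrasing may differ.
from tracker import parse_last_seen

samples = [
    "Visto por última vez hace 3 horas",  # roughly now - 3 hours
    "Visto por última vez hace 1 día",    # roughly now - 1 day
    "Online",                             # now
    "sin datos",                          # None
]
for text in samples:
    print(f"{text!r} -> {parse_last_seen(text)}")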