track tuhmayto xhamster activity

This commit is contained in:
Danilo Reyes 2024-12-02 12:26:59 -06:00
parent 3fe4b39dca
commit 029e596dbc
11 changed files with 2274 additions and 0 deletions

View File

@ -0,0 +1,19 @@
#!/usr/bin/env bash
# nix-direnv helper script (appears auto-generated): forcefully rebuild the
# direnv cache for the tuhmayto project, then touch the relevant files so
# direnv reloads once more without triggering a second rebuild.
set -e
# Guard: the absolute project path is baked in; bail out if it moved.
if [[ ! -d "/home/jawz/Development/Scripts/tuhmayto" ]]; then
echo "Cannot find source directory; Did you move it?"
echo "(Looking for "/home/jawz/Development/Scripts/tuhmayto")"
echo 'Cannot force reload with this script - use "direnv reload" manually and then try again'
exit 1
fi
# rebuild the cache forcefully
_nix_direnv_force_reload=1 direnv exec "/home/jawz/Development/Scripts/tuhmayto" true
# Update the mtime for .envrc.
# This will cause direnv to reload again - but without re-building.
touch "/home/jawz/Development/Scripts/tuhmayto/.envrc"
# Also update the timestamp of whatever profile_rc we have.
# This makes sure that we know we are up to date.
touch -r "/home/jawz/Development/Scripts/tuhmayto/.envrc" "/home/jawz/Development/Scripts/tuhmayto/.direnv"/*.rc

View File

@ -0,0 +1 @@
/nix/store/l0a5irlx6dqq5v2ix8q9y4ccgwql60sm-nix-shell-env

File diff suppressed because it is too large Load Diff

1
tuhmayto/.envrc Normal file
View File

@ -0,0 +1 @@
use nix

BIN
tuhmayto/activity_log.db Normal file

Binary file not shown.

64
tuhmayto/chart.html Normal file
View File

@ -0,0 +1,64 @@
<!doctype html>
<!-- Activity heatmap page: renders a column chart of per-day activity counts
     using amCharts 5, fed by activity_data.json (produced by export_json.py). -->
<html>
<head>
<title>Activity Heatmap</title>
<!-- amCharts 5 core, XY chart module, and animated theme from the CDN -->
<script src="https://cdn.amcharts.com/lib/5/index.js"></script>
<script src="https://cdn.amcharts.com/lib/5/xy.js"></script>
<script src="https://cdn.amcharts.com/lib/5/themes/Animated.js"></script>
</head>
<body>
<div id="chartdiv" style="width: 100%; height: 500px"></div>
<script>
am5.ready(function () {
// Root container bound to the chartdiv element above.
var root = am5.Root.new("chartdiv");
root.setThemes([am5themes_Animated.new(root)]);
// Pannable XY chart; mouse-wheel zoom is disabled on both axes.
var chart = root.container.children.push(
am5xy.XYChart.new(root, {
panX: true,
panY: true,
wheelX: "none",
wheelY: "none",
}),
);
// X axis: one category per "date" string from the JSON records.
var xAxis = chart.xAxes.push(
am5xy.CategoryAxis.new(root, {
categoryField: "date",
renderer: am5xy.AxisRendererX.new(root, {}),
tooltip: am5.Tooltip.new(root, {}),
}),
);
// Y axis: plain value axis for the activity counts.
var yAxis = chart.yAxes.push(
am5xy.ValueAxis.new(root, {
renderer: am5xy.AxisRendererY.new(root, {}),
}),
);
// One column per day; tooltip shows the raw count.
var series = chart.series.push(
am5xy.ColumnSeries.new(root, {
name: "Activity",
xAxis: xAxis,
yAxis: yAxis,
valueYField: "count",
categoryXField: "date",
tooltip: am5.Tooltip.new(root, {
labelText: "{valueY}",
}),
}),
);
// Load the exported data; expects [{date: "...", count: N}, ...].
fetch("activity_data.json")
.then((response) => response.json())
.then((data) => {
xAxis.data.setAll(data);
series.data.setAll(data);
});
chart.appear(1000, 100);
});
</script>
</body>
</html>

20
tuhmayto/dummy.sql Normal file
View File

@ -0,0 +1,20 @@
-- Seed script: fill activity_log with fake hourly data for local testing.
-- Create the table if it doesn't exist
CREATE TABLE IF NOT EXISTS activity_log (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp TEXT NOT NULL
);
-- Insert dummy data for one year
DELETE FROM activity_log; -- Clear existing data
-- Generate one row per hour from 2023-12-01 through 2024-12-01.
WITH RECURSIVE dates(date) AS (
SELECT datetime('2023-12-01 00:00:00')
UNION ALL
SELECT datetime(date, '+1 hour')
FROM dates
WHERE date < datetime('2024-12-01 00:00:00')
)
INSERT INTO activity_log (timestamp)
SELECT date
FROM dates
-- SQLite random() is a signed 64-bit integer, so random() % 4 is in -3..3;
-- exactly 0 occurs for multiples of 4, i.e. about 1 in 4 rows.
WHERE random() % 4 = 0; -- Randomly select approximately 25% of hours for activity

42
tuhmayto/export_json.py Normal file
View File

@ -0,0 +1,42 @@
import sqlite3
from datetime import datetime
from collections import Counter
import json
def fetch_logs() -> list[str]:
    """Read every stored activity timestamp from the local SQLite database."""
    conn = sqlite3.connect("activity_log.db")
    try:
        rows = conn.execute("SELECT timestamp FROM activity_log").fetchall()
    finally:
        conn.close()
    return [timestamp for (timestamp,) in rows]
def group_logs_by_day(timestamps: list[str]) -> Counter:
    """Count how many logged events fall on each calendar day (YYYY-MM-DD)."""
    counts: Counter = Counter()
    for ts in timestamps:
        day = datetime.strptime(ts, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%d")
        counts[day] += 1
    return counts
def export_to_json(activity_counts: Counter, output_file: str) -> None:
    """Serialize per-day activity counts to a JSON file for the chart page.

    Output shape: a list of {"date": ..., "count": ...} records, preserving
    the counter's insertion order.
    """
    records = [{"date": day, "count": n} for day, n in activity_counts.items()]
    with open(output_file, "w") as fh:
        json.dump(records, fh, indent=4)
# Script entry point: read the raw log, aggregate per day, and write the
# JSON file consumed by chart.html.
if __name__ == "__main__":
    logs = fetch_logs()
    if logs:
        export_to_json(group_logs_by_day(logs), "activity_data.json")
    else:
        print("No logs found.")

37
tuhmayto/logs.py Normal file
View File

@ -0,0 +1,37 @@
import sqlite3
from datetime import datetime
from typing import List, Tuple
def fetch_logs() -> List[Tuple[int, str]]:
    """Return every (id, timestamp) row from the activity log, oldest id first."""
    conn = sqlite3.connect("activity_log.db")
    try:
        return conn.execute(
            "SELECT id, timestamp FROM activity_log ORDER BY id ASC"
        ).fetchall()
    finally:
        conn.close()
def filter_logs_by_hour(logs: List[Tuple[int, str]]) -> List[Tuple[int, str]]:
    """Thin *logs* so that each kept entry is at least one hour after the last.

    Args:
        logs: (id, "YYYY-MM-DD HH:MM:SS") tuples, assumed chronological.

    Returns:
        The subset of entries separated by >= 3600 seconds from the
        previously *kept* entry; the first entry is always kept.

    Bug fix: the original compared ``timedelta.seconds``, which only holds the
    sub-day remainder of a gap (a gap of exactly 24h reports 0 seconds), so
    entries a day or more apart could be wrongly dropped.  ``total_seconds()``
    measures the full gap.
    """
    filtered_logs: List[Tuple[int, str]] = []
    last_log_time = None
    for log_id, timestamp in logs:
        log_time = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")
        if last_log_time is None or (log_time - last_log_time).total_seconds() >= 3600:
            filtered_logs.append((log_id, timestamp))
            last_log_time = log_time
    return filtered_logs
def display_logs() -> None:
    """Print the hourly-filtered activity log to stdout, one entry per line."""
    all_logs = fetch_logs()
    if not all_logs:
        print("No logs found.")
        return
    for entry_id, ts in filter_logs_by_hour(all_logs):
        print(f"ID: {entry_id}, Timestamp: {ts}")
# Script entry point: dump the filtered activity log to stdout.
if __name__ == "__main__":
display_logs()

14
tuhmayto/shell.nix Normal file
View File

@ -0,0 +1,14 @@
{
pkgs ? import <nixpkgs> { },
}:
pkgs.mkShell {
buildInputs = with pkgs; [
sqlite
(python3.withPackages (ps: [
ps.beautifulsoup4
ps.requests
ps.matplotlib
]))
];
}

72
tuhmayto/tracker.py Normal file
View File

@ -0,0 +1,72 @@
import re
from datetime import datetime, timedelta
import sqlite3
import requests
from bs4 import BeautifulSoup
def setup_database() -> None:
    """Create the activity_log table (auto-id + unique timestamp) if missing."""
    conn = sqlite3.connect("activity_log.db")
    try:
        conn.execute(
            """
        CREATE TABLE IF NOT EXISTS activity_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp TEXT UNIQUE NOT NULL
        )
        """
        )
        conn.commit()
    finally:
        conn.close()
def log_activity(timestamp: str) -> None:
    """Record one activity timestamp; duplicates are silently ignored
    (the column carries a UNIQUE constraint, so INSERT OR IGNORE is a no-op
    for timestamps already present)."""
    conn = sqlite3.connect("activity_log.db")
    try:
        conn.execute(
            "INSERT OR IGNORE INTO activity_log (timestamp) VALUES (?)", (timestamp,)
        )
        conn.commit()
    finally:
        conn.close()
def parse_last_seen(text: str) -> datetime | None:
now = datetime.now()
if "Visto por última vez" in text:
minutes_match = re.search(r"(\d+) minutos", text)
if not minutes_match:
return None
minutes_ago = int(minutes_match.group(1))
return now - timedelta(minutes=minutes_ago)
if "online" in text.lower():
return now
return None
def scrape_and_log(url: str) -> None:
    """Fetch the profile page at *url* and log the inferred last-seen time.

    Silently returns when the request fails, the expected markup is absent,
    or the status text cannot be parsed into a datetime.
    """
    response = requests.get(url)
    if response.status_code != 200:
        return  # page unreachable this round; try again on next run
    page = BeautifulSoup(response.text, "html.parser")
    # The "last seen" status sits inside div.details-row > div.offline.
    details_row = page.find("div", class_="details-row")
    if not details_row:
        return
    offline_div = details_row.find("div", class_="offline")
    if not offline_div:
        return
    seen_at = parse_last_seen(offline_div.text.strip())
    if not seen_at:
        return
    log_activity(seen_at.strftime("%Y-%m-%d %H:%M:%S"))
# Script entry point: ensure the database exists, then take one scrape
# snapshot of the hard-coded profile page.
if __name__ == "__main__":
url = "https://es.xhamsterporno.mx/users/johnneal911"
setup_database()
scrape_and_log(url)