track tuhmayto xhamster activity
This commit is contained in:
parent 3fe4b39dca
commit 029e596dbc
tuhmayto/.direnv/bin/nix-direnv-reload (executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -e
if [[ ! -d "/home/jawz/Development/Scripts/tuhmayto" ]]; then
  echo "Cannot find source directory; Did you move it?"
  echo "(Looking for "/home/jawz/Development/Scripts/tuhmayto")"
  echo 'Cannot force reload with this script - use "direnv reload" manually and then try again'
  exit 1
fi

# rebuild the cache forcefully
_nix_direnv_force_reload=1 direnv exec "/home/jawz/Development/Scripts/tuhmayto" true

# Update the mtime for .envrc.
# This will cause direnv to reload again - but without re-building.
touch "/home/jawz/Development/Scripts/tuhmayto/.envrc"

# Also update the timestamp of whatever profile_rc we have.
# This makes sure that we know we are up to date.
touch -r "/home/jawz/Development/Scripts/tuhmayto/.envrc" "/home/jawz/Development/Scripts/tuhmayto/.direnv"/*.rc
tuhmayto/.direnv/nix-profile-25.05-h5v6brw3yj14b52a (symbolic link, 1 line)
@@ -0,0 +1 @@
/nix/store/l0a5irlx6dqq5v2ix8q9y4ccgwql60sm-nix-shell-env
tuhmayto/.direnv/nix-profile-25.05-h5v6brw3yj14b52a.rc (normal file, 2004 lines)
File diff suppressed because it is too large.
tuhmayto/.envrc (normal file, 1 line)
@@ -0,0 +1 @@
use nix
tuhmayto/activity_log.db (binary file)
Binary file not shown.
tuhmayto/chart.html (normal file, 64 lines)
@@ -0,0 +1,64 @@
<!doctype html>
<html>
  <head>
    <title>Activity Heatmap</title>
    <script src="https://cdn.amcharts.com/lib/5/index.js"></script>
    <script src="https://cdn.amcharts.com/lib/5/xy.js"></script>
    <script src="https://cdn.amcharts.com/lib/5/themes/Animated.js"></script>
  </head>
  <body>
    <div id="chartdiv" style="width: 100%; height: 500px"></div>

    <script>
      am5.ready(function () {
        var root = am5.Root.new("chartdiv");
        root.setThemes([am5themes_Animated.new(root)]);

        var chart = root.container.children.push(
          am5xy.XYChart.new(root, {
            panX: true,
            panY: true,
            wheelX: "none",
            wheelY: "none",
          }),
        );

        var xAxis = chart.xAxes.push(
          am5xy.CategoryAxis.new(root, {
            categoryField: "date",
            renderer: am5xy.AxisRendererX.new(root, {}),
            tooltip: am5.Tooltip.new(root, {}),
          }),
        );

        var yAxis = chart.yAxes.push(
          am5xy.ValueAxis.new(root, {
            renderer: am5xy.AxisRendererY.new(root, {}),
          }),
        );

        var series = chart.series.push(
          am5xy.ColumnSeries.new(root, {
            name: "Activity",
            xAxis: xAxis,
            yAxis: yAxis,
            valueYField: "count",
            categoryXField: "date",
            tooltip: am5.Tooltip.new(root, {
              labelText: "{valueY}",
            }),
          }),
        );

        fetch("activity_data.json")
          .then((response) => response.json())
          .then((data) => {
            xAxis.data.setAll(data);
            series.data.setAll(data);
          });

        chart.appear(1000, 100);
      });
    </script>
  </body>
</html>
tuhmayto/dummy.sql (normal file, 20 lines)
@@ -0,0 +1,20 @@
-- Create the table if it doesn't exist
CREATE TABLE IF NOT EXISTS activity_log (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    timestamp TEXT NOT NULL
);

-- Insert dummy data for one year
DELETE FROM activity_log; -- Clear existing data

WITH RECURSIVE dates(date) AS (
    SELECT datetime('2023-12-01 00:00:00')
    UNION ALL
    SELECT datetime(date, '+1 hour')
    FROM dates
    WHERE date < datetime('2024-12-01 00:00:00')
)
INSERT INTO activity_log (timestamp)
SELECT date
FROM dates
WHERE random() % 4 = 0; -- Randomly select approximately 25% of hours for activity
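For local testing, dummy.sql can be applied to the tracker database either with the sqlite3 CLI or with a short helper along these lines (a sketch only; the helper is not part of this commit and assumes it runs from the tuhmayto directory):

# load_dummy.py - hypothetical helper for seeding activity_log.db
import sqlite3

# Read the seed script added above and run it against the tracker's database.
with open("dummy.sql") as f:
    seed_sql = f.read()

conn = sqlite3.connect("activity_log.db")
conn.executescript(seed_sql)  # runs every statement, including the recursive-CTE insert
conn.commit()
conn.close()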
tuhmayto/export_json.py (normal file, 42 lines)
@@ -0,0 +1,42 @@
import sqlite3
from datetime import datetime
from collections import Counter
import json


def fetch_logs() -> list[str]:
    conn = sqlite3.connect("activity_log.db")
    cursor = conn.cursor()
    cursor.execute("SELECT timestamp FROM activity_log")
    logs = [row[0] for row in cursor.fetchall()]
    conn.close()
    return logs


def group_logs_by_day(timestamps: list[str]) -> Counter:
    days = [
        datetime.strptime(ts, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%d")
        for ts in timestamps
    ]
    return Counter(days)


def export_to_json(activity_counts: Counter, output_file: str) -> None:
    data = [
        {
            "date": date,
            "count": count,
        }
        for date, count in activity_counts.items()
    ]
    with open(output_file, "w") as f:
        json.dump(data, f, indent=4)


if __name__ == "__main__":
    logs = fetch_logs()
    if not logs:
        print("No logs found.")
    else:
        activity_counts = group_logs_by_day(logs)
        export_to_json(activity_counts, "activity_data.json")
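export_to_json writes a flat JSON array of per-day objects, which is the shape chart.html's fetch("activity_data.json") call feeds into the category axis and the column series. A minimal sketch of the format, with invented counts rather than real database contents:

# illustrate_export_format.py - hypothetical example, not part of this commit
from collections import Counter
import json

# Invented per-day counts standing in for group_logs_by_day() output.
activity_counts = Counter({"2023-12-01": 5, "2023-12-02": 3})
data = [{"date": date, "count": count} for date, count in activity_counts.items()]
print(json.dumps(data, indent=4))
# [
#     {
#         "date": "2023-12-01",
#         "count": 5
#     },
#     {
#         "date": "2023-12-02",
#         "count": 3
#     }
# ]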
tuhmayto/logs.py (normal file, 37 lines)
@@ -0,0 +1,37 @@
import sqlite3
from datetime import datetime
from typing import List, Tuple


def fetch_logs() -> List[Tuple[int, str]]:
    conn = sqlite3.connect("activity_log.db")
    cursor = conn.cursor()
    cursor.execute("SELECT id, timestamp FROM activity_log ORDER BY id ASC")
    logs = cursor.fetchall()
    conn.close()
    return logs


def filter_logs_by_hour(logs: List[Tuple[int, str]]) -> List[Tuple[int, str]]:
    filtered_logs = []
    last_log_time = None
    for log_id, timestamp in logs:
        log_time = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")
        # Use total_seconds() rather than .seconds, which wraps at 24 hours and
        # would wrongly drop entries whose gap is a bit over a whole day.
        if last_log_time is None or (log_time - last_log_time).total_seconds() >= 3600:
            filtered_logs.append((log_id, timestamp))
            last_log_time = log_time
    return filtered_logs


def display_logs() -> None:
    logs = fetch_logs()
    if not logs:
        print("No logs found.")
        return
    filtered_logs = filter_logs_by_hour(logs)
    for log_id, timestamp in filtered_logs:
        print(f"ID: {log_id}, Timestamp: {timestamp}")


if __name__ == "__main__":
    display_logs()
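A quick way to see the throttling behaviour of filter_logs_by_hour is to feed it a few hand-written rows (a sketch that assumes it is run next to logs.py; the IDs and timestamps are invented):

from logs import filter_logs_by_hour

sample = [
    (1, "2023-12-01 10:00:00"),
    (2, "2023-12-01 10:20:00"),  # less than an hour after the last kept row -> dropped
    (3, "2023-12-01 11:05:00"),  # at least an hour after the last kept row -> kept
]
print(filter_logs_by_hour(sample))
# [(1, '2023-12-01 10:00:00'), (3, '2023-12-01 11:05:00')]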
tuhmayto/shell.nix (normal file, 14 lines)
@@ -0,0 +1,14 @@
{
  pkgs ? import <nixpkgs> { },
}:

pkgs.mkShell {
  buildInputs = with pkgs; [
    sqlite
    (python3.withPackages (ps: [
      ps.beautifulsoup4
      ps.requests
      ps.matplotlib
    ]))
  ];
}
tuhmayto/tracker.py (normal file, 72 lines)
@@ -0,0 +1,72 @@
import re
from datetime import datetime, timedelta
import sqlite3
import requests
from bs4 import BeautifulSoup


def setup_database() -> None:
    conn = sqlite3.connect("activity_log.db")
    cursor = conn.cursor()
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS activity_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp TEXT UNIQUE NOT NULL
        )
        """
    )
    conn.commit()
    conn.close()


def log_activity(timestamp: str) -> None:
    conn = sqlite3.connect("activity_log.db")
    cursor = conn.cursor()
    cursor.execute(
        "INSERT OR IGNORE INTO activity_log (timestamp) VALUES (?)", (timestamp,)
    )
    conn.commit()
    conn.close()


def parse_last_seen(text: str) -> datetime | None:
    now = datetime.now()
    if "Visto por última vez" in text:  # "Last seen" on the Spanish-language page
        minutes_match = re.search(r"(\d+) minutos", text)  # "... N minutes" ago
        if not minutes_match:
            return None
        minutes_ago = int(minutes_match.group(1))
        return now - timedelta(minutes=minutes_ago)
    if "online" in text.lower():
        return now
    return None


def scrape_and_log(url: str) -> None:
    response = requests.get(url)
    if response.status_code != 200:
        return

    soup = BeautifulSoup(response.text, "html.parser")
    details_row = soup.find("div", class_="details-row")
    if not details_row:
        return

    offline_div = details_row.find("div", class_="offline")
    if not offline_div:
        return

    last_seen_text = offline_div.text.strip()
    last_seen_time = parse_last_seen(last_seen_text)
    if not last_seen_time:
        return

    timestamp = last_seen_time.strftime("%Y-%m-%d %H:%M:%S")
    log_activity(timestamp)


if __name__ == "__main__":
    url = "https://es.xhamsterporno.mx/users/johnneal911"
    setup_database()
    scrape_and_log(url)
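parse_last_seen only recognizes two page states: a Spanish "last seen N minutes ago" string and an "online" marker; anything else returns None and nothing is logged. A quick illustration with hand-written strings (a sketch assuming it runs next to tracker.py; the exact wording on the profile page may differ):

from tracker import parse_last_seen

print(parse_last_seen("Visto por última vez hace 35 minutos"))  # roughly now minus 35 minutes
print(parse_last_seen("Online"))                                # current time
print(parse_last_seen("Visto por última vez ayer"))             # no minute count -> None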