cross-posted from: https://lemmy.dbzer0.com/post/55501944

Hey, I’ve been kicking around an idea for a bot: it would look for fanfiction links shared in a server and keep track of which ones get shared the most.

The concept:

  • Track sites like AO3, FanFiction.net, ScribbleHub, etc.
  • Count how often each link gets posted
  • Commands to see the “top” links or which domains are tracked

It’s just a rough idea; I haven’t built it yet. Curious whether anyone thinks this would actually be useful, or has tips for implementing it without overcomplicating things.
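Below is a very rough discord.py sketch of the shape I have in mind (prefix commands, message-content intent, counts persisted to a JSON file). Completely untested, just to show the idea.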

import re
import json
from collections import Counter
from urllib.parse import urlparse

import discord
from discord.ext import commands

TOKEN = "YOUR_BOT_TOKEN"
intents = discord.Intents.default()
intents.message_content = True
bot = commands.Bot(command_prefix="!", intents=intents)

# Domain list you want to track (lower-case)
TRACK_DOMAINS = {
    "archiveofourown.org",
    "fanfiction.net",
    "forum.questionablequesting.com",
    "forums.spacebattles.com",
    "forums.sufficientvelocity.com",
    "webnovel.com",
    "hentai-foundry.com",
    "scribblehub.com",
}

# Naive URL matcher; will also pick up trailing punctuation like ")" or ","
link_pattern = re.compile(r'https?://\S+')

link_counter = Counter()

def domain_of_url(url: str) -> str | None:
    """Return the bare domain of a URL, or None if it can't be parsed."""
    try:
        domain = urlparse(url).netloc.lower()
        # strip a leading "www." so www.scribblehub.com matches scribblehub.com
        if domain.startswith("www."):
            domain = domain[4:]
        return domain
    except Exception:
        return None

def save_links():
    with open("links.json", "w") as f:
        # convert counts to dict
        json.dump(dict(link_counter), f)

def load_links():
    try:
        with open("links.json") as f:
            data = json.load(f)
        for link, cnt in data.items():
            link_counter[link] = cnt
    except FileNotFoundError:
        pass

@bot.event
async def on_ready():
    load_links()
    print(f"Bot is ready. Logged in as {bot.user}")

@bot.event
async def on_message(message):
    if message.author.bot:
        return
    links = link_pattern.findall(message.content)
    counted = False
    for link in links:
        dom = domain_of_url(link)
        if dom in TRACK_DOMAINS:
            link_counter[link] += 1
            counted = True
    if counted:
        # save as we go; on_disconnect alone isn't a reliable shutdown hook
        save_links()
    await bot.process_commands(message)

@bot.command(name="links")
async def links(ctx, top: int = 10):
    if not link_counter:
        await ctx.send("No links recorded.")
        return
    sorted_links = sorted(link_counter.items(), key=lambda x: x[1], reverse=True)
    display = sorted_links[:top]
    lines = [f"{link} — {count}" for link, count in display]
    await ctx.send("**Top links:**\n" + "\n".join(lines))

@bot.command(name="domains")
async def domains(ctx):
    """Show which domains are tracked."""
    await ctx.send("Tracked domains: " + ", ".join(sorted(TRACK_DOMAINS)))

@bot.command(name="dump")
async def dump(ctx):
    """For admin: dump full counts (might be large)."""
    if not link_counter:
        await ctx.send("No data.")
        return
    lines = [f"{link} — {cnt}" for link, cnt in sorted(link_counter.items(), key=lambda x: x[1], reverse=True)]
    chunk = "\n".join(lines)
    # Discord message length limit; you may need to split
    await ctx.send(f"All counts:\n{chunk}")

@bot.event
async def on_disconnect():
    save_links()

bot.run(TOKEN)
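
For anyone who wants to poke at the domain matching without setting up a bot, here's a tiny standalone check. The sample URLs are made up and bare_domain is just a throwaway helper using the same extraction logic as domain_of_url above:

from urllib.parse import urlparse

def bare_domain(url):
    # same idea as domain_of_url: netloc, lower-cased, leading "www." stripped
    domain = urlparse(url).netloc.lower()
    return domain[4:] if domain.startswith("www.") else domain

samples = [
    "https://archiveofourown.org/works/12345?view_adult=true",
    "https://www.fanfiction.net/s/67890/1/Some-Story",
    "https://example.com/not-tracked",
]
for url in samples:
    print(url, "->", bare_domain(url))
# expected: archiveofourown.org, fanfiction.net, example.com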