Mirror of https://github.com/alterware/aw-bot.git (synced 2026-01-21 15:21:52 +00:00)

Compare commits

15 commits
| Author | SHA1 | Date |
|---|---|---|
| | 1d84d26008 | |
| | bf3b5bd6e4 | |
| | 808c564d1c | |
| | 5697883bd5 | |
| | 3f000b3cf7 | |
| | 0a9b862fe9 | |
| | 8713ff3b2d | |
| | d008d19ba3 | |
| | 372d3b4c59 | |
| | 5be09cd891 | |
| | 769309007c | |
| | a99b82265e | |
| | 726e19565e | |
| | 0b5308d1e9 | |
| | 8d215f56f3 | |
21  .github/dependabot.yml  vendored
@@ -1,10 +1,15 @@
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: monthly
    groups:
      python-packages:
        patterns:
          - "*"

  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      interval: monthly
11  .github/workflows/docker-publish.yml  vendored
@@ -18,17 +18,20 @@ jobs:
        with:
          lfs: true

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3.7.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.10.0
        uses: docker/setup-buildx-action@v3.12.0

      - name: Log in to DockerHub
        uses: docker/login-action@v3.3.0
        uses: docker/login-action@v3.6.0
        with:
          username: ${{ secrets.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - id: meta
        uses: docker/metadata-action@v5.7.0
        uses: docker/metadata-action@v5.10.0
        with:
          images: |
            alterware/aw-bot

@@ -38,7 +41,7 @@ jobs:

      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v6.15.0
        uses: docker/build-push-action@v6.18.0
        with:
          context: .
          platforms: linux/amd64,linux/arm64
@@ -11,21 +11,18 @@ COPY requirements.txt .
RUN /bot-env/bin/pip install --no-cache-dir -r requirements.txt

COPY bot /aw-bot/bot
COPY chat /aw-bot/chat
COPY database /aw-bot/database
COPY sounds /aw-bot/sounds
COPY aw.py .
COPY LICENSE .

ENV BOT_TOKEN=""
ENV GOOGLE_API_KEY=""
ENV DISCOURSE_API_KEY=""
ENV DISCOURSE_BASE_URL=""
ENV DISCOURSE_USERNAME=""

# Where the database will be stored
ENV BOT_DATA_DIR=""

ENV MONGO_URI=""

# Accept build arguments for metadata
ARG BUILD_DATE=""
ARG GIT_TAG=""
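The Dockerfile above only declares empty environment variables; the Python code reads them at runtime. A minimal sketch of how that configuration surfaces inside the bot (variable names come from the Dockerfile; the localhost fallback mirrors `get_mongodb_uri()` in `bot/mongodb/load_db.py` below, and the rest is illustrative, not a call site from this diff):

```python
import os

# Injected at `docker run -e ...` time; the empty ENV defaults only declare the names.
BOT_TOKEN = os.getenv("BOT_TOKEN")
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

# Mirrors get_mongodb_uri(): fall back to a local MongoDB when MONGO_URI is unset.
MONGO_URI = os.getenv("MONGO_URI") or "mongodb://localhost:27017"

# BUILD_DATE and GIT_TAG arrive as build arguments; aw.py reads GIT_TAG via os.getenv("GIT_TAG").
GIT_TAG = os.getenv("GIT_TAG")
```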
3  aw.py
@@ -4,7 +4,6 @@ import discord
from discord.ext import commands
from dotenv import load_dotenv

from bot.ai.handle_request import DiscourseSummarizer
from bot.log import logger
from database import initialize_db

@@ -24,8 +23,6 @@ git_tag = os.getenv("GIT_TAG")

initialize_db()

bot.ai_helper = DiscourseSummarizer()


@bot.event
async def on_ready():
@@ -1 +0,0 @@
from .handle_request import forward_to_google_api
@@ -1,183 +0,0 @@
import os

import requests
from google import genai
from google.genai import types

from bot.log import logger

API_KEY = os.getenv("GOOGLE_API_KEY")

GENERIC_INSTRUCTION = "You are a Discord chatbot named 'AlterWare' who helps users with all kinds of topics across various subjects. You should limit your answers to fewer than 2000 characters."
SPECIFIC_INSTRUCTION = "You are a Discord chatbot named 'AlterWare' who helps users. You should limit your answers to fewer than 2000 characters."


class DiscourseSummarizer:
    def __init__(self):
        self.model = "gemini-2.0-flash"
        self.display_name = "alterware"
        self.cache = None
        self.ttl = "21600s"
        self.discourse_data = None

        if not API_KEY:
            logger.error("Google API key is not set. Please contact the administrator.")
            return

        self.client = genai.Client(api_key=API_KEY)

    def set_discourse_data(self, topic_data):
        """
        Sets the discourse data for the summarizer.

        Args:
            topic_data (str): The combined text of discourse posts.
        """
        self.discourse_data = topic_data

    def summarize_discourse_topic(self, topic_data, system_instruction=None):
        """
        Creates a cache for the discourse topic data.

        Args:
            topic_data (str): The combined text of discourse posts.
            system_instruction (str, optional): Custom system instruction for the model.
        """
        self.cache = self.client.caches.create(
            model=self.model,
            config=types.CreateCachedContentConfig(
                display_name=self.display_name,
                system_instruction=system_instruction or (SPECIFIC_INSTRUCTION),
                contents=[topic_data],
                ttl=self.ttl,
            ),
        )
        logger.info("Cached content created: %s", self.cache.name)

    def update_cache(self):
        """
        Updates the cache TTL.
        """
        if not self.cache:
            raise RuntimeError(
                "Cache has not been created. Run summarize_discourse_topic first."
            )

        self.client.caches.update(
            name=self.cache.name, config=types.UpdateCachedContentConfig(ttl="21600s")
        )
        logger.info("Cache updated.")

    def ask(self, prompt):
        """
        Generates a response using the cached content.

        Args:
            prompt (str): The user prompt.

        Returns:
            str: The generated response.
        """
        if not self.cache:
            raise RuntimeError(
                "Cache has not been created. Run summarize_discourse_topic first."
            )

        response = self.client.models.generate_content(
            model=self.model,
            contents=prompt,
            config=types.GenerateContentConfig(
                max_output_tokens=400,
                system_instruction=SPECIFIC_INSTRUCTION,
                cached_content=self.cache.name,
            ),
        )
        return response.text

    def ask_without_cache(self, prompt):
        """
        Generates a response without using cached content, including discourse data.

        Args:
            prompt (str): The user prompt.

        Returns:
            str: The generated response.
        """
        if not self.discourse_data:
            return "Discourse data has not been set."

        prompt.insert(0, self.discourse_data)
        response = self.client.models.generate_content(
            model=self.model,
            contents=prompt,
            config=types.GenerateContentConfig(
                max_output_tokens=400,
                system_instruction=SPECIFIC_INSTRUCTION,
            ),
        )
        return response.text

    def ask_without_context(self, prompt):
        response = self.client.models.generate_content(
            model=self.model,
            contents=prompt,
            config=types.GenerateContentConfig(
                max_output_tokens=400,
                system_instruction=GENERIC_INSTRUCTION,
            ),
        )
        return response.text


async def forward_to_google_api(
    prompt, bot, image_object=None, reply=None, no_context=False
):
    """
    Forwards the message content and optional image object to a Google API.

    Args:
        prompt (discord.Message): The message object to forward.
        bot (discord.Client): The Discord bot instance.
        image_object (tuple, optional): A tuple containing the image URL and its MIME type (e.g., ("url", "image/jpeg")).
        reply (discord.Message, optional): The message that was referenced by prompt.
        no_context (bool, optional): If True, the bot will not use any cached content or context.
    """
    if not API_KEY:
        await prompt.reply(
            "Google API key is not set. Please contact the administrator.",
            mention_author=True,
        )
        return

    input = [prompt.content]

    # Have the reply come first in the prompt
    if reply:
        input.insert(0, reply.content)

    if image_object:
        try:
            image_url, mime_type = image_object
            image = requests.get(image_url)
            image.raise_for_status()

            # If there is an image, add it to the input before anything else
            input.insert(
                0, types.Part.from_bytes(data=image.content, mime_type=mime_type)
            )
        except requests.RequestException:
            await prompt.reply(f"Failed to fetch the image", mention_author=True)
            return

    response = None

    if no_context:
        response = bot.ai_helper.ask_without_context(input)
    else:
        response = bot.ai_helper.ask_without_cache(input)

    reply_message = await prompt.reply(
        response,
        mention_author=True,
    )
@@ -4,10 +4,15 @@ from typing import Literal
import discord
from discord import app_commands

from bot.config import message_patterns, update_patterns
from bot.log import logger
from bot.utils import compile_stats, fetch_game_stats, perform_search
from database import add_pattern, add_user_to_blacklist, is_user_blacklisted
from database import (
    get_meme_patterns,
    add_aka_response,
    search_aka,
    add_meme_pattern,
    add_user_to_blacklist,
    is_user_blacklisted,
)

GUILD_ID = 1110531063161299074

@@ -33,17 +38,32 @@ async def setup(bot):
    bot.tree.on_error = on_tree_error

    @bot.tree.command(
        name="add_pattern",
        name="add_aka_message",
        description="Add a new aka message to the database.",
        guild=discord.Object(id=GUILD_ID),
    )
    @app_commands.checks.has_permissions(administrator=True)
    async def add_aka_message(
        interaction: discord.Interaction, aka: str, response: str
    ):
        """Slash command to add a new aka pattern to the database."""
        add_aka_response(aka, response)
        await interaction.response.send_message(
            f"Pattern added!\n**AKA:** `{aka}`\n**Response:** `{response}`"
        )

    @bot.tree.command(
        name="add_meme_pattern",
        description="Add a new message pattern to the database.",
        guild=discord.Object(id=GUILD_ID),
    )
    @app_commands.checks.has_permissions(administrator=True)
    async def add_pattern_cmd(
    async def add_meme_pattern_cmd(
        interaction: discord.Interaction, regex: str, response: str
    ):
        """Slash command to add a new message pattern to the database."""
        add_pattern(regex, response)
        update_patterns(regex, response)
        add_meme_pattern(regex, response)
        logger.info("Saved a new meme pattern: %s", regex)
        await interaction.response.send_message(
            f"Pattern added!\n**Regex:** `{regex}`\n**Response:** `{response}`"
        )

@@ -64,41 +84,34 @@ async def setup(bot):
    )

    @bot.tree.command(
        name="search",
        description="Search for servers by hostname or IP.",
        name="aka",
        description="Check if the input matches any predefined aka patterns.",
        guild=discord.Object(id=GUILD_ID),
    )
    async def slash_search(interaction: discord.Interaction, query: str):
        results = await perform_search(query)
        await interaction.response.send_message(results)
    async def aka(interaction: discord.Interaction, input: str):
        """
        Slash command to check if the input matches any predefined aka patterns.
        """
        # Check if the user is blacklisted
        if is_user_blacklisted(interaction.user.id):
            await interaction.response.send_message(
                "You are blacklisted from using this command.", ephemeral=True
            )
            return

    @app_commands.checks.cooldown(1, 60, key=lambda i: (i.guild_id, i.user.id))
    @bot.tree.command(
        name="stats",
        description="Get stats for a specific game or all games",
        guild=discord.Object(id=GUILD_ID),
    )
    async def stats(
        interaction: discord.Interaction, game: Literal["s1", "iw6", "t7", "all"]
    ):
        if game == "all":
            stats_message = await compile_stats()
        # Search the database for a match
        response = search_aka(input)

        if response:
            await interaction.response.send_message(response, ephemeral=False)
        else:
            data = await fetch_game_stats(game)
            if data:
                stats_message = f"**Stats for {game.upper()}:**\n"
                count_servers = data.get("countServers", "N/A")
                count_players = data.get("countPlayers", "N/A")
                stats_message += f"Total Servers: {count_servers}\n"
                stats_message += f"Total Players: {count_players}\n"
            else:
                stats_message = "Failed to fetch game stats. Please try again later."

        await interaction.response.send_message(stats_message, ephemeral=True)
            await interaction.response.send_message(
                "No matching aka patterns found.", ephemeral=True
            )

    @bot.tree.command(
        name="meme",
        description="Check if the input matches any predefined memess.",
        description="Check if the input matches any predefined memes.",
        guild=discord.Object(id=GUILD_ID),
    )
    async def meme(interaction: discord.Interaction, input: str):

@@ -112,6 +125,7 @@ async def setup(bot):
        )
        return

        message_patterns = get_meme_patterns()
        # Check if any of the patterns match the input
        for pattern in message_patterns:
            if re.search(pattern["regex"], input, re.IGNORECASE):
@@ -1,74 +1,7 @@
import csv
import os
import glob

from bot.log import logger
from database import get_patterns
MONGO_URI = os.getenv("MONGO_URI")

message_patterns = get_patterns()
# load global variables


def update_patterns(regex: str, response: str):
    """update patterns in memory."""
    message_patterns.append({"regex": regex, "response": response})
    logger.info(f"Pattern added in memory: {regex}")


def load_chat_messages_from_disk(csv_path="chat/chat_messages_blue.csv"):
    """
    Loads all messages from the given CSV file.

    Args:
        csv_path (str): Path to the CSV file.

    Returns:
        list: List of message strings.
    """
    messages = []
    if not os.path.exists(csv_path):
        logger.info(f"CSV file not found: {csv_path}")
        return messages

    with open(csv_path, newline="", encoding="utf-8") as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            msg = row.get("Message")
            if msg:
                messages.append(msg)
    return messages


def load_chat_messages():
    """
    Loads and combines chat messages from all CSV files in the chat folder.

    Returns:
        list: Combined list of all message strings from all CSV files.
    """
    messages = []

    # Find all CSV files in the chat folder
    csv_files = glob.glob("chat/*.csv")

    if not csv_files:
        logger.error("No CSV files found in chat folder")
        return messages

    logger.info(
        f"Found {len(csv_files)} CSV files: {[os.path.basename(f) for f in csv_files]}"
    )

    # Load messages from each CSV file
    for csv_file in csv_files:
        file_messages = load_chat_messages_from_disk(csv_file)
        messages.extend(file_messages)
        logger.info(
            f"Loaded {len(file_messages)} messages from {os.path.basename(csv_file)}"
        )

    logger.info(f"Total messages loaded: {len(messages)}")

    return messages


schizo_messages = load_chat_messages()
# There are none !
@@ -1 +0,0 @@
from .handle_request import fetch_cooked_posts, get_topics_by_id, get_topics_by_tag
@@ -1,134 +0,0 @@
import os
import aiohttp
import asyncio
from bs4 import BeautifulSoup

from bot.log import logger

DISCOURSE_BASE_URL = os.getenv("DISCOURSE_BASE_URL")
API_KEY = os.getenv("DISCOURSE_API_KEY")
API_USERNAME = os.getenv("DISCOURSE_API_USERNAME")

headers = {"Api-Key": API_KEY, "Api-Username": API_USERNAME}


async def get_topics_by_id(topic_id):
    """
    Async: Fetches a topic by its ID and returns the topic data.

    Args:
        topic_id (int): The ID of the topic to fetch.

    Returns:
        dict or None: The topic data if successful, otherwise None.
    """
    url = f"{DISCOURSE_BASE_URL}/t/{topic_id}.json"
    timeout = aiohttp.ClientTimeout(total=5)
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=headers, timeout=timeout) as response:
                if response.status == 200:
                    return await response.json()
                elif response.status == 403:
                    logger.error(
                        f"Access forbidden for topic {topic_id}: {response.status}"
                    )
                    return None
                else:
                    text = await response.text()
                    logger.error(
                        f"Error fetching topic {topic_id}: {response.status} - {text}"
                    )
                    return None
    except asyncio.TimeoutError:
        logger.error(f"Timeout while fetching topic {topic_id}")
        return None
    except aiohttp.ClientError as e:
        logger.error(f"Request failed for topic {topic_id}: {e}")
        return None


async def get_topics_by_tag(tag_name):
    """
    Async: Fetches all topics with a specific tag and retrieves the cooked string from each post.

    Args:
        tag_name (str): The name of the tag to filter topics.

    Returns:
        list: A list of cooked strings from all posts in the topics.
    """
    url = f"{DISCOURSE_BASE_URL}/tag/{tag_name}.json"
    timeout = aiohttp.ClientTimeout(total=5)
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=headers, timeout=timeout) as response:
                if response.status == 200:
                    data = await response.json()
                    topics = data.get("topic_list", {}).get("topics", [])
                    cooked_strings = []
                    for topic in topics:
                        topic_id = topic["id"]
                        topic_data = await get_topics_by_id(topic_id)
                        if topic_data:
                            posts = topic_data.get("post_stream", {}).get("posts", [])
                            for post in posts:
                                cooked_strings.append(post.get("cooked", ""))
                    return cooked_strings
                elif response.status == 403:
                    logger.error(
                        f"Access forbidden for tag '{tag_name}': {response.status}"
                    )
                    return None
                else:
                    text = await response.text()
                    logger.error(
                        f"Error fetching topics with tag '{tag_name}': {response.status} - {text}"
                    )
                    return []
    except asyncio.TimeoutError:
        logger.error(f"Timeout while fetching topics with tag '{tag_name}'")
        return []
    except aiohttp.ClientError as e:
        logger.error(f"Request failed for topics with tag {tag_name}: {e}")
        return []


async def fetch_cooked_posts(tag_name):
    """
    Async: Fetches cooked strings from posts with a specific tag.

    Args:
        tag_name (str): The name of the tag to filter topics.

    Returns:
        list: A list of cooked strings from posts with the specified tag.
    """
    return await get_topics_by_tag(tag_name)


def html_to_text(html_content):
    """
    Cleans the provided HTML content and converts it to plain text.

    Args:
        html_content (str): The HTML content to clean.

    Returns:
        str: The cleaned plain text.
    """
    soup = BeautifulSoup(html_content, "html.parser")
    return soup.get_text(separator="\n").strip()


def combine_posts_text(posts):
    """
    Combines the cooked content of all posts into a single plain text block.

    Args:
        posts (list): A list of posts, each containing a "cooked" HTML string.

    Returns:
        str: The combined plain text of all posts.
    """
    return "\n\n".join([html_to_text(post["cooked"]) for post in posts])
@@ -1,12 +1,14 @@
import time
import re
from datetime import timedelta

import discord

from bot.ai.handle_request import forward_to_google_api
from bot.log import logger
from bot.utils import aware_utcnow, timeout_member, safe_truncate
from database import add_user_to_role, is_user_blacklisted
from database import add_user_to_role
from bot.mongodb.load_db import DeletedMessage
from bot.mongodb.load_db import write_deleted_message_to_collection

BOT_LOG = 1112049391482703873
GENERAL_CHANNEL = 1110531063744303138

@@ -56,71 +58,6 @@ def fetch_image_from_message(message):
    return image_object


async def handle_bot_mention(message, bot, no_context=False):
    staff_role = message.guild.get_role(ADMIN_ROLE_ID)
    member = message.guild.get_member(message.author.id)

    # Check if the message is in an allowed channel
    if message.channel.id not in ALLOWED_CHANNELS:
        await message.reply(
            "The AI cannot used in this channel.",
            mention_author=True,
        )
        return True

    if is_user_blacklisted(message.author.id):
        message.reply(
            "You are blacklisted from using this command.", mention_author=True
        )
        return True

    # Cooldown logic: max 1 use per minute per user
    now = time.time()
    user_id = message.author.id
    timestamps = MENTION_COOLDOWNS.get(user_id, [])
    # Remove timestamps older than 60 seconds
    timestamps = [t for t in timestamps if now - t < 60]
    if len(timestamps) >= 1 and not staff_role in member.roles:
        await message.reply(
            "You are using this feature too quickly. Please wait before trying again.",
            mention_author=True,
        )
        return True
    timestamps.append(now)
    MENTION_COOLDOWNS[user_id] = timestamps

    # Prioritize the image object from the first message
    image_object = fetch_image_from_message(message)

    # Check if the message is a reply to another message
    reply_content = None
    if message.reference:
        try:
            referenced_message = await message.channel.fetch_message(
                message.reference.message_id
            )
            reply_content = referenced_message

            # Check if the referenced message has an image object (if not already set)
            if image_object is None:
                image_object = fetch_image_from_message(referenced_message)

        except discord.NotFound:
            logger.error("Referenced message not found.")
        except discord.Forbidden:
            logger.error(
                "Bot does not have permission to fetch the referenced message."
            )
        except discord.HTTPException as e:
            logger.error(
                "An error occurred while fetching the referenced message: %s", e
            )

    # Pass the reply content to forward_to_google_api
    await forward_to_google_api(message, bot, image_object, reply_content, no_context)
    return True


async def handle_dm(message):
    await message.channel.send(
        "If you DM this bot again, I will carpet-bomb your house."

@@ -322,6 +259,17 @@ async def handle_bulk_message_delete(messages, bot):
        return

    for message in messages:
        deleted_message = DeletedMessage(
            message_id=message.id,
            channel_id=message.channel.id,
            author_id=message.author.id,
            author_name=message.author.name,
            content=message.content or "",
            timestamp=message.created_at,
        )

        write_deleted_message_to_collection(deleted_message)

        embed = discord.Embed(
            title="Deleted Message",
            description="A message was deleted.",

@@ -362,6 +310,17 @@ async def handle_message_delete(message, bot):
    # It is impossible to recover the message at this point
    return

    deleted_message = DeletedMessage(
        message_id=message.id,
        channel_id=message.channel.id,
        author_id=message.author.id,
        author_name=message.author.name,
        content=message.content or "",
        timestamp=message.created_at,
    )

    write_deleted_message_to_collection(deleted_message)

    embed = discord.Embed(
        title="Deleted Message",
        description="A message was deleted.",

@@ -376,16 +335,23 @@ async def handle_message_delete(message, bot):
        inline=False,
    )  # noqa

    if message.reference is not None:
        original_message = await message.channel.fetch_message(
            message.reference.message_id
        )
    try:
        if message.reference is not None:
            original_message = await message.channel.fetch_message(
                message.reference.message_id
            )

            embed.add_field(
                name="Replied",
                value=original_message.author.mention,
                inline=False,  # noqa
            )  # noqa
        embed.add_field(
            name="Replied",
            value=original_message.author.mention,
            inline=False,  # noqa
        )  # noqa
    except discord.NotFound:
        logger.warning("Referenced message not found")
    except discord.Forbidden:
        logger.error("No permission to access the referenced message")
    except discord.HTTPException as e:
        logger.error(f"Error fetching message: {e}")

    embed.set_footer(
        text=f"Message ID: {message.id} | Author ID: {message.author.id}"  # noqa

@@ -409,15 +375,6 @@ async def handle_message(message, bot):
        await handle_dm(message)
        return

    grok_role = message.guild.get_role(GROK_ROLE_ID)
    if grok_role in message.role_mentions:
        if await handle_bot_mention(message, bot, True):
            return

    if bot.user in message.mentions:
        if await handle_bot_mention(message, bot):
            return

    # Too many mentions
    if len(message.mentions) >= 3:
        member = message.guild.get_member(message.author.id)

@@ -425,12 +382,22 @@ async def handle_message(message, bot):
        await message.delete()
        return

    if len(message.embeds) > 2:
    if len(message.embeds) > 2 or len(message.attachments) > 3:
        member = message.guild.get_member(message.author.id)
        await timeout_member(member, timedelta(minutes=5), "Too many embeds")
        await message.delete()
        return

    image_pattern = r"\.(?:jpg|jpeg|png|gif|webp|bmp)\b"
    raw_attachment_count = len(
        re.findall(image_pattern, message.content, re.IGNORECASE)
    )
    if raw_attachment_count > 3:
        member = message.guild.get_member(message.author.id)
        await timeout_member(member, timedelta(minutes=5), "Suspicious")
        await message.delete()
        return

    if "@everyone" in message.content or "@here" in message.content:
        if not message.channel.permissions_for(message.author).mention_everyone:
            spam_role = message.guild.get_role(SPAM_ROLE_ID)
1  bot/mongodb/__init__.py  (new file)
@@ -0,0 +1 @@
from .load_db import load_chat_messages_from_db, read_random_message_from_collection
146  bot/mongodb/load_db.py  (new file)
@@ -0,0 +1,146 @@
import os
from datetime import datetime, timezone
from dataclasses import dataclass, asdict, field

from bot.log import logger

from pymongo import MongoClient

MONGO_URI = os.getenv("MONGO_URI")


@dataclass
class DeletedMessage:
    message_id: int
    channel_id: int
    author_id: int
    author_name: str
    content: str
    timestamp: datetime
    deleted_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

    def to_dict(self):
        return asdict(self)


def get_mongodb_uri():
    if not MONGO_URI:
        logger.error("MONGO_URI is not set. Please contact the administrator.")
        return "mongodb://localhost:27017"

    return MONGO_URI


def write_deleted_message_to_collection(
    deleted_message: DeletedMessage,
    database="discord_bot",
    collection="deleted_messages",
):
    mongo_uri = get_mongodb_uri()

    try:
        with MongoClient(mongo_uri) as client:
            db = client[database]
            col = db[collection]

            logger.debug(
                f"Connecting to MongoDB at {mongo_uri}, DB='{database}', Collection='{collection}'"
            )

            result = col.insert_one(deleted_message.to_dict())
            logger.debug(f"Deleted message logged with _id: {result.inserted_id}")
    except Exception as e:
        logger.error(f"Failed to write a deleted message to MongoDB: {e}")
        return []


def read_messages_from_collection(
    database="discord_bot",
    collection="messages",
):
    """
    Loads all chat messages from MongoDB.

    Args:
        mongo_uri (str): MongoDB connection URI
        database (str): Name of the MongoDB database
        collection (str): Name of the collection

    Returns:
        list: list of message strings
    """
    mongo_uri = get_mongodb_uri()

    try:
        with MongoClient(mongo_uri) as client:
            db = client[database]
            col = db[collection]

            logger.debug(
                f"Connecting to MongoDB at {mongo_uri}, DB='{database}', Collection='{collection}'"
            )

            cursor = col.find({}, {"message": 1})
            messages = [doc["message"] for doc in cursor if "message" in doc]

            logger.info(f"Loaded {len(messages)} messages from MongoDB")

            return messages
    except Exception as e:
        logger.error(f"Failed to load messages from MongoDB: {e}")
        return []


def read_random_message_from_collection(
    database="discord_bot",
    collection="messages",
):
    """
    Loads a random chat message from MongoDB.

    Args:
        database (str): Name of the MongoDB database
        collection (str): Name of the collection

    Returns:
        str or None: random message string, or None if collection is empty
    """
    mongo_uri = get_mongodb_uri()

    try:
        with MongoClient(mongo_uri) as client:
            db = client[database]
            col = db[collection]

            logger.debug(
                f"Connecting to MongoDB at {mongo_uri}, DB='{database}', Collection='{collection}'"
            )

            # Use aggregation with $sample to get a random document
            pipeline = [{"$sample": {"size": 1}}]

            cursor = col.aggregate(pipeline)
            # almost random
            random_docs = list(cursor)

            if random_docs and "message" in random_docs[0]:
                message = random_docs[0]["message"]
                logger.info(f"Loaded random message from MongoDB: {message[:100]}...")
                return message

            logger.warning("No messages found in collection")
            return None

    except Exception as e:
        logger.error(f"Failed to load random message from MongoDB: {e}")
        return None


def load_chat_messages_from_db():
    messages = []

    messages = read_messages_from_collection()
    if not messages:
        logger.warning("messages collection is empty after loading from MongoDB!")

    return messages
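A hypothetical round trip through the helpers defined in bot/mongodb/load_db.py above (the database and collection names are the module defaults; this exact call site is not shown in the diff):

```python
from datetime import datetime, timezone

from bot.mongodb.load_db import (
    DeletedMessage,
    read_random_message_from_collection,
    write_deleted_message_to_collection,
)

# Archive a deleted message into discord_bot.deleted_messages.
msg = DeletedMessage(
    message_id=1,
    channel_id=2,
    author_id=3,
    author_name="example_user",
    content="hello world",
    timestamp=datetime.now(timezone.utc),
)
write_deleted_message_to_collection(msg)

# Pull one $sample-selected message from discord_bot.messages (None if the collection is empty).
print(read_random_message_from_collection())
```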
57  bot/tasks.py
@@ -5,10 +5,9 @@ import discord
import requests
from discord.ext import commands, tasks

from bot.config import schizo_messages
from bot.discourse.handle_request import combine_posts_text, fetch_cooked_posts
from bot.log import logger
from bot.utils import aware_utcnow, fetch_api_data
from bot.mongodb import read_random_message_from_collection
from database import migrate_users_with_role

TARGET_DATE = datetime(2036, 8, 12, tzinfo=timezone.utc)

@@ -125,44 +124,20 @@ class SteamSaleChecker(commands.Cog):
        await self.bot.wait_until_ready()


class DiscourseUpdater(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.update_discourse_data.start()  # Start the task when the cog is loaded

    def cog_unload(self):
        self.update_discourse_data.cancel()  # Stop the task when the cog is unloaded

    @tasks.loop(hours=6)
    async def update_discourse_data(self):
        """
        Periodically fetches and updates Discourse data for the bot.
        """
        tag_name = "docs"
        logger.info("Fetching Discourse data...")
        cooked_posts = await fetch_cooked_posts(tag_name)
        if cooked_posts:
            combined_text = combine_posts_text(
                [{"cooked": post} for post in cooked_posts]
            )
            self.bot.ai_helper.set_discourse_data(combined_text)
            logger.info("Discourse data updated successfully.")
        else:
            logger.warning(f"No posts found for tag '{tag_name}'.")

    @update_discourse_data.before_loop
    async def before_update_discourse_data(self):
        await self.bot.wait_until_ready()


async def setup(bot):
    @tasks.loop(minutes=10)
    async def update_status():
        data = fetch_api_data()
        countPlayers = data.get("countPlayers", 0)
        countServers = data.get("countServers", 0)
        total_players = 0
        total_servers = 0

        if data:
            for game_key, game_data in data.items():
                total_players += game_data.get("players", 0)
                total_servers += game_data.get("servers", 0)

        activity = discord.Game(
            name=f"with {countPlayers} players on {countServers} servers"
            name=f"with {total_players} players on {total_servers} servers"
        )
        await bot.change_presence(activity=activity)

@@ -192,11 +167,14 @@ async def setup(bot):
    @tasks.loop(hours=5)
    async def shizo_message():
        channel = bot.get_channel(OFFTOPIC_CHANNEL)
        if channel and schizo_messages:
            message = random.choice(schizo_messages)
            await channel.send(message)
        if channel:
            message = read_random_message_from_collection()
            if message:
                await channel.send(message)
            else:
                logger.error("No funny messages were found.")
        else:
            logger.error("Channel not found or schizo_messages is empty.")
            logger.error("Channel not found. Check the OFFTOPIC_CHANNEL variable.")

    @tasks.loop(hours=24)
    async def share_dementia_image():

@@ -215,6 +193,5 @@ async def setup(bot):
    share_dementia_image.start()

    await bot.add_cog(SteamSaleChecker(bot))
    await bot.add_cog(DiscourseUpdater(bot))

    logger.info("Tasks extension loaded!")
84  bot/utils.py
@@ -1,10 +1,10 @@
import random
import re
from datetime import datetime, timedelta, timezone
import requests
from requests.exceptions import RequestException, Timeout, ConnectionError

import discord
import requests

from bot.log import logger

@@ -13,59 +13,45 @@ def aware_utcnow():


def fetch_api_data():
    response = requests.get("https://api.getserve.rs/v1/servers/alterware")
    if response.status_code == 200:
        return response.json()
    return {}
    """
    Fetch data from the getserve.rs API

    Returns:
        dict: API response data or empty dict on failure
    """
    url = "https://server.alterware.dev/stats.json"


async def fetch_game_stats(game: str):
    url = f"https://api.getserve.rs/v1/servers/alterware/{game}"
    response = requests.get(url)
    if response.status_code == 200:
        return response.json()
    else:
        return None
    try:
        response = requests.get(url, timeout=10)

        response.raise_for_status()


async def compile_stats():
    games = ["s1", "iw6", "t7"]
    stats_message = "**Stats for all games:**\n"
    for game in games:
        data = await fetch_game_stats(game)
        if data:
            count_servers = data.get("countServers", "N/A")
            count_players = data.get("countPlayers", "N/A")
            stats_message += f"**{game.upper()}:** Total Servers: {count_servers}, Total Players: {count_players}\n"
        if response.status_code == 200:
            return response.json()
        else:
            stats_message += f"**{game.upper()}:** Failed to fetch stats.\n"
    return stats_message
            logger.warning(f"API returned non-200 status: {response.status_code}")
            return {}

    except Timeout:
        logger.error(f"Request to {url} timed out after 10 seconds")
        return {}


async def perform_search(query: str):
    data = fetch_api_data()
    servers = data.get("servers", [])
    matching_servers = [
        server
        for server in servers
        if query.lower() in server.get("hostnameDisplay", "").lower()
        or query.lower() in server.get("ip", "").lower()
    ]
    except ConnectionError as e:
        # This catches DNS resolution errors, connection refused, etc.
        logger.error(f"Connection error for {url}: {e}")
        return {}

    if not matching_servers:
        return "No servers found matching your query."
    except RequestException as e:
        logger.error(f"Request failed for {url}: {e}")
        return {}

    max_results = 5
    message = (
        f'Top {min(len(matching_servers), max_results)} servers matching "{query}":\n'
    )
    for server in matching_servers[:max_results]:
        message += (
            f"- **{server['hostnameDisplay']}** | {server['gameDisplay']} | "
            f"**Gametype**: {server['gametypeDisplay']} | **Map**: {server['mapDisplay']} | "
            f"**Players**: {server['realClients']}/{server['maxplayers']}\n"
        )
    return message
    except ValueError as e:
        logger.error(f"Failed to parse JSON response from {url}: {e}")
        return {}

    except Exception as e:
        logger.error(f"Unexpected error while fetching data from {url}: {e}")
        return {}


# Timeout a member
@@ -80,9 +66,9 @@ async def timeout_member(

    try:
        # Debug: Print the member object and timeout duration
        logger.debug(f"Debug: Attempting to timeout member {member} (ID: {member.id}).")
        logger.debug(f"Debug: Timeout duration set to {duration}.")
        logger.debug(f"Debug: Reason: {reason}")
        logger.debug(f"Attempting to timeout member {member} (ID: {member.id}).")
        logger.debug(f"Timeout duration set to {duration}.")
        logger.debug(f"Reason: {reason}")

        await member.timeout(duration, reason=reason)
        logger.info(f"Successfully timed out {member}.")
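A short sketch of how the reworked `fetch_api_data()` is consumed by the status task in bot/tasks.py: the stats.json payload is assumed to map game keys to dicts with `players` and `servers` counts, and an empty dict signals failure.

```python
from bot.utils import fetch_api_data

data = fetch_api_data()  # {} on timeout, connection error, or bad JSON

total_players = sum(game.get("players", 0) for game in data.values())
total_servers = sum(game.get("servers", 0) for game in data.values())

print(f"with {total_players} players on {total_servers} servers")
```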
121  chat/LICENSE
@@ -1,121 +0,0 @@
|
||||
Creative Commons Legal Code
|
||||
|
||||
CC0 1.0 Universal
|
||||
|
||||
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
|
||||
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
|
||||
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
|
||||
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
|
||||
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
|
||||
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
|
||||
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
|
||||
HEREUNDER.
|
||||
|
||||
Statement of Purpose
|
||||
|
||||
The laws of most jurisdictions throughout the world automatically confer
|
||||
exclusive Copyright and Related Rights (defined below) upon the creator
|
||||
and subsequent owner(s) (each and all, an "owner") of an original work of
|
||||
authorship and/or a database (each, a "Work").
|
||||
|
||||
Certain owners wish to permanently relinquish those rights to a Work for
|
||||
the purpose of contributing to a commons of creative, cultural and
|
||||
scientific works ("Commons") that the public can reliably and without fear
|
||||
of later claims of infringement build upon, modify, incorporate in other
|
||||
works, reuse and redistribute as freely as possible in any form whatsoever
|
||||
and for any purposes, including without limitation commercial purposes.
|
||||
These owners may contribute to the Commons to promote the ideal of a free
|
||||
culture and the further production of creative, cultural and scientific
|
||||
works, or to gain reputation or greater distribution for their Work in
|
||||
part through the use and efforts of others.
|
||||
|
||||
For these and/or other purposes and motivations, and without any
|
||||
expectation of additional consideration or compensation, the person
|
||||
associating CC0 with a Work (the "Affirmer"), to the extent that he or she
|
||||
is an owner of Copyright and Related Rights in the Work, voluntarily
|
||||
elects to apply CC0 to the Work and publicly distribute the Work under its
|
||||
terms, with knowledge of his or her Copyright and Related Rights in the
|
||||
Work and the meaning and intended legal effect of CC0 on those rights.
|
||||
|
||||
1. Copyright and Related Rights. A Work made available under CC0 may be
|
||||
protected by copyright and related or neighboring rights ("Copyright and
|
||||
Related Rights"). Copyright and Related Rights include, but are not
|
||||
limited to, the following:
|
||||
|
||||
i. the right to reproduce, adapt, distribute, perform, display,
|
||||
communicate, and translate a Work;
|
||||
ii. moral rights retained by the original author(s) and/or performer(s);
|
||||
iii. publicity and privacy rights pertaining to a person's image or
|
||||
likeness depicted in a Work;
|
||||
iv. rights protecting against unfair competition in regards to a Work,
|
||||
subject to the limitations in paragraph 4(a), below;
|
||||
v. rights protecting the extraction, dissemination, use and reuse of data
|
||||
in a Work;
|
||||
vi. database rights (such as those arising under Directive 96/9/EC of the
|
||||
European Parliament and of the Council of 11 March 1996 on the legal
|
||||
protection of databases, and under any national implementation
|
||||
thereof, including any amended or successor version of such
|
||||
directive); and
|
||||
vii. other similar, equivalent or corresponding rights throughout the
|
||||
world based on applicable law or treaty, and any national
|
||||
implementations thereof.
|
||||
|
||||
2. Waiver. To the greatest extent permitted by, but not in contravention
|
||||
of, applicable law, Affirmer hereby overtly, fully, permanently,
|
||||
irrevocably and unconditionally waives, abandons, and surrenders all of
|
||||
Affirmer's Copyright and Related Rights and associated claims and causes
|
||||
of action, whether now known or unknown (including existing as well as
|
||||
future claims and causes of action), in the Work (i) in all territories
|
||||
worldwide, (ii) for the maximum duration provided by applicable law or
|
||||
treaty (including future time extensions), (iii) in any current or future
|
||||
medium and for any number of copies, and (iv) for any purpose whatsoever,
|
||||
including without limitation commercial, advertising or promotional
|
||||
purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
|
||||
member of the public at large and to the detriment of Affirmer's heirs and
|
||||
successors, fully intending that such Waiver shall not be subject to
|
||||
revocation, rescission, cancellation, termination, or any other legal or
|
||||
equitable action to disrupt the quiet enjoyment of the Work by the public
|
||||
as contemplated by Affirmer's express Statement of Purpose.
|
||||
|
||||
3. Public License Fallback. Should any part of the Waiver for any reason
|
||||
be judged legally invalid or ineffective under applicable law, then the
|
||||
Waiver shall be preserved to the maximum extent permitted taking into
|
||||
account Affirmer's express Statement of Purpose. In addition, to the
|
||||
extent the Waiver is so judged Affirmer hereby grants to each affected
|
||||
person a royalty-free, non transferable, non sublicensable, non exclusive,
|
||||
irrevocable and unconditional license to exercise Affirmer's Copyright and
|
||||
Related Rights in the Work (i) in all territories worldwide, (ii) for the
|
||||
maximum duration provided by applicable law or treaty (including future
|
||||
time extensions), (iii) in any current or future medium and for any number
|
||||
of copies, and (iv) for any purpose whatsoever, including without
|
||||
limitation commercial, advertising or promotional purposes (the
|
||||
"License"). The License shall be deemed effective as of the date CC0 was
|
||||
applied by Affirmer to the Work. Should any part of the License for any
|
||||
reason be judged legally invalid or ineffective under applicable law, such
|
||||
partial invalidity or ineffectiveness shall not invalidate the remainder
|
||||
of the License, and in such case Affirmer hereby affirms that he or she
|
||||
will not (i) exercise any of his or her remaining Copyright and Related
|
||||
Rights in the Work or (ii) assert any associated claims and causes of
|
||||
action with respect to the Work, in either case contrary to Affirmer's
|
||||
express Statement of Purpose.
|
||||
|
||||
4. Limitations and Disclaimers.
|
||||
|
||||
a. No trademark or patent rights held by Affirmer are waived, abandoned,
|
||||
surrendered, licensed or otherwise affected by this document.
|
||||
b. Affirmer offers the Work as-is and makes no representations or
|
||||
warranties of any kind concerning the Work, express, implied,
|
||||
statutory or otherwise, including without limitation warranties of
|
||||
title, merchantability, fitness for a particular purpose, non
|
||||
infringement, or the absence of latent or other defects, accuracy, or
|
||||
the present or absence of errors, whether or not discoverable, all to
|
||||
the greatest extent permissible under applicable law.
|
||||
c. Affirmer disclaims responsibility for clearing rights of other persons
|
||||
that may apply to the Work or any use thereof, including without
|
||||
limitation any person's Copyright and Related Rights in the Work.
|
||||
Further, Affirmer disclaims responsibility for obtaining any necessary
|
||||
consents, permissions or other rights required for any use of the
|
||||
Work.
|
||||
d. Affirmer understands and acknowledges that Creative Commons is not a
|
||||
party to this document and has no duty or obligation with respect to
|
||||
this CC0 or use of the Work.
|
||||
@@ -1,25 +0,0 @@
# Game Chat Text Corpus

## Overview

This folder contains a collection of text data sourced from public game chat channels in various online multiplayer games.

## Data Collection

The text provided in the CSV file was collected **with the expressed consent of server owners** and comprises messages written by players in public game chat environments.

## Content Warning

**WARNING: The files contain explicit language including obscenity and profanity.**

The dataset reflects unfiltered player communications and may include offensive, inappropriate, or otherwise objectionable content. Viewer discretion is advised.

## License

This work is licensed under the **CC0 1.0 Universal (CC0 1.0) Public Domain Dedication**.

For complete license details, please see the full text in [LICENSE](LICENSE).

## Usage

By using this dataset, you acknowledge that you have read and understood the content warning and agree to the terms of the CC0 1.0 Universal license.
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -25,7 +25,7 @@ def initialize_db():
    logger.info("Done loading database: %s", DB_PATH)


def add_pattern(regex: str, response: str):
def add_meme_pattern(regex: str, response: str):
    """Adds a new pattern to the database."""
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

@@ -38,7 +38,7 @@ def add_pattern(regex: str, response: str):
    conn.close()


def get_patterns():
def get_meme_patterns():
    """Fetches all regex-response pairs from the database."""
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

@@ -50,7 +50,7 @@ def get_patterns():
    return [{"regex": row[0], "response": row[1]} for row in patterns]


def remove_pattern(pattern_id: int):
def remove_meme_pattern(pattern_id: int):
    """Removes a pattern by ID."""
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

@@ -126,3 +126,33 @@ def is_user_blacklisted(user_id: int) -> bool:
    conn.close()

    return result is not None


def add_aka_response(aka: str, response: str) -> None:
    """
    Insert a new AKA/response pair into the database.
    """
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    cursor.execute(
        "INSERT INTO aka_list (aka, response) VALUES (?, ?)", (aka, response)
    )

    conn.commit()
    conn.close()


def search_aka(keyword: str) -> str | None:
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    # Exact match (case-insensitive)
    cursor.execute(
        "SELECT response FROM aka_list WHERE LOWER(aka) = LOWER(?) LIMIT 1", (keyword,)
    )

    row = cursor.fetchone()
    conn.close()

    return row[0] if row else None
@@ -16,3 +16,9 @@ CREATE TABLE IF NOT EXISTS black_list (
    date_assigned TEXT,
    reason TEXT
);

CREATE TABLE IF NOT EXISTS aka_list (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    aka TEXT NOT NULL,
    response TEXT NOT NULL
);
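A hypothetical round trip through the new `aka_list` table and the `add_aka_response`/`search_aka` helpers above; the inserted pair is an illustrative example, not data from the repository:

```python
from database import add_aka_response, search_aka

add_aka_response("iw6", "The AlterWare client for Call of Duty: Ghosts")  # example pair

print(search_aka("IW6"))      # exact, case-insensitive match -> the response above
print(search_aka("unknown"))  # no match -> None
```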
@@ -3,5 +3,4 @@ requests
audioop-lts
python-dotenv
pynacl
google-genai
beautifulsoup4
pymongo