commit
7626bb6a76
@ -0,0 +1,26 @@
|
|||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
about: Create an issue to report a bug
|
||||||
|
title: ''
|
||||||
|
labels: bug
|
||||||
|
assignees: bobloy
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Describe the bug**
|
||||||
|
<!--A clear and concise description of what the bug is.-->
|
||||||
|
|
||||||
|
**To Reproduce**
|
||||||
|
<!--Steps to reproduce the behavior:-->
|
||||||
|
1. Load cog '...'
|
||||||
|
2. Run command '....'
|
||||||
|
3. See error
|
||||||
|
|
||||||
|
**Expected behavior**
|
||||||
|
<!--A clear and concise description of what you expected to happen.-->
|
||||||
|
|
||||||
|
**Screenshots or Error Messages**
|
||||||
|
<!--If applicable, add screenshots to help explain your problem.-->
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
<!--Add any other context about the problem here.-->
|
@ -0,0 +1,14 @@
|
|||||||
|
---
|
||||||
|
name: Feature request
|
||||||
|
about: Suggest an idea for this project
|
||||||
|
title: "[Feature Request]"
|
||||||
|
labels: enhancement
|
||||||
|
assignees: ''
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Is your feature request related to a problem? Please describe.**
|
||||||
|
<!--A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]-->
|
||||||
|
|
||||||
|
**Describe the solution you'd like**
|
||||||
|
<!--A clear and concise description of what you want to happen. Include which cog or cogs this would interact with-->
|
@ -0,0 +1,26 @@
|
|||||||
|
---
|
||||||
|
name: New AudioTrivia List
|
||||||
|
about: Submit a new AudioTrivia list to be added
|
||||||
|
title: "[AudioTrivia Submission]"
|
||||||
|
labels: 'cog: audiotrivia'
|
||||||
|
assignees: bobloy
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**What is this trivia list?**
|
||||||
|
<!--What's in the list? What kind of category is?-->
|
||||||
|
|
||||||
|
**Number of Questions**
|
||||||
|
<!--Rough estimate at the number of question in this list-->
|
||||||
|
|
||||||
|
**Original Content?**
|
||||||
|
<!--Did you come up with this list yourself or did you get it from some else's work?-->
|
||||||
|
<!--If no, be sure to include the source-->
|
||||||
|
- [ ] Yes
|
||||||
|
- [ ] No
|
||||||
|
|
||||||
|
|
||||||
|
**Did I test the list?**
|
||||||
|
<!--Did you already try out the list and find no bugs?-->
|
||||||
|
- [ ] Yes
|
||||||
|
- [ ] No
|
@ -0,0 +1,62 @@
|
|||||||
|
'cog: announcedaily':
|
||||||
|
- announcedaily/*
|
||||||
|
'cog: audiotrivia':
|
||||||
|
- audiotrivia/*
|
||||||
|
'cog: ccrole':
|
||||||
|
- ccrole/*
|
||||||
|
'cog: chatter':
|
||||||
|
- chatter/*
|
||||||
|
'cog: conquest':
|
||||||
|
- conquest/*
|
||||||
|
'cog: dad':
|
||||||
|
- dad/*
|
||||||
|
'cog: exclusiverole':
|
||||||
|
- exclusiverole/*
|
||||||
|
'cog: fifo':
|
||||||
|
- fifo/*
|
||||||
|
'cog: firstmessage':
|
||||||
|
- firstmessage/*
|
||||||
|
'cog: flag':
|
||||||
|
- flag/*
|
||||||
|
'cog: forcemention':
|
||||||
|
- forcemention/*
|
||||||
|
'cog: hangman':
|
||||||
|
- hangman
|
||||||
|
'cog: infochannel':
|
||||||
|
- infochannel/*
|
||||||
|
'cog: isitdown':
|
||||||
|
- isitdown/*
|
||||||
|
'cog: launchlib':
|
||||||
|
- launchlib/*
|
||||||
|
'cog: leaver':
|
||||||
|
- leaver/*
|
||||||
|
'cog: lovecalculator':
|
||||||
|
- lovecalculator/*
|
||||||
|
'cog: lseen':
|
||||||
|
- lseen/*
|
||||||
|
'cog: nudity':
|
||||||
|
- nudity/*
|
||||||
|
'cog: planttycoon':
|
||||||
|
- planttycoon/*
|
||||||
|
'cog: qrinvite':
|
||||||
|
- qrinvite/*
|
||||||
|
'cog: reactrestrict':
|
||||||
|
- reactrestrict/*
|
||||||
|
'cog: recyclingplant':
|
||||||
|
- recyclingplant/*
|
||||||
|
'cog: rpsls':
|
||||||
|
- rpsls/*
|
||||||
|
'cog: sayurl':
|
||||||
|
- sayurl/*
|
||||||
|
'cog: scp':
|
||||||
|
- scp/*
|
||||||
|
'cog: stealemoji':
|
||||||
|
- stealemoji/*
|
||||||
|
'cog: timerole':
|
||||||
|
- timerole/*
|
||||||
|
'cog: tts':
|
||||||
|
- tts/*
|
||||||
|
'cog: unicode':
|
||||||
|
- unicode/*
|
||||||
|
'cog: werewolf':
|
||||||
|
- werewolf
|
@ -0,0 +1,20 @@
|
|||||||
|
# GitHub Action that uses Black to reformat the Python code in an incoming pull request.
|
||||||
|
# If all Python code in the pull request is compliant with Black then this Action does nothing.
|
||||||
|
# Othewrwise, Black is run and its changes are committed back to the incoming pull request.
|
||||||
|
# https://github.com/cclauss/autoblack
|
||||||
|
|
||||||
|
name: black
|
||||||
|
on: [pull_request]
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Set up Python 3.8
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: '3.8'
|
||||||
|
- name: Install Black
|
||||||
|
run: pip install --upgrade --no-cache-dir black
|
||||||
|
- name: Run black --check .
|
||||||
|
run: black --check --diff -l 99 .
|
@ -0,0 +1,19 @@
|
|||||||
|
# This workflow will triage pull requests and apply a label based on the
|
||||||
|
# paths that are modified in the pull request.
|
||||||
|
#
|
||||||
|
# To use this workflow, you will need to set up a .github/labeler.yml
|
||||||
|
# file with configuration. For more information, see:
|
||||||
|
# https://github.com/actions/labeler
|
||||||
|
|
||||||
|
name: Labeler
|
||||||
|
on: [pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
label:
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/labeler@2.2.0
|
||||||
|
with:
|
||||||
|
repo-token: "${{ secrets.GITHUB_TOKEN }}"
|
@ -1,4 +1,5 @@
|
|||||||
AUTHOR: Plab
|
AUTHOR: Plab
|
||||||
|
AUDIO: "[Audio] Identify this Anime!"
|
||||||
https://www.youtube.com/watch?v=2uq34TeWEdQ:
|
https://www.youtube.com/watch?v=2uq34TeWEdQ:
|
||||||
- 'Hagane no Renkinjutsushi (2009)'
|
- 'Hagane no Renkinjutsushi (2009)'
|
||||||
- '(2009) الخيميائي المعدني الكامل'
|
- '(2009) الخيميائي المعدني الكامل'
|
@ -1,4 +1,5 @@
|
|||||||
AUTHOR: Lazar
|
AUTHOR: Lazar
|
||||||
|
AUDIO: "[Audio] Identify this NHL Team by their goal horn"
|
||||||
https://youtu.be/6OejNXrGkK0:
|
https://youtu.be/6OejNXrGkK0:
|
||||||
- Anaheim Ducks
|
- Anaheim Ducks
|
||||||
- Anaheim
|
- Anaheim
|
File diff suppressed because it is too large
Load Diff
@ -1,13 +1,14 @@
|
|||||||
AUTHOR: Plab
|
AUTHOR: Plab
|
||||||
https://www.youtube.com/watch?v=--bWm9hhoZo:
|
NEEDS: New links for all songs.
|
||||||
|
https://www.youtube.com/watch?v=f9O2Rjn1azc:
|
||||||
- Transistor
|
- Transistor
|
||||||
https://www.youtube.com/watch?v=-4nCbgayZNE:
|
https://www.youtube.com/watch?v=PgUhYFkVdSY:
|
||||||
- Dark Cloud 2
|
- Dark Cloud 2
|
||||||
- Dark Cloud II
|
- Dark Cloud II
|
||||||
https://www.youtube.com/watch?v=-64NlME4lJU:
|
https://www.youtube.com/watch?v=1T1RZttyMwU:
|
||||||
- Mega Man 7
|
- Mega Man 7
|
||||||
- Mega Man VII
|
- Mega Man VII
|
||||||
https://www.youtube.com/watch?v=-AesqnudNuw:
|
https://www.youtube.com/watch?v=AdDbbzuq1vY:
|
||||||
- Mega Man 9
|
- Mega Man 9
|
||||||
- Mega Man IX
|
- Mega Man IX
|
||||||
https://www.youtube.com/watch?v=-BmGDtP2t7M:
|
https://www.youtube.com/watch?v=-BmGDtP2t7M:
|
@ -0,0 +1,11 @@
|
|||||||
|
from .fifo import FIFO
|
||||||
|
|
||||||
|
|
||||||
|
async def setup(bot):
|
||||||
|
cog = FIFO(bot)
|
||||||
|
bot.add_cog(cog)
|
||||||
|
await cog.initialize()
|
||||||
|
|
||||||
|
|
||||||
|
def teardown(bot):
|
||||||
|
pass
|
@ -0,0 +1,42 @@
|
|||||||
|
from datetime import datetime, tzinfo
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from apscheduler.triggers.cron import CronTrigger
|
||||||
|
from dateutil import parser
|
||||||
|
from discord.ext.commands import BadArgument, Converter
|
||||||
|
from pytz import timezone
|
||||||
|
|
||||||
|
from fifo.timezones import assemble_timezones
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
DatetimeConverter = datetime
|
||||||
|
CronConverter = str
|
||||||
|
else:
|
||||||
|
|
||||||
|
class TimezoneConverter(Converter):
|
||||||
|
async def convert(self, ctx, argument) -> tzinfo:
|
||||||
|
tzinfos = assemble_timezones()
|
||||||
|
if argument.upper() in tzinfos:
|
||||||
|
return tzinfos[argument.upper()]
|
||||||
|
|
||||||
|
timez = timezone(argument)
|
||||||
|
|
||||||
|
if timez is not None:
|
||||||
|
return timez
|
||||||
|
raise BadArgument()
|
||||||
|
|
||||||
|
class DatetimeConverter(Converter):
|
||||||
|
async def convert(self, ctx, argument) -> datetime:
|
||||||
|
dt = parser.parse(argument, fuzzy=True, tzinfos=assemble_timezones())
|
||||||
|
if dt is not None:
|
||||||
|
return dt
|
||||||
|
raise BadArgument()
|
||||||
|
|
||||||
|
class CronConverter(Converter):
|
||||||
|
async def convert(self, ctx, argument) -> str:
|
||||||
|
try:
|
||||||
|
CronTrigger.from_crontab(argument)
|
||||||
|
except ValueError:
|
||||||
|
raise BadArgument()
|
||||||
|
|
||||||
|
return argument
|
@ -0,0 +1,511 @@
|
|||||||
|
import logging
|
||||||
|
from datetime import datetime, timedelta, tzinfo
|
||||||
|
from typing import Optional, Union
|
||||||
|
|
||||||
|
import discord
|
||||||
|
from apscheduler.job import Job
|
||||||
|
from apscheduler.jobstores.base import JobLookupError
|
||||||
|
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||||
|
from apscheduler.schedulers.base import STATE_PAUSED, STATE_RUNNING
|
||||||
|
from redbot.core import Config, checks, commands
|
||||||
|
from redbot.core.bot import Red
|
||||||
|
from redbot.core.commands import TimedeltaConverter
|
||||||
|
from redbot.core.utils.chat_formatting import pagify
|
||||||
|
|
||||||
|
from .datetime_cron_converters import CronConverter, DatetimeConverter, TimezoneConverter
|
||||||
|
from .task import Task
|
||||||
|
|
||||||
|
schedule_log = logging.getLogger("red.fox_v3.fifo.scheduler")
|
||||||
|
schedule_log.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
log = logging.getLogger("red.fox_v3.fifo")
|
||||||
|
|
||||||
|
|
||||||
|
async def _execute_task(task_state):
|
||||||
|
log.info(f"Executing {task_state=}")
|
||||||
|
task = Task(**task_state)
|
||||||
|
if await task.load_from_config():
|
||||||
|
return await task.execute()
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _assemble_job_id(task_name, guild_id):
|
||||||
|
return f"{task_name}_{guild_id}"
|
||||||
|
|
||||||
|
|
||||||
|
def _disassemble_job_id(job_id: str):
|
||||||
|
return job_id.split("_")
|
||||||
|
|
||||||
|
|
||||||
|
class FIFO(commands.Cog):
|
||||||
|
"""
|
||||||
|
Simple Scheduling Cog
|
||||||
|
|
||||||
|
Named after the simplest scheduling algorithm: First In First Out
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, bot: Red):
|
||||||
|
super().__init__()
|
||||||
|
self.bot = bot
|
||||||
|
self.config = Config.get_conf(self, identifier=70737079, force_registration=True)
|
||||||
|
|
||||||
|
default_global = {"jobs": []}
|
||||||
|
default_guild = {"tasks": {}}
|
||||||
|
|
||||||
|
self.config.register_global(**default_global)
|
||||||
|
self.config.register_guild(**default_guild)
|
||||||
|
|
||||||
|
self.scheduler = None
|
||||||
|
self.jobstore = None
|
||||||
|
|
||||||
|
self.tz_cog = None
|
||||||
|
|
||||||
|
async def red_delete_data_for_user(self, **kwargs):
|
||||||
|
"""Nothing to delete"""
|
||||||
|
return
|
||||||
|
|
||||||
|
def cog_unload(self):
|
||||||
|
# self.scheduler.remove_all_jobs()
|
||||||
|
if self.scheduler is not None:
|
||||||
|
self.scheduler.shutdown()
|
||||||
|
|
||||||
|
async def initialize(self):
|
||||||
|
|
||||||
|
job_defaults = {"coalesce": False, "max_instances": 1}
|
||||||
|
|
||||||
|
# executors = {"default": AsyncIOExecutor()}
|
||||||
|
|
||||||
|
# Default executor is already AsyncIOExecutor
|
||||||
|
self.scheduler = AsyncIOScheduler(job_defaults=job_defaults, logger=schedule_log)
|
||||||
|
|
||||||
|
from .redconfigjobstore import RedConfigJobStore
|
||||||
|
|
||||||
|
self.jobstore = RedConfigJobStore(self.config, self.bot)
|
||||||
|
await self.jobstore.load_from_config(self.scheduler, "default")
|
||||||
|
self.scheduler.add_jobstore(self.jobstore, "default")
|
||||||
|
|
||||||
|
self.scheduler.start()
|
||||||
|
|
||||||
|
async def _check_parsable_command(self, ctx: commands.Context, command_to_parse: str):
|
||||||
|
message: discord.Message = ctx.message
|
||||||
|
|
||||||
|
message.content = ctx.prefix + command_to_parse
|
||||||
|
message.author = ctx.author
|
||||||
|
|
||||||
|
new_ctx: commands.Context = await self.bot.get_context(message)
|
||||||
|
|
||||||
|
return new_ctx.valid
|
||||||
|
|
||||||
|
async def _delete_task(self, task: Task):
|
||||||
|
job: Union[Job, None] = await self._get_job(task)
|
||||||
|
if job is not None:
|
||||||
|
job.remove()
|
||||||
|
|
||||||
|
await task.delete_self()
|
||||||
|
|
||||||
|
async def _process_task(self, task: Task):
|
||||||
|
job: Union[Job, None] = await self._get_job(task)
|
||||||
|
if job is not None:
|
||||||
|
job.reschedule(await task.get_combined_trigger())
|
||||||
|
return job
|
||||||
|
return await self._add_job(task)
|
||||||
|
|
||||||
|
async def _get_job(self, task: Task) -> Job:
|
||||||
|
return self.scheduler.get_job(_assemble_job_id(task.name, task.guild_id))
|
||||||
|
|
||||||
|
async def _add_job(self, task: Task):
|
||||||
|
return self.scheduler.add_job(
|
||||||
|
_execute_task,
|
||||||
|
args=[task.__getstate__()],
|
||||||
|
id=_assemble_job_id(task.name, task.guild_id),
|
||||||
|
trigger=await task.get_combined_trigger(),
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _resume_job(self, task: Task):
|
||||||
|
try:
|
||||||
|
job = self.scheduler.resume_job(job_id=_assemble_job_id(task.name, task.guild_id))
|
||||||
|
except JobLookupError:
|
||||||
|
job = await self._process_task(task)
|
||||||
|
return job
|
||||||
|
|
||||||
|
async def _pause_job(self, task: Task):
|
||||||
|
return self.scheduler.pause_job(job_id=_assemble_job_id(task.name, task.guild_id))
|
||||||
|
|
||||||
|
async def _remove_job(self, task: Task):
|
||||||
|
return self.scheduler.remove_job(job_id=_assemble_job_id(task.name, task.guild_id))
|
||||||
|
|
||||||
|
async def _get_tz(self, user: Union[discord.User, discord.Member]) -> Union[None, tzinfo]:
|
||||||
|
if self.tz_cog is None:
|
||||||
|
self.tz_cog = self.bot.get_cog("Timezone")
|
||||||
|
if self.tz_cog is None:
|
||||||
|
self.tz_cog = False # only try once to get the timezone cog
|
||||||
|
|
||||||
|
if not self.tz_cog:
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
usertime = await self.tz_cog.config.user(user).usertime()
|
||||||
|
except AttributeError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if usertime:
|
||||||
|
return await TimezoneConverter().convert(None, usertime)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
@checks.is_owner()
|
||||||
|
@commands.guild_only()
|
||||||
|
@commands.command()
|
||||||
|
async def fifoclear(self, ctx: commands.Context):
|
||||||
|
"""Debug command to clear all current fifo data"""
|
||||||
|
self.scheduler.remove_all_jobs()
|
||||||
|
await self.config.guild(ctx.guild).tasks.clear()
|
||||||
|
await self.config.jobs.clear()
|
||||||
|
# await self.config.jobs_index.clear()
|
||||||
|
await ctx.tick()
|
||||||
|
|
||||||
|
@checks.is_owner() # Will be reduced when I figure out permissions later
|
||||||
|
@commands.guild_only()
|
||||||
|
@commands.group()
|
||||||
|
async def fifo(self, ctx: commands.Context):
|
||||||
|
"""
|
||||||
|
Base command for handling scheduling of tasks
|
||||||
|
"""
|
||||||
|
if ctx.invoked_subcommand is None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@fifo.command(name="set")
|
||||||
|
async def fifo_set(
|
||||||
|
self,
|
||||||
|
ctx: commands.Context,
|
||||||
|
task_name: str,
|
||||||
|
author_or_channel: Union[discord.Member, discord.TextChannel],
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Sets a different author or in a different channel for execution of a task.
|
||||||
|
"""
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
if isinstance(author_or_channel, discord.Member):
|
||||||
|
if task.author_id == author_or_channel.id:
|
||||||
|
await ctx.maybe_send_embed("Already executing as that member")
|
||||||
|
return
|
||||||
|
|
||||||
|
await task.set_author(author_or_channel) # also saves
|
||||||
|
elif isinstance(author_or_channel, discord.TextChannel):
|
||||||
|
if task.channel_id == author_or_channel.id:
|
||||||
|
await ctx.maybe_send_embed("Already executing in that channel")
|
||||||
|
return
|
||||||
|
|
||||||
|
await task.set_channel(author_or_channel)
|
||||||
|
else:
|
||||||
|
await ctx.maybe_send_embed("Unsupported result")
|
||||||
|
return
|
||||||
|
|
||||||
|
await ctx.tick()
|
||||||
|
|
||||||
|
@fifo.command(name="resume")
|
||||||
|
async def fifo_resume(self, ctx: commands.Context, task_name: Optional[str] = None):
|
||||||
|
"""
|
||||||
|
Provide a task name to resume execution of a task.
|
||||||
|
|
||||||
|
Otherwise resumes execution of all tasks on all guilds
|
||||||
|
If the task isn't currently scheduled, will schedule it
|
||||||
|
"""
|
||||||
|
if task_name is None:
|
||||||
|
if self.scheduler.state == STATE_PAUSED:
|
||||||
|
self.scheduler.resume()
|
||||||
|
await ctx.maybe_send_embed("All task execution for all guilds has been resumed")
|
||||||
|
else:
|
||||||
|
await ctx.maybe_send_embed("Task execution is not paused, can't resume")
|
||||||
|
else:
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
if await self._resume_job(task):
|
||||||
|
await ctx.maybe_send_embed(f"Execution of {task_name=} has been resumed")
|
||||||
|
else:
|
||||||
|
await ctx.maybe_send_embed(f"Failed to resume {task_name=}")
|
||||||
|
|
||||||
|
@fifo.command(name="pause")
|
||||||
|
async def fifo_pause(self, ctx: commands.Context, task_name: Optional[str] = None):
|
||||||
|
"""
|
||||||
|
Provide a task name to pause execution of a task
|
||||||
|
|
||||||
|
Otherwise pauses execution of all tasks on all guilds
|
||||||
|
"""
|
||||||
|
if task_name is None:
|
||||||
|
if self.scheduler.state == STATE_RUNNING:
|
||||||
|
self.scheduler.pause()
|
||||||
|
await ctx.maybe_send_embed("All task execution for all guilds has been paused")
|
||||||
|
else:
|
||||||
|
await ctx.maybe_send_embed("Task execution is not running, can't pause")
|
||||||
|
else:
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
if await self._pause_job(task):
|
||||||
|
await ctx.maybe_send_embed(f"Execution of {task_name=} has been paused")
|
||||||
|
else:
|
||||||
|
await ctx.maybe_send_embed(f"Failed to pause {task_name=}")
|
||||||
|
|
||||||
|
@fifo.command(name="details")
|
||||||
|
async def fifo_details(self, ctx: commands.Context, task_name: str):
|
||||||
|
"""
|
||||||
|
Provide all the details on the specified task name
|
||||||
|
"""
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
embed = discord.Embed(title=f"Task: {task_name}")
|
||||||
|
|
||||||
|
embed.add_field(
|
||||||
|
name="Task command", value=f"{ctx.prefix}{task.get_command_str()}", inline=False
|
||||||
|
)
|
||||||
|
|
||||||
|
guild: discord.Guild = self.bot.get_guild(task.guild_id)
|
||||||
|
|
||||||
|
if guild is not None:
|
||||||
|
author: discord.Member = guild.get_member(task.author_id)
|
||||||
|
channel: discord.TextChannel = guild.get_channel(task.channel_id)
|
||||||
|
embed.add_field(name="Server", value=guild.name)
|
||||||
|
if author is not None:
|
||||||
|
embed.add_field(name="Author", value=author.mention)
|
||||||
|
if channel is not None:
|
||||||
|
embed.add_field(name="Channel", value=channel.mention)
|
||||||
|
|
||||||
|
else:
|
||||||
|
embed.add_field(name="Server", value="Server not found", inline=False)
|
||||||
|
|
||||||
|
trigger_str = "\n".join(str(t) for t in await task.get_triggers())
|
||||||
|
if trigger_str:
|
||||||
|
embed.add_field(name="Triggers", value=trigger_str, inline=False)
|
||||||
|
|
||||||
|
job = await self._get_job(task)
|
||||||
|
if job and job.next_run_time:
|
||||||
|
embed.timestamp = job.next_run_time
|
||||||
|
|
||||||
|
await ctx.send(embed=embed)
|
||||||
|
|
||||||
|
@fifo.command(name="list")
|
||||||
|
async def fifo_list(self, ctx: commands.Context, all_guilds: bool = False):
|
||||||
|
"""
|
||||||
|
Lists all current tasks and their triggers.
|
||||||
|
|
||||||
|
Do `[p]fifo list True` to see tasks from all guilds
|
||||||
|
"""
|
||||||
|
if all_guilds:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
out = ""
|
||||||
|
all_tasks = await self.config.guild(ctx.guild).tasks()
|
||||||
|
for task_name, task_data in all_tasks.items():
|
||||||
|
out += f"{task_name}: {task_data}\n"
|
||||||
|
|
||||||
|
if out:
|
||||||
|
if len(out) > 2000:
|
||||||
|
for page in pagify(out):
|
||||||
|
await ctx.maybe_send_embed(page)
|
||||||
|
else:
|
||||||
|
await ctx.maybe_send_embed(out)
|
||||||
|
else:
|
||||||
|
await ctx.maybe_send_embed("No tasks to list")
|
||||||
|
|
||||||
|
@fifo.command(name="add")
|
||||||
|
async def fifo_add(self, ctx: commands.Context, task_name: str, *, command_to_execute: str):
|
||||||
|
"""
|
||||||
|
Add a new task to this guild's task list
|
||||||
|
"""
|
||||||
|
if (await self.config.guild(ctx.guild).tasks.get_raw(task_name, default=None)) is not None:
|
||||||
|
await ctx.maybe_send_embed(f"Task already exists with {task_name=}")
|
||||||
|
return
|
||||||
|
|
||||||
|
if "_" in task_name: # See _disassemble_job_id
|
||||||
|
await ctx.maybe_send_embed("Task name cannot contain underscores")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not await self._check_parsable_command(ctx, command_to_execute):
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
"Failed to parse command. Make sure not to include the prefix"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config, ctx.author.id, ctx.channel.id, self.bot)
|
||||||
|
await task.set_commmand_str(command_to_execute)
|
||||||
|
await task.save_all()
|
||||||
|
await ctx.tick()
|
||||||
|
|
||||||
|
@fifo.command(name="delete")
|
||||||
|
async def fifo_delete(self, ctx: commands.Context, task_name: str):
|
||||||
|
"""
|
||||||
|
Deletes a task from this guild's task list
|
||||||
|
"""
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
await self._delete_task(task)
|
||||||
|
await ctx.maybe_send_embed(f"Task[{task_name}] has been deleted from this guild")
|
||||||
|
|
||||||
|
@fifo.command(name="cleartriggers", aliases=["cleartrigger"])
|
||||||
|
async def fifo_cleartriggers(self, ctx: commands.Context, task_name: str):
|
||||||
|
"""
|
||||||
|
Removes all triggers from specified task
|
||||||
|
|
||||||
|
Useful to start over with new trigger
|
||||||
|
"""
|
||||||
|
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
await task.clear_triggers()
|
||||||
|
await ctx.tick()
|
||||||
|
|
||||||
|
@fifo.group(name="addtrigger", aliases=["trigger"])
|
||||||
|
async def fifo_trigger(self, ctx: commands.Context):
|
||||||
|
"""
|
||||||
|
Add a new trigger for a task from the current guild.
|
||||||
|
"""
|
||||||
|
if ctx.invoked_subcommand is None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@fifo_trigger.command(name="interval")
|
||||||
|
async def fifo_trigger_interval(
|
||||||
|
self, ctx: commands.Context, task_name: str, *, interval_str: TimedeltaConverter
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Add an interval trigger to the specified task
|
||||||
|
"""
|
||||||
|
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
result = await task.add_trigger("interval", interval_str)
|
||||||
|
if not result:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
"Failed to add an interval trigger to this task, see console for logs"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
await task.save_data()
|
||||||
|
job: Job = await self._process_task(task)
|
||||||
|
delta_from_now: timedelta = job.next_run_time - datetime.now(job.next_run_time.tzinfo)
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task `{task_name}` added interval of {interval_str} to its scheduled runtimes\n\n"
|
||||||
|
f"Next run time: {job.next_run_time} ({delta_from_now.total_seconds()} seconds)"
|
||||||
|
)
|
||||||
|
|
||||||
|
@fifo_trigger.command(name="date")
|
||||||
|
async def fifo_trigger_date(
|
||||||
|
self, ctx: commands.Context, task_name: str, *, datetime_str: DatetimeConverter
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Add a "run once" datetime trigger to the specified task
|
||||||
|
"""
|
||||||
|
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
maybe_tz = await self._get_tz(ctx.author)
|
||||||
|
|
||||||
|
result = await task.add_trigger("date", datetime_str, maybe_tz)
|
||||||
|
if not result:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
"Failed to add a date trigger to this task, see console for logs"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
await task.save_data()
|
||||||
|
job: Job = await self._process_task(task)
|
||||||
|
delta_from_now: timedelta = job.next_run_time - datetime.now(job.next_run_time.tzinfo)
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task `{task_name}` added {datetime_str} to its scheduled runtimes\n"
|
||||||
|
f"Next run time: {job.next_run_time} ({delta_from_now.total_seconds()} seconds)"
|
||||||
|
)
|
||||||
|
|
||||||
|
@fifo_trigger.command(name="cron")
|
||||||
|
async def fifo_trigger_cron(
|
||||||
|
self,
|
||||||
|
ctx: commands.Context,
|
||||||
|
task_name: str,
|
||||||
|
optional_tz: Optional[TimezoneConverter] = None,
|
||||||
|
*,
|
||||||
|
cron_str: CronConverter,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Add a cron "time of day" trigger to the specified task
|
||||||
|
|
||||||
|
See https://crontab.guru/ for help generating the cron_str
|
||||||
|
"""
|
||||||
|
task = Task(task_name, ctx.guild.id, self.config)
|
||||||
|
await task.load_from_config()
|
||||||
|
|
||||||
|
if task.data is None:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task by the name of {task_name} is not found in this guild"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
if optional_tz is None:
|
||||||
|
optional_tz = await self._get_tz(ctx.author) # might still be None
|
||||||
|
|
||||||
|
result = await task.add_trigger("cron", cron_str, optional_tz)
|
||||||
|
if not result:
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
"Failed to add a cron trigger to this task, see console for logs"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
await task.save_data()
|
||||||
|
job: Job = await self._process_task(task)
|
||||||
|
delta_from_now: timedelta = job.next_run_time - datetime.now(job.next_run_time.tzinfo)
|
||||||
|
await ctx.maybe_send_embed(
|
||||||
|
f"Task `{task_name}` added cron_str to its scheduled runtimes\n"
|
||||||
|
f"Next run time: {job.next_run_time} ({delta_from_now.total_seconds()} seconds)"
|
||||||
|
)
|
@ -0,0 +1,30 @@
|
|||||||
|
{
|
||||||
|
"author": [
|
||||||
|
"Bobloy"
|
||||||
|
],
|
||||||
|
"min_bot_version": "3.4.0",
|
||||||
|
"description": "[BETA] Schedule commands to be run at certain times or intervals",
|
||||||
|
"hidden": false,
|
||||||
|
"install_msg": "Thank you for installing FIFO.\nGet started with `[p]load fifo`, then `[p]help FIFO`",
|
||||||
|
"short": "[BETA] Schedule commands to be run at certain times or intervals",
|
||||||
|
"end_user_data_statement": "This cog does not store any End User Data",
|
||||||
|
"requirements": [
|
||||||
|
"apscheduler",
|
||||||
|
"pytz"
|
||||||
|
],
|
||||||
|
"tags": [
|
||||||
|
"bobloy",
|
||||||
|
"utilities",
|
||||||
|
"tool",
|
||||||
|
"tools",
|
||||||
|
"roles",
|
||||||
|
"schedule",
|
||||||
|
"cron",
|
||||||
|
"interval",
|
||||||
|
"date",
|
||||||
|
"datetime",
|
||||||
|
"time",
|
||||||
|
"calendar",
|
||||||
|
"timezone"
|
||||||
|
]
|
||||||
|
}
|
@ -0,0 +1,183 @@
|
|||||||
|
import asyncio
|
||||||
|
import base64
|
||||||
|
import logging
|
||||||
|
import pickle
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Tuple, Union
|
||||||
|
|
||||||
|
from apscheduler.job import Job
|
||||||
|
from apscheduler.jobstores.base import ConflictingIdError, JobLookupError
|
||||||
|
from apscheduler.jobstores.memory import MemoryJobStore
|
||||||
|
from apscheduler.schedulers.asyncio import run_in_event_loop
|
||||||
|
from apscheduler.util import datetime_to_utc_timestamp
|
||||||
|
from redbot.core import Config
|
||||||
|
|
||||||
|
# TODO: use get_lock on config
|
||||||
|
from redbot.core.bot import Red
|
||||||
|
from redbot.core.utils import AsyncIter
|
||||||
|
|
||||||
|
log = logging.getLogger("red.fox_v3.fifo.jobstore")
|
||||||
|
log.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
save_task_objects = []
|
||||||
|
|
||||||
|
|
||||||
|
class RedConfigJobStore(MemoryJobStore):
    """In-memory APScheduler job store mirrored into Red's Config.

    Jobs live in the parent MemoryJobStore structures for fast lookup;
    every mutation is additionally persisted (as a background task) to
    ``config.jobs`` so the schedule survives bot restarts.
    """

    def __init__(self, config: Config, bot: Red):
        super().__init__()
        self.config = config
        self.bot = bot
        self.pickle_protocol = pickle.HIGHEST_PROTOCOL
        self._eventloop = self.bot.loop
        # TODO: self.config.jobs_index is never used,
        # fine but maybe a sign of inefficient use of config

    @run_in_event_loop
    def start(self, scheduler, alias):
        super().start(scheduler, alias)

    async def load_from_config(self, scheduler, alias):
        """Start the store and rebuild the in-memory job list from Config."""
        super().start(scheduler, alias)
        stored = await self.config.jobs()
        self._jobs = [
            (await self._decode_job(job), timestamp)
            async for (job, timestamp) in AsyncIter(stored)
        ]
        self._jobs_index = {job.id: (job, timestamp) for job, timestamp in self._jobs}

    def _encode_job(self, job: Job):
        """Pickle *job* into a Config-safe dict.

        args[0] is the Task's state dict; its live config/bot members are
        not picklable, so they are blanked before dumping and restored
        afterwards (the state dict is shared with the live job).
        """
        job_state = job.__getstate__()
        blanked = list(job_state["args"])
        blanked[0]["config"] = None
        blanked[0]["bot"] = None
        job_state["args"] = tuple(blanked)

        encoded = base64.b64encode(pickle.dumps(job_state, self.pickle_protocol))
        out = {
            "_id": job.id,
            "next_run_time": datetime_to_utc_timestamp(job.next_run_time),
            "job_state": encoded.decode("ascii"),
        }

        restored = list(job_state["args"])
        restored[0]["config"] = self.config
        restored[0]["bot"] = self.bot
        job_state["args"] = tuple(restored)

        return out

    async def _decode_job(self, in_job):
        """Inverse of ``_encode_job``; returns a live Job (None passes through)."""
        if in_job is None:
            return None
        job_state = pickle.loads(base64.b64decode(in_job["job_state"]))
        revived = list(job_state["args"])
        revived[0]["config"] = self.config
        revived[0]["bot"] = self.bot
        job_state["args"] = tuple(revived)
        job = Job.__new__(Job)
        job.__setstate__(job_state)
        job._scheduler = self._scheduler
        job._jobstore_alias = self._alias
        return job

    @run_in_event_loop
    def add_job(self, job: Job):
        if job.id in self._jobs_index:
            raise ConflictingIdError(job.id)
        timestamp = datetime_to_utc_timestamp(job.next_run_time)
        index = self._get_job_index(timestamp, job.id)  # This is fine
        self._jobs.insert(index, (job, timestamp))
        self._jobs_index[job.id] = (job, timestamp)
        # Persist in the background; the in-memory state is already updated.
        asyncio.create_task(self._async_add_job(job, index, timestamp))

    async def _async_add_job(self, job, index, timestamp):
        async with self.config.jobs() as jobs:
            jobs.insert(index, (self._encode_job(job), timestamp))
        return True

    @run_in_event_loop
    def update_job(self, job):
        old_tuple: Tuple[Union[Job, None], Union[datetime, None]] = self._jobs_index.get(
            job.id, (None, None)
        )
        old_job, old_timestamp = old_tuple
        if old_job is None:
            raise JobLookupError(job.id)

        # If the next run time has not changed, simply replace the job in its present index.
        # Otherwise, reinsert the job to the list to preserve the ordering.
        old_index = self._get_job_index(old_timestamp, old_job.id)
        new_timestamp = datetime_to_utc_timestamp(job.next_run_time)
        asyncio.create_task(
            self._async_update_job(job, new_timestamp, old_index, old_job, old_timestamp)
        )

    async def _async_update_job(self, job, new_timestamp, old_index, old_job, old_timestamp):
        encoded_job = self._encode_job(job)
        if old_timestamp == new_timestamp:
            self._jobs[old_index] = (job, new_timestamp)
            async with self.config.jobs() as jobs:
                jobs[old_index] = (encoded_job, new_timestamp)
        else:
            del self._jobs[old_index]
            new_index = self._get_job_index(new_timestamp, job.id)  # This is fine
            self._jobs.insert(new_index, (job, new_timestamp))
            async with self.config.jobs() as jobs:
                del jobs[old_index]
                jobs.insert(new_index, (encoded_job, new_timestamp))
        self._jobs_index[old_job.id] = (job, new_timestamp)

        log.debug(f"Async Updated {job.id=}")
        log.debug(f"Check job args: {job.args=}")

    @run_in_event_loop
    def remove_job(self, job_id):
        job, timestamp = self._jobs_index.get(job_id, (None, None))
        if job is None:
            raise JobLookupError(job_id)

        index = self._get_job_index(timestamp, job_id)
        del self._jobs[index]
        del self._jobs_index[job.id]
        asyncio.create_task(self._async_remove_job(index, job))

    async def _async_remove_job(self, index, job):
        async with self.config.jobs() as jobs:
            del jobs[index]

    @run_in_event_loop
    def remove_all_jobs(self):
        super().remove_all_jobs()
        asyncio.create_task(self._async_remove_all_jobs())

    async def _async_remove_all_jobs(self):
        await self.config.jobs.clear()

    def shutdown(self):
        """Removes all jobs without clearing config"""
        super().remove_all_jobs()
|
@ -0,0 +1,371 @@
|
|||||||
|
import logging
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Dict, List, Union
|
||||||
|
|
||||||
|
import discord
|
||||||
|
from apscheduler.triggers.base import BaseTrigger
|
||||||
|
from apscheduler.triggers.combining import OrTrigger
|
||||||
|
from apscheduler.triggers.cron import CronTrigger
|
||||||
|
from apscheduler.triggers.date import DateTrigger
|
||||||
|
from apscheduler.triggers.interval import IntervalTrigger
|
||||||
|
from discord.utils import time_snowflake
|
||||||
|
from pytz import timezone
|
||||||
|
from redbot.core import Config, commands
|
||||||
|
from redbot.core.bot import Red
|
||||||
|
|
||||||
|
# Module-level logger for FIFO task (de)serialization and execution.
log = logging.getLogger("red.fox_v3.fifo.task")
|
||||||
|
|
||||||
|
async def _do_nothing(*args, **kwargs):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def get_trigger(data):
    """Build a live APScheduler trigger from one stored trigger dict.

    Supported ``data["type"]`` values are "interval", "date" and "cron";
    any other type yields False.
    """
    trigger_type = data["type"]

    if trigger_type == "interval":
        delta = data["time_data"]
        return IntervalTrigger(days=delta.days, seconds=delta.seconds)

    if trigger_type == "date":
        return DateTrigger(data["time_data"], timezone=data["tzinfo"])

    if trigger_type == "cron":
        return CronTrigger.from_crontab(data["time_data"], timezone=data["tzinfo"])

    return False
||||||
|
|
||||||
|
def parse_triggers(data: Union[Dict, None]):
    """Combine a task's stored triggers into one trigger object.

    Returns None when there are no triggers, a single trigger when there is
    exactly one, and an OrTrigger over all of them otherwise.
    """
    if data is None or not data.get("triggers", False):  # No triggers
        return None

    trigger_dicts = data["triggers"]
    if len(trigger_dicts) > 1:  # Multiple triggers
        return OrTrigger(get_trigger(td) for td in trigger_dicts)

    return get_trigger(trigger_dicts[0])
||||||
|
|
||||||
|
class FakeMessage:
    """Detached shallow snapshot of a discord.Message.

    Copies every attribute reachable via ``dir()`` into the instance dict
    (missing ones become None) so the copy can be freely mutated without
    touching the real message.
    """

    def __init__(self, message: discord.Message):
        snapshot = {attr: getattr(message, attr, None) for attr in dir(message)}
        self.__dict__.update(**snapshot)
||||||
|
|
||||||
|
def neuter_message(message: FakeMessage):
    """Replace every side-effectful coroutine on *message* with a no-op.

    Prevents the fake invocation from deleting/editing/pinning/acking or
    reacting to the real message it was cloned from. Returns the same object.
    """
    for attr in (
        "delete",
        "edit",
        "publish",
        "pin",
        "unpin",
        "add_reaction",
        "remove_reaction",
        "clear_reaction",
        "clear_reactions",
        "ack",
    ):
        setattr(message, attr, _do_nothing)

    return message
|
||||||
|
|
||||||
|
class Task:
    """A named, per-guild scheduled command for the FIFO cog.

    A Task stores a command string plus a list of triggers in Red's Config,
    rebuilds live trigger objects from that stored data, and executes by
    replaying the command as a fake message in the configured channel.
    """

    # Templates for stored data. Never hand these out directly -- see
    # _fresh_task_data() for why a plain .copy() is not safe.
    default_task_data = {"triggers": [], "command_str": ""}

    default_trigger = {
        "type": "",
        "time_data": None,  # Used for Interval and Date Triggers
        "tzinfo": None,
    }

    def __init__(
        self, name: str, guild_id, config: Config, author_id=None, channel_id=None, bot: Red = None
    ):
        self.name = name
        self.guild_id = guild_id
        self.config = config
        self.bot = bot
        self.author_id = author_id
        self.channel_id = channel_id
        self.data = None  # Lazily populated via load_from_config()

    def _fresh_task_data(self):
        """Return a new task-data dict with its own triggers list.

        FIX: ``default_task_data.copy()`` is shallow, so every Task instance
        would share (and mutate) the class-level ``triggers`` list.
        """
        data = self.default_task_data.copy()
        data["triggers"] = []
        return data

    async def _encode_time_triggers(self):
        """Serialize live trigger data into JSON-safe dicts for Config storage."""
        if not self.data or not self.data.get("triggers", None):
            return []

        triggers = []
        for t in self.data["triggers"]:
            if t["type"] == "interval":  # Convert into timedelta
                td: timedelta = t["time_data"]
                triggers.append(
                    {"type": t["type"], "time_data": {"days": td.days, "seconds": td.seconds}}
                )
                continue

            if t["type"] == "date":  # Convert into datetime
                dt: datetime = t["time_data"]
                triggers.append(
                    {
                        "type": t["type"],
                        "time_data": dt.isoformat(),
                        "tzinfo": getattr(t["tzinfo"], "zone", None),
                    }
                )
                continue

            if t["type"] == "cron":
                if t["tzinfo"] is None:
                    triggers.append(t)  # already a string, nothing to do
                else:
                    triggers.append(
                        {
                            "type": t["type"],
                            "time_data": t["time_data"],
                            "tzinfo": getattr(t["tzinfo"], "zone", None),
                        }
                    )
                continue

            # FIX: was `raise NotImplemented`, which raises a TypeError
            # (NotImplemented is a constant, not an exception class).
            raise NotImplementedError(f"Unhandled trigger type: {t['type']}")

        return triggers

    async def _decode_time_triggers(self):
        """Inverse of _encode_time_triggers: revive stored trigger dicts in place."""
        if not self.data or not self.data.get("triggers", None):
            return

        for t in self.data["triggers"]:
            # Backwards compatibility: older saves have no tzinfo key.
            if "tzinfo" not in t:
                t["tzinfo"] = None

            # First decode timezone if there is one
            if t["tzinfo"] is not None:
                t["tzinfo"] = timezone(t["tzinfo"])

            if t["type"] == "interval":  # Convert into timedelta
                t["time_data"] = timedelta(**t["time_data"])
                continue

            if t["type"] == "date":  # Convert into datetime
                t["time_data"] = datetime.fromisoformat(t["time_data"])
                continue

            if t["type"] == "cron":
                continue  # already a string

            # FIX: was `raise NotImplemented` (see _encode_time_triggers).
            raise NotImplementedError(f"Unhandled trigger type: {t['type']}")

    async def load_from_config(self):
        """Populate this task from Config. Returns the task data, or None."""
        data = await self.config.guild_from_id(self.guild_id).tasks.get_raw(
            self.name, default=None
        )

        if not data:
            return

        self.author_id = data["author_id"]
        self.guild_id = data["guild_id"]
        self.channel_id = data["channel_id"]

        self.data = data["data"]

        await self._decode_time_triggers()
        return self.data

    async def get_triggers(self) -> List[Union[IntervalTrigger, DateTrigger]]:
        """Return one live trigger object per stored trigger (loads data if needed)."""
        if not self.data:
            await self.load_from_config()

        if self.data is None or "triggers" not in self.data:  # No triggers
            return []

        return [get_trigger(t) for t in self.data["triggers"]]

    async def get_combined_trigger(self) -> Union[BaseTrigger, None]:
        """Return one combined trigger (OrTrigger when multiple), or None."""
        if not self.data:
            await self.load_from_config()

        return parse_triggers(self.data)

    async def save_all(self):
        """To be used when creating an new task"""
        data_to_save = self._fresh_task_data()
        if self.data:
            data_to_save["command_str"] = self.get_command_str()
            data_to_save["triggers"] = await self._encode_time_triggers()

        to_save = {
            "guild_id": self.guild_id,
            "author_id": self.author_id,
            "channel_id": self.channel_id,
            "data": data_to_save,
        }
        await self.config.guild_from_id(self.guild_id).tasks.set_raw(self.name, value=to_save)

    async def save_data(self):
        """To be used when updating triggers"""
        if not self.data:
            return

        data_to_save = self.data.copy()
        data_to_save["triggers"] = await self._encode_time_triggers()

        await self.config.guild_from_id(self.guild_id).tasks.set_raw(
            self.name, "data", value=data_to_save
        )

    async def execute(self):
        """Re-run the stored command as if its author had just typed it.

        Returns True on success, False on a data/context problem, and None
        when no template message could be found at all.
        """
        if not self.data or not self.get_command_str():
            log.warning(f"Could not execute task due to data problem: {self.data=}")
            return False

        guild: discord.Guild = self.bot.get_guild(self.guild_id)  # used for get_prefix
        if guild is None:
            log.warning(f"Could not execute task due to missing guild: {self.guild_id}")
            return False
        channel: discord.TextChannel = guild.get_channel(self.channel_id)
        if channel is None:
            log.warning(f"Could not execute task due to missing channel: {self.channel_id}")
            return False
        author: discord.User = guild.get_member(self.author_id)
        if author is None:
            log.warning(f"Could not execute task due to missing author: {self.author_id}")
            return False

        actual_message: discord.Message = channel.last_message
        # A real message is needed as a template for the fake invocation;
        # fall back through increasingly desperate fetching attempts.
        if actual_message is None:
            actual_message = await channel.fetch_message(channel.last_message_id)
            if actual_message is None:  # last_message_id was an invalid message I guess
                actual_message = await channel.history(limit=1).flatten()
                if not actual_message:  # Basically only happens if the channel has no messages
                    actual_message = await author.history(limit=1).flatten()
                    if not actual_message:  # Okay, the *author* has never sent a message?
                        log.warning("No message found in channel cache yet, skipping execution")
                        return
                actual_message = actual_message[0]

        message = FakeMessage(actual_message)
        message.author = author
        message.guild = guild  # Just in case we got desperate
        message.channel = channel
        message.id = time_snowflake(datetime.now())  # Pretend to be now
        message = neuter_message(message)

        # absolutely weird that this takes a message object instead of guild
        prefixes = await self.bot.get_prefix(message)
        if isinstance(prefixes, str):
            prefix = prefixes
        else:
            prefix = prefixes[0]

        message.content = f"{prefix}{self.get_command_str()}"

        if not message.guild or not message.author or not message.content:
            log.warning(f"Could not execute task due to message problem: {message}")
            return False

        new_ctx: commands.Context = await self.bot.get_context(message)
        new_ctx.assume_yes = True
        if not new_ctx.valid:
            log.warning(
                f"Could not execute Task[{self.name}] due invalid context: {new_ctx.invoked_with}"
            )
            return False

        await self.bot.invoke(new_ctx)
        return True

    async def set_bot(self, bot: Red):
        """Attach a live bot instance (not persisted)."""
        self.bot = bot

    async def set_author(self, author: Union[discord.User, discord.Member, str]):
        """Set and persist the task author (accepts an object or a raw ID)."""
        self.author_id = getattr(author, "id", None) or author
        await self.config.guild_from_id(self.guild_id).tasks.set_raw(
            self.name, "author_id", value=self.author_id
        )

    async def set_channel(self, channel: Union[discord.TextChannel, str]):
        """Set and persist the target channel (accepts an object or a raw ID)."""
        self.channel_id = getattr(channel, "id", None) or channel
        await self.config.guild_from_id(self.guild_id).tasks.set_raw(
            self.name, "channel_id", value=self.channel_id
        )

    def get_command_str(self):
        """Return the stored command string, or "" when no data is loaded."""
        # FIX: guarded against self.data being None (previously AttributeError).
        return self.data.get("command_str", "") if self.data else ""

    async def set_commmand_str(self, command_str):
        """Set the command string in memory.

        NOTE: legacy misspelled name kept because callers use it.
        """
        if not self.data:
            self.data = self._fresh_task_data()
        self.data["command_str"] = command_str
        return True

    async def add_trigger(
        self, param, parsed_time: Union[timedelta, datetime, str], timezone=None
    ):
        """Append a trigger of type *param*; returns False when it cannot
        be turned into a live trigger object."""
        # TODO: Save timezone separately for cron and date triggers
        trigger_data = self.default_trigger.copy()
        trigger_data["type"] = param
        trigger_data["time_data"] = parsed_time
        if timezone is not None:
            trigger_data["tzinfo"] = timezone

        if not get_trigger(trigger_data):
            return False

        if not self.data:
            self.data = self._fresh_task_data()

        self.data["triggers"].append(trigger_data)
        return True

    def __setstate__(self, task_state):
        # Only identity + config survive pickling; everything else is
        # re-attached or re-loaded after unpickle.
        self.name = task_state["name"]
        self.guild_id = task_state["guild_id"]
        self.config = task_state["config"]
        self.bot = None
        self.author_id = None
        self.channel_id = None
        self.data = None

    def __getstate__(self):
        return {
            "name": self.name,
            "guild_id": self.guild_id,
            "config": self.config,
            "bot": self.bot,
        }

    async def clear_triggers(self):
        """Drop all triggers and persist the change."""
        self.data["triggers"] = []
        await self.save_data()

    async def delete_self(self):
        """Hopefully nothing uses the object after running this..."""
        await self.config.guild_from_id(self.guild_id).tasks.clear_raw(self.name)
|
@ -0,0 +1,5 @@
|
|||||||
|
from .firstmessage import FirstMessage
|
||||||
|
|
||||||
|
|
||||||
|
async def setup(bot):
    """Load the FirstMessage cog."""
    bot.add_cog(FirstMessage(bot))
|
@ -0,0 +1,49 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
import discord
|
||||||
|
from redbot.core import Config, commands
|
||||||
|
from redbot.core.bot import Red
|
||||||
|
|
||||||
|
log = logging.getLogger("red.fox_v3.firstmessage")
|
||||||
|
|
||||||
|
|
||||||
|
class FirstMessage(commands.Cog):
    """
    Provides a link to the first message in the provided channel
    """

    def __init__(self, bot: Red):
        super().__init__()
        self.bot = bot
        self.config = Config.get_conf(
            self, identifier=701051141151167710111511597103101, force_registration=True
        )

        # No guild settings are stored yet; registered for future use.
        self.config.register_guild()

    async def red_delete_data_for_user(self, **kwargs):
        """Nothing to delete"""
        return

    @commands.command()
    async def firstmessage(self, ctx: commands.Context, channel: discord.TextChannel = None):
        """
        Provide a link to the first message in current or provided channel.
        """
        if channel is None:
            channel = ctx.channel

        try:
            oldest = await channel.history(limit=1, oldest_first=True).flatten()
            message: discord.Message = oldest[0]
        except (discord.Forbidden, discord.HTTPException):
            log.exception(f"Unable to read message history for {channel.id=}")
            await ctx.maybe_send_embed("Unable to read message history for that channel")
            return

        em = discord.Embed(description=f"[First Message in {channel.mention}]({message.jump_url})")
        em.set_author(name=message.author.display_name, icon_url=message.author.avatar_url)

        await ctx.send(embed=em)
|
@ -0,0 +1,17 @@
|
|||||||
|
{
|
||||||
|
"author": [
|
||||||
|
"Bobloy"
|
||||||
|
],
|
||||||
|
"min_bot_version": "3.4.0",
|
||||||
|
"description": "Simple cog to jump to the first message of a channel easily",
|
||||||
|
"hidden": false,
|
||||||
|
"install_msg": "Thank you for installing FirstMessage.\nGet started with `[p]load firstmessage`, then `[p]help FirstMessage`",
|
||||||
|
"short": "Simple cog to jump to first message of a channel",
|
||||||
|
"end_user_data_statement": "This cog does not store any End User Data",
|
||||||
|
"tags": [
|
||||||
|
"bobloy",
|
||||||
|
"utilities",
|
||||||
|
"tool",
|
||||||
|
"tools"
|
||||||
|
]
|
||||||
|
}
|
@ -0,0 +1,5 @@
|
|||||||
|
from .isitdown import IsItDown
|
||||||
|
|
||||||
|
|
||||||
|
async def setup(bot):
    """Load the IsItDown cog."""
    bot.add_cog(IsItDown(bot))
|
@ -0,0 +1,17 @@
|
|||||||
|
{
|
||||||
|
"author": [
|
||||||
|
"Bobloy"
|
||||||
|
],
|
||||||
|
"min_bot_version": "3.4.0",
|
||||||
|
"description": "Check if a website/url is down using the https://isitdown.site/ api",
|
||||||
|
"hidden": false,
|
||||||
|
"install_msg": "Thank you for installing IsItDown.\nGet started with `[p]load isitdown`, then `[p]help IsItDown`",
|
||||||
|
"short": "Check if a website/url is down",
|
||||||
|
"end_user_data_statement": "This cog does not store any End User Data",
|
||||||
|
"tags": [
|
||||||
|
"bobloy",
|
||||||
|
"utilities",
|
||||||
|
"tool",
|
||||||
|
"tools"
|
||||||
|
]
|
||||||
|
}
|
@ -0,0 +1,58 @@
|
|||||||
|
import logging
|
||||||
|
import re
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
from redbot.core import Config, commands
|
||||||
|
from redbot.core.bot import Red
|
||||||
|
|
||||||
|
log = logging.getLogger("red.fox_v3.isitdown")
|
||||||
|
|
||||||
|
|
||||||
|
class IsItDown(commands.Cog):
    """
    Check if a website/url is down using the https://isitdown.site/ api
    """

    def __init__(self, bot: Red):
        super().__init__()
        self.bot = bot
        self.config = Config.get_conf(self, identifier=0, force_registration=True)

        default_guild = {"iids": []}  # List of tuple pairs (channel_id, website)

        self.config.register_guild(**default_guild)

    async def red_delete_data_for_user(self, **kwargs):
        """Nothing to delete"""
        return

    # FIX: the keyword is `aliases`; `alias=` is silently ignored by
    # discord.py, so `[p]iid` never worked.
    @commands.command(aliases=["iid"])
    async def isitdown(self, ctx: commands.Context, url_to_check):
        """
        Check if the provided url is down

        Alias: iid
        """
        try:
            resp = await self._check_if_down(url_to_check)
        except AssertionError:
            await ctx.maybe_send_embed("Invalid URL provided. Make sure not to include `http://`")
            return

        if resp["isitdown"]:
            await ctx.maybe_send_embed(f"{url_to_check} is DOWN!")
        else:
            await ctx.maybe_send_embed(f"{url_to_check} is UP!")

    async def _check_if_down(self, url_to_check):
        """Query the isitdown.site API for *url_to_check* and return the JSON.

        Raises AssertionError when the API does not answer 200 (caught by
        the command above and reported as an invalid URL).
        """
        # FIX: the sub() result was previously discarded, and the compiled
        # pattern object itself was interpolated into the API URL, breaking
        # every request. Strip the scheme/www prefix into `cleaned` instead.
        scheme_prefix = re.compile(r"https?://(www\.)?")
        cleaned = scheme_prefix.sub("", url_to_check).strip().strip("/")

        url = f"https://isitdown.site/api/v3/{cleaned}"
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                assert response.status == 200
                resp = await response.json()
        return resp
|
@ -0,0 +1,91 @@
|
|||||||
|
"""
|
||||||
|
Role Constants
|
||||||
|
|
||||||
|
Role Alignment guide as follows:
|
||||||
|
Town: 1
|
||||||
|
Werewolf: 2
|
||||||
|
Neutral: 3
|
||||||
|
|
||||||
|
Additional alignments may be added when warring factions are added
|
||||||
|
(Rival werewolves, cultists, vampires)
|
||||||
|
|
||||||
|
Role Category enrollment guide as follows (See Role.category):
|
||||||
|
Town:
|
||||||
|
1: Random, 2: Investigative, 3: Protective, 4: Government,
|
||||||
|
5: Killing, 6: Power (Special night action)
|
||||||
|
|
||||||
|
Werewolf:
|
||||||
|
11: Random, 12: Deception, 15: Killing, 16: Support
|
||||||
|
|
||||||
|
Neutral:
|
||||||
|
21: Benign, 22: Evil, 23: Killing
|
||||||
|
|
||||||
|
|
||||||
|
Example category:
|
||||||
|
category = [1, 5, 6] Could be Veteran
|
||||||
|
category = [1, 5] Could be Bodyguard
|
||||||
|
category = [11, 16] Could be Werewolf Silencer
|
||||||
|
category = [22] Could be Blob (non-killing)
|
||||||
|
category = [22, 23] Could be Serial-Killer
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
ALIGNMENT_TOWN = 1
|
||||||
|
ALIGNMENT_WEREWOLF = 2
|
||||||
|
ALIGNMENT_NEUTRAL = 3
|
||||||
|
ALIGNMENT_MAP = {"Town": 1, "Werewolf": 2, "Neutral": 3}
|
||||||
|
|
||||||
|
# 0-9: Town Role Categories
|
||||||
|
# 10-19: Werewolf Role Categories
|
||||||
|
# 20-29: Neutral Role Categories
|
||||||
|
CATEGORY_TOWN_RANDOM = 1
|
||||||
|
CATEGORY_TOWN_INVESTIGATIVE = 2
|
||||||
|
CATEGORY_TOWN_PROTECTIVE = 3
|
||||||
|
CATEGORY_TOWN_GOVERNMENT = 4
|
||||||
|
CATEGORY_TOWN_KILLING = 5
|
||||||
|
CATEGORY_TOWN_POWER = 6
|
||||||
|
|
||||||
|
CATEGORY_WW_RANDOM = 11
|
||||||
|
CATEGORY_WW_DECEPTION = 12
|
||||||
|
CATEGORY_WW_KILLING = 15
|
||||||
|
CATEGORY_WW_SUPPORT = 16
|
||||||
|
|
||||||
|
CATEGORY_NEUTRAL_BENIGN = 21
|
||||||
|
CATEGORY_NEUTRAL_EVIL = 22
|
||||||
|
CATEGORY_NEUTRAL_KILLING = 23
|
||||||
|
|
||||||
|
ROLE_CATEGORY_DESCRIPTIONS = {
|
||||||
|
CATEGORY_TOWN_RANDOM: "Random",
|
||||||
|
CATEGORY_TOWN_INVESTIGATIVE: "Investigative",
|
||||||
|
CATEGORY_TOWN_PROTECTIVE: "Protective",
|
||||||
|
CATEGORY_TOWN_GOVERNMENT: "Government",
|
||||||
|
CATEGORY_TOWN_KILLING: "Killing",
|
||||||
|
CATEGORY_TOWN_POWER: "Power (Special night action)",
|
||||||
|
CATEGORY_WW_RANDOM: "Random",
|
||||||
|
CATEGORY_WW_DECEPTION: "Deception",
|
||||||
|
CATEGORY_WW_KILLING: "Killing",
|
||||||
|
CATEGORY_WW_SUPPORT: "Support",
|
||||||
|
CATEGORY_NEUTRAL_BENIGN: "Benign",
|
||||||
|
CATEGORY_NEUTRAL_EVIL: "Evil",
|
||||||
|
CATEGORY_NEUTRAL_KILLING: "Killing",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
Listener Actions Priority Guide
|
||||||
|
|
||||||
|
Action priority guide as follows (see listeners.py for wolflistener):
|
||||||
|
_at_night_start
|
||||||
|
0. No Action
|
||||||
|
1. Detain actions (Jailer/Kidnapper)
|
||||||
|
2. Group discussions and choose targets
|
||||||
|
|
||||||
|
_at_night_end
|
||||||
|
0. No Action
|
||||||
|
1. Self actions (Veteran)
|
||||||
|
2. Target switching and role blocks (bus driver, witch, escort)
|
||||||
|
3. Protection / Preempt actions (bodyguard/framer)
|
||||||
|
4. Non-disruptive actions (seer/silencer)
|
||||||
|
5. Disruptive actions (Killing)
|
||||||
|
6. Role altering actions (Cult / Mason / Shifter)
|
||||||
|
"""
|
@ -0,0 +1,28 @@
|
|||||||
|
from typing import TYPE_CHECKING, Union
|
||||||
|
|
||||||
|
import discord
|
||||||
|
from discord.ext.commands import BadArgument, Converter
|
||||||
|
from redbot.core import commands
|
||||||
|
|
||||||
|
from werewolf.player import Player
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
    # Type-checker view: a player argument is either a raw ID or a member.
    PlayerConverter = Union[int, discord.Member]
    CronConverter = str
else:

    class PlayerConverter(Converter):
        """Convert a command argument into a guild member or a raw player ID."""

        async def convert(self, ctx, argument) -> Player:
            try:
                target = await commands.MemberConverter().convert(ctx, argument)
            except BadArgument:
                try:
                    target = int(argument)
                except ValueError:
                    raise BadArgument
                # FIX: was `assert target >= 0`, which is stripped under
                # `python -O` and would let negative IDs through.
                if target < 0:
                    raise BadArgument

            # TODO: Get the game for context without making a new one
            # TODO: Get player from game based on either ID or member object
            return target
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,106 @@
|
|||||||
|
import inspect
|
||||||
|
|
||||||
|
|
||||||
|
def wolflistener(name=None, priority=0):
    """A decorator that marks a coroutine as a werewolf game listener.

    This is the werewolf.Game equivalent of :meth:`.Cog.listener`.

    Parameters
    ------------
    name: :class:`str`
        The name of the event being listened to. If not provided, it
        defaults to the function's name.
    priority: :class:`int`
        The priority of the listener; see the module docstring in
        constants.py for the per-phase priority guide.

    Raises
    --------
    TypeError
        The function is not a coroutine function or a string was not passed as
        the name.
    """
    if name is not None and not isinstance(name, str):
        raise TypeError(
            "Game.listener expected str but received {0.__class__.__name__!r} instead.".format(
                name
            )
        )

    def decorator(func):
        target = func.__func__ if isinstance(func, staticmethod) else func
        if not inspect.iscoroutinefunction(target):
            raise TypeError("Listener function must be a coroutine function.")

        target.__wolf_listener__ = priority
        entry = (priority, name or target.__name__)
        if hasattr(target, "__wolf_listener_names__"):
            target.__wolf_listener_names__.append(entry)
        else:
            target.__wolf_listener_names__ = [entry]

        # Return `func` (possibly still a staticmethod) rather than the
        # unwrapped function: the metaclass needs the staticmethod wrapper
        # to pick it up, and it unwraps it itself.
        return func

    return decorator
|
||||||
|
|
||||||
|
|
||||||
|
class WolfListenerMeta(type):
    """Metaclass that collects ``@wolflistener``-decorated coroutine methods.

    Walks the finished class's MRO (base-first, so subclasses override) and
    gathers every coroutine function carrying the ``__wolf_listener__`` marker.
    The result is flattened onto the class as ``__wolf_listeners__``, a list of
    ``(priority, event_name, method_name)`` tuples.
    """

    def __new__(mcs, *args, **kwargs):
        cls_name, bases, attrs = args

        found = {}
        need_at_msg = "Listeners must start with at_ (in method {0.__name__}.{1})"

        new_cls = super().__new__(mcs, cls_name, bases, attrs, **kwargs)

        # Base-first traversal: a redefinition in a subclass replaces (or, if
        # it is no longer a listener, removes) the entry from a parent class.
        for base in reversed(new_cls.__mro__):
            for attr_name, member in base.__dict__.items():
                if attr_name in found:
                    del found[attr_name]

                # Unwrap staticmethods so the coroutine check sees the function.
                if isinstance(member, staticmethod):
                    member = member.__func__

                if inspect.iscoroutinefunction(member) and hasattr(
                    member, "__wolf_listener__"
                ):
                    # if not attr_name.startswith("at_"):
                    #     raise TypeError(need_at_msg.format(base, attr_name))
                    found[attr_name] = member

        # I use __name__ instead of just storing the value so I can inject
        # the self attribute when the time comes to add them to the bot
        flattened = [
            (priority, event_name, listener.__name__)
            for listener in found.values()
            for priority, event_name in listener.__wolf_listener_names__
        ]

        new_cls.__wolf_listeners__ = flattened
        return new_cls
|
||||||
|
|
||||||
|
|
||||||
|
class WolfListener(metaclass=WolfListenerMeta):
    """Base class whose subclasses auto-register their ``@wolflistener`` methods.

    On construction, every entry collected by ``WolfListenerMeta`` is bound to
    this instance and handed to the game's listener registry.
    """

    def __init__(self, game):
        for prio, event_name, attr in self.__wolf_listeners__:
            bound_method = getattr(self, attr)
            game.add_ww_listener(bound_method, prio, event_name)
|
@ -0,0 +1,11 @@
|
|||||||
|
from .villager import Villager
|
||||||
|
from .seer import Seer
|
||||||
|
|
||||||
|
from .vanillawerewolf import VanillaWerewolf
|
||||||
|
|
||||||
|
from .shifter import Shifter
|
||||||
|
|
||||||
|
# Don't sort these imports; their order is deliberate and load-order sensitive
|
||||||
|
# TODO: Replace with unique IDs for roles in the future
|
||||||
|
|
||||||
|
__all__ = ["Seer", "Shifter", "VanillaWerewolf", "Villager"]
|
@ -0,0 +1,101 @@
|
|||||||
|
import logging
|
||||||
|
import random
|
||||||
|
|
||||||
|
from werewolf.constants import ALIGNMENT_NEUTRAL, CATEGORY_NEUTRAL_EVIL
|
||||||
|
from werewolf.listener import wolflistener
|
||||||
|
from werewolf.player import Player
|
||||||
|
from werewolf.role import Role
|
||||||
|
|
||||||
|
log = logging.getLogger("red.fox_v3.werewolf.role.blob")
|
||||||
|
|
||||||
|
|
||||||
|
class TheBlob(Role):
    """Neutral-evil role that "absorbs" one adjacent player each night.

    The Blob cannot be killed at night and grows by marking neighbors with
    ``role.properties["been_blobbed"]``. Already-absorbed neighbors are
    skipped when choosing the next target.
    """

    rand_choice = True
    category = [CATEGORY_NEUTRAL_EVIL]  # List of enrolled categories
    alignment = ALIGNMENT_NEUTRAL  # 1: Town, 2: Werewolf, 3: Neutral
    channel_id = ""  # Empty for no private channel
    unique = True  # Only one of this role per game
    game_start_message = (
        "Your role is **The Blob**\n"
        "You win by absorbing everyone town\n"
        "Lynch players during the day with `[p]ww vote <ID>`\n"
        "Each night you will absorb an adjacent player"
    )
    description = (
        "A mysterious green blob of jelly, slowly growing in size.\n"
        "The Blob fears no evil, must be dealt with in town"
    )

    def __init__(self, game):
        super().__init__(game)

        # The player picked at night start to be absorbed at night end;
        # None when no valid (un-absorbed) neighbor was found.
        self.blob_target = None

    async def see_alignment(self, source=None):
        """
        Interaction for investigative roles attempting
        to see team (Village, Werewolf, Other)
        """
        return ALIGNMENT_NEUTRAL

    async def get_role(self, source=None):
        """
        Interaction for powerful access of role
        Unlikely to be able to deceive this
        """
        return "The Blob"

    async def see_role(self, source=None):
        """
        Interaction for investigative roles.
        More common to be able to deceive these roles
        """
        return "The Blob"

    async def kill(self, source):
        """
        Called when someone is trying to kill you!
        Can you do anything about it?
        self.player.alive is now set to False, set to True to stay alive
        """
        # Blob cannot simply be killed
        self.player.alive = True

    @wolflistener("at_night_start", priority=2)
    async def _at_night_start(self):
        """Walk left or right from the Blob and pick the first un-absorbed player."""
        if not self.player.alive:
            return

        self.blob_target = None
        # assumes player.id is this player's index into game.players — TODO confirm
        idx = self.player.id
        left_or_right = random.choice((-1, 1))
        while self.blob_target is None:
            idx += left_or_right
            if idx >= len(self.game.players):
                idx = 0  # wrap past the right edge; negative indices wrap natively

            player = self.game.players[idx]

            # you went full circle, everyone is a blob or something else is wrong
            if player == self.player:
                break

            # BUGFIX: absorb the first player who has NOT been absorbed yet.
            # The condition was previously inverted (missing `not`), so the
            # Blob could never acquire a first target: with nobody absorbed
            # it always circled back to itself and did nothing every night.
            if not player.role.properties.get("been_blobbed", False):
                self.blob_target = player

        if self.blob_target is not None:
            await self.player.send_dm(f"**You will attempt to absorb {self.blob_target} tonight**")
        else:
            # No f-string needed: the message has no placeholders.
            await self.player.send_dm("**No player will be absorbed tonight**")

    @wolflistener("at_night_end", priority=4)
    async def _at_night_end(self):
        """Visit tonight's target and, if the visit succeeds, mark them absorbed."""
        if self.blob_target is None or not self.player.alive:
            return

        target: "Player" = await self.game.visit(self.blob_target, self.player)

        if target is not None:
            target.role.properties["been_blobbed"] = True
            self.game.night_results.append("The Blob grows...")
|
@ -0,0 +1 @@
|
|||||||
|
from .wolfvote import WolfVote
|
Loading…
Reference in new issue