Merge Master into timerole_roleconfig

Commit: a4f11fde19

.gitignore (vendored, 1 change)
@@ -4,3 +4,4 @@ venv/
 v-data/
 database.sqlite3
 /venv3.4/
+/.venv/
@@ -95,7 +95,8 @@ pip install --no-deps "chatterbot>=1.1"
 #### Step 1: Built-in Downloader
 
 ```
-[p]cog install <Fox> Chatter
+[p]repo add Fox https://github.com/bobloy/Fox-V3
+[p]cog install Fox chatter
 ```
 
 #### Step 2: Install Requirements
@@ -10,7 +10,7 @@ from chatterbot import ChatBot
 from chatterbot.comparisons import JaccardSimilarity, LevenshteinDistance, SpacySimilarity
 from chatterbot.response_selection import get_random_response
 from chatterbot.trainers import ChatterBotCorpusTrainer, ListTrainer, UbuntuCorpusTrainer
-from redbot.core import Config, commands
+from redbot.core import Config, checks, commands
 from redbot.core.commands import Cog
 from redbot.core.data_manager import cog_data_path
 from redbot.core.utils.predicates import MessagePredicate
@@ -159,7 +159,9 @@ class Chatter(Cog):
         return out
 
     def _train_ubuntu(self):
-        trainer = UbuntuCorpusTrainer(self.chatbot)
+        trainer = UbuntuCorpusTrainer(
+            self.chatbot, ubuntu_corpus_data_directory=cog_data_path(self) / "ubuntu_data"
+        )
         trainer.train()
         return True
 
@@ -191,6 +193,7 @@ class Chatter(Cog):
         if ctx.invoked_subcommand is None:
             pass
 
+    @checks.admin()
     @chatter.command(name="channel")
     async def chatter_channel(
         self, ctx: commands.Context, channel: Optional[discord.TextChannel] = None
@@ -210,6 +213,7 @@ class Chatter(Cog):
             await self.config.guild(ctx.guild).chatchannel.set(channel.id)
             await ctx.maybe_send_embed(f"Chat channel is now {channel.mention}")
 
+    @checks.is_owner()
     @chatter.command(name="cleardata")
     async def chatter_cleardata(self, ctx: commands.Context, confirm: bool = False):
         """
@@ -242,6 +246,7 @@ class Chatter(Cog):
 
         await ctx.tick()
 
+    @checks.is_owner()
     @chatter.command(name="algorithm", aliases=["algo"])
     async def chatter_algorithm(
         self, ctx: commands.Context, algo_number: int, threshold: float = None
@@ -267,7 +272,7 @@ class Chatter(Cog):
                 )
                 return
             else:
-                self.similarity_algo = threshold
+                self.similarity_threshold = threshold
 
         self.similarity_algo = algos[algo_number]
         async with ctx.typing():
@@ -275,6 +280,7 @@ class Chatter(Cog):
 
         await ctx.tick()
 
+    @checks.is_owner()
     @chatter.command(name="model")
     async def chatter_model(self, ctx: commands.Context, model_number: int):
         """
@@ -312,6 +318,7 @@ class Chatter(Cog):
             f"Model has been switched to {self.tagger_language.ISO_639_1}"
         )
 
+    @checks.is_owner()
     @chatter.command(name="minutes")
     async def minutes(self, ctx: commands.Context, minutes: int):
         """
@@ -327,6 +334,7 @@ class Chatter(Cog):
 
         await ctx.tick()
 
+    @checks.is_owner()
     @chatter.command(name="age")
     async def age(self, ctx: commands.Context, days: int):
         """
@@ -341,6 +349,7 @@ class Chatter(Cog):
         await self.config.guild(ctx.guild).days.set(days)
         await ctx.tick()
 
+    @checks.is_owner()
     @chatter.command(name="backup")
     async def backup(self, ctx, backupname):
         """
@@ -362,6 +371,7 @@ class Chatter(Cog):
         else:
             await ctx.maybe_send_embed("Error occurred :(")
 
+    @checks.is_owner()
     @chatter.command(name="trainubuntu")
     async def chatter_train_ubuntu(self, ctx: commands.Context, confirmation: bool = False):
         """
@@ -383,6 +393,7 @@ class Chatter(Cog):
         else:
             await ctx.send("Error occurred :(")
 
+    @checks.is_owner()
     @chatter.command(name="trainenglish")
     async def chatter_train_english(self, ctx: commands.Context):
         """
@@ -396,6 +407,7 @@ class Chatter(Cog):
         else:
             await ctx.maybe_send_embed("Error occurred :(")
 
+    @checks.is_owner()
     @chatter.command()
     async def train(self, ctx: commands.Context, channel: discord.TextChannel):
         """
fifo/fifo.py (160 changes)
@@ -1,5 +1,6 @@
+import itertools
 import logging
-from datetime import datetime, timedelta, tzinfo
+from datetime import MAXYEAR, datetime, timedelta, tzinfo
 from typing import Optional, Union
 
 import discord
@@ -10,7 +11,7 @@ from apscheduler.schedulers.base import STATE_PAUSED, STATE_RUNNING
 from redbot.core import Config, checks, commands
 from redbot.core.bot import Red
 from redbot.core.commands import TimedeltaConverter
-from redbot.core.utils.chat_formatting import pagify
+from redbot.core.utils.chat_formatting import humanize_timedelta, pagify
 
 from .datetime_cron_converters import CronConverter, DatetimeConverter, TimezoneConverter
 from .task import Task
@@ -21,11 +22,12 @@ schedule_log.setLevel(logging.DEBUG)
 log = logging.getLogger("red.fox_v3.fifo")
 
 
-async def _execute_task(task_state):
-    log.info(f"Executing {task_state=}")
+async def _execute_task(**task_state):
+    log.info(f"Executing {task_state.get('name')}")
     task = Task(**task_state)
     if await task.load_from_config():
         return await task.execute()
+    log.warning(f"Failed to load data on {task_state=}")
     return False
 
 
@@ -37,6 +39,40 @@ def _disassemble_job_id(job_id: str):
     return job_id.split("_")
 
 
+def _get_run_times(job: Job, now: datetime = None):
+    """
+    Computes the scheduled run times between ``next_run_time`` and ``now`` (inclusive).
+
+    Modified to be asynchronous and yielding instead of all-or-nothing
+
+    """
+    if not job.next_run_time:
+        raise StopIteration()
+
+    if now is None:
+        now = datetime(MAXYEAR, 12, 31, 23, 59, 59, 999999, tzinfo=job.next_run_time.tzinfo)
+        yield from _get_run_times(job, now)
+        raise StopIteration()
+
+    next_run_time = job.next_run_time
+    while next_run_time and next_run_time <= now:
+        yield next_run_time
+        next_run_time = job.trigger.get_next_fire_time(next_run_time, now)
+
+
+class CapturePrint:
+    """Silly little class to get `print` output"""
+
+    def __init__(self):
+        self.string = None
+
+    def write(self, string):
+        if self.string is None:
+            self.string = string
+        else:
+            self.string = self.string + "\n" + string
+
+
 class FIFO(commands.Cog):
     """
     Simple Scheduling Cog
@@ -55,7 +91,7 @@ class FIFO(commands.Cog):
         self.config.register_global(**default_global)
         self.config.register_guild(**default_guild)
 
-        self.scheduler = None
+        self.scheduler: Optional[AsyncIOScheduler] = None
         self.jobstore = None
 
         self.tz_cog = None
@@ -71,17 +107,22 @@ class FIFO(commands.Cog):
 
     async def initialize(self):
 
-        job_defaults = {"coalesce": False, "max_instances": 1}
+        job_defaults = {
+            "coalesce": True,  # Multiple missed triggers within the grace time will only fire once
+            "max_instances": 5,  # This is probably way too high, should likely only be one
+            "misfire_grace_time": 15,  # 15 seconds ain't much, but it's honest work
+            "replace_existing": True,  # Very important for persistent data
+        }
 
         # executors = {"default": AsyncIOExecutor()}
 
         # Default executor is already AsyncIOExecutor
         self.scheduler = AsyncIOScheduler(job_defaults=job_defaults, logger=schedule_log)
 
-        from .redconfigjobstore import RedConfigJobStore
+        from .redconfigjobstore import RedConfigJobStore  # Wait to import to prevent cyclic import
 
         self.jobstore = RedConfigJobStore(self.config, self.bot)
-        await self.jobstore.load_from_config(self.scheduler, "default")
+        await self.jobstore.load_from_config()
         self.scheduler.add_jobstore(self.jobstore, "default")
 
         self.scheduler.start()
@@ -116,9 +157,10 @@ class FIFO(commands.Cog):
     async def _add_job(self, task: Task):
         return self.scheduler.add_job(
             _execute_task,
-            args=[task.__getstate__()],
+            kwargs=task.__getstate__(),
             id=_assemble_job_id(task.name, task.guild_id),
             trigger=await task.get_combined_trigger(),
+            name=task.name,
         )
 
     async def _resume_job(self, task: Task):
@@ -129,10 +171,16 @@ class FIFO(commands.Cog):
         return job
 
     async def _pause_job(self, task: Task):
-        return self.scheduler.pause_job(job_id=_assemble_job_id(task.name, task.guild_id))
+        try:
+            return self.scheduler.pause_job(job_id=_assemble_job_id(task.name, task.guild_id))
+        except JobLookupError:
+            return False
 
     async def _remove_job(self, task: Task):
-        return self.scheduler.remove_job(job_id=_assemble_job_id(task.name, task.guild_id))
+        try:
+            self.scheduler.remove_job(job_id=_assemble_job_id(task.name, task.guild_id))
+        except JobLookupError:
+            pass
 
     async def _get_tz(self, user: Union[discord.User, discord.Member]) -> Union[None, tzinfo]:
         if self.tz_cog is None:
@@ -173,6 +221,30 @@ class FIFO(commands.Cog):
         if ctx.invoked_subcommand is None:
             pass
 
+    @fifo.command(name="checktask", aliases=["checkjob", "check"])
+    async def fifo_checktask(self, ctx: commands.Context, task_name: str):
+        """Returns the next 10 scheduled executions of the task"""
+        task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
+        await task.load_from_config()
+
+        if task.data is None:
+            await ctx.maybe_send_embed(
+                f"Task by the name of {task_name} is not found in this guild"
+            )
+            return
+
+        job = await self._get_job(task)
+        if job is None:
+            await ctx.maybe_send_embed("No job scheduled for this task")
+            return
+        now = datetime.now(job.next_run_time.tzinfo)
+
+        times = [
+            humanize_timedelta(timedelta=x - now)
+            for x in itertools.islice(_get_run_times(job), 10)
+        ]
+        await ctx.maybe_send_embed("\n\n".join(times))
+
     @fifo.command(name="set")
     async def fifo_set(
         self,
@@ -319,12 +391,12 @@ class FIFO(commands.Cog):
         Do `[p]fifo list True` to see tasks from all guilds
         """
         if all_guilds:
-            pass
+            pass  # TODO: All guilds
         else:
             out = ""
             all_tasks = await self.config.guild(ctx.guild).tasks()
             for task_name, task_data in all_tasks.items():
-                out += f"{task_name}: {task_data}\n"
+                out += f"{task_name}: {task_data}\n\n"
 
             if out:
                 if len(out) > 2000:
@@ -335,6 +407,27 @@ class FIFO(commands.Cog):
             else:
                 await ctx.maybe_send_embed("No tasks to list")
 
+    @fifo.command(name="printschedule")
+    async def fifo_printschedule(self, ctx: commands.Context):
+        """
+        Print the current schedule of execution.
+
+        Useful for debugging.
+        """
+        cp = CapturePrint()
+        self.scheduler.print_jobs(out=cp)
+
+        out = cp.string
+
+        if out:
+            if len(out) > 2000:
+                for page in pagify(out):
+                    await ctx.maybe_send_embed(page)
+            else:
+                await ctx.maybe_send_embed(out)
+        else:
+            await ctx.maybe_send_embed("Failed to get schedule from scheduler")
+
     @fifo.command(name="add")
     async def fifo_add(self, ctx: commands.Context, task_name: str, *, command_to_execute: str):
         """
@@ -394,6 +487,7 @@ class FIFO(commands.Cog):
             return
 
         await task.clear_triggers()
+        await self._remove_job(task)
         await ctx.tick()
 
     @fifo.group(name="addtrigger", aliases=["trigger"])
@@ -413,7 +507,7 @@ class FIFO(commands.Cog):
         """
 
         task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
-        await task.load_from_config()
+        await task.load_from_config()  # Will set the channel and author
 
         if task.data is None:
            await ctx.maybe_send_embed(
@@ -435,6 +529,40 @@ class FIFO(commands.Cog):
             f"Next run time: {job.next_run_time} ({delta_from_now.total_seconds()} seconds)"
         )
 
+    @fifo_trigger.command(name="relative")
+    async def fifo_trigger_relative(
+        self, ctx: commands.Context, task_name: str, *, time_from_now: TimedeltaConverter
+    ):
+        """
+        Add a "run once" trigger at a time relative from now to the specified task
+        """
+
+        task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
+        await task.load_from_config()
+
+        if task.data is None:
+            await ctx.maybe_send_embed(
+                f"Task by the name of {task_name} is not found in this guild"
+            )
+            return
+
+        time_to_run = datetime.now() + time_from_now
+
+        result = await task.add_trigger("date", time_to_run, time_to_run.tzinfo)
+        if not result:
+            await ctx.maybe_send_embed(
+                "Failed to add a date trigger to this task, see console for logs"
+            )
+            return
+
+        await task.save_data()
+        job: Job = await self._process_task(task)
+        delta_from_now: timedelta = job.next_run_time - datetime.now(job.next_run_time.tzinfo)
+        await ctx.maybe_send_embed(
+            f"Task `{task_name}` added {time_to_run} to its scheduled runtimes\n"
+            f"Next run time: {job.next_run_time} ({delta_from_now.total_seconds()} seconds)"
+        )
+
     @fifo_trigger.command(name="date")
     async def fifo_trigger_date(
         self, ctx: commands.Context, task_name: str, *, datetime_str: DatetimeConverter
@@ -443,7 +571,7 @@ class FIFO(commands.Cog):
         Add a "run once" datetime trigger to the specified task
         """
 
-        task = Task(task_name, ctx.guild.id, self.config)
+        task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
         await task.load_from_config()
 
         if task.data is None:
@@ -483,7 +611,7 @@ class FIFO(commands.Cog):
 
         See https://crontab.guru/ for help generating the cron_str
         """
-        task = Task(task_name, ctx.guild.id, self.config)
+        task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
         await task.load_from_config()
 
         if task.data is None:
@@ -2,17 +2,13 @@ import asyncio
 import base64
 import logging
 import pickle
-from datetime import datetime
-from typing import Tuple, Union
 
 from apscheduler.job import Job
-from apscheduler.jobstores.base import ConflictingIdError, JobLookupError
 from apscheduler.jobstores.memory import MemoryJobStore
 from apscheduler.schedulers.asyncio import run_in_event_loop
 from apscheduler.util import datetime_to_utc_timestamp
 from redbot.core import Config
-
-# TODO: use get_lock on config
+# TODO: use get_lock on config maybe
 from redbot.core.bot import Red
 from redbot.core.utils import AsyncIter
 
@@ -28,44 +24,56 @@ class RedConfigJobStore(MemoryJobStore):
         self.config = config
         self.bot = bot
         self.pickle_protocol = pickle.HIGHEST_PROTOCOL
-        self._eventloop = self.bot.loop
-        # TODO: self.config.jobs_index is never used,
-        #  fine but maybe a sign of inefficient use of config
+        self._eventloop = self.bot.loop  # Used for @run_in_event_loop
 
-        # task = asyncio.create_task(self.load_from_config())
-        # while not task.done():
-        #     sleep(0.1)
-        # future = asyncio.ensure_future(self.load_from_config(), loop=self.bot.loop)
 
     @run_in_event_loop
     def start(self, scheduler, alias):
         super().start(scheduler, alias)
+        for job, timestamp in self._jobs:
+            job._scheduler = self._scheduler
+            job._jobstore_alias = self._alias
 
-    async def load_from_config(self, scheduler, alias):
-        super().start(scheduler, alias)
+    async def load_from_config(self):
         _jobs = await self.config.jobs()
-        self._jobs = [
-            (await self._decode_job(job), timestamp) async for (job, timestamp) in AsyncIter(_jobs)
-        ]
+        # self._jobs = [
+        #     (await self._decode_job(job), timestamp) async for (job, timestamp) in AsyncIter(_jobs)
+        # ]
+        async for job, timestamp in AsyncIter(_jobs):
+            job = await self._decode_job(job)
+            index = self._get_job_index(timestamp, job.id)
+            self._jobs.insert(index, (job, timestamp))
+            self._jobs_index[job.id] = (job, timestamp)
+
+    async def save_to_config(self):
+        """Yea that's basically it"""
+        await self.config.jobs.set(
+            [(self._encode_job(job), timestamp) for job, timestamp in self._jobs]
+        )
+
         # self._jobs_index = await self.config.jobs_index.all()  # Overwritten by next
-        self._jobs_index = {job.id: (job, timestamp) for job, timestamp in self._jobs}
+        # self._jobs_index = {job.id: (job, timestamp) for job, timestamp in self._jobs}
 
     def _encode_job(self, job: Job):
         job_state = job.__getstate__()
-        new_args = list(job_state["args"])
-        new_args[0]["config"] = None
-        new_args[0]["bot"] = None
-        job_state["args"] = tuple(new_args)
+        job_state["kwargs"]["config"] = None
+        job_state["kwargs"]["bot"] = None
+        # new_kwargs = job_state["kwargs"]
+        # new_kwargs["config"] = None
+        # new_kwargs["bot"] = None
+        # job_state["kwargs"] = new_kwargs
         encoded = base64.b64encode(pickle.dumps(job_state, self.pickle_protocol))
         out = {
             "_id": job.id,
             "next_run_time": datetime_to_utc_timestamp(job.next_run_time),
            "job_state": encoded.decode("ascii"),
         }
-        new_args = list(job_state["args"])
-        new_args[0]["config"] = self.config
-        new_args[0]["bot"] = self.bot
-        job_state["args"] = tuple(new_args)
+        job_state["kwargs"]["config"] = self.config
+        job_state["kwargs"]["bot"] = self.bot
+        # new_kwargs = job_state["kwargs"]
+        # new_kwargs["config"] = self.config
+        # new_kwargs["bot"] = self.bot
+        # job_state["kwargs"] = new_kwargs
         # log.debug(f"Encoding job id: {job.id}\n"
         #           f"Encoded as: {out}")
 
@@ -76,10 +84,15 @@ class RedConfigJobStore(MemoryJobStore):
             return None
         job_state = in_job["job_state"]
         job_state = pickle.loads(base64.b64decode(job_state))
-        new_args = list(job_state["args"])
-        new_args[0]["config"] = self.config
-        new_args[0]["bot"] = self.bot
-        job_state["args"] = tuple(new_args)
+        if job_state["args"]:  # Backwards compatibility on args to kwargs
+            job_state["kwargs"] = {**job_state["args"][0]}
+            job_state["args"] = []
+        job_state["kwargs"]["config"] = self.config
+        job_state["kwargs"]["bot"] = self.bot
+        # new_kwargs = job_state["kwargs"]
+        # new_kwargs["config"] = self.config
+        # new_kwargs["bot"] = self.bot
+        # job_state["kwargs"] = new_kwargs
         job = Job.__new__(Job)
         job.__setstate__(job_state)
         job._scheduler = self._scheduler
@@ -96,78 +109,82 @@ class RedConfigJobStore(MemoryJobStore):
 
         return job
 
-    @run_in_event_loop
-    def add_job(self, job: Job):
-        if job.id in self._jobs_index:
-            raise ConflictingIdError(job.id)
-        # log.debug(f"Check job args: {job.args=}")
-        timestamp = datetime_to_utc_timestamp(job.next_run_time)
-        index = self._get_job_index(timestamp, job.id)  # This is fine
-        self._jobs.insert(index, (job, timestamp))
-        self._jobs_index[job.id] = (job, timestamp)
-        asyncio.create_task(self._async_add_job(job, index, timestamp))
-        # log.debug(f"Added job: {self._jobs[index][0].args}")
-
-    async def _async_add_job(self, job, index, timestamp):
-        encoded_job = self._encode_job(job)
-        job_tuple = tuple([encoded_job, timestamp])
-        async with self.config.jobs() as jobs:
-            jobs.insert(index, job_tuple)
-        # await self.config.jobs_index.set_raw(job.id, value=job_tuple)
-        return True
-
-    @run_in_event_loop
-    def update_job(self, job):
-        old_tuple: Tuple[Union[Job, None], Union[datetime, None]] = self._jobs_index.get(
-            job.id, (None, None)
-        )
-        old_job = old_tuple[0]
-        old_timestamp = old_tuple[1]
-        if old_job is None:
-            raise JobLookupError(job.id)
-
-        # If the next run time has not changed, simply replace the job in its present index.
-        # Otherwise, reinsert the job to the list to preserve the ordering.
-        old_index = self._get_job_index(old_timestamp, old_job.id)
-        new_timestamp = datetime_to_utc_timestamp(job.next_run_time)
-        asyncio.create_task(
-            self._async_update_job(job, new_timestamp, old_index, old_job, old_timestamp)
-        )
-
-    async def _async_update_job(self, job, new_timestamp, old_index, old_job, old_timestamp):
-        encoded_job = self._encode_job(job)
-        if old_timestamp == new_timestamp:
-            self._jobs[old_index] = (job, new_timestamp)
-            async with self.config.jobs() as jobs:
-                jobs[old_index] = (encoded_job, new_timestamp)
-        else:
-            del self._jobs[old_index]
-            new_index = self._get_job_index(new_timestamp, job.id)  # This is fine
-            self._jobs.insert(new_index, (job, new_timestamp))
-            async with self.config.jobs() as jobs:
-                del jobs[old_index]
-                jobs.insert(new_index, (encoded_job, new_timestamp))
-        self._jobs_index[old_job.id] = (job, new_timestamp)
-        # await self.config.jobs_index.set_raw(old_job.id, value=(encoded_job, new_timestamp))
-
-        log.debug(f"Async Updated {job.id=}")
-        log.debug(f"Check job args: {job.args=}")
-
-    @run_in_event_loop
-    def remove_job(self, job_id):
-        job, timestamp = self._jobs_index.get(job_id, (None, None))
-        if job is None:
-            raise JobLookupError(job_id)
-
-        index = self._get_job_index(timestamp, job_id)
-        del self._jobs[index]
-        del self._jobs_index[job.id]
-        asyncio.create_task(self._async_remove_job(index, job))
-
-    async def _async_remove_job(self, index, job):
-        async with self.config.jobs() as jobs:
-            del jobs[index]
-        # await self.config.jobs_index.clear_raw(job.id)
+    # @run_in_event_loop
+    # def add_job(self, job: Job):
+    #     if job.id in self._jobs_index:
+    #         raise ConflictingIdError(job.id)
+    #     # log.debug(f"Check job args: {job.args=}")
+    #     timestamp = datetime_to_utc_timestamp(job.next_run_time)
+    #     index = self._get_job_index(timestamp, job.id)  # This is fine
+    #     self._jobs.insert(index, (job, timestamp))
+    #     self._jobs_index[job.id] = (job, timestamp)
+    #     task = asyncio.create_task(self._async_add_job(job, index, timestamp))
+    #     self._eventloop.run_until_complete(task)
+    #     # log.debug(f"Added job: {self._jobs[index][0].args}")
+    #
+    # async def _async_add_job(self, job, index, timestamp):
+    #     encoded_job = self._encode_job(job)
+    #     job_tuple = tuple([encoded_job, timestamp])
+    #     async with self.config.jobs() as jobs:
+    #         jobs.insert(index, job_tuple)
+    #     # await self.config.jobs_index.set_raw(job.id, value=job_tuple)
+    #     return True

+    # @run_in_event_loop
+    # def update_job(self, job):
+    #     old_tuple: Tuple[Union[Job, None], Union[datetime, None]] = self._jobs_index.get(
+    #         job.id, (None, None)
+    #     )
+    #     old_job = old_tuple[0]
+    #     old_timestamp = old_tuple[1]
+    #     if old_job is None:
+    #         raise JobLookupError(job.id)
+    #
+    #     # If the next run time has not changed, simply replace the job in its present index.
+    #     # Otherwise, reinsert the job to the list to preserve the ordering.
+    #     old_index = self._get_job_index(old_timestamp, old_job.id)
+    #     new_timestamp = datetime_to_utc_timestamp(job.next_run_time)
+    #     task = asyncio.create_task(
+    #         self._async_update_job(job, new_timestamp, old_index, old_job, old_timestamp)
+    #     )
+    #     self._eventloop.run_until_complete(task)
+    #
+    # async def _async_update_job(self, job, new_timestamp, old_index, old_job, old_timestamp):
+    #     encoded_job = self._encode_job(job)
+    #     if old_timestamp == new_timestamp:
+    #         self._jobs[old_index] = (job, new_timestamp)
+    #         async with self.config.jobs() as jobs:
+    #             jobs[old_index] = (encoded_job, new_timestamp)
+    #     else:
+    #         del self._jobs[old_index]
+    #         new_index = self._get_job_index(new_timestamp, job.id)  # This is fine
+    #         self._jobs.insert(new_index, (job, new_timestamp))
+    #         async with self.config.jobs() as jobs:
+    #             del jobs[old_index]
+    #             jobs.insert(new_index, (encoded_job, new_timestamp))
+    #     self._jobs_index[old_job.id] = (job, new_timestamp)
+    #     # await self.config.jobs_index.set_raw(old_job.id, value=(encoded_job, new_timestamp))
+    #
+    #     log.debug(f"Async Updated {job.id=}")
+    #     # log.debug(f"Check job args: {job.kwargs=}")

+    # @run_in_event_loop
+    # def remove_job(self, job_id):
+    #     """Copied instead of super for the asyncio args"""
+    #     job, timestamp = self._jobs_index.get(job_id, (None, None))
+    #     if job is None:
+    #         raise JobLookupError(job_id)
+    #
+    #     index = self._get_job_index(timestamp, job_id)
+    #     del self._jobs[index]
+    #     del self._jobs_index[job.id]
+    #     task = asyncio.create_task(self._async_remove_job(index, job))
+    #     self._eventloop.run_until_complete(task)
+    #
+    # async def _async_remove_job(self, index, job):
+    #     async with self.config.jobs() as jobs:
+    #         del jobs[index]
+    #     # await self.config.jobs_index.clear_raw(job.id)
 
     @run_in_event_loop
     def remove_all_jobs(self):
@@ -180,4 +197,8 @@ class RedConfigJobStore(MemoryJobStore):
 
     def shutdown(self):
         """Removes all jobs without clearing config"""
+        asyncio.create_task(self.async_shutdown())
+
+    async def async_shutdown(self):
+        await self.save_to_config()
         super().remove_all_jobs()
fifo/task.py (54 changes)
@@ -39,9 +39,9 @@ def parse_triggers(data: Union[Dict, None]):
         return None
 
     if len(data["triggers"]) > 1:  # Multiple triggers
-        return OrTrigger(get_trigger(t_data) for t_data in data["triggers"])
+        return OrTrigger([get_trigger(t_data) for t_data in data["triggers"]])
+    else:
         return get_trigger(data["triggers"][0])
 
 
 class FakeMessage:
@@ -108,20 +108,6 @@ class Task:
                         "tzinfo": getattr(t["tzinfo"], "zone", None),
                     }
                 )
-                # triggers.append(
-                #     {
-                #         "type": t["type"],
-                #         "time_data": {
-                #             "year": dt.year,
-                #             "month": dt.month,
-                #             "day": dt.day,
-                #             "hour": dt.hour,
-                #             "minute": dt.minute,
-                #             "second": dt.second,
-                #             "tzinfo": dt.tzinfo,
-                #         },
-                #     }
-                # )
                 continue
 
             if t["type"] == "cron":
@@ -180,7 +166,7 @@ class Task:
             return
 
         self.author_id = data["author_id"]
-        self.guild_id = data["guild_id"]
+        self.guild_id = data["guild_id"]  # Weird I'm doing this, since self.guild_id was just used
         self.channel_id = data["channel_id"]
 
         self.data = data["data"]
@@ -239,20 +225,26 @@ class Task:
 
     async def execute(self):
         if not self.data or not self.get_command_str():
-            log.warning(f"Could not execute task due to data problem: {self.data=}")
+            log.warning(f"Could not execute Task[{self.name}] due to data problem: {self.data=}")
             return False
 
         guild: discord.Guild = self.bot.get_guild(self.guild_id)  # used for get_prefix
         if guild is None:
-            log.warning(f"Could not execute task due to missing guild: {self.guild_id}")
+            log.warning(
+                f"Could not execute Task[{self.name}] due to missing guild: {self.guild_id}"
+            )
             return False
         channel: discord.TextChannel = guild.get_channel(self.channel_id)
         if channel is None:
-            log.warning(f"Could not execute task due to missing channel: {self.channel_id}")
+            log.warning(
+                f"Could not execute Task[{self.name}] due to missing channel: {self.channel_id}"
+            )
            return False
         author: discord.User = guild.get_member(self.author_id)
         if author is None:
-            log.warning(f"Could not execute task due to missing author: {self.author_id}")
+            log.warning(
+                f"Could not execute Task[{self.name}] due to missing author: {self.author_id}"
+            )
             return False
 
         actual_message: discord.Message = channel.last_message
@@ -267,15 +259,15 @@ class Task:
             actual_message = await author.history(limit=1).flatten()
             if not actual_message:  # Okay, the *author* has never sent a message?
                 log.warning("No message found in channel cache yet, skipping execution")
-                return
+                return False
             actual_message = actual_message[0]
 
         message = FakeMessage(actual_message)
         # message = FakeMessage2
         message.author = author
-        message.guild = guild  # Just in case we got desperate
+        message.guild = guild  # Just in case we got desperate, see above
         message.channel = channel
-        message.id = time_snowflake(datetime.now())  # Pretend to be now
+        message.id = time_snowflake(datetime.utcnow(), high=False)  # Pretend to be now
         message = neuter_message(message)
 
         # absolutely weird that this takes a message object instead of guild
@@ -287,15 +279,21 @@ class Task:
 
         message.content = f"{prefix}{self.get_command_str()}"
 
-        if not message.guild or not message.author or not message.content:
-            log.warning(f"Could not execute task due to message problem: {message}")
+        if (
+            not message.guild
+            or not message.author
+            or not message.content
+            or message.content == prefix
+        ):
+            log.warning(f"Could not execute Task[{self.name}] due to message problem: {message}")
             return False
 
         new_ctx: commands.Context = await self.bot.get_context(message)
         new_ctx.assume_yes = True
         if not new_ctx.valid:
             log.warning(
-                f"Could not execute Task[{self.name}] due invalid context: {new_ctx.invoked_with}"
+                f"Could not execute Task[{self.name}] due invalid context: "
+                f"{new_ctx.invoked_with=} {new_ctx.prefix=} {new_ctx.command=}"
             )
             return False
 
@@ -83,7 +83,7 @@ class LastSeen(Cog):
             # description="{} was last seen at this date and time".format(member.display_name),
             # timestamp=last_seen)
 
-            embed = discord.Embed(timestamp=last_seen)
+            embed = discord.Embed(timestamp=last_seen, color=await self.bot.get_embed_color(ctx))
             await ctx.send(embed=embed)
 
     @commands.Cog.listener()
@@ -8,9 +8,7 @@ from redbot.core.data_manager import cog_data_path
 
 
 class Nudity(commands.Cog):
-    """
-    V3 Cog Template
-    """
+    """Monitor images for NSFW content and moves them to a nsfw channel if possible"""
 
     def __init__(self, bot: Red):
         super().__init__()
@@ -793,7 +793,7 @@ class PlantTycoon(commands.Cog):
                 pass
             await asyncio.sleep(self.defaults["timers"]["notification"] * 60)
 
-    def __unload(self):
+    def cog_unload(self):
         self.completion_task.cancel()
         # self.degradation_task.cancel()
         self.notification_task.cancel()
@@ -50,6 +50,7 @@ class StealEmoji(Cog):
     default_global = {
         "stolemoji": {},
         "guildbanks": [],
+        "autobanked_guilds": [],
         "on": False,
         "notify": 0,
         "autobank": False,
@@ -145,11 +146,54 @@ class StealEmoji(Cog):
 
         await ctx.maybe_send_embed("AutoBanking is now " + str(not curr_setting))
 
+    @checks.is_owner()
+    @commands.guild_only()
+    @stealemoji.command(name="deleteserver", aliases=["deleteguild"])
+    async def se_deleteserver(self, ctx: commands.Context, guild_id=None):
+        """Delete servers the bot is the owner of.
+
+        Useful for auto-generated guildbanks."""
+        if guild_id is None:
+            guild = ctx.guild
+        else:
+            guild = await self.bot.get_guild(guild_id)
+
+        if guild is None:
+            await ctx.maybe_send_embed("Failed to get guild, cancelling")
+            return
+        guild: discord.Guild
+        await ctx.maybe_send_embed(
+            f"Will attempt to delete {guild.name} ({guild.id})\n" f"Okay to continue? (yes/no)"
+        )
+
+        def check(m):
+            return m.author == ctx.author and m.channel == ctx.channel
+
+        try:
+            answer = await self.bot.wait_for("message", timeout=120, check=check)
+        except asyncio.TimeoutError:
+            await ctx.send("Timed out, canceling")
+            return
+
+        if answer.content.upper() not in ["Y", "YES"]:
+            await ctx.maybe_send_embed("Cancelling")
+            return
+        try:
+            await guild.delete()
+        except discord.Forbidden:
+            log.exception("No permission to delete. I'm probably not the guild owner")
+            await ctx.maybe_send_embed("No permission to delete. I'm probably not the guild owner")
+        except discord.HTTPException:
+            log.exception("Unexpected error when deleting guild")
+            await ctx.maybe_send_embed("Unexpected error when deleting guild")
+        else:
+            await self.bot.send_to_owners(f"Guild {guild.name} deleted")
+
     @checks.is_owner()
     @commands.guild_only()
     @stealemoji.command(name="bank")
     async def se_bank(self, ctx):
-        """Add current server as emoji bank"""
+        """Add or remove current server as emoji bank"""
 
         def check(m):
             return (
@@ -235,6 +279,9 @@ class StealEmoji(Cog):
                 return
             async with self.config.guildbanks() as guildbanks:
                 guildbanks.append(guildbank.id)
+            # Track generated guilds for easier deletion
+            async with self.config.autobanked_guilds() as autobanked_guilds:
+                autobanked_guilds.append(guildbank.id)
 
             await asyncio.sleep(2)
 
@@ -1,6 +1,7 @@
 import asyncio
 import logging
 from datetime import datetime, timedelta
+from typing import Optional
 
 import discord
 from redbot.core import Config, checks, commands
@@ -19,6 +20,15 @@ async def sleep_till_next_hour():
     await asyncio.sleep((next_hour - datetime.utcnow()).seconds)
 
 
+async def announce_to_channel(channel, results, title):
+    if channel is not None and results:
+        await channel.send(title)
+        for page in pagify(results, shorten_by=50):
+            await channel.send(page)
+    elif results:  # Channel is None, log the results
+        log.info(results)
+
+
 class Timerole(Cog):
     """Add roles to users based on time on server"""
 
@@ -27,16 +37,16 @@ class Timerole(Cog):
         self.bot = bot
         self.config = Config.get_conf(self, identifier=9811198108111121, force_registration=True)
         default_global = {}
-        default_guild = {"announce": None}
+        default_guild = {"announce": None, "reapply": True}
+        default_rolemember = {"had_role": False, "check_again_time": None}
         default_role = {"enabled": False, "days": 0, "hours": 0, "remove": False, "required": []}
-        default_memberrole = {"had_role": False, "check_again_time": None}
 
         self.config.register_global(**default_global)
         self.config.register_guild(**default_guild)
         self.config.register_role(**default_role)
 
-        self.config.init_custom("MemberRole", 2)
-        self.config.register_custom("MemberRole", **default_memberrole)
+        self.config.init_custom("RoleMember", 2)
+        self.config.register_custom("RoleMember", **default_rolemember)
 
         self.updating = asyncio.create_task(self.check_hour())
 
@@ -56,11 +66,14 @@ class Timerole(Cog):
 
         Useful for troubleshooting the initial setup
         """
 
         async with ctx.typing():
+            pre_run = datetime.utcnow()
             await self.timerole_update()
+            after_run = datetime.utcnow()
             await ctx.tick()
 
+        await ctx.maybe_send_embed(f"Took {after_run-pre_run} seconds")
+
     @commands.command()
     @checks.guildowner()
     @commands.guild_only()
@@ -104,9 +117,7 @@ class Timerole(Cog):
         await self.config.role(role).required.set(requiredroles)
         await self.config.role(role).enabled.set(True)
         await ctx.maybe_send_embed(
-            "Time Role for {0} set to {1} days and {2} hours until added".format(
-                role.name, days, hours
-            )
+            f"Time Role for {role.name} set to {days} days and {hours} hours until added"
         )
 
     @timerole.command()
@@ -136,24 +147,36 @@ class Timerole(Cog):
         await self.config.role(role).required.set(requiredroles)
         await self.config.role(role).enabled.set(True)
         await ctx.maybe_send_embed(
-            "Time Role for {0} set to {1} days and {2} hours until removed".format(
-                role.name, days, hours
-            )
+            f"Time Role for {role.name} set to {days} days and {hours} hours until removed"
         )
 
     @timerole.command()
-    async def channel(self, ctx: commands.Context, channel: discord.TextChannel):
+    async def channel(self, ctx: commands.Context, channel: Optional[discord.TextChannel] = None):
         """Sets the announce channel for role adds"""
         guild = ctx.guild
+        if channel is None:
+            await self.config.guild(guild).announce.clear()
+            await ctx.maybe_send_embed(f"Announce channel has been cleared")
+        else:
+            await self.config.guild(guild).announce.set(channel.id)
+            await ctx.send(f"Announce channel set to {channel.mention}")
 
-        await self.config.guild(guild).announce.set(channel.id)
-        await ctx.send("Announce channel set to {0}".format(channel.mention))
+    @timerole.command()
+    async def reapply(self, ctx: commands.Context):
+        """Toggle reapplying roles if the member loses it somehow. Defaults to True"""
+        guild = ctx.guild
+        current_setting = await self.config.guild(guild).reapply()
+        await self.config.guild(guild).reapply.set(not current_setting)
+        await ctx.maybe_send_embed(f"Reapplying roles is now set to: {not current_setting}")
 
     @timerole.command()
     async def delrole(self, ctx: commands.Context, role: discord.Role):
         """Deletes a role from being added/removed after specified time"""
 
         await self.config.roles(role).enabled.set(False)
-        await ctx.send(f"{role.name} will no longer be processed")
+        await self.config.custom("RoleMember", role.id).clear()
+        await ctx.maybe_send_embed(f"{role.name} will no longer be applied")
 
     @timerole.command()
     async def list(self, ctx: commands.Context):
@@ -173,89 +196,197 @@ class Timerole(Cog):
                 str(discord.utils.get(guild.roles, id=int(new_id)))
                 for new_id in r_data["required"]
             ]
-            out += "{} | {} days | requires: {}\n".format(str(role), r_data["days"], r_roles)
+            out += f"{role} | {r_data['days']} days | requires: {r_roles}\n"
         await ctx.maybe_send_embed(out)
 
     async def timerole_update(self):
-        async for guild in AsyncIter(self.bot.guilds):
-            addlist = []
-            removelist = []
-
-            role_dict = await self.config.guild(guild).roles()
-            if not any(role_data for role_data in role_dict.values()):  # No roles
+        utcnow = datetime.utcnow()
+        all_guilds = await self.config.all_guilds()
+
+        # all_mrs = await self.config.custom("RoleMember").all()
+
+        # log.debug(f"Begin timerole update")
+
+        for guild in self.bot.guilds:
+            guild_id = guild.id
+            if guild_id not in all_guilds:
+                log.debug(f"Guild has no configured settings: {guild}")
                 continue
 
-            async for member in AsyncIter(guild.members):
-                has_roles = [r.id for r in member.roles]
+            add_results = ""
+            remove_results = ""
+            reapply = all_guilds[guild_id]["reapply"]
+            role_dict = all_guilds[guild_id]["roles"]
+
+            if not any(role_data for role_data in role_dict.values()):  # No roles
+                log.debug(f"No roles are configured for guild: {guild}")
+                continue
+
+            # all_mr = await self.config.all_custom("RoleMember")
+            # log.debug(f"{all_mr=}")
+
+            async for member in AsyncIter(guild.members, steps=10):
+                addlist = []
+                removelist = []
+
+                for role_id, role_data in role_dict.items():
+                    # Skip non-configured roles
+                    if not role_data:
+                        continue
+
+                    mr_dict = await self.config.custom("RoleMember", role_id, member.id).all()
+
+                    # Stop if they've had the role and reapplying is disabled
+                    if not reapply and mr_dict["had_role"]:
+                        log.debug(f"{member.display_name} - Not reapplying")
+                        continue
+
+                    # Stop if the check_again_time hasn't passed yet
+                    if (
+                        mr_dict["check_again_time"] is not None
+                        and datetime.fromisoformat(mr_dict["check_again_time"]) >= utcnow
+                    ):
+                        log.debug(f"{member.display_name} - Not time to check again yet")
+                        continue
+                    member: discord.Member
+                    has_roles = set(r.id for r in member.roles)
+
+                    # Stop if they currently have or don't have the role, and mark had_role
+                    if (int(role_id) in has_roles and not role_data["remove"]) or (
+                        int(role_id) not in has_roles and role_data["remove"]
+                    ):
+                        if not mr_dict["had_role"]:
+                            await self.config.custom(
+                                "RoleMember", role_id, member.id
+                            ).had_role.set(True)
+                            log.debug(f"{member.display_name} - applying had_role")
+                        continue
+
+                    # Stop if they don't have all the required roles
+                    if role_data is None or (
+                        "required" in role_data and not set(role_data["required"]) & has_roles
+                    ):
+                        continue
+
+                    check_time = member.joined_at + timedelta(
+                        days=role_data["days"],
+                        hours=role_data.get("hours", 0),
+                    )
+
+                    # Check if enough time has passed to get the role and save the check_again_time
+                    if check_time >= utcnow:
+                        await self.config.custom(
+                            "RoleMember", role_id, member.id
+                        ).check_again_time.set(check_time.isoformat())
+                        log.debug(
+                            f"{member.display_name} - Not enough time has passed to qualify for the role\n"
+                            f"Waiting until {check_time}"
+                        )
+                        continue
+
+                    if role_data["remove"]:
+                        removelist.append(role_id)
+                    else:
+                        addlist.append(role_id)
+
+                # Done iterating through roles, now add or remove the roles
+                if not addlist and not removelist:
+                    continue
+
+                # log.debug(f"{addlist=}\n{removelist=}")
                 add_roles = [
-                    int(rID)
-                    for rID, r_data in role_dict.items()
-                    if r_data is not None and not r_data["remove"]
+                    discord.utils.get(guild.roles, id=int(role_id)) for role_id in addlist
                 ]
                 remove_roles = [
-                    int(rID)
-                    for rID, r_data in role_dict.items()
-                    if r_data is not None and r_data["remove"]
+                    discord.utils.get(guild.roles, id=int(role_id)) for role_id in removelist
                 ]
 
-                check_add_roles = set(add_roles) - set(has_roles)
-                check_remove_roles = set(remove_roles) & set(has_roles)
-
-                await self.check_required_and_date(
-                    addlist, check_add_roles, has_roles, member, role_dict
-                )
-                await self.check_required_and_date(
-                    removelist, check_remove_roles, has_roles, member, role_dict
-                )
+                if None in add_roles or None in remove_roles:
+                    log.info(
+                        f"Timerole ran into an error with the roles in: {add_roles + remove_roles}"
+                    )
+
+                if addlist:
+                    try:
+                        await member.add_roles(*add_roles, reason="Timerole", atomic=False)
+                    except (discord.Forbidden, discord.NotFound) as e:
+                        log.exception("Failed Adding Roles")
+                        add_results += f"{member.display_name} : **(Failed Adding Roles)**\n"
+                    else:
+                        add_results += " \n".join(
+                            f"{member.display_name} : {role.name}" for role in add_roles
+                        )
+                        for role_id in addlist:
+                            await self.config.custom(
+                                "RoleMember", role_id, member.id
+                            ).had_role.set(True)
+
+                if removelist:
+                    try:
+                        await member.remove_roles(*remove_roles, reason="Timerole", atomic=False)
+                    except (discord.Forbidden, discord.NotFound) as e:
+                        log.exception("Failed Removing Roles")
+                        remove_results += f"{member.display_name} : **(Failed Removing Roles)**\n"
+                    else:
+                        remove_results += " \n".join(
+                            f"{member.display_name} : {role.name}" for role in remove_roles
+                        )
+                        for role_id in removelist:
+                            await self.config.custom(
+                                "RoleMember", role_id, member.id
+                            ).had_role.set(True)
+
+            # Done iterating through members, now maybe announce to the guild
             channel = await self.config.guild(guild).announce()
             if channel is not None:
                 channel = guild.get_channel(channel)
-
-            title = "**These members have received the following roles**\n"
-            await self.announce_roles(title, addlist, channel, guild, to_add=True)
-            title = "**These members have lost the following roles**\n"
-            await self.announce_roles(title, removelist, channel, guild, to_add=False)
-
-    async def announce_roles(self, title, role_list, channel, guild, to_add: True):
-        results = ""
-        async for member, role_id in AsyncIter(role_list):
-            role = discord.utils.get(guild.roles, id=role_id)
-            try:
-                if to_add:
-                    await member.add_roles(role, reason="Timerole")
-                else:
-                    await member.remove_roles(role, reason="Timerole")
-            except (discord.Forbidden, discord.NotFound) as e:
-                results += "{} : {} **(Failed)**\n".format(member.display_name, role.name)
-            else:
-                results += "{} : {}\n".format(member.display_name, role.name)
-        if channel is not None and results:
-            await channel.send(title)
-            for page in pagify(results, shorten_by=50):
-                await channel.send(page)
-        elif results:  # Channel is None, log the results
-            log.info(results)
-
-    async def check_required_and_date(self, role_list, check_roles, has_roles, member, role_dict):
-        async for role_id in AsyncIter(check_roles):
-            # Check for required role
-            if "required" in role_dict[str(role_id)]:
-                if not set(role_dict[str(role_id)]["required"]) & set(has_roles):
-                    # Doesn't have required role
-                    continue
-
-            if (
-                member.joined_at
-                + timedelta(
-                    days=role_dict[str(role_id)]["days"],
-                    hours=role_dict[str(role_id)].get("hours", 0),
-                )
-                <= datetime.today()
-            ):
-                # Qualifies
-                role_list.append((member, role_id))
+
+            if add_results:
+                title = "**These members have received the following roles**\n"
+                await announce_to_channel(channel, add_results, title)
+            if remove_results:
+                title = "**These members have lost the following roles**\n"
+                await announce_to_channel(channel, remove_results, title)
+        # End
+
+    # async def announce_roles(self, title, role_list, channel, guild, to_add: True):
+    #     results = ""
+    #     async for member, role_id in AsyncIter(role_list):
+    #         role = discord.utils.get(guild.roles, id=role_id)
+    #         try:
+    #             if to_add:
+    #                 await member.add_roles(role, reason="Timerole")
+    #             else:
+    #                 await member.remove_roles(role, reason="Timerole")
+    #         except (discord.Forbidden, discord.NotFound) as e:
+    #             results += f"{member.display_name} : {role.name} **(Failed)**\n"
+    #         else:
+    #             results += f"{member.display_name} : {role.name}\n"
    #     if channel is not None and results:
    #         await channel.send(title)
    #         for page in pagify(results, shorten_by=50):
    #             await channel.send(page)
    #     elif results:  # Channel is None, log the results
    #         log.info(results)

    # async def check_required_and_date(self, role_list, check_roles, has_roles, member, role_dict):
    #     async for role_id in AsyncIter(check_roles):
    #         # Check for required role
    #         if "required" in role_dict[str(role_id)]:
    #             if not set(role_dict[str(role_id)]["required"]) & set(has_roles):
    #                 # Doesn't have required role
    #                 continue
    #
    #         if (
    #             member.joined_at
    #             + timedelta(
    #                 days=role_dict[str(role_id)]["days"],
    #                 hours=role_dict[str(role_id)].get("hours", 0),
    #             )
    #             <= datetime.utcnow()
    #         ):
    #             # Qualifies
    #             role_list.append((member, role_id))
 
     async def check_hour(self):
         await sleep_till_next_hour()
@@ -56,7 +56,7 @@ class Werewolf(Cog):
         """Nothing to delete"""
         return
 
-    def __unload(self):
+    def cog_unload(self):
         log.debug("Unload called")
         for game in self.games.values():
             del game