Add and remove comments

pull/163/head
bobloy 4 years ago
parent b2ebddc825
commit 19ee6e6f24

@@ -1,6 +1,6 @@
 import itertools
 import logging
-from datetime import datetime, timedelta, tzinfo, MAXYEAR
+from datetime import MAXYEAR, datetime, timedelta, tzinfo
 from typing import Optional, Union
 
 import discord
@@ -11,7 +11,7 @@ from apscheduler.schedulers.base import STATE_PAUSED, STATE_RUNNING
 from redbot.core import Config, checks, commands
 from redbot.core.bot import Red
 from redbot.core.commands import TimedeltaConverter
-from redbot.core.utils.chat_formatting import humanize_list, humanize_timedelta, pagify
+from redbot.core.utils.chat_formatting import humanize_timedelta, pagify
 
 from .datetime_cron_converters import CronConverter, DatetimeConverter, TimezoneConverter
 from .task import Task
@@ -108,10 +108,10 @@ class FIFO(commands.Cog):
     async def initialize(self):
         job_defaults = {
-            "coalesce": True,
-            "max_instances": 5,
-            "misfire_grace_time": 15,
-            "replace_existing": True,
+            "coalesce": True,  # Multiple missed triggers within the grace time will only fire once
+            "max_instances": 5,  # This is probably way too high, should likely only be one
+            "misfire_grace_time": 15,  # 15 seconds ain't much, but it's honest work
+            "replace_existing": True,  # Very important for persistent data
         }
 
         # executors = {"default": AsyncIOExecutor()}
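The comments added above document APScheduler's job defaults for the cog's scheduler. For reference, in stock APScheduler 3.x the job_defaults mapping covers coalesce, max_instances and misfire_grace_time, while replace_existing is normally passed to add_job() itself. Below is a minimal, self-contained sketch of the same settings outside the cog; the job function, id and sleep duration are made up for illustration.

```python
import asyncio
from datetime import datetime

from apscheduler.schedulers.asyncio import AsyncIOScheduler

job_defaults = {
    "coalesce": True,  # several missed runs collapse into a single catch-up run
    "max_instances": 5,  # how many runs of the same job may overlap
    "misfire_grace_time": 15,  # seconds a late run may still fire before being skipped
}


def tick():
    print("tick", datetime.now().isoformat(timespec="seconds"))


async def main():
    scheduler = AsyncIOScheduler(job_defaults=job_defaults)
    # With a persistent jobstore, re-adding the same id on every startup needs
    # replace_existing=True; otherwise add_job() raises ConflictingIdError.
    scheduler.add_job(tick, "interval", seconds=5, id="tick", replace_existing=True)
    scheduler.start()
    await asyncio.sleep(12)
    scheduler.shutdown()


asyncio.run(main())
```

Taken together, coalesce plus misfire_grace_time mean that if several runs of a job were missed, at most one catch-up run fires, and only if it is less than 15 seconds late.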
@@ -119,7 +119,7 @@ class FIFO(commands.Cog):
         # Default executor is already AsyncIOExecutor
         self.scheduler = AsyncIOScheduler(job_defaults=job_defaults, logger=schedule_log)
 
-        from .redconfigjobstore import RedConfigJobStore
+        from .redconfigjobstore import RedConfigJobStore  # Wait to import to prevent cyclic import
 
         self.jobstore = RedConfigJobStore(self.config, self.bot)
         await self.jobstore.load_from_config()
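The new comment on the RedConfigJobStore import notes that it is deliberately done inside initialize() rather than at module top level. The diff does not show which module closes the cycle, so the sketch below uses two hypothetical modules, a.py and b.py, to illustrate the general deferred-import trick being referenced.

```python
# a.py
import b


def greeting():
    return "hello from a"


def run():
    return b.shout()


# b.py
def shout():
    # A top-level "from a import greeting" here would fail: a.py is only
    # half-loaded when it imports b.py, so greeting() does not exist yet.
    # Importing at call time runs after both modules have finished loading.
    import a

    return a.greeting().upper()
```

With the deferred import, `import a; a.run()` returns "HELLO FROM A" instead of raising ImportError during startup.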
@@ -507,7 +507,7 @@ class FIFO(commands.Cog):
         """
         task = Task(task_name, ctx.guild.id, self.config, bot=self.bot)
-        await task.load_from_config()
+        await task.load_from_config()  # Will set the channel and author
 
         if task.data is None:
             await ctx.maybe_send_embed(

@@ -2,17 +2,13 @@ import asyncio
 import base64
 import logging
 import pickle
-from datetime import datetime
-from typing import Tuple, Union
 
 from apscheduler.job import Job
-from apscheduler.jobstores.base import ConflictingIdError, JobLookupError
 from apscheduler.jobstores.memory import MemoryJobStore
 from apscheduler.schedulers.asyncio import run_in_event_loop
 from apscheduler.util import datetime_to_utc_timestamp
 from redbot.core import Config
 
-# TODO: use get_lock on config maybe
+# TODO: use get_lock on config
 from redbot.core.bot import Red
 from redbot.core.utils import AsyncIter
@@ -29,13 +25,7 @@ class RedConfigJobStore(MemoryJobStore):
         self.bot = bot
         self.pickle_protocol = pickle.HIGHEST_PROTOCOL
         self._eventloop = self.bot.loop  # Used for @run_in_event_loop
-        # TODO: self.config.jobs_index is never used,
-        #  fine but maybe a sign of inefficient use of config
-        # task = asyncio.create_task(self.load_from_config())
-        # while not task.done():
-        #     sleep(0.1)
-        # future = asyncio.ensure_future(self.load_from_config(), loop=self.bot.loop)
 
     @run_in_event_loop
     def start(self, scheduler, alias):
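The rest of redconfigjobstore.py's changes drop unused imports and stale commented-out startup code. The imports that remain (base64, pickle, and a pickle_protocol of pickle.HIGHEST_PROTOCOL) suggest that job state is pickled and then base64-encoded so it can be stored as a plain string in Red's Config; the snippet below is a generic sketch of that round trip using only the standard library, not the cog's actual serialization code.

```python
import base64
import pickle

PICKLE_PROTOCOL = pickle.HIGHEST_PROTOCOL


def encode_state(state: dict) -> str:
    """Pickle a job-state dict and wrap it in base64 so it survives as text."""
    raw = pickle.dumps(state, protocol=PICKLE_PROTOCOL)
    return base64.b64encode(raw).decode("ascii")


def decode_state(blob: str) -> dict:
    """Reverse of encode_state()."""
    return pickle.loads(base64.b64decode(blob.encode("ascii")))


state = {"id": "tick", "next_run_time": None, "args": (), "kwargs": {}}
assert decode_state(encode_state(state)) == state
```

The base64 step matters because Config values are stored as JSON, which cannot hold raw pickle bytes directly.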

@@ -166,7 +166,7 @@ class Task:
             return
         self.author_id = data["author_id"]
-        self.guild_id = data["guild_id"]
+        self.guild_id = data["guild_id"]  # Weird I'm doing this, since self.guild_id was just used
        self.channel_id = data["channel_id"]
         self.data = data["data"]
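The final hunk (class Task, presumably task.py) only annotates load_from_config(), which evidently copies the stored author, guild, channel and payload back onto the Task instance (hence the "Will set the channel and author" comment added earlier). A stripped-down sketch of that shape, with a plain dict standing in for the real Config lookup:

```python
from typing import Optional


class Task:
    def __init__(self, name: str, guild_id: int):
        self.name = name
        self.guild_id = guild_id
        self.author_id: Optional[int] = None
        self.channel_id: Optional[int] = None
        self.data: Optional[dict] = None

    def load_from_data(self, data: Optional[dict]) -> None:
        """Hydrate the task from stored data, mirroring the shape seen in the diff."""
        if not data:
            return
        self.author_id = data["author_id"]
        self.guild_id = data["guild_id"]  # overwrites the ctor value, as the new comment points out
        self.channel_id = data["channel_id"]
        self.data = data["data"]


task = Task("example", guild_id=0)
task.load_from_data({"author_id": 1, "guild_id": 2, "channel_id": 3, "data": {"command": "ping"}})
assert task.channel_id == 3 and task.guild_id == 2
```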
