removed data module
@@ -1,3 +1,3 @@
 from .kwaylon import Kwaylon
-from .data import MsgData
+from .reactions import ReactionData
 from .jokes import Joke, GifJoke
kwaylon/data.py (115 lines removed)
@@ -1,115 +0,0 @@
-import asyncio
-import logging
-import sqlite3
-from pathlib import Path
-from typing import Union
-
-import pandas as pd
-from nextcord import Client, Message
-
-from .msg import LOGGER, reaction_df
-from .msg import reaction_dict
-
-LOGGER = logging.getLogger(__name__)
-
-
-class MsgData:
-    """Wrapper class to manage saving and loading the DataFrame of reactions"""
-    db_path: Path
-    msgs: pd.DataFrame
-    reactions: pd.DataFrame
-    lock: asyncio.Lock
-
-    def __init__(self, path: Union[str, Path]):
-        self.lock = asyncio.Lock()
-        self.db_path: Path = Path(path) if isinstance(path, str) else path
-
-    def __repr__(self):
-        return f'<{__name__}.{self.__class__.__name__} with {self.reactions.shape[0]} reactions>'
-
-    @property
-    def sql_context(self):
-        return sqlite3.connect(self.db_path)
-
-    async def load_sql(self, local_tz='US/Central'):
-        async with self.lock:
-            with self.sql_context as con:
-                LOGGER.info(f'Opened {self.db_path.name}')
-                try:
-                    self.reactions = pd.read_sql('select * from reactions', con=con).reset_index(drop=True)
-                except:
-                    LOGGER.warning(f'failed to read reactions from: {self.db_path.resolve()}')
-                else:
-                    LOGGER.info(f'read {self.reactions.shape[0]:,} reactions')
-                    self.reactions['datetime'] = pd.to_datetime(self.reactions['datetime'])
-                    # LOGGER.info(f"'datetime' dtype: {self.reactions['datetime'].dtype}")
-                    # LOGGER.info(f"{self.reactions['datetime'].values[:3]}...")
-
-                    # try:
-                    #     self.reactions['datetime'] = pd.to_datetime(self.reactions['datetime']).dt.tz_convert(local_tz)
-                    # except Exception as e:
-                    #     LOGGER.exception(e)
-                    #     try:
-                    #         self.reactions['datetime'] = pd.to_datetime(self.reactions['datetime']).dt.tz_localize(local_tz)
-                    #     except Exception as e:
-                    #         LOGGER.exception(e)
-                    #         LOGGER.warning(f'Error converting timezone to {local_tz}')
-
-            con.close()
-
-    async def write_sql(self):
-        async with self.lock:
-            with self.sql_context as con:
-                self.reactions.to_sql(
-                    name='reactions',
-                    con=con,
-                    if_exists='replace',
-                    index=False,
-                    # index_label=self.reactions.index.name
-                )
-                LOGGER.info(f'wrote {self.reactions.shape[0]:,} reactions into {self.db_path.name}')
-
-    async def scan_messages(self, client: Client, **kwargs):
-        async with self.lock:
-            self.reactions = await reaction_df(client, **kwargs)
-        await self.write_sql()
-
-    def most(self, emoji: str):
-        matching = self.reactions['emoji'] == emoji
-        if not matching.any():
-            LOGGER.info(f'No reactions with {emoji}')
-            return
-        else:
-            return self.reactions.loc[matching].sort_values('count', ascending=False).reset_index(drop=True)
-
-    async def get_emoji_info(self, emoji: str):
-        async with self.lock:
-            try:
-                with self.sql_context as con:
-                    res = pd.read_sql(f"SELECT * FROM reactions WHERE emoji LIKE '{emoji}'", con=con, index_col=None)
-                    res['datetime'] = pd.to_datetime(res['datetime'])
-            except Exception as e:
-                LOGGER.exception(e)
-                res = None
-            else:
-                LOGGER.info(f'Read {res.shape[0]} reactions')
-            finally:
-                con.close()
-        return res
-
-    async def update_reaction(self, msg: Message):
-        async with self.lock:
-            try:
-                with self.sql_context as con:
-                    con.execute(f'DELETE FROM reactions WHERE msg_id = {msg.id}')
-                    data = [tuple(reaction_dict(reaction).values()) for reaction in msg.reactions]
-                    if len(data) > 0:
-                        query = f'INSERT INTO reactions VALUES({",".join("?" for _ in range(8))})'
-                        LOGGER.info(f'SQL: {query}')
-                        con.executemany(query, data)
-            except:
-                raise
-            else:
-                LOGGER.info(f'Success')
-            finally:
-                con.close()
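The removed get_emoji_info and update_reaction format emoji and msg.id straight into their SQL strings; only the INSERT path uses ? placeholders. For comparison, the same read can be written with bound parameters, which pandas.read_sql supports through its params argument. This is a minimal sketch assuming the reactions table layout shown above; the function name and db_path argument are illustrative, not part of the repository:

import sqlite3

import pandas as pd


def emoji_info(db_path: str, emoji: str) -> pd.DataFrame:
    """Read matching reactions using a bound parameter instead of string formatting (illustrative helper)."""
    con = sqlite3.connect(db_path)
    try:
        # sqlite3 substitutes the ? placeholder with the bound emoji value
        res = pd.read_sql(
            'SELECT * FROM reactions WHERE emoji LIKE ?',
            con=con,
            params=(emoji,),
            index_col=None,
        )
    finally:
        con.close()
    res['datetime'] = pd.to_datetime(res['datetime'])
    return res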
@@ -50,11 +50,14 @@ class ReactionData:
         return self.read_sql(query='SELECT * FROM reactions', con=con)
 
     def read_sql(self, query: str, con: sqlite3.Connection = None):
+        close = con is None
         con = con or sqlite3.connect(self.path)
 
         res = pd.read_sql(query, con=con, index_col=None)
         LOGGER.info(f'Read {res.shape[0]} reactions')
+        if close:
+            con.close()
 
         res['datetime'] = pd.to_datetime(res['datetime'])
 
         return res.sort_values('count', ascending=False)
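The three added lines give read_sql a connection-ownership rule: it closes a connection only if it opened one itself, so a caller that passes its own con keeps control of that connection's lifetime. A standalone sketch of the same pattern, assuming an illustrative reactions.db path and the reactions table name:

import sqlite3

import pandas as pd

DB_PATH = 'reactions.db'  # illustrative default, not taken from this commit


def read_table(query: str, con: sqlite3.Connection = None) -> pd.DataFrame:
    """Run a query, opening (and closing) a connection only when none is supplied."""
    close = con is None                   # remember whether this call owns the connection
    con = con or sqlite3.connect(DB_PATH)
    try:
        return pd.read_sql(query, con=con, index_col=None)
    finally:
        if close:                         # close only what this call opened
            con.close()

Putting the close in a try/finally also covers the case where the query itself raises, which the hunk above does not.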