feat: 初始化项目架构
This commit is contained in:
14
ruoyi-fastapi-backend/config/database.py
Normal file
14
ruoyi-fastapi-backend/config/database.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from sqlalchemy import create_engine
# declarative_base moved to sqlalchemy.orm in SQLAlchemy 1.4; the old
# sqlalchemy.ext.declarative import is deprecated (requires SQLAlchemy >= 1.4).
from sqlalchemy.orm import declarative_base, sessionmaker
from urllib.parse import quote_plus

from config.env import DataBaseConfig

# Full MySQL connection URL. quote_plus() escapes special characters in the
# password so they cannot corrupt the URL.
SQLALCHEMY_DATABASE_URL = f"mysql+pymysql://{DataBaseConfig.USERNAME}:{quote_plus(DataBaseConfig.PASSWORD)}@" \
                          f"{DataBaseConfig.HOST}:{DataBaseConfig.PORT}/{DataBaseConfig.DB}"

# NOTE(review): echo=True logs every SQL statement — useful in development,
# very noisy in production; consider making it configurable.
engine = create_engine(
    SQLALCHEMY_DATABASE_URL, echo=True
)
# Session factory: autocommit/autoflush disabled so each unit of work commits
# explicitly; a new session should be created per request.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Declarative base class all ORM models inherit from.
Base = declarative_base()
|
54
ruoyi-fastapi-backend/config/env.py
Normal file
54
ruoyi-fastapi-backend/config/env.py
Normal file
@@ -0,0 +1,54 @@
|
||||
import os
|
||||
|
||||
|
||||
class JwtConfig:
    """
    JWT configuration.

    SECRET_KEY may be overridden through the JWT_SECRET_KEY environment
    variable; the hard-coded value below is only a development fallback.
    """
    # SECURITY NOTE(review): a signing secret committed to source control
    # should be rotated and supplied via the environment in production.
    SECRET_KEY = os.environ.get(
        'JWT_SECRET_KEY',
        'b01c66dc2c58dc6a0aabfe2144256be36226de378bf87f72c0c795dda67f4d55'
    )
    # Signing algorithm passed to the JWT library.
    ALGORITHM = "HS256"
    # Lifetime of an issued access token, in minutes (24 hours).
    ACCESS_TOKEN_EXPIRE_MINUTES = 1440
    # Expiry of the token copy kept in Redis, in minutes.
    REDIS_TOKEN_EXPIRE_MINUTES = 30
|
||||
|
||||
|
||||
class DataBaseConfig:
    """MySQL connection settings consumed by config.database."""

    # Server location.
    HOST = '127.0.0.1'
    PORT = 3306
    # Credentials.
    USERNAME = 'root'
    PASSWORD = 'mysqlroot'
    # Schema (database) name.
    DB = 'ruoyi-fastapi'
|
||||
|
||||
|
||||
class RedisConfig:
    """Redis connection settings used for caching and tokens."""

    # Server location.
    HOST = '127.0.0.1'
    PORT = 6379
    # Empty credentials mean an unauthenticated local instance.
    USERNAME = ''
    PASSWORD = ''
    # Logical database index.
    DB = 2
|
||||
|
||||
|
||||
class CachePathConfig:
    """Filesystem location used for on-disk caches."""

    # Directory name kept separately for building relative paths/URLs.
    PATHSTR = 'caches'
    # Absolute path: <current working directory>/caches.
    PATH = os.path.join(os.path.abspath(os.getcwd()), 'caches')
|
||||
|
||||
|
||||
class RedisInitKeyConfig:
    """Well-known Redis key prefixes built into the system.

    Each entry pairs the key prefix with a human-readable remark.
    """

    # Login token storage.
    ACCESS_TOKEN = {'key': 'access_token', 'remark': '登录令牌信息'}
    # Data dictionary cache.
    SYS_DICT = {'key': 'sys_dict', 'remark': '数据字典'}
    # System parameter cache.
    SYS_CONFIG = {'key': 'sys_config', 'remark': '配置信息'}
    # Image captcha codes.
    CAPTCHA_CODES = {'key': 'captcha_codes', 'remark': '图片验证码'}
    # Account lockout markers.
    ACCOUNT_LOCK = {'key': 'account_lock', 'remark': '用户锁定'}
    # Failed-password counters.
    PASSWORD_ERROR_COUNT = {'key': 'password_error_count', 'remark': '密码错误次数'}
    # SMS verification codes.
    SMS_CODE = {'key': 'sms_code', 'remark': '短信验证码'}
|
27
ruoyi-fastapi-backend/config/get_db.py
Normal file
27
ruoyi-fastapi-backend/config/get_db.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from config.database import *
|
||||
from utils.log_util import logger
|
||||
|
||||
|
||||
def get_db_pro():
    """
    Yield a database session scoped to a single request.

    The session is closed once the request is finished, so different
    requests never share a connection.

    :return: generator yielding a SessionLocal session
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
||||
|
||||
|
||||
async def init_create_table():
    """
    Initialize the database at application startup by creating every
    table registered on Base's metadata.

    :return:
    """
    logger.info("初始化数据库连接...")
    Base.metadata.create_all(bind=engine)
    logger.info("数据库连接成功")
|
||||
|
||||
|
||||
# Public alias for the per-request session generator defined above.
get_db = get_db_pro
|
65
ruoyi-fastapi-backend/config/get_redis.py
Normal file
65
ruoyi-fastapi-backend/config/get_redis.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import aioredis
|
||||
from module_admin.service.dict_service import DictDataService
|
||||
from module_admin.service.config_service import ConfigService
|
||||
from config.env import RedisConfig
|
||||
from config.database import SessionLocal
|
||||
from utils.log_util import logger
|
||||
|
||||
|
||||
class RedisUtil:
    """
    Helper methods for managing the application's Redis connection and
    for warming caches at startup.
    """

    @classmethod
    async def create_redis_pool(cls) -> aioredis.Redis:
        """
        Open the Redis connection at application startup.

        :return: Redis connection object
        """
        logger.info("开始连接redis...")
        # NOTE(review): the port is passed as a keyword while the URL only
        # carries the host; URL-derived options win on conflict, so this
        # works, but embedding the port in the URL would be clearer.
        redis = await aioredis.from_url(
            url=f"redis://{RedisConfig.HOST}",
            port=RedisConfig.PORT,
            username=RedisConfig.USERNAME,
            password=RedisConfig.PASSWORD,
            db=RedisConfig.DB,
            encoding="utf-8",
            decode_responses=True
        )
        logger.info("redis连接成功")
        return redis

    @classmethod
    async def close_redis_pool(cls, app):
        """
        Close the Redis connection when the application shuts down.

        :param app: FastAPI application object (connection on app.state.redis)
        :return:
        """
        await app.state.redis.close()
        logger.info("关闭redis连接成功")

    @classmethod
    async def init_sys_dict(cls, redis):
        """
        Cache the data dictionary into Redis at application startup.

        :param redis: Redis connection object
        :return:
        """
        session = SessionLocal()
        try:
            await DictDataService.init_cache_sys_dict_services(session, redis)
        finally:
            # Close the session even if cache warming fails, so the
            # database connection is not leaked.
            session.close()

    @classmethod
    async def init_sys_config(cls, redis):
        """
        Cache the system parameter table into Redis at application startup.

        :param redis: Redis connection object
        :return:
        """
        session = SessionLocal()
        try:
            await ConfigService.init_cache_sys_config_services(session, redis)
        finally:
            # Same rationale as init_sys_dict: never leak the session.
            session.close()
|
232
ruoyi-fastapi-backend/config/get_scheduler.py
Normal file
232
ruoyi-fastapi-backend/config/get_scheduler.py
Normal file
@@ -0,0 +1,232 @@
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
|
||||
from apscheduler.jobstores.memory import MemoryJobStore
|
||||
from apscheduler.jobstores.redis import RedisJobStore
|
||||
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from apscheduler.events import EVENT_ALL
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
from config.database import engine, SQLALCHEMY_DATABASE_URL, SessionLocal
|
||||
from config.env import RedisConfig
|
||||
from module_admin.service.job_log_service import JobLogService, JobLogModel
|
||||
from module_admin.dao.job_dao import Session, JobDao
|
||||
from utils.log_util import logger
|
||||
import module_task
|
||||
|
||||
|
||||
# CronTrigger subclass accepting Quartz-style cron expressions
# (6 or 7 fields: second first, optional year last).
class MyCronTrigger(CronTrigger):
    @classmethod
    def from_crontab(cls, expr, timezone=None):
        """
        Build a trigger from a Quartz-style crontab expression.

        Field order: second minute hour day month week [year].

        :param expr: cron expression with 6 or 7 whitespace-separated fields
        :param timezone: optional timezone forwarded to CronTrigger
        :return: MyCronTrigger instance
        :raises ValueError: when the expression does not have 6 or 7 fields
        """
        values = expr.split()
        if len(values) != 6 and len(values) != 7:
            raise ValueError('Wrong number of fields; got {}, expected 6 or 7'.format(len(values)))

        second = values[0]
        minute = values[1]
        hour = values[2]
        # Day-of-month: '?' means "no specific day".
        if '?' in values[3]:
            day = None
        # NOTE(review): this branch inspects the *week* field (values[5]),
        # presumably mapping Quartz "nL" (last weekday n of the month) onto
        # APScheduler's "last X" day syntax — confirm against job data.
        elif 'L' in values[5]:
            day = f"last {values[5].replace('L', '')}"
        # 'nW': nearest working day to day n (computed for the current month).
        elif 'W' in values[3]:
            day = cls.__find_recent_workday(int(values[3].split('W')[0]))
        else:
            day = values[3].replace('L', 'last')
        month = values[4]
        # Week field: '?'/'L' mean unspecified; 'a#b' carries the
        # occurrence number b after the '#'.
        if '?' in values[5] or 'L' in values[5]:
            week = None
        elif '#' in values[5]:
            week = int(values[5].split('#')[1])
        else:
            week = values[5]
        # Quartz day-of-week is 1-based; APScheduler's is 0-based.
        if '#' in values[5]:
            day_of_week = int(values[5].split('#')[0]) - 1
        else:
            day_of_week = None
        year = values[6] if len(values) == 7 else None
        return cls(second=second, minute=minute, hour=hour, day=day, month=month, week=week,
                   day_of_week=day_of_week, year=year, timezone=timezone)

    @classmethod
    def __find_recent_workday(cls, day):
        """
        Return the day-of-month of the closest weekday at or before *day*
        in the current month, walking backwards over weekends.

        :param day: 1-based day of the current month
        :return: day-of-month (int) falling on Monday-Friday
        """
        now = datetime.now()
        date = datetime(now.year, now.month, day)
        if date.weekday() < 5:
            return date.day
        else:
            diff = 1
            while True:
                previous_day = date - timedelta(days=diff)
                if previous_day.weekday() < 5:
                    return previous_day.day
                else:
                    diff += 1
|
||||
|
||||
|
||||
# Job stores: in-memory by default, with optional persistence in the
# business database or in Redis (selected per job via its job_group).
job_stores = {
    'default': MemoryJobStore(),
    'sqlalchemy': SQLAlchemyJobStore(url=SQLALCHEMY_DATABASE_URL, engine=engine),
    'redis': RedisJobStore(
        host=RedisConfig.HOST,
        port=RedisConfig.PORT,
        username=RedisConfig.USERNAME,
        password=RedisConfig.PASSWORD,
        db=RedisConfig.DB
    )
}
# Thread pool for ordinary jobs, process pool for CPU-bound ones.
executors = {
    'default': ThreadPoolExecutor(20),
    'processpool': ProcessPoolExecutor(5)
}
job_defaults = {
    'coalesce': False,
    # Fixed typo: APScheduler's option is 'max_instances' (plural); the
    # previous 'max_instance' key was silently ignored by configure().
    'max_instances': 1
}
# Module-level scheduler shared by SchedulerUtil below.
scheduler = BackgroundScheduler()
scheduler.configure(jobstores=job_stores, executors=executors, job_defaults=job_defaults)
|
||||
|
||||
|
||||
class SchedulerUtil:
    """
    Helper methods around the module-level APScheduler instance.
    """

    @classmethod
    async def init_system_scheduler(cls, query_db: Session = None):
        """
        Start the scheduler and (re)register persisted jobs at startup.

        :param query_db: optional database session; a fresh one is created
                         when omitted. (The previous signature used
                         ``SessionLocal()`` as the default value, which opened
                         one session at import time and shared it across
                         calls — the mutable-default pitfall.)
        :return:
        """
        if query_db is None:
            query_db = SessionLocal()
        logger.info("开始启动定时任务...")
        scheduler.start()
        try:
            job_list = JobDao.get_job_list_for_scheduler(query_db)
            for item in job_list:
                # Re-register each persisted job, replacing any stale copy.
                query_job = cls.get_scheduler_job(job_id=str(item.job_id))
                if query_job:
                    cls.remove_scheduler_job(job_id=str(item.job_id))
                cls.add_scheduler_job(item)
        finally:
            # Always release the session, even if job loading fails.
            query_db.close()
        scheduler.add_listener(cls.scheduler_event_listener, EVENT_ALL)
        logger.info("系统初始定时任务加载成功")

    @classmethod
    async def close_system_scheduler(cls):
        """
        Shut the scheduler down when the application stops.

        :return:
        """
        scheduler.shutdown()
        logger.info("关闭定时任务成功")

    @classmethod
    def get_scheduler_job(cls, job_id):
        """
        Look a job up by its id.

        :param job_id: job id
        :return: the job object, or None when no such job exists
        """
        return scheduler.get_job(job_id=str(job_id))

    @classmethod
    def add_scheduler_job(cls, job_info):
        """
        Register a cron job from its database description.

        :param job_info: job description object
        :return:
        """
        # SECURITY: eval() executes whatever string is stored as
        # invoke_target — job definitions must be restricted to trusted
        # administrators.
        scheduler.add_job(
            func=eval(job_info.invoke_target),
            trigger=MyCronTrigger.from_crontab(job_info.cron_expression),
            args=job_info.job_args.split(',') if job_info.job_args else None,
            kwargs=json.loads(job_info.job_kwargs) if job_info.job_kwargs else None,
            id=str(job_info.job_id),
            name=job_info.job_name,
            # misfire_policy '3': effectively never treat a run as misfired.
            misfire_grace_time=1000000000000 if job_info.misfire_policy == '3' else None,
            # misfire_policy '2': coalesce queued runs into one.
            coalesce=True if job_info.misfire_policy == '2' else False,
            # concurrent '0': allow up to 3 overlapping instances.
            max_instances=3 if job_info.concurrent == '0' else 1,
            jobstore=job_info.job_group,
            executor=job_info.job_executor
        )

    @classmethod
    def execute_scheduler_job_once(cls, job_info):
        """
        Run the described job a single time, one second from now.

        :param job_info: job description object
        :return:
        """
        # SECURITY: see add_scheduler_job — invoke_target is eval()'d.
        scheduler.add_job(
            func=eval(job_info.invoke_target),
            trigger='date',
            run_date=datetime.now() + timedelta(seconds=1),
            args=job_info.job_args.split(',') if job_info.job_args else None,
            kwargs=json.loads(job_info.job_kwargs) if job_info.job_kwargs else None,
            id=str(job_info.job_id),
            name=job_info.job_name,
            misfire_grace_time=1000000000000 if job_info.misfire_policy == '3' else None,
            coalesce=True if job_info.misfire_policy == '2' else False,
            max_instances=3 if job_info.concurrent == '0' else 1,
            jobstore=job_info.job_group,
            executor=job_info.job_executor
        )

    @classmethod
    def remove_scheduler_job(cls, job_id):
        """
        Remove a job by its id.

        :param job_id: job id
        :return:
        """
        scheduler.remove_job(job_id=str(job_id))

    @classmethod
    def scheduler_event_listener(cls, event):
        """
        Persist a job-log row for every scheduler event that carries a job.

        :param event: APScheduler event object
        :return:
        """
        # Event class name doubles as the event-type label.
        event_type = event.__class__.__name__
        # '0' = success, '1' = failure.
        status = '0'
        exception_info = ''
        if event_type == 'JobExecutionEvent' and event.exception:
            exception_info = str(event.exception)
            status = '1'
        # The listener is registered with EVENT_ALL, but scheduler-level
        # events (startup, shutdown, jobstore added, ...) carry no job_id;
        # previously this raised AttributeError inside the listener.
        job_id = getattr(event, 'job_id', None)
        if job_id is None:
            return
        query_job = cls.get_scheduler_job(job_id=job_id)
        if query_job:
            query_job_info = query_job.__getstate__()
            # Job name
            job_name = query_job_info.get('name')
            # Job store alias the job lives in
            job_group = query_job._jobstore_alias
            # Executor alias
            job_executor = query_job_info.get('executor')
            # Target callable
            invoke_target = query_job_info.get('func')
            # Positional arguments re-joined into the stored CSV form
            job_args = ','.join(query_job_info.get('args'))
            # Keyword arguments serialized back to JSON
            job_kwargs = json.dumps(query_job_info.get('kwargs'))
            # Trigger description
            job_trigger = str(query_job_info.get('trigger'))
            # Human-readable log message
            job_message = f"事件类型: {event_type}, 任务ID: {job_id}, 任务名称: {job_name}, 执行于{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
            job_log = JobLogModel(
                jobName=job_name,
                jobGroup=job_group,
                jobExecutor=job_executor,
                invokeTarget=invoke_target,
                jobArgs=job_args,
                jobKwargs=job_kwargs,
                jobTrigger=job_trigger,
                jobMessage=job_message,
                status=status,
                exceptionInfo=exception_info
            )
            session = SessionLocal()
            try:
                JobLogService.add_job_log_services(session, job_log)
            finally:
                # Release the session even if writing the log row fails.
                session.close()
|
Reference in New Issue
Block a user