style: 使用ruff格式化config模块,优化导入
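
The hunks below are consistent with running ruff's import sorter and formatter over the config module. A minimal sketch of how such a pass might be invoked (the exact commands and any project-specific ruff settings, such as the single-quote preference, are assumptions and are not recorded in this commit):

    # assumed invocation: sort imports (rule set I), then apply the formatter
    ruff check config/ --select I --fix
    ruff format config/
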
@@ -11,6 +11,7 @@ class CommonConstant:
     UNIQUE: 校验是否唯一的返回标识(是)
     NOT_UNIQUE: 校验是否唯一的返回标识(否)
     """
+
     WWW = 'www.'
     HTTP = 'http://'
     HTTPS = 'https://'
@@ -43,6 +44,7 @@ class HttpStatusConstant:
     NOT_IMPLEMENTED: 接口未实现
     WARN: 系统警告消息
     """
+
     SUCCESS = 200
     CREATED = 201
     ACCEPTED = 202
@@ -74,6 +76,7 @@ class MenuConstant:
     PARENT_VIEW: ParentView组件标识
     INNER_LINK: InnerLink组件标识
     """
+
     TYPE_DIR = 'M'
     TYPE_MENU = 'C'
     TYPE_BUTTON = 'F'
@@ -5,8 +5,10 @@ from sqlalchemy.orm import DeclarativeBase
 from urllib.parse import quote_plus
 from config.env import DataBaseConfig

-ASYNC_SQLALCHEMY_DATABASE_URL = f"mysql+asyncmy://{DataBaseConfig.db_username}:{quote_plus(DataBaseConfig.db_password)}@" \
-                                f"{DataBaseConfig.db_host}:{DataBaseConfig.db_port}/{DataBaseConfig.db_database}"
+ASYNC_SQLALCHEMY_DATABASE_URL = (
+    f'mysql+asyncmy://{DataBaseConfig.db_username}:{quote_plus(DataBaseConfig.db_password)}@'
+    f'{DataBaseConfig.db_host}:{DataBaseConfig.db_port}/{DataBaseConfig.db_database}'
+)

 async_engine = create_async_engine(
     ASYNC_SQLALCHEMY_DATABASE_URL,
@@ -14,7 +16,7 @@ async_engine = create_async_engine(
     max_overflow=DataBaseConfig.db_max_overflow,
     pool_size=DataBaseConfig.db_pool_size,
     pool_recycle=DataBaseConfig.db_pool_recycle,
-    pool_timeout=DataBaseConfig.db_pool_timeout
+    pool_timeout=DataBaseConfig.db_pool_timeout,
 )
 AsyncSessionLocal = async_sessionmaker(autocommit=False, autoflush=False, bind=async_engine)

@@ -15,6 +15,7 @@ class BusinessType(Enum):
     GENCODE: 生成代码
     CLEAN: 清空数据
     """
+
     OTHER = 0
     INSERT = 1
     UPDATE = 2
@@ -1,15 +1,16 @@
+import argparse
 import os
 import sys
-import argparse
-from pydantic_settings import BaseSettings
-from functools import lru_cache
 from dotenv import load_dotenv
+from functools import lru_cache
+from pydantic_settings import BaseSettings


 class AppSettings(BaseSettings):
     """
     应用配置
     """
+
     app_env: str = 'dev'
     app_name: str = 'RuoYi-FasAPI'
     app_root_path: str = '/dev-api'
@@ -25,6 +26,7 @@ class JwtSettings(BaseSettings):
     """
     Jwt配置
     """
+
     jwt_secret_key: str = 'b01c66dc2c58dc6a0aabfe2144256be36226de378bf87f72c0c795dda67f4d55'
     jwt_algorithm: str = 'HS256'
     jwt_expire_minutes: int = 1440
@@ -35,6 +37,7 @@ class DataBaseSettings(BaseSettings):
     """
     数据库配置
     """
+
     db_host: str = '127.0.0.1'
     db_port: int = 3306
     db_username: str = 'root'
@@ -51,6 +54,7 @@ class RedisSettings(BaseSettings):
     """
     Redis配置
     """
+
     redis_host: str = '127.0.0.1'
     redis_port: int = 6379
     redis_username: str = ''
@@ -62,20 +66,38 @@ class UploadSettings:
     """
     上传配置
     """
+
     UPLOAD_PREFIX = '/profile'
     UPLOAD_PATH = 'vf_admin/upload_path'
     UPLOAD_MACHINE = 'A'
     DEFAULT_ALLOWED_EXTENSION = [
         # 图片
-        "bmp", "gif", "jpg", "jpeg", "png",
+        'bmp',
+        'gif',
+        'jpg',
+        'jpeg',
+        'png',
         # word excel powerpoint
-        "doc", "docx", "xls", "xlsx", "ppt", "pptx", "html", "htm", "txt",
+        'doc',
+        'docx',
+        'xls',
+        'xlsx',
+        'ppt',
+        'pptx',
+        'html',
+        'htm',
+        'txt',
         # 压缩文件
-        "rar", "zip", "gz", "bz2",
+        'rar',
+        'zip',
+        'gz',
+        'bz2',
         # 视频格式
-        "mp4", "avi", "rmvb",
+        'mp4',
+        'avi',
+        'rmvb',
         # pdf
-        "pdf"
+        'pdf',
     ]
     DOWNLOAD_PATH = 'vf_admin/download_path'

@@ -90,6 +112,7 @@ class CachePathConfig:
     """
     缓存目录配置
     """
+
     PATH = os.path.join(os.path.abspath(os.getcwd()), 'caches')
     PATHSTR = 'caches'

@@ -98,6 +121,7 @@ class RedisInitKeyConfig:
     """
     系统内置Redis键名
     """
+
     ACCESS_TOKEN = {'key': 'access_token', 'remark': '登录令牌信息'}
     SYS_DICT = {'key': 'sys_dict', 'remark': '数据字典'}
     SYS_CONFIG = {'key': 'sys_config', 'remark': '配置信息'}
@@ -1,4 +1,4 @@
-from config.database import *
+from config.database import async_engine, AsyncSessionLocal, Base
 from utils.log_util import logger


@@ -16,7 +16,7 @@ async def init_create_table():
     应用启动时初始化数据库连接
     :return:
     """
-    logger.info("初始化数据库连接...")
+    logger.info('初始化数据库连接...')
     async with async_engine.begin() as conn:
         await conn.run_sync(Base.metadata.create_all)
-    logger.info("数据库连接成功")
+    logger.info('数据库连接成功')
@@ -1,9 +1,9 @@
 from redis import asyncio as aioredis
 from redis.exceptions import AuthenticationError, TimeoutError, RedisError
-from module_admin.service.dict_service import DictDataService
-from module_admin.service.config_service import ConfigService
-from config.env import RedisConfig
 from config.database import AsyncSessionLocal
+from config.env import RedisConfig
+from module_admin.service.config_service import ConfigService
+from module_admin.service.dict_service import DictDataService
 from utils.log_util import logger


@@ -18,28 +18,28 @@ class RedisUtil:
         应用启动时初始化redis连接
         :return: Redis连接对象
         """
-        logger.info("开始连接redis...")
+        logger.info('开始连接redis...')
         redis = await aioredis.from_url(
-            url=f"redis://{RedisConfig.redis_host}",
+            url=f'redis://{RedisConfig.redis_host}',
             port=RedisConfig.redis_port,
             username=RedisConfig.redis_username,
             password=RedisConfig.redis_password,
             db=RedisConfig.redis_database,
-            encoding="utf-8",
-            decode_responses=True
+            encoding='utf-8',
+            decode_responses=True,
         )
         try:
             connection = await redis.ping()
             if connection:
-                logger.info("redis连接成功")
+                logger.info('redis连接成功')
             else:
-                logger.error("redis连接失败")
+                logger.error('redis连接失败')
         except AuthenticationError as e:
-            logger.error(f"redis用户名或密码错误,详细错误信息:{e}")
+            logger.error(f'redis用户名或密码错误,详细错误信息:{e}')
         except TimeoutError as e:
-            logger.error(f"redis连接超时,详细错误信息:{e}")
+            logger.error(f'redis连接超时,详细错误信息:{e}')
         except RedisError as e:
-            logger.error(f"redis连接错误,详细错误信息:{e}")
+            logger.error(f'redis连接错误,详细错误信息:{e}')
         return redis

     @classmethod
@@ -50,7 +50,7 @@ class RedisUtil:
         :return:
         """
         await app.state.redis.close()
-        logger.info("关闭redis连接成功")
+        logger.info('关闭redis连接成功')

     @classmethod
     async def init_sys_dict(cls, redis):
@@ -1,20 +1,21 @@
 import json
+from apscheduler.events import EVENT_ALL
+from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
 from apscheduler.schedulers.background import BackgroundScheduler
-from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
 from apscheduler.jobstores.memory import MemoryJobStore
 from apscheduler.jobstores.redis import RedisJobStore
-from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
+from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
 from apscheduler.triggers.cron import CronTrigger
-from apscheduler.events import EVENT_ALL
+from datetime import datetime, timedelta
 from sqlalchemy.engine import create_engine
 from sqlalchemy.orm import sessionmaker
-from datetime import datetime, timedelta
-from config.database import quote_plus, AsyncSessionLocal
+from config.database import AsyncSessionLocal, quote_plus
 from config.env import DataBaseConfig, RedisConfig
-from module_admin.service.job_log_service import JobLogService, JobLogModel
 from module_admin.dao.job_dao import JobDao
+from module_admin.entity.vo.job_vo import JobLogModel
+from module_admin.service.job_log_service import JobLogService
 from utils.log_util import logger
-import module_task
+import module_task  # noqa: F401


 # 重写Cron定时
@@ -48,8 +49,17 @@ class MyCronTrigger(CronTrigger):
         else:
             day_of_week = None
         year = values[6] if len(values) == 7 else None
-        return cls(second=second, minute=minute, hour=hour, day=day, month=month, week=week,
-                   day_of_week=day_of_week, year=year, timezone=timezone)
+        return cls(
+            second=second,
+            minute=minute,
+            hour=hour,
+            day=day,
+            month=month,
+            week=week,
+            day_of_week=day_of_week,
+            year=year,
+            timezone=timezone,
+        )

     @classmethod
     def __find_recent_workday(cls, day):
@@ -67,15 +77,17 @@ class MyCronTrigger(CronTrigger):
                 diff += 1


-SQLALCHEMY_DATABASE_URL = f"mysql+pymysql://{DataBaseConfig.db_username}:{quote_plus(DataBaseConfig.db_password)}@" \
-                          f"{DataBaseConfig.db_host}:{DataBaseConfig.db_port}/{DataBaseConfig.db_database}"
+SQLALCHEMY_DATABASE_URL = (
+    f'mysql+pymysql://{DataBaseConfig.db_username}:{quote_plus(DataBaseConfig.db_password)}@'
+    f'{DataBaseConfig.db_host}:{DataBaseConfig.db_port}/{DataBaseConfig.db_database}'
+)
 engine = create_engine(
     SQLALCHEMY_DATABASE_URL,
     echo=DataBaseConfig.db_echo,
     max_overflow=DataBaseConfig.db_max_overflow,
     pool_size=DataBaseConfig.db_pool_size,
     pool_recycle=DataBaseConfig.db_pool_recycle,
-    pool_timeout=DataBaseConfig.db_pool_timeout
+    pool_timeout=DataBaseConfig.db_pool_timeout,
 )
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 job_stores = {
@@ -87,18 +99,12 @@ job_stores = {
             port=RedisConfig.redis_port,
             username=RedisConfig.redis_username,
             password=RedisConfig.redis_password,
-            db=RedisConfig.redis_database
+            db=RedisConfig.redis_database,
         )
-    )
-}
-executors = {
-    'default': ThreadPoolExecutor(20),
-    'processpool': ProcessPoolExecutor(5)
-}
-job_defaults = {
-    'coalesce': False,
-    'max_instance': 1
+    ),
 }
+executors = {'default': ThreadPoolExecutor(20), 'processpool': ProcessPoolExecutor(5)}
+job_defaults = {'coalesce': False, 'max_instance': 1}
 scheduler = BackgroundScheduler()
 scheduler.configure(jobstores=job_stores, executors=executors, job_defaults=job_defaults)

@@ -114,7 +120,7 @@ class SchedulerUtil:
         应用启动时初始化定时任务
         :return:
         """
-        logger.info("开始启动定时任务...")
+        logger.info('开始启动定时任务...')
         scheduler.start()
         async with AsyncSessionLocal() as session:
             job_list = await JobDao.get_job_list_for_scheduler(session)
@@ -124,7 +130,7 @@ class SchedulerUtil:
                 cls.remove_scheduler_job(job_id=str(item.job_id))
             cls.add_scheduler_job(item)
         scheduler.add_listener(cls.scheduler_event_listener, EVENT_ALL)
-        logger.info("系统初始定时任务加载成功")
+        logger.info('系统初始定时任务加载成功')

     @classmethod
     async def close_system_scheduler(cls):
@@ -133,7 +139,7 @@ class SchedulerUtil:
         :return:
         """
         scheduler.shutdown()
-        logger.info("关闭定时任务成功")
+        logger.info('关闭定时任务成功')

     @classmethod
     def get_scheduler_job(cls, job_id):
@@ -164,7 +170,7 @@ class SchedulerUtil:
             coalesce=True if job_info.misfire_policy == '2' else False,
             max_instances=3 if job_info.concurrent == '0' else 1,
             jobstore=job_info.job_group,
-            executor=job_info.job_executor
+            executor=job_info.job_executor,
         )

     @classmethod
@@ -186,7 +192,7 @@ class SchedulerUtil:
             coalesce=True if job_info.misfire_policy == '2' else False,
             max_instances=3 if job_info.concurrent == '0' else 1,
             jobstore=job_info.job_group,
-            executor=job_info.job_executor
+            executor=job_info.job_executor,
         )

     @classmethod
@@ -239,7 +245,7 @@ class SchedulerUtil:
             jobMessage=job_message,
             status=status,
             exceptionInfo=exception_info,
-            createTime=datetime.now()
+            createTime=datetime.now(),
         )
         session = SessionLocal()
         JobLogService.add_job_log_services(session, job_log)