Merge remote-tracking branch 'origin/develop' into develop

This commit is contained in:
李强
2024-08-29 08:33:31 +08:00
21 changed files with 768 additions and 530 deletions

View File

@@ -0,0 +1,5 @@
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app

# Public API of this package: only the Celery app instance is re-exported.
__all__ = ('celery_app',)

View File

@@ -15,7 +15,7 @@ else:
from celery import Celery
app = Celery(f"application")
app.config_from_object('django.conf:settings')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
platforms.C_FORCE_ROOT = True

View File

@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
import pypinyin
from django.db.models import Q
from rest_framework import serializers
@@ -15,6 +16,11 @@ class AreaSerializer(CustomModelSerializer):
"""
pcode_count = serializers.SerializerMethodField(read_only=True)
hasChild = serializers.SerializerMethodField()
pcode_info = serializers.SerializerMethodField()
def get_pcode_info(self, instance):
    """Return the parent area's ``name``/``code`` pairs for *instance*.

    Looks up Area rows whose ``code`` equals this instance's
    ``pcode_id`` and exposes only the ``name`` and ``code`` columns
    (serialized as a list of dicts by DRF's encoder).
    """
    parent_areas = Area.objects.filter(code=instance.pcode_id)
    return parent_areas.values("name", "code")
def get_pcode_count(self, instance: Area):
    """Count the direct child areas whose ``pcode`` points at *instance*."""
    children = Area.objects.filter(pcode=instance)
    return children.count()
@@ -36,6 +42,18 @@ class AreaCreateUpdateSerializer(CustomModelSerializer):
地区管理 创建/更新时的序列化器
"""
def to_internal_value(self, data):
    """Derive pinyin/initials/level fields on the payload before validation.

    Adds to the incoming ``data``:
      * ``pinyin``   -- full NORMAL-style pinyin of ``name``
      * ``initials`` -- upper-cased first pinyin letter, ``"#"`` fallback
      * ``level``    -- parent level + 1 when ``pcode`` is given, else 1

    When ``pcode`` is supplied it is resolved to the parent ``Area`` and
    replaced by that parent's ``code``.

    Raises:
        serializers.ValidationError: if ``pcode`` does not match any Area
            (previously an unhandled ``DoesNotExist`` -> HTTP 500).
    """
    # .get() avoids a KeyError (HTTP 500) on partial payloads without "name".
    name = data.get("name") or ""
    pinyin = ''.join(
        ''.join(part) for part in pypinyin.pinyin(name, style=pypinyin.NORMAL)
    )
    data["level"] = 1
    data["pinyin"] = pinyin
    data["initials"] = pinyin[0].upper() if pinyin else "#"
    pcode = data.get("pcode")
    if pcode:
        try:
            parent = Area.objects.get(pk=pcode)
        except Area.DoesNotExist:
            # Surface a 400 validation error instead of a server error.
            raise serializers.ValidationError({"pcode": "父级地区不存在"})
        data["pcode"] = parent.code
        data["level"] = parent.level + 1
    return super().to_internal_value(data)
class Meta:
model = Area
fields = '__all__'
@@ -52,20 +70,28 @@ class AreaViewSet(CustomModelViewSet, FieldPermissionMixin):
"""
queryset = Area.objects.all()
serializer_class = AreaSerializer
create_serializer_class = AreaCreateUpdateSerializer
update_serializer_class = AreaCreateUpdateSerializer
extra_filter_class = []
# NOTE(review): this span is rendered commit-diff residue — it appears to
# interleave the removed `get_queryset` implementation with the new `list`
# override (no +/- markers survived rendering), so the lines below are NOT
# one runnable method as shown. Annotations below are best-effort — confirm
# against the actual file.
def get_queryset(self):
def list(self, request, *args, **kwargs):
# Query params are made mutable so pagination keys can be removed/overridden.
self.request.query_params._mutable = True
params = self.request.query_params
# --- presumably the REMOVED implementation: strip page/limit, then filter
# --- by pcode when present — TODO confirm against the repository diff.
pcode = params.get('pcode', None)
page = params.get('page', None)
limit = params.get('limit', None)
if page:
del params['page']
if limit:
del params['limit']
if params and pcode:
queryset = self.queryset.filter(enable=True, pcode=pcode)
else:
# --- presumably the ADDED implementation: any unknown filter param returns
# --- all enabled areas; otherwise list children of pcode, or level-1 roots.
known_params = {'page', 'limit', 'pcode'}
# Check via set membership whether any unknown query parameter is present.
other_params_exist = any(param not in known_params for param in params)
if other_params_exist:
queryset = self.queryset.filter(enable=True)
return queryset
else:
pcode = params.get('pcode', None)
# Force a large page size so one tree level comes back in a single page.
params['limit'] = 999
if params and pcode:
queryset = self.queryset.filter(enable=True, pcode=pcode)
else:
queryset = self.queryset.filter(enable=True, level=1)
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True, request=request)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True, request=request)
return SuccessResponse(data=serializer.data, msg="获取成功")

View File

@@ -124,6 +124,7 @@ class LoginSerializer(TokenObtainPairSerializer):
user.is_active = False
user.save()
raise CustomValidationError("账号已被锁定,联系管理员解锁")
user.save()
count = 5 - user.login_error_count
raise CustomValidationError(f"账号/密码错误;重试{count}次后将被锁定~")

View File

@@ -1,4 +1,7 @@
# -*- coding: utf-8 -*-
from itertools import groupby
from django.db.models import F
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
@@ -35,4 +38,34 @@ class FieldPermissionMixin:
data= FieldPermission.objects.filter(
field__model=model['model'],role__in=roles
).values( 'is_create', 'is_query', 'is_update',field_name=F('field__field_name'))
"""
合并权限
这段代码首先根据 field_name 对列表进行排序,
然后使用 groupby 按 field_name 进行分组。
对于每个组,它创建一个新的字典 merged
并遍历组中的每个字典,将布尔值字段使用逻辑或(or)操作符进行合并;如果 merged 中还没有该字段,则默认为 False,
其他字段(如 field_name)则直接取分组的关键字(即 key)。
"""
# 使用field_name对列表进行分组, # groupby 需要先对列表进行排序,因为它只能对连续相同的元素进行分组。
grouped = groupby(sorted(list(data), key=lambda x: x['field_name']), key=lambda x: x['field_name'])
data = []
# 遍历分组,合并权限
for key, group in grouped:
# 初始化一个空字典来存储合并后的结果
merged = {}
for item in group:
# 合并权限, True值优先
merged['is_create'] = merged.get('is_create', False) or item['is_create']
merged['is_query'] = merged.get('is_query', False) or item['is_query']
merged['is_update'] = merged.get('is_update', False) or item['is_update']
merged['field_name'] = key
data.append(merged)
return DetailResponse(data=data)

View File

@@ -37,11 +37,11 @@ class CoreModelFilterBankend(BaseFilterBackend):
if any([create_datetime_after, create_datetime_before, update_datetime_after, update_datetime_before]):
create_filter = Q()
if create_datetime_after and create_datetime_before:
create_filter &= Q(create_datetime__gte=create_datetime_after) & Q(create_datetime__lte=create_datetime_before)
create_filter &= Q(create_datetime__gte=create_datetime_after) & Q(create_datetime__lte=f'{create_datetime_before} 23:59:59')
elif create_datetime_after:
create_filter &= Q(create_datetime__gte=create_datetime_after)
elif create_datetime_before:
create_filter &= Q(create_datetime__lte=create_datetime_before)
create_filter &= Q(create_datetime__lte=f'{create_datetime_before} 23:59:59')
# 更新时间范围过滤条件
update_filter = Q()

View File

@@ -32,6 +32,14 @@ class ApiLoggingMiddleware(MiddlewareMixin):
request.request_path = get_request_path(request)
def __handle_response(self, request, response):
# 判断有无 log_id 属性(使用 ALL 记录时会出现此情况)
if request.request_data.get('log_id', None) is None:
return
# 移除log_id不记录此ID
log_id = request.request_data.pop('log_id')
# request_data,request_ip由PermissionInterfaceMiddleware中间件中添加的属性
body = getattr(request, 'request_data', {})
# 请求含有password则用*替换掉(暂时先用于所有接口的password请求参数)
@@ -60,7 +68,7 @@ class ApiLoggingMiddleware(MiddlewareMixin):
'status': True if response.data.get('code') in [2000, ] else False,
'json_result': {"code": response.data.get('code'), "msg": response.data.get('msg')},
}
operation_log, creat = OperationLog.objects.update_or_create(defaults=info, id=self.operation_log_id)
operation_log, creat = OperationLog.objects.update_or_create(defaults=info, id=log_id)
if not operation_log.request_modular and settings.API_MODEL_MAP.get(request.request_path, None):
operation_log.request_modular = settings.API_MODEL_MAP[request.request_path]
operation_log.save()
@@ -71,7 +79,8 @@ class ApiLoggingMiddleware(MiddlewareMixin):
if self.methods == 'ALL' or request.method in self.methods:
log = OperationLog(request_modular=get_verbose_name(view_func.cls.queryset))
log.save()
self.operation_log_id = log.id
# self.operation_log_id = log.id
request.request_data['log_id'] = log.id
return

View File

@@ -216,9 +216,13 @@ def get_all_models_objects(model_name=None):
def get_model_from_app(app_name):
"""获取模型里的字段"""
model_module = import_module(app_name + '.models')
exclude_models = getattr(model_module, 'exclude_models', [])
filter_model = [
getattr(model_module, item) for item in dir(model_module)
if item != 'CoreModel' and issubclass(getattr(model_module, item).__class__, models.base.ModelBase)
value for key, value in model_module.__dict__.items()
if key != 'CoreModel'
and isinstance(value, type)
and issubclass(value, models.Model)
and key not in exclude_models
]
model_list = []
for model in filter_model: