From 8e89d42343a8edc61416fc61b097516cb916e7ca Mon Sep 17 00:00:00 2001 From: wangruidong <940853815@qq.com> Date: Mon, 11 Dec 2023 18:06:22 +0800 Subject: [PATCH] =?UTF-8?q?perf:=20=E5=90=8C=E5=90=8D=E6=96=87=E4=BB=B6?= =?UTF-8?q?=E5=A4=84=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/ops/api/job.py | 30 ++++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/apps/ops/api/job.py b/apps/ops/api/job.py index 3bcb7393e..b3b421da4 100644 --- a/apps/ops/api/job.py +++ b/apps/ops/api/job.py @@ -98,34 +98,44 @@ class JobViewSet(OrgBulkModelViewSet): transaction.on_commit( lambda: run_ops_job_execution.apply_async((str(execution.id),), task_id=str(execution.id))) + @staticmethod + def get_same_filenames(files): + filename_set = set() + same_filenames = [] + for file in files: + filename = file.name + if filename in filename_set: + same_filenames.append(filename) + filename_set.add(filename) + return same_filenames + @action(methods=[POST], detail=False, serializer_class=FileSerializer, permission_classes=[IsValidUser, ], url_path='upload') def upload(self, request, *args, **kwargs): + uploaded_files = request.FILES.getlist('files') serializer = self.get_serializer(data=request.data) + if not serializer.is_valid(): msg = 'Upload data invalid: {}'.format(serializer.errors) return Response({'msg': msg}, status=400) - uploaded_files = request.FILES.getlist('files') + + same_filenames = self.get_same_filenames(uploaded_files) + if same_filenames: + return Response({'msg': _("Duplicate file exists")}, status=400) + job_id = request.data.get('job_id', '') job = get_object_or_404(Job, pk=job_id) job_args = json.loads(job.args) src_path_info = [] - filename_set = set() - same_filenames = [] - upload_file_dir = safe_join(settings.DATA_DIR, 'job_upload_file') + upload_file_dir = safe_join(settings.DATA_DIR, 'job_upload_file', job_id) for uploaded_file in uploaded_files: filename = 
uploaded_file.name - saved_path = safe_join(upload_file_dir, f'{job_id}/{filename}') + saved_path = safe_join(upload_file_dir, f'{filename}') os.makedirs(os.path.dirname(saved_path), exist_ok=True) with open(saved_path, 'wb+') as destination: for chunk in uploaded_file.chunks(): destination.write(chunk) - if filename in filename_set: - same_filenames.append(filename) - filename_set.add(filename) src_path_info.append({'filename': filename, 'md5': get_file_md5(saved_path)}) - if same_filenames: - return Response({'msg': _("Duplicate file exists")}, status=400) job_args['src_path_info'] = src_path_info job.args = json.dumps(job_args) job.save()