diff --git a/backend_entrypoint.sh b/backend_entrypoint.sh
index bac37c76e5be..5b7f24ebe7e4 100755
--- a/backend_entrypoint.sh
+++ b/backend_entrypoint.sh
@@ -11,6 +11,10 @@ wait_for_db() {
wait-for-it "${CVAT_POSTGRES_HOST}:${CVAT_POSTGRES_PORT:-5432}" -t 0
}
+wait_for_redis_inmem() {
+ wait-for-it "${CVAT_REDIS_INMEM_HOST}:${CVAT_REDIS_INMEM_PORT:-6379}" -t 0
+}
+
cmd_bash() {
exec bash "$@"
}
@@ -19,7 +23,8 @@ cmd_init() {
wait_for_db
~/manage.py migrate
- wait-for-it "${CVAT_REDIS_INMEM_HOST}:${CVAT_REDIS_INMEM_PORT:-6379}" -t 0
+ wait_for_redis_inmem
+ ~/manage.py migrateredis
~/manage.py syncperiodicjobs
}
@@ -39,6 +44,12 @@ cmd_run() {
sleep 10
done
+ wait_for_redis_inmem
+ echo "waiting for Redis migrations to complete..."
+ while ! ~/manage.py migrateredis --check; do
+ sleep 10
+ done
+
exec supervisord -c "supervisord/$1.conf"
}
diff --git a/cvat-canvas/src/typescript/canvasView.ts b/cvat-canvas/src/typescript/canvasView.ts
index 60d51c369aff..1bf30e34ad9b 100644
--- a/cvat-canvas/src/typescript/canvasView.ts
+++ b/cvat-canvas/src/typescript/canvasView.ts
@@ -404,6 +404,32 @@ export class CanvasViewImpl implements CanvasView, Listener {
this.canvas.style.cursor = '';
this.mode = Mode.IDLE;
if (state && points) {
+ // we need to reset "updated" and set "points" to an empty array,
+ // as this information is used to detect "updated" objects in the diff logic during canvas objects setup
+ // if the update was rejected somewhere for any reason, we must reset the view inside this logic
+
+ // there is one more, deeper issue:
+ // in some cases the canvas updates the drawn views and then sends a request
+ // to update the internal CVAT state (e.g. drag, resize)
+ // in other cases, however, it only sends a request to update the internal CVAT state
+ // (e.g. remove point, edit polygon/polyline)
+ // if the object view was not changed by the canvas and the points were accepted as is, without any changes,
+ // the view will not be updated during objects setup if we just set the points as is here
+ // that is why we need to set points to an empty array (something that can't normally come from CVAT)
+ // I do not think this can be easily fixed now, however in the future we should refactor this code
+ if (Number.isInteger(state.parentID)) {
+ const { elements } = this.drawnStates[state.parentID];
+ const drawnElement = elements.find((el) => el.clientID === state.clientID);
+ drawnElement.updated = 0;
+ drawnElement.points = [];
+
+ this.drawnStates[state.parentID].updated = 0;
+ this.drawnStates[state.parentID].points = [];
+ } else {
+ this.drawnStates[state.clientID].updated = 0;
+ this.drawnStates[state.clientID].points = [];
+ }
+
const event: CustomEvent = new CustomEvent('canvas.edited', {
bubbles: false,
cancelable: true,
diff --git a/cvat-canvas/src/typescript/consts.ts b/cvat-canvas/src/typescript/consts.ts
index 3ea75dbb557d..1e39c1316879 100644
--- a/cvat-canvas/src/typescript/consts.ts
+++ b/cvat-canvas/src/typescript/consts.ts
@@ -1,4 +1,5 @@
// Copyright (C) 2019-2022 Intel Corporation
+// Copyright (C) CVAT.ai Corporation
//
// SPDX-License-Identifier: MIT
@@ -6,8 +7,7 @@ const BASE_STROKE_WIDTH = 1.25;
const BASE_GRID_WIDTH = 2;
const BASE_POINT_SIZE = 4;
const TEXT_MARGIN = 10;
-const AREA_THRESHOLD = 9;
-const SIZE_THRESHOLD = 3;
+const SIZE_THRESHOLD = 1;
const POINTS_STROKE_WIDTH = 1;
const POINTS_SELECTED_STROKE_WIDTH = 4;
const MIN_EDGE_LENGTH = 3;
@@ -36,7 +36,6 @@ export default {
BASE_GRID_WIDTH,
BASE_POINT_SIZE,
TEXT_MARGIN,
- AREA_THRESHOLD,
SIZE_THRESHOLD,
POINTS_STROKE_WIDTH,
POINTS_SELECTED_STROKE_WIDTH,
diff --git a/cvat-canvas/src/typescript/drawHandler.ts b/cvat-canvas/src/typescript/drawHandler.ts
index d54117c72957..ebee54109a04 100644
--- a/cvat-canvas/src/typescript/drawHandler.ts
+++ b/cvat-canvas/src/typescript/drawHandler.ts
@@ -47,16 +47,18 @@ interface FinalCoordinates {
function checkConstraint(shapeType: string, points: number[], box: Box | null = null): boolean {
if (shapeType === 'rectangle') {
const [xtl, ytl, xbr, ybr] = points;
- return (xbr - xtl) * (ybr - ytl) >= consts.AREA_THRESHOLD;
+ const [width, height] = [xbr - xtl, ybr - ytl];
+ return width >= consts.SIZE_THRESHOLD && height >= consts.SIZE_THRESHOLD;
}
if (shapeType === 'polygon') {
- return (box.xbr - box.xtl) * (box.ybr - box.ytl) >= consts.AREA_THRESHOLD && points.length >= 3 * 2;
+ const [width, height] = [box.xbr - box.xtl, box.ybr - box.ytl];
+ return (width >= consts.SIZE_THRESHOLD || height >= consts.SIZE_THRESHOLD) && points.length >= 3 * 2;
}
if (shapeType === 'polyline') {
- return (box.xbr - box.xtl >= consts.SIZE_THRESHOLD ||
- box.ybr - box.ytl >= consts.SIZE_THRESHOLD) && points.length >= 2 * 2;
+ const [width, height] = [box.xbr - box.xtl, box.ybr - box.ytl];
+ return (width >= consts.SIZE_THRESHOLD || height >= consts.SIZE_THRESHOLD) && points.length >= 2 * 2;
}
if (shapeType === 'points') {
@@ -64,18 +66,22 @@ function checkConstraint(shapeType: string, points: number[], box: Box | null =
}
if (shapeType === 'ellipse') {
- const [rx, ry] = [points[2] - points[0], points[1] - points[3]];
- return rx * ry * Math.PI >= consts.AREA_THRESHOLD;
+ const [width, height] = [(points[2] - points[0]) * 2, (points[1] - points[3]) * 2];
+ return width >= consts.SIZE_THRESHOLD && height >= consts.SIZE_THRESHOLD;
}
if (shapeType === 'cuboid') {
return points.length === 4 * 2 || points.length === 8 * 2 ||
- (points.length === 2 * 2 && (points[2] - points[0]) * (points[3] - points[1]) >= consts.AREA_THRESHOLD);
+ (points.length === 2 * 2 &&
+ (points[2] - points[0]) >= consts.SIZE_THRESHOLD &&
+ (points[3] - points[1]) >= consts.SIZE_THRESHOLD
+ );
}
if (shapeType === 'skeleton') {
const [xtl, ytl, xbr, ybr] = points;
- return (xbr - xtl >= 1 || ybr - ytl >= 1);
+ const [width, height] = [xbr - xtl, ybr - ytl];
+ return width >= consts.SIZE_THRESHOLD || height >= consts.SIZE_THRESHOLD;
}
return false;
diff --git a/cvat-canvas/src/typescript/shared.ts b/cvat-canvas/src/typescript/shared.ts
index 9e210067e7d7..bde8cdbb8671 100644
--- a/cvat-canvas/src/typescript/shared.ts
+++ b/cvat-canvas/src/typescript/shared.ts
@@ -100,7 +100,7 @@ export function displayShapeSize(shapesContainer: SVG.Container, textContainer:
.fill('white')
.addClass('cvat_canvas_text'),
update(shape: SVG.Shape): void {
- let text = `${Math.round(shape.width())}x${Math.round(shape.height())}px`;
+ let text = `${Math.floor(shape.width())}x${Math.floor(shape.height())}px`;
if (shape.type === 'rect' || shape.type === 'ellipse') {
let rotation = shape.transform().rotation || 0;
// be sure, that rotation in range [0; 360]
diff --git a/cvat-cli/requirements/base.txt b/cvat-cli/requirements/base.txt
index 49a9838f2d76..2152fa5ae4ed 100644
--- a/cvat-cli/requirements/base.txt
+++ b/cvat-cli/requirements/base.txt
@@ -1,4 +1,4 @@
-cvat-sdk==2.27.0
+cvat-sdk==2.28.0
attrs>=24.2.0
Pillow>=10.3.0
diff --git a/cvat-cli/src/cvat_cli/version.py b/cvat-cli/src/cvat_cli/version.py
index 7a2e04f16f6b..a30944db8122 100644
--- a/cvat-cli/src/cvat_cli/version.py
+++ b/cvat-cli/src/cvat_cli/version.py
@@ -1 +1 @@
-VERSION = "2.27.0"
+VERSION = "2.28.0"
diff --git a/cvat-core/src/object-utils.ts b/cvat-core/src/object-utils.ts
index 12712032dca5..06a90a958924 100644
--- a/cvat-core/src/object-utils.ts
+++ b/cvat-core/src/object-utils.ts
@@ -67,45 +67,48 @@ export function findAngleDiff(rightAngle: number, leftAngle: number): number {
}
export function checkShapeArea(shapeType: ShapeType, points: number[]): boolean {
- const MIN_SHAPE_LENGTH = 3;
- const MIN_SHAPE_AREA = 9;
- const MIN_MASK_SHAPE_AREA = 1;
+ const MIN_SHAPE_SIZE = 1;
if (shapeType === ShapeType.POINTS) {
return true;
}
+ let width = 0;
+ let height = 0;
+
if (shapeType === ShapeType.MASK) {
const [left, top, right, bottom] = points.slice(-4);
- const area = (right - left + 1) * (bottom - top + 1);
- return area >= MIN_MASK_SHAPE_AREA;
- }
-
- if (shapeType === ShapeType.ELLIPSE) {
+ [width, height] = [right - left + 1, bottom - top + 1];
+ } else if (shapeType === ShapeType.RECTANGLE) {
+ const [xtl, ytl, xbr, ybr] = points;
+ [width, height] = [xbr - xtl, ybr - ytl];
+ } else if (shapeType === ShapeType.ELLIPSE) {
const [cx, cy, rightX, topY] = points;
- const [rx, ry] = [rightX - cx, cy - topY];
- return rx * ry * Math.PI > MIN_SHAPE_AREA;
- }
-
- let xmin = Number.MAX_SAFE_INTEGER;
- let xmax = Number.MIN_SAFE_INTEGER;
- let ymin = Number.MAX_SAFE_INTEGER;
- let ymax = Number.MIN_SAFE_INTEGER;
+ [width, height] = [(rightX - cx) * 2, (cy - topY) * 2];
+ } else {
+ // polygon, polyline, cuboid, skeleton
+ let xmin = Number.MAX_SAFE_INTEGER;
+ let xmax = Number.MIN_SAFE_INTEGER;
+ let ymin = Number.MAX_SAFE_INTEGER;
+ let ymax = Number.MIN_SAFE_INTEGER;
+
+ for (let i = 0; i < points.length - 1; i += 2) {
+ xmin = Math.min(xmin, points[i]);
+ xmax = Math.max(xmax, points[i]);
+ ymin = Math.min(ymin, points[i + 1]);
+ ymax = Math.max(ymax, points[i + 1]);
+ }
- for (let i = 0; i < points.length - 1; i += 2) {
- xmin = Math.min(xmin, points[i]);
- xmax = Math.max(xmax, points[i]);
- ymin = Math.min(ymin, points[i + 1]);
- ymax = Math.max(ymax, points[i + 1]);
- }
+ if ([ShapeType.POLYLINE, ShapeType.SKELETON, ShapeType.POLYGON].includes(shapeType)) {
+ // for polyshapes it is enough that at least one dimension is large enough
+ // (in corner cases a skeleton may degenerate into a regular polyshape)
+ return Math.max(xmax - xmin, ymax - ymin) >= MIN_SHAPE_SIZE;
+ }
- if (shapeType === ShapeType.POLYLINE) {
- const length = Math.max(xmax - xmin, ymax - ymin);
- return length >= MIN_SHAPE_LENGTH;
+ [width, height] = [xmax - xmin, ymax - ymin];
}
- const area = (xmax - xmin) * (ymax - ymin);
- return area >= MIN_SHAPE_AREA;
+ return width >= MIN_SHAPE_SIZE && height >= MIN_SHAPE_SIZE;
}
export function rotatePoint(x: number, y: number, angle: number, cx = 0, cy = 0): number[] {
diff --git a/cvat-sdk/gen/generate.sh b/cvat-sdk/gen/generate.sh
index 27f38bec5a63..1029239d0ee5 100755
--- a/cvat-sdk/gen/generate.sh
+++ b/cvat-sdk/gen/generate.sh
@@ -8,7 +8,7 @@ set -e
GENERATOR_VERSION="v6.0.1"
-VERSION="2.27.0"
+VERSION="2.28.0"
LIB_NAME="cvat_sdk"
LAYER1_LIB_NAME="${LIB_NAME}/api_client"
DST_DIR="$(cd "$(dirname -- "$0")/.." && pwd)"
diff --git a/cvat-ui/package.json b/cvat-ui/package.json
index 5a754af28597..4cff87a0ee14 100644
--- a/cvat-ui/package.json
+++ b/cvat-ui/package.json
@@ -1,6 +1,6 @@
{
"name": "cvat-ui",
- "version": "2.27.0",
+ "version": "2.28.0",
"description": "CVAT single-page application",
"main": "src/index.tsx",
"scripts": {
diff --git a/cvat-ui/src/components/header/settings-modal/workspace-settings.tsx b/cvat-ui/src/components/header/settings-modal/workspace-settings.tsx
index e624e7c4adf0..c659daa8ac0e 100644
--- a/cvat-ui/src/components/header/settings-modal/workspace-settings.tsx
+++ b/cvat-ui/src/components/header/settings-modal/workspace-settings.tsx
@@ -80,8 +80,8 @@ function WorkspaceSettingsComponent(props: Props): JSX.Element {
const maxAutoSaveInterval = 60;
const minAAMMargin = 0;
const maxAAMMargin = 1000;
- const minControlPointsSize = 4;
- const maxControlPointsSize = 8;
+ const minControlPointsSize = 2;
+ const maxControlPointsSize = 10;
return (
diff --git a/cvat/__init__.py b/cvat/__init__.py
index 2b802df76213..525a12be9a35 100644
--- a/cvat/__init__.py
+++ b/cvat/__init__.py
@@ -4,6 +4,6 @@
from cvat.utils.version import get_version
-VERSION = (2, 27, 0, "final", 0)
+VERSION = (2, 28, 0, "final", 0)
__version__ = get_version(VERSION)
diff --git a/cvat/apps/engine/redis_migrations/001_cleanup_scheduled_jobs.py b/cvat/apps/engine/redis_migrations/001_cleanup_scheduled_jobs.py
new file mode 100644
index 000000000000..39276bb03c70
--- /dev/null
+++ b/cvat/apps/engine/redis_migrations/001_cleanup_scheduled_jobs.py
@@ -0,0 +1,20 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
+
+import django_rq
+from django.conf import settings
+from rq_scheduler import Scheduler
+
+from cvat.apps.redis_handler.redis_migrations import BaseMigration
+
+
+class Migration(BaseMigration):
+ @classmethod
+ def run(cls):
+ scheduler: Scheduler = django_rq.get_scheduler(settings.CVAT_QUEUES.EXPORT_DATA.value)
+
+ for job in scheduler.get_jobs():
+ if job.func_name == "cvat.apps.dataset_manager.views.clear_export_cache":
+ scheduler.cancel(job)
+ job.delete()
diff --git a/cvat/apps/engine/redis_migrations/__init__.py b/cvat/apps/engine/redis_migrations/__init__.py
new file mode 100644
index 000000000000..fea87fe021d6
--- /dev/null
+++ b/cvat/apps/engine/redis_migrations/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
diff --git a/cvat/apps/engine/serializers.py b/cvat/apps/engine/serializers.py
index 1d1124661f49..ac479b84003f 100644
--- a/cvat/apps/engine/serializers.py
+++ b/cvat/apps/engine/serializers.py
@@ -53,6 +53,7 @@
reverse,
take_by,
)
+from utils.dataset_manifest import ImageManifestManager
slogger = ServerLogManager(__name__)
@@ -1041,6 +1042,8 @@ def update(self, instance: models.Job, validated_data: dict[str, Any]) -> models
f"Honeypots cannot exist in {models.JobType.GROUND_TRUTH} jobs"
)
+ assert not hasattr(db_data, 'video')
+
frame_step = db_data.get_frame_step()
def _to_rel_frame(abs_frame: int) -> int:
@@ -1179,6 +1182,12 @@ def _to_abs_frame(rel_frame: int) -> int:
# Remove annotations on changed validation frames
self._clear_annotations_on_frames(db_segment, updated_honeypots)
+ # Update manifest
+ manifest_path = db_data.get_manifest_path()
+ if os.path.isfile(manifest_path):
+ manifest = ImageManifestManager(manifest_path)
+ manifest.reorder([db_frame.path for db_frame in db_frames.values()])
+
# Update chunks
job_frame_provider = JobFrameProvider(db_job)
updated_segment_chunk_ids = set(
@@ -1435,6 +1444,11 @@ def validate(self, attrs):
@transaction.atomic
def update(self, instance: models.Task, validated_data: dict[str, Any]) -> models.Task:
+ # FIXME: this operation is not atomic and is not protected from race conditions
+ # (like many others, in fact). Currently, it's up to the user to ensure no parallel
+ # calls happen. It also affects any image access, including exports with images, backups,
+ # automatic annotation, chunk downloading, etc.
+
db_validation_layout: models.ValidationLayout | None = (
getattr(instance.data, 'validation_layout', None)
)
@@ -1475,6 +1489,8 @@ def update(self, instance: models.Task, validated_data: dict[str, Any]) -> model
if not frame_selection_method:
return instance
+ assert not hasattr(instance.data, 'video')
+
# Populate the prefetch cache for required objects
prefetch_related_objects([instance],
Prefetch('data__images', queryset=models.Image.objects.order_by('frame')),
@@ -1655,6 +1671,12 @@ def _update_frames_in_bulk(
models.RelatedFile.images.through.objects.bulk_create(new_m2m_objects, batch_size=1000)
+ # Update manifest if present
+ manifest_path = db_task.data.get_manifest_path()
+ if os.path.isfile(manifest_path):
+ manifest = ImageManifestManager(manifest_path)
+ manifest.reorder([db_frame.path for db_frame in bulk_context.all_db_frames.values()])
+
def _clear_annotations_on_frames(self, db_task: models.Task, frames: Sequence[int]):
models.clear_annotations_on_frames_in_honeypot_task(db_task, frames=frames)
diff --git a/cvat/apps/engine/views.py b/cvat/apps/engine/views.py
index 084945e7681d..b16fe71d33f6 100644
--- a/cvat/apps/engine/views.py
+++ b/cvat/apps/engine/views.py
@@ -1821,6 +1821,11 @@ def preview(self, request, pk):
@extend_schema(
methods=["PATCH"],
summary="Allows updating current validation configuration",
+ description=textwrap.dedent("""
+ WARNING: this operation is not protected from race conditions.
+ It's up to the user to ensure no parallel calls to this operation happen.
+ It affects image access, including exports with images, backups, chunk downloading etc.
+ """),
request=TaskValidationLayoutWriteSerializer,
responses={
'200': OpenApiResponse(TaskValidationLayoutReadSerializer),
@@ -2427,6 +2432,11 @@ def preview(self, request, pk):
@extend_schema(
methods=["PATCH"],
summary="Allows updating current validation configuration",
+ description=textwrap.dedent("""
+ WARNING: this operation is not protected from race conditions.
+ It's up to the user to ensure no parallel calls to this operation happen.
+ It affects image access, including exports with images, backups, chunk downloading etc.
+ """),
request=JobValidationLayoutWriteSerializer,
responses={
'200': OpenApiResponse(JobValidationLayoutReadSerializer),
diff --git a/cvat/apps/redis_handler/__init__.py b/cvat/apps/redis_handler/__init__.py
new file mode 100644
index 000000000000..fea87fe021d6
--- /dev/null
+++ b/cvat/apps/redis_handler/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
diff --git a/cvat/apps/redis_handler/apps.py b/cvat/apps/redis_handler/apps.py
new file mode 100644
index 000000000000..a00543165e7f
--- /dev/null
+++ b/cvat/apps/redis_handler/apps.py
@@ -0,0 +1,10 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
+
+
+from django.apps import AppConfig
+
+
+class RedisHandlerConfig(AppConfig):
+ name = "cvat.apps.redis_handler"
diff --git a/cvat/apps/redis_handler/management/__init__.py b/cvat/apps/redis_handler/management/__init__.py
new file mode 100644
index 000000000000..fea87fe021d6
--- /dev/null
+++ b/cvat/apps/redis_handler/management/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
diff --git a/cvat/apps/redis_handler/management/commands/__init__.py b/cvat/apps/redis_handler/management/commands/__init__.py
new file mode 100644
index 000000000000..fea87fe021d6
--- /dev/null
+++ b/cvat/apps/redis_handler/management/commands/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
diff --git a/cvat/apps/redis_handler/management/commands/migrateredis.py b/cvat/apps/redis_handler/management/commands/migrateredis.py
new file mode 100644
index 000000000000..e29a8d74f0ac
--- /dev/null
+++ b/cvat/apps/redis_handler/management/commands/migrateredis.py
@@ -0,0 +1,69 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
+
+import sys
+import traceback
+from argparse import ArgumentParser
+
+from django.conf import settings
+from django.core.management.base import BaseCommand, CommandError
+from redis import Redis
+
+from cvat.apps.redis_handler.migration_loader import AppliedMigration, MigrationLoader
+
+
+class Command(BaseCommand):
+ help = "Applies Redis migrations and records them in the database"
+
+ def add_arguments(self, parser: ArgumentParser) -> None:
+ parser.add_argument(
+ "--check",
+ action="store_true",
+ help="Checks whether Redis migrations have been applied; exits with non-zero status if not",
+ )
+
+ def handle(self, *args, **options) -> None:
+ conn = Redis(
+ host=settings.REDIS_INMEM_SETTINGS["HOST"],
+ port=settings.REDIS_INMEM_SETTINGS["PORT"],
+ db=settings.REDIS_INMEM_SETTINGS["DB"],
+ password=settings.REDIS_INMEM_SETTINGS["PASSWORD"],
+ )
+ loader = MigrationLoader(connection=conn)
+
+ if options["check"]:
+ if not loader:
+ return
+
+ sys.exit(1)
+
+ if not loader:
+ self.stdout.write("No migrations to apply")
+ return
+
+ for migration in loader:
+ try:
+ migration.run()
+
+ # add migration to applied ones
+ applied_migration = AppliedMigration(
+ name=migration.name,
+ app_label=migration.app_label,
+ )
+ applied_migration.save(connection=conn)
+
+ except Exception as ex:
+ self.stderr.write(
+ self.style.ERROR(
+ f"[{migration.app_label}] Failed to apply migration: {migration.name}"
+ )
+ )
+ self.stderr.write(self.style.ERROR(f"\n{traceback.format_exc()}"))
+ raise CommandError(str(ex))
+
+ self.stdout.write(
+ self.style.SUCCESS(
+ f"[{migration.app_label}] Successfully applied migration: {migration.name}"
+ )
+ )
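For context, the `migrateredis` command can also be invoked programmatically, for example from a deployment script or readiness check. A rough sketch, not part of this changeset, assuming Django settings are already configured for CVAT and that a hypothetical helper name is acceptable:

```python
from django.core.management import call_command


def redis_migrations_applied() -> bool:
    """Hypothetical helper: True if all Redis migrations have been applied."""
    try:
        # with --check the command exits with a non-zero status
        # when unapplied migrations are found
        call_command("migrateredis", check=True)
        return True
    except SystemExit:
        return False
```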
diff --git a/cvat/apps/engine/management/commands/runperiodicjob.py b/cvat/apps/redis_handler/management/commands/runperiodicjob.py
similarity index 100%
rename from cvat/apps/engine/management/commands/runperiodicjob.py
rename to cvat/apps/redis_handler/management/commands/runperiodicjob.py
diff --git a/cvat/apps/engine/management/commands/syncperiodicjobs.py b/cvat/apps/redis_handler/management/commands/syncperiodicjobs.py
similarity index 100%
rename from cvat/apps/engine/management/commands/syncperiodicjobs.py
rename to cvat/apps/redis_handler/management/commands/syncperiodicjobs.py
diff --git a/cvat/apps/redis_handler/migration_loader.py b/cvat/apps/redis_handler/migration_loader.py
new file mode 100644
index 000000000000..21973fdd16ef
--- /dev/null
+++ b/cvat/apps/redis_handler/migration_loader.py
@@ -0,0 +1,126 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
+
+import importlib
+from datetime import datetime
+from pathlib import Path
+from typing import Any, ClassVar
+
+from attrs import field, frozen, validators
+from django.apps import AppConfig, apps
+from django.utils import timezone
+from redis import Redis
+
+from cvat.apps.redis_handler.redis_migrations import BaseMigration
+
+
+def to_datetime(value: float | str | datetime) -> datetime:
+ if isinstance(value, datetime):
+ return value
+ elif isinstance(value, str):
+ value = float(value)
+
+ return datetime.fromtimestamp(value)
+
+
+@frozen
+class AppliedMigration:
+ SET_KEY: ClassVar[str] = "cvat:applied_migrations"
+ KEY_PREFIX: ClassVar[str] = "cvat:applied_migration:"
+
+ name: str = field(validator=[validators.instance_of(str), validators.max_len(128)])
+ app_label: str = field(validator=[validators.instance_of(str), validators.max_len(128)])
+ applied_date: datetime = field(
+ validator=[validators.instance_of(datetime)], converter=to_datetime, factory=timezone.now
+ )
+
+ def get_key(self) -> str:
+ return f"{self.app_label}.{self.name}"
+
+ def get_key_with_prefix(self) -> str:
+ return self.KEY_PREFIX + self.get_key()
+
+ def to_dict(self) -> dict[str, Any]:
+ return {
+ "applied_date": self.applied_date.timestamp(),
+ }
+
+ def save(self, *, connection: Redis) -> None:
+ with connection.pipeline() as pipe:
+ pipe.hset(self.get_key_with_prefix(), mapping=self.to_dict())
+ pipe.sadd(self.SET_KEY, self.get_key())
+ pipe.execute()
+
+
+class LoaderError(Exception):
+ pass
+
+
+class MigrationLoader:
+ REDIS_MIGRATIONS_DIR_NAME = "redis_migrations"
+ REDIS_MIGRATION_CLASS_NAME = "Migration"
+
+ def __init__(self, *, connection: Redis) -> None:
+ self._connection = connection
+ self._app_config_mapping = {
+ app_config.label: app_config for app_config in self._find_app_configs()
+ }
+ self._disk_migrations_per_app: dict[str, list[str]] = {}
+ self._applied_migrations: dict[str, set[str]] = {}
+ self._unapplied_migrations: list[BaseMigration] = []
+
+ self._load_from_disk()
+ self._init_applied_migrations()
+ self._init_unapplied_migrations()
+
+ def _find_app_configs(self) -> list[AppConfig]:
+ return [
+ app_config
+ for app_config in apps.get_app_configs()
+ if app_config.name.startswith("cvat")
+ and (Path(app_config.path) / self.REDIS_MIGRATIONS_DIR_NAME).exists()
+ ]
+
+ def _load_from_disk(self):
+ for app_label, app_config in self._app_config_mapping.items():
+ migrations_dir = Path(app_config.path) / self.REDIS_MIGRATIONS_DIR_NAME
+ for migration_file in sorted(migrations_dir.glob("[0-9]*.py")):
+ migration_name = migration_file.stem
+ (self._disk_migrations_per_app.setdefault(app_label, [])).append(migration_name)
+
+ def _init_applied_migrations(self):
+ applied_migration_keys: list[str] = [
+ i.decode("utf-8") for i in self._connection.smembers(AppliedMigration.SET_KEY)
+ ]
+ for key in applied_migration_keys:
+ app_label, migration_name = key.split(".")
+ self._applied_migrations.setdefault(app_label, set()).add(migration_name)
+
+ def _init_unapplied_migrations(self):
+ for app_label, migration_names in self._disk_migrations_per_app.items():
+ app_config = self._app_config_mapping[app_label]
+ app_unapplied_migrations = sorted(
+ set(migration_names) - self._applied_migrations.get(app_label, set())
+ )
+ for migration_name in app_unapplied_migrations:
+ MigrationClass = self.get_migration_class(app_config.name, migration_name)
+ self._unapplied_migrations.append(
+ MigrationClass(migration_name, app_config.label, connection=self._connection)
+ )
+
+ def get_migration_class(self, app_name: str, migration_name: str) -> type[BaseMigration]:
+ migration_module_path = ".".join([app_name, self.REDIS_MIGRATIONS_DIR_NAME, migration_name])
+ module = importlib.import_module(migration_module_path)
+ MigrationClass = getattr(module, self.REDIS_MIGRATION_CLASS_NAME, None)
+
+ if not MigrationClass or not issubclass(MigrationClass, BaseMigration):
+ raise LoaderError(f"Invalid migration: {migration_module_path}")
+
+ return MigrationClass
+
+ def __iter__(self):
+ yield from self._unapplied_migrations
+
+ def __len__(self):
+ return len(self._unapplied_migrations)
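Applied migrations are tracked directly in Redis: `AppliedMigration.save()` adds a `<app_label>.<name>` entry to the `cvat:applied_migrations` set and writes a `cvat:applied_migration:<app_label>.<name>` hash with the applied timestamp. A rough sketch of inspecting that bookkeeping with redis-py, assuming a local Redis instance on the default port with no password:

```python
from datetime import datetime

from redis import Redis

conn = Redis(host="localhost", port=6379, db=0)

# set of "<app_label>.<migration_name>" entries
for raw_key in sorted(conn.smembers("cvat:applied_migrations")):
    key = raw_key.decode("utf-8")
    # each applied migration also has a hash with its metadata
    details = conn.hgetall(f"cvat:applied_migration:{key}")
    applied = datetime.fromtimestamp(float(details[b"applied_date"]))
    print(f"{key}: applied at {applied.isoformat()}")
```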
diff --git a/cvat/apps/redis_handler/redis_migrations/__init__.py b/cvat/apps/redis_handler/redis_migrations/__init__.py
new file mode 100644
index 000000000000..d08c396410cc
--- /dev/null
+++ b/cvat/apps/redis_handler/redis_migrations/__init__.py
@@ -0,0 +1,19 @@
+# Copyright (C) CVAT.ai Corporation
+#
+# SPDX-License-Identifier: MIT
+
+from abc import ABCMeta, abstractmethod
+
+from attrs import define, field, validators
+from redis import Redis
+
+
+@define
+class BaseMigration(metaclass=ABCMeta):
+ name: str = field(validator=[validators.instance_of(str)])
+ app_label: str = field(validator=[validators.instance_of(str)])
+ connection: Redis = field(validator=[validators.instance_of(Redis)], kw_only=True)
+
+ @classmethod
+ @abstractmethod
+ def run(cls) -> None: ...
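The loader discovers migrations by convention: a `redis_migrations` package inside a CVAT app, modules matching `[0-9]*.py`, each exporting a class named `Migration` derived from `BaseMigration`. A minimal sketch of what a future migration in another app could look like; the module path and file name below are hypothetical:

```python
# hypothetical module: cvat/apps/some_app/redis_migrations/001_example.py
from cvat.apps.redis_handler.redis_migrations import BaseMigration


class Migration(BaseMigration):
    @classmethod
    def run(cls):
        # one-off Redis state transformation goes here;
        # it runs once and is then recorded as applied
        ...
```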
diff --git a/cvat/schema.yml b/cvat/schema.yml
index a36335209164..298595eca242 100644
--- a/cvat/schema.yml
+++ b/cvat/schema.yml
@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: CVAT REST API
- version: 2.27.0
+ version: 2.28.0
description: REST API for Computer Vision Annotation Tool (CVAT)
termsOfService: https://www.google.com/policies/terms/
contact:
@@ -2673,6 +2673,11 @@ paths:
description: ''
patch:
operationId: jobs_partial_update_validation_layout
+ description: |2
+
+ WARNING: this operation is not protected from race conditions.
+ It's up to the user to ensure no parallel calls to this operation happen.
+ It affects image access, including exports with images, backups, chunk downloading etc.
summary: Allows updating current validation configuration
parameters:
- in: path
@@ -6156,6 +6161,11 @@ paths:
description: ''
patch:
operationId: tasks_partial_update_validation_layout
+ description: |2
+
+ WARNING: this operation is not protected from race conditions.
+ It's up to the user to ensure no parallel calls to this operation happen.
+ It affects image access, including exports with images, backups, chunk downloading etc.
summary: Allows updating current validation configuration
parameters:
- in: path
diff --git a/cvat/settings/base.py b/cvat/settings/base.py
index 03aaa1a27c03..e28fda11f587 100644
--- a/cvat/settings/base.py
+++ b/cvat/settings/base.py
@@ -119,6 +119,7 @@ def generate_secret_key():
'cvat.apps.events',
'cvat.apps.quality_control',
'cvat.apps.analytics_report',
+ 'cvat.apps.redis_handler',
]
SITE_ID = 1
@@ -284,7 +285,7 @@ class CVAT_QUEUES(Enum):
redis_inmem_port = os.getenv('CVAT_REDIS_INMEM_PORT', 6379)
redis_inmem_password = os.getenv('CVAT_REDIS_INMEM_PASSWORD', '')
-shared_queue_settings = {
+REDIS_INMEM_SETTINGS = {
'HOST': redis_inmem_host,
'PORT': redis_inmem_port,
'DB': 0,
@@ -293,39 +294,39 @@ class CVAT_QUEUES(Enum):
RQ_QUEUES = {
CVAT_QUEUES.IMPORT_DATA.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '4h',
},
CVAT_QUEUES.EXPORT_DATA.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '4h',
},
CVAT_QUEUES.AUTO_ANNOTATION.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '24h',
},
CVAT_QUEUES.WEBHOOKS.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '1h',
},
CVAT_QUEUES.NOTIFICATIONS.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '1h',
},
CVAT_QUEUES.QUALITY_REPORTS.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '1h',
},
CVAT_QUEUES.ANALYTICS_REPORTS.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '1h',
},
CVAT_QUEUES.CLEANING.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '2h',
},
CVAT_QUEUES.CHUNKS.value: {
- **shared_queue_settings,
+ **REDIS_INMEM_SETTINGS,
'DEFAULT_TIMEOUT': '5m',
},
}
diff --git a/docker-compose.yml b/docker-compose.yml
index f329956df9de..ad9cde68b5a7 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -81,7 +81,7 @@ services:
cvat_server:
container_name: cvat_server
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on:
<<: *backend-deps
@@ -115,7 +115,7 @@ services:
cvat_utils:
container_name: cvat_utils
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on: *backend-deps
environment:
@@ -132,7 +132,7 @@ services:
cvat_worker_import:
container_name: cvat_worker_import
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on: *backend-deps
environment:
@@ -148,7 +148,7 @@ services:
cvat_worker_export:
container_name: cvat_worker_export
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on: *backend-deps
environment:
@@ -164,7 +164,7 @@ services:
cvat_worker_annotation:
container_name: cvat_worker_annotation
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on: *backend-deps
environment:
@@ -180,7 +180,7 @@ services:
cvat_worker_webhooks:
container_name: cvat_worker_webhooks
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on: *backend-deps
environment:
@@ -196,7 +196,7 @@ services:
cvat_worker_quality_reports:
container_name: cvat_worker_quality_reports
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on: *backend-deps
environment:
@@ -212,7 +212,7 @@ services:
cvat_worker_analytics_reports:
container_name: cvat_worker_analytics_reports
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on: *backend-deps
environment:
@@ -228,7 +228,7 @@ services:
cvat_worker_chunks:
container_name: cvat_worker_chunks
- image: cvat/server:${CVAT_VERSION:-v2.27.0}
+ image: cvat/server:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on: *backend-deps
environment:
@@ -244,7 +244,7 @@ services:
cvat_ui:
container_name: cvat_ui
- image: cvat/ui:${CVAT_VERSION:-v2.27.0}
+ image: cvat/ui:${CVAT_VERSION:-v2.28.0}
restart: always
depends_on:
- cvat_server
diff --git a/helm-chart/values.yaml b/helm-chart/values.yaml
index f3bc48a615ca..2c744e9a3700 100644
--- a/helm-chart/values.yaml
+++ b/helm-chart/values.yaml
@@ -139,7 +139,7 @@ cvat:
additionalVolumeMounts: []
replicas: 1
image: cvat/server
- tag: v2.27.0
+ tag: v2.28.0
imagePullPolicy: Always
permissionFix:
enabled: true
@@ -161,7 +161,7 @@ cvat:
frontend:
replicas: 1
image: cvat/ui
- tag: v2.27.0
+ tag: v2.28.0
imagePullPolicy: Always
labels: {}
# test: test
diff --git a/site/content/en/docs/contributing/development-environment.md b/site/content/en/docs/contributing/development-environment.md
index 3888550084a4..3bc8283d5e9b 100644
--- a/site/content/en/docs/contributing/development-environment.md
+++ b/site/content/en/docs/contributing/development-environment.md
@@ -165,6 +165,7 @@ description: 'Installing a development environment for different operating syste
```bash
python manage.py migrate
+ python manage.py migrateredis
python manage.py collectstatic
python manage.py syncperiodicjobs
python manage.py createsuperuser
diff --git a/tests/cypress/e2e/issues_prs2/issue_8952_interpolation_impossible.js b/tests/cypress/e2e/issues_prs2/issue_8952_interpolation_impossible.js
new file mode 100644
index 000000000000..51a599672aa3
--- /dev/null
+++ b/tests/cypress/e2e/issues_prs2/issue_8952_interpolation_impossible.js
@@ -0,0 +1,170 @@
+// Copyright (C) CVAT.ai Corporation
+//
+// SPDX-License-Identifier: MIT
+
+/// <reference types="cypress" />
+
+const taskName = '5frames';
+const labelName = 'label';
+const attrName = 'attr1';
+const textDefaultValue = 'Some text';
+const issueId = '8952';
+const imagesCount = 5;
+const width = 400;
+const height = 400;
+const posX = 50;
+const posY = 50;
+const color = 'white';
+const imageFileName = `image_${issueId}`;
+const archiveName = `${imageFileName}.zip`;
+const archivePath = `cypress/fixtures/${archiveName}`;
+const imagesFolder = `cypress/fixtures/${imageFileName}`;
+
+const rect = [
+ 30,
+ 30,
+ 30 + 34,
+ 30 + 23,
+];
+
+function translatePoints(points, delta, axis) {
+ if (axis === 'x') {
+ return [
+ points[0] + delta,
+ points[1],
+ points[2] + delta,
+ points[3],
+ ];
+ }
+ if (axis === 'y') {
+ return [
+ points[0],
+ points[1] + delta,
+ points[2],
+ points[3] + delta,
+ ];
+ }
+ return points;
+}
+
+context('Create any track, check if track works correctly after deleting some frames', () => {
+ function readShapeCoords() {
+ return cy.get('.cvat_canvas_shape').then(($shape) => ({
+ x: +$shape.attr('x'),
+ y: +$shape.attr('y'),
+ }));
+ }
+
+ function validateShapeCoords({ x, y }) {
+ const precision = 0.01; // db server precision is 2 digits
+ cy.get('.cvat_canvas_shape').then(($shape) => {
+ const [xVal, yVal] = [
+ +$shape.attr('x'),
+ +$shape.attr('y'),
+ ];
+ expect(xVal).to.be.closeTo(x, precision);
+ expect(yVal).to.be.closeTo(y, precision);
+ });
+ }
+
+ describe('Description: user error, Could not receive frame 43 No one left position or right position was found. Interpolation impossible', () => {
+ let jobID = null;
+ const delta = 300;
+ before(() => {
+ cy.visit('/auth/login');
+ cy.login();
+
+ // Create assets for task using nodeJS
+ cy.imageGenerator(imagesFolder, imageFileName, width, height, color, posX, posY, labelName, imagesCount);
+ cy.createZipArchive(imagesFolder, archivePath);
+ cy.createAnnotationTask(taskName, labelName, attrName, textDefaultValue, archiveName);
+
+ cy.goToTaskList();
+ cy.openTaskJob(taskName);
+ cy.url().should('contain', 'jobs').then((url) => {
+ const last = url.lastIndexOf('/');
+ jobID = parseInt(url.slice(last + 1), 10);
+ }).then(() => {
+ // Remove all annotations and draw a track rect
+ const points0 = rect;
+ const points1 = translatePoints(points0, delta, 'x');
+ const points2 = translatePoints(points1, delta, 'y');
+ const track = {
+ shapes: [
+ {
+ frame: 0,
+ type: 'rectangle',
+ points: points0,
+ },
+ {
+ frame: 2,
+ type: 'rectangle',
+ points: points1,
+ },
+ {
+ frame: 4,
+ type: 'rectangle',
+ points: points2,
+ },
+ ],
+ frame: 0,
+ labelName,
+ objectType: 'track',
+ };
+ cy.headlessCreateObjects([track], jobID);
+ });
+ });
+
+ beforeEach(() => {
+ cy.headlessRestoreAllFrames(jobID);
+
+ // Get job meta updates from the server and reload page to bring changes to UI
+ cy.reload();
+
+ cy.saveJob();
+ cy.get('.cvat-player-first-button').click();
+ });
+
+ it('Delete interpolated frames 0, 2, 4. Error should not appear', () => {
+ // Delete frames 0, 2, 4. Watch out for errors
+ cy.get('.cvat-player-first-button').click();
+ cy.checkFrameNum(0);
+ cy.clickDeleteFrameAnnotationView();
+ cy.checkFrameNum(1);
+ cy.goToNextFrame(2);
+ cy.clickDeleteFrameAnnotationView();
+ cy.checkFrameNum(3);
+ cy.goToNextFrame(4);
+ cy.clickDeleteFrameAnnotationView();
+
+ // There should be no objects on the deleted frame
+ cy.get('.cvat_canvas_shape').should('not.exist');
+ cy.clickSaveAnnotationView();
+
+ // Reopening a task with bad metadata might throw an exception that we can catch
+ cy.goToTaskList();
+ cy.openTaskJob(taskName);
+ });
+
+ it('Change track positions on frames 2 and 4. Delete frame. Confirm same shape positions', () => {
+ cy.goCheckFrameNumber(2);
+ cy.clickDeleteFrameAnnotationView();
+ cy.checkFrameNum(3);
+ cy.clickSaveAnnotationView();
+
+ let pos3 = null;
+ readShapeCoords().then((posOnFrame3) => {
+ pos3 = posOnFrame3;
+ cy.goToPreviousFrame(1);
+ });
+ let pos1 = null;
+ readShapeCoords().then((posOnFrame1) => {
+ pos1 = posOnFrame1;
+ });
+ cy.reload().then(() => {
+ cy.goToNextFrame(1).then(() => validateShapeCoords(pos1));
+ cy.goToNextFrame(3).then(() => validateShapeCoords(pos3));
+ });
+ });
+ });
+});
diff --git a/tests/cypress/support/commands.js b/tests/cypress/support/commands.js
index ba0c826e4cd6..ee2f3a12b780 100644
--- a/tests/cypress/support/commands.js
+++ b/tests/cypress/support/commands.js
@@ -348,6 +348,17 @@ Cypress.Commands.add('headlessCreateObjects', (objects, jobID) => {
});
});
+Cypress.Commands.add('headlessRestoreAllFrames', (jobID) => {
+ cy.intercept('PATCH', `/api/jobs/${jobID}/data/meta**`).as('patchMeta');
+ cy.window().then(async ($win) => {
+ await $win.cvat.server.request(`/api/jobs/${jobID}/data/meta`, {
+ method: 'PATCH',
+ data: { deleted_frames: [] },
+ });
+ });
+ cy.wait('@patchMeta');
+});
+
Cypress.Commands.add('headlessCreateTask', (taskSpec, dataSpec, extras) => {
cy.window().then(async ($win) => {
const task = new $win.cvat.classes.Task({
diff --git a/tests/python/rest_api/test_tasks.py b/tests/python/rest_api/test_tasks.py
index 93e894417b4f..22d452637c51 100644
--- a/tests/python/rest_api/test_tasks.py
+++ b/tests/python/rest_api/test_tasks.py
@@ -2807,7 +2807,7 @@ def read_frame(self, i: int) -> Image.Image:
class _TestTasksBase:
_USERNAME = "admin1"
- def _uploaded_images_task_fxt_base(
+ def _image_task_fxt_base(
self,
request: pytest.FixtureRequest,
*,
@@ -2817,6 +2817,8 @@ def _uploaded_images_task_fxt_base(
stop_frame: Optional[int] = None,
step: Optional[int] = None,
segment_size: Optional[int] = None,
+ server_files: Optional[Sequence[str]] = None,
+ cloud_storage_id: Optional[int] = None,
job_replication: Optional[int] = None,
**data_kwargs,
) -> Generator[tuple[_ImagesTaskSpec, int], None, None]:
@@ -2827,13 +2829,16 @@ def _uploaded_images_task_fxt_base(
**({"consensus_replicas": job_replication} if job_replication else {}),
}
- assert bool(image_files) ^ bool(
- frame_count
- ), "Expected only one of 'image_files' and 'frame_count'"
- if not image_files:
- image_files = generate_image_files(frame_count)
- elif not frame_count:
- frame_count = len(image_files)
+ if server_files is not None:
+ assert (
+ image_files is not None
+ ), "'server_files' must be used together with 'image_files'"
+ else:
+ assert bool(image_files) ^ bool(
+ frame_count
+ ), "Expected only one of 'image_files' and 'frame_count'"
+ if not image_files:
+ image_files = generate_image_files(frame_count)
images_data = [f.getvalue() for f in image_files]
@@ -2843,9 +2848,16 @@ def _uploaded_images_task_fxt_base(
data_params = {
"image_quality": 70,
- "client_files": image_files,
"sorting_method": "natural",
"chunk_size": max(1, (segment_size or resulting_task_size) // 2),
+ **(
+ {
+ "server_files": server_files,
+ "cloud_storage_id": cloud_storage_id,
+ }
+ if server_files
+ else {"client_files": image_files}
+ ),
}
data_params.update(data_kwargs)
@@ -2873,13 +2885,13 @@ def get_frame(i: int) -> bytes:
def fxt_uploaded_images_task(
self, request: pytest.FixtureRequest
) -> Generator[tuple[_TaskSpec, int], None, None]:
- yield from self._uploaded_images_task_fxt_base(request=request)
+ yield from self._image_task_fxt_base(request=request)
@pytest.fixture(scope="class")
def fxt_uploaded_images_task_with_segments(
self, request: pytest.FixtureRequest
) -> Generator[tuple[_TaskSpec, int], None, None]:
- yield from self._uploaded_images_task_fxt_base(request=request, segment_size=4)
+ yield from self._image_task_fxt_base(request=request, segment_size=4)
@fixture(scope="class")
@parametrize("step", [2, 5])
@@ -2888,7 +2900,7 @@ def fxt_uploaded_images_task_with_segments(
def fxt_uploaded_images_task_with_segments_start_stop_step(
self, request: pytest.FixtureRequest, start_frame: int, stop_frame: Optional[int], step: int
) -> Generator[tuple[_TaskSpec, int], None, None]:
- yield from self._uploaded_images_task_fxt_base(
+ yield from self._image_task_fxt_base(
request=request,
frame_count=30,
segment_size=4,
@@ -2901,17 +2913,19 @@ def fxt_uploaded_images_task_with_segments_start_stop_step(
def fxt_uploaded_images_task_with_segments_and_consensus(
self, request: pytest.FixtureRequest
) -> Generator[tuple[_TaskSpec, int], None, None]:
- yield from self._uploaded_images_task_fxt_base(
- request=request, segment_size=4, job_replication=2
- )
+ yield from self._image_task_fxt_base(request=request, segment_size=4, job_replication=2)
- def _uploaded_images_task_with_honeypots_and_segments_base(
+ def _image_task_with_honeypots_and_segments_base(
self,
request: pytest.FixtureRequest,
*,
start_frame: Optional[int] = None,
step: Optional[int] = None,
random_seed: int = 42,
+ image_files: Optional[Sequence[io.BytesIO]] = None,
+ server_files: Optional[Sequence[str]] = None,
+ cloud_storage_id: Optional[int] = None,
+ **kwargs,
) -> Generator[tuple[_TaskSpec, int], None, None]:
validation_params = models.DataRequestValidationParams._from_openapi_data(
mode="gt_pool",
@@ -2933,10 +2947,16 @@ def _uploaded_images_task_with_honeypots_and_segments_base(
+ validation_params.frame_count
)
- image_files = generate_image_files(total_frame_count)
+ if image_files:
+ if len(image_files) != total_frame_count:
+ raise ValueError(
+ f"If provided, image_files must contain {total_frame_count} images"
+ )
+ else:
+ image_files = generate_image_files(total_frame_count)
with closing(
- self._uploaded_images_task_fxt_base(
+ self._image_task_fxt_base(
request=request,
frame_count=None,
image_files=image_files,
@@ -2945,6 +2965,9 @@ def _uploaded_images_task_with_honeypots_and_segments_base(
start_frame=start_frame,
step=step,
validation_params=validation_params,
+ server_files=server_files,
+ cloud_storage_id=cloud_storage_id,
+ **kwargs,
)
) as task_gen:
for task_spec, task_id in task_gen:
@@ -2975,25 +2998,23 @@ def _uploaded_images_task_with_honeypots_and_segments_base(
def fxt_uploaded_images_task_with_honeypots_and_segments(
self, request: pytest.FixtureRequest
) -> Generator[tuple[_TaskSpec, int], None, None]:
- yield from self._uploaded_images_task_with_honeypots_and_segments_base(request)
+ yield from self._image_task_with_honeypots_and_segments_base(request)
@fixture(scope="class")
@parametrize("start_frame, step", [(2, 3)])
def fxt_uploaded_images_task_with_honeypots_and_segments_start_step(
self, request: pytest.FixtureRequest, start_frame: Optional[int], step: Optional[int]
) -> Generator[tuple[_TaskSpec, int], None, None]:
- yield from self._uploaded_images_task_with_honeypots_and_segments_base(
+ yield from self._image_task_with_honeypots_and_segments_base(
request, start_frame=start_frame, step=step
)
- @fixture(scope="class")
- @parametrize("random_seed", [1, 2, 5])
- def fxt_uploaded_images_task_with_honeypots_and_changed_real_frames(
- self, request: pytest.FixtureRequest, random_seed: int
- ) -> Generator[tuple[_TaskSpec, int], None, None]:
+ def _images_task_with_honeypots_and_changed_real_frames_base(
+ self, request: pytest.FixtureRequest, **kwargs
+ ):
with closing(
- self._uploaded_images_task_with_honeypots_and_segments_base(
- request, start_frame=2, step=3, random_seed=random_seed
+ self._image_task_with_honeypots_and_segments_base(
+ request, start_frame=2, step=3, **kwargs
)
) as gen_iter:
task_spec, task_id = next(gen_iter)
@@ -3024,6 +3045,51 @@ def fxt_uploaded_images_task_with_honeypots_and_changed_real_frames(
yield task_spec, task_id
+ @fixture(scope="class")
+ @parametrize("random_seed", [1, 2, 5])
+ def fxt_uploaded_images_task_with_honeypots_and_changed_real_frames(
+ self, request: pytest.FixtureRequest, random_seed: int
+ ) -> Generator[tuple[_TaskSpec, int], None, None]:
+ yield from self._images_task_with_honeypots_and_changed_real_frames_base(
+ request, random_seed=random_seed
+ )
+
+ @fixture(scope="class")
+ @parametrize(
+ "cloud_storage_id",
+ [pytest.param(2, marks=[pytest.mark.with_external_services, pytest.mark.timeout(60)])],
+ )
+ def fxt_cloud_images_task_with_honeypots_and_changed_real_frames(
+ self, request: pytest.FixtureRequest, cloud_storages, cloud_storage_id: int
+ ) -> Generator[tuple[_TaskSpec, int], None, None]:
+ cloud_storage = cloud_storages[cloud_storage_id]
+ s3_client = s3.make_client(bucket=cloud_storage["resource"])
+
+ image_files = generate_image_files(47)
+
+ for image in image_files:
+ image.name = f"test/{image.name}"
+ image.seek(0)
+
+ s3_client.create_file(data=image, filename=image.name)
+ request.addfinalizer(partial(s3_client.remove_file, filename=image.name))
+
+ server_files = [f.name for f in image_files]
+
+ for image in image_files:
+ image.seek(0)
+
+ yield from self._images_task_with_honeypots_and_changed_real_frames_base(
+ request,
+ image_files=image_files,
+ server_files=server_files,
+ cloud_storage_id=cloud_storage_id,
+ # FIXME: random sorting with frame filter and cloud images (and, optionally, honeypots)
+ # doesn't work with static cache
+ # https://github.com/cvat-ai/cvat/issues/9021
+ use_cache=True,
+ )
+
def _uploaded_images_task_with_gt_and_segments_base(
self,
request: pytest.FixtureRequest,
@@ -3072,7 +3138,7 @@ def _uploaded_images_task_with_gt_and_segments_base(
**validation_params_kwargs,
)
- yield from self._uploaded_images_task_fxt_base(
+ yield from self._image_task_fxt_base(
request=request,
frame_count=None,
image_files=image_files,
@@ -3253,6 +3319,7 @@ def _get_job_abs_frame_set(self, job_meta: models.DataMetaRead) -> Sequence[int]
fixture_ref("fxt_uploaded_images_task_with_honeypots_and_segments"),
fixture_ref("fxt_uploaded_images_task_with_honeypots_and_segments_start_step"),
fixture_ref("fxt_uploaded_images_task_with_honeypots_and_changed_real_frames"),
+ fixture_ref("fxt_cloud_images_task_with_honeypots_and_changed_real_frames"),
]
_tasks_with_simple_gt_job_cases = [
@@ -6663,7 +6730,7 @@ def fxt_uploaded_media_task(
args = dict(request=request, frame_count=frame_count, step=step, start_frame=start_frame)
if media_type == _SourceDataType.images:
- (spec, task_id) = next(self._uploaded_images_task_fxt_base(**args))
+ (spec, task_id) = next(self._image_task_fxt_base(**args))
else:
(spec, task_id) = next(self._uploaded_video_task_fxt_base(**args))
diff --git a/tests/python/shared/fixtures/init.py b/tests/python/shared/fixtures/init.py
index 14a59ab5ee22..93e4d72758d3 100644
--- a/tests/python/shared/fixtures/init.py
+++ b/tests/python/shared/fixtures/init.py
@@ -250,7 +250,14 @@ def kube_restore_clickhouse_db():
def _get_redis_inmem_keys_to_keep():
- return ("rq:worker:", "rq:workers", "rq:scheduler_instance:", "rq:queues:")
+ return (
+ "rq:worker:",
+ "rq:workers",
+ "rq:scheduler_instance:",
+ "rq:queues:",
+ "cvat:applied_migrations",
+ "cvat:applied_migration:",
+ )
def docker_restore_redis_inmem():