Coverage for fastblocks/actions/sync/static.py: 12%
359 statements
« prev ^ index » next coverage.py v7.10.6, created at 2025-09-21 04:50 -0700
"""Static files synchronization between filesystem, cloud storage, and selective caching.

Static sync uses selective caching based on file type:
- Text-based files (CSS, JS, MD, TXT) are cached for performance
- Binary files (images, fonts, media) sync to storage only to avoid cache bloat
"""
8import typing as t
9from pathlib import Path
11import yaml
12from acb.debug import debug
13from anyio import Path as AsyncPath
15from .strategies import (
16 ConflictStrategy,
17 SyncDirection,
18 SyncResult,
19 SyncStrategy,
20 create_backup,
21 get_file_info,
22 resolve_conflict,
23 should_sync,
24)
class StaticSyncResult(SyncResult):
    """Sync result extended with static-asset bookkeeping.

    Tracks processed assets, detected MIME types, and which assets were
    treated as cacheable (text) versus non-cacheable (binary).
    """

    def __init__(
        self,
        *,
        assets_processed: list[str] | None = None,
        mime_types_detected: dict[str, str] | None = None,
        cache_invalidated: list[str] | None = None,
        cache_cleared: list[str] | None = None,
        cacheable_assets: list[str] | None = None,
        non_cacheable_assets: list[str] | None = None,
        **kwargs: t.Any,
    ) -> None:
        super().__init__(**kwargs)
        # Default to fresh containers so instances never share mutable state.
        self.assets_processed = [] if assets_processed is None else assets_processed
        self.mime_types_detected = (
            {} if mime_types_detected is None else mime_types_detected
        )
        self.cache_invalidated = [] if cache_invalidated is None else cache_invalidated
        self.cache_cleared = [] if cache_cleared is None else cache_cleared
        self.cacheable_assets = [] if cacheable_assets is None else cacheable_assets
        self.non_cacheable_assets = (
            [] if non_cacheable_assets is None else non_cacheable_assets
        )
# Text-based assets worth caching: small, frequently requested, compress well.
CACHEABLE_EXTENSIONS = {".css", ".js", ".md", ".txt"}
# Binary assets (images, media, fonts, archives): synced to storage only and
# kept out of the cache to avoid bloat.
# NOTE(review): not referenced in this module's visible code — presumably used
# elsewhere or kept for documentation; confirm before removing.
NON_CACHEABLE_EXTENSIONS = {
    ".png",
    ".jpg",
    ".jpeg",
    ".gif",
    ".svg",
    ".ico",
    ".webp",
    ".avif",
    ".mp4",
    ".mov",
    ".mp3",
    ".wav",
    ".pdf",
    ".zip",
    ".woff",
    ".woff2",
    ".ttf",
    ".eot",
    ".otf",
}
def _is_cacheable_file(file_path: AsyncPath) -> bool:
    """Return True when the file extension marks a cacheable text asset."""
    suffix = file_path.suffix.lower()
    return suffix in CACHEABLE_EXTENSIONS
async def sync_static(
    *,
    static_path: AsyncPath | None = None,
    file_patterns: list[str] | None = None,
    strategy: SyncStrategy | None = None,
    storage_bucket: str | None = None,
    exclude_patterns: list[str] | None = None,
) -> StaticSyncResult:
    """Synchronize static assets between the filesystem and cloud storage.

    Discovers files under *static_path* matching *file_patterns*, then syncs
    each one per *strategy*. Returns a StaticSyncResult describing what was
    synced, skipped, cached, or conflicted.
    """
    config = _prepare_static_sync_config(
        static_path, file_patterns, strategy, exclude_patterns
    )
    result = StaticSyncResult()

    # Only consult the settings file when no bucket was given explicitly.
    bucket = (
        storage_bucket
        if storage_bucket is not None
        else await _get_default_static_bucket()
    )

    adapters = await _initialize_adapters(result)
    if adapters is None:
        return result

    discovered = await _discover_static_files(
        config["static_path"],
        config["file_patterns"],
        config["exclude_patterns"],
    )
    if not discovered:
        debug("No static files found to sync")
        return result

    debug(f"Found {len(discovered)} static files to sync")
    await _sync_static_files(discovered, adapters, config["strategy"], bucket, result)
    debug(
        f"Static sync completed: {len(result.synced_items)} synced, {len(result.conflicts)} conflicts",
    )
    return result
def _prepare_static_sync_config(
    static_path: AsyncPath | None,
    file_patterns: list[str] | None,
    strategy: SyncStrategy | None,
    exclude_patterns: list[str] | None,
) -> dict[str, t.Any]:
    """Bundle sync options into a dict, filling in defaults for unset values."""
    default_patterns = [
        "*.css",
        "*.js",
        "*.png",
        "*.jpg",
        "*.jpeg",
        "*.gif",
        "*.svg",
        "*.ico",
        "*.woff",
        "*.woff2",
        "*.ttf",
        "*.eot",
        "*.otf",
        "*.webp",
        "*.avif",
        "*.pdf",
        "*.zip",
        "*.tar.gz",
    ]
    default_excludes = ["*.tmp", "*.log", ".*", "__pycache__"]
    return {
        "static_path": static_path or AsyncPath("static"),
        "file_patterns": file_patterns or default_patterns,
        "strategy": strategy or SyncStrategy(),
        "exclude_patterns": exclude_patterns or default_excludes,
    }
async def _initialize_adapters(result: StaticSyncResult) -> dict[str, t.Any] | None:
    """Resolve storage and cache adapters; record an error and return None on failure."""
    try:
        from acb.depends import depends

        storage = depends.get("storage")
        cache = depends.get("cache")
    except Exception as e:
        result.errors.append(e)
        return None

    if not storage:
        # Storage is mandatory for sync; cache is optional and may be falsy.
        result.errors.append(Exception("Storage adapter not available"))
        return None
    return {"storage": storage, "cache": cache}
async def _get_default_static_bucket() -> str:
    """Read the static bucket name from settings/storage.yml, defaulting to "static"."""
    try:
        config_file = AsyncPath("settings/storage.yml")
        if await config_file.exists():
            loaded = yaml.safe_load(await config_file.read_text())
            if isinstance(loaded, dict):
                bucket_name = loaded.get("buckets", {}).get("static", "static")
            else:
                bucket_name = "static"
            debug(f"Using static bucket from config: {bucket_name}")
            return bucket_name
    except Exception as e:
        # Any parse/read failure falls back to the default below.
        debug(f"Could not load storage config, using default: {e}")
    debug("Using fallback static bucket: static")
    return "static"
async def _discover_static_files(
    static_path: AsyncPath,
    file_patterns: list[str],
    exclude_patterns: list[str],
) -> list[dict[str, t.Any]]:
    """Collect a metadata entry for every static file matching *file_patterns*."""
    found: list[dict[str, t.Any]] = []

    if not await static_path.exists():
        debug(f"Static path does not exist: {static_path}")
        return found

    for pattern in file_patterns:
        await _discover_files_with_pattern(static_path, pattern, exclude_patterns, found)
    return found
async def _discover_files_with_pattern(
    static_path: AsyncPath,
    pattern: str,
    exclude_patterns: list[str],
    static_files: list[dict[str, t.Any]],
) -> None:
    """Append metadata for every non-excluded file under *static_path* matching *pattern*."""
    async for candidate in static_path.rglob(pattern):
        if not await candidate.is_file():
            continue
        if _should_exclude_file(candidate, exclude_patterns):
            continue
        await _process_static_file(candidate, static_path, static_files)
236def _should_exclude_file(file_path: AsyncPath, exclude_patterns: list[str]) -> bool:
237 import fnmatch
239 file_name = file_path.name
240 relative_path = str(file_path)
241 for pattern in exclude_patterns:
242 if fnmatch.fnmatch(file_name, pattern) or fnmatch.fnmatch(
243 relative_path, pattern
244 ):
245 return True
247 return False
async def _process_static_file(
    file_path: AsyncPath,
    static_path: AsyncPath,
    static_files: list[dict[str, t.Any]],
) -> None:
    """Build a metadata record for *file_path* and append it to *static_files*."""
    try:
        rel_path = file_path.relative_to(static_path)
    except ValueError:
        # File lies outside the static root; skip it rather than fail.
        debug(f"Could not get relative path for {file_path}")
        return

    static_files.append(
        {
            "local_path": file_path,
            "relative_path": rel_path,
            "storage_path": str(rel_path),
            "mime_type": _detect_mime_type(file_path),
            "is_cacheable": _is_cacheable_file(file_path),
        },
    )
273def _detect_mime_type(file_path: AsyncPath) -> str:
274 import mimetypes
276 mime_type, _ = mimetypes.guess_type(str(file_path))
277 return mime_type or "application/octet-stream"
async def _sync_static_files(
    static_files: list[dict[str, t.Any]],
    adapters: dict[str, t.Any],
    strategy: SyncStrategy,
    storage_bucket: str,
    result: StaticSyncResult,
) -> None:
    """Sync every discovered static file, accumulating outcomes into *result*."""
    storage = adapters["storage"]
    cache = adapters["cache"]

    for info in static_files:
        try:
            file_result = await _sync_single_static_file(
                info, storage, cache, strategy, storage_bucket
            )
            _accumulate_static_sync_results(file_result, result)

            if file_result.get("synced"):
                path = info["storage_path"]
                result.assets_processed.append(path)
                result.mime_types_detected[path] = info["mime_type"]
                # Route to cacheable vs non-cacheable tracking lists.
                target = (
                    result.cacheable_assets
                    if info["is_cacheable"]
                    else result.non_cacheable_assets
                )
                target.append(path)
        except Exception as e:
            # One failing file must not abort the rest of the batch.
            result.errors.append(e)
            debug(f"Error syncing static file {info['relative_path']}: {e}")
def _accumulate_static_sync_results(
    file_result: dict[str, t.Any],
    result: StaticSyncResult,
) -> None:
    """Merge a per-file result dict into the aggregate *result* lists."""
    key_to_attr = (
        ("synced", "synced_items"),
        ("conflicts", "conflicts"),
        ("errors", "errors"),
        ("skipped", "skipped"),
        ("backed_up", "backed_up"),
        ("cache_invalidated", "cache_invalidated"),
        ("cache_cleared", "cache_cleared"),
    )
    for key, attr in key_to_attr:
        items = file_result.get(key)
        if items:
            getattr(result, attr).extend(items)
async def _sync_single_static_file(
    static_info: dict[str, t.Any],
    storage: t.Any,
    cache: t.Any,
    strategy: SyncStrategy,
    storage_bucket: str,
) -> dict[str, t.Any]:
    """Sync one static file and return its per-file result dict.

    All failures are captured in the result's ``errors`` list; this never
    raises.
    """
    storage_path = static_info["storage_path"]
    result = _create_sync_result()

    try:
        local_info, remote_info = await _get_file_infos(
            static_info["local_path"],
            storage,
            storage_bucket,
            storage_path,
        )
        if await _should_sync_file(
            local_info, remote_info, strategy, storage_path, result
        ):
            await _execute_sync_operation(
                static_info["local_path"],
                storage,
                cache,
                storage_bucket,
                storage_path,
                local_info,
                remote_info,
                strategy,
                static_info["mime_type"],
                static_info["is_cacheable"],
                result,
            )
    except Exception as e:
        result["errors"].append(e)
        debug(f"Error in _sync_single_static_file for {storage_path}: {e}")

    return result
386def _create_sync_result() -> dict[str, t.Any]:
387 return {
388 "synced": [],
389 "conflicts": [],
390 "errors": [],
391 "skipped": [],
392 "backed_up": [],
393 "cache_invalidated": [],
394 "cache_cleared": [],
395 }
async def _get_file_infos(
    local_path: t.Any,
    storage: t.Any,
    storage_bucket: str,
    storage_path: str,
) -> tuple[dict[str, t.Any], dict[str, t.Any]]:
    """Fetch filesystem and storage metadata for one file, as (local, remote)."""
    return (
        await get_file_info(Path(local_path)),
        await _get_storage_file_info(storage, storage_bucket, storage_path),
    )
async def _should_sync_file(
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    storage_path: str,
    result: dict[str, t.Any],
) -> bool:
    """Decide whether the file needs syncing; record a skip entry when it doesn't."""
    needed, reason = should_sync(local_info, remote_info, strategy.direction)
    if needed:
        debug(f"Syncing static file {storage_path}: {reason}")
        return True
    result["skipped"].append(f"{storage_path} ({reason})")
    return False
async def _execute_sync_operation(
    local_path: t.Any,
    storage: t.Any,
    cache: t.Any,
    storage_bucket: str,
    storage_path: str,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Dispatch to pull, push, or conflict handling based on strategy and file state.

    Checked in priority order: pull, then push, then bidirectional conflict.
    A file matching none of the three is left untouched.
    """
    if _should_pull_static(strategy, local_info, remote_info):
        await _pull_static(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            strategy,
            is_cacheable,
            result,
        )
        return

    if _should_push_static(strategy, local_info, remote_info):
        await _push_static(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            strategy,
            mime_type,
            is_cacheable,
            result,
        )
        return

    if _has_bidirectional_conflict(strategy, local_info, remote_info):
        await _handle_static_conflict(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            local_info,
            remote_info,
            strategy,
            mime_type,
            is_cacheable,
            result,
        )
def _should_pull_static(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """Pull when direction is PULL, or bidirectional with a newer (or only) remote copy."""
    if strategy.direction == SyncDirection.PULL:
        return True
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    if not remote_info["exists"]:
        return False
    return not local_info["exists"] or remote_info["mtime"] > local_info["mtime"]
def _should_push_static(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """Push when direction is PUSH, or bidirectional with a newer (or only) local copy."""
    if strategy.direction == SyncDirection.PUSH:
        return True
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    if not local_info["exists"]:
        return False
    return not remote_info["exists"] or local_info["mtime"] > remote_info["mtime"]
def _has_bidirectional_conflict(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """Both sides exist under bidirectional sync — a potential conflict."""
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    return local_info["exists"] and remote_info["exists"]
513async def _get_storage_file_info(
514 storage: t.Any,
515 bucket: str,
516 file_path: str,
517) -> dict[str, t.Any]:
518 try:
519 bucket_obj = getattr(storage, bucket, None)
521 if not bucket_obj:
522 await storage._create_bucket(bucket)
523 bucket_obj = getattr(storage, bucket)
525 exists = await bucket_obj.exists(file_path)
527 if not exists:
528 return {
529 "exists": False,
530 "size": 0,
531 "mtime": 0,
532 "content_hash": None,
533 }
535 content = await bucket_obj.read(file_path)
536 metadata = await bucket_obj.stat(file_path)
538 import hashlib
540 content_hash = hashlib.blake2b(content).hexdigest()
542 return {
543 "exists": True,
544 "size": len(content),
545 "mtime": metadata.get("mtime", 0),
546 "content_hash": content_hash,
547 "content": content,
548 }
550 except Exception as e:
551 debug(f"Error getting storage file info for {file_path}: {e}")
552 return {
553 "exists": False,
554 "size": 0,
555 "mtime": 0,
556 "content_hash": None,
557 "error": str(e),
558 }
async def _pull_static(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    strategy: SyncStrategy,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Copy a file from storage down to the local filesystem (remote wins)."""
    try:
        bucket_obj = getattr(storage, bucket)

        if strategy.dry_run:
            debug(f"DRY RUN: Would pull {storage_path} to {local_path}")
            result["synced"].append(f"PULL(dry-run): {storage_path}")
            return

        # Preserve the existing local copy before overwriting it.
        if await local_path.exists() and strategy.backup_on_conflict:
            backup_path = await create_backup(Path(local_path))
            result["backed_up"].append(str(backup_path))

        content = await bucket_obj.read(storage_path)
        await local_path.parent.mkdir(parents=True, exist_ok=True)
        await local_path.write_bytes(content)

        result["synced"].append(f"PULL: {storage_path}")
        debug(f"Pulled static file from storage: {storage_path}")

        # Only text assets are cached; binaries would bloat the cache.
        if is_cacheable and cache:
            await _cache_static_file(cache, storage_path, content, result)
    except Exception as e:
        result["errors"].append(e)
        debug(f"Error pulling static file {storage_path}: {e}")
async def _push_static(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Upload a local file to storage (local wins), tagging its MIME type."""
    try:
        bucket_obj = getattr(storage, bucket)

        if strategy.dry_run:
            debug(f"DRY RUN: Would push {local_path} to {storage_path}")
            result["synced"].append(f"PUSH(dry-run): {storage_path}")
            return

        content = await local_path.read_bytes()
        # MIME type travels as object metadata so storage can serve it correctly.
        await bucket_obj.write(
            storage_path, content, metadata={"content_type": mime_type}
        )

        result["synced"].append(f"PUSH: {storage_path}")
        debug(f"Pushed static file to storage: {storage_path} (MIME: {mime_type})")

        if is_cacheable and cache:
            await _cache_static_file(cache, storage_path, content, result)
    except Exception as e:
        result["errors"].append(e)
        debug(f"Error pushing static file {storage_path}: {e}")
async def _handle_static_conflict(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Resolve a bidirectional conflict where both local and remote copies exist.

    Depending on ``strategy.conflict_strategy``, either records the conflict
    for manual resolution or resolves it via ``resolve_conflict`` and writes
    the winning content to the losing side. Mutates *result* in place; all
    failures are captured rather than raised.
    """
    try:
        # Manual strategy: never auto-resolve; surface the conflict to the caller.
        if strategy.conflict_strategy == ConflictStrategy.MANUAL:
            result["conflicts"].append(
                {
                    "path": storage_path,
                    "local_mtime": local_info["mtime"],
                    "remote_mtime": remote_info["mtime"],
                    "reason": "manual_resolution_required",
                },
            )
            return

        # NOTE(review): remote content is passed before local content here —
        # confirm this matches resolve_conflict's parameter order in strategies.
        resolved_content, resolution_reason = await resolve_conflict(
            Path(local_path),
            remote_info["content"],
            local_info["content"],
            strategy.conflict_strategy,
            local_info["mtime"],
            remote_info["mtime"],
        )

        if strategy.dry_run:
            debug(
                f"DRY RUN: Would resolve conflict for {storage_path}: {resolution_reason}",
            )
            result["synced"].append(
                f"CONFLICT(dry-run): {storage_path} - {resolution_reason}",
            )
            return

        # Back up the local file before any side is overwritten.
        if (
            strategy.backup_on_conflict
            or strategy.conflict_strategy == ConflictStrategy.BACKUP_BOTH
        ):
            backup_path = await create_backup(Path(local_path), "conflict")
            result["backed_up"].append(str(backup_path))

        # Remote content won: overwrite the local file. Otherwise the local
        # (resolved) content wins and is pushed to storage with its MIME type.
        if resolved_content == remote_info["content"]:
            await local_path.write_bytes(resolved_content)
            result["synced"].append(
                f"CONFLICT->REMOTE: {storage_path} - {resolution_reason}",
            )
        else:
            bucket_obj = getattr(storage, bucket)
            metadata = {"content_type": mime_type}
            await bucket_obj.write(storage_path, resolved_content, metadata=metadata)
            result["synced"].append(
                f"CONFLICT->LOCAL: {storage_path} - {resolution_reason}",
            )

        # Re-cache text assets with the winning content.
        if is_cacheable and cache:
            await _cache_static_file(cache, storage_path, resolved_content, result)

        debug(f"Resolved static conflict: {storage_path} - {resolution_reason}")

    except Exception as e:
        # Record both the raw error and a structured conflict entry.
        result["errors"].append(e)
        result["conflicts"].append(
            {
                "path": storage_path,
                "error": str(e),
                "reason": "resolution_failed",
            },
        )
async def _cache_static_file(
    cache: t.Any,
    storage_path: str,
    content: bytes,
    result: dict[str, t.Any],
) -> None:
    """Store *content* in the cache under a ``static:`` key with a 24h TTL.

    Caching is best-effort: failures are logged and swallowed so a cache
    outage never breaks a sync. Successful keys are recorded in
    ``result["cache_invalidated"]`` for reporting.
    """
    if not cache:
        return

    cache_key = f"static:{storage_path}"
    try:
        # 86400s = 24 hours; assets are re-cached on every sync anyway.
        await cache.set(cache_key, content, ttl=86400)
        result["cache_invalidated"].append(cache_key)
        debug(f"Cached static file: {storage_path}")
    except Exception as e:
        # Best-effort: log and continue (removed a redundant trailing `pass`).
        debug(f"Error caching static file {storage_path}: {e}")
async def warm_static_cache(
    static_paths: list[str] | None = None,
    cache_namespace: str = "static",
) -> dict[str, t.Any]:
    """Pre-populate the cache with commonly requested static text assets.

    Returns a dict with ``warmed``, ``errors``, and ``skipped`` lists; never
    raises.
    """
    result: dict[str, t.Any] = {"warmed": [], "errors": [], "skipped": []}

    # Default to the assets most pages request first.
    paths = static_paths or [
        "css/main.css",
        "css/app.css",
        "js/main.js",
        "js/app.js",
    ]

    try:
        from acb.depends import depends

        cache = depends.get("cache")
        storage = depends.get("storage")

        if not (cache and storage):
            result["errors"].append(Exception("Cache or storage not available"))
            return result

        for path in paths:
            try:
                if not _is_cacheable_file(AsyncPath(path)):
                    result["skipped"].append(f"{path} (not cacheable)")
                    continue

                cache_key = f"{cache_namespace}:{path}"
                if await cache.exists(cache_key):
                    # Already warm; leave the existing entry untouched.
                    result["skipped"].append(path)
                    continue

                content = await storage.static.read(path)
                await cache.set(cache_key, content, ttl=86400)
                result["warmed"].append(path)
                debug(f"Warmed cache for static file: {path}")
            except Exception as e:
                result["errors"].append(f"{path}: {e}")
                debug(f"Error warming cache for static file {path}: {e}")
    except Exception as e:
        result["errors"].append(str(e))
        debug(f"Error in warm_static_cache: {e}")

    return result
def _classify_static_file(
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    file_status: dict[str, t.Any],
    status: dict[str, t.Any],
) -> None:
    """Set ``file_status["status"]`` and bump the matching counter in *status*."""
    if local_info["exists"] and remote_info["exists"]:
        if local_info["content_hash"] == remote_info["content_hash"]:
            file_status["status"] = "in_sync"
            status["in_sync"] += 1
        else:
            # Hashes differ on both sides: record mtimes so callers can decide.
            file_status["status"] = "conflict"
            file_status["local_mtime"] = local_info["mtime"]
            file_status["remote_mtime"] = remote_info["mtime"]
            status["conflicts"] += 1
    elif local_info["exists"]:
        file_status["status"] = "local_only"
        status["local_only"] += 1
    elif remote_info["exists"]:
        file_status["status"] = "remote_only"
        status["remote_only"] += 1
    else:
        file_status["status"] = "missing"


async def get_static_sync_status(
    static_path: AsyncPath | None = None,
    storage_bucket: str = "static",
) -> dict[str, t.Any]:
    """Compare local static files with storage and report per-file sync state.

    Returns counters (``in_sync``, ``conflicts``, ``local_only``,
    ``remote_only``, ``out_of_sync``) plus a ``details`` list with one entry
    per file. Failures are reported via an ``error`` key instead of raising.
    """
    if static_path is None:
        static_path = AsyncPath("static")

    status: dict[str, t.Any] = {
        "total_static_files": 0,
        "in_sync": 0,
        "out_of_sync": 0,
        "local_only": 0,
        "remote_only": 0,
        "conflicts": 0,
        "details": [],
    }

    try:
        from acb.depends import depends

        storage = depends.get("storage")

        if not storage:
            status["error"] = "Storage adapter not available"
            return status

        static_files = await _discover_static_files(
            static_path,
            ["*.css", "*.js", "*.png", "*.jpg", "*.jpeg", "*.gif", "*.svg", "*.ico"],
            ["*.tmp", "*.log", ".*"],
        )
        status["total_static_files"] = len(static_files)

        for static_info in static_files:
            local_info = await get_file_info(Path(static_info["local_path"]))
            remote_info = await _get_storage_file_info(
                storage,
                storage_bucket,
                static_info["storage_path"],
            )

            file_status: dict[str, t.Any] = {
                "path": static_info["storage_path"],
                "mime_type": static_info["mime_type"],
                "local_exists": local_info["exists"],
                "remote_exists": remote_info["exists"],
            }
            _classify_static_file(local_info, remote_info, file_status, status)
            status["details"].append(file_status)

        # Anything existing on only one side, or conflicting, is out of sync.
        status["out_of_sync"] = (
            status["conflicts"] + status["local_only"] + status["remote_only"]
        )

    except Exception as e:
        status["error"] = str(e)
        debug(f"Error getting static sync status: {e}")

    return status
async def backup_static_files(
    static_path: AsyncPath | None = None,
    backup_suffix: str | None = None,
) -> dict[str, t.Any]:
    """Create suffixed backup copies of all static assets under *static_path*.

    Returns a dict with ``backed_up``, ``errors``, and ``skipped`` lists;
    never raises.
    """
    root = static_path or AsyncPath("static")
    suffix = backup_suffix or _generate_backup_suffix()
    result = _create_backup_result()

    try:
        if not await root.exists():
            result["errors"].append(f"Static path does not exist: {root}")
            return result
        await _backup_static_files_with_patterns(root, suffix, result)
    except Exception as e:
        result["errors"].append(str(e))
        debug(f"Error in backup_static_files: {e}")

    return result
891def _generate_backup_suffix() -> str:
892 import time
894 timestamp = int(time.time())
895 return f"backup_{timestamp}"
898def _create_backup_result() -> dict[str, t.Any]:
899 return {
900 "backed_up": [],
901 "errors": [],
902 "skipped": [],
903 }
async def _backup_static_files_with_patterns(
    static_path: AsyncPath,
    backup_suffix: str,
    result: dict[str, t.Any],
) -> None:
    """Back up every file matching the known static-asset glob patterns."""
    asset_patterns = (
        "*.css",
        "*.js",
        "*.png",
        "*.jpg",
        "*.jpeg",
        "*.gif",
        "*.svg",
        "*.ico",
        "*.woff",
        "*.woff2",
        "*.ttf",
        "*.eot",
        "*.otf",
        "*.webp",
        "*.avif",
    )
    for pattern in asset_patterns:
        await _backup_files_with_pattern(static_path, pattern, backup_suffix, result)
async def _backup_files_with_pattern(
    static_path: AsyncPath,
    pattern: str,
    backup_suffix: str,
    result: dict[str, t.Any],
) -> None:
    """Back up each regular file under *static_path* that matches *pattern*."""
    async for candidate in static_path.rglob(pattern):
        if not await candidate.is_file():
            continue
        await _backup_single_file(candidate, backup_suffix, result)
944async def _backup_single_file(
945 file_path: AsyncPath,
946 backup_suffix: str,
947 result: dict[str, t.Any],
948) -> None:
949 try:
950 backup_path = await create_backup(Path(file_path), backup_suffix)
951 result["backed_up"].append(str(backup_path))
952 except Exception as e:
953 result["errors"].append(f"{file_path}: {e}")