feat(tidy3d): FXC-5275 schema-versioned config migrations for tidy3d config #3225
base: develop
Conversation
Cursor Bugbot has reviewed your changes and found 2 potential issues.
Diff Coverage
Diff: origin/develop...HEAD, staged and unstaged changes
Summary
tidy3d/config/loader.py
Lines 87-95
87 )
88 return legacy
89
90 if legacy:
! 91 return self._migrate_legacy_payload(legacy)
92 return {}
93
94 def load_user_profile(self, profile: str) -> dict[str, Any]:
95 """Load user profile overrides (if any)."""Lines 119-128 119 if not data:
120 if profile_path.exists():
121 profile_path.unlink()
122 self._docs.pop(profile_path, None)
! 123 self._versions.pop(profile_path, None)
! 124 self._pending_writes.pop(profile_path, None)
125 return
126 profile_path.parent.mkdir(mode=0o700, parents=True, exist_ok=True)
127 self._atomic_write(profile_path, data)
Lines 185-194
185
186 for path, document in list(self._pending_writes.items()):
187 try:
188 self._atomic_write_document(path, document)
! 189 except Exception as exc:
! 190 log.warning(f"Failed to write migrated configuration file '{path}': {exc}")
191 finally:
192 self._pending_writes.pop(path, None)
193
194 def write_document(self, path: Path, document: tomlkit.TOMLDocument) -> None:
Lines 226-234
226 return self._apply_schema_migrations(path, data, document)
227
228 def _migrate_legacy_payload(self, data: dict[str, Any]) -> dict[str, Any]:
229 if not data:
! 230 return {}
231 document = tomlkit.parse(toml.dumps(data))
232 apply_migrations(document, 0, CURRENT_CONFIG_VERSION)
233 set_config_version(document, CURRENT_CONFIG_VERSION)
234 migrated = toml.loads(tomlkit.dumps(document))
tidy3d/config/migrations.py
Lines 42-67
42 """Return the config version stored in a dict or TOML document."""
43
44 if isinstance(source, tomlkit.TOMLDocument):
45 raw = source.get(CONFIG_VERSION_KEY)
! 46 elif isinstance(source, dict):
! 47 raw = source.get(CONFIG_VERSION_KEY)
48 else:
! 49 raw = None
50
51 if raw is None:
52 return 0
53 if isinstance(raw, bool):
! 54 log.warning(f"Invalid '{CONFIG_VERSION_KEY}' value {raw!r}; falling back to version 0.")
! 55 return 0
56 try:
57 version = int(raw)
! 58 except (TypeError, ValueError):
! 59 log.warning(f"Invalid '{CONFIG_VERSION_KEY}' value {raw!r}; falling back to version 0.")
! 60 return 0
61 if version < 0:
! 62 log.warning(f"Invalid '{CONFIG_VERSION_KEY}' value {version!r}; falling back to version 0.")
! 63 return 0
64 return version
65
66
67 def set_config_version(document: tomlkit.TOMLDocument, version: int) -> None:
Lines 96-107
96 return True
97 value = raw.strip().lower()
98 if value in {"0", "false", "no", "off"}:
99 return False
! 100 if value in {"1", "true", "yes", "on"}:
! 101 return True
! 102 log.warning(f"Unrecognized '{AUTO_MIGRATE_ENV}' value {raw!r}; defaulting to auto-migrate.")
! 103 return True
104
105
106 def forward_compat_mode() -> str:
107 """Return the forward-compat behavior for newer config versions."""Lines 111-120 111 return FORWARD_COMPAT_BEST_EFFORT
112 value = raw.strip().lower()
113 if value in {FORWARD_COMPAT_STRICT, FORWARD_COMPAT_BEST_EFFORT}:
114 return value
! 115 log.warning(f"Unrecognized '{FORWARD_COMPAT_ENV}' value {raw!r}; defaulting to best-effort.")
! 116 return FORWARD_COMPAT_BEST_EFFORT
117
118
119 def apply_migrations(document: tomlkit.TOMLDocument, from_version: int, to_version: int) -> None:
120 """Apply registered migrations to the document."""Lines 119-129 119 def apply_migrations(document: tomlkit.TOMLDocument, from_version: int, to_version: int) -> None:
120 """Apply registered migrations to the document."""
121
122 if from_version >= to_version:
! 123 return
124 if from_version < 0:
! 125 from_version = 0
126 _ensure_migration_chain(to_version)
127 for version in range(from_version, to_version):
128 for migrator in _MIGRATIONS.get(version, []):
129 migrator(document)
Lines 133-141
133 """Drop unknown keys from a config payload using the registered schemas."""
134
135 sections = get_sections()
136 if not sections:
! 137 return strip_config_version(data)
138
139 filtered: dict[str, Any] = {}
140 for key, value in data.items():
141 if key == CONFIG_VERSION_KEY:
Lines 140-156
140 for key, value in data.items():
141 if key == CONFIG_VERSION_KEY:
142 continue
143 if key == "plugins":
! 144 filtered_plugins = _filter_plugins(value, sections)
! 145 if filtered_plugins is not None:
! 146 filtered["plugins"] = filtered_plugins
! 147 continue
148
149 schema = sections.get(key)
150 if schema is None:
! 151 filtered[key] = value
! 152 continue
153 if isinstance(value, dict):
154 filtered[key] = _filter_section_data(schema, value)
155 else:
156 log.warning(
Lines 176-216
176 else:
177 payload = data.get(name, {})
178
179 if not isinstance(payload, dict):
! 180 payload = {}
181 check_deprecations(schema, payload, (name,))
182 try:
183 schema(**payload)
! 184 except Exception as exc:
! 185 errors.append(exc)
186 if errors:
! 187 raise errors[0]
188
189
190 def _filter_plugins(value: Any, sections: dict[str, type[BaseModel]]) -> Optional[dict[str, Any]]:
! 191 if not isinstance(value, dict):
! 192 log.warning(
193 "Configuration section 'plugins' should be a table; "
194 "ignoring non-table value during best-effort parsing."
195 )
! 196 return None
197
! 198 filtered: dict[str, Any] = {}
! 199 for plugin_name, plugin_data in value.items():
! 200 schema = sections.get(f"plugins.{plugin_name}")
! 201 if schema is None:
! 202 filtered[plugin_name] = plugin_data
! 203 continue
! 204 if isinstance(plugin_data, dict):
! 205 filtered[plugin_name] = _filter_section_data(schema, plugin_data)
206 else:
! 207 log.warning(
208 f"Configuration plugin section '{plugin_name}' should be a table; "
209 "ignoring non-table value during best-effort parsing."
210 )
! 211 filtered[plugin_name] = {}
! 212 return filtered
213
214
215 def _filter_section_data(schema: type[BaseModel], data: dict[str, Any]) -> dict[str, Any]:
216 filtered: dict[str, Any] = {}
Lines 219-235
219 continue
220 value = data[field_name]
221 nested_model = _resolve_model_type(field.annotation)
222 if nested_model is not None:
! 223 if isinstance(value, dict):
! 224 filtered[field_name] = _filter_section_data(nested_model, value)
! 225 continue
! 226 if isinstance(value, list):
! 227 filtered[field_name] = [
228 _filter_section_data(nested_model, item) if isinstance(item, dict) else item
229 for item in value
230 ]
! 231 continue
232 filtered[field_name] = value
233 return filtered
234
Lines 234-242
234
235
236 def _resolve_model_type(annotation: Any) -> Optional[type[BaseModel]]:
237 if isinstance(annotation, type) and issubclass(annotation, BaseModel):
! 238 return annotation
239
240 origin = get_origin(annotation)
241 if origin is None:
242 return None
Lines 243-251
243
244 for arg in get_args(annotation):
245 nested = _resolve_model_type(arg)
246 if nested is not None:
! 247 return nested
248 return None
249
250
251 def _normalize_version(value: Any, path: str, label: str) -> Optional[int]:
Lines 251-263
251 def _normalize_version(value: Any, path: str, label: str) -> Optional[int]:
252 if value is None:
253 return None
254 if isinstance(value, bool) or not isinstance(value, int):
! 255 log.warning(f"Ignoring invalid {label}={value!r} on '{path}'.")
! 256 return None
257 if value < 0:
! 258 log.warning(f"Ignoring invalid {label}={value!r} on '{path}'.")
! 259 return None
260 return value
261
262
263 def check_deprecations(
Lines 283-292
283 schema_extra.get("removed_in"), field_path, "removed_in"
284 )
285 replaced_by = schema_extra.get("replaced_by")
286 if deprecated_in is not None and removed_in is not None:
! 287 if removed_in < deprecated_in + 2:
! 288 log.warning(
289 f"Deprecation metadata for '{field_path}' violates the minimum window "
290 f"(removed_in={removed_in}, deprecated_in={deprecated_in}).",
291 log_once=True,
292 )
Lines 304-313
304
305 nested_model = _resolve_model_type(field.annotation)
306 nested_value = data.get(field_name)
307 if nested_model is not None:
! 308 if isinstance(nested_value, dict):
! 309 check_deprecations(
310 nested_model,
311 nested_value,
312 (*prefix, field_name),
313 current_version=active_version,
Lines 311-322
311 nested_value,
312 (*prefix, field_name),
313 current_version=active_version,
314 )
! 315 elif isinstance(nested_value, list):
! 316 for item in nested_value:
! 317 if isinstance(item, dict):
! 318 check_deprecations(
319 nested_model,
320 item,
321 (*prefix, field_name),
322 current_version=active_version,
Lines 338-346
338
339 def _validate_migration_chain(target_version: int) -> None:
340 for version in range(target_version):
341 if version not in _MIGRATIONS or not _MIGRATIONS[version]:
! 342 raise RuntimeError(f"Missing config migration step for v{version} -> v{version + 1}.")
343
344
345 @register_migration(0)
346 def _migrate_v0_to_v1(document: tomlkit.TOMLDocument) -> None:
tidy3d/web/cli/app.py
Lines 363-371
363 if profiles:
364 for profile in profiles:
365 path = profiles_dir / f"{profile}.toml"
366 if not path.exists():
! 367 raise click.ClickException(f"Profile '{profile}' not found at '{path}'.")
368 targets.append(path)
369 return targets
370
371 base_path = config_dir / "config.toml"
Lines 372-380
372 if base_path.exists():
373 targets.append(base_path)
374
375 if profiles_dir.exists():
! 376 targets.extend(sorted(profiles_dir.glob("*.toml")))
377 return targets
378
379
380 def _preview_schema_upgrade(path: Path) -> dict[str, Any]:
Lines 379-397
379
380 def _preview_schema_upgrade(path: Path) -> dict[str, Any]:
381 try:
382 text = path.read_text(encoding="utf-8")
! 383 except Exception as exc:
! 384 raise click.ClickException(f"Failed to read '{path}': {exc}") from exc
385
386 try:
387 document = tomlkit.parse(text)
! 388 except Exception as exc:
! 389 raise click.ClickException(f"Failed to parse '{path}': {exc}") from exc
390
391 version = get_config_version(document)
392 if version > CURRENT_CONFIG_VERSION:
! 393 return {
394 "path": path,
395 "version": version,
396 "forward": True,
397 "changed": False,Lines 404-417 404 apply_migrations(document, version, CURRENT_CONFIG_VERSION)
405 set_config_version(document, CURRENT_CONFIG_VERSION)
406 after = tomlkit.dumps(document)
407 else:
! 408 after = text
409
410 try:
411 data = toml.loads(after)
! 412 except Exception as exc:
! 413 raise click.ClickException(f"Failed to decode migrated '{path}': {exc}") from exc
414
415 validate_config_data(strip_config_version(data))
416 return {
417 "path": path,Lines 446-455 446
447 loader = ConfigLoader()
448 targets = _collect_upgrade_targets(loader.config_dir, profiles)
449 if not targets:
! 450 click.echo("No configuration files found to upgrade.")
! 451 return
452
453 forward_mode = forward_compat_mode()
454 changed_paths: list[Path] = []
455 forward_paths: list[Path] = []
Lines 456-475
456
457 for path in targets:
458 result = _preview_schema_upgrade(path)
459 if result["forward"]:
! 460 forward_paths.append(path)
! 461 if forward_mode == FORWARD_COMPAT_STRICT or check:
! 462 raise click.ClickException(
463 f"Configuration file '{path}' targets config_version {result['version']}, "
464 f"which is newer than supported version {CURRENT_CONFIG_VERSION}."
465 )
! 466 click.echo(
467 f"Warning: '{path}' targets config_version {result['version']} "
468 f"(current={CURRENT_CONFIG_VERSION}); skipping upgrade.",
469 err=True,
470 )
! 471 continue
472
473 if result["changed"]:
474 changed_paths.append(path)
475 if dry_run:
Lines 479-496
479 elif not check and result["document"] is not None:
480 loader.write_document(path, result["document"])
481
482 if check:
! 483 if changed_paths:
! 484 raise click.ClickException("Configuration upgrade required.")
! 485 if forward_paths:
! 486 raise click.ClickException("Configuration files target a newer schema version.")
! 487 click.echo("Configuration files are up to date.")
! 488 return
489
490 if dry_run and not changed_paths and not forward_paths:
! 491 click.echo("Configuration files are already up to date.")
! 492 return
493
494 if not dry_run and changed_paths:
495 click.echo(f"Upgraded {len(changed_paths)} configuration file(s).")
Branch …-for-tidy-3-d-config force-pushed from da895f9 to 2bd65c2 (Compare)
Note
Medium Risk
Touches core config load/validation and introduces auto write-back behavior, which could affect startup and persistence semantics across existing user config files.
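For anyone cautious about that write-back, here is a minimal opt-out sketch, assuming the TIDY3D_CONFIG_AUTO_MIGRATE variable is read from the environment when the config is first loaded; the accepted spellings mirror the "0"/"false"/"no"/"off" set parsed in migrations.py above.

```python
import os

# Hypothetical opt-out: disable on-disk write-back of migrated config files.
# Migrations would still be applied in memory on load; only the rewrite of the
# files under the config directory is skipped.
os.environ["TIDY3D_CONFIG_AUTO_MIGRATE"] = "off"

import tidy3d  # noqa: E402  imported after setting the variable so the loader sees it
```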
Overview
- Introduces schema-versioned configuration files via a root config_version key and a new tidy3d.config.migrations registry, with automatic in-memory upgrades on load and optional post-validation write-back controlled by TIDY3D_CONFIG_AUTO_MIGRATE.
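As a rough illustration of how that registry would be extended: the register_migration decorator and the tomlkit document signature come from the snippets above, but the import path, the step number, and the key rename are assumptions, not part of this PR.

```python
import tomlkit

# Assumed import path; the diff only shows the decorator used inside
# tidy3d/config/migrations.py itself.
from tidy3d.config.migrations import register_migration


@register_migration(1)  # hypothetical v1 -> v2 step; this PR only defines v0 -> v1
def _migrate_v1_to_v2(document: tomlkit.TOMLDocument) -> None:
    # Steps receive the tomlkit document so comments and formatting survive the
    # rewrite; this example renames a made-up legacy key in place.
    web = document.get("web")
    if web is not None and "apikey" in web:
        web["api_key"] = web["apikey"]
        del web["apikey"]
```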
- Adds forward-compat handling for newer config versions: by default, best-effort parsing that filters and normalizes unknown or malformed sections (with warnings), or TIDY3D_CONFIG_FORWARD_COMPAT=strict to hard-fail. Config loading now also enforces deprecated_in/removed_in metadata by warning or raising during model build.
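A hypothetical section schema showing the shape of that deprecation metadata. The field names are invented and attaching the metadata via json_schema_extra is an assumption; the diff only shows deprecated_in, removed_in, and replaced_by being read from each field's schema extras, with a minimum two-version window enforced.

```python
from pydantic import BaseModel, Field


class WebSection(BaseModel):
    """Invented config section, used only to illustrate the metadata keys."""

    api_key: str = ""
    apikey: str = Field(
        default="",
        json_schema_extra={
            "deprecated_in": 1,  # version at which loading starts warning
            "removed_in": 3,     # must be >= deprecated_in + 2 per the window check
            "replaced_by": "api_key",
        },
    )
```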
- Extends the CLI with tidy3d config upgrade to preview diffs (--dry-run), enforce freshness in CI (--check), and upgrade specific profiles, and updates docs, tests, and the changelog accordingly.
Written by Cursor Bugbot for commit 2bd65c2. This will update automatically on new commits.
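A sketch of wiring the new command into a CI gate, using only the flags named above and assuming the installed console script is tidy3d; the non-zero exit when an upgrade is needed (or when a file targets a newer schema) follows from the ClickException paths in the diff.

```python
import subprocess

# --check is intended for CI: it should fail when any config file still needs an
# upgrade or targets a newer config_version than this tidy3d build supports.
result = subprocess.run(
    ["tidy3d", "config", "upgrade", "--check"],
    capture_output=True,
    text=True,
)
if result.returncode != 0:
    raise SystemExit(f"Config schema check failed:\n{result.stdout}{result.stderr}")
```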