Skip to content

Commit b939e40

Browse files
authored
feat(cli): add export option to readiness assessment and update documentation links (#488)
* feat(cli): add export option to readiness assessment and update documentation links
* feat: test exports command
* feat: allow export directory to be configured
* fix: build update
1 parent 755dc76 commit b939e40

File tree

12 files changed

+108
-61
lines changed

12 files changed

+108
-61
lines changed

.github/workflows/release.yaml

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -110,10 +110,6 @@ jobs:
110110
- target: x86_64-pc-windows-msvc
111111
os: windows-latest
112112
release_suffix: x86_64-windows
113-
# - target: i686-pc-windows-msvc
114-
# os: windows-latest
115-
# release_suffix: i686-windows
116-
# macOS
117113
- target: aarch64-apple-darwin
118114
os: macos-latest
119115
release_suffix: aarch64-osx
@@ -128,10 +124,10 @@ jobs:
128124
PYAPP_VERSION: v0.27.0
129125
PYAPP_PYTHON_VERSION: "3.12"
130126
PYAPP_PROJECT_FEATURES: oracle,postgres,mssql,mysql,server
131-
PYAPP_DISTRIBUTION_EMBED: "1"
132-
PYAPP_UV_ENABLED: "1"
127+
PYAPP_DISTRIBUTION_EMBED: "true"
128+
PYAPP_UV_ENABLED: "true"
133129
HATCH_BUILD_LOCATION: dist
134-
PYAPP_FULL_ISOLATION: "1"
130+
PYAPP_FULL_ISOLATION: "true"
135131
steps:
136132
- name: Checkout code
137133
uses: actions/checkout@v4

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -180,7 +180,8 @@ duck.db
180180
local.duckdb
181181
node_modules
182182
opdb__*
183-
183+
assessment.db
184+
assessment.db.wal*
184185
src/dma/static
185186
src/dma/templates/*
186187
.astro

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ repos:
3232

3333
# Ruff replaces black, flake8, autoflake and isort
3434
- repo: https://github.com/charliermarsh/ruff-pre-commit
35-
rev: "v0.9.10" # make sure this is always consistent with hatch configs
35+
rev: "v0.11.1" # make sure this is always consistent with hatch configs
3636
hooks:
3737
- id: ruff
3838
args: [--config, ./pyproject.toml]

src/dma/cli/_utils.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,4 +18,3 @@
1818
__all__ = ("console",)
1919

2020
console = get_console()
21-
console._width = 80

src/dma/cli/main.py

Lines changed: 35 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -44,11 +44,6 @@ def app(ctx: Context) -> None:
4444
"""Database Migration Assessment"""
4545

4646

47-
@app.command(
48-
name="collect",
49-
no_args_is_help=True,
50-
short_help="Collect data from a source database.",
51-
)
5247
@click.option(
5348
"--no-prompt",
5449
help="Do not prompt for confirmation before executing check.",
@@ -168,10 +163,11 @@ def _collect_data(
168163
database: str,
169164
collection_identifier: str | None,
170165
working_path: Path | None = None,
166+
export_path: Path | None = None,
167+
export_delimiter: str = "|",
171168
) -> None:
172-
working_path = working_path or Path("tmp/")
173169
_execution_id = f"{src_info.db_type}_{current_version!s}_{datetime.now(tz=timezone.utc).strftime('%y%m%d%H%M%S')}"
174-
with get_duckdb_connection(working_path) as local_db:
170+
with get_duckdb_connection(working_path=working_path, export_path=export_path) as local_db:
175171
canonical_query_manager = next(provide_canonical_queries(local_db=local_db, working_path=working_path))
176172
collection_extractor = CollectionExtractor(
177173
local_db=local_db,
@@ -182,7 +178,9 @@ def _collect_data(
182178
collection_identifier=collection_identifier,
183179
)
184180
collection_extractor.execute()
185-
collection_extractor.dump_database(working_path)
181+
if collection_extractor is not None and export_path is not None:
182+
collection_extractor.dump_database(export_path=export_path, delimiter=export_delimiter)
183+
console.rule("Assessment complete.", align="left")
186184

187185

188186
@app.command(
@@ -262,6 +260,24 @@ def _collect_data(
262260
required=False,
263261
show_default=False,
264262
)
263+
@click.option(
264+
"--export",
265+
"-e",
266+
help="Path to export the results.",
267+
default=None,
268+
type=click.Path(),
269+
required=False,
270+
show_default=False,
271+
)
272+
@click.option(
273+
"--working-path",
274+
"-wp",
275+
help="Path to store the temporary artifacts during assessment.",
276+
default=None,
277+
type=click.Path(),
278+
required=False,
279+
show_default=False,
280+
)
265281
def readiness_assessment(
266282
no_prompt: bool,
267283
db_type: Literal["mysql", "postgres", "mssql", "oracle"],
@@ -271,6 +287,8 @@ def readiness_assessment(
271287
port: int | None = None,
272288
database: str | None = None,
273289
collection_identifier: str | None = None,
290+
export: str | None = None,
291+
working_path: str | None = None,
274292
) -> None:
275293
"""Process a collection of advisor extracts."""
276294
print_app_info()
@@ -298,6 +316,8 @@ def readiness_assessment(
298316
),
299317
database=database,
300318
collection_identifier=collection_identifier,
319+
working_path=Path(working_path) if working_path else None,
320+
export_path=Path(export) if export else None,
301321
)
302322
else:
303323
console.rule("Skipping execution until input is confirmed", align="left")
@@ -309,10 +329,11 @@ def _readiness_check(
309329
database: str,
310330
collection_identifier: str | None,
311331
working_path: Path | None = None,
332+
export_path: Path | None = None,
333+
export_delimiter: str = "|",
312334
) -> None:
313-
working_path = working_path or Path("tmp/")
314335
_execution_id = f"{src_info.db_type}_{current_version!s}_{datetime.now(tz=timezone.utc).strftime('%y%m%d%H%M%S')}"
315-
with get_duckdb_connection(working_path) as local_db:
336+
with get_duckdb_connection(working_path=working_path, export_path=export_path) as local_db:
316337
workflow = ReadinessCheck(
317338
local_db=local_db,
318339
src_info=src_info,
@@ -325,8 +346,9 @@ def _readiness_check(
325346
console.print(Padding("", 1, expand=True))
326347
console.rule("Processing collected data.", align="left")
327348
workflow.print_summary()
328-
if workflow.collection_extractor is not None:
329-
workflow.collection_extractor.dump_database(working_path)
349+
if workflow.collection_extractor is not None and export_path is not None:
350+
workflow.collection_extractor.dump_database(export_path=export_path, delimiter=export_delimiter)
351+
console.rule("Assessment complete.", align="left")
330352

331353

332354
def print_app_info() -> None:
@@ -335,4 +357,4 @@ def print_app_info() -> None:
335357
table.add_row(
336358
f"[bold green]Google Database Migration Assessment[/] [cyan]version {current_version}[/]"
337359
)
338-
console.print(table, width=80)
360+
console.print(table)

src/dma/collector/dependencies.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -92,11 +92,13 @@ def provide_collection_query_manager(
9292

9393

9494
def provide_canonical_queries(
95-
local_db: duckdb.DuckDBPyConnection | None = None, working_path: Path | None = None
95+
local_db: duckdb.DuckDBPyConnection | None = None,
96+
working_path: Path | None = None,
97+
export_path: Path | None = None,
9698
) -> Generator[CanonicalQueryManager, None, None]:
9799
"""Construct repository and service objects for the request."""
98100
if local_db:
99101
yield CanonicalQueryManager(connection=local_db)
100102
else:
101-
with get_duckdb_connection(working_path=working_path) as db_connection:
103+
with get_duckdb_connection(working_path=working_path, export_path=export_path) as db_connection:
102104
yield CanonicalQueryManager(connection=db_connection)

src/dma/collector/workflows/readiness_check/_postgres/main.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -762,11 +762,12 @@ def table_for_target(migration_target: PostgresVariants) -> None:
762762
self.console.print(count_table)
763763
if migration_target == ALLOYDB:
764764
self.console.print(
765-
"Please refer to https://cloud.google.com/database-migration/docs/postgresql-to-alloydb/configure-source-database for more details."
765+
"Please refer to the Alloy DB documentation for more details: https://cloud.google.com/database-migration/docs/postgresql-to-alloydb/configure-source-database"
766766
)
767767
if migration_target == CLOUDSQL:
768768
self.console.print(
769-
"Please refer to https://cloud.google.com/database-migration/docs/postgres/configure-source-database for more details."
769+
"Please refer to the CloudSQL documentation for more details: https://cloud.google.com/database-migration/docs/postgres/configure-source-database",
770+
markup=True,
770771
)
771772

772773
for v in db_variants:

src/dma/lib/db/local.py

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,15 @@
2525

2626

2727
@contextmanager
28-
def get_duckdb_connection(working_path: Path | None = None) -> Iterator[duckdb.DuckDBPyConnection]:
28+
def get_duckdb_connection(
29+
working_path: Path | None = None, export_path: Path | None = None, database: str | None = None
30+
) -> Iterator[duckdb.DuckDBPyConnection]:
2931
"""Yield a new duckdb connections and automatically manages resource cleanup."""
32+
33+
if database is None and export_path is not None:
34+
database = f"{Path(export_path / 'assessment.db').absolute()!s}"
35+
elif database is None:
36+
database = ":memory:"
3037
if working_path is None:
3138
working_path = Path(tempfile.gettempdir())
3239
config = {
@@ -37,15 +44,11 @@ def get_duckdb_connection(working_path: Path | None = None) -> Iterator[duckdb.D
3744
}
3845
Path(working_path).mkdir(parents=True, exist_ok=True)
3946
with duckdb.connect(
40-
database=f"{working_path!s}/assessment.db",
47+
database=database,
4148
read_only=False,
4249
config=config,
4350
) as local_db:
4451
try:
45-
# for extension in extensions:
46-
"""
47-
local_db.execute("SET disabled_optimizers TO 'join_order'")
48-
"""
4952
yield local_db
5053
finally:
5154
local_db.close()

0 commit comments

Comments (0)