diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 2632ed4d..51d5a866 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -1,3 +1,5 @@ +permissions: + contents: read name: End-to-End Tests on: @@ -27,3 +29,37 @@ jobs: run: | chmod +x ./run_e2e_tests.sh ./run_e2e_tests.sh ${{ github.event.inputs.odoo_version }} + + e2e-migration: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools + python -m pip install . + + - name: Run migration e2e tests for Odoo ${{ github.event.inputs.odoo_version }} + run: | + chmod +x ./run_migration_e2e_tests.sh + ./run_migration_e2e_tests.sh ${{ github.event.inputs.odoo_version }} + + e2e-advanced: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools + python -m pip install . 
+ + - name: Run advanced e2e tests for Odoo ${{ github.event.inputs.odoo_version }} + run: | + chmod +x ./run_advanced_e2e_tests.sh + ./run_advanced_e2e_tests.sh ${{ github.event.inputs.odoo_version }} diff --git a/docker-compose.advanced.yml b/docker-compose.advanced.yml new file mode 100644 index 00000000..43b29c93 --- /dev/null +++ b/docker-compose.advanced.yml @@ -0,0 +1,46 @@ +services: + db-source: + image: postgres:15 + container_name: odf_adv_db_source + environment: + - POSTGRES_DB=postgres + - POSTGRES_PASSWORD=odoo + - POSTGRES_USER=odoo + + odoo-source: + image: odoo:16.0 + container_name: odf_adv_odoo_source + depends_on: + - db-source + ports: + - "8069:8069" + environment: + - HOST=db-source + - PORT=5432 + - USER=odoo + - PASSWORD=odoo + volumes: + - .:/odoo-data-flow:z + + db-target: + image: postgres:15 + container_name: odf_adv_db_target + environment: + - POSTGRES_DB=postgres + - POSTGRES_PASSWORD=odoo + - POSTGRES_USER=odoo + + odoo-target: + image: odoo:16.0 + container_name: odf_adv_odoo_target + depends_on: + - db-target + ports: + - "8070:8069" + environment: + - HOST=db-target + - PORT=5432 + - USER=odoo + - PASSWORD=odoo + volumes: + - .:/odoo-data-flow:z diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..ffd93ce4 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,25 @@ + +services: + db: + image: postgres:15 + environment: + - POSTGRES_DB=postgres + - POSTGRES_PASSWORD=odoo + - POSTGRES_USER=odoo + ports: + - "5432:5432" + + odoo: + image: odoo:16.0 + depends_on: + - db + ports: + - "8069:8069" + environment: + - PGHOST=db + - PGPORT=5432 + - PGUSER=odoo + - PGPASSWORD=odoo + + volumes: + - .:/odoo-data-flow:z diff --git a/docs/guides/server_to_server_migration.md b/docs/guides/server_to_server_migration.md index 186dd0d0..95917613 100644 --- a/docs/guides/server_to_server_migration.md +++ b/docs/guides/server_to_server_migration.md @@ -1,5 +1,11 @@ # Guide: Direct Server-to-Server Migration 
+```{warning} +This is an experimental feature. +While it has been unit-tested, it has not been extensively tested in a variety of real-world scenarios. +Please use with caution and always back up your data before proceeding. +``` + The `odoo-data-flow` library includes a powerful `migrate` command designed to perform a **direct, in-memory** data migration from one Odoo database to another. This is an advanced feature that chains together the export, transform, and import processes into a single step, without creating intermediate CSV files. > **When to use this?** This method is fast and convenient for simple, one-shot migrations where you don't need to inspect or modify the data mid-process. diff --git a/noxfile.py b/noxfile.py index d00df751..30a7577b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -150,7 +150,7 @@ def mypy(session: nox.Session) -> None: session.install("mypy") session.install("pytest") - session.install("httpx") + session.install("requests", "types-requests") session.install("-e", ".") session.run("mypy", *args) if not session.posargs: @@ -171,7 +171,7 @@ def tests(session: nox.Session) -> None: external=True, ) - session.install("pytest", "coverage", "pytest-mock") + session.install("pytest", "coverage", "pytest-mock", "polars") session.install("-e", ".") session.run("pytest", *session.posargs) @@ -197,7 +197,7 @@ def coverage(session: nox.Session) -> None: "coverage[toml]", "pytest-cov", "pytest-mock", - "httpx", + "requests", "rich", "polars", "click", diff --git a/pyproject.toml b/pyproject.toml index b0059280..00bfee78 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ version = "0.0.1" description = "Odoo Data Flow" readme = "README.md" requires-python = ">=3.9" -license = { file = "LICENSE" } +license = "LGPL-3.0-only" authors = [ { name = "bosd", email = "c5e2fd43-d292-4c90-9d1f-74ff3436329a@anonaddy.me" }, ] diff --git a/run_advanced_e2e_tests.sh b/run_advanced_e2e_tests.sh new file mode 100755 index 00000000..9a42d07a --- 
/dev/null +++ b/run_advanced_e2e_tests.sh @@ -0,0 +1,124 @@ +#!/bin/bash +set -e + +ODOO_VERSION=$1 +if [ -z "$ODOO_VERSION" ]; then + echo "Usage: $0 <odoo_version>" + exit 1 +fi + +COMPOSE_FILE="docker-compose.advanced.yml" + +# Function to handle failures +handle_failure() { + echo "--- An error occurred. Dumping container logs. ---" + docker-compose -f $COMPOSE_FILE logs + echo "--- Tearing down containers ---" + docker-compose -f $COMPOSE_FILE down --volumes + exit 1 +} + +# Trap errors +trap 'handle_failure' ERR + + +# Function to replace Odoo version in compose file +replace_odoo_version() { + sed "s/image: odoo:.*/image: odoo:$ODOO_VERSION/g" $COMPOSE_FILE > docker-compose.advanced.yml.tmp && mv docker-compose.advanced.yml.tmp $COMPOSE_FILE +} + +# Clean up previous run +echo "--- Cleaning up previous run ---" +docker-compose -f $COMPOSE_FILE down --volumes || true +rm -f testdata/res_partner_advanced.csv +rm -f testdata/res_partner_category_advanced.csv +rm -rf .odf_cache +mkdir -p conf conf_target testdata + +# Replace Odoo version in compose file +replace_odoo_version + +# Start Odoo containers +echo "--- Starting containers for Odoo $ODOO_VERSION ---" +docker-compose -f $COMPOSE_FILE up -d --build + +# Wait for databases to be ready +echo "--- Waiting for databases to be ready ---" +sleep 15 # Initial wait + +echo "Waiting for Odoo Source to be ready..." +timeout 600 bash -c 'until curl -s http://localhost:8069/web/login > /dev/null; do echo -n "."; sleep 5; done' +echo "Odoo Source is ready!" + +echo "Waiting for Odoo Target to be ready..." +timeout 600 bash -c 'until curl -s http://localhost:8070/web/login > /dev/null; do echo -n "."; sleep 5; done' +echo "Odoo Target is ready!" 
+ +# Create and initialize databases +echo "--- Creating and initializing databases ---" +docker-compose -f $COMPOSE_FILE exec -T odoo-source odoo -d odoo -i base --stop-after-init --db_host=db-source --db_user=odoo --db_password=odoo +docker-compose -f $COMPOSE_FILE exec -T odoo-target odoo -d odoo -i base --stop-after-init --db_host=db-target --db_user=odoo --db_password=odoo + + +# Install dependencies in containers +echo "--- Installing dependencies ---" +docker-compose -f $COMPOSE_FILE exec -T --user root odoo-source bash -c "apt-get update && apt-get install -y git && python3 -m pip install --upgrade pip setuptools && pip install /odoo-data-flow" +docker-compose -f $COMPOSE_FILE exec -T --user root odoo-target bash -c "apt-get update && apt-get install -y git && python3 -m pip install --upgrade pip setuptools && pip install /odoo-data-flow" + +# Seed the source database +echo "--- Seeding source database ---" +docker-compose -f $COMPOSE_FILE exec -T odoo-source python3 /odoo-data-flow/tests/e2e/seed_advanced_database.py odoo + +# Create connection configs +cat << EOF > conf/connection.conf +[Connection] +hostname = localhost +port = 8069 +login = admin +password = admin +database = odoo +protocol = jsonrpc +EOF + +cat << EOF > conf_target/connection.conf +[Connection] +hostname = localhost +port = 8070 +login = admin +password = admin +database = odoo +protocol = jsonrpc +EOF + +# Run the export for categories +echo "--- Exporting categories ---" +docker-compose -f $COMPOSE_FILE exec -T --user root odoo-source bash -c "chown -R odoo:odoo /odoo-data-flow" +docker-compose -f $COMPOSE_FILE exec -T --user odoo odoo-source bash -c "cd /odoo-data-flow && odoo-data-flow export --config conf/connection.conf --model res.partner.category --domain \"[('name', 'like', 'Test Category%')]\" --fields \"id,name\" --output testdata/res_partner_category_advanced.csv" + +# Run the export for partners +echo "--- Exporting partners ---" +docker-compose -f $COMPOSE_FILE exec -T 
--user root odoo-source bash -c "chown -R odoo:odoo /odoo-data-flow" +docker-compose -f $COMPOSE_FILE exec -T --user odoo odoo-source bash -c "cd /odoo-data-flow && odoo-data-flow export --config conf/connection.conf --model res.partner --domain \"[('name', 'like', 'Advanced Test Partner%')]\" --fields \"id,name,category_id/.id\" --output testdata/res_partner_advanced.csv" + +# Modify the partner export header for import +docker-compose -f $COMPOSE_FILE exec -T --user odoo odoo-source bash -c "cd /odoo-data-flow && sed -i 's/category_id\/.id/category_id/g' testdata/res_partner_advanced.csv" + +# Run the import for categories into the target +echo "--- Importing categories into target ---" +docker-compose -f $COMPOSE_FILE exec -T --user root odoo-target bash -c "chown -R odoo:odoo /odoo-data-flow" +docker-compose -f $COMPOSE_FILE exec -T --user odoo odoo-target bash -c "cd /odoo-data-flow && odoo-data-flow import --config conf_target/connection.conf --file testdata/res_partner_category_advanced.csv" + +# Run the import for partners into the target +echo "--- Importing partners into target ---" +docker-compose -f $COMPOSE_FILE exec -T --user root odoo-target bash -c "chown -R odoo:odoo /odoo-data-flow" +docker-compose -f $COMPOSE_FILE exec -T --user odoo odoo-target bash -c "cd /odoo-data-flow && odoo-data-flow import --config conf_target/connection.conf --file testdata/res_partner_advanced.csv --strategy relational" + +# Verify the data in the target database +echo "--- Verifying data in target database ---" +docker-compose -f $COMPOSE_FILE exec -T odoo-target python3 /odoo-data-flow/tests/e2e/verify_advanced_data.py odoo + +# Tear down the containers +echo "--- Tearing down containers ---" +docker-compose -f $COMPOSE_FILE down --volumes + +echo "--- Advanced e2e tests completed successfully! 
---" diff --git a/run_migration_e2e_tests.sh b/run_migration_e2e_tests.sh new file mode 100755 index 00000000..4df780e8 --- /dev/null +++ b/run_migration_e2e_tests.sh @@ -0,0 +1,160 @@ +#!/bin/bash + +set -e # Exit immediately if a command exits with a non-zero status. + +# Argument 1 is the Odoo version +ODOO_VERSION=$1 + +if [ -z "$ODOO_VERSION" ]; then + echo "Usage: $0 " + exit 1 +fi + +echo "--- Starting migration e2e tests for Odoo version $ODOO_VERSION ---" + +# Cleanup any previous runs +docker compose down -v || true + +# Use the Odoo version to create a dynamic docker-compose.yml +cat << EOF > docker-compose.yml + +services: + db: + image: postgres:15 + environment: + - POSTGRES_DB=postgres + - POSTGRES_PASSWORD=odoo + - POSTGRES_USER=odoo + ports: + - "5432:5432" + + odoo: + image: odoo:$ODOO_VERSION + depends_on: + - db + ports: + - "8069:8069" + environment: + - PGHOST=db + - PGPORT=5432 + - PGUSER=odoo + - PGPASSWORD=odoo + + volumes: + - .:/odoo-data-flow:z +EOF + +cat docker-compose.yml + +echo "--- Starting containers... ---" +docker compose up -d + +# 2. Wait for Odoo to be ready +echo "Waiting for Odoo to be ready..." +TIMEOUT=300 # 5 minutes timeout +START_TIME=$(date +%s) +until docker compose logs odoo | grep -q "HTTP service (werkzeug) running"; do + CURRENT_TIME=$(date +%s) + ELAPSED_TIME=$((CURRENT_TIME - START_TIME)) + if [ $ELAPSED_TIME -ge $TIMEOUT ]; then + echo "Timeout: Odoo did not become ready within $TIMEOUT seconds." + echo "Odoo container logs:" + docker compose logs odoo + exit 1 + fi + echo -n "." + sleep 5 +done +echo "Odoo is ready!" + + +echo "Checking Odoo accessibility from host..." +curl -v http://localhost:8069/web/login + +# 3. Create the source and target databases +docker compose exec -T odoo odoo -d odoo_data_flow_source_db -i base,contacts --without-demo=True --stop-after-init + +# Wait for source database to be ready +echo "Waiting for source database to be ready..." 
+TIMEOUT=300 # 5 minutes timeout +START_TIME=$(date +%s) +until docker compose exec -T odoo psql -h db -U odoo -d odoo_data_flow_source_db -c "SELECT 1" > /dev/null 2>&1; do + CURRENT_TIME=$(date +%s) + ELAPSED_TIME=$((CURRENT_TIME - START_TIME)) + if [ $ELAPSED_TIME -ge $TIMEOUT ]; then + echo "Timeout: Source database did not become ready within $TIMEOUT seconds." + exit 1 + fi + echo -n "." + sleep 2 +done +echo "Source database is ready!" + +docker compose exec -T odoo odoo -d odoo_data_flow_target_db -i base,contacts --without-demo=True --stop-after-init + +# Wait for target database to be ready +echo "Waiting for target database to be ready..." +TIMEOUT=300 # 5 minutes timeout +START_TIME=$(date +%s) +until docker compose exec -T odoo psql -h db -U odoo -d odoo_data_flow_target_db -c "SELECT 1" > /dev/null 2>&1; do + CURRENT_TIME=$(date +%s) + ELAPSED_TIME=$((CURRENT_TIME - START_TIME)) + if [ $ELAPSED_TIME -ge $TIMEOUT ]; then + echo "Timeout: Target database did not become ready within $TIMEOUT seconds." + exit 1 + fi + echo -n "." + sleep 2 +done +echo "Target database is ready!" + +# 4. Seed the source database +echo "--- Seeding the source database... ---" +docker compose exec -T odoo python3 /odoo-data-flow/tests/e2e/seed_database.py odoo_data_flow_source_db + +# Create connection configs +mkdir -p conf +cat << EOF > conf/source.conf +[Connection] +hostname = localhost +port = 8069 +login = admin +password = admin +database = odoo_data_flow_source_db +uid = 2 +protocol = jsonrpc +EOF + +cat << EOF > conf/target.conf +[Connection] +hostname = localhost +port = 8069 +login = admin +password = admin +database = odoo_data_flow_target_db +uid = 2 +protocol = jsonrpc +EOF + +# Install odoo-data-flow +uv pip install -e . + +# 5. Run odoo-data-flow migrate +echo "--- Running migration... 
---" +./.venv/bin/python3 -m odoo_data_flow migrate \ + --config-export conf/source.conf \ + --config-import conf/target.conf \ + --model res.partner \ + --fields "id,name,email,country_id" \ + --domain "[('name', 'like', 'Test Partner')]" + + + +# 6. Verify the data +echo "--- Verifying the data... ---" +docker compose exec -T odoo python3 /odoo-data-flow/tests/e2e/verify_migration_data.py odoo_data_flow_target_db + +# Cleanup containers +docker compose down -v + +echo "--- Migration e2e tests completed for Odoo version $ODOO_VERSION ---" diff --git a/src/odoo_data_flow/__main__.py b/src/odoo_data_flow/__main__.py index be014b18..4e4a640d 100644 --- a/src/odoo_data_flow/__main__.py +++ b/src/odoo_data_flow/__main__.py @@ -476,6 +476,9 @@ def url_to_image_cmd(**kwargs: Any) -> None: ) def migrate_cmd(**kwargs: Any) -> None: """Performs a direct server-to-server data migration.""" + if "fields" in kwargs and isinstance(kwargs["fields"], str): + kwargs["fields"] = [field.strip() for field in kwargs["fields"].split(",")] + if kwargs.get("mapping"): try: parsed_mapping = ast.literal_eval(kwargs["mapping"]) diff --git a/src/odoo_data_flow/export_threaded.py b/src/odoo_data_flow/export_threaded.py index 310d175b..ef63c567 100755 --- a/src/odoo_data_flow/export_threaded.py +++ b/src/odoo_data_flow/export_threaded.py @@ -140,7 +140,9 @@ def _format_batch_results( else: base_field = field.split("/")[0].replace(".id", "id") value = record.get(base_field) - if field == ".id": + if field == "id": + new_record["id"] = record.get("id") + elif field == ".id": new_record[".id"] = record.get("id") elif field.endswith("/.id"): new_record[field] = ( diff --git a/src/odoo_data_flow/importer.py b/src/odoo_data_flow/importer.py index d867512f..efa8eeff 100755 --- a/src/odoo_data_flow/importer.py +++ b/src/odoo_data_flow/importer.py @@ -5,7 +5,6 @@ import tasks to the multi-threaded `import_threaded` module. 
""" -import csv import json import os import re @@ -347,6 +346,7 @@ def run_import_for_migration( data: list[list[Any]], worker: int = 1, batch_size: int = 10, + encoding: str = "utf-8", ) -> None: """Orchestrates the data import process from in-memory data. @@ -365,19 +365,21 @@ def run_import_for_migration( log.info("Starting data import from in-memory data...") tmp_path = "" try: + # Use polars to write the DataFrame to a temporary CSV file + df = pl.DataFrame(data, schema=header) with tempfile.NamedTemporaryFile( - mode="w+", delete=False, suffix=".csv", newline="" + mode="w+b", delete=False, suffix=".csv" ) as tmp: - writer = csv.writer(tmp) - writer.writerow(header) - writer.writerows(data) + df.write_csv(tmp, separator=";") tmp_path = tmp.name + log.info(f"In-memory data written to temporary file: {tmp_path}") import_threaded.import_data( config_file=config, model=model, unique_id_field="id", # Migration import assumes 'id' file_csv=tmp_path, + separator=";", context={"tracking_disable": True}, max_connection=int(worker), batch_size=int(batch_size), diff --git a/src/odoo_data_flow/lib/preflight.py b/src/odoo_data_flow/lib/preflight.py index 60eb40ce..22f4e213 100644 --- a/src/odoo_data_flow/lib/preflight.py +++ b/src/odoo_data_flow/lib/preflight.py @@ -210,6 +210,24 @@ def _get_odoo_fields(config: str, model: str) -> Optional[dict[str, Any]]: model_obj = connection.get_model(model) odoo_fields = cast(dict[str, Any], model_obj.fields_get()) + # Supplement with ir.model.fields for m2m relation_table + ir_model_fields = connection.get_model("ir.model.fields") + m2m_fields = [ + field_name + for field_name, field_attrs in odoo_fields.items() + if field_attrs.get("type") == "many2many" + ] + if m2m_fields: + field_details = ir_model_fields.search_read( + [("model", "=", model), ("name", "in", m2m_fields)], + ["name", "relation", "relation_field", "relation_table"], + ) + for detail in field_details: + field_name = detail["name"] + for key in ["relation", 
"relation_field", "relation_table"]: + if detail.get(key): + odoo_fields[field_name][key] = detail[key] + # 3. Save the result to the cache for next time cache.save_fields_get_cache(config, model, odoo_fields) return odoo_fields @@ -301,14 +319,14 @@ def _plan_deferrals_and_strategies( strategies[clean_field_name] = { "strategy": "direct_relational_import", "relation_table": field_info["relation_table"], - "relation_field": field_info["relation_field"], + "relation_field": field_info.get("relation_field", "id"), "relation": field_info["relation"], } else: strategies[clean_field_name] = { "strategy": "write_tuple", "relation_table": field_info["relation_table"], - "relation_field": field_info["relation_field"], + "relation_field": field_info.get("relation_field", "id"), "relation": field_info["relation"], } elif is_o2m: diff --git a/src/odoo_data_flow/lib/relational_import.py b/src/odoo_data_flow/lib/relational_import.py index 467c4a4f..6ab1a61c 100644 --- a/src/odoo_data_flow/lib/relational_import.py +++ b/src/odoo_data_flow/lib/relational_import.py @@ -31,32 +31,26 @@ def _resolve_related_ids( f"Falling back to slow XML-ID resolution." ) connection = conf_lib.get_connection_from_config(config_file=config) - if not connection.is_connected(): - log.error("Cannot perform XML-ID lookup: Odoo connection failed.") - return None id_list = external_ids.drop_nulls().unique().to_list() log.info(f"Resolving {len(id_list)} unique external IDs for '{related_model}'...") # Split full XML-ID 'module.identifier' into components - split_ids = [(i.split(".", 1)[0], i.split(".", 1)[1]) for i in id_list if "." in i] - invalid_ids = [i for i in id_list if "." not in i] - if invalid_ids: - log.warning( - f"Skipping {len(invalid_ids)} invalid external_ids for model " - f"'{related_model}' (must be in 'module.identifier' format)." - ) + split_ids = [] + for i in id_list: + if "." 
in i: + split_ids.append((i.split(".", 1)[0], i.split(".", 1)[1])) + else: + split_ids.append(("__export__", i)) + + if not split_ids: + log.warning("No valid external IDs found to resolve.") + return None - domain = [ - "&", - ("module", "=", split_ids[0][0]), - ("name", "=", split_ids[0][1]), - ] - for module, name in split_ids[1:]: - domain.insert(0, "|") - domain.append("&") - domain.append(("module", "=", module)) - domain.append(("name", "=", name)) + domain = [] + for module, name in split_ids: + domain.extend(["|", "&", ("module", "=", module), ("name", "=", name)]) + domain = domain[1:] try: data_model = connection.get_model("ir.model.data") diff --git a/src/odoo_data_flow/lib/transform.py b/src/odoo_data_flow/lib/transform.py index 4f818eeb..c1a81137 100644 --- a/src/odoo_data_flow/lib/transform.py +++ b/src/odoo_data_flow/lib/transform.py @@ -283,11 +283,14 @@ def split(self, split_fun: Callable[..., Any]) -> dict[Any, "Processor"]: def get_o2o_mapping(self) -> dict[str, MapperRepr]: """Generates a direct 1-to-1 mapping dictionary.""" - return { + mapping = { str(column): MapperRepr(f"mapper.val('{column}')", mapper.val(column)) for column in self.dataframe.columns if column } + if "id" not in mapping: + mapping["id"] = MapperRepr("mapper.val('id')", mapper.val("id")) + return mapping def process( self, diff --git a/tests/e2e/seed_advanced_database.py b/tests/e2e/seed_advanced_database.py new file mode 100644 index 00000000..2694662e --- /dev/null +++ b/tests/e2e/seed_advanced_database.py @@ -0,0 +1,127 @@ +"""This file handles the seeding of test data for the advanced e2e tests.""" + +import logging +import os +import sys + +import odoo +from odoo.tools import config + +_logger = logging.getLogger(__name__) + + +def get_baseline_partner_count(env): + """Get the baseline count of partners.""" + partner_model = env["res.partner"] + return partner_model.search_count([]) + + +def seed_database(db_name: str, num_partners: int = 550) -> None: + """Seed 
partner data for advanced e2e test. + + Args: + db_name: The name of the database to seed. + num_partners: The number of partners to create. + """ + config["db_host"] = os.environ.get("HOST") + config["db_port"] = int(os.environ.get("PORT", 5432)) + config["db_user"] = os.environ.get("USER") + config["db_password"] = os.environ.get("PASSWORD") + + registry = odoo.registry(db_name) + with odoo.api.Environment.manage(), registry.cursor() as cr: + env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {}) + + _logger.info("Starting to seed the database with advanced partner data...") + + # 1. Create Partner Categories with External IDs + category_model = env["res.partner.category"] + ir_model_data_model = env["ir.model.data"] + + categories_to_create = [ + {"name": "VIP", "xml_id": "__export__.partner_category_vip"}, + {"name": "Standard", "xml_id": "__export__.partner_category_standard"}, + ] + categories = [] + for cat_data in categories_to_create: + category = category_model.create({"name": cat_data["name"]}) + ir_model_data_model.create( + { + "name": cat_data["xml_id"].split(".")[1], + "module": cat_data["xml_id"].split(".")[0], + "model": "res.partner.category", + "res_id": category.id, + } + ) + categories.append(category) + _logger.info(f"Successfully created {len(categories)} partner categories.") + + # 2. Create Parent Companies + partner_model = env["res.partner"] + num_companies = 10 + companies_to_create = [] + for i in range(num_companies): + companies_to_create.append( + { + "name": f"Parent Company {i + 1}", + "is_company": True, + "email": f"company.{i + 1}@example.com", + } + ) + parent_companies = partner_model.create(companies_to_create) + _logger.info(f"Successfully created {len(parent_companies)} parent companies.") + + # 3. 
Create Child Partners + child_partners_to_create = [] + for i in range(num_partners): + child_partners_to_create.append( + { + "name": f"Child Partner {i + 1}", + "is_company": False, + "email": f"child.partner.{i + 1}@example.com", + } + ) + child_partners = partner_model.create(child_partners_to_create) + _logger.info(f"Successfully created {len(child_partners)} child partners.") + + # 4. Create Partners with Parent, Categories, and Children + partners_to_create = [] + for i in range(num_partners): + parent_company = parent_companies[i % num_companies] + partner_categories = [] + if i % 2 == 0: + partner_categories.append(categories[0].id) + if i % 3 == 0: + partner_categories.append(categories[1].id) + + partners_to_create.append( + { + "name": f"Test Partner {i + 1}", + "is_company": False, + "email": f"test.partner.{i + 1}@example.com", + "parent_id": parent_company.id, + "category_id": [(6, 0, partner_categories)], + "child_ids": [(6, 0, [child_partners[i].id])], + } + ) + + partner_model.create(partners_to_create) + _logger.info(f"Successfully created {num_partners} partner records.") + + +if __name__ == "__main__": + try: + db_name = sys.argv[1] + except IndexError: + _logger.error( + "Database name not provided. 
" + "Usage: python3 seed_advanced_database.py [num_partners]" + ) + sys.exit(1) + + try: + num_partners_arg = int(sys.argv[2]) + except (IndexError, ValueError): + num_partners_arg = 550 + + seed_database(db_name, num_partners_arg) diff --git a/tests/e2e/seed_database.py b/tests/e2e/seed_database.py index 2f8e2cdd..d50e1d84 100644 --- a/tests/e2e/seed_database.py +++ b/tests/e2e/seed_database.py @@ -28,6 +28,18 @@ def seed_database(db_name: str) -> None: _logger.info("Starting to seed the database with partner data...") partner_model = env["res.partner"] + country_model = env["res.country"] + + # Get the ID for 'United States' + us_country = country_model.search([("code", "=", "US")]) + if not us_country: + _logger.error("Country 'United States' not found. Cannot seed relational data.") + # Optionally create it if it doesn't exist + # us_country = country_model.create({'name': 'United States', 'code': 'US'}) + # _logger.info("Created country 'United States'.") + sys.exit(1) + + us_country_id = us_country.id num_partners = 1000 partners_to_create = [] @@ -37,12 +49,15 @@ def seed_database(db_name: str) -> None: "name": f"Test Partner {i + 1}", "is_company": True, "email": f"test.partner.{i + 1}@example.com", + "country_id": us_country_id, } ) partner_model.create(partners_to_create) - _logger.info(f"Successfully created {num_partners} partner records.") + _logger.info( + f"Successfully created {num_partners} partner records with country 'United States'." 
+ ) # Explicitly commit and close the cursor cr.commit() diff --git a/tests/e2e/verify_advanced_data.py b/tests/e2e/verify_advanced_data.py new file mode 100644 index 00000000..fb49ccc7 --- /dev/null +++ b/tests/e2e/verify_advanced_data.py @@ -0,0 +1,152 @@ +"""This file handles the data verification for the advanced e2e tests.""" + +import logging +import os +import sys + +import odoo +from odoo.tools import config + +_logger = logging.getLogger(__name__) + + +def _get_baseline_partner_count(env): + """Get the baseline count of partners.""" + partner_model = env["res.partner"] + return partner_model.search_count([]) + + +def _verify_categories(env): + """Verify the partner categories.""" + category_model = env["res.partner.category"] + categories = category_model.search([]) + if len(categories) != 2: + raise AssertionError( + f"Expected 2 partner categories, but found {len(categories)}." + ) + _logger.info("Partner categories verification successful.") + + +def _verify_companies(env, baseline_company_count): + """Verify the parent companies.""" + partner_model = env["res.partner"] + parent_companies = partner_model.search([("is_company", "=", True)]) + if len(parent_companies) != baseline_company_count + 10: + raise AssertionError( + f"Expected {baseline_company_count + 10} parent companies, " + f"but found {len(parent_companies)}." + ) + _logger.info("Parent companies verification successful.") + + +def _verify_partners(env, num_partners, baseline_partner_count): + """Verify the partners.""" + partner_model = env["res.partner"] + partners = partner_model.search([("name", "like", "Test Partner")]) + if len(partners) != num_partners: + raise AssertionError( + f"Expected {num_partners} partners, but found {len(partners)}." + ) + _logger.info(f"Partners verification successful. 
Found {len(partners)} partners.")
+
+    total_partners = partner_model.search_count([])
+    expected_total = baseline_partner_count + num_partners * 2 + 10
+    if total_partners != expected_total:
+        raise AssertionError(
+            f"Expected {expected_total} total partners, but found {total_partners}."
+        )
+    _logger.info("Total partner count verification successful.")
+    return partners
+
+
+def _verify_child_partners(env, num_partners):
+    """Verify the child partners."""
+    partner_model = env["res.partner"]
+    child_partners = partner_model.search([("name", "like", "Child Partner")])
+    if len(child_partners) != num_partners:
+        raise AssertionError(
+            f"Expected {num_partners} child partners, but found {len(child_partners)}."
+        )
+    _logger.info(
+        f"Child partners verification successful. Found {len(child_partners)} partners."
+    )
+
+
+def _verify_relationships(partners):
+    """Verify the relationships between partners."""
+    for partner in partners:
+        # Verify parent
+        if not partner.parent_id:
+            raise AssertionError(f"Partner {partner.name} has no parent.")
+
+        # Verify categories
+        has_vip = any(cat.name == "VIP" for cat in partner.category_id)
+        has_standard = any(cat.name == "Standard" for cat in partner.category_id)
+
+        partner_index = int(partner.name.split(" ")[-1]) - 1
+        if partner_index % 2 == 0 and not has_vip:
+            raise AssertionError(f"Partner {partner.name} should have VIP category.")
+        if partner_index % 3 == 0 and not has_standard:
+            raise AssertionError(
+                f"Partner {partner.name} should have Standard category."
+            )
+
+        # Verify children
+        if len(partner.child_ids) != 1:
+            raise AssertionError(
+                f"Partner {partner.name} should have 1 child, "
+                f"but has {len(partner.child_ids)}."
+            )
+        if partner.child_ids[0].name != f"Child Partner {partner_index + 1}":
+            raise AssertionError(
+                f"Partner {partner.name} has wrong child: {partner.child_ids[0].name}."
+            )
+    _logger.info("Partner relationships verification successful.")
+
+
+def verify_data(db_name: str, num_partners: int = 550) -> None:
+    """Verify the partner data for the advanced e2e test.
+
+    Args:
+        db_name: The name of the database to verify.
+        num_partners: The expected number of partners.
+    """
+    _logger.info("Verifying advanced partner data...")
+
+    config["db_host"] = os.environ.get("HOST")
+    config["db_port"] = int(os.environ.get("PORT", 5432))
+    config["db_user"] = os.environ.get("USER")
+    config["db_password"] = os.environ.get("PASSWORD")
+
+    registry = odoo.registry(db_name)
+    with registry.cursor() as cr:  # Environment.manage() was removed in Odoo 16
+        env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
+
+        baseline_partner_count = _get_baseline_partner_count(env)
+        baseline_company_count = env["res.partner"].search_count(
+            [("is_company", "=", True)]
+        )
+
+        _verify_categories(env)
+        _verify_companies(env, baseline_company_count)
+        partners = _verify_partners(env, num_partners, baseline_partner_count)
+        _verify_child_partners(env, num_partners)
+        _verify_relationships(partners)
+
+
+if __name__ == "__main__":
+    try:
+        db_name = sys.argv[1]
+    except IndexError:
+        _logger.error(
+            "Database name not provided. "
+            "Usage: python3 verify_advanced_data.py <db_name> [num_partners]"
+        )
+        sys.exit(1)
+
+    try:
+        num_partners_arg = int(sys.argv[2])
+    except (IndexError, ValueError):
+        num_partners_arg = 550
+
+    verify_data(db_name, num_partners_arg)
diff --git a/tests/e2e/verify_migration_data.py b/tests/e2e/verify_migration_data.py
new file mode 100644
index 00000000..7249dc87
--- /dev/null
+++ b/tests/e2e/verify_migration_data.py
@@ -0,0 +1,88 @@
+"""This file handles the data verification for the migration e2e tests."""
+
+import logging
+import sys
+
+import odoo
+
+_logger = logging.getLogger(__name__)
+
+
+def verify_migration(db_name: str) -> None:
+    """Verify the partner data after migration."""
+    print("Verifying migrated partner data...")
+
+    registry = odoo.sql_db.db_connect(db_name)
+    cr = registry.cursor()
+    try:
+        env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
+
+        # 1. Verify country 'United States' exists
+        country_model = env["res.country"]
+        us_country = country_model.search([("code", "=", "US")])
+        if not us_country:
+            raise AssertionError(
+                "Country 'United States' not found in the target database."
+            )
+        us_country_id = us_country.id
+
+        # 2. Verify partner count
+        partner_model = env["res.partner"]
+        total_partners = partner_model.search_count([])
+        expected_count = 1002
+        if total_partners != expected_count:
+            raise AssertionError(
+                f"Expected {expected_count} partner records in the target database, "
+                f"but found {total_partners}. Migration might have failed."
+            )
+        print(f"Total partner count is correct: {total_partners}")
+
+        # 3. Verify the migrated data
+        migrated_partners = partner_model.search_read(
+            [("name", "like", "Test Partner")], ["name", "country_id"]
+        )
+
+        migrated_count = len(migrated_partners)
+        expected_migrated_count = 1000
+        if migrated_count != expected_migrated_count:
+            raise AssertionError(
+                f"Expected {expected_migrated_count} migrated partner records, "
+                f"but found {migrated_count}."
+            )
+        print(f"Found {migrated_count} migrated partners.")
+
+        # 4. Verify the relational field (country_id)
+        for partner in migrated_partners:
+            if not partner["country_id"]:
+                raise AssertionError(
+                    f"Partner '{partner['name']}' is missing country information."
+                )
+            if partner["country_id"][0] != us_country_id:
+                raise AssertionError(
+                    f"Partner '{partner['name']}' has incorrect country. "
+                    f"Expected 'United States' (ID: {us_country_id}), "
+                    f"but got ID: {partner['country_id'][0]}."
+                )
+
+        print("All migrated partners have the correct country.")
+
+        # 5. Final success message
+        print(
+            f"Verification successful: Found {total_partners} total partner records, "
+            f"and all {migrated_count} migrated partners have the correct relational data."
+        )
+    finally:
+        # Always release the cursor, even when a verification step fails.
+        cr.close()
+
+
+if __name__ == "__main__":
+    try:
+        db_name = sys.argv[1]
+    except IndexError:
+        _logger.error(
+            "Database name not provided. Usage: python3 verify_migration_data.py <db_name>"
+        )
+        sys.exit(1)
+
+    verify_migration(db_name)
diff --git a/tests/test_strategy_dispatcher.py b/tests/test_strategy_dispatcher.py
new file mode 100644
index 00000000..028f1276
--- /dev/null
+++ b/tests/test_strategy_dispatcher.py
@@ -0,0 +1,64 @@
+"""Tests for the strategy dispatcher."""
+
+from unittest.mock import MagicMock
+
+import polars as pl
+import pytest
+
+from odoo_data_flow.lib import preflight
+
+
+@pytest.fixture
+def mock_odoo_adapter():
+    """Fixture for a mocked OdooAdapter."""
+    adapter = MagicMock()
+    adapter.get_fields_metadata.return_value = {
+        "name": {"type": "char"},
+        "parent_id": {"type": "many2one", "relation": "res.partner"},
+        "category_id": {
+            "type": "many2many",
+            "relation": "res.partner.category",
+            "relation_table": "res_partner_res_partner_category_rel",
+            "relation_field": "partner_id",
+        },
+        "child_ids": {"type": "one2many", "relation": "res.partner"},
+    }
+    return adapter
+
+
+def test_strategy_dispatching(mock_odoo_adapter, tmp_path):
+    """Test that the strategy dispatcher assigns the correct strategies."""
+    df = pl.DataFrame(
+        {
+            "id": [1, 2],
+            "name": ["test1", "test2"],
+            "parent_id/id": ["parent1", "parent2"],
+            "category_id/id": ["cat1,cat2", "cat2"],
+            "child_ids/id": ["child1", "child2"],
+        }
+    )
+    filename = tmp_path / "test.csv"
+    df.write_csv(filename)
+
+    import_plan = {"deferred_fields": [], "strategies": {}}
+    preflight._plan_deferrals_and_strategies(
+        header=df.columns,
+        odoo_fields=mock_odoo_adapter.get_fields_metadata(),
+        model="res.partner",
+        filename=str(filename),
+        separator=",",
+        import_plan=import_plan,
+    )
+
+    assert "parent_id" in import_plan["deferred_fields"]
+    assert "category_id" in import_plan["deferred_fields"]
+    assert "child_ids" in import_plan["deferred_fields"]
+
+    # m2o self is handled by sorting, not a specific strategy here
+    assert "parent_id" not in import_plan["strategies"]
+
+    assert "category_id" in import_plan["strategies"]
+    assert import_plan["strategies"]["category_id"]["strategy"] == "write_tuple"
+
+    assert "child_ids" in import_plan["strategies"]
+    assert import_plan["strategies"]["child_ids"]["strategy"] == "write_o2m_tuple"