Compare commits

38 Commits

lcc-credit ... eb95a8152a

Commits (SHA1):

- eb95a8152a
- febe877043
- f07a654c22
- 60d25124c4
- 67c2d5a061
- e17db5d062
- 89cc3be05e
- 22d5b6af7e
- 00c12769bc
- 4bdedf3759
- 1027428bfd
- 01e23cc92c
- d3f0998036
- 914ae34f12
- 176fa0957c
- 8061d52d25
- 3fe2e93d3d
- 526b27fdec
- 266842585b
- 30909a3b28
- 4d7933cef0
- 4a3d3b238f
- 59fc39620d
- 7d001ff163
- da59dffcfa
- d8b332762b
- 023deeea5b
- 743d1ce831
- 469fb42e48
- f18d50cb94
- afeaa3d00f
- 93fc10395f
- 458af6a795
- 61733b04a3
- 385b9bc751
- f432b4c75e
- 21028149be
- 972e6c7b26
.gitignore (vendored, new file, 1 line)

```diff
@@ -0,0 +1 @@
+final_404_addons
```
13.0/post_upgrade.sh (new file; filename inferred from content)

```diff
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -euo pipefail
+
+echo "Post migration to 13.0..."
+
+#compose --debug run ou13 -u base --stop-after-init --no-http
```
13.0/pre_upgrade.sh (filename inferred from content)

```diff
@@ -1,4 +1,5 @@
 #!/bin/bash
+set -euo pipefail
 
 echo "Prepare migration to 13.0..."
 
@@ -17,20 +18,6 @@ EOF
 )
 query_postgres_container "$PRE_MIGRATE_SQL" ou13 || exit 1
 
-# Digital currency specific - to comment if not needed
-INVOICE_NAME_SQL=$(cat <<'EOF'
-ALTER TABLE credit_request ADD invoice_name VARCHAR;
-UPDATE credit_request
-SET invoice_name = (
-SELECT move_name
-FROM account_invoice
-WHERE account_invoice.id = credit_request.invoice_id
-);
-UPDATE credit_request SET invoice_id = NULL;
-EOF
-)
-query_postgres_container "$INVOICE_NAME_SQL" ou13 || exit 1
-
 # Copy filestores
 copy_filestore ou12 ou12 ou13 ou13 || exit 1
 
```
13.0/upgrade.sh (filename inferred from content)

```diff
@@ -1,3 +1,4 @@
 #!/bin/bash
+set -euo pipefail
 
-compose -f ../compose.yml run -p 8013:8069 ou13 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou13
+compose -f ../compose.yml run -p 8013:8069 ou13 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou13
```
14.0/post_upgrade.sh (new file; filename inferred from content)

```diff
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -euo pipefail
+
+echo "Post migration to 14.0..."
+
+#compose --debug run ou14 -u base --stop-after-init --no-http
```
14.0/pre_upgrade.sh (filename inferred from content)

```diff
@@ -1,4 +1,5 @@
 #!/bin/bash
+set -euo pipefail
 
 echo "Prepare migration to 14.0..."
 
```
14.0/upgrade.sh (filename inferred from content)

```diff
@@ -1,3 +1,4 @@
 #!/bin/bash
+set -euo pipefail
 
-compose -f ../compose.yml run -p 8014:8069 ou14 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou14 --load=web,openupgrade_framework
+compose -f ../compose.yml run -p 8014:8069 ou14 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou14 --load=base,web,openupgrade_framework
```
15.0/post_upgrade.sh (new file; filename inferred from content)

```diff
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -euo pipefail
+
+echo "Post migration to 15.0..."
+
+#compose --debug run ou15 -u base --stop-after-init --no-http
```
15.0/pre_upgrade.sh (filename inferred from content)

```diff
@@ -1,4 +1,5 @@
 #!/bin/bash
+set -euo pipefail
 
 echo "Prepare migration to 15.0..."
 
```
15.0/upgrade.sh (filename inferred from content)

```diff
@@ -1,3 +1,4 @@
 #!/bin/bash
+set -euo pipefail
 
-compose -f ../compose.yml run -p 8015:8069 ou15 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou15 --load=web,openupgrade_framework
+compose -f ../compose.yml run -p 8015:8069 ou15 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou15 --load=base,web,openupgrade_framework
```
16.0/post_upgrade.sh (filename inferred from content)

```diff
@@ -1,17 +1,6 @@
 #!/bin/bash
+set -euo pipefail
 
-echo "POST migration to 16.0..."
+echo "Post migration to 16.0..."
 
-# Digital currency specific - to comment if not needed
-INVOICE_NAME_SQL=$(cat <<'EOF'
-UPDATE credit_request
-SET invoice_id = (
-SELECT id
-FROM account_move
-WHERE account_move.name = credit_request.invoice_name
-);
-EOF
-)
-query_postgres_container "$INVOICE_NAME_SQL" ou16 || exit 1
-
-echo "END POST migration to 16.0."
+#compose --debug run ou16 -u base --stop-after-init --no-http
```
16.0/pre_upgrade.sh (filename inferred from content)

```diff
@@ -1,4 +1,5 @@
 #!/bin/bash
+set -euo pipefail
 
 echo "Prepare migration to 16.0..."
 
```
16.0/upgrade.sh (filename inferred from content)

```diff
@@ -1,3 +1,4 @@
 #!/bin/bash
+set -euo pipefail
 
-compose -f ../compose.yml run -p 8016:8069 ou16 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou16 --load=web,openupgrade_framework
+compose -f ../compose.yml run -p 8016:8069 ou16 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou16 --load=base,web,openupgrade_framework
```
17.0/post_upgrade.sh (new executable file, 32 lines)

```diff
@@ -0,0 +1,32 @@
+#!/bin/bash
+set -euo pipefail
+
+echo "Post migration to 17.0..."
+
+# Execute SQL post-migration commands
+POST_MIGRATE_SQL=$(cat <<'EOF'
+DO $$
+DECLARE
+plan_id INTEGER;
+BEGIN
+-- Check if the 'Projects' analytic plan exists
+SELECT id INTO plan_id FROM account_analytic_plan WHERE complete_name = 'migration_PROJECTS' LIMIT 1;
+
+-- If it does exist, delete it
+IF plan_id IS NOT NULL THEN
+DELETE FROM account_analytic_plan WHERE complete_name = 'migration_PROJECTS';
+SELECT id INTO plan_id FROM account_analytic_plan WHERE complete_name = 'Projects' LIMIT 1;
+-- Delete existing system parameter (if any)
+DELETE FROM ir_config_parameter WHERE key = 'analytic.project_plan';
+-- Insert the system parameter with the correct plan ID
+INSERT INTO ir_config_parameter (key, value, create_date, write_date)
+VALUES ('analytic.project_plan', plan_id::text, now(), now());
+END IF;
+END $$;
+EOF
+)
+echo "SQL command = $POST_MIGRATE_SQL"
+query_postgres_container "$POST_MIGRATE_SQL" ou17 || exit 1
+
+
+#compose --debug run ou17 -u base --stop-after-init --no-http
```
17.0/pre_upgrade.sh (new executable file, 57 lines)

```diff
@@ -0,0 +1,57 @@
+#!/bin/bash
+set -euo pipefail
+
+echo "Prepare migration to 17.0..."
+
+# Copy database
+copy_database ou16 ou17 ou17 || exit 1
+
+# Execute SQL pre-migration commands
+PRE_MIGRATE_SQL=$(cat <<'EOF'
+DO $$
+DECLARE
+plan_id INTEGER;
+BEGIN
+-- Check if the 'Projects' analytic plan exists
+SELECT id INTO plan_id FROM account_analytic_plan WHERE name = 'Projects' LIMIT 1;
+
+-- If it doesn't exist, create it
+IF plan_id IS NULL THEN
+INSERT INTO account_analytic_plan (name, complete_name, default_applicability, create_date, write_date)
+VALUES ('Projects', 'migration_PROJECTS', 'optional', now(), now())
+RETURNING id INTO plan_id;
+END IF;
+
+-- Delete existing system parameter (if any)
+DELETE FROM ir_config_parameter WHERE key = 'analytic.project_plan';
+
+-- Insert the system parameter with the correct plan ID
+INSERT INTO ir_config_parameter (key, value, create_date, write_date)
+VALUES ('analytic.project_plan', plan_id::text, now(), now());
+END $$;
+EOF
+)
+echo "SQL command = $PRE_MIGRATE_SQL"
+query_postgres_container "$PRE_MIGRATE_SQL" ou17 || exit 1
+
+PRE_MIGRATE_SQL_2=$(cat <<'EOF'
+DELETE FROM ir_model_fields WHERE name = 'kanban_state_label';
+EOF
+)
+echo "SQL command = $PRE_MIGRATE_SQL_2"
+query_postgres_container "$PRE_MIGRATE_SQL_2" ou17 || exit 1
+
+PRE_MIGRATE_SQL_3=$(cat <<'EOF'
+DELETE FROM ir_model_fields WHERE name = 'phone' AND model='hr.employee';
+DELETE FROM ir_model_fields WHERE name = 'hr_responsible_id' AND model='hr.job';
+DELETE FROM ir_model_fields WHERE name = 'address_home_id' AND model='hr.employee';
+DELETE FROM ir_model_fields WHERE name = 'manager_id' AND model='project.task';
+EOF
+)
+echo "SQL command = $PRE_MIGRATE_SQL_3"
+query_postgres_container "$PRE_MIGRATE_SQL_3" ou17 || exit 1
+
+# Copy filestores
+copy_filestore ou16 ou16 ou17 ou17 || exit 1
+
+echo "Ready for migration to 17.0!"
```
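The 17.0 pre/post pair shown above moves the legacy "Projects" analytic plan through a rename trick: the pre step creates a `migration_PROJECTS` plan before the upgrade, and the post step drops it again and points `analytic.project_plan` at the surviving plan. A small, optional sanity check — not part of the diff — can confirm the parameter ends up referencing an existing plan. It reuses `query_postgres_container` from `lib/common.sh` and assumes `POSTGRES_SERVICE_NAME` is exported (as `upgrade.sh` does) and that `ou17` is the staging database:

```bash
# Optional verification after the 17.0 post step (assumes lib/common.sh is sourced).
plan_param=$(query_postgres_container "SELECT value FROM ir_config_parameter WHERE key = 'analytic.project_plan';" ou17)
echo "analytic.project_plan = $plan_param"
query_postgres_container "SELECT id, name, complete_name FROM account_analytic_plan WHERE id = ${plan_param:-0};" ou17
```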
17.0/upgrade.sh (new executable file, 4 lines)

```diff
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -euo pipefail
+
+compose -f ../compose.yml run -p 8017:8069 ou17 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou17 --load=base,web,openupgrade_framework
```
18.0/post_upgrade.sh (new executable file, 6 lines)

```diff
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -euo pipefail
+
+echo "Post migration to 18.0..."
+
+#compose --debug run ou18 -u base --stop-after-init --no-http
```
18.0/pre_upgrade.sh (new executable file, 20 lines)

```diff
@@ -0,0 +1,20 @@
+#!/bin/bash
+set -euo pipefail
+
+echo "Prepare migration to 18.0..."
+
+# Copy database
+copy_database ou17 ou18 ou18 || exit 1
+
+# Execute SQL pre-migration commands
+PRE_MIGRATE_SQL=$(cat <<'EOF'
+UPDATE account_analytic_plan SET default_applicability=NULL WHERE default_applicability='optional';
+EOF
+)
+echo "SQL command = $PRE_MIGRATE_SQL"
+query_postgres_container "$PRE_MIGRATE_SQL" ou18 || exit 1
+
+# Copy filestores
+copy_filestore ou17 ou17 ou18 ou18 || exit 1
+
+echo "Ready for migration to 18.0!"
```
18.0/upgrade.sh (new executable file, 4 lines)

```diff
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -euo pipefail
+
+compose -f ../compose.yml run -p 8018:8069 ou18 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou18 --load=base,web,openupgrade_framework
```
compose.yml

```diff
@@ -52,7 +52,7 @@ ou15:
 ou16:
   charm: odoo-tecnativa
   docker-compose:
-    image: docker.0k.io/mirror/odoo:rc_16.0-MYC-INIT
+    image: docker.0k.io/mirror/odoo:rc_16.0-ELABORE-LIGHT
     ## Important to keep as a list: otherwise it'll overwrite charm's arguments.
     command:
       - "--log-level=debug"
@@ -73,6 +73,18 @@ ou17:
   options:
     workers: 0
 
+ou18:
+  charm: odoo-tecnativa
+  docker-compose:
+    image: docker.0k.io/mirror/odoo:rc_18.0-ELABORE-LIGHT
+    ## Important to keep as a list: otherwise it'll overwrite charm's arguments.
+    command:
+      - "--log-level=debug"
+      - "--limit-time-cpu=1000000"
+      - "--limit-time-real=1000000"
+  options:
+    workers: 0
+
 postgres:
   docker-compose:
-    image: docker.0k.io/postgres:12.15.0-myc
+    image: docker.0k.io/postgres:17.2.0-myc
```
finalize_db.sh (filename inferred from content; invoked from upgrade.sh)

```diff
@@ -1,20 +1,51 @@
 #!/bin/bash
+set -euo pipefail
 
 DB_NAME="$1"
 ODOO_SERVICE="$2"
 
-FINALE_SQL=$(cat <<'EOF'
-/*Delete sequences that prevent Odoo to start*/
-drop sequence base_registry_signaling;
-drop sequence base_cache_signaling;
+echo "Running SQL cleanup..."
+CLEANUP_SQL=$(cat <<'EOF'
+-- Drop sequences that prevent Odoo from starting.
+-- These sequences are recreated by Odoo on startup but stale values
+-- from the old version can cause conflicts.
+DROP SEQUENCE IF EXISTS base_registry_signaling;
+DROP SEQUENCE IF EXISTS base_cache_signaling;
+
+-- Reset website templates to their original state.
+-- Views with arch_fs (file source) that have been customized (arch_db not null)
+-- are reset to use the file version, EXCEPT for actual website pages which
+-- contain user content that must be preserved.
+UPDATE ir_ui_view
+SET arch_db = NULL
+WHERE arch_fs IS NOT NULL
+AND arch_fs LIKE 'website/%'
+AND arch_db IS NOT NULL
+AND id NOT IN (SELECT view_id FROM website_page);
+
+-- Purge compiled frontend assets (CSS/JS bundles).
+-- These cached files reference old asset versions and must be regenerated
+-- by Odoo after migration to avoid broken stylesheets and scripts.
+DELETE FROM ir_attachment
+WHERE name LIKE '/web/assets/%'
+OR name LIKE '%.assets_%'
+OR (res_model = 'ir.ui.view' AND mimetype = 'text/css');
 EOF
 )
-query_postgres_container "$FINALE_SQL" "$DB_NAME" || exit 1
+query_postgres_container "$CLEANUP_SQL" "$DB_NAME"
 
+PYTHON_SCRIPT=post_migration_fix_duplicated_views.py
+echo "Remove duplicated views with script $PYTHON_SCRIPT ..."
+exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT"
+
+# Uninstall obsolete add-ons
+PYTHON_SCRIPT=post_migration_cleanup_obsolete_modules.py
+echo "Uninstall obsolete add-ons with script $PYTHON_SCRIPT ..."
+exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT" || exit 1
+
 # Give back the right to user to access to the tables
 # docker exec -u 70 "$DB_CONTAINER_NAME" pgm chown "$FINALE_SERVICE_NAME" "$DB_NAME"
 
 
 # Launch Odoo with database in finale version to run all updates
-compose --debug run "$ODOO_SERVICE" -u all --stop-after-init --no-http
+compose --debug run "$ODOO_SERVICE" -u all --log-level=debug --stop-after-init --no-http
```
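A small, optional verification sketch for the cleanup above — not part of the diff. It assumes `lib/common.sh` is sourced, `POSTGRES_SERVICE_NAME` is exported, and `$DB_NAME` is the finale database passed to finalize_db.sh:

```bash
# Expect both counts to be 0 (or close to it) right after the cleanup,
# before Odoo regenerates the asset bundles on its next start.
query_postgres_container "SELECT count(*) FROM ir_attachment WHERE name LIKE '/web/assets/%' OR name LIKE '%.assets_%';" "$DB_NAME"
query_postgres_container "SELECT count(*) FROM ir_ui_view WHERE arch_fs LIKE 'website/%' AND arch_db IS NOT NULL AND id NOT IN (SELECT view_id FROM website_page);" "$DB_NAME"
```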
@@ -1,24 +0,0 @@
|
|||||||
galicea_base
|
|
||||||
galicea_environment_checkup
|
|
||||||
mass_editing
|
|
||||||
mass_mailing_themes
|
|
||||||
muk_autovacuum
|
|
||||||
muk_fields_lobject
|
|
||||||
muk_fields_stream
|
|
||||||
muk_utils
|
|
||||||
muk_web_theme_mail
|
|
||||||
muk_web_utils
|
|
||||||
account_usability
|
|
||||||
kpi_dashboard
|
|
||||||
web_window_title
|
|
||||||
website_project_kanbanview
|
|
||||||
project_usability
|
|
||||||
project_tag
|
|
||||||
maintenance_server_monitoring_ping
|
|
||||||
maintenance_server_monitoring_ssh
|
|
||||||
maintenance_server_monitoring_memory
|
|
||||||
maintenance_server_monitoring_maintenance_equipment_status
|
|
||||||
maintenance_server_monitoring_disk
|
|
||||||
project_task_assignees_avatar
|
|
||||||
account_partner_reconcile
|
|
||||||
account_invoice_import_simple_pdf
|
|
||||||
lib/common.sh (new file, 96 lines)

```diff
@@ -0,0 +1,96 @@
+#!/bin/bash
+#
+# Common functions for Odoo migration scripts
+# Source this file from other scripts: source "$(dirname "$0")/lib/common.sh"
+#
+
+set -euo pipefail
+
+readonly DATASTORE_PATH="/srv/datastore/data"
+readonly FILESTORE_SUBPATH="var/lib/odoo/filestore"
+
+check_required_commands() {
+    local missing=()
+    for cmd in docker compose sudo rsync; do
+        if ! command -v "$cmd" &>/dev/null; then
+            missing+=("$cmd")
+        fi
+    done
+    if [[ ${#missing[@]} -gt 0 ]]; then
+        log_error "Required commands not found: ${missing[*]}"
+        log_error "Please install them before running this script."
+        exit 1
+    fi
+}
+
+log_info() { printf "[INFO] %s\n" "$*"; }
+log_warn() { printf "[WARN] %s\n" "$*" >&2; }
+log_error() { printf "[ERROR] %s\n" "$*" >&2; }
+log_step() { printf "\n===== %s =====\n" "$*"; }
+
+confirm_or_exit() {
+    local message="$1"
+    local choice
+    echo ""
+    echo "$message"
+    echo "Y - Yes, continue"
+    echo "N - No, cancel"
+    read -r -n 1 -p "Your choice: " choice
+    echo ""
+    case "$choice" in
+        [Yy]) return 0 ;;
+        *) log_error "Cancelled by user."; exit 1 ;;
+    esac
+}
+
+query_postgres_container() {
+    local query="$1"
+    local db_name="$2"
+
+    if [[ -z "$query" ]]; then
+        return 0
+    fi
+
+    local result
+    if ! result=$(docker exec -u 70 "$POSTGRES_SERVICE_NAME" psql -d "$db_name" -t -A -c "$query"); then
+        printf "Failed to execute SQL query: %s\n" "$query" >&2
+        printf "Error: %s\n" "$result" >&2
+        return 1
+    fi
+    echo "$result"
+}
+
+copy_database() {
+    local from_db="$1"
+    local to_service="$2"
+    local to_db="$3"
+
+    docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm cp -f "$from_db" "${to_db}@${to_service}"
+}
+
+copy_filestore() {
+    local from_service="$1"
+    local from_db="$2"
+    local to_service="$3"
+    local to_db="$4"
+
+    local src_path="${DATASTORE_PATH}/${from_service}/${FILESTORE_SUBPATH}/${from_db}"
+    local dst_path="${DATASTORE_PATH}/${to_service}/${FILESTORE_SUBPATH}/${to_db}"
+
+    sudo mkdir -p "$(dirname "$dst_path")"
+    sudo rsync -a --delete "${src_path}/" "${dst_path}/"
+    echo "Filestore ${from_service}/${from_db} copied to ${to_service}/${to_db}."
+}
+
+exec_python_script_in_odoo_shell() {
+    local service_name="$1"
+    local db_name="$2"
+    local python_script="$3"
+
+    compose --debug run "$service_name" shell -d "$db_name" --no-http --stop-after-init < "$python_script"
+}
+
+export DATASTORE_PATH FILESTORE_SUBPATH
+export -f log_info log_warn log_error log_step confirm_or_exit
+export -f check_required_commands
+export -f query_postgres_container copy_database copy_filestore exec_python_script_in_odoo_shell
```
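As a usage illustration only (this file is not in the diff), here is a hypothetical `19.0/pre_upgrade.sh`-style step built on the helpers above. It assumes it is called by `upgrade.sh`, which exports `POSTGRES_SERVICE_NAME` and the functions via `export -f`; the `ou18`/`ou19` names follow the existing convention but the 19.0 step itself is invented for the example:

```bash
#!/bin/bash
set -euo pipefail

echo "Prepare migration to 19.0..."

# Copy database and filestore from the previous step (ou18 -> ou19).
copy_database ou18 ou19 ou19 || exit 1
copy_filestore ou18 ou18 ou19 ou19 || exit 1

# Example pre-migration SQL, following the same pattern as the 18.0 step.
PRE_MIGRATE_SQL="DELETE FROM ir_attachment WHERE name LIKE '/web/assets/%';"
query_postgres_container "$PRE_MIGRATE_SQL" ou19 || exit 1

echo "Ready for migration to 19.0!"
```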
post_migration_cleanup_obsolete_modules.py (new file, 128 lines)

```diff
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+"""
+Post-Migration Obsolete Module Cleanup
+Run this AFTER migration to detect and remove modules that exist in the database
+but no longer exist in the filesystem (addons paths).
+"""
+
+print("\n" + "="*80)
+print("POST-MIGRATION OBSOLETE MODULE CLEANUP")
+print("="*80 + "\n")
+
+import odoo.modules.module as module_lib
+
+# Get all modules from database
+all_modules = env['ir.module.module'].search([])
+
+print(f"Analyzing {len(all_modules)} modules in database...\n")
+
+# Detect obsolete modules (in database but not in filesystem)
+obsolete_modules = []
+for mod in all_modules:
+    mod_path = module_lib.get_module_path(mod.name, display_warning=False)
+    if not mod_path:
+        obsolete_modules.append(mod)
+
+if not obsolete_modules:
+    print("✓ No obsolete modules found! Database is clean.")
+    print("=" * 80 + "\n")
+    exit()
+
+# Separate modules by state
+safe_to_delete = [m for m in obsolete_modules if m.state != 'installed']
+installed_obsolete = [m for m in obsolete_modules if m.state == 'installed']
+
+# Display obsolete modules
+print(f"Obsolete modules found: {len(obsolete_modules)}\n")
+
+if installed_obsolete:
+    print("-" * 80)
+    print("⚠️ OBSOLETE INSTALLED MODULES (require attention)")
+    print("-" * 80)
+    for mod in sorted(installed_obsolete, key=lambda m: m.name):
+        print(f" • {mod.name:40} | ID: {mod.id}")
+    print()
+
+if safe_to_delete:
+    print("-" * 80)
+    print("OBSOLETE UNINSTALLED MODULES (safe to delete)")
+    print("-" * 80)
+    for mod in sorted(safe_to_delete, key=lambda m: m.name):
+        print(f" • {mod.name:40} | State: {mod.state:15} | ID: {mod.id}")
+    print()
+
+# Summary
+print("=" * 80)
+print("SUMMARY")
+print("=" * 80 + "\n")
+print(f" • Obsolete uninstalled modules (safe to delete): {len(safe_to_delete)}")
+print(f" • Obsolete INSTALLED modules (caution!): {len(installed_obsolete)}")
+
+# Delete uninstalled modules
+if safe_to_delete:
+    print("\n" + "=" * 80)
+    print("DELETING OBSOLETE UNINSTALLED MODULES")
+    print("=" * 80 + "\n")
+
+    deleted_count = 0
+    failed_deletes = []
+
+    for mod in safe_to_delete:
+        try:
+            mod_name = mod.name
+            mod_id = mod.id
+            mod.unlink()
+            print(f"✓ Deleted: {mod_name} (ID: {mod_id})")
+            deleted_count += 1
+        except Exception as e:
+            print(f"✗ Failed: {mod.name} - {e}")
+            failed_deletes.append({'name': mod.name, 'id': mod.id, 'reason': str(e)})
+
+# Commit changes
+print("\n" + "=" * 80)
+print("COMMITTING CHANGES")
+print("=" * 80 + "\n")
+
+try:
+    env.cr.commit()
+    print("✓ All changes committed successfully!")
+except Exception as e:
+    print(f"✗ Commit failed: {e}")
+    print("Changes were NOT saved!")
+    exit(1)
+
+# Final result
+print("\n" + "=" * 80)
+print("RESULT")
+print("=" * 80 + "\n")
+print(f" • Successfully deleted modules: {deleted_count}")
+print(f" • Failed deletions: {len(failed_deletes)}")
+
+if failed_deletes:
+    print("\n⚠️ Modules not deleted:")
+    for item in failed_deletes:
+        print(f" • {item['name']} (ID: {item['id']}): {item['reason']}")
+
+if installed_obsolete:
+    print("\n" + "=" * 80)
+    print("⚠️ WARNING: OBSOLETE INSTALLED MODULES")
+    print("=" * 80 + "\n")
+    print("The following modules are marked 'installed' but no longer exist")
+    print("in the filesystem. They may cause problems.\n")
+    print("Options:")
+    print(" 1. Check if these modules were renamed/merged in the new version")
+    print(" 2. Manually uninstall them if possible")
+    print(" 3. Force delete them (risky, may break dependencies)\n")
+
+    for mod in sorted(installed_obsolete, key=lambda m: m.name):
+        # Find modules that depend on this module
+        dependents = env['ir.module.module'].search([
+            ('state', '=', 'installed'),
+            ('dependencies_id.name', '=', mod.name)
+        ])
+        dep_info = f" <- Dependents: {dependents.mapped('name')}" if dependents else ""
+        print(f" • {mod.name}{dep_info}")
+
+print("\n" + "=" * 80)
+print("CLEANUP COMPLETED!")
+print("=" * 80 + "\n")
```
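The script above is written for the Odoo shell: it relies on the pre-defined `env` and is fed on standard input rather than executed directly. A minimal manual invocation, mirroring `exec_python_script_in_odoo_shell` from `lib/common.sh` (the service and database names are examples):

```bash
compose --debug run ou18 shell -d ou18 --no-http --stop-after-init < post_migration_cleanup_obsolete_modules.py
```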
post_migration_fix_duplicated_views.py (new file, 192 lines)

```diff
@@ -0,0 +1,192 @@
+#!/usr/bin/env python3
+"""
+Post-Migration Duplicate View Fixer
+Run this AFTER migration to fix duplicate views automatically.
+"""
+
+print("\n" + "="*80)
+print("POST-MIGRATION DUPLICATE VIEW FIXER")
+print("="*80 + "\n")
+
+from collections import defaultdict
+
+# Find all duplicate views
+all_views = env['ir.ui.view'].search(['|', ('active', '=', True), ('active', '=', False)])
+keys = defaultdict(list)
+
+for view in all_views:
+    if view.key:
+        keys[view.key].append(view)
+
+duplicates = {k: v for k, v in keys.items() if len(v) > 1}
+
+print(f"Found {len(duplicates)} keys with duplicate views\n")
+
+if not duplicates:
+    print("✓ No duplicate views found! Database is clean.")
+    print("=" * 80 + "\n")
+    exit()
+
+# Process duplicates
+views_to_delete = []
+redirect_log = []
+
+for key, views in sorted(duplicates.items()):
+    print(f"\nProcessing key: {key}")
+    print("-" * 80)
+
+    # Sort views: module views first, then by ID (older first)
+    sorted_views = sorted(views, key=lambda v: (
+        0 if v.model_data_id else 1,  # Module views first
+        v.id  # Older views first (lower ID = older)
+    ))
+
+    # Keep the first view (should be module view or oldest)
+    keep = sorted_views[0]
+    to_delete = sorted_views[1:]
+
+    module_keep = keep.model_data_id.module if keep.model_data_id else "Custom/DB"
+    print(f"KEEP: ID {keep.id:>6} | Module: {module_keep:<20} | {keep.name}")
+
+    for view in to_delete:
+        module = view.model_data_id.module if view.model_data_id else "Custom/DB"
+        print(f"DELETE: ID {view.id:>6} | Module: {module:<20} | {view.name}")
+
+        # Find and redirect children
+        children = env['ir.ui.view'].search([('inherit_id', '=', view.id)])
+        if children:
+            print(f" → Redirecting {len(children)} children {children.ids} to view {keep.id}")
+            for child in children:
+                child_module = child.model_data_id.module if child.model_data_id else "Custom/DB"
+                redirect_log.append({
+                    'child_id': child.id,
+                    'child_name': child.name,
+                    'child_module': child_module,
+                    'from': view.id,
+                    'to': keep.id
+                })
+            try:
+                children.write({'inherit_id': keep.id})
+                print(f" ✓ Redirected successfully")
+            except Exception as e:
+                print(f" ✗ Redirect failed: {e}")
+                continue
+
+        views_to_delete.append(view)
+
+# Summary before deletion
+print("\n" + "="*80)
+print("SUMMARY")
+print("="*80 + "\n")
+
+print(f"Views to delete: {len(views_to_delete)}")
+print(f"Child views to redirect: {len(redirect_log)}\n")
+
+if redirect_log:
+    print("Redirections that will be performed:")
+    for item in redirect_log[:10]:  # Show first 10
+        print(f" • View {item['child_id']} ({item['child_module']})")
+        print(f" '{item['child_name']}'")
+        print(f" Parent: {item['from']} → {item['to']}")
+
+    if len(redirect_log) > 10:
+        print(f" ... and {len(redirect_log) - 10} more redirections")
+
+# Delete duplicate views
+print("\n" + "="*80)
+print("DELETING DUPLICATE VIEWS")
+print("="*80 + "\n")
+
+deleted_count = 0
+failed_deletes = []
+
+# Sort views by ID descending (delete newer/child views first)
+views_to_delete_sorted = sorted(views_to_delete, key=lambda v: v.id, reverse=True)
+
+for view in views_to_delete_sorted:
+    try:
+        # Create savepoint to isolate each deletion
+        env.cr.execute('SAVEPOINT delete_view')
+
+        view_id = view.id
+        view_name = view.name
+        view_key = view.key
+
+        # Double-check it has no children
+        remaining_children = env['ir.ui.view'].search([('inherit_id', '=', view_id)])
+        if remaining_children:
+            print(f"⚠️ Skipping view {view_id}: Still has {len(remaining_children)} children")
+            failed_deletes.append({
+                'id': view_id,
+                'reason': f'Still has {len(remaining_children)} children'
+            })
+            env.cr.execute('ROLLBACK TO SAVEPOINT delete_view')
+            continue
+
+        view.unlink()
+        env.cr.execute('RELEASE SAVEPOINT delete_view')
+        print(f"✓ Deleted view {view_id}: {view_key}")
+        deleted_count += 1
+
+    except Exception as e:
+        env.cr.execute('ROLLBACK TO SAVEPOINT delete_view')
+        print(f"✗ Failed to delete view {view.id}: {e}")
+        failed_deletes.append({
+            'id': view.id,
+            'name': view.name,
+            'reason': str(e)
+        })
+
+# Commit changes
+print("\n" + "="*80)
+print("COMMITTING CHANGES")
+print("="*80 + "\n")
+
+try:
+    env.cr.commit()
+    print("✓ All changes committed successfully!")
+except Exception as e:
+    print(f"✗ Commit failed: {e}")
+    print("Changes were NOT saved!")
+    exit(1)
+
+# Final verification
+print("\n" + "="*80)
+print("FINAL VERIFICATION")
+print("="*80 + "\n")
+
+# Re-check for duplicates
+all_views_after = env['ir.ui.view'].search([('active', '=', True)])
+keys_after = defaultdict(list)
+
+for view in all_views_after:
+    if view.key:
+        keys_after[view.key].append(view)
+
+duplicates_after = {k: v for k, v in keys_after.items() if len(v) > 1}
+
+print(f"Results:")
+print(f" • Successfully deleted: {deleted_count} views")
+print(f" • Failed deletions: {len(failed_deletes)}")
+print(f" • Child views redirected: {len(redirect_log)}")
+print(f" • Remaining duplicates: {len(duplicates_after)}")
+
+if failed_deletes:
+    print(f"\n⚠️ Failed deletions:")
+    for item in failed_deletes:
+        print(f" • View {item['id']}: {item['reason']}")
+
+if duplicates_after:
+    print(f"\n⚠️ Still have {len(duplicates_after)} duplicate keys:")
+    for key, views in sorted(duplicates_after.items())[:5]:
+        print(f" • {key}: {len(views)} views")
+        for view in views:
+            module = view.model_data_id.module if view.model_data_id else "Custom/DB"
+            print(f" - ID {view.id} ({module})")
+    print(f"\n Run this script again to attempt another cleanup.")
+else:
+    print(f"\n✓ All duplicates resolved!")
+
+print("\n" + "="*80)
+print("FIX COMPLETED!")
+print("="*80)
```
pre_migration_view_checking.py (new file, 126 lines)

```diff
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+"""
+Pre-Migration Cleanup Script for Odoo
+Run this BEFORE migrating to identify and clean up custom views.
+
+Usage: odoo shell -d dbname < pre_migration_cleanup.py
+"""
+
+print("\n" + "="*80)
+print("PRE-MIGRATION CLEANUP - VIEW ANALYSIS")
+print("="*80 + "\n")
+
+# 1. Find all custom (COW) views
+print("STEP 1: Identifying Custom/COW Views")
+print("-"*80)
+
+all_views = env['ir.ui.view'].search(['|', ('active', '=', True), ('active', '=', False)])
+cow_views = all_views.filtered(lambda v: not v.model_data_id)
+
+print(f"Total views in database: {len(all_views)}")
+print(f"Custom views (no module): {len(cow_views)}")
+print(f"Module views: {len(all_views) - len(cow_views)}\n")
+
+if cow_views:
+    print("Custom views found:\n")
+    print(f"{'ID':<8} {'Active':<8} {'Key':<50} {'Name':<40}")
+    print("-"*120)
+
+    for view in cow_views[:50]:  # Show first 50
+        active_str = "✓" if view.active else "✗"
+        key_str = view.key[:48] if view.key else "N/A"
+        name_str = view.name[:38] if view.name else "N/A"
+        print(f"{view.id:<8} {active_str:<8} {key_str:<50} {name_str:<40}")
+
+    if len(cow_views) > 50:
+        print(f"\n... and {len(cow_views) - 50} more custom views")
+
+# 2. Find duplicate views
+print("\n" + "="*80)
+print("STEP 2: Finding Duplicate Views (Same Key)")
+print("-"*80 + "\n")
+
+from collections import defaultdict
+
+keys = defaultdict(list)
+for view in all_views.filtered(lambda v: v.key and v.active):
+    keys[view.key].append(view)
+
+duplicates = {k: v for k, v in keys.items() if len(v) > 1}
+
+print(f"Found {len(duplicates)} keys with duplicate views:\n")
+
+if duplicates:
+    for key, views in sorted(duplicates.items()):
+        print(f"\nKey: {key} ({len(views)} duplicates)")
+        for view in views:
+            module = view.model_data_id.module if view.model_data_id else "⚠️ Custom/DB"
+            print(f" ID {view.id:>6}: {module:<25} | {view.name}")
+
+# 3. Find views that might have xpath issues
+print("\n" + "="*80)
+print("STEP 3: Finding Views with XPath Expressions")
+print("-"*80 + "\n")
+
+import re
+
+views_with_xpath = []
+xpath_pattern = r'<xpath[^>]+expr="([^"]+)"'
+
+for view in all_views.filtered(lambda v: v.active and v.inherit_id):
+    xpaths = re.findall(xpath_pattern, view.arch_db)
+    if xpaths:
+        views_with_xpath.append({
+            'view': view,
+            'xpaths': xpaths,
+            'is_custom': not bool(view.model_data_id)
+        })
+
+print(f"Found {len(views_with_xpath)} views with xpath expressions")
+
+custom_xpath_views = [v for v in views_with_xpath if v['is_custom']]
+print(f" - {len(custom_xpath_views)} are custom views (potential issue!)")
+print(f" - {len(views_with_xpath) - len(custom_xpath_views)} are module views\n")
+
+if custom_xpath_views:
+    print("Custom views with xpaths (risk for migration issues):\n")
+    for item in custom_xpath_views:
+        view = item['view']
+        print(f"ID {view.id}: {view.name}")
+        print(f" Key: {view.key}")
+        print(f" Inherits from: {view.inherit_id.key}")
+        print(f" XPath count: {len(item['xpaths'])}")
+        print(f" Sample xpaths: {item['xpaths'][:2]}")
+        print()
+
+# 4. Summary and recommendations
+print("=" * 80)
+print("SUMMARY AND RECOMMENDATIONS")
+print("=" * 80 + "\n")
+
+print(f"📊 Statistics:")
+print(f" • Total views: {len(all_views)}")
+print(f" • Custom views: {len(cow_views)}")
+print(f" • Duplicate view keys: {len(duplicates)}")
+print(f" • Custom views with xpaths: {len(custom_xpath_views)}\n")
+
+print(f"\n📋 RECOMMENDED ACTIONS BEFORE MIGRATION:\n")
+
+if custom_xpath_views:
+    print(f"1. Archive or delete {len(custom_xpath_views)} custom views with xpaths:")
+    print(f" • Review each one and determine if still needed")
+    print(f" • Archive unnecessary ones: env['ir.ui.view'].browse([ids]).write({{'active': False}})")
+    print(f" • Plan to recreate important ones as proper module views after migration\n")
+
+if duplicates:
+    print(f"2. Fix {len(duplicates)} duplicate view keys:")
+    print(f" • Manually review and delete obsolete duplicates, keeping the most appropriate one")
+    print(f" • Document the remaining appropriate ones as script post_migration_fix_duplicated_views.py will run AFTER the migration and delete all duplicates.\n")
+
+if cow_views:
+    print(f"3. Review {len(cow_views)} custom views:")
+    print(f" • Document which ones are important")
+    print(f" • Export their XML for reference")
+    print(f" • Consider converting to module views\n")
+
+print("=" * 80 + "\n")
```
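A hedged sketch — not part of the diff — of the "archive unnecessary ones" recommendation printed by the report above: pipe a short snippet into the same Odoo shell that prepare_db.sh uses. The view IDs are placeholders to replace with the IDs flagged by the report, and `ou12` stands for the copied origin database and service:

```bash
compose --debug run ou12 shell -d ou12 --no-http --stop-after-init <<'PY'
# Placeholder IDs: replace with the custom view IDs listed in the pre-migration report.
env['ir.ui.view'].browse([1234, 5678]).write({'active': False})
env.cr.commit()
PY
```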
prepare_db.sh (filename inferred from content; invoked from upgrade.sh)

```diff
@@ -1,21 +1,24 @@
 #!/bin/bash
+set -euo pipefail
 
-# Global variables
 ODOO_SERVICE="$1"
 DB_NAME="$2"
 DB_FINALE_MODEL="$3"
 DB_FINALE_SERVICE="$4"
 
+TMPDIR=$(mktemp -d)
+trap 'rm -rf "$TMPDIR"' EXIT
+
 echo "Start database preparation"
 
 # Check POSTGRES container is running
-if ! docker ps | grep -q "$DB_CONTAINER_NAME"; then
-    printf "Docker container %s is not running.\n" "$DB_CONTAINER_NAME" >&2
-    return 1
+if ! docker ps | grep -q "$POSTGRES_SERVICE_NAME"; then
+    printf "Docker container %s is not running.\n" "$POSTGRES_SERVICE_NAME" >&2
+    exit 1
 fi
 
 EXT_EXISTS=$(query_postgres_container "SELECT 1 FROM pg_extension WHERE extname = 'dblink'" "$DB_NAME") || exit 1
-if [ "$EXT_EXISTS" != "1" ]; then
+if [[ "$EXT_EXISTS" != "1" ]]; then
     query_postgres_container "CREATE EXTENSION dblink;" "$DB_NAME" || exit 1
 fi
 
@@ -39,51 +42,32 @@ echo "Base neutralized..."
 ## List add-ons not in final version ##
 #######################################
 
-# Retrieve add-ons not available on the final Odoo version
-SQL_404_ADDONS_LIST="
-SELECT module_origin.name
-FROM ir_module_module module_origin
-LEFT JOIN (
-SELECT *
-FROM dblink('dbname=$FINALE_DB_NAME','SELECT name, shortdesc, author FROM ir_module_module')
-AS tb2(name text, shortdesc text, author text)
-) AS module_dest ON module_dest.name = module_origin.name
-WHERE (module_dest.name IS NULL) AND (module_origin.state = 'installed') AND (module_origin.author NOT IN ('Odoo S.A.', 'Lokavaluto', 'Elabore'))
-ORDER BY module_origin.name
-;
-"
-echo "Retrieve 404 addons... "
-echo "SQL REQUEST = $SQL_404_ADDONS_LIST"
-query_postgres_container "$SQL_404_ADDONS_LIST" "$DB_NAME" > 404_addons || exit 1
-
-# Keep only the installed add-ons
-INSTALLED_ADDONS="SELECT name FROM ir_module_module WHERE state='installed';"
-query_postgres_container "$INSTALLED_ADDONS" "$DB_NAME" > installed_addons || exit 1
-
-grep -Fx -f 404_addons installed_addons > final_404_addons
-rm -f 404_addons installed_addons
-
-# Ask confirmation to uninstall the selected add-ons
-echo "
-==== ADD-ONS CHECK ====
-Installed add-ons not available in final Odoo version:
-"
-cat final_404_addons
-
-
-echo "
-Do you accept to migrate the database with all these add-ons still installed? (Y/N/R)"
-echo "Y - Yes, let's go on with the upgrade."
-echo "N - No, stop the upgrade"
-read -n 1 -p "Your choice: " choice
-case "$choice" in
-[Yy] ) echo "
-Upgrade confirmed!";;
-[Nn] ) echo "
-Upgrade cancelled!"; exit 1;;
-* ) echo "
-Please answer by Y or N.";;
-esac
-
-
+SQL_MISSING_ADDONS=$(cat <<EOF
+SELECT module_origin.name
+FROM ir_module_module module_origin
+LEFT JOIN (
+SELECT *
+FROM dblink('dbname=${FINALE_DB_NAME}','SELECT name, shortdesc, author FROM ir_module_module')
+AS tb2(name text, shortdesc text, author text)
+) AS module_dest ON module_dest.name = module_origin.name
+WHERE (module_dest.name IS NULL)
+AND (module_origin.state = 'installed')
+AND (module_origin.author NOT IN ('Odoo S.A.', 'Lokavaluto', 'Elabore'))
+ORDER BY module_origin.name;
+EOF
+)
+echo "Retrieve missing addons..."
+missing_addons=$(query_postgres_container "$SQL_MISSING_ADDONS" "$DB_NAME")
+
+log_step "ADD-ONS CHECK"
+echo "Installed add-ons not available in final Odoo version:"
+echo "$missing_addons"
+confirm_or_exit "Do you accept to migrate with these add-ons still installed?"
+
+PYTHON_SCRIPT=pre_migration_view_checking.py
+echo "Check views with script $PYTHON_SCRIPT ..."
+exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT"
+
+confirm_or_exit "Do you accept to migrate with the current views state?"
+
 echo "Database successfully prepared!"
```
upgrade.sh

```diff
@@ -1,207 +1,139 @@
 #!/bin/bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+source "${SCRIPT_DIR}/lib/common.sh"
 
 ####################
-# GLOBAL VARIABLES #
+# USAGE & ARGUMENTS
 ####################
 
-ORIGIN_VERSION="$1" # "12" for version 12.0
-FINAL_VERSION="$2" # "16" for version 16.0
-# Path to the database to migrate. Must be a .zip file with the following syntax: {DATABASE_NAME}.zip
-ORIGIN_DB_NAME="$3"
-ORIGIN_SERVICE_NAME="$4"
-
-# Get origin database name
-COPY_DB_NAME="ou${ORIGIN_VERSION}"
-# Define finale database name
+usage() {
+    cat <<EOF >&2
+Usage: $0 <origin_version> <final_version> <db_name> <service_name>
+
+Arguments:
+  origin_version   Origin Odoo version number (e.g., 12 for version 12.0)
+  final_version    Target Odoo version number (e.g., 16 for version 16.0)
+  db_name          Name of the database to migrate
+  service_name     Name of the origin Odoo service (docker compose service)
+
+Example:
+  $0 14 16 elabore_20241208 odoo14
+EOF
+    exit 1
+}
+
+if [[ $# -lt 4 ]]; then
+    log_error "Missing arguments. Expected 4, got $#."
+    usage
+fi
+
+check_required_commands
+
+readonly ORIGIN_VERSION="$1"
+readonly FINAL_VERSION="$2"
+readonly ORIGIN_DB_NAME="$3"
+readonly ORIGIN_SERVICE_NAME="$4"
+
+readonly COPY_DB_NAME="ou${ORIGIN_VERSION}"
 export FINALE_DB_NAME="ou${FINAL_VERSION}"
-# Define finale odoo service name
-FINALE_SERVICE_NAME="${FINALE_DB_NAME}"
+readonly FINALE_DB_NAME
+readonly FINALE_SERVICE_NAME="${FINALE_DB_NAME}"
 
-# Service postgres name
-export POSTGRES_SERVICE_NAME="lokavaluto_postgres_1"
+readarray -t postgres_containers < <(docker ps --format '{{.Names}}' | grep postgres || true)
 
-#############################################
-# DISPLAYS ALL INPUTS PARAMETERS
-#############################################
-
-echo "===== INPUT PARAMETERS ====="
-echo "Origin version .......... $ORIGIN_VERSION"
-echo "Final version ........... $FINAL_VERSION"
-echo "Origin DB name ........... $ORIGIN_DB_NAME"
-echo "Origin service name ..... $ORIGIN_SERVICE_NAME"
-
-echo "
-===== COMPUTED GLOBALE VARIABLES ====="
-echo "Copy DB name ............. $COPY_DB_NAME"
-echo "Finale DB name ........... $FINALE_DB_NAME"
-echo "Finale service name ...... $FINALE_SERVICE_NAME"
-echo "Postgres service name .... $POSTGRES_SERVICE_NAME"
-
-
-# Function to launch an SQL request to the postgres container
-query_postgres_container(){
-    local QUERY="$1"
-    local DB_NAME="$2"
-    if [ -z "$QUERY" ]; then
-        return 0
-    fi
-    local result
-    if ! result=$(docker exec -u 70 "$POSTGRES_SERVICE_NAME" psql -d "$DB_NAME" -t -A -c "$QUERY"); then
-        printf "Failed to execute SQL query: %s\n" "$query" >&2
-        printf "Error: %s\n" "$result" >&2
-        exit 1
-    fi
-    echo "$result"
-}
-export -f query_postgres_container
-
-# Function to copy the postgres databases
-copy_database(){
-    local FROM_DB="$1"
-    local TO_SERVICE="$2"
-    local TO_DB="$3"
-    docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm cp -f "$FROM_DB" "$TO_DB"@"$TO_SERVICE"
-}
-export -f copy_database
-
-# Function to copy the filetores
-copy_filestore(){
-    local FROM_SERVICE="$1"
-    local FROM_DB="$2"
-    local TO_SERVICE="$3"
-    local TO_DB="$4"
-    mkdir -p /srv/datastore/data/"$TO_SERVICE"/var/lib/odoo/filestore/"$TO_DB" || exit 1
-    rm -rf /srv/datastore/data/"$TO_SERVICE"/var/lib/odoo/filestore/"$TO_DB" || exit 1
-    cp -a /srv/datastore/data/"$FROM_SERVICE"/var/lib/odoo/filestore/"$FROM_DB" /srv/datastore/data/"$TO_SERVICE"/var/lib/odoo/filestore/"$TO_DB" || exit 1
-    echo "Filestore $FROM_SERVICE/$FROM_DB copied."
-}
-export -f copy_filestore
-
-##############################################
-# CHECKS ALL NEEDED COMPONENTS ARE AVAILABLE #
-##############################################
-
-echo "
-==== CHECKS ALL NEEDED COMPONENTS ARE AVAILABLE ===="
-
-# Check POSTGRES container is running
-if ! docker ps | grep -q "$POSTGRES_SERVICE_NAME"; then
-    printf "Docker container %s is not running.\n" "$POSTGRES_SERVICE_NAME" >&2
-    return 1
-else
-    echo "UPGRADE: container $POSTGRES_SERVICE_NAME running."
-fi
-
-# Check origin database is in the local postgres
-DB_EXISTS=$(docker exec -it -u 70 $POSTGRES_SERVICE_NAME psql -tc "SELECT 1 FROM pg_database WHERE datname = '$ORIGIN_DB_NAME'" | tr -d '[:space:]')
-if [ "$DB_EXISTS" ]; then
-    echo "UPGRADE: Database '$ORIGIN_DB_NAME' found."
-else
-    echo "ERROR: Database '$ORIGIN_DB_NAME' not found in the local postgress service. Please add it and restart the upgrade process."
-    exit 1
-fi
-
-# Check that the origin filestore exist
-REPERTOIRE="/srv/datastore/data/${ORIGIN_SERVICE_NAME}/var/lib/odoo/filestore/${ORIGIN_DB_NAME}"
-if [ -d $REPERTOIRE ]; then
-    echo "UPGRADE: '$REPERTOIRE' filestore found."
-else
-    echo "ERROR: '$REPERTOIRE' filestore not found, please add it and restart the upgrade process."
-    exit 1
-fi
-
-#######################################
-# LAUNCH VIRGIN ODOO IN FINAL VERSION #
-#######################################
+if [[ ${#postgres_containers[@]} -eq 0 ]]; then
+    log_error "No running PostgreSQL container found. Please start a PostgreSQL container and try again."
+    exit 1
+elif [[ ${#postgres_containers[@]} -gt 1 ]]; then
+    log_error "Multiple PostgreSQL containers found:"
+    printf ' %s\n' "${postgres_containers[@]}" >&2
+    log_error "Please ensure only one PostgreSQL container is running."
+    exit 1
+fi
+
+export POSTGRES_SERVICE_NAME="${postgres_containers[0]}"
+readonly POSTGRES_SERVICE_NAME
+
+log_step "INPUT PARAMETERS"
+log_info "Origin version .......... $ORIGIN_VERSION"
+log_info "Final version ........... $FINAL_VERSION"
+log_info "Origin DB name ........... $ORIGIN_DB_NAME"
+log_info "Origin service name ..... $ORIGIN_SERVICE_NAME"
+
+log_step "COMPUTED GLOBAL VARIABLES"
+log_info "Copy DB name ............. $COPY_DB_NAME"
+log_info "Finale DB name ........... $FINALE_DB_NAME"
+log_info "Finale service name ...... $FINALE_SERVICE_NAME"
+log_info "Postgres service name .... $POSTGRES_SERVICE_NAME"
+
+
+log_step "CHECKS ALL NEEDED COMPONENTS ARE AVAILABLE"
+
+db_exists=$(docker exec -it -u 70 "$POSTGRES_SERVICE_NAME" psql -tc "SELECT 1 FROM pg_database WHERE datname = '$ORIGIN_DB_NAME'" | tr -d '[:space:]')
+if [[ "$db_exists" ]]; then
+    log_info "Database '$ORIGIN_DB_NAME' found."
+else
+    log_error "Database '$ORIGIN_DB_NAME' not found in the local postgres service. Please add it and restart the upgrade process."
+    exit 1
+fi
+
+filestore_path="${DATASTORE_PATH}/${ORIGIN_SERVICE_NAME}/${FILESTORE_SUBPATH}/${ORIGIN_DB_NAME}"
+if [[ -d "$filestore_path" ]]; then
+    log_info "Filestore '$filestore_path' found."
+else
+    log_error "Filestore '$filestore_path' not found, please add it and restart the upgrade process."
+    exit 1
+fi
+
+log_step "LAUNCH VIRGIN ODOO IN FINAL VERSION"
 
 # Remove finale database and datastore if already exists (we need a virgin Odoo)
 if docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm ls | grep -q "$FINALE_SERVICE_NAME"; then
+    log_info "Removing existing finale database and filestore..."
     docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm rm -f "$FINALE_SERVICE_NAME"
-    rm -rf /srv/datastore/data/"$FINALE_SERVICE_NAME"/var/lib/odoo/filestore/"$FINALE_SERVICE_NAME"
+    sudo rm -rf "${DATASTORE_PATH}/${FINALE_SERVICE_NAME}/${FILESTORE_SUBPATH}/${FINALE_SERVICE_NAME}"
 fi
 
 compose --debug run "$FINALE_SERVICE_NAME" -i base --stop-after-init --no-http
 
-echo "Model database in final Odoo version created."
+log_info "Model database in final Odoo version created."
 
-############################
-# COPY ORIGINAL COMPONENTS #
-############################
-
-echo "
-==== COPY ORIGINAL COMPONENTS ===="
-echo "UPGRADE: Start copy"
-
-# Copy database
-copy_database "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME" || exit 1
-echo "UPGRADE: original database copied in ${COPY_DB_NAME}@${COPY_DB_NAME}."
-
-# Copy filestore
-copy_filestore "$ORIGIN_SERVICE_NAME" "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME" || exit 1
-echo "UPGRADE: original filestore copied."
-
-
-#####################
-# PATH OF MIGRATION #
-####################
-
-echo "
-==== PATH OF MIGRATION ===="
-# List all the versions to migrate through
-declare -a versions
-nb_migrations=$(($FINAL_VERSION - $ORIGIN_VERSION))
-
-# Build the migration path
-for ((i = 0; i<$nb_migrations; i++))
-do
-    versions[$i]=$(($ORIGIN_VERSION + 1 + i))
-done
-echo "UPGRADE: Migration path is ${versions[@]}"
-
-########################
-# DATABASE PREPARATION #
-########################
-
-echo "
-==== DATABASE PREPARATION ===="
-./prepare_db.sh "$COPY_DB_NAME" "$COPY_DB_NAME" "$FINALE_DB_MODEL_NAME" "$FINALE_SERVICE_NAME" || exit 1
-
-###################
-# UPGRADE PROCESS #
-###################
-
-for version in "${versions[@]}"
-do
-    echo "START UPGRADE TO ${version}.0"
-    start_version=$((version-1))
-    end_version="$version"
-
-    ### Go to the repository holding the upgrate scripts
-    cd "${end_version}.0"
-
-    ### Execute pre_upgrade scripts
-    ./pre_upgrade.sh || exit 1
-
-    ### Start upgrade
-    ./upgrade.sh || exit 1
-
-    ### Execute post-upgrade scripts
-    ./post_upgrade.sh || exit 1
-
-    ### Return to parent repository for the following steps
-    cd ..
-    echo "END UPGRADE TO ${version}.0"
-done
-## END UPGRADE LOOP
-
-##########################
-# POST-UPGRADE PROCESSES #
-##########################
-./finalize_db.sh "$FINALE_DB_NAME" "$FINALE_SERVICE_NAME" || exit 1
-
-
-echo "UPGRADE PROCESS ENDED WITH SUCCESS"
+log_step "COPY ORIGINAL COMPONENTS"
+
+copy_database "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME"
+log_info "Original database copied to ${COPY_DB_NAME}@${COPY_DB_NAME}."
+
+copy_filestore "$ORIGIN_SERVICE_NAME" "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME"
+log_info "Original filestore copied."
+
+log_step "PATH OF MIGRATION"
+
+readarray -t versions < <(seq $((ORIGIN_VERSION + 1)) "$FINAL_VERSION")
+log_info "Migration path is ${versions[*]}"
+
+log_step "DATABASE PREPARATION"
+
+./prepare_db.sh "$COPY_DB_NAME" "$COPY_DB_NAME" "$FINALE_DB_NAME" "$FINALE_SERVICE_NAME"
+
+log_step "UPGRADE PROCESS"
+
+for version in "${versions[@]}"; do
+    log_info "START UPGRADE TO ${version}.0"
+
+    "${SCRIPT_DIR}/${version}.0/pre_upgrade.sh"
+    "${SCRIPT_DIR}/${version}.0/upgrade.sh"
+    "${SCRIPT_DIR}/${version}.0/post_upgrade.sh"
+
+    log_info "END UPGRADE TO ${version}.0"
+done
+
+log_step "POST-UPGRADE PROCESSES"
+
+./finalize_db.sh "$FINALE_DB_NAME" "$FINALE_SERVICE_NAME"
+
+log_step "UPGRADE PROCESS ENDED WITH SUCCESS"
```