Compare commits: e1d1907cce ... d8b332762b (9 commits)

| SHA1 |
|---|
| d8b332762b |
| 023deeea5b |
| 743d1ce831 |
| 469fb42e48 |
| f18d50cb94 |
| afeaa3d00f |
| 93fc10395f |
| 458af6a795 |
| 61733b04a3 |
```diff
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-compose -f ../compose.yml run -p 8013:8069 ou13 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou13
+compose -f ../compose.yml run -p 8013:8069 ou13 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou13
```
```diff
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-compose -f ../compose.yml run -p 8014:8069 ou14 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou14 --load=web,openupgrade_framework
+compose -f ../compose.yml run -p 8014:8069 ou14 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou14 --load=base,web,openupgrade_framework
```
```diff
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-compose -f ../compose.yml run -p 8015:8069 ou15 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou15 --load=web,openupgrade_framework
+compose -f ../compose.yml run -p 8015:8069 ou15 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou15 --load=base,web,openupgrade_framework
```
```diff
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-compose -f ../compose.yml run -p 8016:8069 ou16 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou16 --load=web,openupgrade_framework
+compose -f ../compose.yml run -p 8016:8069 ou16 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou16 --load=base,web,openupgrade_framework
```
```diff
@@ -2,4 +2,30 @@
 
 echo "Post migration to 17.0..."
 
+# Execute SQL post-migration commands
+POST_MIGRATE_SQL=$(cat <<'EOF'
+DO $$
+DECLARE
+    plan_id INTEGER;
+BEGIN
+    -- Check if the 'Projects' analytic plan exists
+    SELECT id INTO plan_id FROM account_analytic_plan WHERE complete_name = 'migration_PROJECTS' LIMIT 1;
+
+    -- If it does exist, delete it
+    IF plan_id IS NOT NULL THEN
+        DELETE FROM account_analytic_plan WHERE complete_name = 'migration_PROJECTS';
+        SELECT id INTO plan_id FROM account_analytic_plan WHERE complete_name = 'Projects' LIMIT 1;
+        -- Delete existing system parameter (if any)
+        DELETE FROM ir_config_parameter WHERE key = 'analytic.project_plan';
+        -- Insert the system parameter with the correct plan ID
+        INSERT INTO ir_config_parameter (key, value, create_date, write_date)
+        VALUES ('analytic.project_plan', plan_id::text, now(), now());
+    END IF;
+END $$;
+EOF
+)
+echo "SQL command = $POST_MIGRATE_SQL"
+query_postgres_container "$POST_MIGRATE_SQL" ou17 || exit 1
+
+
 #compose --debug run ou17 -u base --stop-after-init --no-http
```
```diff
@@ -6,10 +6,50 @@ echo "Prepare migration to 17.0..."
 
 copy_database ou16 ou17 ou17 || exit 1
 
 # Execute SQL pre-migration commands
-PRE_MIGRATE_SQL=""
+PRE_MIGRATE_SQL=$(cat <<'EOF'
+DO $$
+DECLARE
+    plan_id INTEGER;
+BEGIN
+    -- Check if the 'Projects' analytic plan exists
+    SELECT id INTO plan_id FROM account_analytic_plan WHERE name = 'Projects' LIMIT 1;
+
+    -- If it doesn't exist, create it
+    IF plan_id IS NULL THEN
+        INSERT INTO account_analytic_plan (name, complete_name, default_applicability, create_date, write_date)
+        VALUES ('Projects', 'migration_PROJECTS', 'optional', now(), now())
+        RETURNING id INTO plan_id;
+    END IF;
+
+    -- Delete existing system parameter (if any)
+    DELETE FROM ir_config_parameter WHERE key = 'analytic.project_plan';
+
+    -- Insert the system parameter with the correct plan ID
+    INSERT INTO ir_config_parameter (key, value, create_date, write_date)
+    VALUES ('analytic.project_plan', plan_id::text, now(), now());
+END $$;
+EOF
+)
 echo "SQL command = $PRE_MIGRATE_SQL"
 query_postgres_container "$PRE_MIGRATE_SQL" ou17 || exit 1
 
+PRE_MIGRATE_SQL_2=$(cat <<'EOF'
+DELETE FROM ir_model_fields WHERE name = 'kanban_state_label';
+EOF
+)
+echo "SQL command = $PRE_MIGRATE_SQL_2"
+query_postgres_container "$PRE_MIGRATE_SQL_2" ou17 || exit 1
+
+PRE_MIGRATE_SQL_3=$(cat <<'EOF'
+DELETE FROM ir_model_fields WHERE name = 'phone' AND model='hr.employee';
+DELETE FROM ir_model_fields WHERE name = 'hr_responsible_id' AND model='hr.job';
+DELETE FROM ir_model_fields WHERE name = 'address_home_id' AND model='hr.employee';
+DELETE FROM ir_model_fields WHERE name = 'manager_id' AND model='project.task';
+EOF
+)
+echo "SQL command = $PRE_MIGRATE_SQL_3"
+query_postgres_container "$PRE_MIGRATE_SQL_3" ou17 || exit 1
+
 # Copy filestores
 copy_filestore ou16 ou16 ou17 ou17 || exit 1
```
```diff
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-compose -f ../compose.yml run -p 8017:8069 ou17 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou17 --load=web,openupgrade_framework
+compose -f ../compose.yml run -p 8017:8069 ou17 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou17 --load=base,web,openupgrade_framework
```
18.0/post_upgrade.sh (new executable file, +5 lines)

```bash
#!/bin/bash

echo "Post migration to 18.0..."

#compose --debug run ou18 -u base --stop-after-init --no-http
```
18.0/pre_upgrade.sh (new executable file, +19 lines)

```bash
#!/bin/bash

echo "Prepare migration to 18.0..."

# Copy database
copy_database ou17 ou18 ou18 || exit 1

# Execute SQL pre-migration commands
PRE_MIGRATE_SQL=$(cat <<'EOF'
UPDATE account_analytic_plan SET default_applicability=NULL WHERE default_applicability='optional';
EOF
)
echo "SQL command = $PRE_MIGRATE_SQL"
query_postgres_container "$PRE_MIGRATE_SQL" ou18 || exit 1

# Copy filestores
copy_filestore ou17 ou17 ou18 ou18 || exit 1

echo "Ready for migration to 18.0!"
```
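Both pre-upgrade scripts lean on the project's `query_postgres_container` helper to run ad-hoc SQL against the target database. A minimal verification sketch in the same style, assuming the helper is already sourced; the SELECT statement is illustrative and not part of the changeset:

```bash
# Hedged sketch: after 18.0/pre_upgrade.sh, list any analytic plan still marked
# 'optional' before launching the 18.0 upgrade. The query itself is illustrative.
CHECK_SQL="SELECT id, name, default_applicability FROM account_analytic_plan WHERE default_applicability = 'optional';"
query_postgres_container "$CHECK_SQL" ou18 || exit 1
```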
18.0/upgrade.sh (new executable file, +3 lines)

```bash
#!/bin/bash

compose -f ../compose.yml run -p 8018:8069 ou18 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou18 --load=base,web,openupgrade_framework
```
compose.yml (14 changed lines)

```diff
@@ -52,7 +52,7 @@ ou15:
 ou16:
   charm: odoo-tecnativa
   docker-compose:
-    image: docker.0k.io/mirror/odoo:rc_16.0-MYC-INIT
+    image: docker.0k.io/mirror/odoo:rc_16.0-ELABORE-LIGHT
     ## Important to keep as a list: otherwise it'll overwrite charm's arguments.
     command:
       - "--log-level=debug"
```
```diff
@@ -73,6 +73,18 @@ ou17:
   options:
     workers: 0
 
+ou18:
+  charm: odoo-tecnativa
+  docker-compose:
+    image: docker.0k.io/mirror/odoo:rc_18.0-ELABORE-LIGHT
+    ## Important to keep as a list: otherwise it'll overwrite charm's arguments.
+    command:
+      - "--log-level=debug"
+      - "--limit-time-cpu=1000000"
+      - "--limit-time-real=1000000"
+  options:
+    workers: 0
+
 postgres:
   docker-compose:
     image: docker.0k.io/postgres:17.2.0-myc
```
```diff
@@ -11,10 +11,14 @@ EOF
 )
 query_postgres_container "$FINALE_SQL" "$DB_NAME" || exit 1
 
+# Fix duplicated views
+PYTHON_SCRIPT=post_migration_fix_duplicated_views.py
+echo "Remove duplicated views with script $PYTHON_SCRIPT ..."
+exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT" || exit 1
 
 # Give back the right to user to access to the tables
 # docker exec -u 70 "$DB_CONTAINER_NAME" pgm chown "$FINALE_SERVICE_NAME" "$DB_NAME"
 
 
 # Launch Odoo with database in finale version to run all updates
-compose --debug run "$ODOO_SERVICE" -u all --stop-after-init --no-http
+compose --debug run "$ODOO_SERVICE" -u all --log-level=debug --stop-after-init --no-http
```
```diff
@@ -1,24 +0,0 @@
-galicea_base
-galicea_environment_checkup
-mass_editing
-mass_mailing_themes
-muk_autovacuum
-muk_fields_lobject
-muk_fields_stream
-muk_utils
-muk_web_theme_mail
-muk_web_utils
-account_usability
-kpi_dashboard
-web_window_title
-website_project_kanbanview
-project_usability
-project_tag
-maintenance_server_monitoring_ping
-maintenance_server_monitoring_ssh
-maintenance_server_monitoring_memory
-maintenance_server_monitoring_maintenance_equipment_status
-maintenance_server_monitoring_disk
-project_task_assignees_avatar
-account_partner_reconcile
-account_invoice_import_simple_pdf
```
post_migration_fix_duplicated_views.py (new file, +192 lines)

```python
#!/usr/bin/env python3
"""
Post-Migration Duplicate View Fixer
Run this AFTER migration to fix duplicate views automatically.
"""

print("\n" + "="*80)
print("POST-MIGRATION DUPLICATE VIEW FIXER")
print("="*80 + "\n")

from collections import defaultdict

# Find all duplicate views
all_views = env['ir.ui.view'].search(['|', ('active', '=', True), ('active', '=', False)])
keys = defaultdict(list)

for view in all_views:
    if view.key:
        keys[view.key].append(view)

duplicates = {k: v for k, v in keys.items() if len(v) > 1}

print(f"Found {len(duplicates)} keys with duplicate views\n")

if not duplicates:
    print("✓ No duplicate views found! Database is clean.")
    print("=" * 80 + "\n")
    exit()

# Process duplicates
views_to_delete = []
redirect_log = []

for key, views in sorted(duplicates.items()):
    print(f"\nProcessing key: {key}")
    print("-" * 80)

    # Sort views: module views first, then by ID (older first)
    sorted_views = sorted(views, key=lambda v: (
        0 if v.model_data_id else 1,  # Module views first
        v.id  # Older views first (lower ID = older)
    ))

    # Keep the first view (should be module view or oldest)
    keep = sorted_views[0]
    to_delete = sorted_views[1:]

    module_keep = keep.model_data_id.module if keep.model_data_id else "Custom/DB"
    print(f"KEEP: ID {keep.id:>6} | Module: {module_keep:<20} | {keep.name}")

    for view in to_delete:
        module = view.model_data_id.module if view.model_data_id else "Custom/DB"
        print(f"DELETE: ID {view.id:>6} | Module: {module:<20} | {view.name}")

        # Find and redirect children
        children = env['ir.ui.view'].search([('inherit_id', '=', view.id)])
        if children:
            print(f" → Redirecting {len(children)} children {children.ids} to view {keep.id}")
            for child in children:
                child_module = child.model_data_id.module if child.model_data_id else "Custom/DB"
                redirect_log.append({
                    'child_id': child.id,
                    'child_name': child.name,
                    'child_module': child_module,
                    'from': view.id,
                    'to': keep.id
                })
            try:
                children.write({'inherit_id': keep.id})
                print(f" ✓ Redirected successfully")
            except Exception as e:
                print(f" ✗ Redirect failed: {e}")
                continue

        views_to_delete.append(view)

# Summary before deletion
print("\n" + "="*80)
print("SUMMARY")
print("="*80 + "\n")

print(f"Views to delete: {len(views_to_delete)}")
print(f"Child views to redirect: {len(redirect_log)}\n")

if redirect_log:
    print("Redirections that will be performed:")
    for item in redirect_log[:10]:  # Show first 10
        print(f" • View {item['child_id']} ({item['child_module']})")
        print(f" '{item['child_name']}'")
        print(f" Parent: {item['from']} → {item['to']}")

    if len(redirect_log) > 10:
        print(f" ... and {len(redirect_log) - 10} more redirections")

# Delete duplicate views
print("\n" + "="*80)
print("DELETING DUPLICATE VIEWS")
print("="*80 + "\n")

deleted_count = 0
failed_deletes = []

# Sort views by ID descending (delete newer/child views first)
views_to_delete_sorted = sorted(views_to_delete, key=lambda v: v.id, reverse=True)

for view in views_to_delete_sorted:
    try:
        # Create savepoint to isolate each deletion
        env.cr.execute('SAVEPOINT delete_view')

        view_id = view.id
        view_name = view.name
        view_key = view.key

        # Double-check it has no children
        remaining_children = env['ir.ui.view'].search([('inherit_id', '=', view_id)])
        if remaining_children:
            print(f"⚠️ Skipping view {view_id}: Still has {len(remaining_children)} children")
            failed_deletes.append({
                'id': view_id,
                'reason': f'Still has {len(remaining_children)} children'
            })
            env.cr.execute('ROLLBACK TO SAVEPOINT delete_view')
            continue

        view.unlink()
        env.cr.execute('RELEASE SAVEPOINT delete_view')
        print(f"✓ Deleted view {view_id}: {view_key}")
        deleted_count += 1

    except Exception as e:
        env.cr.execute('ROLLBACK TO SAVEPOINT delete_view')
        print(f"✗ Failed to delete view {view.id}: {e}")
        failed_deletes.append({
            'id': view.id,
            'name': view.name,
            'reason': str(e)
        })

# Commit changes
print("\n" + "="*80)
print("COMMITTING CHANGES")
print("="*80 + "\n")

try:
    env.cr.commit()
    print("✓ All changes committed successfully!")
except Exception as e:
    print(f"✗ Commit failed: {e}")
    print("Changes were NOT saved!")
    exit(1)

# Final verification
print("\n" + "="*80)
print("FINAL VERIFICATION")
print("="*80 + "\n")

# Re-check for duplicates
all_views_after = env['ir.ui.view'].search([('active', '=', True)])
keys_after = defaultdict(list)

for view in all_views_after:
    if view.key:
        keys_after[view.key].append(view)

duplicates_after = {k: v for k, v in keys_after.items() if len(v) > 1}

print(f"Results:")
print(f" • Successfully deleted: {deleted_count} views")
print(f" • Failed deletions: {len(failed_deletes)}")
print(f" • Child views redirected: {len(redirect_log)}")
print(f" • Remaining duplicates: {len(duplicates_after)}")

if failed_deletes:
    print(f"\n⚠️ Failed deletions:")
    for item in failed_deletes:
        print(f" • View {item['id']}: {item['reason']}")

if duplicates_after:
    print(f"\n⚠️ Still have {len(duplicates_after)} duplicate keys:")
    for key, views in sorted(duplicates_after.items())[:5]:
        print(f" • {key}: {len(views)} views")
        for view in views:
            module = view.model_data_id.module if view.model_data_id else "Custom/DB"
            print(f" - ID {view.id} ({module})")
    print(f"\n Run this script again to attempt another cleanup.")
else:
    print(f"\n✓ All duplicates resolved!")

print("\n" + "="*80)
print("FIX COMPLETED!")
print("="*80)
```
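The fixer assumes it runs inside an Odoo shell, where `env` is provided by the session; it is not importable as a standalone module. A minimal invocation sketch, mirroring the usage line documented in pre_migration_view_checking.py below (the database name is illustrative):

```bash
# Hedged sketch: pipe the fixer into an Odoo shell session on the migrated database.
# In this changeset the call is actually wrapped by exec_python_script_in_odoo_shell.
odoo shell -d ou18 < post_migration_fix_duplicated_views.py
```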
pre_migration_view_checking.py (new file, +126 lines)

```python
#!/usr/bin/env python3
"""
Pre-Migration Cleanup Script for Odoo
Run this BEFORE migrating to identify and clean up custom views.

Usage: odoo shell -d dbname < pre_migration_cleanup.py
"""

print("\n" + "="*80)
print("PRE-MIGRATION CLEANUP - VIEW ANALYSIS")
print("="*80 + "\n")

# 1. Find all custom (COW) views
print("STEP 1: Identifying Custom/COW Views")
print("-"*80)

all_views = env['ir.ui.view'].search(['|', ('active', '=', True), ('active', '=', False)])
cow_views = all_views.filtered(lambda v: not v.model_data_id)

print(f"Total views in database: {len(all_views)}")
print(f"Custom views (no module): {len(cow_views)}")
print(f"Module views: {len(all_views) - len(cow_views)}\n")

if cow_views:
    print("Custom views found:\n")
    print(f"{'ID':<8} {'Active':<8} {'Key':<50} {'Name':<40}")
    print("-"*120)

    for view in cow_views[:50]:  # Show first 50
        active_str = "✓" if view.active else "✗"
        key_str = view.key[:48] if view.key else "N/A"
        name_str = view.name[:38] if view.name else "N/A"
        print(f"{view.id:<8} {active_str:<8} {key_str:<50} {name_str:<40}")

    if len(cow_views) > 50:
        print(f"\n... and {len(cow_views) - 50} more custom views")

# 2. Find duplicate views
print("\n" + "="*80)
print("STEP 2: Finding Duplicate Views (Same Key)")
print("-"*80 + "\n")

from collections import defaultdict

keys = defaultdict(list)
for view in all_views.filtered(lambda v: v.key and v.active):
    keys[view.key].append(view)

duplicates = {k: v for k, v in keys.items() if len(v) > 1}

print(f"Found {len(duplicates)} keys with duplicate views:\n")

if duplicates:
    for key, views in sorted(duplicates.items()):
        print(f"\nKey: {key} ({len(views)} duplicates)")
        for view in views:
            module = view.model_data_id.module if view.model_data_id else "⚠️ Custom/DB"
            print(f" ID {view.id:>6}: {module:<25} | {view.name}")

# 3. Find views that might have xpath issues
print("\n" + "="*80)
print("STEP 3: Finding Views with XPath Expressions")
print("-"*80 + "\n")

import re

views_with_xpath = []
xpath_pattern = r'<xpath[^>]+expr="([^"]+)"'

for view in all_views.filtered(lambda v: v.active and v.inherit_id):
    xpaths = re.findall(xpath_pattern, view.arch_db)
    if xpaths:
        views_with_xpath.append({
            'view': view,
            'xpaths': xpaths,
            'is_custom': not bool(view.model_data_id)
        })

print(f"Found {len(views_with_xpath)} views with xpath expressions")

custom_xpath_views = [v for v in views_with_xpath if v['is_custom']]
print(f" - {len(custom_xpath_views)} are custom views (potential issue!)")
print(f" - {len(views_with_xpath) - len(custom_xpath_views)} are module views\n")

if custom_xpath_views:
    print("Custom views with xpaths (risk for migration issues):\n")
    for item in custom_xpath_views:
        view = item['view']
        print(f"ID {view.id}: {view.name}")
        print(f" Key: {view.key}")
        print(f" Inherits from: {view.inherit_id.key}")
        print(f" XPath count: {len(item['xpaths'])}")
        print(f" Sample xpaths: {item['xpaths'][:2]}")
        print()

# 4. Summary and recommendations
print("=" * 80)
print("SUMMARY AND RECOMMENDATIONS")
print("=" * 80 + "\n")

print(f"📊 Statistics:")
print(f" • Total views: {len(all_views)}")
print(f" • Custom views: {len(cow_views)}")
print(f" • Duplicate view keys: {len(duplicates)}")
print(f" • Custom views with xpaths: {len(custom_xpath_views)}\n")

print(f"\n📋 RECOMMENDED ACTIONS BEFORE MIGRATION:\n")

if custom_xpath_views:
    print(f"1. Archive or delete {len(custom_xpath_views)} custom views with xpaths:")
    print(f" • Review each one and determine if still needed")
    print(f" • Archive unnecessary ones: env['ir.ui.view'].browse([ids]).write({{'active': False}})")
    print(f" • Plan to recreate important ones as proper module views after migration\n")

if duplicates:
    print(f"2. Fix {len(duplicates)} duplicate view keys:")
    print(f" • Manually review and delete obsolete duplicates, keeping the most appropriate one")
    print(f" • Document the remaining appropriate ones, as the script post_migration_fix_duplicated_views.py will run AFTER the migration and delete all duplicates.\n")

if cow_views:
    print(f"3. Review {len(cow_views)} custom views:")
    print(f" • Document which ones are important")
    print(f" • Export their XML for reference")
    print(f" • Consider converting to module views\n")

print("=" * 80 + "\n")
```
```diff
@@ -77,6 +77,26 @@ Do you accept to migrate the database with all these add-ons still installed? (Y
 echo "Y - Yes, let's go on with the upgrade."
 echo "N - No, stop the upgrade"
 read -n 1 -p "Your choice: " choice
+case "$choice" in
+    [Yy] ) echo "
+Let's go on!";;
+    [Nn] ) echo "
+Upgrade cancelled!"; exit 1;;
+    * ) echo "
+Please answer by Y or N.";;
+esac
+
+
+# Check the views
+PYTHON_SCRIPT=pre_migration_view_checking.py
+echo "Check views with script $PYTHON_SCRIPT ..."
+exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT" || exit 1
+
+echo "
+Do you accept to migrate the database with the current views states? (Y/N/R)"
+echo "Y - Yes, let's go on with the upgrade."
+echo "N - No, stop the upgrade"
+read -n 1 -p "Your choice: " choice
 case "$choice" in
     [Yy] ) echo "
 Upgrade confirmed!";;
```
```diff
@@ -78,6 +78,15 @@ copy_filestore(){
 }
 export -f copy_filestore
 
+# Function to launch python scripts in Odoo Shell
+exec_python_script_in_odoo_shell(){
+    local SERVICE_NAME="$1"
+    local DB_NAME="$2"
+    local PYTHON_SCRIPT="$3"
+    compose --debug run "$SERVICE_NAME" shell -d "$DB_NAME" --no-http --stop-after-init < "$PYTHON_SCRIPT"
+}
+export -f exec_python_script_in_odoo_shell
+
 ##############################################
 # CHECKS ALL NEEDED COMPONENTS ARE AVAILABLE #
 ##############################################
```
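For reference, a hedged usage sketch of the new helper as the upgrade driver calls it: service name, database name, then the script piped into the Odoo shell. The literal values below are illustrative; the driver passes `$DB_NAME` for both arguments:

```bash
# Illustrative values; the driver actually runs:
#   exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT" || exit 1
exec_python_script_in_odoo_shell ou18 ou18 pre_migration_view_checking.py || exit 1
```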