Compare commits: main...lcc-credit (1 commit: ddb1fcd0ad)

1 .gitignore (vendored)

@@ -1 +0,0 @@
final_404_addons
0 13.0/post_upgrade.sh (Executable file)
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Prepare migration to 13.0..."
|
||||
|
||||
@@ -18,6 +17,20 @@ EOF
|
||||
)
|
||||
query_postgres_container "$PRE_MIGRATE_SQL" ou13 || exit 1
|
||||
|
||||
# Digital currency specific - to comment if not needed
|
||||
INVOICE_NAME_SQL=$(cat <<'EOF'
|
||||
ALTER TABLE credit_request ADD invoice_name VARCHAR;
|
||||
UPDATE credit_request
|
||||
SET invoice_name = (
|
||||
SELECT move_name
|
||||
FROM account_invoice
|
||||
WHERE account_invoice.id = credit_request.invoice_id
|
||||
);
|
||||
UPDATE credit_request SET invoice_id = NULL;
|
||||
EOF
|
||||
)
|
||||
query_postgres_container "$INVOICE_NAME_SQL" ou13 || exit 1
|
||||
|
||||
# Copy filestores
|
||||
copy_filestore ou12 ou12 ou13 ou13 || exit 1
|
||||
|
||||
3 13.0/upgrade.sh (Executable file)
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
|
||||
compose -f ../compose.yml run -p 8013:8069 ou13 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou13
|
||||
0 14.0/post_upgrade.sh (Executable file)
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Prepare migration to 14.0..."
|
||||
|
||||
3 14.0/upgrade.sh (Executable file)
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
|
||||
compose -f ../compose.yml run -p 8014:8069 ou14 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou14 --load=web,openupgrade_framework
|
||||
0 15.0/post_upgrade.sh (Executable file)
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Prepare migration to 15.0..."
|
||||
|
||||
3 15.0/upgrade.sh (Executable file)
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
|
||||
compose -f ../compose.yml run -p 8015:8069 ou15 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou15 --load=web,openupgrade_framework
|
||||
17 16.0/post_upgrade.sh (Executable file)
@@ -0,0 +1,17 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "POST migration to 16.0..."
|
||||
|
||||
# Digital currency specific - to comment if not needed
|
||||
INVOICE_NAME_SQL=$(cat <<'EOF'
|
||||
UPDATE credit_request
|
||||
SET invoice_id = (
|
||||
SELECT id
|
||||
FROM account_move
|
||||
WHERE account_move.name = credit_request.invoice_name
|
||||
);
|
||||
EOF
|
||||
)
|
||||
query_postgres_container "$INVOICE_NAME_SQL" ou16 || exit 1
|
||||
|
||||
echo "END POST migration to 16.0."
|
||||
@@ -1,5 +1,4 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Prepare migration to 16.0..."
|
||||
|
||||
3 16.0/upgrade.sh (Executable file)
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
|
||||
compose -f ../compose.yml run -p 8016:8069 ou16 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou16 --load=web,openupgrade_framework
|
||||
385 README.md
@@ -1,377 +1,64 @@
|
||||
# 0k-odoo-upgrade
|
||||
|
||||
A tool for migrating Odoo databases between major versions, using [OpenUpgrade](https://github.com/OCA/OpenUpgrade) in a production-like Docker environment.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Installation](#installation)
|
||||
- [Project Structure](#project-structure)
|
||||
- [How It Works](#how-it-works)
|
||||
- [Usage](#usage)
|
||||
- [Customization](#customization)
|
||||
- [Troubleshooting](#troubleshooting)
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- [0k dev-pack](https://git.myceliandre.fr/Lokavaluto/dev-pack) installed (provides the `compose` command)
|
||||
- Docker and Docker Compose
|
||||
- `rsync` for filestore copying
|
||||
- `sudo` access for filestore operations
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
git clone <repository-url>
|
||||
cd 0k-odoo-upgrade
|
||||
```
|
||||
- Clone the current repo
|
||||
|
||||
## Project Structure
|
||||
## Configuration
|
||||
|
||||
```
.
├── upgrade.sh                       # Main entry point
│
├── config/
│   └── compose.yml                  # Docker Compose configuration
│
├── lib/
│   ├── common.sh                    # Shared bash functions
│   └── python/                      # Python utility scripts
│       ├── check_views.py           # View analysis (pre-migration)
│       ├── validate_views.py        # View validation (post-migration)
│       ├── fix_duplicated_views.py  # Fix duplicated views
│       └── cleanup_modules.py       # Obsolete module cleanup
│
├── scripts/
│   ├── prepare_db.sh                # Database preparation before migration
│   ├── finalize_db.sh               # Post-migration finalization
│   └── validate_migration.sh        # Manual post-migration validation
│
└── versions/                        # Version-specific scripts
    ├── 13.0/
    │   ├── pre_upgrade.sh           # SQL fixes before migration
    │   ├── upgrade.sh               # OpenUpgrade execution
    │   └── post_upgrade.sh          # Fixes after migration
    ├── 14.0/
    ├── ...
    └── 18.0/
```
|
||||
|
||||
## How It Works
|
||||
|
||||
### Overview
|
||||
|
||||
The script performs a **step-by-step migration** between each major version. For example, to migrate from 14.0 to 17.0, it executes:
|
||||
|
||||
```
|
||||
14.0 → 15.0 → 16.0 → 17.0
|
||||
```
|
||||
|
||||
### Process Steps
|
||||
|
||||
1. **Initial Checks**
|
||||
- Argument validation
|
||||
- Required command verification (`docker`, `compose`, `sudo`, `rsync`)
|
||||
- Source database and filestore existence check
|
||||
|
||||
2. **Environment Preparation**
|
||||
- Creation of a fresh Odoo database in the target version (for module comparison)
|
||||
- Copy of the source database to a working database
|
||||
- Filestore copy
|
||||
|
||||
3. **Database Preparation** (`scripts/prepare_db.sh`)
|
||||
- Neutralization: disable mail servers and cron jobs
|
||||
- Detection of installed modules missing in the target version
|
||||
- View state verification
|
||||
- User confirmation prompt
|
||||
|
||||
4. **Migration Loop** (for each intermediate version; see the sketch after this list)
|
||||
- `pre_upgrade.sh`: version-specific SQL fixes before migration
|
||||
- `upgrade.sh`: OpenUpgrade execution via Docker
|
||||
- `post_upgrade.sh`: fixes after migration
|
||||
|
||||
5. **Finalization** (`scripts/finalize_db.sh`)
|
||||
- Obsolete sequence removal
|
||||
- Modified website template reset
|
||||
- Compiled asset cache purge
|
||||
- Duplicated view fixes
|
||||
- Obsolete module cleanup
|
||||
- Final update with `-u all`
|
||||
|
||||
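For each intermediate version, the loop in step 4 simply chains the three version scripts. A minimal sketch (the loop variable and the exact invocation are assumptions; the real driver is `upgrade.sh`):

```bash
# Hypothetical sketch of the migration loop (names assumed, not the actual upgrade.sh code).
for version in 15 16 17; do                       # intermediate versions up to the target
    dir="versions/${version}.0"
    bash "${dir}/pre_upgrade.sh"  || exit 1       # version-specific SQL fixes before OpenUpgrade
    bash "${dir}/upgrade.sh"      || exit 1       # run OpenUpgrade via Docker
    bash "${dir}/post_upgrade.sh" || exit 1       # fixes after OpenUpgrade
done
```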
### Flow Diagram
|
||||
|
||||
```
┌─────────────────┐
│   upgrade.sh    │
└────────┬────────┘
         │
         ▼
┌─────────────────┐     ┌─────────────────┐
│    Initial      │────▶│    Copy DB +    │
│     checks      │     │    filestore    │
└─────────────────┘     └────────┬────────┘
                                 │
                                 ▼
                        ┌─────────────────┐
                        │  prepare_db.sh  │
                        │ (neutralization)│
                        └────────┬────────┘
                                 │
         ┌───────────────────────┼───────────────────────┐
         │                       │                       │
         ▼                       ▼                       ▼
┌─────────────────┐     ┌─────────────────┐     ┌─────────────────┐
│ versions/13.0/  │────▶│ versions/14.0/  │────▶│  versions/N.0/  │
│ pre/upgrade/post│     │ pre/upgrade/post│     │ pre/upgrade/post│
└─────────────────┘     └─────────────────┘     └────────┬────────┘
                                                         │
                                                         ▼
                                                ┌─────────────────┐
                                                │ finalize_db.sh  │
                                                │    (cleanup)    │
                                                └─────────────────┘
```
|
||||
- Requires the 0k scripts to be installed on the computer: https://git.myceliandre.fr/Lokavaluto/dev-pack
|
||||
|
||||
## Usage
|
||||
### Before migration
|
||||
|
||||
### Before Migration
|
||||
- [ ] Import the origin database to migrate onto your local computer
|
||||
- [ ] Uninstall all known useless Odoo add-ons. Warning: do not uninstall add-ons whose removal in the final version is handled by the OpenUpgrade scripts.
|
||||
- [ ] Ensure all the add-ons are migrated to the final Odoo version
|
||||
- [ ] (optional) Deactivate all the website views
|
||||
|
||||
1. **Import the source database** to your local machine
|
||||
### Local Migration process
|
||||
|
||||
2. **Clean up the source database** (recommended)
|
||||
- Uninstall unnecessary modules
|
||||
- Do NOT uninstall modules handled by OpenUpgrade
|
||||
- [ ] Launch the origin database `ORIGIN_DATABASE_NAME` with the original Odoo version, using the Odoo service `ORIGIN_SERVICE`
|
||||
- [ ] Launch the following command:
|
||||
|
||||
3. **Check module availability**
|
||||
- Ensure all custom modules are ported to the target version
|
||||
|
||||
4. **Start the Docker environment**
|
||||
```bash
|
||||
# Start the PostgreSQL container
|
||||
compose up -d postgres
|
||||
|
||||
# Verify only one postgres container is running
|
||||
docker ps | grep postgres
|
||||
```
|
||||
|
||||
### Running the Migration
|
||||
|
||||
```bash
|
||||
./upgrade.sh <source_version> <target_version> <database_name> <source_service>
|
||||
```bash
|
||||
./upgrade.sh {ORIGIN_VERSION} {DESTINATION_VERSION} {ORIGIN_DATABASE_NAME} {ORIGIN_SERVICE}
|
||||
```
|
||||
Example: `./upgrade.sh 14 16 elabore_20241208 odoo14`
|
||||
|
||||
**Parameters:**
|
||||
| Parameter | Description | Example |
|
||||
|-----------|-------------|---------|
|
||||
| `source_version` | Source Odoo version (without .0) | `14` |
|
||||
| `target_version` | Target Odoo version (without .0) | `17` |
|
||||
| `database_name` | Database name | `my_prod_db` |
|
||||
| `source_service` | Source Docker Compose service | `odoo14` |
|
||||
- [ ] Inspect the list of add-ons identified as missing in the final Odoo docker image:
|
||||
- if you want to uninstall some of them:
|
||||
- STOP the process (N)
|
||||
- uninstall the concerned add-ons manually
|
||||
- launch the migration script again
|
||||
- if the list suits you, the show can go on (Y)!
|
||||
|
||||
**Example:**
|
||||
```bash
|
||||
./upgrade.sh 14 17 elabore_20241208 odoo14
|
||||
```
|
||||
The migration process should run through all the intermediate migrations up to the last one without any action needed from you.
|
||||
|
||||
### During Migration
|
||||
### Deploy the migrated database
|
||||
|
||||
The script will prompt for confirmation at two points:
|
||||
- [ ] Retrieve the migrated database (vps odoo dump)
|
||||
- [ ] Copy the database onto the target VPS
|
||||
- [ ] vps odoo restore
|
||||
|
||||
1. **Missing modules list**: installed modules that don't exist in the target version
|
||||
- `Y`: continue (modules will be marked for removal)
|
||||
- `N`: abort to manually uninstall certain modules
|
||||
|
||||
2. **View state**: verification of potentially problematic views
|
||||
- `Y`: continue
|
||||
- `N`: abort to manually fix issues
|
||||
## Manage the add-ons to uninstall
|
||||
|
||||
### After Migration
|
||||
The migration script manages the uninstallation of Odoo add-ons in two categories:
|
||||
- add-ons we want to uninstall, whatever the reason (see the excerpt below)
|
||||
- add-ons to uninstall because they do not exist in the final Odoo docker image
|
||||
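The first category appears to be maintained in the `force_uninstall_addons` file at the repository root (one module name per line); an excerpt from the file shown later in this diff:

```
mass_editing
muk_utils
account_usability
```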
|
||||
1. **Review logs** to detect any non-blocking errors
|
||||
At the beginning of the process, the script compares the list of add-ons installed in the origin database with the list of add-ons available in the final Odoo Docker image.
|
||||
|
||||
2. **Validate the migration** (see [Post-Migration Validation](#post-migration-validation))
|
||||
The whole list of add-ons to uninstall is displayed and must be confirmed before the migration starts.
|
||||
|
||||
3. **Test the migrated database** locally
|
||||
## Customize the migration scripts
|
||||
|
||||
4. **Deploy to production**
|
||||
```bash
|
||||
# Export the migrated database
|
||||
vps odoo dump db_migrated.zip
|
||||
FEATURE COMING SOON...
|
||||
|
||||
# On the production server
|
||||
vps odoo restore db_migrated.zip
|
||||
```
|
||||
|
||||
## Post-Migration Validation
|
||||
## Manage migration issues
|
||||
|
||||
After migration, use the validation script to check for broken views and XPath errors.
|
||||
As the migration process is performed on a copy of the original database, it can be restarted as many times as needed.
|
||||
|
||||
### Quick Start
|
||||
|
||||
```bash
|
||||
./scripts/validate_migration.sh ou17 odoo17
|
||||
```
|
||||
|
||||
### What Gets Validated
|
||||
|
||||
Runs in Odoo shell, no HTTP server needed:
|
||||
|
||||
| Check | Description |
|
||||
|-------|-------------|
|
||||
| **Inherited views** | Verifies all inherited views can combine with their parent |
|
||||
| **XPath targets** | Ensures XPath expressions find their targets in parent views |
|
||||
| **QWeb templates** | Validates QWeb templates are syntactically correct |
|
||||
| **Field references** | Checks that field references point to existing model fields |
|
||||
| **Odoo native** | Runs Odoo's built-in `_validate_custom_views()` |
|
||||
|
||||
### Running Directly
|
||||
|
||||
You can also run the Python script directly in Odoo shell:
|
||||
|
||||
```bash
|
||||
compose run odoo17 shell -d ou17 --no-http --stop-after-init < lib/python/validate_views.py
|
||||
```
|
||||
|
||||
### Output
|
||||
|
||||
- **Colored terminal output** with `[OK]`, `[ERROR]`, `[WARN]` indicators
|
||||
- **JSON report** written to `/tmp/validation_views_<db>_<timestamp>.json`
|
||||
- **Exit code**: `0` = success, `1` = errors found
|
||||
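Because the exit code reflects the result, the validation step can be scripted; a minimal example reusing the names from the Quick Start above:

```bash
# Run the validation and react to its exit code.
if ./scripts/validate_migration.sh ou17 odoo17; then
    echo "All views validated."
else
    echo "Validation errors found, check the JSON report under /tmp/validation_views_*.json" >&2
fi
```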
|
||||
## Customization
|
||||
|
||||
### Version Scripts
|
||||
|
||||
Each `versions/X.0/` directory contains three scripts you can customize:
|
||||
|
||||
#### `pre_upgrade.sh`
|
||||
Executed **before** OpenUpgrade. Use it to:
|
||||
- Add missing columns expected by OpenUpgrade
|
||||
- Fix incompatible data
|
||||
- Remove problematic records
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Prepare migration to 15.0..."
|
||||
|
||||
copy_database ou14 ou15 ou15
|
||||
|
||||
PRE_MIGRATE_SQL=$(cat <<'EOF'
|
||||
-- Example: remove a problematic module
|
||||
DELETE FROM ir_module_module WHERE name = 'obsolete_module';
|
||||
EOF
|
||||
)
|
||||
query_postgres_container "$PRE_MIGRATE_SQL" ou15
|
||||
|
||||
copy_filestore ou14 ou14 ou15 ou15
|
||||
|
||||
echo "Ready for migration to 15.0!"
|
||||
```
|
||||
|
||||
#### `upgrade.sh`
|
||||
Runs OpenUpgrade migration scripts.
|
||||
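For reference, a version's `upgrade.sh` is essentially a single `compose run` invocation; the `15.0/upgrade.sh` shown earlier in this diff reads:

```bash
#!/bin/bash

compose -f ../compose.yml run -p 8015:8069 ou15 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=warn --max-cron-threads=0 --limit-time-real=10000 --database=ou15 --load=web,openupgrade_framework
```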
|
||||
#### `post_upgrade.sh`
|
||||
Executed **after** OpenUpgrade. Use it to:
|
||||
- Fix incorrectly migrated data
|
||||
- Remove orphan records
|
||||
- Update system parameters
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Post migration to 15.0..."
|
||||
|
||||
POST_MIGRATE_SQL=$(cat <<'EOF'
|
||||
-- Example: fix a configuration value
|
||||
UPDATE ir_config_parameter
|
||||
SET value = 'new_value'
|
||||
WHERE key = 'my_key';
|
||||
EOF
|
||||
)
|
||||
query_postgres_container "$POST_MIGRATE_SQL" ou15
|
||||
```
|
||||
|
||||
### Available Functions
|
||||
|
||||
Version scripts have access to functions defined in `lib/common.sh`:
|
||||
|
||||
| Function | Description |
|
||||
|----------|-------------|
|
||||
| `query_postgres_container "$SQL" "$DB"` | Execute an SQL query |
|
||||
| `copy_database $from $to_service $to_db` | Copy a PostgreSQL database |
|
||||
| `copy_filestore $from_svc $from_db $to_svc $to_db` | Copy a filestore |
|
||||
| `log_info`, `log_warn`, `log_error` | Logging functions |
|
||||
| `log_step "title"` | Display a section header |
|
||||
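A minimal sketch of a version script built on these helpers (the SQL statement and version numbers are placeholders; the helper signatures come from `lib/common.sh`):

```bash
#!/bin/bash
set -euo pipefail

log_step "Prepare migration to 16.0"
copy_database ou15 ou16 ou16                    # copy the 15.0 working database into the 16.0 one
query_postgres_container "VACUUM ANALYZE;" ou16 # placeholder SQL, replace with real fixes
copy_filestore ou15 ou15 ou16 ou16
log_info "Database ou16 ready for OpenUpgrade."
```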
|
||||
### Adding a New Version
|
||||
|
||||
To add support for a new version (e.g., 19.0):
|
||||
|
||||
```bash
|
||||
mkdir versions/19.0
|
||||
cp versions/18.0/*.sh versions/19.0/
|
||||
|
||||
# Edit the scripts to:
|
||||
# - Change references from ou18 → ou19
|
||||
# - Change the port from -p 8018:8069 → -p 8019:8069
|
||||
# - Add SQL fixes specific to this migration
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### "No running PostgreSQL container found"
|
||||
```bash
|
||||
# Check active containers
|
||||
docker ps | grep postgres
|
||||
|
||||
# Start the container if needed
|
||||
compose up -d postgres
|
||||
```
|
||||
|
||||
#### "Multiple PostgreSQL containers found"
|
||||
Stop the extra PostgreSQL containers:
|
||||
```bash
|
||||
docker stop <container_name_to_stop>
|
||||
```
|
||||
|
||||
#### "Database not found"
|
||||
The source database must exist in PostgreSQL:
|
||||
```bash
|
||||
# List databases
|
||||
docker exec -u 70 <postgres_container> psql -l
|
||||
|
||||
# Import a database if needed
|
||||
docker exec -u 70 <postgres_container> pgm restore <file.zip>
|
||||
```
|
||||
|
||||
#### "Filestore not found"
|
||||
The filestore must be present at `/srv/datastore/data/<service>/var/lib/odoo/filestore/<database>/`
|
||||
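A quick way to check, substituting your own service and database names (the values below are the examples used elsewhere in this README):

```bash
sudo ls /srv/datastore/data/odoo14/var/lib/odoo/filestore/my_prod_db/ | head
```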
|
||||
### Restarting After an Error
|
||||
|
||||
The script works on a **copy** of the original database. You can restart as many times as needed:
|
||||
|
||||
```bash
|
||||
# Simply restart - the copy will be recreated
|
||||
./upgrade.sh 14 17 my_database odoo14
|
||||
```
|
||||
|
||||
### Viewing Detailed Logs
|
||||
|
||||
Odoo/OpenUpgrade logs are displayed in real-time. For a problematic migration:
|
||||
|
||||
1. Note the version where the error occurs
|
||||
2. Check the logs to identify the problematic module/table
|
||||
3. Add a fix in the `pre_upgrade.sh` for that version
|
||||
4. Restart the migration
|
||||
|
||||
## License
|
||||
|
||||
See the [LICENSE](LICENSE) file.
|
||||
Some Odoo migration errors will not stop the migration process, so watch the logs carefully for errors.
|
||||
|
||||
@@ -52,7 +52,7 @@ ou15:
|
||||
ou16:
|
||||
charm: odoo-tecnativa
|
||||
docker-compose:
|
||||
image: docker.0k.io/mirror/odoo:rc_16.0-ELABORE-LIGHT
|
||||
image: docker.0k.io/mirror/odoo:rc_16.0-MYC-INIT
|
||||
## Important to keep as a list: otherwise it'll overwrite charm's arguments.
|
||||
command:
|
||||
- "--log-level=debug"
|
||||
@@ -73,18 +73,6 @@ ou17:
|
||||
options:
|
||||
workers: 0
|
||||
|
||||
ou18:
|
||||
charm: odoo-tecnativa
|
||||
docker-compose:
|
||||
image: docker.0k.io/mirror/odoo:rc_18.0-ELABORE-LIGHT
|
||||
## Important to keep as a list: otherwise it'll overwrite charm's arguments.
|
||||
command:
|
||||
- "--log-level=debug"
|
||||
- "--limit-time-cpu=1000000"
|
||||
- "--limit-time-real=1000000"
|
||||
options:
|
||||
workers: 0
|
||||
|
||||
postgres:
|
||||
docker-compose:
|
||||
image: docker.0k.io/postgres:17.2.0-myc
|
||||
image: docker.0k.io/postgres:12.15.0-myc
|
||||
20 finalize_db.sh (Executable file)
@@ -0,0 +1,20 @@
|
||||
#!/bin/bash
|
||||
|
||||
DB_NAME="$1"
|
||||
ODOO_SERVICE="$2"
|
||||
|
||||
FINALE_SQL=$(cat <<'EOF'
|
||||
/* Delete sequences that prevent Odoo from starting */
|
||||
drop sequence base_registry_signaling;
|
||||
drop sequence base_cache_signaling;
|
||||
EOF
|
||||
)
|
||||
query_postgres_container "$FINALE_SQL" "$DB_NAME" || exit 1
|
||||
|
||||
|
||||
# Give the user back the rights to access the tables
|
||||
# docker exec -u 70 "$DB_CONTAINER_NAME" pgm chown "$FINALE_SERVICE_NAME" "$DB_NAME"
|
||||
|
||||
|
||||
# Launch Odoo with the database in the final version to run all updates
|
||||
compose --debug run "$ODOO_SERVICE" -u all --stop-after-init --no-http
|
||||
24 force_uninstall_addons (Normal file)
@@ -0,0 +1,24 @@
|
||||
galicea_base
|
||||
galicea_environment_checkup
|
||||
mass_editing
|
||||
mass_mailing_themes
|
||||
muk_autovacuum
|
||||
muk_fields_lobject
|
||||
muk_fields_stream
|
||||
muk_utils
|
||||
muk_web_theme_mail
|
||||
muk_web_utils
|
||||
account_usability
|
||||
kpi_dashboard
|
||||
web_window_title
|
||||
website_project_kanbanview
|
||||
project_usability
|
||||
project_tag
|
||||
maintenance_server_monitoring_ping
|
||||
maintenance_server_monitoring_ssh
|
||||
maintenance_server_monitoring_memory
|
||||
maintenance_server_monitoring_maintenance_equipment_status
|
||||
maintenance_server_monitoring_disk
|
||||
project_task_assignees_avatar
|
||||
account_partner_reconcile
|
||||
account_invoice_import_simple_pdf
|
||||
106 lib/common.sh
@@ -1,106 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Common functions for Odoo migration scripts
|
||||
# Source this file from other scripts: source "$(dirname "$0")/lib/common.sh"
|
||||
#
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Get the absolute path of the project root (parent of lib/)
|
||||
readonly PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||
|
||||
readonly DATASTORE_PATH="/srv/datastore/data"
|
||||
readonly FILESTORE_SUBPATH="var/lib/odoo/filestore"
|
||||
|
||||
check_required_commands() {
|
||||
local missing=()
|
||||
for cmd in docker compose sudo rsync; do
|
||||
if ! command -v "$cmd" &>/dev/null; then
|
||||
missing+=("$cmd")
|
||||
fi
|
||||
done
|
||||
if [[ ${#missing[@]} -gt 0 ]]; then
|
||||
log_error "Required commands not found: ${missing[*]}"
|
||||
log_error "Please install them before running this script."
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
log_info() { printf "[INFO] %s\n" "$*"; }
|
||||
log_warn() { printf "[WARN] %s\n" "$*" >&2; }
|
||||
log_error() { printf "[ERROR] %s\n" "$*" >&2; }
|
||||
log_step() { printf "\n===== %s =====\n" "$*"; }
|
||||
|
||||
confirm_or_exit() {
|
||||
local message="$1"
|
||||
local choice
|
||||
echo ""
|
||||
echo "$message"
|
||||
echo "Y - Yes, continue"
|
||||
echo "N - No, cancel"
|
||||
read -r -n 1 -p "Your choice: " choice
|
||||
echo ""
|
||||
case "$choice" in
|
||||
[Yy]) return 0 ;;
|
||||
*) log_error "Cancelled by user."; exit 1 ;;
|
||||
esac
|
||||
}
|
||||
|
||||
query_postgres_container() {
|
||||
local query="$1"
|
||||
local db_name="$2"
|
||||
|
||||
if [[ -z "$query" ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
local result
|
||||
if ! result=$(docker exec -u 70 "$POSTGRES_SERVICE_NAME" psql -d "$db_name" -t -A -c "$query"); then
|
||||
printf "Failed to execute SQL query: %s\n" "$query" >&2
|
||||
printf "Error: %s\n" "$result" >&2
|
||||
return 1
|
||||
fi
|
||||
echo "$result"
|
||||
}
|
||||
|
||||
copy_database() {
|
||||
local from_db="$1"
|
||||
local to_service="$2"
|
||||
local to_db="$3"
|
||||
|
||||
docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm cp -f "$from_db" "${to_db}@${to_service}"
|
||||
}
|
||||
|
||||
copy_filestore() {
|
||||
local from_service="$1"
|
||||
local from_db="$2"
|
||||
local to_service="$3"
|
||||
local to_db="$4"
|
||||
|
||||
local src_path="${DATASTORE_PATH}/${from_service}/${FILESTORE_SUBPATH}/${from_db}"
|
||||
local dst_path="${DATASTORE_PATH}/${to_service}/${FILESTORE_SUBPATH}/${to_db}"
|
||||
|
||||
sudo mkdir -p "$(dirname "$dst_path")"
|
||||
sudo rsync -a --delete "${src_path}/" "${dst_path}/"
|
||||
echo "Filestore ${from_service}/${from_db} copied to ${to_service}/${to_db}."
|
||||
}
|
||||
|
||||
# Workaround: 0k dev-pack's compose script doesn't handle absolute paths correctly.
|
||||
# It passes HOST_COMPOSE_YML_FILE to the container, which tries to open it directly
|
||||
# instead of using the mounted path. Using a relative path from PROJECT_ROOT avoids this.
|
||||
run_compose() {
|
||||
(cd "$PROJECT_ROOT" && compose -f ./config/compose.yml "$@")
|
||||
}
|
||||
|
||||
exec_python_script_in_odoo_shell() {
|
||||
local service_name="$1"
|
||||
local db_name="$2"
|
||||
local python_script="$3"
|
||||
|
||||
run_compose --debug run "$service_name" shell -d "$db_name" --no-http --stop-after-init < "$python_script"
|
||||
}
|
||||
|
||||
export PROJECT_ROOT DATASTORE_PATH FILESTORE_SUBPATH
|
||||
export -f log_info log_warn log_error log_step confirm_or_exit
|
||||
export -f check_required_commands
|
||||
export -f query_postgres_container copy_database copy_filestore run_compose exec_python_script_in_odoo_shell
|
||||
@@ -1,126 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Pre-Migration Cleanup Script for Odoo
|
||||
Run this BEFORE migrating to identify and clean up custom views.
|
||||
|
||||
Usage: odoo shell -d dbname < pre_migration_cleanup.py
|
||||
"""
|
||||
|
||||
print("\n" + "="*80)
|
||||
print("PRE-MIGRATION CLEANUP - VIEW ANALYSIS")
|
||||
print("="*80 + "\n")
|
||||
|
||||
# 1. Find all custom (COW) views
|
||||
print("STEP 1: Identifying Custom/COW Views")
|
||||
print("-"*80)
|
||||
|
||||
all_views = env['ir.ui.view'].search(['|', ('active', '=', True), ('active', '=', False)])
|
||||
cow_views = all_views.filtered(lambda v: not v.model_data_id)
|
||||
|
||||
print(f"Total views in database: {len(all_views)}")
|
||||
print(f"Custom views (no module): {len(cow_views)}")
|
||||
print(f"Module views: {len(all_views) - len(cow_views)}\n")
|
||||
|
||||
if cow_views:
|
||||
print("Custom views found:\n")
|
||||
print(f"{'ID':<8} {'Active':<8} {'Key':<50} {'Name':<40}")
|
||||
print("-"*120)
|
||||
|
||||
for view in cow_views[:50]: # Show first 50
|
||||
active_str = "✓" if view.active else "✗"
|
||||
key_str = view.key[:48] if view.key else "N/A"
|
||||
name_str = view.name[:38] if view.name else "N/A"
|
||||
print(f"{view.id:<8} {active_str:<8} {key_str:<50} {name_str:<40}")
|
||||
|
||||
if len(cow_views) > 50:
|
||||
print(f"\n... and {len(cow_views) - 50} more custom views")
|
||||
|
||||
# 2. Find duplicate views
|
||||
print("\n" + "="*80)
|
||||
print("STEP 2: Finding Duplicate Views (Same Key)")
|
||||
print("-"*80 + "\n")
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
keys = defaultdict(list)
|
||||
for view in all_views.filtered(lambda v: v.key and v.active):
|
||||
keys[view.key].append(view)
|
||||
|
||||
duplicates = {k: v for k, v in keys.items() if len(v) > 1}
|
||||
|
||||
print(f"Found {len(duplicates)} keys with duplicate views:\n")
|
||||
|
||||
if duplicates:
|
||||
for key, views in sorted(duplicates.items()):
|
||||
print(f"\nKey: {key} ({len(views)} duplicates)")
|
||||
for view in views:
|
||||
module = view.model_data_id.module if view.model_data_id else "⚠️ Custom/DB"
|
||||
print(f" ID {view.id:>6}: {module:<25} | {view.name}")
|
||||
|
||||
# 3. Find views that might have xpath issues
|
||||
print("\n" + "="*80)
|
||||
print("STEP 3: Finding Views with XPath Expressions")
|
||||
print("-"*80 + "\n")
|
||||
|
||||
import re
|
||||
|
||||
views_with_xpath = []
|
||||
xpath_pattern = r'<xpath[^>]+expr="([^"]+)"'
|
||||
|
||||
for view in all_views.filtered(lambda v: v.active and v.inherit_id):
|
||||
xpaths = re.findall(xpath_pattern, view.arch_db)
|
||||
if xpaths:
|
||||
views_with_xpath.append({
|
||||
'view': view,
|
||||
'xpaths': xpaths,
|
||||
'is_custom': not bool(view.model_data_id)
|
||||
})
|
||||
|
||||
print(f"Found {len(views_with_xpath)} views with xpath expressions")
|
||||
|
||||
custom_xpath_views = [v for v in views_with_xpath if v['is_custom']]
|
||||
print(f" - {len(custom_xpath_views)} are custom views (potential issue!)")
|
||||
print(f" - {len(views_with_xpath) - len(custom_xpath_views)} are module views\n")
|
||||
|
||||
if custom_xpath_views:
|
||||
print("Custom views with xpaths (risk for migration issues):\n")
|
||||
for item in custom_xpath_views:
|
||||
view = item['view']
|
||||
print(f"ID {view.id}: {view.name}")
|
||||
print(f" Key: {view.key}")
|
||||
print(f" Inherits from: {view.inherit_id.key}")
|
||||
print(f" XPath count: {len(item['xpaths'])}")
|
||||
print(f" Sample xpaths: {item['xpaths'][:2]}")
|
||||
print()
|
||||
|
||||
# 4. Summary and recommendations
|
||||
print("=" * 80)
|
||||
print("SUMMARY AND RECOMMENDATIONS")
|
||||
print("=" * 80 + "\n")
|
||||
|
||||
print(f"📊 Statistics:")
|
||||
print(f" • Total views: {len(all_views)}")
|
||||
print(f" • Custom views: {len(cow_views)}")
|
||||
print(f" • Duplicate view keys: {len(duplicates)}")
|
||||
print(f" • Custom views with xpaths: {len(custom_xpath_views)}\n")
|
||||
|
||||
print(f"\n📋 RECOMMENDED ACTIONS BEFORE MIGRATION:\n")
|
||||
|
||||
if custom_xpath_views:
|
||||
print(f"1. Archive or delete {len(custom_xpath_views)} custom views with xpaths:")
|
||||
print(f" • Review each one and determine if still needed")
|
||||
print(f" • Archive unnecessary ones: env['ir.ui.view'].browse([ids]).write({{'active': False}})")
|
||||
print(f" • Plan to recreate important ones as proper module views after migration\n")
|
||||
|
||||
if duplicates:
|
||||
print(f"2. Fix {len(duplicates)} duplicate view keys:")
|
||||
print(f" • Manually review and delete obsolete duplicates, keeping the most appropriate one")
|
||||
print(f" • Document the remaining appropriate ones as script post_migration_fix_duplicated_views.py will run AFTER the migration and delete all duplicates.\n")
|
||||
|
||||
if cow_views:
|
||||
print(f"3. Review {len(cow_views)} custom views:")
|
||||
print(f" • Document which ones are important")
|
||||
print(f" • Export their XML for reference")
|
||||
print(f" • Consider converting to module views\n")
|
||||
|
||||
print("=" * 80 + "\n")
|
||||
@@ -1,128 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Post-Migration Obsolete Module Cleanup
|
||||
Run this AFTER migration to detect and remove modules that exist in the database
|
||||
but no longer exist in the filesystem (addons paths).
|
||||
"""
|
||||
|
||||
print("\n" + "="*80)
|
||||
print("POST-MIGRATION OBSOLETE MODULE CLEANUP")
|
||||
print("="*80 + "\n")
|
||||
|
||||
import odoo.modules.module as module_lib
|
||||
|
||||
# Get all modules from database
|
||||
all_modules = env['ir.module.module'].search([])
|
||||
|
||||
print(f"Analyzing {len(all_modules)} modules in database...\n")
|
||||
|
||||
# Detect obsolete modules (in database but not in filesystem)
|
||||
obsolete_modules = []
|
||||
for mod in all_modules:
|
||||
mod_path = module_lib.get_module_path(mod.name, display_warning=False)
|
||||
if not mod_path:
|
||||
obsolete_modules.append(mod)
|
||||
|
||||
if not obsolete_modules:
|
||||
print("✓ No obsolete modules found! Database is clean.")
|
||||
print("=" * 80 + "\n")
|
||||
exit()
|
||||
|
||||
# Separate modules by state
|
||||
safe_to_delete = [m for m in obsolete_modules if m.state != 'installed']
|
||||
installed_obsolete = [m for m in obsolete_modules if m.state == 'installed']
|
||||
|
||||
# Display obsolete modules
|
||||
print(f"Obsolete modules found: {len(obsolete_modules)}\n")
|
||||
|
||||
if installed_obsolete:
|
||||
print("-" * 80)
|
||||
print("⚠️ OBSOLETE INSTALLED MODULES (require attention)")
|
||||
print("-" * 80)
|
||||
for mod in sorted(installed_obsolete, key=lambda m: m.name):
|
||||
print(f" • {mod.name:40} | ID: {mod.id}")
|
||||
print()
|
||||
|
||||
if safe_to_delete:
|
||||
print("-" * 80)
|
||||
print("OBSOLETE UNINSTALLED MODULES (safe to delete)")
|
||||
print("-" * 80)
|
||||
for mod in sorted(safe_to_delete, key=lambda m: m.name):
|
||||
print(f" • {mod.name:40} | State: {mod.state:15} | ID: {mod.id}")
|
||||
print()
|
||||
|
||||
# Summary
|
||||
print("=" * 80)
|
||||
print("SUMMARY")
|
||||
print("=" * 80 + "\n")
|
||||
print(f" • Obsolete uninstalled modules (safe to delete): {len(safe_to_delete)}")
|
||||
print(f" • Obsolete INSTALLED modules (caution!): {len(installed_obsolete)}")
|
||||
|
||||
# Delete uninstalled modules
|
||||
if safe_to_delete:
|
||||
print("\n" + "=" * 80)
|
||||
print("DELETING OBSOLETE UNINSTALLED MODULES")
|
||||
print("=" * 80 + "\n")
|
||||
|
||||
deleted_count = 0
|
||||
failed_deletes = []
|
||||
|
||||
for mod in safe_to_delete:
|
||||
try:
|
||||
mod_name = mod.name
|
||||
mod_id = mod.id
|
||||
mod.unlink()
|
||||
print(f"✓ Deleted: {mod_name} (ID: {mod_id})")
|
||||
deleted_count += 1
|
||||
except Exception as e:
|
||||
print(f"✗ Failed: {mod.name} - {e}")
|
||||
failed_deletes.append({'name': mod.name, 'id': mod.id, 'reason': str(e)})
|
||||
|
||||
# Commit changes
|
||||
print("\n" + "=" * 80)
|
||||
print("COMMITTING CHANGES")
|
||||
print("=" * 80 + "\n")
|
||||
|
||||
try:
|
||||
env.cr.commit()
|
||||
print("✓ All changes committed successfully!")
|
||||
except Exception as e:
|
||||
print(f"✗ Commit failed: {e}")
|
||||
print("Changes were NOT saved!")
|
||||
exit(1)
|
||||
|
||||
# Final result
|
||||
print("\n" + "=" * 80)
|
||||
print("RESULT")
|
||||
print("=" * 80 + "\n")
|
||||
print(f" • Successfully deleted modules: {deleted_count}")
|
||||
print(f" • Failed deletions: {len(failed_deletes)}")
|
||||
|
||||
if failed_deletes:
|
||||
print("\n⚠️ Modules not deleted:")
|
||||
for item in failed_deletes:
|
||||
print(f" • {item['name']} (ID: {item['id']}): {item['reason']}")
|
||||
|
||||
if installed_obsolete:
|
||||
print("\n" + "=" * 80)
|
||||
print("⚠️ WARNING: OBSOLETE INSTALLED MODULES")
|
||||
print("=" * 80 + "\n")
|
||||
print("The following modules are marked 'installed' but no longer exist")
|
||||
print("in the filesystem. They may cause problems.\n")
|
||||
print("Options:")
|
||||
print(" 1. Check if these modules were renamed/merged in the new version")
|
||||
print(" 2. Manually uninstall them if possible")
|
||||
print(" 3. Force delete them (risky, may break dependencies)\n")
|
||||
|
||||
for mod in sorted(installed_obsolete, key=lambda m: m.name):
|
||||
# Find modules that depend on this module
|
||||
dependents = env['ir.module.module'].search([
|
||||
('state', '=', 'installed'),
|
||||
('dependencies_id.name', '=', mod.name)
|
||||
])
|
||||
dep_info = f" <- Dependents: {dependents.mapped('name')}" if dependents else ""
|
||||
print(f" • {mod.name}{dep_info}")
|
||||
|
||||
print("\n" + "=" * 80)
|
||||
print("CLEANUP COMPLETED!")
|
||||
print("=" * 80 + "\n")
|
||||
@@ -1,192 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Post-Migration Duplicate View Fixer
|
||||
Run this AFTER migration to fix duplicate views automatically.
|
||||
"""
|
||||
|
||||
print("\n" + "="*80)
|
||||
print("POST-MIGRATION DUPLICATE VIEW FIXER")
|
||||
print("="*80 + "\n")
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
# Find all duplicate views
|
||||
all_views = env['ir.ui.view'].search(['|', ('active', '=', True), ('active', '=', False)])
|
||||
keys = defaultdict(list)
|
||||
|
||||
for view in all_views:
|
||||
if view.key:
|
||||
keys[view.key].append(view)
|
||||
|
||||
duplicates = {k: v for k, v in keys.items() if len(v) > 1}
|
||||
|
||||
print(f"Found {len(duplicates)} keys with duplicate views\n")
|
||||
|
||||
if not duplicates:
|
||||
print("✓ No duplicate views found! Database is clean.")
|
||||
print("=" * 80 + "\n")
|
||||
exit()
|
||||
|
||||
# Process duplicates
|
||||
views_to_delete = []
|
||||
redirect_log = []
|
||||
|
||||
for key, views in sorted(duplicates.items()):
|
||||
print(f"\nProcessing key: {key}")
|
||||
print("-" * 80)
|
||||
|
||||
# Sort views: module views first, then by ID (older first)
|
||||
sorted_views = sorted(views, key=lambda v: (
|
||||
0 if v.model_data_id else 1, # Module views first
|
||||
v.id # Older views first (lower ID = older)
|
||||
))
|
||||
|
||||
# Keep the first view (should be module view or oldest)
|
||||
keep = sorted_views[0]
|
||||
to_delete = sorted_views[1:]
|
||||
|
||||
module_keep = keep.model_data_id.module if keep.model_data_id else "Custom/DB"
|
||||
print(f"KEEP: ID {keep.id:>6} | Module: {module_keep:<20} | {keep.name}")
|
||||
|
||||
for view in to_delete:
|
||||
module = view.model_data_id.module if view.model_data_id else "Custom/DB"
|
||||
print(f"DELETE: ID {view.id:>6} | Module: {module:<20} | {view.name}")
|
||||
|
||||
# Find and redirect children
|
||||
children = env['ir.ui.view'].search([('inherit_id', '=', view.id)])
|
||||
if children:
|
||||
print(f" → Redirecting {len(children)} children {children.ids} to view {keep.id}")
|
||||
for child in children:
|
||||
child_module = child.model_data_id.module if child.model_data_id else "Custom/DB"
|
||||
redirect_log.append({
|
||||
'child_id': child.id,
|
||||
'child_name': child.name,
|
||||
'child_module': child_module,
|
||||
'from': view.id,
|
||||
'to': keep.id
|
||||
})
|
||||
try:
|
||||
children.write({'inherit_id': keep.id})
|
||||
print(f" ✓ Redirected successfully")
|
||||
except Exception as e:
|
||||
print(f" ✗ Redirect failed: {e}")
|
||||
continue
|
||||
|
||||
views_to_delete.append(view)
|
||||
|
||||
# Summary before deletion
|
||||
print("\n" + "="*80)
|
||||
print("SUMMARY")
|
||||
print("="*80 + "\n")
|
||||
|
||||
print(f"Views to delete: {len(views_to_delete)}")
|
||||
print(f"Child views to redirect: {len(redirect_log)}\n")
|
||||
|
||||
if redirect_log:
|
||||
print("Redirections that will be performed:")
|
||||
for item in redirect_log[:10]: # Show first 10
|
||||
print(f" • View {item['child_id']} ({item['child_module']})")
|
||||
print(f" '{item['child_name']}'")
|
||||
print(f" Parent: {item['from']} → {item['to']}")
|
||||
|
||||
if len(redirect_log) > 10:
|
||||
print(f" ... and {len(redirect_log) - 10} more redirections")
|
||||
|
||||
# Delete duplicate views
|
||||
print("\n" + "="*80)
|
||||
print("DELETING DUPLICATE VIEWS")
|
||||
print("="*80 + "\n")
|
||||
|
||||
deleted_count = 0
|
||||
failed_deletes = []
|
||||
|
||||
# Sort views by ID descending (delete newer/child views first)
|
||||
views_to_delete_sorted = sorted(views_to_delete, key=lambda v: v.id, reverse=True)
|
||||
|
||||
for view in views_to_delete_sorted:
|
||||
try:
|
||||
# Create savepoint to isolate each deletion
|
||||
env.cr.execute('SAVEPOINT delete_view')
|
||||
|
||||
view_id = view.id
|
||||
view_name = view.name
|
||||
view_key = view.key
|
||||
|
||||
# Double-check it has no children
|
||||
remaining_children = env['ir.ui.view'].search([('inherit_id', '=', view_id)])
|
||||
if remaining_children:
|
||||
print(f"⚠️ Skipping view {view_id}: Still has {len(remaining_children)} children")
|
||||
failed_deletes.append({
|
||||
'id': view_id,
|
||||
'reason': f'Still has {len(remaining_children)} children'
|
||||
})
|
||||
env.cr.execute('ROLLBACK TO SAVEPOINT delete_view')
|
||||
continue
|
||||
|
||||
view.unlink()
|
||||
env.cr.execute('RELEASE SAVEPOINT delete_view')
|
||||
print(f"✓ Deleted view {view_id}: {view_key}")
|
||||
deleted_count += 1
|
||||
|
||||
except Exception as e:
|
||||
env.cr.execute('ROLLBACK TO SAVEPOINT delete_view')
|
||||
print(f"✗ Failed to delete view {view.id}: {e}")
|
||||
failed_deletes.append({
|
||||
'id': view.id,
|
||||
'name': view.name,
|
||||
'reason': str(e)
|
||||
})
|
||||
|
||||
# Commit changes
|
||||
print("\n" + "="*80)
|
||||
print("COMMITTING CHANGES")
|
||||
print("="*80 + "\n")
|
||||
|
||||
try:
|
||||
env.cr.commit()
|
||||
print("✓ All changes committed successfully!")
|
||||
except Exception as e:
|
||||
print(f"✗ Commit failed: {e}")
|
||||
print("Changes were NOT saved!")
|
||||
exit(1)
|
||||
|
||||
# Final verification
|
||||
print("\n" + "="*80)
|
||||
print("FINAL VERIFICATION")
|
||||
print("="*80 + "\n")
|
||||
|
||||
# Re-check for duplicates
|
||||
all_views_after = env['ir.ui.view'].search([('active', '=', True)])
|
||||
keys_after = defaultdict(list)
|
||||
|
||||
for view in all_views_after:
|
||||
if view.key:
|
||||
keys_after[view.key].append(view)
|
||||
|
||||
duplicates_after = {k: v for k, v in keys_after.items() if len(v) > 1}
|
||||
|
||||
print(f"Results:")
|
||||
print(f" • Successfully deleted: {deleted_count} views")
|
||||
print(f" • Failed deletions: {len(failed_deletes)}")
|
||||
print(f" • Child views redirected: {len(redirect_log)}")
|
||||
print(f" • Remaining duplicates: {len(duplicates_after)}")
|
||||
|
||||
if failed_deletes:
|
||||
print(f"\n⚠️ Failed deletions:")
|
||||
for item in failed_deletes:
|
||||
print(f" • View {item['id']}: {item['reason']}")
|
||||
|
||||
if duplicates_after:
|
||||
print(f"\n⚠️ Still have {len(duplicates_after)} duplicate keys:")
|
||||
for key, views in sorted(duplicates_after.items())[:5]:
|
||||
print(f" • {key}: {len(views)} views")
|
||||
for view in views:
|
||||
module = view.model_data_id.module if view.model_data_id else "Custom/DB"
|
||||
print(f" - ID {view.id} ({module})")
|
||||
print(f"\n Run this script again to attempt another cleanup.")
|
||||
else:
|
||||
print(f"\n✓ All duplicates resolved!")
|
||||
|
||||
print("\n" + "="*80)
|
||||
print("FIX COMPLETED!")
|
||||
print("="*80)
|
||||
@@ -1,521 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Post-Migration View Validation Script for Odoo
|
||||
|
||||
Validates all views after migration to detect:
|
||||
- Broken XPath expressions in inherited views
|
||||
- Views that fail to combine with their parent
|
||||
- Invalid QWeb templates
|
||||
- Missing asset files
|
||||
- Field references to non-existent fields
|
||||
|
||||
Usage:
|
||||
odoo-bin shell -d <database> < validate_views.py
|
||||
|
||||
# Or with compose:
|
||||
compose run <service> shell -d <database> --no-http --stop-after-init < validate_views.py
|
||||
|
||||
Exit codes:
|
||||
0 - All validations passed
|
||||
1 - Validation errors found (see report)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import json
|
||||
from datetime import datetime
|
||||
from collections import defaultdict
|
||||
from lxml import etree
|
||||
|
||||
# ANSI colors for terminal output
|
||||
class Colors:
|
||||
RED = '\033[91m'
|
||||
GREEN = '\033[92m'
|
||||
YELLOW = '\033[93m'
|
||||
BLUE = '\033[94m'
|
||||
BOLD = '\033[1m'
|
||||
END = '\033[0m'
|
||||
|
||||
|
||||
def print_header(title):
|
||||
"""Print a formatted section header."""
|
||||
print(f"\n{Colors.BOLD}{'='*80}{Colors.END}")
|
||||
print(f"{Colors.BOLD}{title}{Colors.END}")
|
||||
print(f"{Colors.BOLD}{'='*80}{Colors.END}\n")
|
||||
|
||||
|
||||
def print_subheader(title):
|
||||
"""Print a formatted subsection header."""
|
||||
print(f"\n{Colors.BLUE}{'-'*60}{Colors.END}")
|
||||
print(f"{Colors.BLUE}{title}{Colors.END}")
|
||||
print(f"{Colors.BLUE}{'-'*60}{Colors.END}\n")
|
||||
|
||||
|
||||
def print_ok(message):
|
||||
"""Print success message."""
|
||||
print(f"{Colors.GREEN}[OK]{Colors.END} {message}")
|
||||
|
||||
|
||||
def print_error(message):
|
||||
"""Print error message."""
|
||||
print(f"{Colors.RED}[ERROR]{Colors.END} {message}")
|
||||
|
||||
|
||||
def print_warn(message):
|
||||
"""Print warning message."""
|
||||
print(f"{Colors.YELLOW}[WARN]{Colors.END} {message}")
|
||||
|
||||
|
||||
def print_info(message):
|
||||
"""Print info message."""
|
||||
print(f"{Colors.BLUE}[INFO]{Colors.END} {message}")
|
||||
|
||||
|
||||
class ViewValidator:
|
||||
"""Validates Odoo views after migration."""
|
||||
|
||||
def __init__(self, env):
|
||||
self.env = env
|
||||
self.View = env['ir.ui.view']
|
||||
self.errors = []
|
||||
self.warnings = []
|
||||
self.stats = {
|
||||
'total_views': 0,
|
||||
'inherited_views': 0,
|
||||
'qweb_views': 0,
|
||||
'broken_xpath': 0,
|
||||
'broken_combine': 0,
|
||||
'broken_qweb': 0,
|
||||
'broken_fields': 0,
|
||||
'missing_assets': 0,
|
||||
}
|
||||
|
||||
def validate_all(self):
|
||||
"""Run all validation checks."""
|
||||
print_header("ODOO VIEW VALIDATION - POST-MIGRATION")
|
||||
print(f"Started at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
print(f"Database: {self.env.cr.dbname}")
|
||||
|
||||
# Get all active views
|
||||
all_views = self.View.search([('active', '=', True)])
|
||||
self.stats['total_views'] = len(all_views)
|
||||
print_info(f"Total active views to validate: {len(all_views)}")
|
||||
|
||||
# Run validations
|
||||
self._validate_inherited_views()
|
||||
self._validate_xpath_targets()
|
||||
self._validate_qweb_templates()
|
||||
self._validate_field_references()
|
||||
self._validate_odoo_native()
|
||||
self._check_assets()
|
||||
|
||||
# Print summary
|
||||
self._print_summary()
|
||||
|
||||
# Rollback to avoid any accidental changes
|
||||
self.env.cr.rollback()
|
||||
|
||||
return len(self.errors) == 0
|
||||
|
||||
def _validate_inherited_views(self):
|
||||
"""Check that all inherited views can combine with their parent."""
|
||||
print_subheader("1. Validating Inherited Views (Combination)")
|
||||
|
||||
inherited_views = self.View.search([
|
||||
('inherit_id', '!=', False),
|
||||
('active', '=', True)
|
||||
])
|
||||
self.stats['inherited_views'] = len(inherited_views)
|
||||
print_info(f"Found {len(inherited_views)} inherited views to check")
|
||||
|
||||
broken = []
|
||||
for view in inherited_views:
|
||||
try:
|
||||
# Attempt to get combined architecture
|
||||
view._get_combined_arch()
|
||||
except Exception as e:
|
||||
broken.append({
|
||||
'view_id': view.id,
|
||||
'xml_id': view.xml_id or 'N/A',
|
||||
'name': view.name,
|
||||
'model': view.model,
|
||||
'parent_xml_id': view.inherit_id.xml_id if view.inherit_id else 'N/A',
|
||||
'error': str(e)[:200]
|
||||
})
|
||||
|
||||
self.stats['broken_combine'] = len(broken)
|
||||
|
||||
if broken:
|
||||
for item in broken:
|
||||
error_msg = (
|
||||
f"View '{item['xml_id']}' (ID: {item['view_id']}) "
|
||||
f"cannot combine with parent '{item['parent_xml_id']}': {item['error']}"
|
||||
)
|
||||
print_error(error_msg)
|
||||
self.errors.append({
|
||||
'type': 'combination_error',
|
||||
'severity': 'error',
|
||||
**item
|
||||
})
|
||||
else:
|
||||
print_ok("All inherited views combine correctly with their parents")
|
||||
|
||||
def _validate_xpath_targets(self):
|
||||
"""Check that XPath expressions find their targets in parent views."""
|
||||
print_subheader("2. Validating XPath Targets")
|
||||
|
||||
inherited_views = self.View.search([
|
||||
('inherit_id', '!=', False),
|
||||
('active', '=', True)
|
||||
])
|
||||
|
||||
xpath_pattern = re.compile(r'<xpath[^>]+expr=["\']([^"\']+)["\']')
|
||||
orphan_xpaths = []
|
||||
|
||||
for view in inherited_views:
|
||||
if not view.arch_db or not view.inherit_id or not view.inherit_id.arch_db:
|
||||
continue
|
||||
|
||||
try:
|
||||
# Get parent's combined arch (to handle chained inheritance)
|
||||
parent_arch = view.inherit_id._get_combined_arch()
|
||||
parent_tree = etree.fromstring(parent_arch)
|
||||
except Exception:
|
||||
# Parent view is already broken, skip
|
||||
continue
|
||||
|
||||
# Parse child view
|
||||
try:
|
||||
view_tree = etree.fromstring(view.arch_db)
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
# Find all xpath nodes
|
||||
for xpath_node in view_tree.xpath('//xpath'):
|
||||
expr = xpath_node.get('expr')
|
||||
if not expr:
|
||||
continue
|
||||
|
||||
try:
|
||||
matches = parent_tree.xpath(expr)
|
||||
if not matches:
|
||||
orphan_xpaths.append({
|
||||
'view_id': view.id,
|
||||
'xml_id': view.xml_id or 'N/A',
|
||||
'name': view.name,
|
||||
'model': view.model,
|
||||
'xpath': expr,
|
||||
'parent_xml_id': view.inherit_id.xml_id or 'N/A',
|
||||
'parent_id': view.inherit_id.id
|
||||
})
|
||||
except etree.XPathEvalError as e:
|
||||
orphan_xpaths.append({
|
||||
'view_id': view.id,
|
||||
'xml_id': view.xml_id or 'N/A',
|
||||
'name': view.name,
|
||||
'model': view.model,
|
||||
'xpath': expr,
|
||||
'parent_xml_id': view.inherit_id.xml_id or 'N/A',
|
||||
'parent_id': view.inherit_id.id,
|
||||
'xpath_error': str(e)
|
||||
})
|
||||
|
||||
self.stats['broken_xpath'] = len(orphan_xpaths)
|
||||
|
||||
if orphan_xpaths:
|
||||
for item in orphan_xpaths:
|
||||
error_msg = (
|
||||
f"View '{item['xml_id']}' (ID: {item['view_id']}): "
|
||||
f"XPath '{item['xpath']}' finds no target in parent '{item['parent_xml_id']}'"
|
||||
)
|
||||
if 'xpath_error' in item:
|
||||
error_msg += f" (XPath syntax error: {item['xpath_error']})"
|
||||
print_error(error_msg)
|
||||
self.errors.append({
|
||||
'type': 'orphan_xpath',
|
||||
'severity': 'error',
|
||||
**item
|
||||
})
|
||||
else:
|
||||
print_ok("All XPath expressions find their targets")
|
||||
|
||||
def _validate_qweb_templates(self):
|
||||
"""Validate QWeb templates can be rendered."""
|
||||
print_subheader("3. Validating QWeb Templates")
|
||||
|
||||
qweb_views = self.View.search([
|
||||
('type', '=', 'qweb'),
|
||||
('active', '=', True)
|
||||
])
|
||||
self.stats['qweb_views'] = len(qweb_views)
|
||||
print_info(f"Found {len(qweb_views)} QWeb templates to check")
|
||||
|
||||
broken = []
|
||||
for view in qweb_views:
|
||||
try:
|
||||
# Basic XML parsing check
|
||||
if view.arch_db:
|
||||
etree.fromstring(view.arch_db)
|
||||
|
||||
# Try to get combined arch for inherited qweb views
|
||||
if view.inherit_id:
|
||||
view._get_combined_arch()
|
||||
|
||||
except Exception as e:
|
||||
broken.append({
|
||||
'view_id': view.id,
|
||||
'xml_id': view.xml_id or 'N/A',
|
||||
'name': view.name,
|
||||
'key': view.key or 'N/A',
|
||||
'error': str(e)[:200]
|
||||
})
|
||||
|
||||
self.stats['broken_qweb'] = len(broken)
|
||||
|
||||
if broken:
|
||||
for item in broken:
|
||||
error_msg = (
|
||||
f"QWeb template '{item['xml_id']}' (key: {item['key']}): {item['error']}"
|
||||
)
|
||||
print_error(error_msg)
|
||||
self.errors.append({
|
||||
'type': 'qweb_error',
|
||||
'severity': 'error',
|
||||
**item
|
||||
})
|
||||
else:
|
||||
print_ok("All QWeb templates are valid")
|
||||
|
||||
def _validate_field_references(self):
|
||||
"""Check that field references in views point to existing fields."""
|
||||
print_subheader("4. Validating Field References")
|
||||
|
||||
field_pattern = re.compile(r'(?:name|field)=["\'](\w+)["\']')
|
||||
missing_fields = []
|
||||
|
||||
# Only check form, tree, search, kanban views (not qweb)
|
||||
views = self.View.search([
|
||||
('type', 'in', ['form', 'tree', 'search', 'kanban', 'pivot', 'graph']),
|
||||
('active', '=', True),
|
||||
('model', '!=', False)
|
||||
])
|
||||
|
||||
print_info(f"Checking field references in {len(views)} views")
|
||||
|
||||
checked_models = set()
|
||||
for view in views:
|
||||
model_name = view.model
|
||||
if not model_name or model_name in checked_models:
|
||||
continue
|
||||
|
||||
# Skip if model doesn't exist
|
||||
if model_name not in self.env:
|
||||
continue
|
||||
|
||||
checked_models.add(model_name)
|
||||
|
||||
try:
|
||||
# Get combined arch
|
||||
arch = view._get_combined_arch()
|
||||
tree = etree.fromstring(arch)
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
model = self.env[model_name]
|
||||
model_fields = set(model._fields.keys())
|
||||
|
||||
# Find all field references
|
||||
for field_node in tree.xpath('//*[@name]'):
|
||||
field_name = field_node.get('name')
|
||||
if not field_name:
|
||||
continue
|
||||
|
||||
# Skip special names
|
||||
if field_name in ('id', '__last_update', 'display_name'):
|
||||
continue
|
||||
|
||||
# Skip if it's a button or action (not a field)
|
||||
if field_node.tag in ('button', 'a'):
|
||||
continue
|
||||
|
||||
# Check if field exists
|
||||
if field_name not in model_fields:
|
||||
# Check if it's a related field path (e.g., partner_id.name)
|
||||
if '.' in field_name:
|
||||
continue
|
||||
|
||||
missing_fields.append({
|
||||
'view_id': view.id,
|
||||
'xml_id': view.xml_id or 'N/A',
|
||||
'model': model_name,
|
||||
'field_name': field_name,
|
||||
'tag': field_node.tag
|
||||
})
|
||||
|
||||
self.stats['broken_fields'] = len(missing_fields)
|
||||
|
||||
if missing_fields:
|
||||
# Group by view for cleaner output
|
||||
by_view = defaultdict(list)
|
||||
for item in missing_fields:
|
||||
by_view[item['xml_id']].append(item['field_name'])
|
||||
|
||||
for xml_id, fields in list(by_view.items())[:20]: # Limit output
|
||||
print_warn(f"View '{xml_id}': references missing fields: {', '.join(fields)}")
|
||||
self.warnings.append({
|
||||
'type': 'missing_field',
|
||||
'severity': 'warning',
|
||||
'xml_id': xml_id,
|
||||
'fields': fields
|
||||
})
|
||||
|
||||
if len(by_view) > 20:
|
||||
print_warn(f"... and {len(by_view) - 20} more views with missing fields")
|
||||
else:
|
||||
print_ok("All field references are valid")
|
||||
|
||||
def _validate_odoo_native(self):
|
||||
"""Run Odoo's native view validation."""
|
||||
print_subheader("5. Running Odoo Native Validation")
|
||||
|
||||
try:
|
||||
# This validates all custom views
|
||||
self.View._validate_custom_views('all')
|
||||
print_ok("Odoo native validation passed")
|
||||
except Exception as e:
|
||||
error_msg = f"Odoo native validation failed: {str(e)[:500]}"
|
||||
print_error(error_msg)
|
||||
self.errors.append({
|
||||
'type': 'native_validation',
|
||||
'severity': 'error',
|
||||
'error': str(e)
|
||||
})
|
||||
|
||||
def _check_assets(self):
|
||||
"""Check for missing asset files."""
|
||||
print_subheader("6. Checking Asset Files")
|
||||
|
||||
try:
|
||||
IrAsset = self.env['ir.asset']
|
||||
except KeyError:
|
||||
print_info("ir.asset model not found (Odoo < 14.0), skipping asset check")
|
||||
return
|
||||
|
||||
assets = IrAsset.search([])
|
||||
print_info(f"Checking {len(assets)} asset definitions")
|
||||
|
||||
missing = []
|
||||
for asset in assets:
|
||||
if not asset.path:
|
||||
continue
|
||||
|
||||
try:
|
||||
# Try to resolve the asset path
|
||||
# This is a simplified check - actual asset resolution is complex
|
||||
path = asset.path
|
||||
if path.startswith('/'):
|
||||
path = path[1:]
|
||||
|
||||
# Check if it's a glob pattern or specific file
|
||||
if '*' in path:
|
||||
continue # Skip glob patterns
|
||||
|
||||
# Try to get the asset content (this will fail if file is missing)
|
||||
# Note: This is environment dependent and may not catch all issues
|
||||
except Exception as e:
|
||||
missing.append({
|
||||
'asset_id': asset.id,
|
||||
'path': asset.path,
|
||||
'bundle': asset.bundle or 'N/A',
|
||||
'error': str(e)[:100]
|
||||
})
|
||||
|
||||
self.stats['missing_assets'] = len(missing)
|
||||
|
||||
if missing:
|
||||
for item in missing:
|
||||
print_warn(f"Asset '{item['path']}' (bundle: {item['bundle']}): may be missing")
|
||||
self.warnings.append({
|
||||
'type': 'missing_asset',
|
||||
'severity': 'warning',
|
||||
**item
|
||||
})
|
||||
else:
|
||||
print_ok("Asset definitions look valid")
|
||||
|
||||
def _print_summary(self):
|
||||
"""Print validation summary."""
|
||||
print_header("VALIDATION SUMMARY")
|
||||
|
||||
print(f"Statistics:")
|
||||
print(f" - Total views checked: {self.stats['total_views']}")
|
||||
print(f" - Inherited views: {self.stats['inherited_views']}")
|
||||
print(f" - QWeb templates: {self.stats['qweb_views']}")
|
||||
print()
|
||||
|
||||
print(f"Issues found:")
|
||||
print(f" - Broken view combinations: {self.stats['broken_combine']}")
|
||||
print(f" - Orphan XPath expressions: {self.stats['broken_xpath']}")
|
||||
print(f" - Invalid QWeb templates: {self.stats['broken_qweb']}")
|
||||
print(f" - Missing field references: {self.stats['broken_fields']}")
|
||||
print(f" - Missing assets: {self.stats['missing_assets']}")
|
||||
print()
|
||||
|
||||
total_errors = len(self.errors)
|
||||
total_warnings = len(self.warnings)
|
||||
|
||||
if total_errors == 0 and total_warnings == 0:
|
||||
print(f"{Colors.GREEN}{Colors.BOLD}")
|
||||
print("="*60)
|
||||
print(" ALL VALIDATIONS PASSED!")
|
||||
print("="*60)
|
||||
print(f"{Colors.END}")
|
||||
elif total_errors == 0:
|
||||
print(f"{Colors.YELLOW}{Colors.BOLD}")
|
||||
print("="*60)
|
||||
print(f" VALIDATION PASSED WITH {total_warnings} WARNING(S)")
|
||||
print("="*60)
|
||||
print(f"{Colors.END}")
|
||||
else:
|
||||
print(f"{Colors.RED}{Colors.BOLD}")
|
||||
print("="*60)
|
||||
print(f" VALIDATION FAILED: {total_errors} ERROR(S), {total_warnings} WARNING(S)")
|
||||
print("="*60)
|
||||
print(f"{Colors.END}")
|
||||
|
||||
if os.environ.get('VALIDATE_VIEWS_REPORT'):
|
||||
report = {
|
||||
'type': 'views',
|
||||
'timestamp': datetime.now().isoformat(),
|
||||
'database': self.env.cr.dbname,
|
||||
'stats': self.stats,
|
||||
'errors': self.errors,
|
||||
'warnings': self.warnings
|
||||
}
|
||||
MARKER = '___VALIDATE_VIEWS_JSON___'
|
||||
print(MARKER)
|
||||
print(json.dumps(report, indent=2, default=str))
|
||||
print(MARKER)
|
||||
|
||||
|
||||
def main():
|
||||
"""Main entry point."""
|
||||
try:
|
||||
validator = ViewValidator(env)
|
||||
success = validator.validate_all()
|
||||
|
||||
# Exit with appropriate code
|
||||
if not success:
|
||||
sys.exit(1)
|
||||
|
||||
except Exception as e:
|
||||
print_error(f"Validation script failed: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
# Run when executed in Odoo shell
|
||||
if __name__ == '__main__' or 'env' in dir():
|
||||
main()
|
||||
89
prepare_db.sh
Executable file
89
prepare_db.sh
Executable file
@@ -0,0 +1,89 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Global variables
|
||||
ODOO_SERVICE="$1"
|
||||
DB_NAME="$2"
|
||||
DB_FINALE_MODEL="$3"
|
||||
DB_FINALE_SERVICE="$4"
|
||||
|
||||
echo "Start database preparation"
|
||||
|
||||
# Check POSTGRES container is running
|
||||
if ! docker ps | grep -q "$POSTGRES_SERVICE_NAME"; then
printf "Docker container %s is not running.\n" "$POSTGRES_SERVICE_NAME" >&2
exit 1
fi
|
||||
|
||||
EXT_EXISTS=$(query_postgres_container "SELECT 1 FROM pg_extension WHERE extname = 'dblink'" "$DB_NAME") || exit 1
|
||||
if [ "$EXT_EXISTS" != "1" ]; then
|
||||
query_postgres_container "CREATE EXTENSION dblink;" "$DB_NAME" || exit 1
|
||||
fi
|
||||
|
||||
# Neutralize the database
|
||||
SQL_NEUTRALIZE=$(cat <<'EOF'
|
||||
/* Archive all the mail servers */
|
||||
UPDATE fetchmail_server SET active = false;
|
||||
UPDATE ir_mail_server SET active = false;
|
||||
|
||||
/* Archive all the cron */
|
||||
ALTER TABLE ir_cron ADD COLUMN IF NOT EXISTS active_bkp BOOLEAN;
|
||||
UPDATE ir_cron SET active_bkp = active;
|
||||
UPDATE ir_cron SET active = False;
|
||||
EOF
|
||||
)
|
||||
echo "Neutralize base..."
|
||||
query_postgres_container "$SQL_NEUTRALIZE" "$DB_NAME" || exit 1
|
||||
echo "Base neutralized..."
|
||||
|
||||
#######################################
|
||||
## List add-ons not in final version ##
|
||||
#######################################
|
||||
|
||||
# Retrieve add-ons not available in the final Odoo version
|
||||
SQL_404_ADDONS_LIST="
|
||||
SELECT module_origin.name
|
||||
FROM ir_module_module module_origin
|
||||
LEFT JOIN (
|
||||
SELECT *
|
||||
FROM dblink('dbname=$FINALE_DB_NAME','SELECT name, shortdesc, author FROM ir_module_module')
|
||||
AS tb2(name text, shortdesc text, author text)
|
||||
) AS module_dest ON module_dest.name = module_origin.name
|
||||
|
||||
WHERE (module_dest.name IS NULL) AND (module_origin.state = 'installed') AND (module_origin.author NOT IN ('Odoo S.A.', 'Lokavaluto', 'Elabore'))
|
||||
ORDER BY module_origin.name
|
||||
;
|
||||
"
|
||||
echo "Retrieve 404 addons... "
|
||||
echo "SQL REQUEST = $SQL_404_ADDONS_LIST"
|
||||
query_postgres_container "$SQL_404_ADDONS_LIST" "$DB_NAME" > 404_addons || exit 1
|
||||
|
||||
# Keep only the installed add-ons
|
||||
INSTALLED_ADDONS="SELECT name FROM ir_module_module WHERE state='installed';"
|
||||
query_postgres_container "$INSTALLED_ADDONS" "$DB_NAME" > installed_addons || exit 1
|
||||
|
||||
grep -Fx -f 404_addons installed_addons > final_404_addons
|
||||
rm -f 404_addons installed_addons
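# (grep -Fx above keeps only whole-line, fixed-string matches present in both lists, i.e. the
# installed add-ons that have no counterpart in the final Odoo version.)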
|
||||
|
||||
# Ask confirmation to uninstall the selected add-ons
|
||||
echo "
|
||||
==== ADD-ONS CHECK ====
|
||||
Installed add-ons not available in final Odoo version:
|
||||
"
|
||||
cat final_404_addons
|
||||
|
||||
|
||||
echo "
|
||||
Do you accept to migrate the database with all these add-ons still installed? (Y/N/R)"
|
||||
echo "Y - Yes, let's go on with the upgrade."
|
||||
echo "N - No, stop the upgrade"
|
||||
read -n 1 -p "Your choice: " choice
|
||||
case "$choice" in
|
||||
[Yy] ) echo "
|
||||
Upgrade confirmed!";;
|
||||
[Nn] ) echo "
|
||||
Upgrade cancelled!"; exit 1;;
|
||||
* ) echo "
|
||||
Please answer by Y or N.";;
|
||||
esac
|
||||
|
||||
echo "Database successfully prepared!"
|
||||
@@ -1,60 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
DB_NAME="$1"
|
||||
ODOO_SERVICE="$2"
|
||||
|
||||
echo "Running SQL cleanup..."
|
||||
CLEANUP_SQL=$(cat <<'EOF'
|
||||
-- Drop sequences that prevent Odoo from starting.
|
||||
-- These sequences are recreated by Odoo on startup but stale values
|
||||
-- from the old version can cause conflicts.
|
||||
DROP SEQUENCE IF EXISTS base_registry_signaling;
|
||||
DROP SEQUENCE IF EXISTS base_cache_signaling;
|
||||
|
||||
-- Reset website templates to their original state.
|
||||
-- Views with arch_fs (file source) that have been customized (arch_db not null)
|
||||
-- are reset to use the file version, EXCEPT for actual website pages which
|
||||
-- contain user content that must be preserved.
|
||||
UPDATE ir_ui_view
|
||||
SET arch_db = NULL
|
||||
WHERE arch_fs IS NOT NULL
|
||||
AND arch_fs LIKE 'website/%'
|
||||
AND arch_db IS NOT NULL
|
||||
AND id NOT IN (SELECT view_id FROM website_page);
|
||||
|
||||
-- Purge compiled frontend assets (CSS/JS bundles).
|
||||
-- These cached files reference old asset versions and must be regenerated
|
||||
-- by Odoo after migration to avoid broken stylesheets and scripts.
|
||||
DELETE FROM ir_attachment
|
||||
WHERE name LIKE '/web/assets/%'
|
||||
OR name LIKE '%.assets_%'
|
||||
OR (res_model = 'ir.ui.view' AND mimetype = 'text/css');
|
||||
EOF
|
||||
)
|
||||
query_postgres_container "$CLEANUP_SQL" "$DB_NAME"
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||
|
||||
PYTHON_SCRIPT="${SCRIPT_DIR}/lib/python/fix_duplicated_views.py"
|
||||
echo "Remove duplicated views with script $PYTHON_SCRIPT ..."
|
||||
exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT"
|
||||
|
||||
PYTHON_SCRIPT="${SCRIPT_DIR}/lib/python/cleanup_modules.py"
|
||||
echo "Uninstall obsolete add-ons with script $PYTHON_SCRIPT ..."
|
||||
exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT"
|
||||
|
||||
# Give the service user back the right to access the tables
# docker exec -u 70 "$DB_CONTAINER_NAME" pgm chown "$FINALE_SERVICE_NAME" "$DB_NAME"
|
||||
|
||||
|
||||
# Launch Odoo with database in finale version to run all updates
|
||||
run_compose --debug run "$ODOO_SERVICE" -u all --log-level=debug --stop-after-init --no-http
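# (-u all updates every installed module against the final-version code base;
# --stop-after-init exits once the update is done and --no-http keeps the web server off.)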
|
||||
|
||||
echo ""
|
||||
echo "Running post-migration view validation..."
|
||||
if exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "${SCRIPT_DIR}/lib/python/validate_views.py"; then
|
||||
echo "View validation passed."
|
||||
else
|
||||
echo "WARNING: View validation found issues. Run scripts/validate_migration.sh for the full report."
|
||||
fi
|
||||
@@ -1,74 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
ODOO_SERVICE="$1"
|
||||
DB_NAME="$2"
|
||||
DB_FINALE_MODEL="$3"
|
||||
DB_FINALE_SERVICE="$4"
|
||||
|
||||
TMPDIR=$(mktemp -d)
|
||||
trap 'rm -rf "$TMPDIR"' EXIT
|
||||
|
||||
echo "Start database preparation"
|
||||
|
||||
# Check POSTGRES container is running
|
||||
if ! docker ps | grep -q "$POSTGRES_SERVICE_NAME"; then
|
||||
printf "Docker container %s is not running.\n" "$POSTGRES_SERVICE_NAME" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
EXT_EXISTS=$(query_postgres_container "SELECT 1 FROM pg_extension WHERE extname = 'dblink'" "$DB_NAME") || exit 1
|
||||
if [[ "$EXT_EXISTS" != "1" ]]; then
|
||||
query_postgres_container "CREATE EXTENSION dblink;" "$DB_NAME" || exit 1
|
||||
fi
|
||||
|
||||
# Neutralize the database
|
||||
SQL_NEUTRALIZE=$(cat <<'EOF'
|
||||
/* Archive all the mail servers */
|
||||
UPDATE fetchmail_server SET active = false;
|
||||
UPDATE ir_mail_server SET active = false;
|
||||
|
||||
/* Archive all the cron */
|
||||
ALTER TABLE ir_cron ADD COLUMN IF NOT EXISTS active_bkp BOOLEAN;
|
||||
UPDATE ir_cron SET active_bkp = active;
|
||||
UPDATE ir_cron SET active = False;
|
||||
EOF
|
||||
)
|
||||
echo "Neutralize base..."
|
||||
query_postgres_container "$SQL_NEUTRALIZE" "$DB_NAME" || exit 1
|
||||
echo "Base neutralized..."
|
||||
|
||||
#######################################
|
||||
## List add-ons not in final version ##
|
||||
#######################################
|
||||
|
||||
SQL_MISSING_ADDONS=$(cat <<EOF
|
||||
SELECT module_origin.name
|
||||
FROM ir_module_module module_origin
|
||||
LEFT JOIN (
|
||||
SELECT *
|
||||
FROM dblink('dbname=${FINALE_DB_NAME}','SELECT name, shortdesc, author FROM ir_module_module')
|
||||
AS tb2(name text, shortdesc text, author text)
|
||||
) AS module_dest ON module_dest.name = module_origin.name
|
||||
WHERE (module_dest.name IS NULL)
|
||||
AND (module_origin.state = 'installed')
|
||||
AND (module_origin.author NOT IN ('Odoo S.A.'))
|
||||
ORDER BY module_origin.name;
|
||||
EOF
|
||||
)
|
||||
echo "Retrieve missing addons..."
|
||||
missing_addons=$(query_postgres_container "$SQL_MISSING_ADDONS" "$DB_NAME")
|
||||
|
||||
log_step "ADD-ONS CHECK"
|
||||
echo "Installed add-ons not available in final Odoo version:"
|
||||
echo "$missing_addons"
|
||||
confirm_or_exit "Do you accept to migrate with these add-ons still installed?"
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||
PYTHON_SCRIPT="${SCRIPT_DIR}/lib/python/check_views.py"
|
||||
echo "Check views with script $PYTHON_SCRIPT ..."
|
||||
exec_python_script_in_odoo_shell "$DB_NAME" "$DB_NAME" "$PYTHON_SCRIPT"
|
||||
|
||||
confirm_or_exit "Do you accept to migrate with the current views state?"
|
||||
|
||||
echo "Database successfully prepared!"
|
||||
@@ -1,138 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Post-Migration Validation Script for Odoo
|
||||
# Validates views, XPath expressions, and QWeb templates.
|
||||
#
|
||||
# View validation runs automatically at the end of the upgrade process.
|
||||
# This script can also be run manually for the full report with JSON output.
|
||||
#
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
|
||||
source "${PROJECT_ROOT}/lib/common.sh"
|
||||
|
||||
####################
|
||||
# CONFIGURATION
|
||||
####################
|
||||
|
||||
REPORT_DIR="/tmp"
|
||||
REPORT_TIMESTAMP=$(date +%Y%m%d_%H%M%S)
|
||||
VIEWS_REPORT=""
|
||||
VIEWS_REPORT_MARKER="___VALIDATE_VIEWS_JSON___"
|
||||
|
||||
####################
|
||||
# USAGE
|
||||
####################
|
||||
|
||||
usage() {
|
||||
cat <<EOF
|
||||
Usage: $0 <db_name> <service_name>
|
||||
|
||||
Post-migration view validation for Odoo databases.
|
||||
|
||||
Validates:
|
||||
- Inherited view combination (parent + child)
|
||||
- XPath expressions find their targets
|
||||
- QWeb template syntax
|
||||
- Field references point to existing fields
|
||||
- Odoo native view validation
|
||||
|
||||
Arguments:
|
||||
db_name Name of the database to validate
|
||||
service_name Docker compose service name (e.g., odoo17, ou17)
|
||||
|
||||
Examples:
|
||||
$0 ou17 odoo17
|
||||
$0 elabore_migrated odoo18
|
||||
|
||||
Notes:
|
||||
- Runs via Odoo shell (no HTTP server needed)
|
||||
- Report is written to /tmp/validation_views_<db>_<timestamp>.json
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
####################
|
||||
# ARGUMENT PARSING
|
||||
####################
|
||||
|
||||
DB_NAME=""
|
||||
SERVICE_NAME=""
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
-h|--help)
|
||||
usage
|
||||
;;
|
||||
*)
|
||||
if [[ -z "$DB_NAME" ]]; then
|
||||
DB_NAME="$1"
|
||||
shift
|
||||
elif [[ -z "$SERVICE_NAME" ]]; then
|
||||
SERVICE_NAME="$1"
|
||||
shift
|
||||
else
|
||||
log_error "Unexpected argument: $1"
|
||||
usage
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ -z "$DB_NAME" ]]; then
|
||||
log_error "Missing database name"
|
||||
usage
|
||||
fi
|
||||
|
||||
if [[ -z "$SERVICE_NAME" ]]; then
|
||||
log_error "Missing service name"
|
||||
usage
|
||||
fi
|
||||
|
||||
####################
|
||||
# MAIN
|
||||
####################
|
||||
|
||||
log_step "POST-MIGRATION VIEW VALIDATION"
|
||||
log_info "Database: $DB_NAME"
|
||||
log_info "Service: $SERVICE_NAME"
|
||||
|
||||
PYTHON_SCRIPT="${PROJECT_ROOT}/lib/python/validate_views.py"
|
||||
|
||||
if [[ ! -f "$PYTHON_SCRIPT" ]]; then
|
||||
log_error "Validation script not found: $PYTHON_SCRIPT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
VIEWS_REPORT="${REPORT_DIR}/validation_views_${DB_NAME}_${REPORT_TIMESTAMP}.json"
|
||||
|
||||
log_info "Running view validation in Odoo shell..."
|
||||
echo ""
|
||||
|
||||
RESULT=0
|
||||
RAW_OUTPUT=$(run_compose run --rm -e VALIDATE_VIEWS_REPORT=1 "$SERVICE_NAME" shell -d "$DB_NAME" --no-http --stop-after-init < "$PYTHON_SCRIPT") || RESULT=$?
|
||||
|
||||
echo "$RAW_OUTPUT" | sed "/${VIEWS_REPORT_MARKER}/,/${VIEWS_REPORT_MARKER}/d"
|
||||
|
||||
echo "$RAW_OUTPUT" | sed -n "/${VIEWS_REPORT_MARKER}/,/${VIEWS_REPORT_MARKER}/p" | grep -v "$VIEWS_REPORT_MARKER" > "$VIEWS_REPORT"
|
||||
|
||||
echo ""
|
||||
log_step "VALIDATION COMPLETE"
|
||||
|
||||
if [[ -s "$VIEWS_REPORT" ]]; then
|
||||
log_info "Report: $VIEWS_REPORT"
|
||||
else
|
||||
log_warn "Could not extract validation report from output"
|
||||
VIEWS_REPORT=""
|
||||
fi
|
||||
|
||||
if [[ $RESULT -eq 0 ]]; then
|
||||
log_info "All validations passed!"
|
||||
else
|
||||
log_error "Some validations failed. Check the output above for details."
|
||||
fi
|
||||
|
||||
exit $RESULT
|
||||
270
upgrade.sh
270
upgrade.sh
@@ -1,139 +1,207 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
source "${SCRIPT_DIR}/lib/common.sh"
|
||||
|
||||
####################
|
||||
# USAGE & ARGUMENTS
|
||||
# GLOBAL VARIABLES #
|
||||
####################
|
||||
|
||||
usage() {
|
||||
cat <<EOF >&2
|
||||
Usage: $0 <origin_version> <final_version> <db_name> <service_name>
|
||||
ORIGIN_VERSION="$1" # "12" for version 12.0
|
||||
FINAL_VERSION="$2" # "16" for version 16.0
|
||||
# Name of the database to migrate (restored beforehand from a {DATABASE_NAME}.zip backup)
|
||||
ORIGIN_DB_NAME="$3"
|
||||
ORIGIN_SERVICE_NAME="$4"
|
||||
|
||||
Arguments:
|
||||
origin_version Origin Odoo version number (e.g., 12 for version 12.0)
|
||||
final_version Target Odoo version number (e.g., 16 for version 16.0)
|
||||
db_name Name of the database to migrate
|
||||
service_name Name of the origin Odoo service (docker compose service)
|
||||
|
||||
Example:
|
||||
$0 14 16 elabore_20241208 odoo14
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [[ $# -lt 4 ]]; then
|
||||
log_error "Missing arguments. Expected 4, got $#."
|
||||
usage
|
||||
fi
|
||||
|
||||
check_required_commands
|
||||
|
||||
readonly ORIGIN_VERSION="$1"
|
||||
readonly FINAL_VERSION="$2"
|
||||
readonly ORIGIN_DB_NAME="$3"
|
||||
readonly ORIGIN_SERVICE_NAME="$4"
|
||||
|
||||
readonly COPY_DB_NAME="ou${ORIGIN_VERSION}"
|
||||
# Define the name of the working copy of the origin database
COPY_DB_NAME="ou${ORIGIN_VERSION}"
|
||||
# Define finale database name
|
||||
export FINALE_DB_NAME="ou${FINAL_VERSION}"
|
||||
readonly FINALE_DB_NAME
|
||||
readonly FINALE_SERVICE_NAME="${FINALE_DB_NAME}"
|
||||
# Define finale odoo service name
|
||||
FINALE_SERVICE_NAME="${FINALE_DB_NAME}"
|
||||
|
||||
readarray -t postgres_containers < <(docker ps --format '{{.Names}}' | grep postgres || true)
|
||||
# Service postgres name
|
||||
export POSTGRES_SERVICE_NAME="lokavaluto_postgres_1"
|
||||
|
||||
if [[ ${#postgres_containers[@]} -eq 0 ]]; then
|
||||
log_error "No running PostgreSQL container found. Please start a PostgreSQL container and try again."
|
||||
exit 1
|
||||
elif [[ ${#postgres_containers[@]} -gt 1 ]]; then
|
||||
log_error "Multiple PostgreSQL containers found:"
|
||||
printf ' %s\n' "${postgres_containers[@]}" >&2
|
||||
log_error "Please ensure only one PostgreSQL container is running."
|
||||
exit 1
|
||||
fi
|
||||
#############################################
|
||||
# DISPLAY ALL INPUT PARAMETERS
|
||||
#############################################
|
||||
|
||||
export POSTGRES_SERVICE_NAME="${postgres_containers[0]}"
|
||||
readonly POSTGRES_SERVICE_NAME
|
||||
echo "===== INPUT PARAMETERS ====="
|
||||
echo "Origin version .......... $ORIGIN_VERSION"
|
||||
echo "Final version ........... $FINAL_VERSION"
|
||||
echo "Origin DB name ........... $ORIGIN_DB_NAME"
|
||||
echo "Origin service name ..... $ORIGIN_SERVICE_NAME"
|
||||
|
||||
log_step "INPUT PARAMETERS"
|
||||
log_info "Origin version .......... $ORIGIN_VERSION"
|
||||
log_info "Final version ........... $FINAL_VERSION"
|
||||
log_info "Origin DB name ........... $ORIGIN_DB_NAME"
|
||||
log_info "Origin service name ..... $ORIGIN_SERVICE_NAME"
|
||||
|
||||
log_step "COMPUTED GLOBAL VARIABLES"
|
||||
log_info "Copy DB name ............. $COPY_DB_NAME"
|
||||
log_info "Finale DB name ........... $FINALE_DB_NAME"
|
||||
log_info "Finale service name ...... $FINALE_SERVICE_NAME"
|
||||
log_info "Postgres service name .... $POSTGRES_SERVICE_NAME"
|
||||
echo "
|
||||
===== COMPUTED GLOBALE VARIABLES ====="
|
||||
echo "Copy DB name ............. $COPY_DB_NAME"
|
||||
echo "Finale DB name ........... $FINALE_DB_NAME"
|
||||
echo "Finale service name ...... $FINALE_SERVICE_NAME"
|
||||
echo "Postgres service name .... $POSTGRES_SERVICE_NAME"
|
||||
|
||||
|
||||
|
||||
log_step "CHECKS ALL NEEDED COMPONENTS ARE AVAILABLE"
|
||||
# Function to launch an SQL request to the postgres container
|
||||
query_postgres_container(){
|
||||
local QUERY="$1"
|
||||
local DB_NAME="$2"
|
||||
if [ -z "$QUERY" ]; then
|
||||
return 0
|
||||
fi
|
||||
local result
|
||||
if ! result=$(docker exec -u 70 "$POSTGRES_SERVICE_NAME" psql -d "$DB_NAME" -t -A -c "$QUERY"); then
|
||||
printf "Failed to execute SQL query: %s\n" "$query" >&2
|
||||
printf "Error: %s\n" "$result" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "$result"
|
||||
}
|
||||
export -f query_postgres_container
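# Example call (illustrative): query_postgres_container "SELECT 1" "$COPY_DB_NAME"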
|
||||
|
||||
db_exists=$(docker exec -it -u 70 "$POSTGRES_SERVICE_NAME" psql -tc "SELECT 1 FROM pg_database WHERE datname = '$ORIGIN_DB_NAME'" | tr -d '[:space:]')
|
||||
if [[ "$db_exists" ]]; then
|
||||
log_info "Database '$ORIGIN_DB_NAME' found."
|
||||
# Function to copy the postgres databases
|
||||
copy_database(){
|
||||
local FROM_DB="$1"
|
||||
local TO_SERVICE="$2"
|
||||
local TO_DB="$3"
|
||||
docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm cp -f "$FROM_DB" "$TO_DB"@"$TO_SERVICE"
|
||||
}
|
||||
export -f copy_database
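# Example call (illustrative): copy_database "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME"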
|
||||
|
||||
# Function to copy the filetores
|
||||
copy_filestore(){
|
||||
local FROM_SERVICE="$1"
|
||||
local FROM_DB="$2"
|
||||
local TO_SERVICE="$3"
|
||||
local TO_DB="$4"
|
||||
mkdir -p /srv/datastore/data/"$TO_SERVICE"/var/lib/odoo/filestore/"$TO_DB" || exit 1
|
||||
rm -rf /srv/datastore/data/"$TO_SERVICE"/var/lib/odoo/filestore/"$TO_DB" || exit 1
|
||||
cp -a /srv/datastore/data/"$FROM_SERVICE"/var/lib/odoo/filestore/"$FROM_DB" /srv/datastore/data/"$TO_SERVICE"/var/lib/odoo/filestore/"$TO_DB" || exit 1
|
||||
echo "Filestore $FROM_SERVICE/$FROM_DB copied."
|
||||
}
|
||||
export -f copy_filestore
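# Note: mkdir -p followed by rm -rf ensures the parent path exists while the target
# directory itself is absent before cp -a recreates it from the origin filestore.
# Example call (illustrative): copy_filestore "$ORIGIN_SERVICE_NAME" "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME"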
|
||||
|
||||
##############################################
|
||||
# CHECKS ALL NEEDED COMPONENTS ARE AVAILABLE #
|
||||
##############################################
|
||||
|
||||
echo "
|
||||
==== CHECKS ALL NEEDED COMPONENTS ARE AVAILABLE ===="
|
||||
|
||||
# Check POSTGRES container is running
|
||||
if ! docker ps | grep -q "$POSTGRES_SERVICE_NAME"; then
|
||||
printf "Docker container %s is not running.\n" "$POSTGRES_SERVICE_NAME" >&2
|
||||
exit 1
|
||||
else
|
||||
log_error "Database '$ORIGIN_DB_NAME' not found in the local postgres service. Please add it and restart the upgrade process."
|
||||
exit 1
|
||||
echo "UPGRADE: container $POSTGRES_SERVICE_NAME running."
|
||||
fi
|
||||
|
||||
filestore_path="${DATASTORE_PATH}/${ORIGIN_SERVICE_NAME}/${FILESTORE_SUBPATH}/${ORIGIN_DB_NAME}"
|
||||
if [[ -d "$filestore_path" ]]; then
|
||||
log_info "Filestore '$filestore_path' found."
|
||||
# Check origin database is in the local postgres
|
||||
DB_EXISTS=$(docker exec -it -u 70 $POSTGRES_SERVICE_NAME psql -tc "SELECT 1 FROM pg_database WHERE datname = '$ORIGIN_DB_NAME'" | tr -d '[:space:]')
|
||||
if [ "$DB_EXISTS" ]; then
|
||||
echo "UPGRADE: Database '$ORIGIN_DB_NAME' found."
|
||||
else
|
||||
log_error "Filestore '$filestore_path' not found, please add it and restart the upgrade process."
|
||||
echo "ERROR: Database '$ORIGIN_DB_NAME' not found in the local postgress service. Please add it and restart the upgrade process."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log_step "LAUNCH VIRGIN ODOO IN FINAL VERSION"
|
||||
# Check that the origin filestore exist
|
||||
REPERTOIRE="/srv/datastore/data/${ORIGIN_SERVICE_NAME}/var/lib/odoo/filestore/${ORIGIN_DB_NAME}"
|
||||
if [ -d "$REPERTOIRE" ]; then
|
||||
echo "UPGRADE: '$REPERTOIRE' filestore found."
|
||||
else
|
||||
echo "ERROR: '$REPERTOIRE' filestore not found, please add it and restart the upgrade process."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm ls | grep "$FINALE_SERVICE_NAME"; then
|
||||
log_info "Removing existing finale database and filestore..."
|
||||
#######################################
|
||||
# LAUNCH VIRGIN ODOO IN FINAL VERSION #
|
||||
#######################################
|
||||
|
||||
# Remove the finale database and filestore if they already exist (we need a virgin Odoo)
|
||||
if docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm ls | grep -q "$FINALE_SERVICE_NAME"; then
|
||||
docker exec -u 70 "$POSTGRES_SERVICE_NAME" pgm rm -f "$FINALE_SERVICE_NAME"
|
||||
sudo rm -rf "${DATASTORE_PATH}/${FINALE_SERVICE_NAME}/${FILESTORE_SUBPATH}/${FINALE_SERVICE_NAME}"
|
||||
rm -rf /srv/datastore/data/"$FINALE_SERVICE_NAME"/var/lib/odoo/filestore/"$FINALE_SERVICE_NAME"
|
||||
fi
|
||||
|
||||
run_compose --debug run "$FINALE_SERVICE_NAME" -i base --stop-after-init --no-http
|
||||
compose --debug run "$FINALE_SERVICE_NAME" -i base --stop-after-init --no-http
|
||||
|
||||
log_info "Model database in final Odoo version created."
|
||||
echo "Model database in final Odoo version created."
|
||||
|
||||
log_step "COPY ORIGINAL COMPONENTS"
|
||||
############################
|
||||
# COPY ORIGINAL COMPONENTS #
|
||||
############################
|
||||
|
||||
copy_database "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME"
|
||||
log_info "Original database copied to ${COPY_DB_NAME}@${COPY_DB_NAME}."
|
||||
echo "
|
||||
==== COPY ORIGINAL COMPONENTS ===="
|
||||
echo "UPGRADE: Start copy"
|
||||
|
||||
copy_filestore "$ORIGIN_SERVICE_NAME" "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME"
|
||||
log_info "Original filestore copied."
|
||||
# Copy database
|
||||
copy_database "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME" || exit 1
|
||||
echo "UPGRADE: original database copied in ${COPY_DB_NAME}@${COPY_DB_NAME}."
|
||||
|
||||
# Copy filestore
|
||||
copy_filestore "$ORIGIN_SERVICE_NAME" "$ORIGIN_DB_NAME" "$COPY_DB_NAME" "$COPY_DB_NAME" || exit 1
|
||||
echo "UPGRADE: original filestore copied."
|
||||
|
||||
|
||||
log_step "PATH OF MIGRATION"
|
||||
#####################
|
||||
# PATH OF MIGRATION #
|
||||
#####################
|
||||
|
||||
readarray -t versions < <(seq $((ORIGIN_VERSION + 1)) "$FINAL_VERSION")
|
||||
log_info "Migration path is ${versions[*]}"
|
||||
echo "
|
||||
==== PATH OF MIGRATION ===="
|
||||
# List all the versions to migrate through
|
||||
declare -a versions
|
||||
nb_migrations=$(($FINAL_VERSION - $ORIGIN_VERSION))
|
||||
|
||||
|
||||
log_step "DATABASE PREPARATION"
|
||||
|
||||
"${SCRIPT_DIR}/scripts/prepare_db.sh" "$COPY_DB_NAME" "$COPY_DB_NAME" "$FINALE_DB_NAME" "$FINALE_SERVICE_NAME"
|
||||
|
||||
|
||||
log_step "UPGRADE PROCESS"
|
||||
|
||||
for version in "${versions[@]}"; do
|
||||
log_info "START UPGRADE TO ${version}.0"
|
||||
|
||||
"${SCRIPT_DIR}/versions/${version}.0/pre_upgrade.sh"
|
||||
"${SCRIPT_DIR}/versions/${version}.0/upgrade.sh"
|
||||
"${SCRIPT_DIR}/versions/${version}.0/post_upgrade.sh"
|
||||
|
||||
log_info "END UPGRADE TO ${version}.0"
|
||||
# Build the migration path
|
||||
for ((i = 0; i<$nb_migrations; i++))
|
||||
do
|
||||
versions[$i]=$(($ORIGIN_VERSION + 1 + i))
|
||||
done
|
||||
echo "UPGRADE: Migration path is ${versions[@]}"
|
||||
|
||||
log_step "POST-UPGRADE PROCESSES"
|
||||
|
||||
"${SCRIPT_DIR}/scripts/finalize_db.sh" "$FINALE_DB_NAME" "$FINALE_SERVICE_NAME"
|
||||
########################
|
||||
# DATABASE PREPARATION #
|
||||
########################
|
||||
|
||||
log_step "UPGRADE PROCESS ENDED WITH SUCCESS"
|
||||
echo "
|
||||
==== DATABASE PREPARATION ===="
|
||||
|
||||
./prepare_db.sh "$COPY_DB_NAME" "$COPY_DB_NAME" "$FINALE_DB_MODEL_NAME" "$FINALE_SERVICE_NAME" || exit 1
|
||||
|
||||
|
||||
###################
|
||||
# UPGRADE PROCESS #
|
||||
###################
|
||||
|
||||
for version in "${versions[@]}"
|
||||
do
|
||||
echo "START UPGRADE TO ${version}.0"
|
||||
start_version=$((version-1))
|
||||
end_version="$version"
|
||||
|
||||
### Go to the directory holding the upgrade scripts
|
||||
cd "${end_version}.0"
|
||||
|
||||
### Execute pre_upgrade scripts
|
||||
./pre_upgrade.sh || exit 1
|
||||
|
||||
### Start upgrade
|
||||
./upgrade.sh || exit 1
|
||||
|
||||
### Execute post-upgrade scripts
|
||||
./post_upgrade.sh || exit 1
|
||||
|
||||
### Return to the parent directory for the following steps
|
||||
cd ..
|
||||
echo "END UPGRADE TO ${version}.0"
|
||||
done
|
||||
## END UPGRADE LOOP
|
||||
|
||||
##########################
|
||||
# POST-UPGRADE PROCESSES #
|
||||
##########################
|
||||
./finalize_db.sh "$FINALE_DB_NAME" "$FINALE_SERVICE_NAME" || exit 1
|
||||
|
||||
|
||||
echo "UPGRADE PROCESS ENDED WITH SUCCESS"
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Post migration to 13.0..."
|
||||
|
||||
#compose --debug run ou13 -u base --stop-after-init --no-http
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
run_compose run -p 8013:8069 ou13 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou13
|
||||
@@ -1,6 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Post migration to 14.0..."
|
||||
|
||||
#compose --debug run ou14 -u base --stop-after-init --no-http
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
run_compose run -p 8014:8069 ou14 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou14 --load=base,web,openupgrade_framework
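# --load makes web and openupgrade_framework server-wide modules so the OpenUpgrade
# migration scripts run while "-u all" updates the database (assumes the OpenUpgrade
# addons are present on the ou14 image's addons path).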
|
||||
@@ -1,6 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Post migration to 15.0..."
|
||||
|
||||
#compose --debug run ou15 -u base --stop-after-init --no-http
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
run_compose run -p 8015:8069 ou15 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou15 --load=base,web,openupgrade_framework
|
||||
@@ -1,6 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Post migration to 16.0..."
|
||||
|
||||
#compose --debug run ou16 -u base --stop-after-init --no-http
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
run_compose run -p 8016:8069 ou16 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou16 --load=base,web,openupgrade_framework
|
||||
@@ -1,32 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Post migration to 17.0..."
|
||||
|
||||
# Execute SQL post-migration commands
|
||||
POST_MIGRATE_SQL=$(cat <<'EOF'
|
||||
DO $$
|
||||
DECLARE
|
||||
plan_id INTEGER;
|
||||
BEGIN
|
||||
-- Check if the temporary 'migration_PROJECTS' analytic plan created in pre-migration exists
|
||||
SELECT id INTO plan_id FROM account_analytic_plan WHERE complete_name = 'migration_PROJECTS' LIMIT 1;
|
||||
|
||||
-- If it does exist, delete it
|
||||
IF plan_id IS NOT NULL THEN
|
||||
DELETE FROM account_analytic_plan WHERE complete_name = 'migration_PROJECTS';
|
||||
SELECT id INTO plan_id FROM account_analytic_plan WHERE complete_name = 'Projects' LIMIT 1;
|
||||
-- Delete existing system parameter (if any)
|
||||
DELETE FROM ir_config_parameter WHERE key = 'analytic.project_plan';
|
||||
-- Insert the system parameter with the correct plan ID
|
||||
INSERT INTO ir_config_parameter (key, value, create_date, write_date)
|
||||
VALUES ('analytic.project_plan', plan_id::text, now(), now());
|
||||
END IF;
|
||||
END $$;
|
||||
EOF
|
||||
)
|
||||
echo "SQL command = $POST_MIGRATE_SQL"
|
||||
query_postgres_container "$POST_MIGRATE_SQL" ou17 || exit 1
|
||||
|
||||
|
||||
#compose --debug run ou17 -u base --stop-after-init --no-http
|
||||
@@ -1,57 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Prepare migration to 17.0..."
|
||||
|
||||
# Copy database
|
||||
copy_database ou16 ou17 ou17 || exit 1
|
||||
|
||||
# Execute SQL pre-migration commands
|
||||
PRE_MIGRATE_SQL=$(cat <<'EOF'
|
||||
DO $$
|
||||
DECLARE
|
||||
plan_id INTEGER;
|
||||
BEGIN
|
||||
-- Check if the 'Projects' analytic plan exists
|
||||
SELECT id INTO plan_id FROM account_analytic_plan WHERE name = 'Projects' LIMIT 1;
|
||||
|
||||
-- If it doesn't exist, create it
|
||||
IF plan_id IS NULL THEN
|
||||
INSERT INTO account_analytic_plan (name, complete_name, default_applicability, create_date, write_date)
|
||||
VALUES ('Projects', 'migration_PROJECTS', 'optional', now(), now())
|
||||
RETURNING id INTO plan_id;
|
||||
END IF;
|
||||
|
||||
-- Delete existing system parameter (if any)
|
||||
DELETE FROM ir_config_parameter WHERE key = 'analytic.project_plan';
|
||||
|
||||
-- Insert the system parameter with the correct plan ID
|
||||
INSERT INTO ir_config_parameter (key, value, create_date, write_date)
|
||||
VALUES ('analytic.project_plan', plan_id::text, now(), now());
|
||||
END $$;
|
||||
EOF
|
||||
)
|
||||
echo "SQL command = $PRE_MIGRATE_SQL"
|
||||
query_postgres_container "$PRE_MIGRATE_SQL" ou17 || exit 1
|
||||
|
||||
PRE_MIGRATE_SQL_2=$(cat <<'EOF'
|
||||
DELETE FROM ir_model_fields WHERE name = 'kanban_state_label';
|
||||
EOF
|
||||
)
|
||||
echo "SQL command = $PRE_MIGRATE_SQL_2"
|
||||
query_postgres_container "$PRE_MIGRATE_SQL_2" ou17 || exit 1
|
||||
|
||||
PRE_MIGRATE_SQL_3=$(cat <<'EOF'
|
||||
DELETE FROM ir_model_fields WHERE name = 'phone' AND model='hr.employee';
|
||||
DELETE FROM ir_model_fields WHERE name = 'hr_responsible_id' AND model='hr.job';
|
||||
DELETE FROM ir_model_fields WHERE name = 'address_home_id' AND model='hr.employee';
|
||||
DELETE FROM ir_model_fields WHERE name = 'manager_id' AND model='project.task';
|
||||
EOF
|
||||
)
|
||||
echo "SQL command = $PRE_MIGRATE_SQL_3"
|
||||
query_postgres_container "$PRE_MIGRATE_SQL_3" ou17 || exit 1
|
||||
|
||||
# Copy filestores
|
||||
copy_filestore ou16 ou16 ou17 ou17 || exit 1
|
||||
|
||||
echo "Ready for migration to 17.0!"
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
run_compose run -p 8017:8069 ou17 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou17 --load=base,web,openupgrade_framework
|
||||
@@ -1,160 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Post migration to 18.0..."
|
||||
|
||||
# ============================================================================
|
||||
# BANK-PAYMENT -> BANK-PAYMENT-ALTERNATIVE DATA MIGRATION
|
||||
# Source PR: https://github.com/OCA/bank-payment-alternative/pull/42
|
||||
# ============================================================================
|
||||
BANK_PAYMENT_POST_SQL=$(cat <<'EOF'
|
||||
DO $$
|
||||
DECLARE
|
||||
mode_rec RECORD;
|
||||
new_line_id INTEGER;
|
||||
journal_rec RECORD;
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'account_payment_mode') THEN
|
||||
RAISE NOTICE 'No account_payment_mode table found, skipping bank-payment migration';
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Starting bank-payment to bank-payment-alternative migration...';
|
||||
|
||||
ALTER TABLE account_payment_method_line
|
||||
ADD COLUMN IF NOT EXISTS old_payment_mode_id INT,
|
||||
ADD COLUMN IF NOT EXISTS old_refund_payment_mode_id INT;
|
||||
|
||||
FOR mode_rec IN
|
||||
SELECT id, name, company_id, payment_method_id,
|
||||
fixed_journal_id AS journal_id, bank_account_link,
|
||||
create_date, create_uid, write_date, write_uid,
|
||||
show_bank_account, refund_payment_mode_id, active
|
||||
FROM account_payment_mode
|
||||
LOOP
|
||||
INSERT INTO account_payment_method_line (
|
||||
name, payment_method_id, bank_account_link, journal_id,
|
||||
selectable, company_id, create_uid, create_date,
|
||||
write_uid, write_date, show_bank_account,
|
||||
old_payment_mode_id, old_refund_payment_mode_id, active
|
||||
) VALUES (
|
||||
to_jsonb(mode_rec.name),
|
||||
mode_rec.payment_method_id,
|
||||
mode_rec.bank_account_link,
|
||||
mode_rec.journal_id,
|
||||
true,
|
||||
mode_rec.company_id,
|
||||
mode_rec.create_uid,
|
||||
mode_rec.create_date,
|
||||
mode_rec.write_uid,
|
||||
mode_rec.write_date,
|
||||
mode_rec.show_bank_account,
|
||||
mode_rec.id,
|
||||
mode_rec.refund_payment_mode_id,
|
||||
mode_rec.active
|
||||
) RETURNING id INTO new_line_id;
|
||||
|
||||
IF mode_rec.bank_account_link = 'variable' THEN
|
||||
FOR journal_rec IN
|
||||
SELECT rel.journal_id
|
||||
FROM account_payment_mode_variable_journal_rel rel
|
||||
WHERE rel.payment_mode_id = mode_rec.id
|
||||
LOOP
|
||||
INSERT INTO account_payment_method_line_journal_rel
|
||||
(account_payment_method_line_id, account_journal_id)
|
||||
VALUES (new_line_id, journal_rec.journal_id)
|
||||
ON CONFLICT DO NOTHING;
|
||||
END LOOP;
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Migrated payment mode % -> payment method line %', mode_rec.id, new_line_id;
|
||||
END LOOP;
|
||||
|
||||
UPDATE account_payment_method_line apml
|
||||
SET refund_payment_method_line_id = apml2.id
|
||||
FROM account_payment_method_line apml2
|
||||
WHERE apml.old_refund_payment_mode_id IS NOT NULL
|
||||
AND apml.old_refund_payment_mode_id = apml2.old_payment_mode_id;
|
||||
|
||||
UPDATE account_move am
|
||||
SET preferred_payment_method_line_id = apml.id
|
||||
FROM account_payment_mode apm, account_payment_method_line apml
|
||||
WHERE am.payment_mode_id = apm.id
|
||||
AND apm.id = apml.old_payment_mode_id
|
||||
AND am.preferred_payment_method_line_id IS NULL;
|
||||
|
||||
RAISE NOTICE 'account_payment_base_oca migration completed';
|
||||
END $$;
|
||||
EOF
|
||||
)
|
||||
echo "Executing bank-payment base migration..."
|
||||
query_postgres_container "$BANK_PAYMENT_POST_SQL" ou18 || exit 1
|
||||
|
||||
BANK_PAYMENT_BATCH_SQL=$(cat <<'EOF'
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'account_payment_mode') THEN
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
IF NOT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'account_payment_order') THEN
|
||||
RAISE NOTICE 'No account_payment_order table, skipping batch migration';
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Starting account_payment_batch_oca migration...';
|
||||
|
||||
IF EXISTS (SELECT FROM information_schema.columns
|
||||
WHERE table_name = 'account_payment_method' AND column_name = 'payment_order_only') THEN
|
||||
UPDATE account_payment_method
|
||||
SET payment_order_ok = payment_order_only
|
||||
WHERE payment_order_only IS NOT NULL;
|
||||
END IF;
|
||||
|
||||
UPDATE account_payment_method_line apml
|
||||
SET payment_order_ok = apm.payment_order_ok,
|
||||
no_debit_before_maturity = apm.no_debit_before_maturity,
|
||||
default_payment_mode = apm.default_payment_mode,
|
||||
default_invoice = apm.default_invoice,
|
||||
default_target_move = apm.default_target_move,
|
||||
default_date_type = apm.default_date_type,
|
||||
default_date_prefered = apm.default_date_prefered,
|
||||
group_lines = apm.group_lines
|
||||
FROM account_payment_mode apm
|
||||
WHERE apml.old_payment_mode_id IS NOT NULL
|
||||
AND apm.id = apml.old_payment_mode_id;
|
||||
|
||||
IF EXISTS (SELECT FROM information_schema.tables
|
||||
WHERE table_name = 'account_journal_account_payment_method_line_rel') THEN
|
||||
DELETE FROM account_journal_account_payment_method_line_rel
|
||||
WHERE account_payment_method_line_id IN (
|
||||
SELECT id FROM account_payment_method_line WHERE old_payment_mode_id IS NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO account_journal_account_payment_method_line_rel
|
||||
(account_payment_method_line_id, account_journal_id)
|
||||
SELECT apml.id, rel.account_journal_id
|
||||
FROM account_journal_account_payment_mode_rel rel
|
||||
JOIN account_payment_method_line apml ON rel.account_payment_mode_id = apml.old_payment_mode_id
|
||||
ON CONFLICT DO NOTHING;
|
||||
END IF;
|
||||
|
||||
UPDATE account_payment_order apo
|
||||
SET payment_method_line_id = apml.id,
|
||||
payment_method_code = apm_method.code
|
||||
FROM account_payment_method_line apml,
|
||||
account_payment_mode apm,
|
||||
account_payment_method apm_method
|
||||
WHERE apo.payment_mode_id = apm.id
|
||||
AND apml.old_payment_mode_id = apm.id
|
||||
AND apm_method.id = apml.payment_method_id;
|
||||
|
||||
RAISE NOTICE 'account_payment_batch_oca migration completed';
|
||||
RAISE NOTICE 'NOTE: Payment lots for open orders must be generated manually via Odoo UI or script';
|
||||
END $$;
|
||||
EOF
|
||||
)
|
||||
echo "Executing bank-payment batch migration..."
|
||||
query_postgres_container "$BANK_PAYMENT_BATCH_SQL" ou18 || exit 1
|
||||
|
||||
echo "Post migration to 18.0 completed!"
|
||||
@@ -1,97 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "Prepare migration to 18.0..."
|
||||
|
||||
# Copy database
|
||||
copy_database ou17 ou18 ou18 || exit 1
|
||||
|
||||
# ============================================================================
|
||||
# BANK-PAYMENT -> BANK-PAYMENT-ALTERNATIVE MODULE RENAMING
|
||||
# Migration from OCA/bank-payment to OCA/bank-payment-alternative
|
||||
# Source PR: https://github.com/OCA/bank-payment-alternative/pull/42
|
||||
#
|
||||
# This renaming MUST be done BEFORE OpenUpgrade runs, so that the migration
|
||||
# scripts in the new modules (account_payment_base_oca, account_payment_batch_oca)
|
||||
# can properly migrate the data.
|
||||
# ============================================================================
|
||||
BANK_PAYMENT_RENAME_SQL=$(cat <<'EOF'
|
||||
DO $$
|
||||
DECLARE
|
||||
renamed_modules TEXT[][] := ARRAY[
|
||||
['account_payment_mode', 'account_payment_base_oca'],
|
||||
['account_banking_pain_base', 'account_payment_sepa_base'],
|
||||
['account_banking_sepa_credit_transfer', 'account_payment_sepa_credit_transfer'],
|
||||
['account_payment_order', 'account_payment_batch_oca']
|
||||
];
|
||||
merged_modules TEXT[][] := ARRAY[
|
||||
['account_payment_partner', 'account_payment_base_oca']
|
||||
];
|
||||
old_name TEXT;
|
||||
new_name TEXT;
|
||||
old_module_id INTEGER;
|
||||
deleted_count INTEGER;
|
||||
BEGIN
|
||||
FOR i IN 1..array_length(renamed_modules, 1) LOOP
|
||||
old_name := renamed_modules[i][1];
|
||||
new_name := renamed_modules[i][2];
|
||||
|
||||
SELECT id INTO old_module_id FROM ir_module_module WHERE name = old_name;
|
||||
IF old_module_id IS NOT NULL THEN
|
||||
RAISE NOTICE 'Renaming module: % -> %', old_name, new_name;
|
||||
UPDATE ir_module_module SET name = new_name WHERE name = old_name;
|
||||
UPDATE ir_model_data SET module = new_name WHERE module = old_name;
|
||||
UPDATE ir_module_module_dependency SET name = new_name WHERE name = old_name;
|
||||
END IF;
|
||||
END LOOP;
|
||||
|
||||
FOR i IN 1..array_length(merged_modules, 1) LOOP
|
||||
old_name := merged_modules[i][1];
|
||||
new_name := merged_modules[i][2];
|
||||
|
||||
SELECT id INTO old_module_id FROM ir_module_module WHERE name = old_name;
|
||||
IF old_module_id IS NOT NULL THEN
|
||||
RAISE NOTICE 'Merging module: % -> %', old_name, new_name;
|
||||
|
||||
DELETE FROM ir_model_data
|
||||
WHERE module = old_name
|
||||
AND name IN (SELECT name FROM ir_model_data WHERE module = new_name);
|
||||
GET DIAGNOSTICS deleted_count = ROW_COUNT;
|
||||
IF deleted_count > 0 THEN
|
||||
RAISE NOTICE ' Deleted % duplicate ir_model_data records', deleted_count;
|
||||
END IF;
|
||||
|
||||
UPDATE ir_model_data SET module = new_name WHERE module = old_name;
|
||||
UPDATE ir_module_module_dependency SET name = new_name WHERE name = old_name;
|
||||
UPDATE ir_module_module SET state = 'uninstalled' WHERE name = old_name;
|
||||
DELETE FROM ir_module_module WHERE name = old_name;
|
||||
END IF;
|
||||
END LOOP;
|
||||
END $$;
|
||||
EOF
|
||||
)
|
||||
echo "Executing bank-payment module renaming..."
|
||||
query_postgres_container "$BANK_PAYMENT_RENAME_SQL" ou18 || exit 1
|
||||
|
||||
BANK_PAYMENT_PRE_SQL=$(cat <<'EOF'
|
||||
UPDATE ir_model_data
|
||||
SET noupdate = false
|
||||
WHERE module = 'account_payment_base_oca'
|
||||
AND name = 'view_account_invoice_report_search';
|
||||
EOF
|
||||
)
|
||||
echo "Executing bank-payment pre-migration..."
|
||||
query_postgres_container "$BANK_PAYMENT_PRE_SQL" ou18 || exit 1
|
||||
|
||||
# Execute SQL pre-migration commands
|
||||
PRE_MIGRATE_SQL=$(cat <<'EOF'
|
||||
UPDATE account_analytic_plan SET default_applicability=NULL WHERE default_applicability='optional';
|
||||
EOF
|
||||
)
|
||||
echo "SQL command = $PRE_MIGRATE_SQL"
|
||||
query_postgres_container "$PRE_MIGRATE_SQL" ou18 || exit 1
|
||||
|
||||
# Copy filestores
|
||||
copy_filestore ou17 ou17 ou18 ou18 || exit 1
|
||||
|
||||
echo "Ready for migration to 18.0!"
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
run_compose run -p 8018:8069 ou18 --config=/opt/odoo/auto/odoo.conf --stop-after-init -u all --workers 0 --log-level=debug --max-cron-threads=0 --limit-time-real=10000 --database=ou18 --load=base,web,openupgrade_framework
|
||||