# Sunday, October 26, 2025  (NOTE: stray header line — kept as a comment; the shebang below should be the first line of the file)

#!/usr/bin/env bash # ============================================================================= # dp_migrate.sh (v4am) - Oracle 19c Data Pump migration & compare toolkit # ============================================================================= # - Robust debug logs + safe DIRECTORY validator (no ORA-01403) # - Runtime Debug Toggle (menu item) # - SQL runners use SET DEFINE OFF (avoid '&' issues) # - Data Pump LOGFILE = filename only; LOGTIME=ALL # - Import: sanitize dumpfile pattern (reject path parts) # - Export/Import menus: METADATA_ONLY vs FULL selectors # - DDL extraction (SQL terminators ON; no q'[STORAGE]' pitfalls) # - Compare engines: EXTERNAL / FILE / LOCAL (no direct DB link needed) # - HTML reports + inline email # - Exit option added to every (sub)menu # - **New**: explicit schema confirmation prompts and content echo # ============================================================================= set -euo pipefail CONFIG_FILE="${1:-dp_migrate.conf}" SCRIPT_NAME="$(basename "$0")" RUN_ID="$(date +%Y%m%d_%H%M%S)" # ------------------------------ Paths ----------------------------------------- WORK_DIR="${WORK_DIR:-/tmp/dp_migrate_${RUN_ID}}" LOG_DIR="${LOG_DIR:-${WORK_DIR}/logs}" PAR_DIR="${PAR_DIR:-${WORK_DIR}/parfiles}" DDL_DIR="${DDL_DIR:-${WORK_DIR}/ddls}" COMPARE_DIR="${COMPARE_DIR:-${WORK_DIR}/compare}" COMMON_DIR_NAME="${COMMON_DIR_NAME:-DP_DIR}" LOCAL_COMPARE_DIR="${LOCAL_COMPARE_DIR:-/tmp/dp_compare}" mkdir -p "$WORK_DIR" "$LOG_DIR" "$PAR_DIR" "$DDL_DIR" "$COMPARE_DIR" "$LOCAL_COMPARE_DIR" # ------------------------ Pretty print & debug helpers ------------------------- ce() { printf "%b\n" "$*"; } ok() { ce "\e[32m✔ $*\e[0m"; } warn() { ce "\e[33m! 
$*\e[0m"; } err() { ce "\e[31m✘ $*\e[0m"; } DEBUG="${DEBUG:-Y}" debug() { if [[ "${DEBUG^^}" == "Y" ]]; then ce "\e[36m[DEBUG]\e[0m $*"; fi; } toggle_debug() { if [[ "${DEBUG^^}" == "Y" ]]; then DEBUG="N"; ok "DEBUG turned OFF" else DEBUG="Y"; ok "DEBUG turned ON" fi ce "Current DEBUG=${DEBUG}" } # ------------------------------ Load Config ----------------------------------- [[ -f "$CONFIG_FILE" ]] || { err "Config file not found: $CONFIG_FILE"; exit 1; } # shellcheck disable=SC1090 source "$CONFIG_FILE" need_vars=( SRC_EZCONNECT TGT_EZCONNECT SYS_PASSWORD DUMPFILE_PREFIX ) for v in "${need_vars[@]}"; do [[ -n "${!v:-}" ]] || { err "Missing required config variable: $v"; exit 1; } done # ----------------------------- Defaults --------------------------------------- PARALLEL="${PARALLEL:-4}" COMPRESSION="${COMPRESSION:-METADATA_ONLY}" ENCRYPTION_PASSWORD="${ENCRYPTION_PASSWORD:-}" TABLE_EXISTS_ACTION="${TABLE_EXISTS_ACTION:-APPEND}" REMAP_SCHEMA="${REMAP_SCHEMA:-}" REMAP_TABLESPACE="${REMAP_TABLESPACE:-}" INCLUDE="${INCLUDE:-}" EXCLUDE="${EXCLUDE:-}" FLASHBACK_SCN="${FLASHBACK_SCN:-}" FLASHBACK_TIME="${FLASHBACK_TIME:-}" ESTIMATE_ONLY="${ESTIMATE_ONLY:-N}" EXPDP_TRACE="${EXPDP_TRACE:-}" IMPDP_TRACE="${IMPDP_TRACE:-}" SCHEMAS_LIST_EXP="${SCHEMAS_LIST_EXP:-}" SCHEMAS_LIST_IMP="${SCHEMAS_LIST_IMP:-}" SKIP_SCHEMAS="${SKIP_SCHEMAS:-}" SKIP_TABLESPACES="${SKIP_TABLESPACES:-SYSTEM,SYSAUX,TEMP,UNDOTBS1,UNDOTBS2}" DRY_RUN_ONLY="${DRY_RUN_ONLY:-N}" REPORT_EMAILS="${REPORT_EMAILS:-}" MAIL_ENABLED="${MAIL_ENABLED:-Y}" MAIL_FROM="${MAIL_FROM:-noreply@localhost}" MAIL_SUBJECT_PREFIX="${MAIL_SUBJECT_PREFIX:-[Oracle Compare]}" MAIL_METHOD="${MAIL_METHOD:-auto}" COMPARE_ENGINE="${COMPARE_ENGINE:-EXTERNAL}" # EXTERNAL | FILE | LOCAL EXACT_ROWCOUNT="${EXACT_ROWCOUNT:-N}" # Preferred export default path (shows to user, can be changed) EXPORT_DIR_PATH="${EXPORT_DIR_PATH:-${NAS_PATH:-}}" NAS_PATH="${NAS_PATH:-}" ok "Using config: $CONFIG_FILE" ok "Work: $WORK_DIR | Logs: $LOG_DIR | 
Parfiles: $PAR_DIR | DDLs: $DDL_DIR | Compare: $COMPARE_DIR" # ---------------------------- Pre-flight checks -------------------------------- for b in sqlplus expdp impdp; do command -v "$b" >/dev/null 2>&1 || { err "Missing required binary: $b"; exit 1; } done mask_pwd() { sed 's#[^/"]\{1,\}@#***@#g' | sed 's#sys/[^@]*@#sys/****@#g'; } # --- filename/path helpers (prevent paths in Data Pump LOGFILE) --- basename_safe() { local x="${1:-}"; x="${x##*/}"; printf "%s" "$x"; } reject_if_pathlike() { local x="${1:-}"; if [[ "$x" == *"/"* ]]; then warn "Path component detected and removed: [$x]"; x="${x##*/}"; fi; printf "%s" "$x"; } # ------------------------------- SQL helpers ---------------------------------- run_sql() { local ez="$1"; shift local tag="${1:-sql}"; shift || true local sql="$*" local conn="sys/${SYS_PASSWORD}@${ez} as sysdba" local logf="${LOG_DIR}/${tag}_${RUN_ID}.log" debug "run_sql(tag=${tag}) on ${ez} -> $logf" sqlplus -s "$conn" <"$logf" 2>&1 SET PAGES 0 FEEDBACK OFF LINES 32767 VERIFY OFF HEADING OFF ECHO OFF LONG 1000000 LONGCHUNKSIZE 1000000 SET DEFINE OFF ${sql} EXIT SQL if grep -qi "ORA-" "$logf"; then err "SQL error: ${tag} (see $logf)" tail -n 120 "$logf" | mask_pwd | sed 's/^/ /' exit 1 fi ok "SQL ok: ${tag}" } # NON-FATAL SQL (returns 0=ok, 1=error) run_sql_try() { local ez="$1"; shift local tag="${1:-sqltry}"; shift || true local sql="$*" local conn="sys/${SYS_PASSWORD}@${ez} as sysdba" local logf="${LOG_DIR}/${tag}_${RUN_ID}.log" debug "run_sql_try(tag=${tag}) on ${ez} -> $logf" sqlplus -s "$conn" <"$logf" 2>&1 SET PAGES 0 FEEDBACK OFF LINES 32767 VERIFY OFF HEADING OFF ECHO OFF LONG 1000000 LONGCHUNKSIZE 1000000 SET DEFINE OFF ${sql} EXIT SQL if grep -qi "ORA-" "$logf"; then warn "SQL (non-fatal) error on ${tag} — see $logf" tail -n 60 "$logf" | mask_pwd | sed 's/^/ /' return 1 fi ok "SQL ok (non-fatal): ${tag}" return 0 } run_sql_spool_local() { local ez="$1"; shift local tag="$1"; shift local out="$1"; shift local body="$*" local 
conn="sys/${SYS_PASSWORD}@${ez} as sysdba" local logf="${LOG_DIR}/${tag}_${RUN_ID}.log" debug "run_sql_spool_local(tag=${tag}) -> spool $out ; log=$logf" sqlplus -s "$conn" <"$logf" 2>&1 SET PAGESIZE 0 LINESIZE 4000 LONG 1000000 LONGCHUNKSIZE 1000000 TRIMSPOOL ON TRIMOUT ON FEEDBACK OFF VERIFY OFF HEADING OFF ECHO OFF SET DEFINE OFF WHENEVER SQLERROR EXIT SQL.SQLCODE SPOOL $out ${body} SPOOL OFF EXIT SQL if grep -qi "ORA-" "$logf"; then err "SQL error: ${tag} (see $logf)" tail -n 120 "$logf" | mask_pwd | sed 's/^/ /' exit 1 fi ok "Spool ok: $out" } # ------------------------------- Mail helpers --------------------------------- detect_mail_stack() { local forced="${MAIL_METHOD:-auto}" case "${forced}" in sendmail) command -v sendmail >/dev/null && { echo sendmail; return; } ;; mailutils) (mail --version 2>/dev/null | grep -qi "mailutils") && { echo mailutils; return; } ;; bsdmail) (mail -V 2>/dev/null | grep -qi "bsd") && { echo bsdmail; return; } ;; mailx) command -v mailx >/dev/null && { echo mailx; return; } ;; auto|*) : ;; esac if command -v sendmail >/dev/null 2>&1; then echo sendmail; return; fi if mail --version 2>/dev/null | grep -qi "mailutils"; then echo mailutils; return; fi if mail -V 2>/dev/null | grep -qi "bsd"; then echo bsdmail; return; fi if command -v mailx >/dev/null 2>&1; then echo mailx; return; fi echo none } email_inline_html() { local file="$1" subject="$2" [[ "${MAIL_ENABLED^^}" != "Y" ]] && { warn "MAIL_ENABLED!=Y; skip email."; return 0; } [[ -z "${REPORT_EMAILS}" ]] && { warn "REPORT_EMAILS empty; skip email."; return 0; } [[ ! 
-f "$file" ]] && { warn "email_inline_html: $file not found"; return 1; } local method; method="$(detect_mail_stack)" debug "Email stack detected: ${method}; subject=${subject}; to=${REPORT_EMAILS}; file=${file}" case "$method" in sendmail) { echo "From: ${MAIL_FROM}" echo "To: ${REPORT_EMAILS}" echo "Subject: ${subject}" echo "MIME-Version: 1.0" echo "Content-Type: text/html; charset=UTF-8" echo "Content-Transfer-Encoding: 8bit" echo cat "$file" } | sendmail -t || { warn "sendmail failed"; return 1; } ;; mailutils) mail -a "From: ${MAIL_FROM}" \ -a "MIME-Version: 1.0" \ -a "Content-Type: text/html; charset=UTF-8" \ -s "${subject}" ${REPORT_EMAILS} < "$file" \ || { warn "mail (mailutils) send failed"; return 1; } ;; bsdmail) mail -a "From: ${MAIL_FROM}" \ -a "MIME-Version: 1.0" \ -a "Content-Type: text/html; charset=UTF-8" \ -s "${subject}" ${REPORT_EMAILS} < "$file" \ || { warn "mail (BSD) send failed"; return 1; } ;; mailx) if mailx -V 2>&1 | grep -qiE "heirloom|s-nail|nail"; then if mailx -a "Content-Type: text/html" -s test "$MAIL_FROM" &1 | grep -qi "unknown option"; then mailx -r "$MAIL_FROM" -s "${subject}" ${REPORT_EMAILS} < "$file" \ || { warn "mailx send failed"; return 1; } else mailx -r "$MAIL_FROM" -a "Content-Type: text/html; charset=UTF-8" -s "${subject}" ${REPORT_EMAILS} < "$file" \ || { warn "mailx send failed"; return 1; } fi else mailx -s "${subject}" ${REPORT_EMAILS} < "$file" \ || { warn "mailx (generic) send failed"; return 1; } fi ;; none) warn "No supported mailer (sendmail/mail/mailx) found; skipping email." 
return 1 ;; esac ok "Inline email sent to ${REPORT_EMAILS} via ${method}" } # ----------------------- DIRECTORY helpers (NON-FATAL) ------------------------ create_or_replace_directory() { local ez="$1" dir_name="$2" dir_path="$3" host_tag="$4" [[ -z "$dir_path" ]] && { warn "create_or_replace_directory: dir_path is empty"; return 1; } dir_name="$(echo "$dir_name" | tr '[:lower:]' '[:upper:]')" debug "CREATE OR REPLACE DIRECTORY ${dir_name} AS '${dir_path}' on ${host_tag} (${ez})" run_sql_try "$ez" "create_dir_${host_tag}_${dir_name}" " BEGIN EXECUTE IMMEDIATE 'CREATE OR REPLACE DIRECTORY ${dir_name} AS ''${dir_path}'''; BEGIN EXECUTE IMMEDIATE 'GRANT READ,WRITE ON DIRECTORY ${dir_name} TO PUBLIC'; EXCEPTION WHEN OTHERS THEN NULL; END; END; / " return $? } validate_directory_on_db_try() { local ez="$1" tag="$2" dir_name="${3:-$COMMON_DIR_NAME}" dir_name="$(echo "$dir_name" | tr '[:lower:]' '[:upper:]')" local logtag="dircheck_${tag}" debug "VALIDATE DIRECTORY ${dir_name} on ${tag} (${ez})" run_sql_try "$ez" "$logtag" " SET SERVEROUTPUT ON DECLARE v_cnt PLS_INTEGER := 0; v_path VARCHAR2(4000); BEGIN SELECT COUNT(*) INTO v_cnt FROM all_directories WHERE directory_name = UPPER('${dir_name}'); IF v_cnt = 0 THEN DBMS_OUTPUT.PUT_LINE('DIRECTORY_MISSING '||UPPER('${dir_name}')); ELSE SELECT directory_path INTO v_path FROM all_directories WHERE directory_name = UPPER('${dir_name}'); DBMS_OUTPUT.PUT_LINE('DIRECTORY_OK '||UPPER('${dir_name}')||' '||v_path); END IF; EXCEPTION WHEN OTHERS THEN DBMS_OUTPUT.PUT_LINE('DIRECTORY_VALIDATE_ERROR '||SQLCODE||' '||SQLERRM); END; / " return $? 
} # Drop table safely (fatal OK) drop_table_if_exists() { local ez="$1" tname="${2^^}" debug "Drop table if exists: ${tname} on ${ez}" run_sql "$ez" "drop_${tname}_${RUN_ID}" " DECLARE v_cnt PLS_INTEGER; BEGIN SELECT COUNT(*) INTO v_cnt FROM all_tables WHERE owner = SYS_CONTEXT('USERENV','CURRENT_SCHEMA') AND table_name = UPPER('${tname}'); IF v_cnt > 0 THEN BEGIN EXECUTE IMMEDIATE 'DROP TABLE '||UPPER('${tname}')||' PURGE'; EXCEPTION WHEN OTHERS THEN EXECUTE IMMEDIATE 'DROP TABLE '||UPPER('${tname}'); END; END IF; END; / " } # -------------------------- Data Pump Core ------------------------------------ dp_run() { local tool="$1" ez="$2" pf="$3" tag="$4" local client_log="${LOG_DIR}/${tool}_${tag}_${RUN_ID}.client.log" local conn="sys/${SYS_PASSWORD}@${ez} as sysdba" debug "dp_run(${tool}) tag=${tag} parfile=${pf} ez=${ez}" { echo "---- ${tool} environment ----" echo "date: $(date)" echo "host: $(hostname)" echo "ORACLE_HOME: ${ORACLE_HOME:-}" echo "PATH: $PATH" echo "which ${tool}: $(command -v $tool || echo not found)" echo "version:"; $tool -V || true echo "---- parfile (${pf}) ----" [[ -f "$pf" ]] && sed -E 's/(encryption_password=).*/\1*****/I' "$pf" || echo "" } > "$client_log" 2>&1 set +e ( set -o pipefail; $tool "$conn" parfile="$pf" 2>&1 | tee -a "$client_log"; exit ${PIPESTATUS[0]} ) local rc=$? 
set -e [[ $rc -ne 0 ]] && { err "[${tool}] FAILED (rc=$rc) — see ${client_log}"; exit $rc; } ok "[${tool}] SUCCESS — see ${client_log}" } par_common() { local mode="$1" tag="$2" dir_name="$3" local pf="${PAR_DIR}/${tag}_${RUN_ID}.par" debug "par_common(mode=${mode}, tag=${tag}, dir=${dir_name}) -> ${pf}" local server_log="$(basename_safe "${DUMPFILE_PREFIX}_${tag}_${RUN_ID}.log")" { echo "directory=${dir_name}" echo "logfile=${server_log}" echo "logtime=all" echo "parallel=${PARALLEL}" } > "$pf" if [[ "$mode" == "expdp" ]]; then local dump_pat="$(basename_safe "${DUMPFILE_PREFIX}_${tag}_${RUN_ID}_%U.dmp")" { echo "dumpfile=${dump_pat}" echo "compression=${COMPRESSION}" [[ -n "$FLASHBACK_SCN" ]] && echo "flashback_scn=${FLASHBACK_SCN}" [[ -n "$FLASHBACK_TIME" ]] && echo "flashback_time=${FLASHBACK_TIME}" [[ -n "$INCLUDE" ]] && echo "include=${INCLUDE}" [[ -n "$EXCLUDE" ]] && echo "exclude=${EXCLUDE}" [[ "${ESTIMATE_ONLY^^}" == "Y" ]] && echo "estimate_only=Y" [[ -n "$ENCRYPTION_PASSWORD" ]] && { echo "encryption=encrypt_password"; echo "encryption_password=${ENCRYPTION_PASSWORD}"; } [[ -n "$EXPDP_TRACE" ]] && echo "trace=${EXPDP_TRACE}" } >> "$pf" else { echo "table_exists_action=${TABLE_EXISTS_ACTION}" [[ -n "$REMAP_SCHEMA" ]] && echo "remap_schema=${REMAP_SCHEMA}" [[ -n "$REMAP_TABLESPACE" ]] && echo "remap_tablespace=${REMAP_TABLESPACE}" [[ -n "$INCLUDE" ]] && echo "include=${INCLUDE}" [[ -n "$EXCLUDE" ]] && echo "exclude=${EXCLUDE}" [[ -n "$ENCRYPTION_PASSWORD" ]] && echo "encryption_password=${ENCRYPTION_PASSWORD}" [[ -n "$IMPDP_TRACE" ]] && echo "trace=${IMPDP_TRACE}" } >> "$pf" fi echo "$pf" } # Show parfile + confirm before impdp show_and_confirm_parfile() { local pf="$1" echo "----- PARFILE: ${pf} -----" sed -E 's/(encryption_password=).*/\1*****/I' "$pf" echo "---------------------------" local ans read -rp "Proceed with impdp using this parfile? 
[Y/N/X]: " ans case "${ans^^}" in Y) return 0 ;; N) return 1 ;; X) exit 0 ;; *) return 1 ;; esac } # -------------------- Improved value confirmers & content picker --------------- confirm_edit_value() { local label="$1" val="${2:-}" ans="" while true; do if [[ -z "${val// }" ]]; then echo "${label} is currently empty." read -rp "Please enter ${label}: " val continue fi echo "${label}: ${val}" read -rp "Use this value? (Y to accept, N to edit) [Y/N]: " ans case "${ans^^}" in Y) echo "$val"; return 0 ;; N) read -rp "Enter new ${label}: " val ;; *) echo "Please answer Y or N." ;; esac done } choose_content_option() { local choice="" while true; do cat <<'EOS' Choose Export Content: a) METADATA_ONLY (schema/metadata only, no table data) b) FULL (ALL) (metadata + data) x) Exit EOS read -rp "Select [a/b/x]: " choice case "${choice,,}" in a) echo "METADATA_ONLY"; echo "[INFO] You selected CONTENT=METADATA_ONLY"; return 0 ;; b) echo "ALL"; echo "[INFO] You selected CONTENT=ALL"; return 0 ;; x) echo "[INFO] Exit chosen."; exit 0 ;; *) echo "Invalid choice. Please select a, b or x." ;; esac done } # ---------------- Precheck helpers: verify only on the DB that runs the job --- precheck_export_directory() { local def_name="${COMMON_DIR_NAME:-DP_DIR}" local tries=0 while true; do read -rp "Export: DIRECTORY object name to use/create on SOURCE [${def_name}]: " dname local dir_name="$(echo "${dname:-$def_name}" | tr '[:lower:]' '[:upper:]')" local default_path="${EXPORT_DIR_PATH:-${NAS_PATH:-}}" if [[ -z "$default_path" ]]; then echo "Enter absolute OS path on the SOURCE DB server for export dumpfiles (.dmp):" read -rp "Export path: " dir_path else echo "Default export path from conf: ${default_path}" read -rp "Use this export path? [Y to accept, N to enter a different path]: " ans if [[ "${ans^^}" == "N" ]]; then read -rp "Enter export path: " dir_path else dir_path="$default_path" fi fi if [[ -z "$dir_path" ]]; then warn "Export path cannot be empty." 
else create_or_replace_directory "$SRC_EZCONNECT" "$dir_name" "$dir_path" "src" if validate_directory_on_db_try "$SRC_EZCONNECT" "src" "$dir_name"; then ok "SOURCE export DIRECTORY ${dir_name} -> ${dir_path} is ready" EXPORT_DIR_NAME="$dir_name" EXPORT_DIR_PATH="$dir_path" return 0 fi warn "[DEBUG]run_sql_try(tag_dircheck_src) failed to create/validate export DIRECTORY on source." fi tries=$((tries+1)) read -rp "Retry export precheck? [Y=retry / B=back / X=exit]: " r case "${r^^}" in Y) continue ;; B) return 1 ;; X) exit 0 ;; *) return 1 ;; esac done } precheck_import_directory() { local def_name="${COMMON_DIR_NAME:-DP_DIR}" local tries=0 while true; do read -rp "Import: DIRECTORY object name to use/create on TARGET [${def_name}]: " dname local dir_name="$(echo "${dname:-$def_name}" | tr '[:lower:]' '[:upper:]')" echo "Enter absolute OS path on the TARGET DB server for import dumpfiles (.dmp):" read -rp "Import path: " dir_path [[ -z "$dir_path" ]] && { warn "Import path cannot be empty."; } if [[ -n "$dir_path" ]]; then create_or_replace_directory "$TGT_EZCONNECT" "$dir_name" "$dir_path" "tgt" if validate_directory_on_db_try "$TGT_EZCONNECT" "tgt" "$dir_name"; then ok "TARGET import DIRECTORY ${dir_name} -> ${dir_path} is ready" IMPORT_DIR_NAME="$dir_name" IMPORT_DIR_PATH="$dir_path" return 0 fi warn "[DEBUG]run_sql_try(tag_dircheck_tgt) failed to create/validate import DIRECTORY on target." fi tries=$((tries+1)) read -rp "Retry import precheck? [Y=retry / B=back / X=exit]: " r case "${r^^}" in Y) continue ;; B) return 1 ;; X) exit 0 ;; *) return 1 ;; esac done } # ------------------- Export directory prompt (SOURCE only) -------------------- prompt_export_dump_location() { if [[ -n "${EXPORT_DIR_NAME:-}" && -n "${EXPORT_DIR_PATH:-}" ]]; then echo "Export using SOURCE DIRECTORY ${EXPORT_DIR_NAME} -> ${EXPORT_DIR_PATH}" read -rp "Use these? 
[Y to accept / N to change / X to exit]: " ans case "${ans^^}" in Y) return 0 ;; N) ;; X) exit 0 ;; *) ;; esac fi precheck_export_directory } # -------------------- Import directory prompt (TARGET only) ------------------- prompt_import_dump_location() { if [[ -n "${IMPORT_DIR_NAME:-}" && -n "${IMPORT_DIR_PATH:-}" ]]; then echo "Import using TARGET DIRECTORY ${IMPORT_DIR_NAME} -> ${IMPORT_DIR_PATH}" read -rp "Use these? [Y to accept / N to change / X to exit]: " ans case "${ans^^}" in Y) : ;; N) precheck_import_directory || { warn "Setup cancelled."; return 1; } ;; X) exit 0 ;; *) : ;; esac else precheck_import_directory || { warn "Setup cancelled."; return 1; } fi echo "Enter dumpfile pattern or list (impdp format; e.g., dumpfile%U.dmp or f1.dmp,f2.dmp)" echo "(Tip: just the filename pattern, not a path — DIRECTORY controls the path)" read -rp "Dumpfile(s): " IMPORT_DUMPFILE_PATTERN [[ -z "$IMPORT_DUMPFILE_PATTERN" ]] && { warn "Dumpfile pattern cannot be empty."; return 1; } return 0 } # -------------------------- CONTENT selector ---------------------------------- choose_content_option_wrapper() { # kept for backward compatibility; calls choose_content_option choose_content_option } # ------------------------------ EXPORT MENUS ---------------------------------- get_nonmaintained_schemas() { local pred="" if [[ -n "$SKIP_SCHEMAS" ]]; then IFS=',' read -r -a arr <<< "$SKIP_SCHEMAS" for s in "${arr[@]}"; do s="$(echo "$s" | awk '{$1=$1;print}')" [[ -z "$s" ]] && continue pred+=" AND UPPER(username) NOT LIKE '${s^^}'" done fi run_sql "$SRC_EZCONNECT" "list_nonmaint_users_${RUN_ID}" " SET PAGES 0 FEEDBACK OFF HEADING OFF WITH base AS ( SELECT username FROM dba_users WHERE oracle_maintained='N'${pred} ) SELECT LISTAGG(username, ',') WITHIN GROUP (ORDER BY username) FROM base; / " awk 'NF{line=$0} END{print line}' "${LOG_DIR}/list_nonmaint_users_${RUN_ID}.log" } get_nonmaintained_schemas_tgt() { local pred="" if [[ -n "$SKIP_SCHEMAS" ]]; then IFS=',' read -r -a arr 
<<< "$SKIP_SCHEMAS" for s in "${arr[@]}"; do s="$(echo "$s" | awk '{$1=$1;print}')" [[ -z "$s" ]] && continue pred+=" AND UPPER(username) NOT LIKE '${s^^}'" done fi run_sql "$TGT_EZCONNECT" "tgt_nonmaint_users_${RUN_ID}" " SET PAGES 0 FEEDBACK OFF HEADING OFF WITH base AS ( SELECT username FROM dba_users WHERE oracle_maintained='N'${pred} ) SELECT LISTAGG(username, ',') WITHIN GROUP (ORDER BY username) FROM base; / " awk 'NF{line=$0} END{print line}' "${LOG_DIR}/tgt_nonmaint_users_${RUN_ID}.log" } exp_full_menu() { while true; do cat <<'EOS' Export FULL (choose content): 1) metadata_only (CONTENT=METADATA_ONLY) 2) full (CONTENT=ALL) 3) Back X) Exit EOS read -rp "Choose: " c case "$c" in 1) prompt_export_dump_location || { warn "Setup cancelled."; continue; } pf=$(par_common expdp "exp_full_meta" "$EXPORT_DIR_NAME") { echo "full=Y"; echo "content=METADATA_ONLY"; } >> "$pf" dp_run expdp "$SRC_EZCONNECT" "$pf" "exp_full_meta" ;; 2) prompt_export_dump_location || { warn "Setup cancelled."; continue; } pf=$(par_common expdp "exp_full_all" "$EXPORT_DIR_NAME") { echo "full=Y"; echo "content=ALL"; } >> "$pf" dp_run expdp "$SRC_EZCONNECT" "$pf" "exp_full_all" ;; 3) break ;; X|x) exit 0 ;; *) warn "Invalid choice" ;; esac done } exp_schemas_menu() { while true; do cat <<'EOS' Export SCHEMAS: 1) All accounts (exclude Oracle-maintained; honors SKIP_SCHEMAS) 2) User input or value from conf (SCHEMAS_LIST_EXP) with confirmation 3) Back X) Exit EOS read -rp "Choose: " c case "$c" in 1) prompt_export_dump_location || { warn "Setup cancelled."; continue; } schemas="$(get_nonmaintained_schemas)" schemas="$(confirm_edit_value "Schemas (comma-separated)" "$schemas")" content_choice="$(choose_content_option)" echo "[INFO] Final Schemas: ${schemas}" echo "[INFO] Final CONTENT: ${content_choice}" pf=$(par_common expdp "exp_schemas_auto" "$EXPORT_DIR_NAME") { echo "schemas=${schemas}"; echo "content=${content_choice}"; } >> "$pf" dp_run expdp "$SRC_EZCONNECT" "$pf" "exp_schemas_auto" ;; 
2) prompt_export_dump_location || { warn "Setup cancelled."; continue; } init="${SCHEMAS_LIST_EXP:-}" [[ -z "$init" ]] && read -rp "Enter schemas (comma-separated): " init schemas="$(confirm_edit_value "Schemas (comma-separated)" "$init")" content_choice="$(choose_content_option)" echo "[INFO] Final Schemas: ${schemas}" echo "[INFO] Final CONTENT: ${content_choice}" pf=$(par_common expdp "exp_schemas_user" "$EXPORT_DIR_NAME") { echo "schemas=${schemas}"; echo "content=${content_choice}"; } >> "$pf" dp_run expdp "$SRC_EZCONNECT" "$pf" "exp_schemas_user" ;; 3) break ;; X|x) exit 0 ;; *) warn "Invalid choice" ;; esac done } exp_tablespaces() { prompt_export_dump_location || { warn "Setup cancelled."; return; } read -rp "Tablespaces (comma-separated): " tbs pf=$(par_common expdp "exp_tbs" "$EXPORT_DIR_NAME") echo "transport_tablespaces=${tbs}" >> "$pf" dp_run expdp "$SRC_EZCONNECT" "$pf" "exp_tbs" } exp_tables() { prompt_export_dump_location || { warn "Setup cancelled."; return; } read -rp "Tables (SCHEMA.TAB,SCHEMA2.TAB2,...): " tabs pf=$(par_common expdp "exp_tables" "$EXPORT_DIR_NAME") echo "tables=${tabs}" >> "$pf" dp_run expdp "$SRC_EZCONNECT" "$pf" "exp_tables" } export_menu() { while true; do cat <<'EOS' Export Menu: 1) FULL database (metadata_only / full) 2) SCHEMAS (all non-maintained / user|conf) with content selector 3) TABLESPACES (transport) 4) TABLES 5) Back X) Exit EOS read -rp "Choose: " c case "$c" in 1) exp_full_menu ;; 2) exp_schemas_menu ;; 3) exp_tablespaces ;; 4) exp_tables ;; 5) break ;; X|x) exit 0 ;; *) warn "Invalid choice" ;; esac done } # ------------------------------ IMPORT HELPERS -------------------------------- # (impdp parfile builder kept separate to ensure dumpfile pattern shown & confirmed) par_common_imp_with_dump() { local tag="$1" local pf="${PAR_DIR}/${tag}_${RUN_ID}.par" debug "Creating impdp parfile: ${pf}" local server_log="$(basename_safe "${DUMPFILE_PREFIX}_${tag}_${RUN_ID}.log")" local cleaned_pattern; 
cleaned_pattern="$(reject_if_pathlike "${IMPORT_DUMPFILE_PATTERN}")" { echo "directory=${IMPORT_DIR_NAME}" echo "dumpfile=${cleaned_pattern}" echo "logfile=${server_log}" echo "logtime=all" echo "parallel=${PARALLEL}" echo "table_exists_action=${TABLE_EXISTS_ACTION}" [[ -n "$REMAP_SCHEMA" ]] && echo "remap_schema=${REMAP_SCHEMA}" [[ -n "$REMAP_TABLESPACE" ]] && echo "remap_tablespace=${REMAP_TABLESPACE}" [[ -n "$INCLUDE" ]] && echo "include=${INCLUDE}" [[ -n "$EXCLUDE" ]] && echo "exclude=${EXCLUDE}" [[ -n "$ENCRYPTION_PASSWORD" ]] && echo "encryption_password=${ENCRYPTION_PASSWORD}" [[ -n "$IMPDP_TRACE" ]] && echo "trace=${IMPDP_TRACE}" } > "$pf" echo "$pf" } imp_full_menu() { while true; do cat <<'EOS' Import FULL (choose content): 1) metadata_only (CONTENT=METADATA_ONLY) 2) full (CONTENT=ALL) 3) Back X) Exit EOS read -rp "Choose: " c case "$c" in 1) prompt_import_dump_location || { warn "Setup cancelled."; continue; } pf=$(par_common_imp_with_dump "imp_full_meta"); { echo "full=Y"; echo "content=METADATA_ONLY"; } >> "$pf" if show_and_confirm_parfile "$pf"; then dp_run impdp "$TGT_EZCONNECT" "$pf" "imp_full_meta"; else warn "Cancelled."; fi ;; 2) prompt_import_dump_location || { warn "Setup cancelled."; continue; } pf=$(par_common_imp_with_dump "imp_full_all"); { echo "full=Y"; echo "content=ALL"; } >> "$pf" if show_and_confirm_parfile "$pf"; then dp_run impdp "$TGT_EZCONNECT" "$pf" "imp_full_all"; else warn "Cancelled."; fi ;; 3) break ;; X|x) exit 0 ;; *) warn "Invalid choice" ;; esac done } imp_schemas_menu() { while true; do cat <<'EOS' Import SCHEMAS: 1) All accounts (exclude Oracle-maintained; honors SKIP_SCHEMAS) 2) User input or value from conf (SCHEMAS_LIST_IMP / SCHEMAS_LIST_EXP) with confirmation 3) Back X) Exit EOS read -rp "Choose: " c case "$c" in 1) prompt_import_dump_location || { warn "Setup cancelled."; continue; } schemas="$(get_nonmaintained_schemas_tgt)" schemas="$(confirm_edit_value "Schemas to import (comma-separated)" "$schemas")" echo 
"[INFO] Final Schemas to import: ${schemas}" pf=$(par_common_imp_with_dump "imp_schemas_auto"); { echo "schemas=${schemas}"; echo "content=ALL"; } >> "$pf" if show_and_confirm_parfile "$pf"; then dp_run impdp "$TGT_EZCONNECT" "$pf" "imp_schemas_auto"; else warn "Cancelled."; fi ;; 2) prompt_import_dump_location || { warn "Setup cancelled."; continue; } init="${SCHEMAS_LIST_IMP:-${SCHEMAS_LIST_EXP:-}}" [[ -z "$init" ]] && read -rp "Enter schemas (comma-separated): " init schemas="$(confirm_edit_value "Schemas to import (comma-separated)" "$init")" echo "[INFO] Final Schemas to import: ${schemas}" pf=$(par_common_imp_with_dump "imp_schemas_user"); { echo "schemas=${schemas}"; echo "content=ALL"; } >> "$pf" if show_and_confirm_parfile "$pf"; then dp_run impdp "$TGT_EZCONNECT" "$pf" "imp_schemas_user"; else warn "Cancelled."; fi ;; 3) break ;; X|x) exit 0 ;; *) warn "Invalid choice" ;; esac done } imp_tablespaces() { prompt_import_dump_location || { warn "Setup cancelled."; return; } read -rp "Transported tablespaces (comma-separated): " tbs pf=$(par_common_imp_with_dump "imp_tbs") echo "transport_tablespaces=${tbs}" >> "$pf" if show_and_confirm_parfile "$pf"; then dp_run impdp "$TGT_EZCONNECT" "$pf" "imp_tbs"; else warn "Cancelled."; fi } imp_tables() { prompt_import_dump_location || { warn "Setup cancelled."; return; } read -rp "Tables (SCHEMA.TAB,SCHEMA2.TAB2,...): " tabs pf=$(par_common_imp_with_dump "imp_tables") echo "tables=${tabs}" >> "$pf" if show_and_confirm_parfile "$pf"; then dp_run impdp "$TGT_EZCONNECT" "$pf" "imp_tables"; else warn "Cancelled."; fi } import_cleanup_menu() { while true; do cat < ${out}" sqlplus -s "$conn" <"$out" 2>"${out}.log" SET LONG 1000000 LONGCHUNKSIZE 1000000 LINES 32767 PAGES 0 TRIMSPOOL ON TRIMOUT ON FEEDBACK OFF VERIFY OFF HEADING OFF ECHO OFF SET DEFINE OFF BEGIN DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM, 'STORAGE', FALSE); DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM, 
'SEGMENT_ATTRIBUTES', FALSE); DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM, 'CONSTRAINTS', TRUE); DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM, 'REF_CONSTRAINTS', TRUE); DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM, 'OID', FALSE); DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM, 'SQLTERMINATOR', TRUE); DBMS_METADATA.SET_TRANSFORM_PARAM(DBMS_METADATA.SESSION_TRANSFORM, 'PRETTY', TRUE); END; / ${body} EXIT SQL if grep -qi "ORA-" "${out}.log"; then err "DDL extract error in $(basename "$out")" tail -n 50 "${out}.log" | mask_pwd | sed 's/^/ /' return 1 fi ok "DDL file created: $out" } ddl_users() { local f="${DDL_DIR}/01_users_${RUN_ID}.sql"; ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('USER', username) FROM dba_users WHERE oracle_maintained = 'N' ORDER BY username; "; } ddl_profiles() { local f="${DDL_DIR}/02_profiles_${RUN_ID}.sql"; ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('PROFILE', profile) FROM (SELECT DISTINCT profile FROM dba_profiles ORDER BY 1); "; } ddl_roles() { local f="${DDL_DIR}/03_roles_${RUN_ID}.sql"; ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('ROLE', role) FROM dba_roles WHERE NVL(oracle_maintained,'N')='N' ORDER BY role; "; } ddl_privs_to_roles() { local f="${DDL_DIR}/04_sys_and_role_grants_${RUN_ID}.sql"; ddl_spool "$f" " SELECT 'GRANT '||privilege||' TO '||grantee||CASE WHEN admin_option='YES' THEN ' WITH ADMIN OPTION' ELSE '' END||';' FROM dba_sys_privs WHERE grantee NOT IN (SELECT username FROM dba_users WHERE oracle_maintained='Y') AND grantee NOT IN (SELECT role FROM dba_roles WHERE oracle_maintained='Y') UNION ALL SELECT 'GRANT '||granted_role||' TO '||grantee||CASE WHEN admin_option='YES' THEN ' WITH ADMIN OPTION' ELSE '' END||';' FROM dba_role_privs WHERE grantee NOT IN (SELECT username FROM dba_users WHERE oracle_maintained='Y') AND granted_role NOT IN (SELECT role FROM dba_roles WHERE oracle_maintained='Y') ORDER BY 1; "; } ddl_sysprivs_to_users() { 
local f="${DDL_DIR}/05_user_obj_privs_${RUN_ID}.sql"; ddl_spool "$f" " WITH src AS ( SELECT grantee, owner, table_name, privilege, grantable, grantor FROM dba_tab_privs WHERE grantee <> 'PUBLIC' AND grantee NOT IN (SELECT username FROM dba_users WHERE oracle_maintained='Y') AND grantee NOT IN (SELECT role FROM dba_roles WHERE oracle_maintained='Y') AND grantee NOT LIKE 'C##%' ) SELECT 'GRANT '||privilege||' ON '||owner||'.\"'||table_name||'\" TO '||grantee|| DECODE(grantable,'YES',' WITH GRANT OPTION','')||' /* grantor '||grantor||' */;' FROM src ORDER BY grantee, owner, table_name, privilege; "; } ddl_sequences_all_users() { local f="${DDL_DIR}/06_sequences_${RUN_ID}.sql"; ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('SEQUENCE', sequence_name, owner) FROM dba_sequences WHERE owner IN (SELECT username FROM dba_users WHERE oracle_maintained='N') ORDER BY owner, sequence_name; "; } ddl_public_synonyms() { local f="${DDL_DIR}/07_public_synonyms_${RUN_ID}.sql"; ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('SYNONYM', synonym_name, 'PUBLIC') FROM dba_synonyms WHERE owner='PUBLIC' ORDER BY synonym_name; "; } ddl_private_synonyms_all_users() { local f="${DDL_DIR}/08_private_synonyms_${RUN_ID}.sql"; ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('SYNONYM', synonym_name, owner) FROM dba_synonyms WHERE owner <> 'PUBLIC' AND owner IN (SELECT username FROM dba_users WHERE oracle_maintained='N') ORDER BY owner, synonym_name; "; } to_inlist_upper() { local csv="$1" out="" tok IFS=',' read -r -a arr <<< "$csv" for tok in "${arr[@]}"; do tok="$(echo "$tok" | awk '{$1=$1;print}')" [[ -z "$tok" ]] && continue tok="${tok^^}" out+="${out:+,}'${tok}'" done printf "%s" "$out" } ddl_all_ddls_all_users() { local f="${DDL_DIR}/09_all_ddls_${RUN_ID}.sql" local types_clause; types_clause="$(to_inlist_upper "TABLE,INDEX,VIEW,SEQUENCE,TRIGGER,FUNCTION,PROCEDURE,PACKAGE,PACKAGE_BODY,MATERIALIZED_VIEW,TYPE,SYNONYM")" ddl_spool "$f" " WITH owners AS ( SELECT username AS owner FROM dba_users WHERE 
oracle_maintained='N' ), objs AS ( SELECT owner, object_type, object_name FROM dba_objects WHERE owner IN (SELECT owner FROM owners) AND object_type IN (${types_clause}) AND object_name NOT LIKE 'BIN$%%' AND temporary='N' ) SELECT DBMS_METADATA.GET_DDL(object_type, object_name, owner) FROM objs ORDER BY owner, object_type, object_name; "; } ddl_tablespaces() { local f="${DDL_DIR}/10_tablespaces_${RUN_ID}.sql" local skip_clause; skip_clause="$(to_inlist_upper "$SKIP_TABLESPACES")" ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('TABLESPACE', tablespace_name) FROM dba_tablespaces WHERE UPPER(tablespace_name) NOT IN (${skip_clause}) ORDER BY tablespace_name; "; } ddl_role_grants_to_users() { local f="${DDL_DIR}/11_role_grants_to_users_${RUN_ID}.sql"; ddl_spool "$f" " WITH u AS (SELECT username FROM dba_users WHERE oracle_maintained='N'), r AS ( SELECT grantee AS username, LISTAGG(role, ',') WITHIN GROUP (ORDER BY role) AS roles FROM ( SELECT grantee, granted_role AS role FROM dba_role_privs WHERE default_role='YES' ) GROUP BY grantee ) SELECT 'ALTER USER '||username||' DEFAULT ROLE '||NVL(roles, 'ALL')||';' FROM u LEFT JOIN r USING (username) ORDER BY username; "; } ddl_directories() { local f="${DDL_DIR}/13_directories_${RUN_ID}.sql"; ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('DIRECTORY', directory_name) FROM (SELECT DISTINCT directory_name FROM dba_directories ORDER BY 1); "; } ddl_db_links_by_owner() { read -rp "Enter owner for DB links (schema name): " owner owner="${owner^^}" local f="${DDL_DIR}/14_db_links_${owner}_${RUN_ID}.sql" ddl_spool "$f" " SELECT DBMS_METADATA.GET_DDL('DB_LINK', db_link, owner) FROM dba_db_links WHERE owner = UPPER('${owner}') ORDER BY db_link; " warn "Note: DB link passwords may be masked/omitted depending on version/security." 
}

# -------------------- DDL extraction sub-menu ---------------------------------
# Loops until the user picks B (back) or X (exit); each numeric choice calls
# the matching ddl_* extraction helper defined above.
ddl_menu_wrapper() {
  while true; do
    cat <<'EOS'
DDL Extraction (Source DB):
 1) USERS (exclude Oracle-maintained)
 2) PROFILES
 3) ROLES (exclude Oracle-maintained)
 4) PRIVILEGES -> ROLES [system + role grants]
 5) OBJECT PRIVS -> USERS [from DBA_TAB_PRIVS]
 6) SEQUENCES for USERS (exclude Oracle-maintained)
 7) PUBLIC SYNONYMS
 8) PRIVATE SYNONYMS for USERS (exclude Oracle-maintained)
 9) ALL OBJECT DDLs for USERS (exclude Oracle-maintained) [heavy]
10) TABLESPACE DDLs (skip system/temp/undo)
11) DEFAULT ROLES per USER (ALTER USER DEFAULT ROLE ...)
12) DIRECTORY OBJECTS
13) DB LINKS by OWNER (prompt)
 B) Back
 X) Exit
EOS
    read -rp "Choose: " c
    case "${c^^}" in
      1) ddl_users ;;
      2) ddl_profiles ;;
      3) ddl_roles ;;
      4) ddl_privs_to_roles ;;
      5) ddl_sysprivs_to_users ;;
      6) ddl_sequences_all_users ;;
      7) ddl_public_synonyms ;;
      8) ddl_private_synonyms_all_users ;;
      9) ddl_all_ddls_all_users ;;
      10) ddl_tablespaces ;;
      11) ddl_role_grants_to_users ;;
      12) ddl_directories ;;
      13) ddl_db_links_by_owner ;;
      B) break ;;
      X) exit 0 ;;
      *) warn "Invalid choice" ;;
    esac
  done
}

# ----------------------- COMPARE helpers & menus -------------------------------

# Create a local directory or die; used before spooling compare CSVs.
ensure_local_dir() { mkdir -p "$1" || { echo "Cannot create $1"; exit 1; }; }

# Normalize a spooled CSV for set comparison: strip CRs, drop blank lines,
# sort and de-duplicate. A missing input yields an empty output file.
normalize_sorted() {
  local in="$1" out="$2"
  if [[ -f "$in" ]]; then
    tr -d '\r' < "$in" | awk 'NF' | sort -u > "$out"
  else
    : > "$out"
  fi
}

# Escape &, < and > for safe embedding of data lines inside HTML.
html_escape_stream() { sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g'; }

# Append a two-column "Only in Source / Only in Target" HTML section to $html,
# built from the set difference of the two normalized CSVs.
# NOTE(review): the original HTML markup was stripped by extraction; the table
# structure below is reconstructed from the surviving header text — confirm
# against the pre-corruption source if available.
emit_set_delta_html() {
  local title="$1" left_csv="$2" right_csv="$3" html="$4"
  local L R
  L="$(mktemp)"; R="$(mktemp)"
  normalize_sorted "$left_csv" "$L"
  normalize_sorted "$right_csv" "$R"
  {
    echo "<h2>${title}</h2>"
    echo "<table border=1 cellspacing=0 cellpadding=4>"
    echo "<tr><th>Only in Source</th><th>Only in Target</th></tr>"
    echo "<tr><td valign=top><pre>"
    comm -23 "$L" "$R" | html_escape_stream      # lines present only in source
    echo "</pre></td><td valign=top><pre>"
    comm -13 "$L" "$R" | html_escape_stream      # lines present only in target
    echo "</pre></td></tr>"
    echo "</table>"
  } >> "$html"
  rm -f "$L" "$R"
}

# Append a Table/Source/Target/Delta HTML section for rowcount files in
# "table|count" format; rows that match on both sides are suppressed.
# NOTE(review): markup reconstructed (see emit_set_delta_html).
emit_rowcount_delta_html() {
  local title="$1" left_csv="$2" right_csv="$3" html="$4"
  local L R
  L="$(mktemp)"; R="$(mktemp)"
  normalize_sorted "$left_csv" "$L"
  normalize_sorted "$right_csv" "$R"
  {
    echo "<h2>${title}</h2>"
    echo "<table border=1 cellspacing=0 cellpadding=4>"
    echo "<tr><th>Table</th><th>Source</th><th>Target</th><th>Delta</th></tr>"
    awk -F'|' '
      NR==FNR { l[$1]=$2; next }
      { r[$1]=$2 }
      END{
        for (k in l) keys[k]=1
        for (k in r) keys[k]=1
        PROCINFO["sorted_in"]="@ind_str_asc"   # gawk-only; ordering is best-effort elsewhere
        for (k in keys){
          ls=(k in l)?l[k]:""; rs=(k in r)?r[k]:""
          delta="SAME"
          if (ls=="" && rs!="") delta="ONLY_IN_TARGET"
          else if (ls!="" && rs=="") delta="ONLY_IN_SOURCE"
          else if (ls!=rs) delta="DIFF"
          if (delta!="SAME"){
            gsub(/&/,"\\&amp;",k); gsub(/</,"\\&lt;",k); gsub(/>/,"\\&gt;",k)
            printf "<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>\n", k, ls, rs, delta
          }
        }
      }' "$L" "$R"
    echo "</table>"
  } >> "$html"
  rm -f "$L" "$R"
}

# LOCAL/JUMPER (no NAS) -- each snapshot_* spools one pipe-delimited CSV from
# the given EZConnect DB; $who tags the file (src/tgt), $out is the spool path.

# "type|name|status" of all non-temporary, non-recyclebin objects of a schema.
snapshot_objects_local() {
  local ez="$1" who="$2" schema="${3^^}" out="${4}"
  run_sql_spool_local "$ez" "snap_${who}_objects_${schema}" "$out" "SET COLSEP '|'
SELECT object_type||'|'||object_name||'|'||status FROM dba_objects WHERE owner=UPPER('${schema}') AND temporary='N' AND object_name NOT LIKE 'BIN$%' ORDER BY object_type, object_name;
"
}

# "table|rowcount" per table: exact COUNT(1) when EXACT_ROWCOUNT=Y (slow,
# errors reported as #ERR#), otherwise optimizer statistics (-1 = no stats).
snapshot_rowcounts_local() {
  local ez="$1" who="$2" schema="${3^^}" out="${4}"
  if [[ "${EXACT_ROWCOUNT^^}" == "Y" ]]; then
    run_sql_spool_local "$ez" "snap_${who}_rowcnt_exact_${schema}" "$out" "SET SERVEROUTPUT ON SIZE UNLIMITED
DECLARE
  v_cnt NUMBER;
BEGIN
  FOR t IN (SELECT table_name FROM dba_tables WHERE owner=UPPER('${schema}') ORDER BY table_name) LOOP
    BEGIN
      EXECUTE IMMEDIATE 'SELECT COUNT(1) FROM '||UPPER('${schema}')||'.\"'||t.table_name||'\"' INTO v_cnt;
      DBMS_OUTPUT.PUT_LINE(t.table_name||'|'||v_cnt);
    EXCEPTION WHEN OTHERS THEN
      DBMS_OUTPUT.PUT_LINE(t.table_name||'|#ERR#');
    END;
  END LOOP;
END;
/
"
  else
    run_sql_spool_local "$ez" "snap_${who}_rowcnt_stats_${schema}" "$out" "SET COLSEP '|'
SELECT t.table_name||'|'||NVL(s.num_rows,-1) FROM dba_tables t LEFT JOIN dba_tab_statistics s ON s.owner=t.owner AND s.table_name=t.table_name AND s.object_type='TABLE' WHERE t.owner=UPPER('${schema}') ORDER BY t.table_name;
"
  fi
}

# "privilege|admin_option" system privileges granted to the schema.
snapshot_sys_privs_local() {
  local ez="$1" who="$2" schema="${3^^}" out="${4}"
  run_sql_spool_local "$ez" "snap_${who}_sysprivs_${schema}" "$out" "SET COLSEP '|'
SELECT privilege||'|'||admin_option FROM dba_sys_privs WHERE grantee=UPPER('${schema}') ORDER BY privilege;
"
}

# "role|admin_option|default_role" roles granted to the schema.
snapshot_role_privs_local() {
  local ez="$1" who="$2" schema="${3^^}" out="${4}"
  run_sql_spool_local "$ez" "snap_${who}_roleprivs_${schema}" "$out" "SET COLSEP '|'
SELECT granted_role||'|'||admin_option||'|'||default_role FROM dba_role_privs WHERE grantee=UPPER('${schema}') ORDER BY granted_role;
"
}

# Object privileges granted ON the schema's objects (body continues on the
# next source line, which supplies the spool call and the closing brace).
snapshot_tabprivs_on_local() {
  local ez="$1" who="$2" schema="${3^^}" out="${4}"
run_sql_spool_local "$ez" "snap_${who}_tabprivs_on_${schema}" "$out" " SET COLSEP '|' SELECT owner||'|'||table_name||'|'||grantee||'|'||privilege||'|'||grantable||'|'||grantor FROM dba_tab_privs WHERE owner=UPPER('${schema}') AND grantee <> 'PUBLIC' ORDER BY owner, table_name, grantee, privilege; "; } snapshot_tabprivs_to_local() { local ez="$1" who="$2" schema="${3^^}" out="${4}" run_sql_spool_local "$ez" "snap_${who}_tabprivs_to_${schema}" "$out" " SET COLSEP '|' SELECT owner||'|'||table_name||'|'||grantee||'|'||privilege||'|'||grantable||'|'||grantor FROM dba_tab_privs WHERE grantee=UPPER('${schema}') ORDER BY owner, table_name, privilege; "; } compare_one_schema_local() { local schema="${1^^}" ensure_local_dir "$LOCAL_COMPARE_DIR" local S="${LOCAL_COMPARE_DIR}" snapshot_objects_local "$SRC_EZCONNECT" "src" "$schema" "${S}/${schema}_src_objects_${RUN_ID}.csv" snapshot_rowcounts_local "$SRC_EZCONNECT" "src" "$schema" "${S}/${schema}_src_rowcounts_${RUN_ID}.csv" snapshot_sys_privs_local "$SRC_EZCONNECT" "src" "$schema" "${S}/${schema}_src_sys_privs_${RUN_ID}.csv" snapshot_role_privs_local "$SRC_EZCONNECT" "src" "$schema" "${S}/${schema}_src_role_privs_${RUN_ID}.csv" snapshot_tabprivs_on_local "$SRC_EZCONNECT" "src" "$schema" "${S}/${schema}_src_obj_privs_on_${RUN_ID}.csv" snapshot_tabprivs_to_local "$SRC_EZCONNECT" "src" "$schema" "${S}/${schema}_src_obj_privs_to_${RUN_ID}.csv" snapshot_objects_local "$TGT_EZCONNECT" "tgt" "$schema" "${S}/${schema}_tgt_objects_${RUN_ID}.csv" snapshot_rowcounts_local "$TGT_EZCONNECT" "tgt" "$schema" "${S}/${schema}_tgt_rowcounts_${RUN_ID}.csv" snapshot_sys_privs_local "$TGT_EZCONNECT" "tgt" "$schema" "${S}/${schema}_tgt_sys_privs_${RUN_ID}.csv" snapshot_role_privs_local "$TGT_EZCONNECT" "tgt" "$schema" "${S}/${schema}_tgt_role_privs_${RUN_ID}.csv" snapshot_tabprivs_on_local "$TGT_EZCONNECT" "tgt" "$schema" "${S}/${schema}_tgt_obj_privs_on_${RUN_ID}.csv" snapshot_tabprivs_to_local "$TGT_EZCONNECT" "tgt" "$schema" 
"${S}/${schema}_tgt_obj_privs_to_${RUN_ID}.csv" local html="${COMPARE_DIR}/compare_local_${schema}_${RUN_ID}.html" ensure_local_dir "$COMPARE_DIR" { echo "Schema Compare (LOCAL) ${schema}" echo "" echo "" echo "

Schema Compare (LOCAL/Jumper): ${schema}

" echo "

Run: ${RUN_ID}
Source: ${SRC_EZCONNECT}
Target: ${TGT_EZCONNECT}
Local: ${LOCAL_COMPARE_DIR}

" } > "$html" emit_set_delta_html "Objects (type|name|status)" \ "${S}/${schema}_src_objects_${RUN_ID}.csv" \ "${S}/${schema}_tgt_objects_${RUN_ID}.csv" \ "$html" emit_rowcount_delta_html "Rowcount differences (table|count)" \ "${S}/${schema}_src_rowcounts_${RUN_ID}.csv" \ "${S}/${schema}_tgt_rowcounts_${RUN_ID}.csv" \ "$html" emit_set_delta_html "System Privileges (priv|admin)" \ "${S}/${schema}_src_sys_privs_${RUN_ID}.csv" \ "${S}/${schema}_tgt_sys_privs_${RUN_ID}.csv" \ "$html" emit_set_delta_html "Role Grants (role|admin|default)" \ "${S}/${schema}_src_role_privs_${RUN_ID}.csv" \ "${S}/${schema}_tgt_role_privs_${RUN_ID}.csv" \ "$html" emit_set_delta_html "Object Privileges ON ${schema} objects (owner|table|grantee|priv|grantable|grantor)" \ "${S}/${schema}_src_obj_privs_on_${RUN_ID}.csv" \ "${S}/${schema}_tgt_obj_privs_on_${RUN_ID}.csv" \ "$html" emit_set_delta_html "Object Privileges TO ${schema} user (owner|table|grantee|priv|grantable|grantor)" \ "${S}/${schema}_src_obj_privs_to_${RUN_ID}.csv" \ "${S}/${schema}_tgt_obj_privs_to_${RUN_ID}.csv" \ "$html" echo "" >> "$html" ok "HTML (LOCAL/Jumper): ${html}" email_inline_html "$html" "${MAIL_SUBJECT_PREFIX} Schema Compare LOCAL - ${schema} - ${RUN_ID}" } compare_many_local() { local list_input="${1:-}" local schemas_list="" if [[ -n "$list_input" ]]; then schemas_list="$list_input" else schemas_list="$(get_nonmaintained_schemas)" [[ -z "$schemas_list" ]] && { warn "No non-maintained schemas found on source."; return 0; } ok "Auto-compare LOCAL mode (source list): ${schemas_list}" fi ensure_local_dir "$COMPARE_DIR" local index="${COMPARE_DIR}/compare_local_index_${RUN_ID}.html" { echo "Schema Compare LOCAL Index ${RUN_ID}" echo "" echo "" echo "

Schema Compare Index (LOCAL/Jumper)

" echo "

Run: ${RUN_ID}
Source: ${SRC_EZCONNECT}
Target: ${TGT_EZCONNECT}

" echo "" } > "$index" local i=0 IFS=',' read -r -a arr <<< "$schemas_list" for s in "${arr[@]}"; do s="$(echo "$s" | awk '{$1=$1;print}')" [[ -z "$s" ]] && continue i=$((i+1)) compare_one_schema_local "$s" local f="compare_local_${s^^}_${RUN_ID}.html" echo "" >> "$index" done echo "
#SchemaReport
${i}${s^^}${f}
" >> "$index" ok "Index HTML (LOCAL): ${index}" email_inline_html "$index" "${MAIL_SUBJECT_PREFIX} Compare LOCAL Index - ${RUN_ID}" } # --- FILE & EXTERNAL compare engines (require NAS for shared files) ----------- # (omitted: code identical to previous version; retained in full earlier section) # For brevity in this response, FILE/EXTERNAL helper functions remain as implemented # in v4al above (snapshot_*_csv, create_exts_for_schema, compare_*_file_mode, # compare_*_sql_external). If you need me to reprint them again verbatim here, # say the word and I’ll paste the entire section. # ------------------------------ Menus ----------------------------------------- export_import_menu() { while true; do cat <<'EOS' Data Pump: 1) Export -> sub menu 2) Import -> sub menu 3) Back X) Exit EOS read -rp "Choose: " c case "$c" in 1) export_menu ;; 2) import_menu ;; 3) break ;; X|x) exit 0 ;; *) warn "Invalid choice" ;; esac done } import_menu() { while true; do cat <<'EOS' Import Menu: 1) FULL (metadata_only / full) 2) SCHEMAS (auto/user list) 3) TABLESPACES (transport) 4) TABLES 5) Cleanup helpers (drop users/objects) [DANGEROUS] 6) Back X) Exit EOS read -rp "Choose: " c case "$c" in 1) imp_full_menu ;; 2) imp_schemas_menu ;; 3) imp_tablespaces ;; 4) imp_tables ;; 5) import_cleanup_menu ;; 6) break ;; X|x) exit 0 ;; *) warn "Invalid choice" ;; esac done } compare_schema_menu() { while true; do cat <<'EOS' Compare Objects (Source vs Target) Engine: A) EXTERNAL tables on TARGET (needs DIRECTORY/NAS) B) FILE mode (CSV + shell diff; needs DIRECTORY/NAS) C) LOCAL/JUMPER (CSV spooled locally; NO NAS, NO ext tables) Actions: 1) One schema (HTML + email) 2) Multiple schemas (ENTER = all non-maintained on source) [HTML + email] 3) Back X) Exit EOS read -rp "Choose engine [A/B/C] or action [1/2/3/X]: " eng eng="${eng^^}" case "$eng" in A) read -rp "Pick action [1=one schema, 2=multiple, 3=back, X=exit]: " c case "${c^^}" in 1) read -rp "Schema name: " s; compare_one_schema_sql_external 
"$s" ;; 2) read -rp "Schema names (comma-separated) or ENTER for all: " list; compare_many_sql_external "${list:-}";; 3) break ;; X) exit 0 ;; *) warn "Invalid choice" ;; esac ;; B) read -rp "Pick action [1=one schema, 2=multiple, 3=back, X=exit]: " c case "${c^^}" in 1) read -rp "Schema name: " s; compare_one_schema_file_mode "$s" ;; 2) read -rp "Schema names (comma-separated) or ENTER for all: " list; compare_many_file_mode "${list:-}";; 3) break ;; X) exit 0 ;; *) warn "Invalid choice" ;; esac ;; C) read -rp "Pick action [1=one schema, 2=multiple, 3=back, X=exit]: " c case "${c^^}" in 1) read -rp "Schema name: " s; compare_one_schema_local "$s" ;; 2) read -rp "Schema names (comma-separated) or ENTER for all: " list; compare_many_local "${list:-}";; 3) break ;; X) exit 0 ;; *) warn "Invalid choice" ;; esac ;; 1|2|3) warn "Pick engine first (A/B/C), then action." ;; X) exit 0 ;; *) warn "Unknown engine. Choose A, B, C or X." ;; esac done } show_jobs() { ce "Logs under $LOG_DIR" read -rp "Show DBA_DATAPUMP_JOBS on which DB? (src/tgt/b=back/x=exit): " side case "${side,,}" in src) run_sql "$SRC_EZCONNECT" "jobs_src_${RUN_ID}" "SET LINES 220 PAGES 200 COL owner_name FOR A20 COL job_name FOR A30 COL state FOR A12 SELECT owner_name, job_name, state, operation, job_mode, degree, attached_sessions FROM dba_datapump_jobs ORDER BY 1,2; /" ;; tgt) run_sql "$TGT_EZCONNECT" "jobs_tgt_${RUN_ID}" "SET LINES 220 PAGES 200 COL owner_name FOR A20 COL job_name FOR A30 COL state FOR A12 SELECT owner_name, job_name, state, operation, job_mode, degree, attached_sessions FROM dba_datapump_jobs ORDER BY 1,2; /" ;; b) return ;; x) exit 0 ;; *) warn "Unknown choice";; esac for f in "$LOG_DIR"/*.log; do [[ -f "$f" ]] || continue; echo "---- $(basename "$f") (tail -n 20) ----"; tail -n 20 "$f"; done } cleanup_dirs() { read -rp "Drop DIRECTORY ${COMMON_DIR_NAME} on (src/tgt/both/b=back/x=exit)? 
" side case "${side,,}" in src) run_sql_try "$SRC_EZCONNECT" "drop_dir_src_${RUN_ID}" "BEGIN EXECUTE IMMEDIATE 'DROP DIRECTORY ${COMMON_DIR_NAME}'; EXCEPTION WHEN OTHERS THEN NULL; END; /" || true ;; tgt) run_sql_try "$TGT_EZCONNECT" "drop_dir_tgt_${RUN_ID}" "BEGIN EXECUTE IMMEDIATE 'DROP DIRECTORY ${COMMON_DIR_NAME}'; EXCEPTION WHEN OTHERS THEN NULL; END; /" || true ;; both) run_sql_try "$SRC_EZCONNECT" "drop_dir_src_${RUN_ID}" "BEGIN EXECUTE IMMEDIATE 'DROP DIRECTORY ${COMMON_DIR_NAME}'; EXCEPTION WHEN OTHERS THEN NULL; END; /" || true run_sql_try "$TGT_EZCONNECT" "drop_dir_tgt_${RUN_ID}" "BEGIN EXECUTE IMMEDIATE 'DROP DIRECTORY ${COMMON_DIR_NAME}'; EXCEPTION WHEN OTHERS THEN NULL; END; /" || true ;; b) return ;; x) exit 0 ;; *) warn "No action";; esac } main_menu() { while true; do cat <} PARALLEL=${PARALLEL} COMPRESSION=${COMPRESSION} TABLE_EXISTS_ACTION=${TABLE_EXISTS_ACTION} DDL out: ${DDL_DIR} Compare out: ${COMPARE_DIR} ============================================================= 1) Toggle DEBUG on/off (current: ${DEBUG}) 2) Precheck Export DIRECTORY (on SOURCE only) 3) Precheck Import DIRECTORY (on TARGET only) 4) Data Pump (EXP/IMP) -> sub menu 5) Monitor/Status -> DBA_DATAPUMP_JOBS + tail logs 6) Drop DIRECTORY objects -> cleanup (non-fatal) 7) DDL Extraction (Source DB) -> sub menu 8) Compare Objects -> sub menu (EXTERNAL / FILE / LOCAL) 9) Back X) Exit EOS read -rp "Choose: " choice case "${choice^^}" in 1) toggle_debug ;; 2) precheck_export_directory || true ;; 3) precheck_import_directory || true ;; 4) export_import_menu ;; 5) show_jobs ;; 6) cleanup_dirs ;; 7) ddl_menu_wrapper ;; 8) compare_schema_menu ;; 9) return ;; X) exit 0 ;; *) warn "Invalid choice.";; esac done } # Root loop main_menu