#!/bin/bash
#
# Cal.com Service

PATH=$HOME/.docker/cli-plugins:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
get_appvars
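# get_appvars (defined elsewhere in this installer) is expected to populate the
# variables used below, e.g. DOMAIN, EXTERNALIP, SMTPUSER and ADMINPASS.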

config_calcom() {
  echo -ne "\n* Configuring /federated/apps/calcom container.."
  # spin &
  # SPINPID=$!

  if [ ! -d "/federated/apps/calcom" ]; then
    mkdir -p /federated/apps/calcom
  fi

  CALCOM_SECRET=$(create_password)

  # DOMAIN_ARRAY=(${DOMAIN//./ })
  # DOMAIN_FIRST=${DOMAIN_ARRAY[0]}
  # DOMAIN_LAST=${DOMAIN_ARRAY[1]}

  cat > /federated/apps/calcom/docker-compose.yml <<EOF
services:
  calcom:
    image: calcom/cal.com:\${IMAGE_VERSION}
    container_name: calcom
    # hostname: calcom.$DOMAIN
    restart: always
    ports:
      - "3000:3000"  # Published directly as a workaround: the container reaches its own /api via the external hostname on port 3000 (see extra_hosts and the NEXTAUTH_URL notes in .env)
    networks:
      core:
        ipv4_address: 192.168.0.48
    volumes:
      - ./data/root/federated:/root/federated
    entrypoint: /root/federated/init.sh
    env_file:
      - ./.env
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.calcom.rule=Host(\`calcom.$DOMAIN\`)"
      - "traefik.http.routers.calcom.entrypoints=websecure"
      - "traefik.http.routers.calcom.tls.certresolver=letsencrypt"
      - "traefik.http.services.calcom.loadbalancer.server.port=3000"
    extra_hosts:
      - "calcom.$DOMAIN:$EXTERNALIP"
      - "nextcloud.$DOMAIN:$EXTERNALIP"

networks:
  core:
    external: true
EOF
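
  # Port 3000 is published in addition to the Traefik route because the container
  # resolves calcom.$DOMAIN to the external IP via extra_hosts and must reach its
  # own /api endpoints on :3000 (see the NEXTAUTH_URL notes in the .env below).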

  NEXTAUTH_SECRET=$(openssl rand -base64 32)
  CALENDSO_ENCRYPTION_KEY=$(dd if=/dev/urandom bs=1K count=1 2>/dev/null | md5sum | awk '{ print $1 }')
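  # Both values land in the container's .env below: NEXTAUTH_SECRET as a base64
  # string, CALENDSO_ENCRYPTION_KEY as the 32-character hex digest from md5sum.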

  cat > /federated/apps/calcom/.env <<EOF
IMAGE_VERSION=v4.7.8

# Set this value to 'agree' to accept our license:
# LICENSE: https://github.com/calendso/calendso/blob/main/LICENSE
#
# Summary of terms:
# - The codebase has to stay open source, whether it was modified or not
# - You cannot repackage or sell the codebase
# - Acquire a commercial license to remove these terms by emailing: license@cal.com
NEXT_PUBLIC_LICENSE_CONSENT=agree
LICENSE=
# BASE_URL and NEXT_PUBLIC_APP_URL are both deprecated. Both are replaced with one variable, NEXT_PUBLIC_WEBAPP_URL
# BASE_URL=http://localhost:3000
BASE_URL=https://calcom.$DOMAIN
# NEXT_PUBLIC_APP_URL=http://localhost:3000
NEXT_PUBLIC_APP_URL=https://calcom.$DOMAIN
NEXT_PUBLIC_WEBAPP_URL=https://calcom.$DOMAIN
# saint@fed 20241127 Comment out at this time.
#NEXT_PUBLIC_API_V2_URL=http://calcom.$DOMAIN:5555/api/v2
# saint@fed 20241204 Try this.
NEXT_PUBLIC_API_V2_URL=https://calcom.$DOMAIN/api/v2
# Configure NEXTAUTH_URL manually if needed, otherwise it will resolve to {NEXT_PUBLIC_WEBAPP_URL}/api/auth
# saint@fed 20241127 Instead of using the below we set internal /etc/hosts hostname to the external IP since /api is needed both internally and by client (and pass :3000 through instead)
#NEXTAUTH_URL=http://calcom.$DOMAIN:3000/api/auth
#NEXTAUTH_URL=http://localhost:3000/api/auth
# It is highly recommended that NEXTAUTH_SECRET be overridden with a unique, random value
# Use "openssl rand -base64 32" to generate a key
#NEXTAUTH_SECRET=secret
NEXTAUTH_SECRET=$NEXTAUTH_SECRET
# Encryption key that will be used to encrypt CalDAV credentials, choose a random string, for example with "dd if=/dev/urandom bs=1K count=1 | md5sum"
#CALENDSO_ENCRYPTION_KEY=secret
CALENDSO_ENCRYPTION_KEY=$CALENDSO_ENCRYPTION_KEY
POSTGRES_USER=calcom
POSTGRES_PASSWORD=$CALCOM_SECRET
POSTGRES_DB=calcom
DATABASE_HOST=postgresql.$DOMAIN:5432
DATABASE_URL=postgresql://calcom:$CALCOM_SECRET@postgresql.$DOMAIN:5432/calcom
# Needed to run migrations while using a connection pooler like PgBouncer
# Use the same one as DATABASE_URL if you're not using a connection pooler
DATABASE_DIRECT_URL=postgresql://calcom:$CALCOM_SECRET@postgresql.$DOMAIN:5432/calcom
GOOGLE_API_CREDENTIALS={}
# Set this to '1' if you don't want Cal to collect anonymous usage data
CALCOM_TELEMETRY_DISABLED=1
# Used for the Office 365 / Outlook.com Calendar integration
MS_GRAPH_CLIENT_ID=
MS_GRAPH_CLIENT_SECRET=
ZOOM_CLIENT_ID=
# E-mail settings
# Configures the global From: header whilst sending emails.
EMAIL_FROM=calcom@$DOMAIN
# Configure SMTP settings (@see https://nodemailer.com/smtp/).
EMAIL_SERVER_HOST=mail.$DOMAIN
EMAIL_SERVER_PORT=587
EMAIL_SERVER_USER=$SMTPUSER
EMAIL_SERVER_PASSWORD=$ADMINPASS
NODE_ENV=production
# saint@fed 20241127 Comment out due to bug https://github.com/calcom/cal.com/issues/12201 and https://github.com/calcom/cal.com/issues/13572
# saint@fed 20241127 Commenting out unfortunately triggers another bug though (which appears bearable however): https://github.com/calcom/cal.com/issues/11330
#ALLOWED_HOSTNAMES='"$DOMAIN"'
# saint@fed 20241127 Address potential TLS issues by adding this.
NODE_TLS_REJECT_UNAUTHORIZED=0
# saint@fed 20241127 Enable full debug with the following...
#DEBUG=*
#NEXTAUTH_DEBUG=true
#CALENDSO_LOG_LEVEL=debug
# saint@fed 20241127 Authelia OIDC SSO support
SAML_DATABASE_URL=postgresql://calcom:$CALCOM_SECRET@postgresql.$DOMAIN:5432/calcomsaml
SAML_ADMINS=admin@$DOMAIN
EOF

  chmod 600 /federated/apps/calcom/.env

  # Make data and data/root/federated directories
  mkdir -p /federated/apps/calcom/data/root/federated

  # Write the C source for a small statically linked binary that produces SHA-512 {CRYPT} hashes
  cat > /federated/apps/calcom/data/root/federated/static_crypt.c <<'EOF'
#define _XOPEN_SOURCE
#include <unistd.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <time.h>
#include <ctype.h>
#include <assert.h>
#include <crypt.h>

// Characters allowed in the salt
static const char salt_chars[] =
    "abcdefghijklmnopqrstuvwxyz"
    "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    "0123456789"
    "./";

// Generate a random salt of specified length
void generate_salt(char *salt, size_t length) {
    // Initialize random number generator
    static int seeded = 0;
    if (!seeded) {
        srand(time(NULL));
        seeded = 1;
    }

    // Generate random salt
    const size_t salt_chars_len = strlen(salt_chars);
    for (size_t i = 0; i < length; i++) {
        salt[i] = salt_chars[rand() % salt_chars_len];
    }
    salt[length] = '\0';
}

int main(int argc, char *argv[]) {
    if (argc < 2 || argc > 3) {
        fprintf(stderr, "Usage: %s <password> [salt]\n", argv[0]);
        return 1;
    }

    const char *password = argv[1];
    char salt_buffer[256];
    const char *result;

    if (argc == 2) {
        // Generate random 8-character salt
        char random_salt[9];
        generate_salt(random_salt, 8);
        snprintf(salt_buffer, sizeof(salt_buffer), "$6$%s", random_salt);
    } else {
        // Ensure length of arg plus length of prefix does not exceed size of salt_buffer
        assert(strlen(argv[2]) + 4 < sizeof(salt_buffer));
        // Use provided salt, ensuring it starts with $6$
        if (strncmp(argv[2], "$6$", 3) != 0) {
            snprintf(salt_buffer, sizeof(salt_buffer), "$6$%s", argv[2]);
        } else {
            strncpy(salt_buffer, argv[2], sizeof(salt_buffer)-1);
            salt_buffer[sizeof(salt_buffer)-1] = '\0';  // Ensure null termination
        }
    }

    result = crypt(password, salt_buffer);
    if (result == NULL) {
        fprintf(stderr, "crypt() failed\n");
        return 1;
    }

    printf("%s", result);
    return 0;
}
EOF

  # Build the C source into a statically linked binary using the host's gcc and glibc
  apt update
  apt install -y gcc libcrypt-dev
  gcc -static -Os -o /federated/apps/calcom/data/root/federated/static_crypt /federated/apps/calcom/data/root/federated/static_crypt.c -lcrypt
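
  # Hypothetical sanity check: running the binary with a fixed salt, e.g.
  #   /federated/apps/calcom/data/root/federated/static_crypt 'secret' 'mnWwxZxP'
  # should print the same "$6$mnWwxZxP$..." string as "openssl passwd -6 -salt mnWwxZxP secret".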

  # Add the script that patches SHA-512 {CRYPT} password support into the prebuilt cal.com .js bundles
  cat > /federated/apps/calcom/data/root/federated/modify-hash-crypt-sha512.sh <<'EOOF'
#!/bin/bash

# Function to backup a file before modifying
backup_file() {
    cp "$1" "$1.bak"
    echo "Created backup of $1"
}

DIRS=(
    "/calcom/apps/web/.next/server/chunks"
    "/calcom/apps/web/.next/server/pages/api"
    "/calcom/apps/web/.next/standalone/apps/web/.next/server/chunks"
    "/calcom/apps/web/.next/standalone/apps/web/.next/server/pages/api"
)

# Write our implementation files
cat > /tmp/new_verify.js << 'EEOOLL'
31441:(e,r,s)=>{
    "use strict";
    s.d(r,{G:()=>verifyPassword});
    var t=s(98432),
        a=s(706113);

const { execFileSync } = require('child_process');

function sha512_crypt(password, salt) {
    try {
        // Call our static binary
        const result = execFileSync('/root/federated/static_crypt', [password, salt], {
            encoding: 'utf8',
            stdio: ['pipe', 'pipe', 'pipe']
        });
        // Extract just the hash part (after the salt)
        const parts = result.split('$');
        return parts[parts.length - 1];
    } catch (error) {
        console.error('Crypt process failed:', error.message);
        if (error.stderr) console.error('stderr:', error.stderr.toString());
        throw error;
    }
}

function getSHA1Hash(password) {
    try {
        // Get SHA1 hash in binary form first
        const hash = execFileSync('openssl', ['dgst', '-sha1', '-binary'], {
            input: password,
            encoding: 'binary',
            stdio: ['pipe', 'pipe', 'pipe']
        });

        // Convert the binary hash to base64
        return Buffer.from(hash, 'binary').toString('base64');
    } catch (error) {
        console.error('OpenSSL SHA1 process failed:', error.message);
        throw error;
    }
}

function getSSHAHash(password, salt) {
    try {
        // Create a temporary file containing password+salt
        const input = Buffer.concat([Buffer.from(password), salt]);

        // Get SHA1 hash in binary form, then base64 encode the hash+salt
        const hash = execFileSync('openssl', ['dgst', '-sha1', '-binary'], {
            input,
            stdio: ['pipe', 'pipe', 'pipe']
        });

        // Combine hash and salt, then base64 encode
        const combined = Buffer.concat([hash, salt]);
        return combined.toString('base64');
    } catch (error) {
        console.error('OpenSSL SSHA process failed:', error.message);
        throw error;
    }
}

async function verifyPassword(e, r) {
    if (!e || !r) return false;
    try {
        // SHA-1
        if (r.startsWith("{SHA}")) {
            console.log("\n=== SHA-1 Password Verification ===");
            const hash = r.substring(5); // Remove {SHA}
            const computed = getSHA1Hash(e);
            return hash === computed;
        }

        // SSHA
        if (r.startsWith("{SSHA}")) {
            console.log("\n=== SSHA Password Verification ===");
            const hash = r.substring(6); // Remove {SSHA}
            const decoded = Buffer.from(hash, 'base64');
            const salt = decoded.slice(20); // SHA-1 hash is 20 bytes
            const computed = getSSHAHash(e, salt);
            return hash === computed;
        }

	// SHA-512 Crypt
        if (r.startsWith("{CRYPT}$6$")) {
            console.log("\n=== SHA-512 Password Verification ===");
            const matches = r.match(/^\{CRYPT\}\$6\$([^$]+)\$(.+)$/);
            if (!matches) {
                console.log("Failed to parse password format");
                return false;
            }

            const [, s, h] = matches;
            console.log("Extracted salt:", s);
            console.log("Expected hash:", h);
            const computed = sha512_crypt(Buffer.from(e, "utf8"), Buffer.from(s));
            console.log("Computed hash:", computed);
            console.log("Match result:", h === computed);
            return h === computed;
        }

        // BCrypt
        if (r.startsWith("$2")) {
            console.log("Using bcrypt verification");
            return t.compare(e, r);
        }
        return false;
    } catch (e) {
        console.error("Password verification error:", e);
        return false;
    }
}

}
EEOOLL
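
# The replacement verifyPassword above handles {SHA}, {SSHA}, {CRYPT}$6$ (SHA-512
# crypt via the static binary) and bcrypt ("$2...") hashes; anything else returns false.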

cat > /tmp/new_hash.js << 'EEOOLL'
519771:(e,r,t)=>{
    "use strict";
    t.d(r,{c:()=>hashPassword});
    var a=t(706113);

    function generateSalt(length) {
        const permitted_chars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ./';
        let salt = '';
        const randomBytes = a.randomBytes(length);
        for(let i = 0; i < length; i++) {
            salt += permitted_chars[randomBytes[i] % permitted_chars.length];
        }
        return salt;
    }

    function getTestSalt() {
        return 'mnWwxZxP';
    }

// ----------------
// Test function
function test_crypt(password, salt) {
    const result = sha512_crypt(password, salt);
    const expected = "u54CDuRpOicQTRRfMt9F43OAcwf/Nv4zWDN/tiUwGuT98Zyza23beZ0YQlY.kF4a4Zb8EXkhtTk4xbnt3HUIm.";

    console.log("\n=== Final Results ===");
    console.log(`Generated: ${result}`);
    console.log(`Expected:  ${expected}`);
    console.log(`Match: ${result === expected ? 'YES' : 'NO'}`);

    if (result !== expected) {
        for (let i = 0; i < Math.min(result.length, expected.length); i++) {
            if (result[i] !== expected[i]) {
                console.log(`\nFirst difference at position ${i}:`);
                console.log(`Got:      '${result[i]}'`);
                console.log(`Expected: '${expected[i]}'`);
                console.log("Context:");
                console.log(`Got:      ${result.slice(Math.max(0, i-5), i+6)}`);
                console.log(`Expected: ${expected.slice(Math.max(0, i-5), i+6)}`);
                console.log(`          ${' '.repeat(5)}^`);
                break;
            }
        }
    }
}
// ----------------

const { execFileSync } = require('child_process');

function sha512_crypt(password, salt) {
    try {
        // Call our static binary
        const result = execFileSync('/root/federated/static_crypt', [password, salt], {
            encoding: 'utf8',
            stdio: ['pipe', 'pipe', 'pipe']
        });

        // Extract just the hash part (after the salt)
        const parts = result.split('$');
        return parts[parts.length - 1];

    } catch (error) {
        console.error('Crypt process failed:', error.message);
        if (error.stderr) console.error('stderr:', error.stderr.toString());
        throw error;
    }
}

async function hashPassword(e){

// -------------------------
// test_crypt("CharliePeedee7.", "mnWwxZxP");
// -------------------------

    const s = generateSalt(8);
    console.log("Using fixed test salt:", s);
    const h = sha512_crypt(Buffer.from(e,"utf8"),Buffer.from(s));
    return`{CRYPT}$6$${s}$${h}`;
}

}
EEOOLL
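
# The replacement hashPassword above always emits LDAP-style
# {CRYPT}$6$<salt>$<hash> values, so any password cal.com writes uses the same
# format as the hashes synced in from LDAP.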

NEW_VERIFY_IMPL=$(cat /tmp/new_verify.js)
NEW_HASH_IMPL=$(cat /tmp/new_hash.js)

# Array of patterns to catch all variants of verifyPassword
VERIFY_PATTERNS=(
    '31441:\([a-zA-Z],\s*[a-zA-Z],\s*[a-zA-Z]\)=>\{.*?async function verifyPassword.*?return!1\}\}'
    '31441:\([^{]*\)=>\{[^}]*verifyPassword.*?\}\}'
)

# Pattern for hashPassword
HASH_PATTERN='519771:\([a-zA-Z],\s*[a-zA-Z],\s*[a-zA-Z]\)=>\{.*?async function hashPassword.*?\}\}'
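
# Each pattern is applied with "perl -0pe ... /sg" so a single substitution can
# span newlines inside the minified webpack bundles.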

for DIR in "${DIRS[@]}"; do
    echo "Processing directory: $DIR"

    if [ ! -d "$DIR" ]; then
        echo "Directory $DIR does not exist, skipping..."
        continue
    fi

    # Find and modify verifyPassword implementations
    find "$DIR" -type f -name "*.js" | while read -r file; do
        if grep -q "31441.*verifyPassword" "$file"; then
            echo "Found verifyPassword in $file"
            backup_file "$file"
            # Try each pattern
            for PATTERN in "${VERIFY_PATTERNS[@]}"; do
                echo "Trying pattern: $PATTERN"
                perl -i -0pe 'BEGIN{$r=q['"$NEW_VERIFY_IMPL"']}s/'"$PATTERN"'/$r/sg' "$file"
            done
            echo "Modified verifyPassword in $file"
        fi
    done

    # Find and modify hashPassword implementations
    find "$DIR" -type f -name "*.js" | while read -r file; do
        if grep -q "519771.*hashPassword" "$file"; then
            echo "Found hashPassword in $file"
            backup_file "$file"
            perl -i -0pe 'BEGIN{$r=q['"$NEW_HASH_IMPL"']}s/'"$HASH_PATTERN"'/$r/sg' "$file"
            echo "Modified hashPassword in $file"
        fi
    done
done

# Check for successful modifications
echo "Verifying changes..."
for DIR in "${DIRS[@]}"; do
    if [ ! -d "$DIR" ]; then
        continue
    fi
    find "$DIR" -type f -name "*.js" -exec grep -l "verifyPassword\|hashPassword" {} \;
done

# Remove temporary files
rm -f /tmp/new_verify.js /tmp/new_hash.js

echo "Modifications complete"
EOOF

  chmod 755 /federated/apps/calcom/data/root/federated/modify-hash-crypt-sha512.sh


  # Add the script that blocks password and core profile changes in the cal.com API and web UI
  cat > /federated/apps/calcom/data/root/federated/fix-apiwebui.sh <<'EOOF'
#!/bin/bash

# Messages shown to users for changes that must be made in the Core's Panel
CORE_MESSAGE='To make this change, please do so in your Core'\''s Panel.'
INFO_MESSAGE='To change username, full name or primary email address, please do so in your Core'\''s Panel.'

# Make and set work directory
WORK_DIR="/tmp/federated"
mkdir -p "$WORK_DIR"

# Check if js-beautify is installed
if ! command -v js-beautify &> /dev/null; then
    echo "Installing js-beautify..."
    npm install -g js-beautify
    # Verify installation
    if ! command -v js-beautify &> /dev/null; then
        echo "Failed to install js-beautify. Exiting."
        exit 1
    fi
fi

# First handle the working password block
PASSWORD_FILES=(
    "/calcom/apps/web/.next/server/chunks/99985.js"
    "/calcom/apps/web/.next/standalone/apps/web/.next/server/chunks/99985.js"
)
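
# Note: these chunk filenames are specific to the pinned cal.com build
# (IMAGE_VERSION in the compose .env) and will likely need re-checking after an image upgrade.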

for file in "${PASSWORD_FILES[@]}"; do
    if [ -f "$file" ]; then
        echo "Processing $file for password changes"
        backup="$WORK_DIR/${file}.bak.$(date +%s)"
        mkdir -p "$(dirname "$backup")"
        cp "$file" "$backup"
        sed -i '/changePasswordHandler.*=.*async.*({/a \
            throw new Error("'"${CORE_MESSAGE}"'");' "$file"
        echo "Modified password handler"
    else
        echo "Warning: Password file not found: $file"
    fi
done

# Handle profile API updates
PROFILE_API_FILES=(
    "/calcom/apps/web/.next/server/chunks/85730.js"
    "/calcom/apps/web/.next/standalone/apps/web/.next/server/chunks/85730.js"
)

# Handle profile UI updates
PROFILE_UI_FILES=(
    "/calcom/apps/web/.next/server/app/settings/(settings-layout)/my-account/profile/page.js"
    "/calcom/apps/web/.next/standalone/apps/web/.next/server/app/settings/(settings-layout)/my-account/profile/page.js"
)


# First modify the API file
if [ -f "${PROFILE_API_FILES[0]}" ]; then
    echo "Beautifying profile API handler..."
    
    TIMESTAMP=$(date +%s)
    BEAUTIFIED_API="$WORK_DIR/profile.api.beautified.${TIMESTAMP}.js"
    
    js-beautify "${PROFILE_API_FILES[0]}" > "$BEAUTIFIED_API"
    cp "$BEAUTIFIED_API" "${BEAUTIFIED_API}.original"
    
    echo "Modifying beautified API code..."
    
    # Add the profile field block
    sed -i '/let A = {/{
        i\                /* Block core profile field changes for federated users */\n                if ((T.name !== undefined && T.name !== c.name) || \n                    (T.username !== undefined && T.username !== c.username) || \n                    (T.email !== undefined && T.email !== c.email)) {\n                    throw new U.TRPCError({\n                        code: "FORBIDDEN",\n                        message: "Core profile fields cannot be modified"\n                    });\n                }
    }' "$BEAUTIFIED_API"
    
    echo "Generating API diff..."
    diff -urN "${BEAUTIFIED_API}.original" "$BEAUTIFIED_API" > "${WORK_DIR}/profile.api.changes.${TIMESTAMP}.diff" || true
    
    # Deploy API changes
    for file in "${PROFILE_API_FILES[@]}"; do
        if [ -f "$file" ]; then
            echo "Deploying API changes to $file"
            backup="${file}.bak.${TIMESTAMP}"
            cp "$file" "$backup"
            cp "$BEAUTIFIED_API" "$file"
            echo "Deployed to $file with backup at $backup"
        fi
    done
fi

# Then modify the UI file
if [ -f "${PROFILE_UI_FILES[0]}" ]; then
    echo "Beautifying profile UI code..."
    
    # TIMESTAMP may be unset if the profile API file was missing above
    TIMESTAMP=${TIMESTAMP:-$(date +%s)}
    BEAUTIFIED_UI="$WORK_DIR/profile.ui.beautified.${TIMESTAMP}.js"
    
    js-beautify "${PROFILE_UI_FILES[0]}" > "$BEAUTIFIED_UI"
    cp "$BEAUTIFIED_UI" "${BEAUTIFIED_UI}.original"
    
    echo "Modifying beautified UI code..."
    
    echo "Examining available UI components..."
    echo "Looking for warning/alert components:"
    grep -r "severity.*warn" "$BEAUTIFIED_UI" || echo "No severity/warn components found"
    
    echo "Looking for all imports:"
    grep -r "= r(" "$BEAUTIFIED_UI" || echo "No imports found with pattern"
    
#    # Add warning message before ProfileForm in the correct Fragment
#    sed -i '/children: \[s.jsx(ProfileForm, {/c\children: [(0,s.jsx)(F.b, { severity: "warn", message: "'"${INFO_MESSAGE}"'", className: "mb-4" }), s.jsx(ProfileForm, {' "$BEAUTIFIED_UI"

#    # Add warning at the page wrapper level
#    sed -i '/description: t("profile_description"/i\
#        alert: { severity: "warn", message: "'"${INFO_MESSAGE}"'" },' "$BEAUTIFIED_UI"

#    # Add the info message before profile picture section
#    sed -i '/"profile_picture"/i\
#        }), s.jsx("div", { className: "mb-4 text-sm text-orange-700", children: "'"${INFO_MESSAGE}"'" }), s.jsx("h2", {' "$BEAUTIFIED_UI"

#    # Add warning message at the start of the profile section
#    sed -i '/className: "ms-4",/,/children: \[/{
#        s/children: \[/children: [s.jsx("div", { className: "mb-4 text-sm text-orange-700 font-medium", children: "'"${INFO_MESSAGE}"'" }), /
#    }' "$BEAUTIFIED_UI"

#    # Add warning message at the start of ProfileForm
#    sed -i '/"border-subtle border-x px-4 pb-10 pt-8 sm:px-6",/{
#        n  # Read next line
#        s/children: \[/children: [s.jsx("div", { className: "mb-6 text-sm text-orange-700 font-medium border border-orange-200 bg-orange-50 p-3 rounded", children: "'"${INFO_MESSAGE}"'" }), /
#    }' "$BEAUTIFIED_UI"

    # Modify the page description to include our warning, maintaining object structure
    sed -i '/description: t("profile_description"/{
        N
        N
        c\                        description: `${t("profile_description", { appName: o.iC })}. '"${INFO_MESSAGE}"'`,
    }' "$BEAUTIFIED_UI"

    echo "Generating UI diff..."
    diff -urN "${BEAUTIFIED_UI}.original" "${BEAUTIFIED_UI}" > "${WORK_DIR}/profile.ui.changes.${TIMESTAMP}.diff"
    
    echo "UI changes made:"
    cat "${WORK_DIR}/profile.ui.changes.${TIMESTAMP}.diff"
    
    echo "Checking for syntax errors in modified UI file..."
    node -c "${BEAUTIFIED_UI}" 2>&1 || echo "Warning: Syntax check failed"
    
    if [ $? -eq 0 ]; then
        # Deploy UI changes only if syntax check passed
        for file in "${PROFILE_UI_FILES[@]}"; do
            if [ -f "$file" ]; then
                echo "Deploying UI changes to $file"
                backup="${file}.bak.${TIMESTAMP}"
                cp "$file" "$backup"
                cp "$BEAUTIFIED_UI" "$file"
                echo "Deployed to $file with backup at $backup"
            fi
        done
    else
        echo "Error: Syntax check failed, not deploying UI changes"
    fi
else
    echo "Error: Profile UI file not found"
fi

echo "All modifications complete"
EOOF

  chmod 755 /federated/apps/calcom/data/root/federated/fix-apiwebui.sh

  # Add docker-compose image wrapper startup script
  cat > /federated/apps/calcom/data/root/federated/init.sh <<'EOF'
#!/bin/sh
# This script runs when the container starts

# apt update
# apt -y install vim less

cd /root/federated
/root/federated/modify-hash-crypt-sha512.sh
/root/federated/fix-apiwebui.sh

cd /calcom

# Run the main command or pass control to CMD
# exec "$@"
exec /usr/local/bin/docker-entrypoint.sh /calcom/scripts/start.sh
EOF

  chmod 755 /federated/apps/calcom/data/root/federated/init.sh

  # Ensure the Python packages needed by the sync-calcomusers.py script below are installed
  apt update
  apt install -y python3 python3-psycopg2 python3-ldap3 python3-yaml

  # Create database and user in postgresql
  docker exec postgresql psql -U postgres -c "CREATE USER calcom WITH PASSWORD '$CALCOM_SECRET'" &> /dev/null
  docker exec postgresql psql -U postgres -c "CREATE DATABASE calcom" &> /dev/null
  docker exec postgresql psql -U postgres -c "GRANT ALL PRIVILEGES ON DATABASE calcom TO calcom" &> /dev/null

  # Create SAML database and user in postgresql
  docker exec postgresql psql -U postgres -c "CREATE DATABASE calcomsaml" &> /dev/null
  docker exec postgresql psql -U postgres -c "GRANT ALL PRIVILEGES ON DATABASE calcomsaml TO calcom" &> /dev/null
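
  # calcomsaml backs the SAML_DATABASE_URL configured in the .env above.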

  # Insert admin user
  # docker exec postgresql psql -U postgres -d calcom -c "
  #INSERT INTO \"User\" (username, \"fullName\", email, \"hashedPassword\", role)
  #VALUES ('admin', 'Administrator', 'admin@$DOMAIN', crypt('$ADMINPASS', gen_salt('bf')), 'ADMIN')
  #ON CONFLICT DO NOTHING;"

  # Accept AGPLv3 license
  # docker exec postgresql psql -U postgres -d calcom -c "
  #INSERT INTO \"License\" (type, accepted)
  #VALUES ('AGPLv3', true)
  #ON CONFLICT DO NOTHING;"
  
  # Enable default apps
  # DEFAULT_APPS=("CalDav" "Scheduling" "Availability") # Add more apps as needed
  # for app in "${DEFAULT_APPS[@]}"; do
  #  docker exec postgresql psql -U postgres -d calcom -c "
  #  INSERT INTO \"App\" (name, enabled)
  #  VALUES ('$app', true)
  #  ON CONFLICT DO NOTHING;"
  # done

  # Create the user-sync wrapper and script in /federated/bin (intended to be run from cron)

  cat > /federated/bin/sync-calcomusers <<'EOF'
#!/bin/bash

. /etc/federated
. /federated/apps/panel/.env > /dev/null
. /federated/apps/calcom/.env

export DOMAIN
# export LDAP_BASE_DN
# export LDAP_ADMIN_BIND_PWD

export POSTGRES_USER
export POSTGRES_PASSWORD
export POSTGRES_DATABASE=${POSTGRES_DATABASE:-$POSTGRES_DB}
export POSTGRES_PORT

export LDAP_PORT
export LDAP_ADMIN_BIND_DN
export LDAP_ADMIN_BIND_PWD
export LDAP_BASE_DN="ou=people,$LDAP_BASE_DN"
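
# LDAP_* values are expected to come from the files sourced above (/etc/federated
# and the panel .env); POSTGRES_USER/POSTGRES_PASSWORD come from the calcom .env.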

#echo POSTGRES_PASSWORD $POSTGRES_PASSWORD
#echo LDAP_ADMIN_BIND_PWD $LDAP_ADMIN_BIND_PWD

python3 /federated/bin/sync-calcomusers.py "$@"
EOF

  chmod 755 /federated/bin/sync-calcomusers


  cat > /federated/bin/sync-calcomusers.py <<'EOF'
#!/usr/bin/env python3

import os
import sys
import logging
import yaml
import psycopg2
import argparse
import fcntl
import base64
import tempfile
import re
from ldap3 import Server, Connection, ALL, SUBTREE
from datetime import datetime
from typing import Dict, List, Optional
from dataclasses import dataclass

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('ldap_sync.log'),
        logging.StreamHandler(sys.stdout)
    ]
)
logger = logging.getLogger(__name__)

@dataclass
class LDAPUser:
    username: str
    name: str
    email: str
    password: bytes
    is_admin: bool

class ConfigurationError(Exception):
    """Raised when there's an issue with configuration"""
    pass

class LockError(Exception):
    """Raised when unable to acquire lock file"""
    pass

class ProcessManager:
    def __init__(self, lock_file: str):
        self.lock_file = lock_file
        self.lock_handle = None

    def __enter__(self):
        try:
            # Open or create lock file
            self.lock_handle = open(self.lock_file, 'w')
            
            # Try to acquire exclusive lock
            fcntl.flock(self.lock_handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
            
            # Write PID to lock file
            self.lock_handle.write(str(os.getpid()))
            self.lock_handle.flush()
            
            return self
            
        except IOError:
            if self.lock_handle:
                self.lock_handle.close()
            raise LockError("Another instance is already running")

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.lock_handle:
            # Release lock and close file
            fcntl.flock(self.lock_handle, fcntl.LOCK_UN)
            self.lock_handle.close()
            
            # Remove lock file
            try:
                os.remove(self.lock_file)
            except OSError:
                pass
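
# Sketch of intended usage (lock path illustrative): the sync entry point can wrap
# its work in
#   with ProcessManager("/tmp/sync-calcomusers.lock"):
#       ...
# so overlapping cron runs fail fast with LockError instead of racing each other.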

class DatabaseManager:
    def __init__(self, config: dict, dry_run: bool = False):
        self.config = config['postgres']
        self.dry_run = dry_run
        self.conn = None
        self.cursor = None

    def connect(self):
        """Establish connection to PostgreSQL database"""
        try:
            self.conn = psycopg2.connect(
                host=self.config['host'],
                port=self.config['port'],
                database=self.config['database'],
                user=self.config['user'],
                password=self.config['password']
            )
            self.cursor = self.conn.cursor()
            logger.info("Successfully connected to PostgreSQL")
        except Exception as e:
            logger.error(f"Failed to connect to PostgreSQL: {e}")
            raise

    def close(self):
        """Close database connections"""
        if self.cursor:
            self.cursor.close()
        if self.conn:
            self.conn.close()
            logger.info("PostgreSQL connection closed")

    def commit(self):
        """Commit transactions if not in dry-run mode"""
        if not self.dry_run:
            self.conn.commit()
            logger.info("Changes committed to PostgreSQL")
        else:
            logger.info("Dry run - no changes committed")

    def rollback(self):
        """Rollback transactions"""
        self.conn.rollback()
        logger.info("Changes rolled back")

class LDAPManager:
    def __init__(self, config: dict):
        self.config = config['ldap']

    def connect(self) -> Connection:
        """Establish connection to LDAP server"""
        try:
            server = Server(self.config['host'], 
                          port=int(self.config['port']), 
                          get_info=ALL)
            conn = Connection(server, 
                            self.config['admin_dn'],
                            self.config['admin_password'],
                            auto_bind=False)

            logger.info("Initiating StartTLS...")
            if not conn.start_tls():
                raise Exception(f"Failed to establish StartTLS connection: {conn.result}")

            if not conn.bind():
                raise Exception(f"Failed to bind to LDAP server: {conn.result}")

            logger.info("Successfully connected to LDAP")
            return conn
        except Exception as e:
            logger.error(f"Failed to connect to LDAP: {e}")
            raise

    def fetch_users(self, conn: Connection) -> List[LDAPUser]:
        """Fetch users from LDAP server"""
        try:
            logger.info("Fetching users from LDAP...")
            conn.search(
                self.config['base_dn'],
                "(objectClass=person)",
                search_scope=SUBTREE,
                attributes=['uid', 'cn', 'mail', 'userPassword', 'memberOf']
            )

            users = []
            for entry in conn.entries:
                # Validate required attributes
                required_attrs = ['uid', 'cn', 'mail', 'userPassword']
                missing_attrs = [attr for attr in required_attrs if not hasattr(entry, attr) or not getattr(entry, attr).value]
                
                if missing_attrs:
                    logger.warning(f"Skipping user due to missing attributes {missing_attrs}: {entry.entry_dn}")
                    continue

                user = LDAPUser(
                    username=entry.uid.value,
                    name=entry.cn.value,
                    email=entry.mail.value,
                    password=entry.userPassword.value,
                    is_admin=any("admins" in str(group) for group in entry.memberOf)
                )
                users.append(user)
                logger.info(f"Fetched user: {user.username}, Admin: {'Yes' if user.is_admin else 'No'}")

            logger.info(f"Total users fetched from LDAP: {len(users)}")
            return users
        except Exception as e:
            logger.error(f"Error fetching LDAP users: {e}")
            raise

class CalComManager:
    def __init__(self, db: DatabaseManager, no_delete: bool = False, verbose: bool = False):
        self.db = db
        self.no_delete = no_delete
        self.verbose = verbose

    def _log_sync_actions(self, ldap_users: List[LDAPUser], existing_users: Dict[str, int]):
        """Log detailed sync actions for both dry run and verbose mode"""
        # Users to be added
        new_users = [user for user in ldap_users if user.username not in existing_users]
        if new_users:
            logger.info(f"\nUsers {'to be added' if not self.db.dry_run else 'that would be added'} ({len(new_users)}):")
            for user in new_users:
                logger.info(f"  + {user.username} (Admin: {'Yes' if user.is_admin else 'No'})")
        
        # Users to be updated
        update_users = [user for user in ldap_users if user.username in existing_users]
        if update_users:
            logger.info(f"\nUsers {'to be updated' if not self.db.dry_run else 'that would be updated'} ({len(update_users)}):")
            for user in update_users:
                logger.info(f"  ~ {user.username} (Admin: {'Yes' if user.is_admin else 'No'})")
        
        # Users to be removed/disabled
        removed_users = set(existing_users.keys()) - {user.username for user in ldap_users}
        if removed_users and not self.no_delete:
            logger.info(f"\nUsers {'to be removed' if not self.db.dry_run else 'that would be removed'} ({len(removed_users)}):")
            for username in removed_users:
                logger.info(f"  - {username}")
        elif removed_users and self.no_delete:
            logger.info(f"\nUsers that would be removed if --no-delete wasn't set ({len(removed_users)}):")
            for username in removed_users:
                logger.info(f"  ! {username}")
        
        logger.info("\nSync summary:")
        logger.info(f"  Users to {'be added' if not self.db.dry_run else 'add'}: {len(new_users)}")
        logger.info(f"  Users to {'be updated' if not self.db.dry_run else 'update'}: {len(update_users)}")
        logger.info(f"  Users to {'be removed' if not self.db.dry_run else 'remove'}: {len(removed_users) if not self.no_delete else 0}")
        logger.info("================\n")

    def handle_removed_users(self, existing_usernames: List[str], processed_usernames: set):
        """Delete users that no longer exist in LDAP"""
        removed_users = set(existing_usernames) - processed_usernames
        if removed_users:
            logger.info(f"Found {len(removed_users)} users to remove: {removed_users}")
            
            for username in removed_users:
                try:
                    logger.info(f"Removing user: {username}")
                    
                    # Get user ID
                    self.db.cursor.execute("""
                        SELECT "id" FROM "users"
                        WHERE "username" = %s
                    """, (username,))
                    user_id = self.db.cursor.fetchone()
                    
                    if user_id:
                        user_id = user_id[0]
                        
                        if not self.db.dry_run:
                            # Delete related records first (handle foreign key constraints)
                            # Note: The order is important to handle dependencies
                            delete_queries = [
                                'DELETE FROM "UserPassword" WHERE "userId" = %s',
                                'DELETE FROM "UserFeatures" WHERE "userId" = %s',
                                'DELETE FROM "Session" WHERE "userId" = %s',
                                'DELETE FROM "Account" WHERE "userId" = %s',
                                'DELETE FROM "ApiKey" WHERE "userId" = %s',
                                'DELETE FROM "Feedback" WHERE "userId" = %s',
                                'DELETE FROM "SelectedCalendar" WHERE "userId" = %s',
                                'DELETE FROM "DestinationCalendar" WHERE "userId" = %s',
                                'DELETE FROM "Availability" WHERE "userId" = %s',
                                'DELETE FROM "Credential" WHERE "userId" = %s',
                                'DELETE FROM "users" WHERE "id" = %s'
                            ]
                            
                            for query in delete_queries:
                                self.db.cursor.execute(query, (user_id,))
                            
                            logger.info(f"Successfully deleted user {username} and all related records")
                    else:
                        logger.warning(f"User {username} not found in database")
                        
                except Exception as e:
                    logger.error(f"Error deleting user {username}: {e}")
                    continue

    def sync_users(self, ldap_users: List[LDAPUser]):
        """Synchronize LDAP users with Cal.com database"""
        try:
            logger.info("Syncing LDAP users with PostgreSQL...")
    
            # Get existing users with both username and email
            self.db.cursor.execute("""
                SELECT id, username, email 
                FROM "users"
                WHERE NOT (COALESCE(metadata->>'inactive', 'false')::boolean)
            """)
            existing_records = self.db.cursor.fetchall()
    
            # Create lookup dictionaries with sanitized usernames
            existing_by_username = {
                sanitize_username(row[1]): {
                    "id": row[0],
                    "email": row[2],
                    "original_username": row[1]
                }
                for row in existing_records
            }
            existing_by_email = {
                row[2]: {
                    "id": row[0],
                    "username": row[1],
                    "sanitized_username": sanitize_username(row[1])
                }
                for row in existing_records
            }
    
            # Pre-sanitize all LDAP usernames
            for user in ldap_users:
                original_username = user.username
                sanitized_username = sanitize_username(user.username)
                if sanitized_username != original_username:
                    logger.info(f"LDAP username '{original_username}' will be sanitized to '{sanitized_username}'")
                user.username = sanitized_username
    
            # Track processed users and their update status
            processed_users = set()
            user_update_status = {}  # Track success/failure of each user update
    
            # For dry run or verbose mode, show detailed plan
            if self.db.dry_run or self.verbose:
                self._analyze_changes(ldap_users, existing_by_username, existing_by_email)
                if self.db.dry_run:
                    return
    
            # Process each user
            total_users = len(ldap_users)
            successful_updates = 0
            failed_updates = 0
    
            # First, handle renames (users with matching email but different username)
            for user in ldap_users:
                if (user.email in existing_by_email and 
                    user.username != existing_by_email[user.email]["username"]):
                    try:
                        user_id = existing_by_email[user.email]["id"]
                        old_username = existing_by_email[user.email]["username"]
                        logger.info(f"Renaming user {old_username} back to {user.username}")
                        
                        if not self.db.dry_run:
                            self.db.cursor.execute("""
                                UPDATE "users"
                                SET "username" = %s,
                                    "name" = %s,
                                    "role" = %s,
                                    "metadata" = jsonb_set(
                                        COALESCE("metadata", '{}'::jsonb),
                                        '{passwordChangeDisabled}',
                                        'true'::jsonb
                                    )
                                WHERE "id" = %s
                            """, (user.username, user.name, 
                                 "ADMIN" if user.is_admin else "USER", 
                                 user_id))
                            
                        processed_users.add(user.username)
                        user_update_status[user.username] = True
                        successful_updates += 1
                        
                        # Update our lookup dictionaries (keys are sanitized usernames)
                        old_key = sanitize_username(old_username)
                        if old_key in existing_by_username:
                            existing_by_username[user.username] = existing_by_username.pop(old_key)
                            existing_by_username[user.username]["original_username"] = user.username
                        
                        if self.verbose:
                            logger.info(f"Successfully renamed user {old_username} to {user.username}")
                    except Exception as e:
                        logger.error(f"Error renaming user {old_username} to {user.username}: {e}")
                        user_update_status[user.username] = False
                        failed_updates += 1
                        self.db.conn.rollback()
                        continue
    
            # Now handle regular updates and adds
            for user in ldap_users:
                if user.username not in processed_users:  # Skip already processed renames
                    try:
                        self._sync_single_user(user, existing_by_username)
                        processed_users.add(user.username)
                        user_update_status[user.username] = True
                        successful_updates += 1
                        if self.verbose:
                            logger.info(f"Successfully processed user {user.username}")
                    except Exception as e:
                        logger.error(f"Error syncing user {user.username}: {e}")
                        user_update_status[user.username] = False
                        failed_updates += 1
                        self.db.conn.rollback()
                        continue
    
            logger.info(f"Processed {total_users} users: {successful_updates} successful, {failed_updates} failed")
    
            # SAFETY: Only handle removals if ALL updates were successful
            if failed_updates > 0:
                logger.warning("Skipping user removal due to update failures")
            elif not self.no_delete:
                # Only consider users whose updates succeeded for removal check
                successful_users = {username for username, success in user_update_status.items() if success}
                self.handle_removed_users(existing_by_username.keys(), successful_users)
    
            # Show final summary in verbose mode
            if self.verbose:
                logger.info("\nSync completed")
                logger.info("================")
                self._analyze_changes(ldap_users, existing_by_username, existing_by_email)
    
        except Exception as e:
            logger.error(f"Error in sync_users: {e}")
            raise

    def _get_existing_user_id(self, user: LDAPUser, existing_by_username: Dict, existing_by_email: Dict) -> Optional[int]:
        """
        Determine if user exists by checking both sanitized username and email.
        Returns user ID if found, None if new user.
        """
        # Username is already sanitized in the sync_users method
        if user.username in existing_by_username:
            return existing_by_username[user.username]["id"]
        elif user.email in existing_by_email:
            return existing_by_email[user.email]["id"]
        return None

    def _create_new_user(self, user: LDAPUser, password_hash: str):
        """Create a new user in Cal.com"""
        if not self.db.dry_run:
            self.db.cursor.execute('SELECT MAX(id) FROM "users"')
            max_id = self.db.cursor.fetchone()[0] or 0
            new_user_id = max_id + 1
    
            logger.info(f"Adding new user: {user.username}")
            current_time = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
            
            self.db.cursor.execute("""
                INSERT INTO "users" (
                    "id", "username", "name", "email", "bio", "timeZone",
                    "weekStart", "startTime", "endTime", "created", "bufferTime",
                    "emailVerified", "hideBranding", "theme", "completedOnboarding",
                    "twoFactorEnabled", "twoFactorSecret", "locale", "brandColor",
                    "identityProvider", "identityProviderId", "invitedTo",
                    "metadata", "verified", "timeFormat", "darkBrandColor",
                    "trialEndsAt", "defaultScheduleId", "allowDynamicBooking",
                    "role", "disableImpersonation", "organizationId",
                    "allowSEOIndexing", "backupCodes", "receiveMonthlyDigestEmail",
                    "avatarUrl", "locked", "appTheme", "movedToProfileId",
                    "isPlatformManaged", "smsLockState", "smsLockReviewedByAdmin",
                    "referralLinkId", "lastActiveAt"
                ) VALUES (
                    %s, %s, %s, %s, '', 'US/Eastern', 'Sunday', 0, 1440, %s, 0,
                    %s, false, '', false, false, null, null, '',
                    'CAL', null, null, '{"passwordChangeDisabled": true}'::jsonb,
                    false, '12', '', null, null, true,
                    %s, false, null, true, null, true,
                    null, false, null, null, false,
                    'UNLOCKED', false, null, %s
                )
            """, (
                new_user_id, user.username, user.name, user.email,
                current_time,  # created
                current_time,  # emailVerified
                "ADMIN" if user.is_admin else "USER",
                current_time,  # lastActiveAt
            ))
    
            logger.info(f"USER.PASSWORD {user.password}")
            password_hash = user.password.decode('utf-8')  # Decode bytes to string
            logger.info(f"PASSWORD_HASH {password_hash}")

            self.db.cursor.execute("""
                INSERT INTO "UserPassword" ("hash", "userId")
                VALUES (%s, %s)
            """, (password_hash, new_user_id))

    def _update_existing_user(self, user: LDAPUser, user_id: int, existing_by_username: Dict, existing_by_email: Dict):
        """Update an existing user with consistent username sanitization"""
        current_data = None
        update_type = []
    
        # Username is already sanitized at this point
        if user.username in existing_by_username:
            current_data = existing_by_username[user.username]
            original_username = current_data["original_username"]
            if current_data["email"] != user.email:
                update_type.append(f"email change: {current_data['email']} -> {user.email}")
        elif user.email in existing_by_email:
            current_data = existing_by_email[user.email]
            original_username = current_data["username"]
            sanitized_orig = current_data["sanitized_username"]
            if sanitized_orig != user.username:
                update_type.append(f"username change: {original_username} -> {user.username}")
    
        if not self.db.dry_run:
            self.db.cursor.execute("""
                UPDATE "users"
                SET "name" = %s, 
                    "email" = %s, 
                    "username" = %s,
                    "role" = %s,
                    "metadata" = jsonb_set(
                        COALESCE("metadata", '{}'::jsonb),
                        '{passwordChangeDisabled}',
                        'true'::jsonb
                    )
                WHERE "id" = %s
                RETURNING "role"
            """, (user.name, user.email, user.username, 
                  "ADMIN" if user.is_admin else "USER", user_id))
            
            previous_role = self.db.cursor.fetchone()[0]
            if previous_role != ("ADMIN" if user.is_admin else "USER"):
                update_type.append(f"role change: {previous_role} -> {'ADMIN' if user.is_admin else 'USER'}")
   
            logger.info(f"USER.PASSWORD {user.password}")
            password_hash = user.password.decode('utf-8')  # Decode bytes to string
            logger.info(f"PASSWORD_HASH {password_hash}")

            # Always update password as it might have changed in LDAP
            self.db.cursor.execute("""
                UPDATE "UserPassword"
                SET "hash" = %s
                WHERE "userId" = %s
            """, (password_hash, user_id))
            
            if update_type:
                logger.info(f"Updated user {user.username}: {', '.join(update_type)}")
            else:
                logger.info(f"Updated user {user.username}: password check")

    def _analyze_changes(self, ldap_users: List[LDAPUser], existing_by_username: Dict, existing_by_email: Dict):
        """Analyze and log planned changes"""
        adds = []
        updates = []
        renames = []
        removes = []

        # Check each LDAP user
        for user in ldap_users:
            if user.username in existing_by_username:
                updates.append(user)
            elif user.email in existing_by_email:
                renames.append((existing_by_email[user.email]['username'], user.username, user.email))
            else:
                adds.append(user)

        # Check for removals
        ldap_emails = {user.email for user in ldap_users}
        ldap_usernames = {user.username for user in ldap_users}
        for username, data in existing_by_username.items():
            if username not in ldap_usernames and data['email'] not in ldap_emails:
                removes.append(username)

        # Log the analysis
        logger.info("\nSync plan:")
        logger.info("================")
        
        if adds:
            logger.info(f"\nUsers {'to be added' if not self.db.dry_run else 'that would be added'} ({len(adds)}):")
            for user in adds:
                logger.info(f"  + {user.username} (Admin: {'Yes' if user.is_admin else 'No'})")
        
        if updates:
            logger.info(f"\nUsers {'to be updated' if not self.db.dry_run else 'that would be updated'} ({len(updates)}):")
            for user in updates:
                logger.info(f"  ~ {user.username} (Admin: {'Yes' if user.is_admin else 'No'})")

        if renames:
            logger.info(f"\nUsers {'to be renamed' if not self.db.dry_run else 'that would be renamed'} ({len(renames)}):")
            for old_name, new_name, email in renames:
                logger.info(f"  ~ {old_name} -> {new_name} (Email: {email})")
        
        if removes and not self.no_delete:
            logger.info(f"\nUsers {'to be removed' if not self.db.dry_run else 'that would be removed'} ({len(removes)}):")
            for username in removes:
                logger.info(f"  - {username}")
        
        logger.info("\nSync summary:")
        logger.info(f"  Users to {'be added' if not self.db.dry_run else 'add'}: {len(adds)}")
        logger.info(f"  Users to {'be updated' if not self.db.dry_run else 'update'}: {len(updates)}")
        logger.info(f"  Users to {'be renamed' if not self.db.dry_run else 'rename'}: {len(renames)}")
        logger.info(f"  Users to {'be removed' if not self.db.dry_run else 'remove'}: {len(removes) if not self.no_delete else 0}")
        logger.info("================\n")

    def _validate_password_hash(self, password_hash: str, username: str) -> bool:
        """
        Validate that the password hash is in one of the supported formats.
        Returns True if valid, raises ValueError if not.
        """
        valid_prefixes = ["{CRYPT}", "{SHA}", "{SSHA}"]
        
        # Check if hash starts with any valid prefix
        if not any(password_hash.startswith(prefix) for prefix in valid_prefixes):
            logger.error(f"Invalid password hash format for {username}: {password_hash[:10]}...")
            raise ValueError(f"Password hash must start with one of: {', '.join(valid_prefixes)}")
    
        # Additional validation for {CRYPT} format (should be SHA-512 with salt)
        if password_hash.startswith("{CRYPT}"):
            if not password_hash.startswith("{CRYPT}$6$"):
                logger.error(f"Invalid CRYPT hash format for {username}: not SHA-512")
                raise ValueError("CRYPT hash must be SHA-512 ($6$)")
    
        logger.info(f"Valid password hash format for {username}: {password_hash[:20]}...")
        return True

    def _sync_single_user(self, user: LDAPUser, existing_users: Dict[str, dict]):
        """Sync a single user to Cal.com database"""
        try:
            # Debug the raw password
            logger.info(f"Raw password type for {user.username}: {type(user.password)}")
            logger.info(f"Raw password value: {repr(user.password)}")

            if user.username in existing_users:
                user_id = existing_users[user.username]["id"]
                logger.info(f"Updating existing user: {user.username}")
    
                if not self.db.dry_run:
                    self.db.cursor.execute("""
                        UPDATE "users"
                        SET "name" = %s,
                            "email" = %s,
                            "role" = %s,
                            "metadata" = jsonb_set(
                                COALESCE("metadata", '{}'::jsonb),
                                '{passwordChangeDisabled}',
                                'true'::jsonb
                            )
                        WHERE "id" = %s
                    """, (user.name, user.email,
                         "ADMIN" if user.is_admin else "USER",
                         user_id))
   
                    logger.info(f"USER.PASSWORD {user.password}")
                    password_hash = user.password.decode('utf-8')  # Decode bytes to string
                    logger.info(f"PASSWORD_HASH {password_hash}")

                    self.db.cursor.execute("""
                        UPDATE "UserPassword"
                        SET "hash" = %s
                        WHERE "userId" = %s
                    """, (password_hash, user_id))
    
            else:
                self._create_new_user(user, user.password)
    
        except Exception as e:
            logger.error(f"Error processing user {user.username}: {e}")
            raise

    def set_installation_completed(self):
        """Set Cal.com installation as completed and disable specific features"""
        logger.info("Setting up system features and restrictions...")
        if not self.db.dry_run:
            # First, get or create a system admin user ID for feature assignment
            self.db.cursor.execute("""
                SELECT "id", "username" FROM "users" 
                WHERE "role" = 'ADMIN' 
                ORDER BY "id" ASC 
                LIMIT 1
            """)
            admin = self.db.cursor.fetchone()
            if not admin:
                logger.error("No admin user found for feature assignment")
                return
            admin_id, admin_username = admin
    
            # Add features for signup control and SSO
            self.db.cursor.execute("""
                INSERT INTO "Feature" (
                    "slug", "enabled", "description", "type"
                )
                VALUES 
                    ('disable-signup', true, 'Disable new user registration', 'OPERATIONAL'),
                    ('disable-password-change', true, 'Disable password changes but allow profile updates', 'OPERATIONAL'),
                    ('disable-sso', true, 'Disable Single Sign-On authentication', 'OPERATIONAL'),
                    ('disable-oidc', true, 'Disable OpenID Connect authentication', 'OPERATIONAL')
                ON CONFLICT ("slug") DO UPDATE
                SET "enabled" = true,
                    "updatedAt" = CURRENT_TIMESTAMP
            """)
    
            # Make sure these features are enabled for all users with proper assignment
            self.db.cursor.execute("""
                INSERT INTO "UserFeatures" ("userId", "featureId", "assignedBy", "assignedAt", "updatedAt")
                SELECT u."id", f."slug", %s, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP
                FROM "users" u
                CROSS JOIN "Feature" f
                WHERE f."slug" IN (
                    'disable-signup',
                    'disable-password-change',
                    'disable-sso',
                    'disable-oidc'
                )
                ON CONFLICT ("userId", "featureId") DO UPDATE
                SET "updatedAt" = CURRENT_TIMESTAMP
            """, (admin_username,))  # Note: assignedBy expects username (text), not ID
    
            logger.info("System features configured successfully")

def sanitize_username(username: str) -> str:
    """
    Sanitize username to be Cal.com compatible.
    Must be deterministic and idempotent.
    """
    if not username:
        return 'user'
        
    # Consistent transformation steps in strict order:
    result = username.lower()  # 1. Always lowercase first
    result = re.sub(r'[^a-z0-9_.-]', '_', result)  # 2. Replace invalid chars
    result = re.sub(r'[._-]{2,}', '_', result)     # 3. Collapse multiple special chars
    result = result.strip('._-')                    # 4. Trim special chars from ends
    
    # Ensure non-empty result
    return result if result else 'user'
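
# Illustrative transformations (hypothetical inputs, shown only to document the
# rules above; the function itself is unchanged):
#   sanitize_username('John.Doe!')   -> 'john.doe'
#   sanitize_username('Jane--Smith') -> 'jane_smith'
#   sanitize_username('---')         -> 'user'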

def setup_logging(log_dir: str) -> logging.Logger:
    """Configure logging with rotation and proper permissions"""
    os.makedirs(log_dir, exist_ok=True)
    
    # Set secure permissions on log directory
    os.chmod(log_dir, 0o750)
    
    log_file = os.path.join(log_dir, 'ldap_sync.log')
    
    logger = logging.getLogger('ldap_sync')
    logger.setLevel(logging.INFO)
    
    # File handler with rotation
    from logging.handlers import RotatingFileHandler
    file_handler = RotatingFileHandler(
        log_file,
        maxBytes=10*1024*1024,  # 10MB
        backupCount=5
    )
    file_handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(levelname)s - %(message)s'
    ))
    
    # Set secure permissions on log file
    os.chmod(log_file, 0o640)
    
    # Console handler for errors only
    console_handler = logging.StreamHandler(sys.stderr)
    console_handler.setLevel(logging.ERROR)
    console_handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(levelname)s - %(message)s'
    ))
    
    logger.addHandler(file_handler)
    logger.addHandler(console_handler)
    
    return logger

def load_config(config_path: str) -> dict:
    """
    Load configuration from YAML file with fallback to environment variables.
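
    Expected YAML layout (a minimal sketch inferred from the keys read below;
    the example hosts mirror the environment-variable fallbacks, everything
    else is an illustrative placeholder):

        postgres:
          host: 192.168.0.14
          port: "5432"
          database: calcom
          user: calcom
          password: changeme
        ldap:
          host: 192.168.0.15
          port: "389"
          admin_dn: cn=admin,dc=example,dc=org
          admin_password: changeme
          base_dn: dc=example,dc=org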
    """
    config = {
        'postgres': {},
        'ldap': {}
    }
    
    try:
        # Try to load from YAML first
        if os.path.exists(config_path):
            logger.info(f"Loading config from {config_path}")
            with open(config_path, 'r') as f:
                yaml_config = yaml.safe_load(f)
                if yaml_config:
                    return yaml_config
                
        # Fall back to environment variables
        logger.info("Config file not found or empty, falling back to environment variables")
        
        # PostgreSQL configuration
        config['postgres'] = {
            'host': os.getenv('POSTGRES_HOST', '192.168.0.14'),
            'port': os.getenv('POSTGRES_PORT', '5432'),
            'database': os.getenv('POSTGRES_DATABASE', 'calcom'),
            'user': os.getenv('POSTGRES_USER', 'calcom'),
            'password': os.getenv('POSTGRES_PASSWORD')
        }

        # LDAP configuration
        config['ldap'] = {
            'host': os.getenv('LDAP_HOST', '192.168.0.15'),
            'port': os.getenv('LDAP_PORT', '389'),
            'admin_dn': os.getenv('LDAP_ADMIN_BIND_DN'),
            'admin_password': os.getenv('LDAP_ADMIN_BIND_PWD'),
            'base_dn': os.getenv('LDAP_BASE_DN')
        }

        # Validate required configuration
        missing_vars = []
        
        # Check PostgreSQL required vars
        if not config['postgres']['password']:
            missing_vars.append('POSTGRES_PASSWORD')

        # Check LDAP required vars (report the env var names actually read above)
        for var, env_name in [('admin_dn', 'LDAP_ADMIN_BIND_DN'),
                              ('admin_password', 'LDAP_ADMIN_BIND_PWD'),
                              ('base_dn', 'LDAP_BASE_DN')]:
            if not config['ldap'][var]:
                missing_vars.append(env_name)

        if missing_vars:
            raise ConfigurationError(
                f"Missing required environment variables: {', '.join(missing_vars)}"
            )

        return config

    except Exception as e:
        logger.error(f"Error loading configuration: {e}")
        raise ConfigurationError(f"Failed to load configuration: {e}")

def main():
    """Main execution flow"""
    # Define default paths
    default_config = '/etc/ldap-sync/config.yml'
    default_log_dir = '/federated/logs/ldap-sync'
    lock_file = '/var/run/ldap-sync.pid'
   
    parser = argparse.ArgumentParser(description='LDAP to Cal.com User Sync Tool')
    parser.add_argument('--config', '--conf', dest='config_path', 
                       default=default_config, 
                       help='Path to configuration file')
    parser.add_argument('--log-dir', default=default_log_dir, 
                       help='Directory for log files')
    parser.add_argument('--dry-run', action='store_true', 
                       help='Perform a dry run without making changes')
    parser.add_argument('--no-delete', action='store_true', 
                       help='Prevent deletion of users not found in LDAP')
    parser.add_argument('--verbose', action='store_true',
                       help='Show detailed progress information')
    args = parser.parse_args()
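
    # Typical invocations (the path matches the cron entry installed by
    # start_calcom; flags are the ones defined above, shown for reference only):
    #   /federated/bin/sync-calcomusers --dry-run --verbose
    #   /federated/bin/sync-calcomusers --config /etc/ldap-sync/config.yml --no-delete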

    try:
        # Set up logging first
        logger = setup_logging(args.log_dir)
        
        # Use process manager to prevent multiple instances
        with ProcessManager(lock_file) as process_manager:
            logger.info("Starting LDAP sync process")
            
            # Load configuration
            config = load_config(args.config_path)
            if not config:
                logger.error("No configuration available")
                sys.exit(1)
            
            # Initialize managers
            db_manager = DatabaseManager(config, args.dry_run)
            ldap_manager = LDAPManager(config)
            calcom_manager = CalComManager(db_manager, args.no_delete, args.verbose)

            # Track timing
            start_time = datetime.now()
            
            try:
                # Connect to LDAP and fetch users
                ldap_conn = ldap_manager.connect()
                ldap_users = ldap_manager.fetch_users(ldap_conn)
                user_count = len(ldap_users)
                ldap_conn.unbind()
                logger.info("LDAP connection closed")

                # Connect to PostgreSQL and perform sync
                db_manager.connect()
                try:
                    if args.dry_run:
                        logger.info("DRY RUN - No changes will be made")
                    
                    calcom_manager.set_installation_completed()
                    calcom_manager.sync_users(ldap_users)
                    
                    if args.dry_run:
                        logger.info("DRY RUN completed - No changes were made")
                        db_manager.rollback()
                    else:
                        db_manager.commit()
                        
                except Exception as e:
                    logger.error(f"Error during sync: {e}")
                    db_manager.rollback()
                    raise
                finally:
                    db_manager.close()

                # Log completion statistics
                end_time = datetime.now()
                duration = (end_time - start_time).total_seconds()
                logger.info(f"Sync completed successfully. Processed {user_count} users in {duration:.2f} seconds")

            except Exception as e:
                logger.error(f"Error during sync process: {e}")
                sys.exit(1)

    except LockError:
        # Don't log this as an error; it's expected when running from cron
        sys.exit(0)
    except Exception as e:
        # Fall back to stderr if setup_logging() failed before logger was assigned
        msg = f"Fatal error: {e}"
        logger.error(msg) if 'logger' in locals() else print(msg, file=sys.stderr)
        sys.exit(1)

if __name__ == "__main__":
    main()
EOF

  echo -ne "done."
}

email_calcom() {
  echo -ne "* Sending email to customer.."
  spin &
  SPINPID=$!

cat > /federated/apps/mail/data/root/certs/mailfile <<EOF
<html>
<img src="https://www.federated.computer/wp-content/uploads/2023/11/logo.png" alt="" /><br>
<p>
<h4>Cal.com is now installed on $DOMAIN</h4>
<p>
Here is your applications chart with information on how to access this service:<br>
<p>
<h4>Applications</h4>
<style type="text/css">
.tg  {border-collapse:collapse;border-spacing:0;}
.tg td{border-color:black;border-style:solid;border-width:1px;font-family:Arial, sans-serif;font-size:14px;
  overflow:hidden;padding:10px 5px;word-break:normal;}
.tg th{border-color:black;border-style:solid;border-width:1px;font-family:Arial, sans-serif;font-size:14px;
  font-weight:normal;overflow:hidden;padding:10px 5px;word-break:normal;}
.tg .tg-cul6{border-color:inherit;color:#340096;text-align:left;text-decoration:underline;vertical-align:top}
.tg .tg-acii{background-color:#FFF;border-color:inherit;color:#333;text-align:left;vertical-align:top}
.tg .tg-0hty{background-color:#000000;border-color:inherit;color:#ffffff;font-weight:bold;text-align:left;vertical-align:top}
.tg .tg-kwiq{border-color:inherit;color:#000000;text-align:left;vertical-align:top;word-wrap:break-word}
.tg .tg-0pky{border-color:inherit;text-align:left;vertical-align:top}
</style>
<table class="tg" style="undefined;table-layout: fixed; width: 996px">
<colgroup>
<col style="width: 101.333333px">
<col style="width: 203.333333px">
<col style="width: 282.333333px">
<col style="width: 185.33333px">
<col style="width: 78.333333px">
<col style="width: 220.333333px">
</colgroup>
<thead>
  <tr>
    <th class="tg-0hty">Service</th>
    <th class="tg-0hty">Link</th>
    <th class="tg-0hty">User / Pass</th>
    <th class="tg-0hty">Access</th>
    <th class="tg-0hty">Docs</th>
    <th class="tg-0hty">Description</th>
  </tr>
</thead>
<tbody>
  <tr>
    <td class="tg-kwiq">Cal.com</td>
    <td class="tg-kwiq"><a href="https://calcom.$DOMAIN" target="_blank" rel="noopener noreferrer"><span style="color:#340096">calcom.$DOMAIN</span></a></td>
    <td class="tg-kwiq">admin@$DOMAIN<br>admin password above</td>
    <td class="tg-kwiq">User access is separate from panel. Use the admin account to login and then invite other users</td>
    <td class="tg-kwiq"><a href="https://documentation.federated.computer/docs/getting_started/welcome/" target="_blank" rel="noopener noreferrer"><span style="color:#340096">Click here</span></a></td>
    <td class="tg-kwiq">Cal.com provides a fully featured scheduling and calendar solution that can also integrate powrefully with Nextcloud (via CalDAV), and which is an alternative to solutions like Calendly.</td>
  </tr>
</tbody>
</table>
<h4>Thanks for your support!</h4>
<p>
Thank you for your support of Federated Computer. We really appreciate it and hope you have a very successful
time with Federated Core.
<p>
Again, if we can be of any assistance, please don't hesitate to get in touch.
<p>
Support: https://support.federated.computer<br>
Phone: (970) 722-8715<br>
Email: support@federated.computer<br>
<p>
It's <b>your</b> computer. Let's make it work for you!
</html>
EOF

  # Send out e-mail from mail container with details
  docker exec mail bash -c "mail -r admin@$DOMAIN -a \"Content-type: text/html\" -s \"Application installed on $DOMAIN\" $EMAIL < /root/certs/mailfile"
  rm /federated/apps/mail/data/root/certs/mailfile

  kill -9 $SPINPID &> /dev/null
  echo -ne "done.\n"
}

uninstall_calcom() {
  echo -ne "* Uninstalling calcom container.."
  spin &
  SPINPID=$!

  # First stop the service
  cd /federated/apps/calcom && docker compose -f docker-compose.yml -p calcom down &> /dev/null

  # Delete database and user in postgresql
  docker exec postgresql psql -U postgres -c "DROP DATABASE calcom" &> /dev/null
  docker exec postgresql psql -U postgres -c "DROP DATABASE calcomsaml" &> /dev/null
  docker exec postgresql psql -U postgres -c "DROP USER calcom" &> /dev/null

  # Delete the app directory
  cd ..
  rm -rf /federated/apps/calcom

  # Delete the additions to /federated/bin
  rm -rf /federated/bin/sync-calcomusers*

  # Delete the image
  docker image rm calcom/cal.com:$IMAGE_VERSION &> /dev/null

  # Delete the DNS record
  docker exec pdns pdnsutil delete-rrset $DOMAIN calcom A

  # Remove cronjob
  crontab -l | grep -v '/federated/bin/sync-calcomusers'  | crontab -

  kill -9 $SPINPID &> /dev/null
  echo -ne "done.\n"
}

start_calcom() {
  # Start service with command to make sure it's up before proceeding
  start_service "calcom" "nc -z 192.168.0.48 3000 &> /dev/null" "30"

  # Ensure DNS entry is added
  docker exec pdns pdnsutil add-record $DOMAIN calcom A 86400 $EXTERNALIP &> /dev/null
  [ $? -ne 0 ] && fail "Couldn't add dns record for calcom"
 
  # Install cronjob
  (crontab -l 2>/dev/null; echo "*/15 * * * * /federated/bin/sync-calcomusers > /dev/null 2>&1") | sort -u | crontab -

  # kill -9 $SPINPID &> /dev/null
  echo -ne "done."
}