Show Posts

Messages - ftajiri

1
CentOS-WebPanel Bugs / Re: cwp_stats is taking too much space
« on: May 21, 2025, 06:21:57 PM »
Code:
#!/bin/bash

# This script compresses the CWP statistics folder for EACH user
# and then cleans the content of the original folder, saving
# the compressed file in a specific backup directory for each user.
# by Fernando Tajiri
# --- Global Configurations ---
# Base path where user home directories are located
USERS_HOME_BASE_DIR="/home"

# Subdirectory of the statistics folder within each user's home
# E.g.: /home/USER/cwp_stats/goaccess
STATS_SUB_DIR="cwp_stats/goaccess"

# Subdirectory where the compressed file will be saved within each user's home
# E.g.: /home/USER/backupcwp
BACKUP_SUB_DIR="backupcwp"

# Compression format (options: "gz" for .tar.gz or "bz2" for .tar.bz2)
# "gz" is recommended for a good balance between speed and size.
COMPRESSION_FORMAT="gz"

# --- Function to process a single user ---
process_user() {
    local CURRENT_USERNAME="$1"
    local USER_HOME_DIR="${USERS_HOME_BASE_DIR}/${CURRENT_USERNAME}"

    echo -e "\n--- Processing user: $CURRENT_USERNAME ---"

    # Define full paths for the current user
    local CURRENT_SOURCE_DIR="${USER_HOME_DIR}/${STATS_SUB_DIR}"
    local CURRENT_BACKUP_DIR="${USER_HOME_DIR}/${BACKUP_SUB_DIR}"

    # 1. Check if the user's source directory exists
    if [ ! -d "$CURRENT_SOURCE_DIR" ]; then
        echo "Warning: Source directory '$CURRENT_SOURCE_DIR' does not exist for user $CURRENT_USERNAME. Skipping."
        return 1 # Return 1 to indicate failure for this user
    fi

    # 2. Create the user's backup directory if it doesn't exist
    echo "Checking/creating backup directory: $CURRENT_BACKUP_DIR"
    mkdir -p "$CURRENT_BACKUP_DIR"

    # Check if backup directory creation was successful
    if [ ! -d "$CURRENT_BACKUP_DIR" ]; then
        echo "Error: Could not create or access backup directory '$CURRENT_BACKUP_DIR' for user $CURRENT_USERNAME. Skipping."
        return 1 # Return 1 to indicate failure for this user
    fi

    # 3. Define the compressed file name with date/time and the full path
    local ARCHIVE_TIMESTAMP ARCHIVE_BASE_NAME ARCHIVE_PATH TAR_OPTIONS
    ARCHIVE_TIMESTAMP=$(date +%Y-%m-%d_%H-%M-%S)
    ARCHIVE_BASE_NAME="cwp_stats__goaccess__${ARCHIVE_TIMESTAMP}"

    if [ "$COMPRESSION_FORMAT" == "gz" ]; then
        ARCHIVE_PATH="${CURRENT_BACKUP_DIR}/${ARCHIVE_BASE_NAME}.tar.gz"
        TAR_OPTIONS="-czvf"
    elif [ "$COMPRESSION_FORMAT" == "bz2" ]; then
        ARCHIVE_PATH="${CURRENT_BACKUP_DIR}/${ARCHIVE_BASE_NAME}.tar.bz2"
        TAR_OPTIONS="-cjvf"
    else
        echo "Error: Invalid compression format. Use 'gz' or 'bz2'. Skipping to the next user."
        return 1 # Return 1 to indicate failure for this user
    fi

    # 4. Compress the user's statistics folder
    echo "Compressing '$CURRENT_SOURCE_DIR' to '$ARCHIVE_PATH'..."
    # The tar command is executed from the parent directory of CURRENT_SOURCE_DIR
    # to ensure that the folder structure is preserved in the backup.
    # E.g.: if CURRENT_SOURCE_DIR is /home/user/cwp_stats/goaccess,
    # dirname is /home/user/cwp_stats and basename is goaccess
    (cd "$(dirname "$CURRENT_SOURCE_DIR")" && tar $TAR_OPTIONS "$ARCHIVE_PATH" "$(basename "$CURRENT_SOURCE_DIR")")

    # Check if compression was successful
    if [ $? -eq 0 ]; then
        echo "Compression completed successfully for $CURRENT_USERNAME: $ARCHIVE_PATH"
        echo "Checking compressed file size:"
        du -sh "$ARCHIVE_PATH"
    else
        echo "Error: Compression failed for user $CURRENT_USERNAME. Source folder will not be cleaned."
        return 1 # Return 1 to indicate failure for this user
    fi

    # 5. Clean only .html files inside the original folder, keeping the folder structure
    echo "Cleaning only .html files from the original folder for user $CURRENT_USERNAME: $CURRENT_SOURCE_DIR"
    # CAUTION: The command below will remove all .html files within the folder and its subfolders.
    find "$CURRENT_SOURCE_DIR" -type f -name "*.html" -delete

    # Check if cleaning was successful
    if [ $? -eq 0 ]; then
        echo "HTML files in folder '$CURRENT_SOURCE_DIR' and its subfolders cleaned successfully for user $CURRENT_USERNAME."
    else
        echo "Error: Cleaning of HTML files in folder '$CURRENT_SOURCE_DIR' failed for user $CURRENT_USERNAME."
        # Not a critical error to abort the entire script, just logs and continues
    fi
    return 0 # Return 0 to indicate success for this user
}

# --- Main Script Logic ---

# Check if a username was passed as an argument
if [ -n "$1" ]; then
    # If an argument was provided, process only that user
    SPECIFIC_USER="$1"
    echo "Starting CWP statistics backup and clean process for specific user: $SPECIFIC_USER"
    process_user "$SPECIFIC_USER"
else
    # If no argument was provided, process all users
    echo "Starting CWP statistics backup and clean process for all users..."

    # Loop through each user's home directory in USERS_HOME_BASE_DIR
    for USER_HOME_DIR in "${USERS_HOME_BASE_DIR}"/*; do
        # Check if it's a directory and not a symbolic link (to avoid loops or issues)
        if [ -d "$USER_HOME_DIR" ] && [ ! -L "$USER_HOME_DIR" ]; then
            CURRENT_USERNAME=$(basename "$USER_HOME_DIR")
            process_user "$CURRENT_USERNAME"
        else
            echo "Warning: '$USER_HOME_DIR' is not a valid user directory. Skipping."
        fi
    done
    echo -e "\nBackup and clean process completed for all users."
fi
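
If it helps, this is how I would run it (the file name and cron schedule below are just examples, adjust them to your setup):

Code:
# save the script, e.g. as /root/cwp_stats_backup.sh, and make it executable
chmod +x /root/cwp_stats_backup.sh

# compress and clean the stats for every account under /home
/root/cwp_stats_backup.sh

# or only for a single account
/root/cwp_stats_backup.sh someuser

# optionally run it weekly from root's crontab (example schedule)
# 0 3 * * 0 /root/cwp_stats_backup.sh >> /var/log/cwp_stats_backup.log 2>&1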


2
Works for me too.

3
CentOS 7 Problems / Re: YUM Manager No Longer Working
« on: April 19, 2023, 07:55:48 PM »
@oleteacher I have the same problem.

There are various updates pending:
1. I run the update,
2. I wait a while,
3. it says it has been updated,
but after restarting it keeps asking for the same updates.

What did you do to your csf to get the update to complete?
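
For reference, a few generic checks that might narrow it down (standard yum and csf commands, nothing specific to this setup):

Code:
# clear the yum caches, rebuild them, and see what is still listed as pending
yum clean all
yum makecache
yum check-update

# check whether anything is excluded from updates
grep -i exclude /etc/yum.conf

# check the installed csf version and try its own updater
csf -v
csf -u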
