I was recently commissioned to write a Perforce installation script, along with a companion script that creates backups as S3 tarballs for the installer to restore from. This is the end result: the bash script I created. There are still some issues with the chmod permissions on the system, but the script is fully functional.
Here is a list of commands:
./p4d_install.sh -c 1 – Uninstalls everything the script has installed in previous runs.
./p4d_install.sh -r 1 -b s3://bucket-url -p superuserpassword – Installs Perforce from an S3 bucket tarball; it grabs the latest tarball and uses it for the installation.
./p4d_install.sh -p superuserpassword – Installs Perforce without a backup (a clean installation).
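If the machine does not already have AWS credentials configured, the -k, -s, and -d flags can be added to any of the above. For example (the key, secret, and region values here are placeholders):
./p4d_install.sh -r 1 -k YOUR_ACCESS_KEY -s YOUR_SECRET_KEY -d us-east-1 -b s3://bucket-url -p superuserpassword – Installs Perforce from the latest S3 tarball and configures the AWS CLI in the same run.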
#!/bin/bash
: ' @Author Daniel Gleason
@Email [email protected]
@Script_Name p4d_install.sh
@Date 12/11/2021
-- Developer Notes --
Most if not all of the permissions of the perforce files are owned by the perforce user
Use sudo -u perforce CMD_LINE for most commands
p4dctl commands:
sudo -u perforce p4dctl stop master
sudo -u perforce p4dctl start master
sudo -u perforce p4dctl status master
'
set -e
# Prints error to stderr in red
function print_error {
local RED='\033[0;31m'
local NC='\033[0m'
printf "${RED}%s${NC}\n" "$1" 1>&2;
}
# Prints out the usage of the script
function usage() {
local usage="Usage: $0 [-r <1|0> restore_from_s3 number] [-k aws_access_key str] [-s aws_secret_key str] [-d aws_region str] [-b s3_bucket_path str] [-p perforce_password str] [-c <1|0> clean_install]";
print_error "$usage"
exit 1;
}
# Removes any potential installation residue from previous runs of the script
function clean() {
sudo slay perforce 2> /dev/null || true # || true keeps set -e from aborting when there is nothing to clean up
sudo rm -rf /perforce
sudo apt-get --purge remove helix-p4d -y
sudo apt-get remove awscli -y
sudo apt-get autoremove -y
sudo rm -f /etc/perforce/p4dctl.conf.d/master.conf
sudo userdel perforce -f 2> /dev/null || true
sudo delgroup perforce 2> /dev/null || true
}
# Get inputs
while getopts "r:k:s:d:b:p:c:" flag; do
case "${flag}" in
r)
export restore=${OPTARG}
;;
k)
export AWS_ACCESS_KEY_ID=${OPTARG}
;;
s)
export AWS_SECRET_ACCESS_KEY=${OPTARG}
;;
d)
export AWS_DEFAULT_REGION=${OPTARG}
;;
b)
export S3_BUCKET_PATH=${OPTARG}
;;
p)
export PERFORCE_PASSWORD=${OPTARG}
;;
c)
export CLEAN=1
clean
;;
*)
usage
;;
esac
done
function install_p4d() {
local ubuntu_distro=`lsb_release --codename --short` # focal, xenial, etc
wget -qO - https://package.perforce.com/perforce.pubkey | sudo apt-key add -
sudo rm -f /etc/apt/sources.list.d/perforce.list # Delete the perforce apt source file if it already exists
echo "deb http://package.perforce.com/apt/ubuntu $ubuntu_distro release" | sudo tee -a /etc/apt/sources.list.d/perforce.list
sudo apt-get update
sudo apt-get install helix-p4d -y
}
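# Configures the master server on ssl:1666 with /perforce as the server root and /perforce/depots as the depot root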
function configure_p4d() {
sudo mkdir -p /perforce # -p so a pre-existing /perforce does not trip set -e
sudo /opt/perforce/sbin/configure-helix-p4d.sh -n master -p ssl:1666 -r /perforce -u perforce -P "$PERFORCE_PASSWORD"
sudo chown -R perforce.perforce /perforce
sudo usermod -aG perforce ubuntu # Add ubuntu user to perforce group
sudo -u perforce mkdir /perforce/depots
# All depot data will be located in /perforce/depots
p4 configure set master#server.depot.root=/perforce/depots
sudo chmod -R 777 /perforce/
sudo chmod -R 770 /perforce/depots
}
# Just some useful tools I like to use
function install_prerequisites() {
sudo apt-get install awscli -y
sudo apt-get install slay -y
sudo apt-get install mlocate -y
}
# Produces credentials file in ~/.aws/credentials and config file in ~/.aws/config used by aws cli
function setup_aws_cli() {
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY
aws configure set default.region $AWS_DEFAULT_REGION
}
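# Stops p4d, pulls the newest tarball from s3, restores the depots and the latest checkpoint, regenerates the ssl certs and restarts the server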
function restore_from_s3_backup() {
echo 'Restoring from s3 backup'
sudo mkdir -p /perforce/backups
sudo chown perforce.perforce /perforce/backups
sudo -u perforce p4dctl stop master
# Download from s3 into /perforce/backups/
pushd /perforce/backups/
local most_recent_checkpoint_s3=$(aws s3 ls $S3_BUCKET_PATH | sort | tail -n 1 | awk '{print $4}')
local backup_dir=`pwd`
sudo -u perforce aws s3 cp $S3_BUCKET_PATH/$most_recent_checkpoint_s3 .
sudo -u perforce tar -xf $most_recent_checkpoint_s3
sudo -u perforce rsync -a perforce/depots/ /perforce/depots/ # Move the depot files over
# Get checkpoint file we just extracted from the tarball
pushd perforce/backups/
local most_recent_checkpoint_file=`ls -t master.ckp.*.gz | head -1`
popd
# Restore the checkpoint
pushd /perforce/root/
sudo rm -rf /perforce/root/db.*
sudo -u perforce p4d -r . -jr $backup_dir/perforce/backups/$most_recent_checkpoint_file
popd
# Remove extracted files to keep things clean
sudo rm -rf perforce/
popd
# Add P4PORT to /etc/perforce/p4dctl.conf.d/master.conf
sudo sed -i '/P4SSLDIR\s.*=\s.*ssl/a \\tP4PORT=ssl:1666' /etc/perforce/p4dctl.conf.d/master.conf
# Change permissions of db files
sudo chmod -R 600 /perforce/root/db.*
# Generate new ssl certs
pushd /perforce/root/
sudo chmod -R 0700 ssl/
sudo rm ssl/privatekey.txt ssl/certificate.txt
P4SSLDIR=ssl sudo -Eu perforce p4d -Gc
popd
# Start p4d
sudo -u perforce p4dctl start master
# Run p4 trust on the new server setup.
p4 trust -f -y
}
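# Entry point: install prerequisites unless cleaning, install and configure p4d when a password is given, configure the aws cli when credentials are given, and restore from s3 when requested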
function main() {
if [ ! "$CLEAN" ]
then
install_prerequisites
fi
if [ "$PERFORCE_PASSWORD" ]
then
install_p4d
configure_p4d
fi
if [ "$AWS_ACCESS_KEY_ID" ] && [ "$AWS_SECRET_ACCESS_KEY" ] && [ "$AWS_DEFAULT_REGION" ]
then
setup_aws_cli
fi
if [ "$restore" ] && [ "$restore" -eq 1 ]
then
if [ "$S3_BUCKET_PATH" ]
then
echo Bucket Path: $S3_BUCKET_PATH
restore_from_s3_backup
else
print_error "Error: restore flag must be 1 and s3_bucket_path must be set to restore from a backup."
usage
fi
fi
}
main
This next script is used in conjunction with the script above to create backups. You can run it from a cron task (see the example crontab entry after the script): it creates a new Perforce checkpoint, bundles it with the depots into a tarball, and uploads the tarball to S3. It does not set up an AWS CLI configuration for you; you will need to do that yourself, or give the instance the permissions it needs. I also hard coded the S3 backup location, so you will either need to turn it into a variable and use getopts, or just change the hard coded bucket URL.
#!/bin/bash
: ' @Author Daniel Gleason
@Email [email protected]
@Script_Name create_backup.sh
@Date 12/13/2021
'
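# Echoes the file name of the newest checkpoint in /perforce/backups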
function get_most_recent_checkpoint() {
pushd /perforce/backups/ > /dev/null
echo `ls -t master.ckp.*.gz | head -1`
popd > /dev/null
}
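# Takes a new checkpoint, bundles it with the depots into a tarball and uploads the tarball to s3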
function make_backup() {
pushd /perforce > /dev/null
mkdir backups 2> /dev/null
sudo chown -R perforce.perforce backups/
pushd backups/
# Creates a checkpoint in /perforce/backups using the p4 database in /perforce/root and the journal in /perforce/root
sudo -u perforce p4d -r /perforce/root -J /perforce/root/journal -z -jc `pwd`/master
# Create tarball of depots and the checkpoint
local backup_name="$(date +"%m.%d.%Y.%I.%M.%p")_backup.tar.gz"
local most_recent_checkpoint=$(get_most_recent_checkpoint)
sudo -u perforce tar -Ppzcf $backup_name /perforce/depots/ /perforce/backups/$most_recent_checkpoint
aws s3 cp `pwd`/$backup_name "S3_LOCATION" # Replace S3_LOCATION with your s3 bucket URL (e.g. s3://bucket-url/)
popd > /dev/null
popd > /dev/null
}
make_backup
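To schedule it with cron, an entry along these lines works; the path and the nightly 2 AM schedule are just placeholders, and because the script calls sudo it is simplest to put the entry in root's crontab (sudo crontab -e):
# Take a Perforce backup every night at 2 AM
0 2 * * * /path/to/create_backup.sh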