Remove unused scripts.
parent 854ca686c0
commit d514305f26
script/alpine_setup
@@ -1,19 +0,0 @@
#!/bin/sh

# script/alpine_setup: Adds all the system packages, directories, users, etc.
#                      required to run the application on Alpine

source "$(dirname "${0}")"/../script/include/global_header.inc.sh

# Set app specific items
APP_USER="atst"
APP_UID="8010"

# Add additional packages required by app dependencies
ADDITIONAL_PACKAGES="postgresql-libs python3 rsync uwsgi uwsgi-python3 uwsgi-logfile"

# add sync-crl cronjob for atst user
echo "1 */6 * * * /opt/atat/atst/script/sync-crls tests/crl-tmp" >> /etc/crontabs/atst

# Run the shared alpine setup script
source ./script/include/run_alpine_setup
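The crontab entry above runs script/sync-crls at minute 1 of every sixth hour. As a quick sanity check on a container built this way, the same file the script appends to can simply be read back (path taken from the echo above):

# List the atst user's crontab to confirm the sync-crls schedule was added.
cat /etc/crontabs/atst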
script/fix_permissions
@@ -1,24 +0,0 @@
#!/bin/bash

# script/fix_permissions: Updates the app directory with the correct user
#                         permissions (skipping node_modules since it is not
#                         required and very large)

source "$(dirname "${0}")"/../script/include/global_header.inc.sh

APP_USER="${1}"
APP_GROUP="${2}"

if [ "${APP_USER}x" = "x" ] || [ "${APP_GROUP}x" = "x" ]; then
  echo "ERROR: Missing username or groupname argument!"
  echo "Received: *${APP_USER}:${APP_GROUP}*"
  echo
  exit 1
fi

chown "${APP_USER}:${APP_GROUP}" .
chown "${APP_USER}:${APP_GROUP}" ./*
for subdir in $(find . -type d -maxdepth 1 | grep -Ee '.[^/]' | grep -Fve 'node_modules')
do
  chown "${APP_USER}:${APP_GROUP}" -R "${subdir}"
done
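A hypothetical invocation of the script above from the repository root; the "atst" user and group values are illustrative and mirror APP_USER in script/alpine_setup:

# Reassign ownership of the checkout (minus node_modules) to the app user.
./script/fix_permissions atst atst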
script/generate_build_info
@@ -1,143 +0,0 @@
#!/bin/bash
#
# script/generate_build_info: Generates buildinfo.html and buildinfo.json and
#                             places them in a publicly accessible static asset
#                             folder

source "$(dirname "${0}")"/../script/include/global_header.inc.sh

# Config
APP_NAME="ATST"
STATIC_DIR="./static"

if [ "${CIRCLECI}" = "true" ]
then
  # This is a CircleCI build
  BUILD_NUMBER="${CIRCLE_BUILD_NUM}"
  BUILD_STATUS_URL="${CIRCLE_BUILD_URL}"
  BUILT_BY="CircleCI"
  CIRCLECI_WORKFLOW_BASEURL="https://circleci.com/workflow-run"
  GIT_BRANCH="${CIRCLE_BRANCH}"
  WORKFLOW_ID="${CIRCLE_WORKFLOW_ID}"
  WORKFLOW_STATUS_URL="${CIRCLECI_WORKFLOW_BASEURL}/${CIRCLE_WORKFLOW_ID}"
else
  # Assume we're running on TravisCI instead
  BUILD_NUMBER="${TRAVIS_BUILD_ID}"
  BUILD_STATUS_URL="https://travis-ci.org/$TRAVIS_REPO_SLUG/builds/$TRAVIS_BUILD_ID"
  BUILT_BY="TravisCI"
  GIT_BRANCH="${TRAVIS_BRANCH}"
  WORKFLOW_ID="N/A"
  WORKFLOW_STATUS_URL="#"
fi

echo "### Generate Build Info ###"

echo "Gathering info from git..."
COMMIT_AUTHOR=$(git log -1 --pretty=%aN)
COMMIT_AUTHOR_EMAIL=$(git log -1 --pretty=%aE)
GIT_SHA=$(git rev-parse HEAD)
# Escape all double quotes in commit message and switch newlines for \n
# (for JSON compatibility)
COMMIT_MESSAGE_JSON=$(git log -1 --pretty=format:%B | sed -e 's#\([^\\]\)"#\1\\"#g' | awk 1 ORS='\\n')
# Escape all < and > characters in commit message and trade newlines for <BR/> tags
COMMIT_MESSAGE_HTML=$(git log -1 --pretty=format:%B | sed -e 's#>#\&gt;#g' | sed -e 's#<#\&lt;#g' | awk 1 ORS='<BR/>')

# Assemble https based git repo url
GIT_REMOTE_URL=$(git config --get remote.origin.url)
if [[ ${GIT_REMOTE_URL} =~ "@" ]]
then
  GIT_URL="https://github.com/$(echo "${GIT_REMOTE_URL}" | cut -d ':' -f 2)"
else
  GIT_URL="${GIT_REMOTE_URL}"
fi
# Drop the trailing .git for generating github links
GITHUB_BASE_URL="${GIT_URL%.git}"
GITHUB_COMMIT_URL="${GITHUB_BASE_URL}/commit/${GIT_SHA}"

APP_CONTAINER_CREATE_DATE=$(date '+%Y-%m-%d')
APP_CONTAINER_CREATE_TIME=$(date '+%H:%M:%S')

echo "Generating ${STATIC_DIR}/buildinfo.json ..."
cat > ${STATIC_DIR}/buildinfo.json <<ENDJSON
{
  "build_info" : {
    "project_name" : "${APP_NAME}",
    "build_id" : "${BUILD_NUMBER}",
    "build_url" : "${BUILD_STATUS_URL}",
    "built_by" : "${BUILT_BY}",
    "workflow_id" : "${WORKFLOW_ID}",
    "workflow_url" : "${WORKFLOW_STATUS_URL}"
  },
  "image_info" : {
    "create_date" : "${APP_CONTAINER_CREATE_DATE}",
    "create_time" : "${APP_CONTAINER_CREATE_TIME}"
  },
  "git_info" : {
    "repository_url" : "${GIT_URL}",
    "branch" : "${GIT_BRANCH}",
    "commit" : {
      "sha" : "${GIT_SHA}",
      "github_commit_url" : "${GITHUB_COMMIT_URL}",
      "author_name" : "${COMMIT_AUTHOR}",
      "author_email" : "${COMMIT_AUTHOR_EMAIL}",
      "message" : "${COMMIT_MESSAGE_JSON}"
    }
  }
}
ENDJSON

echo "Generating ${STATIC_DIR}/buildinfo.html ..."
cat > ${STATIC_DIR}/buildinfo.html <<ENDHTML
<HTML>
<HEAD>
<TITLE>${APP_NAME} build ${BUILD_NUMBER} info</TITLE>
<STYLE>
table {
  display: table;
  border-width: 1px;
  border-color: green;
  border-spacing: 0px;
}
td {
  padding: 5px;
  vertical-align: top;
}
td.label {
  text-align: right;
  font-weight: bold;
}
</STYLE>
</HEAD>
<BODY>
<TABLE border="1">
<TR>
  <TH colspan="2">BuildInfo (${BUILT_BY})</TH>
</TR>
<TR>
  <TD class="label">Container Image Creation Time:</TD>
  <TD>${APP_CONTAINER_CREATE_DATE} ${APP_CONTAINER_CREATE_TIME}</TD>
</TR>
<TR>
  <TD class="label">Build Number:</TD>
  <TD><A target="_blank" href="${BUILD_STATUS_URL}">${BUILD_NUMBER}</A></TD>
</TR>
<TR>
  <TD class="label">Workflow Number:</TD>
  <TD><A target="_blank" href="${WORKFLOW_STATUS_URL}">${WORKFLOW_ID}</A></TD>
</TR>
<TR>
  <TD class="label">Commit SHA:</TD>
  <TD><A target="_blank" href="${GITHUB_COMMIT_URL}">${GIT_SHA}</A></TD>
</TR>
<TR>
  <TD class="label">Commit Author:</TD>
  <TD>${COMMIT_AUTHOR} &lt;${COMMIT_AUTHOR_EMAIL}&gt;</TD>
</TR>
<TR>
  <TD class="label">Commit Message:</TD>
  <TD>${COMMIT_MESSAGE_HTML}</TD>
</TR>
</TABLE>
</BODY>
</HTML>
ENDHTML
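For reference, a minimal sketch of what the JSON-escaping pipeline in the script above does to a sample commit message; the printf input is purely illustrative:

# Escape inner double quotes, then join lines with a literal \n sequence.
printf 'Fix "bug"\nsecond line\n' \
  | sed -e 's#\([^\\]\)"#\1\\"#g' \
  | awk 1 ORS='\\n'
# prints: Fix \"bug\"\nsecond line\n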
script/get_crl_expiry
@@ -1,27 +0,0 @@
#!/bin/bash

# script/get_crl_expiry: Will print the names and expiration dates
#                        for CRLs that exist in a given ATAT namespace.
#                        usage: `script/get_crl_expiry [NAMESPACE]`
#                        defaults to `atat` for the namespace
#                        You must have a valid k8s config for the ATAT clusters to run
#                        this. Keep in mind it parses every CRL so it is slow.

if [[ $# -eq 0 ]]; then
  NAMESPACE=atat
else
  NAMESPACE=$1
fi

# we only need to run these commands against one existing pod
ATST_POD=$(kubectl -n ${NAMESPACE} get pods -l app=atst -o custom-columns=NAME:.metadata.name --no-headers | sed -n 1p)

echo "expiration information for $NAMESPACE namespace, pod $ATST_POD"

for i in $(kubectl -n $NAMESPACE exec $ATST_POD -c atst -- ls crls); do
  expiry=$(kubectl -n $NAMESPACE exec $ATST_POD -c atst -- cat crls/$i | \
    openssl crl -inform der -noout -text | \
    grep "Next Update" | \
    sed -E "s/ +Next Update: //g")
  echo "$i: $expiry";
done
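A local, single-file equivalent of the in-pod pipeline above, assuming a DER-encoded CRL has been copied to disk (the filename is illustrative):

# Print the CRL's Next Update field without kubectl in the loop.
openssl crl -inform der -noout -text -in some_ca.crl | grep "Next Update"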
script/make-test-cac
@@ -1,42 +0,0 @@
#!/bin/bash

# script/make-test-cac: Set up a test CAC card.
# Usage:
#   ./script/make-test-cac [DOD identifier string] [user email] [output name]
# i.e.:
#   ./script/make-test-cac JONES.ANDY.1234567890 andy@example.com andy
# The script will output 3 files:
#   1. The certificate (crt) file (for reference)
#   2. The certificate key (key) file (also for reference)
#   3. The PFX file, which is the package file that needs to be loaded on the PIVKey brand card
set -e

SAN="subjectAltName=email:$2"

openssl genrsa -out $3.key 2048

CSR=$(openssl req \
  -new \
  -nodes \
  -subj "/CN=$1" \
  -reqexts SAN \
  -config <(cat /etc/ssl/openssl.cnf; echo '[SAN]'; echo $SAN) \
  -key $3.key )

openssl x509 \
  -req \
  -in <(echo "$CSR") \
  -days 365 \
  -CA "ssl/client-certs/client-ca.crt" \
  -CAkey "ssl/client-certs/client-ca.key" \
  -CAcreateserial \
  -extensions SAN \
  -extfile <(cat /etc/ssl/openssl.cnf; echo '[SAN]'; echo $SAN) \
  -out $3.crt

openssl pkcs12 -passout pass: -export -out $3.pfx -inkey $3.key -in $3.crt

echo "Generated files:"
echo " CERT: $3.crt"
echo " KEY: $3.key"
echo " PFX: $3.pfx"
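An illustrative follow-up check, using the output name from the usage example in the script's header comments: confirm the DoD identifier and email ended up in the certificate packaged into the PFX.

# Extract the client cert from the PFX (empty password) and show its subject and SAN email.
openssl pkcs12 -in andy.pfx -passin pass: -nokeys -clcerts \
  | openssl x509 -noout -subject -text \
  | grep -E "Subject:|email:"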
script/rq_worker
@@ -1,11 +0,0 @@
#!/bin/bash

# script/rq_worker: Launch the Flask-RQ worker

source "$(dirname "${0}")"/../script/include/global_header.inc.sh

# Before starting the server, apply any pending migrations to the DB
migrate_db

# Launch the worker
run_command "flask rq worker"
script/selenium_test
@@ -1,53 +0,0 @@
#!/bin/bash

# script/selenium_test: Run selenium tests via BrowserStack

source "$(dirname "${0}")"/../script/include/global_header.inc.sh

export FLASK_ENV=selenium

# create upload directory for app
mkdir uploads | true

# Fetch postgres settings and set them as ENV vars
source ./script/get_db_settings

if [ -n "${PGDATABASE}" ]; then
  echo "Resetting database ${PGDATABASE}..."
  # Reset the db
  reset_db "${PGDATABASE}"
else
  echo "ERROR: RESET_DB is set, but PGDATABASE is not!"
  echo "Skipping database reset..."
fi

BSL_FILE=BrowserStackLocal
if [[ `uname` == "Darwin" ]]; then
  BSL_DOWNLOAD="https://www.browserstack.com/browserstack-local/BrowserStackLocal-darwin-x64.zip"
else
  BSL_DOWNLOAD="https://www.browserstack.com/browserstack-local/BrowserStackLocal-linux-x64.zip"
fi

# Fetch BrowserStackLocal script
if [ -e "${BSL_FILE}" ]; then
  echo "BrowserStack file already exists"
else
  echo "downloading BrowserStack file"
  curl $BSL_DOWNLOAD --output $BSL_FILE.zip
  unzip $BSL_FILE.zip -d .
  rm $BSL_FILE.zip
  chmod u+x $BSL_FILE
fi

# run BrowserStackLocal in the background
echo "starting BrowserStack local client..."
./$BSL_FILE --key $BROWSERSTACK_TOKEN &
BSL_ID=$!
trap "kill $BSL_ID" SIGTERM SIGINT EXIT

# run example selenium script that fetches the home page
echo "running selenium tests"
pipenv run pytest tests/acceptance -s --no-cov

# kill BrowserStackLocal
kill $BSL_ID
script/uwsgi_server
@@ -1,13 +0,0 @@
#!/bin/bash

# script/uwsgi_server: Launch the UWSGI server

source "$(dirname "${0}")"/../script/include/global_header.inc.sh

# Before starting the server, apply any pending migrations to the DB
migrate_db

seed_db

# Launch UWSGI
run_command "uwsgi --ini ${UWSGI_CONFIG_FULLPATH}"