code (stringlengths 2–1.05M) | repo_name (stringlengths 5–110) | path (stringlengths 3–922) | language (stringclasses: 1 value) | license (stringclasses: 15 values) | size (int64: 2–1.05M)
---|---|---|---|---|---|
#!/bin/bash
echo "`lsof | grep txt | wc -l` - `date`" >> /tmp/test.txt
| alphagov/ier-frontend | scripts/utils/file_handles_check.sh | Shell | mit | 72 |
#!/bin/bash
#SBATCH --nodes 1
#SBATCH --ntasks-per-node 1
#SBATCH --cpus-per-task 1
#SBATCH --mem 4000
#SBATCH --job-name="nodesign"
#SBATCH --time 35:00:00
#SBATCH --array=1-1000
#SBATCH --output=/scratch/bonet/logs/nodesign.%A_%a.out
#SBATCH --error=/scratch/bonet/logs/nodesign.%A_%a.err
ROSETTAPATH="/scratch/lpdi/bin/Rosetta/devel/nubinitio/main/source/bin/"
ROSETTADIST="linuxiccrelease"
EXPCOMPLEMENT="nobinder"
JOBID=1
NSTRUCT=40
ROUND=${SLURM_ARRAY_TASK_ID}
TARGET="nodesign"
DDG="evaluate_ddg"
PDB="../../../pdbs/3lhp.pdb.gz"
if [ ${SLURM_ARRAY_TASK_ID} -gt 500 ]; then
EXPCOMPLEMENT="binder"
fi
SUFFIX=${EXPCOMPLEMENT}_${JOBID}_${ROUND}
OUTDIR=${EXPCOMPLEMENT}
mkdir -p ${OUTDIR}
srun ${ROSETTAPATH}rosetta_scripts.${ROSETTADIST} -parser:protocol ${TARGET}.${EXPCOMPLEMENT}.xml @common_flags -s ${PDB} -out:nstruct ${NSTRUCT} -out:prefix ${TARGET}_ -out:suffix _${SUFFIX} -out:file:silent ${OUTDIR}/${TARGET}_${SUFFIX}
echo "CASTOR: RUN FINISHED"
| jaumebonet/FoldFromLoopsTutorial | benchmark/scientific/bcl2/experiments/from4oyd/fullcst/submiter.castor.sh | Shell | mit | 974 |
#!/bin/bash
##############################################################
##################### usage ############################
##############################################################
function shell_usage()
{
echo "################## USAGE: $0 [-fph]"
sleep 1
echo "################## USAGE: -f [yes/no] force config"
sleep 1
echo "################## USAGE: -p [path name] client bin path name"
sleep 1
echo "################## USAGE: -h help usage"
sleep 1
exit
}
##############################################################
################### parameter ##########################
##############################################################
CUR_PATH=`pwd`
CUR_DATE=$(date +%Y-%m-%d-%H-%M-%S)
PARAM_FORCE="no"
PARAM_PATH="."
PARAM_NAME="demo"
while getopts "fp:h" arg
do
case $arg in
f) PARAM_FORCE="yes";;
p) PARAM_NAME=$OPTARG;;
h) shell_usage $0;;
?) shell_usage $0;;
esac
done
echo "param_force:" $PARAM_FORCE
echo "param path:" $PARAM_PATH
echo "param_name:" $PARAM_NAME
testBaseName="test";
testNum=3;
testI=0;
###########################################
###############function####################
###########################################
function testDemo()
{
for ((testI=0;testI < testNum; ++testI)); do
echo "hello:" $testI
testName=$testBaseName$testI
echo $testName
{
if [ "$PARAM_NAME"x != ""x ]
then
echo "load bin..." $PARAM_NAME
##rest=`$PARAM_NAME $testName`
##echo $rest
`$PARAM_PATH/$PARAM_NAME $testName &`
fi
}&
done
echo "wait for ending..."
wait
}
function testClient()
{
for ((testI=0;testI < testNum; ++testI)); do
{
testName=$testBaseName$testI
echo "load file name:" $testName
rest=`$PARSER_PATH/$PARSER_NAME $PARSER_PATH/../$testName`
echo "load done..."
}&
done
echo "testClient wait for ending..."
wait
}
function checkSendAndRecv()
{
resultName=$CUR_PATH/result.res
for ((testI=0;testI < testNum; ++testI)); do
{
sendName=$testBaseName$testI-send
recvName=$testBaseName$testI
if [[ -f "$sendName" ]] && [[ -f "$recvName" ]]; then
sendLine=`sed -n '$=' $sendName`
recvLine=`sed -n '$=' $recvName`
errlog=$testBaseName$testI-log
if [[ $sendLine -gt 0 ]] && [[ $recvLine -gt 0 ]]; then
if [ $sendLine -eq $recvLine ]; then
echo "sendLine and recvLine are equal, send:" $sendLine ", recv:" $recvLine
else
echo "error:sendName:" $sendName " and recvName: " $recvName >> $errlog
echo "*****:sendLine:" $sendLine " and recvLine: " $recvLine >> $errlog
echo "*****:are not same" >> $errlog
cat $errlog >> $resultName
testClient
fi
else
echo "error:sendName:" $sendName " and recvName: " $recvName >> $errlog
echo "#####:sendLine:" $sendLine " and recvLine: " $recvLine >> $errlog
echo "#####:are not all greater than 0" >> $errlog
cat $errlog >> $resultName
fi
else
echo "error:sendName:" $sendName " and recvName: " $recvName " not exists!"
fi
}&
done
echo "checkSendAndRecv wait for ending..."
wait
}
function checkSequenceLine()
{
for ((testI=0;testI < testNum; ++testI)); do
{
sendName=$testBaseName$testI-send
if [ -f "$sendName" ]; then
sendLine=`sed -n '$=' $sendName`
errlog=$testBaseName$testI-log
if [ $sendLine -gt 0 ]; then
echo "sendLine send:" $sendLine
fi
else
echo "error:sendName:" $sendName " not exists!"
fi
}&
done
echo "checkSequenceLine wait for ending..."
wait
}
###########################################
#################test######################
###########################################
#################################################
##testNum=3;
##PARAM_NAME="demo"
##PARAM_PATH="."
##trap "pgrep $PARAM_NAME | xargs kill -s 9;exit 0" INT
##testDemo
#################################################
testNum=100;
PARAM_NAME=""
PARAM_PATH=$CUR_PATH
PARSER_NAME="parser"
PARSER_PATH=$CUR_PATH/parser
echo "kill PARSER_PATH/PARSER_NAME:" $PARSER_PATH/$PARSER_NAME
#trap "pgrep $PARSER_NAME | xargs kill -s 9;exit 0" INT
#####trap "ps -ef | grep $PARAM_PATH/$PARAM_NAME | grep -v grep | cut -c 9-15 | xargs kill -s 9;exit 0" INT
checkSendAndRecv
| DyncKevin/Teameeting-MsgServer | MsgServer/client_test/concurrency_parser.sh | Shell | gpl-2.0 | 4,941 |
#!/bin/bash
## Give the Job a descriptive name
#PBS -N makejob
## Output and error files
#PBS -o makejob.out
#PBS -e makejob.err
## How many machines should we get?
#PBS -l nodes=1
## Start
## Run make in the src folder (modify properly)
cd /home/users/goumas/benchmarks/MPI_code/fw
make
| VHarisop/Parallel | ex1/code/scripts/make_on_queue.sh | Shell | gpl-2.0 | 294 |
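# Run make_plugin.php for each extension using the bundled LAMPP PHP interpreter.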
/opt/lampp/bin/php make_plugin.php aceditor
/opt/lampp/bin/php make_plugin.php attach
/opt/lampp/bin/php make_plugin.php navigation
/opt/lampp/bin/php make_plugin.php tableau
/opt/lampp/bin/php make_plugin.php hashcash
| YesWiki/yeswiki-extensions-deprecated | script/make_plug.sh | Shell | gpl-2.0 | 219 |
#!/system/bin/sh
# custom busybox installation shortcut
bb=/sbin/bb/busybox;
# Set TCP westwood
echo "westwood" > /proc/sys/net/ipv4/tcp_congestion_control
# Apply fq pie packet sched
tc qdisc add dev wlan0 root fq_pie
tc qdisc add dev rmnet_data0 root fq_pie
| HelpMeRuth/ruthless | Anykernel2/ramdisk/init.optimus.sh | Shell | gpl-2.0 | 263 |
#!/bin/bash
# setup colors
red=`tput setaf 1`
green=`tput setaf 2`
cyan=`tput setaf 6`
bold=`tput bold`
reset=`tput sgr0`
heading()
{
echo
echo "${cyan}==>${reset}${bold} $1${reset}"
}
success()
{
echo
echo "${green}==>${bold} $1${reset}"
}
error()
{
echo
echo "${red}==>${bold} Error: $1${reset}"
}
heading "Installing..."
mkdir -p bin
mkdir -p logs
# update packages
sudo apt-get update -y
sudo apt-get upgrade -y
sudo apt-get install -y software-properties-common
# install dependencies
sudo apt-get install -y software-properties-common build-essential git unzip wget nodejs npm ntp cloud-utils
# add node symlink if it doesn't exist
[[ ! -f /usr/bin/node ]] && sudo ln -s /usr/bin/nodejs /usr/bin/node
sudo npm install
sudo npm install pm2 -g
# set up time update cronjob
sudo bash -c "cat > /etc/cron.hourly/ntpdate << EOF
#!/bin/sh
pm2 flush
sudo service ntp stop
sudo ntpdate -s ntp.ubuntu.com
sudo service ntp start
EOF"
sudo chmod 755 /etc/cron.hourly/ntpdate
| ExEric3/shift-network-reporter | build.sh | Shell | gpl-3.0 | 988 |
# For Caltech Clusters
#MATLAB_PATH='/ldcg/matlab_r2016b'
#MATLAB_PATH='/Documents/MATLAB2016b'
MATLAB_PATH='/Downloads/MCR_R2016b'
export LD_LIBRARY_PATH="$MATLAB_PATH/sys/os/glnxa64:$MATLAB_PATH/bin/glnxa64:$MATLAB_PATH/extern/lib/glnxa64:$MATLAB_PATH/runtime/glnxa64:$MATLAB_PATH/sys/java/jre/glnxa64/jre/lib/amd64/native_threads:$MATLAB_PATH/sys/java/jre/glnxa64/jre/lib/amd64/server"
echo $LD_LIBRARY_PATH
| ligovirgo/seismon | RfPrediction/RfAmp_Compiled_Python_Package/robustLocklossPredictionPkg4/for_redistribution_files_only/set_shared_library_paths.sh | Shell | gpl-3.0 | 414 |
#!/bin/bash
set -e
pegasus_lite_version_major="4"
pegasus_lite_version_minor="7"
pegasus_lite_version_patch="0"
pegasus_lite_enforce_strict_wp_check="true"
pegasus_lite_version_allow_wp_auto_download="true"
. pegasus-lite-common.sh
pegasus_lite_init
# cleanup in case of failures
trap pegasus_lite_signal_int INT
trap pegasus_lite_signal_term TERM
trap pegasus_lite_exit EXIT
echo -e "\n################################ Setting up workdir ################################" 1>&2
# work dir
export pegasus_lite_work_dir=$PWD
pegasus_lite_setup_work_dir
echo -e "\n###################### figuring out the worker package to use ######################" 1>&2
# figure out the worker package to use
pegasus_lite_worker_package
echo -e "\n##################### setting the xbit for executables staged #####################" 1>&2
# set the xbit for any executables staged
/bin/chmod +x example_workflow-calculateratio_8-1.0
echo -e "\n############################# executing the user tasks #############################" 1>&2
# execute the tasks
set +e
pegasus-kickstart -n example_workflow::calculateratio_8:1.0 -N ID0000023 -R condorpool -L example_workflow -T 2016-11-07T15:25:01+00:00 ./example_workflow-calculateratio_8-1.0
job_ec=$?
set -e
| elainenaomi/sciwonc-dataflow-examples | dissertation2017/Experiment 1A/logs/w-11_0/20161107T152502+0000/00/00/calculateratio_8_ID0000023.sh | Shell | gpl-3.0 | 1,252 |
#!/bin/bash
set -e
pegasus_lite_version_major="4"
pegasus_lite_version_minor="7"
pegasus_lite_version_patch="0"
pegasus_lite_enforce_strict_wp_check="true"
pegasus_lite_version_allow_wp_auto_download="true"
. pegasus-lite-common.sh
pegasus_lite_init
# cleanup in case of failures
trap pegasus_lite_signal_int INT
trap pegasus_lite_signal_term TERM
trap pegasus_lite_exit EXIT
echo -e "\n################################ Setting up workdir ################################" 1>&2
# work dir
export pegasus_lite_work_dir=$PWD
pegasus_lite_setup_work_dir
echo -e "\n###################### figuring out the worker package to use ######################" 1>&2
# figure out the worker package to use
pegasus_lite_worker_package
echo -e "\n##################### setting the xbit for executables staged #####################" 1>&2
# set the xbit for any executables staged
/bin/chmod +x wikiflow-longestsession_0-1.0
echo -e "\n############################# executing the user tasks #############################" 1>&2
# execute the tasks
set +e
pegasus-kickstart -n wikiflow::longestsession_0:1.0 -N ID0000006 -R condorpool -L example_workflow -T 2017-01-17T20:40:45+00:00 ./wikiflow-longestsession_0-1.0
job_ec=$?
set -e
| elainenaomi/sciwonc-dataflow-examples | dissertation2017/Experiment 2/logs/w-08_0/20170117T204045+0000/00/00/longestsession_0_ID0000006.sh | Shell | gpl-3.0 | 1,228 |
#!/bin/bash
#
# This is job project1_test1_0
#
#
## Start of header for backend 'local'.
#
set -e
set -u
ENVIRONMENT_DIR='.'
#
# Variables declared in MOLGENIS Compute headers/footers always start with an MC_ prefix.
#
declare MC_jobScript="test1_0.sh"
declare MC_jobScriptSTDERR="test1_0.err"
declare MC_jobScriptSTDOUT="test1_0.out"
declare MC_failedFile="molgenis.pipeline.failed"
declare MC_singleSeperatorLine=$(head -c 120 /dev/zero | tr '\0' '-')
declare MC_doubleSeperatorLine=$(head -c 120 /dev/zero | tr '\0' '=')
declare MC_tmpFolder='tmpFolder'
declare MC_tmpFile='tmpFile'
declare MC_tmpFolderCreated=0
#
##
### Header functions.
##
#
function errorExitAndCleanUp() {
local _signal="${1}"
local _problematicLine="${2}"
local _exitStatus="${3:-$?}"
local _executionHost="$(hostname)"
local _format='INFO: Last 50 lines or less of %s:\n'
local _errorMessage="FATAL: Trapped ${_signal} signal in ${MC_jobScript} running on ${_executionHost}. Exit status code was ${_exitStatus}."
if [ "${_signal}" == 'ERR' ]; then
_errorMessage="FATAL: Trapped ${_signal} signal on line ${_problematicLine} in ${MC_jobScript} running on ${_executionHost}. Exit status code was ${_exitStatus}."
fi
_errorMessage=${4:-"${_errorMessage}"} # Optionally use custom error message as 4th argument.
echo "${_errorMessage}"
echo "${MC_doubleSeperatorLine}" > "${MC_failedFile}"
echo "${_errorMessage}" >> "${MC_failedFile}"
if [ -f "${MC_jobScriptSTDERR}" ]; then
echo "${MC_singleSeperatorLine}" >> "${MC_failedFile}"
printf "${_format}" "${MC_jobScriptSTDERR}" >> "${MC_failedFile}"
echo "${MC_singleSeperatorLine}" >> "${MC_failedFile}"
tail -50 "${MC_jobScriptSTDERR}" >> "${MC_failedFile}"
fi
if [ -f "${MC_jobScriptSTDOUT}" ]; then
echo "${MC_singleSeperatorLine}" >> "${MC_failedFile}"
printf "${_format}" "${MC_jobScriptSTDOUT}" >> "${MC_failedFile}"
echo "${MC_singleSeperatorLine}" >> "${MC_failedFile}"
tail -50 "${MC_jobScriptSTDOUT}" >> "${MC_failedFile}"
fi
echo "${MC_doubleSeperatorLine}" >> "${MC_failedFile}"
}
#
# Create tmp dir per script/job.
# To be called with either a file or folder as first and only argument.
# Defines two globally set variables:
# 1. MC_tmpFolder: a tmp dir for this job/script. When function is called multiple times MC_tmpFolder will always be the same.
# 2. MC_tmpFile: when the first argument was a folder, MC_tmpFile == MC_tmpFolder
# when the first argument was a file, MC_tmpFile will be a path to a tmp file inside MC_tmpFolder.
#
function makeTmpDir {
#
# Compile paths.
#
local _originalPath="${1}"
local _myMD5="$(md5sum ${MC_jobScript} | cut -d ' ' -f 1)"
local _tmpSubFolder="tmp_${MC_jobScript}_${_myMD5}"
local _dir
local _base
if [[ -d "${_originalPath}" ]]; then
_dir="${_originalPath}"
_base=''
else
_base=$(basename "${_originalPath}")
_dir=$(dirname "${_originalPath}")
fi
MC_tmpFolder="${_dir}/${_tmpSubFolder}/"
MC_tmpFile="${MC_tmpFolder}/${_base}"
echo "DEBUG ${MC_jobScript}::makeTmpDir: dir='${_dir}';base='${_base}';MC_tmpFile='${MC_tmpFile}'"
#
# Cleanup the previously created tmpFolder first if this script was resubmitted.
#
if [[ ${MC_tmpFolderCreated} -eq 0 && -d "${MC_tmpFolder}" ]]; then
rm -rf "${MC_tmpFolder}"
fi
#
# (Re-)create tmpFolder.
#
mkdir -p "${MC_tmpFolder}"
MC_tmpFolderCreated=1
}
trap 'errorExitAndCleanUp HUP NA $?' HUP
trap 'errorExitAndCleanUp INT NA $?' INT
trap 'errorExitAndCleanUp QUIT NA $?' QUIT
trap 'errorExitAndCleanUp TERM NA $?' TERM
trap 'errorExitAndCleanUp EXIT NA $?' EXIT
trap 'errorExitAndCleanUp ERR $LINENO $?' ERR
touch "${MC_jobScript}.started"
#
## End of header for backend 'local'
#
#
## Generated header
#
# Assign values to the parameters in this script
# Set taskId, which is the job name of this task
taskId="test1_0"
# Make compute.properties available
rundir="TEST_PROPERTY(project.basedir)/target/test/benchmark/run/testParameters2"
runid="testParameters2"
workflow="src/main/resources/workflows/listOutOfTwo/workflow2.csv"
parameters="src/main/resources/workflows/listOutOfTwo/parameters.csv"
user="TEST_PROPERTY(user.name)"
database="none"
backend="localhost"
port="80"
interval="2000"
path="."
# Connect parameters to environment
project="project1"
prefix[0]="prefix1"
prefix[1]="prefix2"
prefix[2]="prefix3"
postfix[0]="postfix1"
postfix[1]="postfix2"
postfix[2]="postfix3"
# Validate that each 'value' parameter has only identical values in its list
# We do that to protect you against parameter values that might not be correctly set at runtime.
if [[ ! $(IFS=$'\n' sort -u <<< "${project[*]}" | wc -l | sed -e 's/^[[:space:]]*//') = 1 ]]; then echo "Error in Step 'test1': input parameter 'project' is an array with different values. Maybe 'project' is a runtime parameter with 'more variable' values than what was folded on generation-time?" >&2; exit 1; fi
#
## Start of your protocol template
#
#!/bin/sh
#string project
#list prefix,postfix
for s in "prefix1" "prefix2" "prefix3" "postfix1" "postfix2" "postfix3"
do
echo $s
done
#
## End of your protocol template
#
# Save output in environment file: '$ENVIRONMENT_DIR/test1_0.env' with the output vars of this step
echo "" >> $ENVIRONMENT_DIR/test1_0.env
chmod 755 $ENVIRONMENT_DIR/test1_0.env
#
## Start of footer for backend 'local'.
#
if [ -d "${MC_tmpFolder:-}" ]; then
echo -n "INFO: Removing MC_tmpFolder ${MC_tmpFolder} ..."
rm -rf "${MC_tmpFolder}"
echo 'done.'
fi
tS=${SECONDS:-0}
tM=$((SECONDS / 60 ))
tH=$((SECONDS / 3600))
echo "On $(date +"%Y-%m-%d %T") ${MC_jobScript} finished successfully after ${tM} minutes." >> molgenis.bookkeeping.log
printf '%s:\t%d seconds\t%d minutes\t%d hours\n' "${MC_jobScript}" "${tS}" "${tM}" "${tH}" >> molgenis.bookkeeping.walltime
mv "${MC_jobScript}".{started,finished}
trap - EXIT
exit 0
| molgenis/molgenis-compute | molgenis-compute-core/src/test/resources/expected/testParameters2/test1_0.sh | Shell | lgpl-3.0 | 5,977 |
#!/bin/bash -x
TRIK_QT_INSTALL_DIR=${TRIK_QT_INSTALL_DIR:-$HOME/TRIK_Qt}
SCRIPT_DIR=$(dirname ${BASH_SOURCE[0]})
set -ueo pipefail
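# Download the Qt online installer DMG once (reusing a cached copy in /tmp), mount it, and locate the installer binary inside the mounted volume.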
D=/tmp/qt-installer.dmg
test -r "$D" || curl -L -o "$D" http://download.qt-project.org/official_releases/online_installers/qt-unified-mac-x64-online.dmg
P=$(hdiutil attach "$D" -noverify -noautofsck | grep -o '/Volumes/.*$')
I=$(find "$P" -type f -name '*qt-unified-*' -path '*Contents/MacOS/*' -print)
env TRIK_QT_INSTALL_DIR="$TRIK_QT_INSTALL_DIR" "$I" --verbose --no-force-installations --show-virtual-components --script "$SCRIPT_DIR/../docker/qt_scriptinstall.qs"
#remove garbage. No way to deselect this in installer script.
rm -rf "$TRIK_QT_INSTALL_DIR"/{Docs,Examples,"Qt Creator.app",MaintenanceTool.app,MaintenanceTool.dat}
du -csh "$TRIK_QT_INSTALL_DIR"/* | sort -h
| trikset/trikRuntime | scripts/install_qt_mac.sh | Shell | apache-2.0 | 810 |
#!/bin/bash
CLI_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $CLI_DIR
if [ -z "$GOPATH" -o -z "$(which go)" ]; then
echo "Missing GOPATH environment variable or 'go' executable. Please configure a Go build environment."
exit 1
fi
# The name of the binary produced by Go:
if [ -z "$EXE_NAME" ]; then
EXE_NAME="dcos-cassandra"
fi
print_file_and_shasum() {
# Only show 'file <filename>' if that utility is available: often missing in CI builds.
if [ -n "$(which file)" ]; then
file "$1"
fi
ls -l "$1"
echo ""
}
# ---
# go (static binaries containing the CLI itself)
cd $EXE_NAME/
# this may be omitted in 1.6+, left here for compatibility with 1.5:
export GO15VENDOREXPERIMENT=1
go get
# available GOOS/GOARCH permutations are listed at:
# https://golang.org/doc/install/source#environment
# windows:
GOOS=windows GOARCH=386 go build
if [ $? -ne 0 ]; then exit 1; fi
print_file_and_shasum "${EXE_NAME}.exe"
# osx (static build):
SUFFIX="-darwin"
CGO_ENABLED=0 GOOS=darwin GOARCH=386 go build \
&& mv -vf "${EXE_NAME}" "${EXE_NAME}${SUFFIX}"
if [ $? -ne 0 ]; then exit 1; fi
# don't ever strip the darwin binary: results in a broken/segfaulty binary
print_file_and_shasum "${EXE_NAME}${SUFFIX}"
# linux (static build):
SUFFIX="-linux"
CGO_ENABLED=0 GOOS=linux GOARCH=386 go build \
&& mv -vf "${EXE_NAME}" "${EXE_NAME}${SUFFIX}"
if [ $? -ne 0 ]; then exit 1; fi
case "$OSTYPE" in
linux*) strip "${EXE_NAME}${SUFFIX}"
esac
print_file_and_shasum "${EXE_NAME}${SUFFIX}"
cd ..
# ---
# python (wraps above binaries for compatibility with 1.7 and universe-2.x):
cd python/
python setup.py bdist_wheel
if [ $? -ne 0 ]; then exit 1; fi
print_file_and_shasum dist/*.whl
cd ..
| verma7/dcos-cassandra-service | cli/build-cli.sh | Shell | apache-2.0 | 1,742 |
#!/bin/bash
#
chmod a+x ./adb/mac/adb
# 1. Command Line interface of this tool
DEVICE_ID=$1
UNAME=$2
APK_FILE=$3
PKG_NAME=$4
PKG_VERSION=$5
SIG_DURA=$6
SER_DURA=$6
if [ -d "./logs/" ]; then
echo "./logs/ exists! remove expired folder"
# rm -r ./logs/
fi
# 2. running command
#java -jar monkey-adapter-runner.jar --device-id $DEVICE_ID --user-name $UNAME --pkg-path $APK_FILE --pkg-name $PKG_NAME --pkg-version $PKG_VERSION --single-duration $SIG_DURA --series-duration $SER_DURA
# 3. analyzing command
java -jar monkey-adapter-analyzer.jar --workspaces ./logs/ --monkey-log-file-name monkey_log.txt --logcat-log-file-name logcat_log.txt --traces-log-file-name traces_log.txt --bugreport-log-file-name bugreport_log.txt --properties-file-name properties.txt --duration $SER_DURA --package-name $PKG_NAME
# 4. open html report
open ./logs/index.html
| apack1001/Android-Monkey-Adapter | dist/gen_report.sh | Shell | apache-2.0 | 856 |
#!/bin/sh -e
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
if test -n "$1"; then
version=$1
else
echo "error: no version"
exit 1
fi
log () {
printf "\033[1;31m$1\033[0m\n"
}
basename=`basename $0`
temp_dir=`mktemp -d /tmp/${basename}.XXXXXX` || exit 1
exit="echo Files in: $temp_dir"
trap "echo && $exit && kill 0" SIGINT
build () {
time_start=`date "+%s"`
log_file=$temp_dir/$time_start".txt"
echo "Build started `date -r $time_start`" > $log_file
make distcheck | tee -a $log_file
time_finish=`date "+%s"`
echo "Build finished `date -r $time_finish`" >> $log_file
total_time=`expr $time_finish - $time_start`
echo "Build took `TZ=UTC date -r $total_time +%H:%M:%S`" >> $log_file
}
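# Poll every 5 seconds, rebuilding with "make distcheck" until the release tarball exists and is non-empty.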
while true; do
sleep 5
log "Checking build..."
if test -s apache-couchdb-$version.tar.gz; then
break
else
build
fi
done
log "Build success..."
$exit
| apache/couchdb-admin | release/auto_distcheck.sh | Shell | apache-2.0 | 1,399 |
#!/bin/bash
# Always run this script from the root of the Buck project directory.
#
# Remove any residual files that could derail build and publication.
#
cd "$(git rev-parse --show-toplevel)" || exit
ant clean
cd "$(git rev-parse --show-toplevel)/docs" || exit
exec java -jar plovr-81ed862.jar soyweb --port 9814 --dir . --globals globals.json
| LegNeato/buck | docs/soyweb-prod.sh | Shell | apache-2.0 | 349 |
#!/usr/bin/env bash
##
# Copyright (c) 2008-2013 Marius Zwicker
# All rights reserved.
#
# @LICENSE_HEADER_START:Apache@
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# http://www.mlba-team.de
#
# @LICENSE_HEADER_END:Apache@
##
MZ_CMAKETOOLS_compiler=clang
MZ_CMAKETOOLS_generator=xcode
MZ_CMAKETOOLS_mode=debug
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source $DIR/generator.sh $@
| leon-m/cool | externals/xdispatch/build/genXCode_ProjectFiles.command | Shell | apache-2.0 | 906 |
#!/bin/bash
source "$(dirname "${BASH_SOURCE}")/../../hack/lib/init.sh"
trap os::test::junit::reconcile_output EXIT
os::test::junit::declare_suite_start "cmd/create"
# No clusters notice
os::cmd::try_until_text "_output/oshinko get" "No clusters found."
# name required
os::cmd::expect_failure "_output/oshinko create"
# General note -- at present, the master and worker counts in the included config object on get are "MasterCount" and "WorkerCount"
# the master and worker counts in the outer cluster status are "masterCount" and "workerCount"
# Likewise, SparkImage is from config and 'image' is in the outer cluster status
# default one worker / one master
os::cmd::expect_success "_output/oshinko create abc"
os::cmd::expect_success "_output/oshinko get abc -o yaml | grep 'WorkersCount: 1'"
os::cmd::expect_success "_output/oshinko get abc -o yaml | grep 'MastersCount: 1'"
# could still be creating so use 'until'
os::cmd::try_until_text "_output/oshinko get abc -o yaml" "WorkersCount: 1"
os::cmd::try_until_text "_output/oshinko get abc -o yaml" "MastersCount: 1"
os::cmd::expect_success "_output/oshinko delete abc"
# workers flag
os::cmd::expect_success "_output/oshinko create def --workers=-1"
os::cmd::expect_success "_output/oshinko get def -o yaml | grep 'WorkersCount: 1'"
os::cmd::try_until_text "_output/oshinko get def -o yaml" "WorkersCount: 1"
os::cmd::expect_success "_output/oshinko delete def"
os::cmd::expect_success "_output/oshinko create ghi --workers=2"
os::cmd::expect_success "_output/oshinko get ghi -o yaml | grep 'WorkersCount: 2'"
os::cmd::try_until_text "_output/oshinko get ghi -o yaml" "WorkersCount: 2"
os::cmd::expect_success "_output/oshinko delete ghi"
os::cmd::expect_success "_output/oshinko create sam --workers=0"
os::cmd::expect_success "_output/oshinko get sam -o yaml | grep 'WorkersCount: 0'"
os::cmd::try_until_text "_output/oshinko get sam -o yaml" "WorkersCount: 0"
os::cmd::expect_success "_output/oshinko delete sam"
# masters flag
os::cmd::expect_success "_output/oshinko create jkl --masters=-1"
os::cmd::expect_success "_output/oshinko get jkl -o yaml | grep 'MastersCount: 1'"
os::cmd::try_until_text "_output/oshinko get jkl -o yaml" "MastersCount: 1"
os::cmd::expect_success "_output/oshinko delete jkl"
os::cmd::expect_success "_output/oshinko create jill --masters=0"
os::cmd::expect_success "_output/oshinko get jill -o yaml | grep 'MastersCount: 0'"
os::cmd::try_until_text "_output/oshinko get jill -o yaml" "MastersCount: 0"
os::cmd::expect_success "_output/oshinko delete jill"
os::cmd::expect_failure_and_text "_output/oshinko create mno --masters=2" "cluster configuration must have a master count of 0 or 1"
# workerconfig
os::cmd::expect_success "oc create configmap testmap"
os::cmd::expect_failure_and_text "_output/oshinko create mno --workerconfig=jack" "unable to find spark configuration 'jack'"
os::cmd::expect_success "_output/oshinko create mno --workerconfig=testmap"
os::cmd::expect_success "_output/oshinko delete mno"
# masterconfig
os::cmd::expect_failure_and_text "_output/oshinko create mno --masterconfig=jack" "unable to find spark configuration 'jack'"
os::cmd::expect_success "_output/oshinko create pqr --masterconfig=testmap"
os::cmd::expect_success "_output/oshinko delete pqr"
# create against existing cluster
os::cmd::expect_success "_output/oshinko create sally"
os::cmd::expect_failure_and_text "_output/oshinko create sally" "cluster 'sally' already exists"
# create against incomplete clusters
os::cmd::expect_success "oc delete service sally-ui"
os::cmd::expect_failure_and_text "_output/oshinko create sally" "cluster 'sally' already exists \(incomplete\)"
os::cmd::expect_success "_output/oshinko delete sally"
# metrics
os::cmd::expect_success "_output/oshinko create klondike --metrics=true"
os::cmd::try_until_success "oc get service klondike-metrics"
os::cmd::try_until_text "oc log dc/klondike-m" "with jolokia metrics"
os::cmd::expect_success "_output/oshinko delete klondike"
os::cmd::expect_success "_output/oshinko create klondike0 --metrics=jolokia"
os::cmd::try_until_success "oc get service klondike0-metrics"
os::cmd::try_until_text "oc log dc/klondike0-m" "with jolokia metrics"
os::cmd::expect_success "_output/oshinko delete klondike0"
os::cmd::expect_success "_output/oshinko create klondike1 --metrics=prometheus"
os::cmd::try_until_success "oc get service klondike1-metrics"
os::cmd::try_until_text "oc log dc/klondike1-m" "with prometheus metrics"
os::cmd::expect_success "_output/oshinko delete klondike1"
os::cmd::expect_success "_output/oshinko create klondike2"
os::cmd::try_until_success "oc get service klondike2-ui"
os::cmd::expect_failure "oc get service klondike2-metrics"
os::cmd::expect_success "_output/oshinko delete klondike2"
os::cmd::expect_success "_output/oshinko create klondike3 --metrics=false"
os::cmd::try_until_success "oc get service klondike3-ui"
os::cmd::expect_failure "oc get service klondike3-metrics"
os::cmd::expect_success "_output/oshinko delete klondike3"
os::cmd::expect_failure_and_text "_output/oshinko create klondike4 --metrics=notgonnadoit" "must be 'true', 'false', 'jolokia', or 'prometheus'"
#exposeui
os::cmd::expect_success "_output/oshinko create charlie --exposeui=false"
os::cmd::expect_success_and_text "_output/oshinko get -d charlie" "charlie.*<no route>"
os::cmd::expect_success "_output/oshinko delete charlie"
os::cmd::expect_success "_output/oshinko create charlie2 --exposeui=true"
os::cmd::expect_success_and_text "_output/oshinko get -d charlie2" "charlie2-ui-route"
os::cmd::expect_success "_output/oshinko delete charlie2"
os::cmd::expect_success "_output/oshinko create charlie3"
os::cmd::expect_success_and_text "_output/oshinko get -d charlie3" "charlie3-ui-route"
os::cmd::expect_success "_output/oshinko delete charlie3"
os::cmd::expect_failure_and_text "_output/oshinko create charlie4 --exposeui=notgonnadoit" "must be a boolean"
# storedconfig
oc create configmap masterconfig
oc create configmap workerconfig
oc create configmap clusterconfig \
--from-literal=workercount=3 \
--from-literal=mastercount=0 \
--from-literal=sparkmasterconfig=masterconfig \
--from-literal=sparkworkerconfig=workerconfig \
--from-literal=exposeui=false \
--from-literal=metrics=true \
--from-literal=sparkimage=myimage
os::cmd::expect_failure_and_text "_output/oshinko create chicken --storedconfig=jack" "named config 'jack' does not exist"
os::cmd::expect_success "_output/oshinko create chicken --storedconfig=clusterconfig"
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "WorkersCount: 3"
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "MastersCount: 0"
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "ExposeWebUI: \"false\""
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "Metrics: \"true\""
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "ConfigName: clusterconfig"
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "SparkImage: myimage"
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "Image: myimage"
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "SparkMasterConfig: masterconfig"
os::cmd::expect_success_and_text "_output/oshinko get chicken -o yaml" "SparkWorkerConfig: workerconfig"
os::cmd::try_until_text "_output/oshinko get chicken -o yaml" "WorkersCount: 3"
os::cmd::try_until_text "_output/oshinko get chicken -o yaml" "MastersCount: 0"
os::cmd::expect_success "_output/oshinko delete chicken"
oc create configmap clusterconfig2 --from-literal=metrics=jolokia
os::cmd::expect_success "_output/oshinko create chicken2 --storedconfig=clusterconfig2"
os::cmd::expect_success_and_text "_output/oshinko get chicken2 -o yaml" "Metrics: jolokia"
os::cmd::expect_success "_output/oshinko delete chicken2"
oc create configmap clusterconfig3 --from-literal=metrics=prometheus
os::cmd::expect_success "_output/oshinko create chicken3 --storedconfig=clusterconfig3"
os::cmd::expect_success_and_text "_output/oshinko get chicken3 -o yaml" "Metrics: prometheus"
os::cmd::expect_success "_output/oshinko delete chicken3"
oc create configmap clusterconfig4 --from-literal=bogusfield=bogus
os::cmd::expect_failure_and_text "_output/oshinko create chicken4 --storedconfig=clusterconfig4" "'clusterconfig4.bogusfield', unrecognized configuration field"
os::cmd::expect_success "_output/oshinko create egg"
os::cmd::expect_success_and_text "_output/oshinko get egg -o yaml" "WorkersCount: 1"
os::cmd::expect_success_and_text "_output/oshinko get egg -o yaml" "MastersCount: 1"
os::cmd::expect_success_and_text "_output/oshinko get egg -o yaml" "ExposeWebUI: \"true\""
os::cmd::expect_success_and_text "_output/oshinko get egg -o yaml" "Metrics: \"false\""
os::cmd::expect_success_and_text "_output/oshinko get egg -o yaml" "SparkImage: radanalyticsio/openshift-spark"
os::cmd::expect_success_and_text "_output/oshinko get egg -o yaml" "Image: radanalyticsio/openshift-spark"
os::cmd::try_until_text "_output/oshinko get egg -o yaml" "WorkersCount: 1"
os::cmd::try_until_text "_output/oshinko get egg -o yaml" "MastersCount: 1"
os::cmd::expect_success "_output/oshinko delete egg"
oc create configmap default-oshinko-cluster-config --from-literal=workercount=2
os::cmd::expect_success "_output/oshinko create readdefault"
os::cmd::expect_success_and_text "_output/oshinko get readdefault -o yaml" "ConfigName: default-oshinko-cluster-config"
os::cmd::expect_success_and_text "_output/oshinko get readdefault -o yaml" "WorkersCount: 2"
os::cmd::expect_success "_output/oshinko delete readdefault"
os::cmd::expect_success "_output/oshinko create readdefault2 --storedconfig=default-oshinko-cluster-config"
os::cmd::expect_success_and_text "_output/oshinko get readdefault2 -o yaml" "ConfigName: default-oshinko-cluster-config"
os::cmd::expect_success_and_text "_output/oshinko get readdefault2 -o yaml" "WorkersCount: 2"
os::cmd::expect_success "_output/oshinko delete readdefault2"
oc delete configmap default-oshinko-cluster-config
os::cmd::expect_success "_output/oshinko create readdefault3 --storedconfig=default-oshinko-cluster-config"
os::cmd::expect_success_and_text "_output/oshinko get readdefault3 -o yaml" "WorkersCount: 1"
os::cmd::expect_success "_output/oshinko delete readdefault3"
os::cmd::expect_success "_output/oshinko create hawk --workers=1 --masters=1 --storedconfig=clusterconfig"
os::cmd::expect_success_and_text "_output/oshinko get hawk -o yaml" "WorkersCount: 1"
os::cmd::expect_success_and_text "_output/oshinko get hawk -o yaml" "MastersCount: 1"
os::cmd::try_until_text "_output/oshinko get hawk -o yaml" "WorkersCount: 1"
os::cmd::try_until_text "_output/oshinko get hawk -o yaml" "MastersCount: 1"
os::cmd::expect_success "_output/oshinko delete hawk"
# image
os::cmd::expect_success "_output/oshinko create cordial --image=someotherimage"
os::cmd::expect_success_and_text "_output/oshinko get cordial -o yaml" "SparkImage: someotherimage"
os::cmd::expect_success_and_text "_output/oshinko get cordial -o yaml" "Image: someotherimage"
os::cmd::expect_success "_output/oshinko delete cordial"
# flags for ephemeral not valid
os::cmd::expect_failure_and_text "_output/oshinko create mouse --app=bill" "unknown flag"
os::cmd::expect_failure_and_text "_output/oshinko create mouse -e" "unknown shorthand flag"
os::cmd::expect_failure_and_text "_output/oshinko create mouse --ephemeral=true" "unknown flag"
os::test::junit::declare_suite_end
| crobby/oshinko-cli | test/cmd/create.sh | Shell | apache-2.0 | 11,563 |
#!/bin/bash
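# Only publish when HEAD is exactly on a tag, i.e. "git describe --exact-match" succeeds.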
git describe --exact-match
if [[ ! $? -eq 0 ]];then
echo "Nothing to publish, exiting.."
exit 0;
fi
if [[ -z "$NPM_TOKEN" ]];then
echo "No NPM_TOKEN, exiting.."
exit 0;
fi
if [[ $(git describe --exact-match 2> /dev/null || :) =~ -canary ]];
then
echo "Publishing canary"
yarn run lerna publish from-git --npm-tag canary --no-git-reset --no-verify-access --yes
# Make sure to exit script with code 1 if publish failed
if [[ ! $? -eq 0 ]];then
exit 1;
fi
else
echo "Did not publish canary"
fi
if [[ ! $(git describe --exact-match 2> /dev/null || :) =~ -canary ]];then
echo "Publishing stable"
yarn run lerna publish from-git --no-git-reset --no-verify-access --yes
# Make sure to exit script with code 1 if publish failed
if [[ ! $? -eq 0 ]];then
exit 1;
fi
else
echo "Did not publish stable"
fi
| azukaru/next.js | scripts/publish-release.sh | Shell | mit | 853 |
#!/bin/sh
# Copyright (C) 2002-2014 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Make sure it's OK to install a library under different conditions
# in different directories. PR/285.
required='libtoolize'
. test-init.sh
cat >>configure.ac <<'END'
AM_CONDITIONAL([COND1], [true])
AM_CONDITIONAL([COND2], [false])
AC_PROG_CC
AM_PROG_AR
AC_PROG_LIBTOOL
AC_OUTPUT
END
cat >Makefile.am <<'END'
if COND1
lib_LTLIBRARIES = liba.la
endif
if COND2
pkglib_LTLIBRARIES = liba.la
endif
END
libtoolize
$ACLOCAL
$AUTOMAKE --add-missing
# am_liba_la_rpath is defined twice, and used once
test 3 -eq $(grep -c 'am_liba_la_rpath' Makefile.in)
:
| kuym/openocd | tools/automake-1.15/t/libtool6.sh | Shell | gpl-2.0 | 1,248 |
#!/bin/bash
rm -rf *.so
OPT="-g -O3" python setup.py build_ext --inplace
python setup.py clean
| brianjimenez/lightdock | lightdock/scoring/vdw/energy/c/compile.sh | Shell | gpl-3.0 | 96 |
#!/bin/bash
# Author: Fred Brooker <[email protected]>
# URL: http://fredbrooker.cz/
# "touch .ignorebackup" in folders to be skipped
# CHANGE THIS VARIABLE TO MATCH YOUR BACKUP MEDIA LOCATION!
P='/media/backup'
if [ -d "$P" ]
then
cd ~
sudo tar cvpzf "$P/home-backup-`date +%d.%m.%Y`.tar.gz" --one-file-system --exclude-tag-under=.ignorebackup --exclude=/.cache --exclude-caches-all .
else
echo -e "Invalid folder: $P\n"
exit 1
fi
echo -e "\nDone.\n"
exit 0
| gauravdatir/linux-bash-scripts | backup-home.sh | Shell | unlicense | 475 |
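# Generate Python protobuf bindings for webapi.proto into the droneapi library directory.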
DESTDIR=droneapi/lib
protoc --python_out=$DESTDIR --proto_path=../droneapi-protobuf/src ../droneapi-protobuf/src/webapi.proto
| Blackflappybird/droneapi-python | compile-protobufs.sh | Shell | apache-2.0 | 126 |
#!/bin/sh
#
# This script runs as root through sudo without the need for a password,
# so it needs to make sure it can't be abused.
#
# make sure we have a secure PATH
PATH=/bin:/usr/bin
export PATH
if [ $# -ne 2 ]; then
echo "usage: $0 <image-file> <block device>"
exit 1
fi
IMAGE="$1"
OUTPUT="$2"
# workaround for issue on 12.04 LTS, use LANG=C
echo ${IMAGE} | LANG=C egrep '^/[A-Za-z0-9_/-]+/image$' > /dev/null 2>&1
if [ $? -ne 0 ]; then
echo "ERROR: illegal image file: ${IMAGE}"
exit 1
fi
echo ${OUTPUT} | egrep '^/dev/[svx]+d[a-z0-9]+$' > /dev/null 2>&1
if [ $? -ne 0 ]; then
echo "ERROR: illegal device: ${OUTPUT}"
exit 1
fi
if [ ! -b ${OUTPUT} ]; then
echo "ERROR: not a device: ${OUTPUT}"
exit 1
fi
# copy image to block device with 1 MB block size
tar -xzf ${IMAGE} -O root.img | dd bs=1M of=${OUTPUT}
| tracyde/bosh-aws-cpi-release | src/bosh_aws_cpi/scripts/stemcell-copy.sh | Shell | apache-2.0 | 834 |
# Copyright (C) 2014 Benny Bobaganoosh
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#Move the res directory back to its initial position
mv ./build/res/ ./
#Add everything to a new commit and push it to github
git add -A
git commit
git push origin master
#Move the res directory back to the working "build" directory
mv ./res/ ./build/
| AGoodGameMaker/https-github.com-BennyQBD-3DEngineCpp | Unix-GitPush.sh | Shell | apache-2.0 | 836 |
#!/bin/bash
# ------------------------------------------------------------------------
#
# Copyright 2005-2015 WSO2, Inc. (http://wso2.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
#
# ------------------------------------------------------------------------
iaas=$1
host_ip="localhost"
host_port=9443
product_type="esb"
product_version="490"
product="wso2${product_type}-${product_version}"
set -e
echo "Undeploying application..."
curl -X POST -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:${host_port}/api/applications/${product}-application/undeploy
sleep 5
echo "Deleting application..."
curl -X DELETE -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:${host_port}/api/applications/${product}-application
sleep 1
echo "Removing application policies..."
curl -X DELETE -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:${host_port}/api/applicationPolicies/application-policy-1
echo "Removing deployment policies..."
curl -X DELETE -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:${host_port}/api/deploymentPolicies/deployment-policy-1
echo "Removing network partitions..."
curl -X DELETE -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:${host_port}/api/networkPartitions/network-partition-${iaas}
echo "Removing autoscale policies..."
curl -X DELETE -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:${host_port}/api/autoscalingPolicies/autoscaling-policy-1
echo "Removing cartridge Groups ..."
curl -X DELETE -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:9443/api/cartridgeGroups/${product}-group
echo "Removing cartridges..."
curl -X DELETE -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:${host_port}/api/cartridges/${product}-manager
curl -X DELETE -H "Content-Type: application/json" -k -v -u admin:admin https://${host_ip}:${host_port}/api/cartridges/${product}-worker
| gayangunarathne/private-paas-cartridges | wso2esb/4.9.0/samples/applications/wso2esb-490-application/scripts/common/undeploy.sh | Shell | apache-2.0 | 2,511 |
#!/bin/bash
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Updates maven POM artifacts with version.
# Usage
# ./maven/maven-pom-version.sh VERSION
# Example
# ./maven/maven-pom-version.sh 0.14.1
if [ "$1" = "" ]; then
echo "ERROR: heron version missing. Usage './maven/maven-pom-version.sh VERSION' "
exit 1
fi
cat ./maven/heron-no-kryo.template.pom | \
sed "s/VERSION/$1/g" | \
sed "s/ARTIFACT_ID/heron-api/g" | \
sed "s/NAME/heron-api/g" | \
sed "s/DESCRIPTION/Heron API/g" \
>> ./heron-api-$1.pom
cat ./maven/heron-no-kryo.template.pom | \
sed "s/VERSION/$1/g" | \
sed "s/ARTIFACT_ID/heron-spi/g" | \
sed "s/NAME/heron-spi/g" | \
sed "s/DESCRIPTION/Heron SPI/g" \
>> ./heron-spi-$1.pom
cat ./maven/heron-with-kryo.template.pom | \
sed "s/VERSION/$1/g" | \
sed "s/ARTIFACT_ID/heron-storm/g" | \
sed "s/NAME/heron-storm/g" | \
sed "s/DESCRIPTION/Heron Storm/g" \
>> ./heron-storm-$1.pom
cat ./maven/heron-with-kryo.template.pom | \
sed "s/VERSION/$1/g" | \
sed "s/ARTIFACT_ID/heron-simulator/g" | \
sed "s/NAME/heron-simulator/g" | \
sed "s/DESCRIPTION/Heron Simulator/g" \
>> ./heron-simulator-$1.pom
cat ./maven/heron-kafka.template.pom | \
sed "s/VERSION/$1/g" | \
sed "s/ARTIFACT_ID/heron-kafka-spout/g" | \
sed "s/NAME/heron-kafka-spout/g" | \
sed "s/DESCRIPTION/Heron Kafka Spout/g" \
>> ./heron-kafka-spout-$1.pom
cat ./maven/heron-kafka.template.pom | \
sed "s/VERSION/$1/g" | \
sed "s/ARTIFACT_ID/heron-kafka-bolt/g" | \
sed "s/NAME/heron-kafka-bolt/g" | \
sed "s/DESCRIPTION/Heron Kafka Bolt/g" \
>> ./heron-kafka-bolt-$1.pom
| twitter/heron | release/maven/maven-pom-version.sh | Shell | apache-2.0 | 2,174 |
#!/bin/bash
exitError()
{
echo "ERROR $1: $3" 1>&2
echo "ERROR LOCATION=$0" 1>&2
echo "ERROR LINE=$2" 1>&2
exit $1
}
cmakeConfigure()
{
local idir=$1
# construct cmake arguments
local CMAKEARGS=(..
"-DCMAKE_INSTALL_PREFIX=${idir}"
"-DBoost_NO_SYSTEM_PATHS=TRUE" # http://stackoverflow.com/a/13204207/592024
"-DBoost_NO_BOOST_CMAKE=TRUE"
"-DBOOST_ROOT=${BOOST_PATH}"
)
local logfile=`pwd`/cmake.log
echo "CMAKE ${CMAKEARGS[@]}"
cmake "${CMAKEARGS[@]}" 2>&1 1>> ${logfile}
}
echo "TEST $@"
TEMP=`getopt -o h,f:,i:,z --long fcompiler:,idir:,local \
-n 'build' -- "$@"`
if [ $? -ne 0 ]
then
exitError 4440 "Wrong options $@"
fi
eval set -- "$TEMP"
while true; do
case "$1" in
--idir|-i) install_dir="$2"; shift 2;;
--local) install_local="yes"; shift;;
-z) clean_build="yes"; shift;;
-- ) shift; break ;;
* ) break ;;
esac
done
base_path=$PWD
build_dir=$base_path/build
mkdir -p $build_dir
if [[ "${clean_build}" == "yes" ]]; then
rm -rf ${build_dir}
fi
mkdir -p $build_dir
cd $build_dir
if [[ ${install_local} == "yes" ]]; then
install_dir=${base_path}/install
fi
cmakeConfigure "${install_dir}"
if [ $? -ne 0 ]; then
exitError 4430 ${LINENO} "Unable to configure cmake"
fi
make install
if [ $? -ne 0 ]; then
exitError 4420 ${LINENO} "Unable to run 'make install'"
fi
| clementval/serialbox | test/build.sh | Shell | bsd-2-clause | 1,456 |
#!/bin/bash
# Initialization script for the Bamboo web application
getopts :fh FLAG
case $FLAG in
f) # force composer install
# Copies the parameters dist file to the actual parameters.yml.
# If you run composer manually, you will benefit from Incenteev
# ParameterHandler, which will interactively ask you for
# the parameters to be copied.
if [ ! -e app/config/parameters.yml ]; then
cp app/config/parameters.dist.yml app/config/parameters.yml
fi
# Firing up composer. Better to invoke the INSTALL than an UPDATE
sh -c 'composer install --no-interaction --prefer-source'
;;
h) # help
echo -e \\n"Usage: $0 [-f]: initializes a Bamboo store"
echo -e \\n"Use the -f flag to force composer install"
;;
esac
# Creating database schema and tables
/usr/bin/env php app/console --no-interaction doc:dat:cre
/usr/bin/env php app/console --no-interaction doc:sch:cre
/usr/bin/env php app/console --no-interaction doctrine:fixtures:load --fixtures="src/Elcodi/Fixtures"
# Add geographic information by ISO code. Adding "Spain" as a reference
/usr/bin/env php app/console elcodi:locations:populate ES
# Loads elcodi plugins. See Elcodi\Component\Plugin\Services\PluginManager
/usr/bin/env php app/console elcodi:plugins:load
# Enables the store and makes it visible. Also enables the default template
/usr/bin/env php app/console elcodi:configuration:set store.enabled 1
/usr/bin/env php app/console elcodi:configuration:set store.under_construction 0
/usr/bin/env php app/console elcodi:configuration:set store.template "\"StoreTemplateBundle\""
# Assets & Assetic
/usr/bin/env php app/console --no-interaction assets:install web --symlink
/usr/bin/env php app/console --no-interaction assetic:dump
| atresmediahf/bamboo | app/install.sh | Shell | mit | 1,805 |
#!/bin/bash
FN="pd.vitis.vinifera_3.12.0.tar.gz"
URLS=(
"https://bioconductor.org/packages/3.10/data/annotation/src/contrib/pd.vitis.vinifera_3.12.0.tar.gz"
"https://bioarchive.galaxyproject.org/pd.vitis.vinifera_3.12.0.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-pd.vitis.vinifera/bioconductor-pd.vitis.vinifera_3.12.0_src_all.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-pd.vitis.vinifera/bioconductor-pd.vitis.vinifera_3.12.0_src_all.tar.gz"
)
MD5="104a8ac33a9baa25a61f953679795696"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $STAGING
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in ${URLS[@]}; do
curl $URL > $TARBALL
[[ $? == 0 ]] || continue
# Platform-specific md5sum checks.
if [[ $(uname -s) == "Linux" ]]; then
if md5sum -c <<<"$MD5 $TARBALL"; then
SUCCESS=1
break
fi
else if [[ $(uname -s) == "Darwin" ]]; then
if [[ $(md5 $TARBALL | cut -f4 -d " ") == "$MD5" ]]; then
SUCCESS=1
break
fi
fi
fi
done
if [[ $SUCCESS != 1 ]]; then
echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
printf '%s\n' "${URLS[@]}"
exit 1
fi
# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL
rm $TARBALL
rmdir $STAGING
| Luobiny/bioconda-recipes | recipes/bioconductor-pd.vitis.vinifera/post-link.sh | Shell | mit | 1,466 |
#!/bin/bash
FN="pd.mogene.1.0.st.v1_3.14.1.tar.gz"
URLS=(
"https://bioconductor.org/packages/3.10/data/annotation/src/contrib/pd.mogene.1.0.st.v1_3.14.1.tar.gz"
"https://bioarchive.galaxyproject.org/pd.mogene.1.0.st.v1_3.14.1.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-pd.mogene.1.0.st.v1/bioconductor-pd.mogene.1.0.st.v1_3.14.1_src_all.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-pd.mogene.1.0.st.v1/bioconductor-pd.mogene.1.0.st.v1_3.14.1_src_all.tar.gz"
)
MD5="c3767385af3b9a120c777adb6d6e3364"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $STAGING
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in ${URLS[@]}; do
curl $URL > $TARBALL
[[ $? == 0 ]] || continue
# Platform-specific md5sum checks.
if [[ $(uname -s) == "Linux" ]]; then
if md5sum -c <<<"$MD5 $TARBALL"; then
SUCCESS=1
break
fi
else if [[ $(uname -s) == "Darwin" ]]; then
if [[ $(md5 $TARBALL | cut -f4 -d " ") == "$MD5" ]]; then
SUCCESS=1
break
fi
fi
fi
done
if [[ $SUCCESS != 1 ]]; then
echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
printf '%s\n' "${URLS[@]}"
exit 1
fi
# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL
rm $TARBALL
rmdir $STAGING
| Luobiny/bioconda-recipes | recipes/bioconductor-pd.mogene.1.0.st.v1/post-link.sh | Shell | mit | 1,480 |
#!/bin/bash
FN="pd.moex.1.0.st.v1_3.14.1.tar.gz"
URLS=(
"https://bioconductor.org/packages/3.10/data/annotation/src/contrib/pd.moex.1.0.st.v1_3.14.1.tar.gz"
"https://bioarchive.galaxyproject.org/pd.moex.1.0.st.v1_3.14.1.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-pd.moex.1.0.st.v1/bioconductor-pd.moex.1.0.st.v1_3.14.1_src_all.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-pd.moex.1.0.st.v1/bioconductor-pd.moex.1.0.st.v1_3.14.1_src_all.tar.gz"
)
MD5="57427e63b2d44258c12d796eada1897b"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $STAGING
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in ${URLS[@]}; do
curl $URL > $TARBALL
[[ $? == 0 ]] || continue
# Platform-specific md5sum checks.
if [[ $(uname -s) == "Linux" ]]; then
if md5sum -c <<<"$MD5 $TARBALL"; then
SUCCESS=1
break
fi
else if [[ $(uname -s) == "Darwin" ]]; then
if [[ $(md5 $TARBALL | cut -f4 -d " ") == "$MD5" ]]; then
SUCCESS=1
break
fi
fi
fi
done
if [[ $SUCCESS != 1 ]]; then
echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
printf '%s\n' "${URLS[@]}"
exit 1
fi
# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL
rm $TARBALL
rmdir $STAGING
| Luobiny/bioconda-recipes | recipes/bioconductor-pd.moex.1.0.st.v1/post-link.sh | Shell | mit | 1,466 |
#!/bin/sh
# Copyright (C) 2010 Eric Day
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#
# * The names of its contributors may not be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Kill any leftover processes from before
pids=`ps -ef|grep drizzled|grep 12345|awk '{print $2}'`
if [ "$pids" != "" ]
then
kill -9 $pids
sleep 1
fi
datadir=$1
./drizzled/drizzled \
--datadir=$datadir \
--plugin-add=auth_ldap \
--auth-ldap-uri=ldap://localhost:12321/ \
--auth-ldap-bind-dn="cn=root,dc=drizzle,dc=org" \
--auth-ldap-bind-password=testldap \
--auth-ldap-base-dn="dc=drizzle,dc=org" \
--auth-ldap-cache-timeout=1 \
--mysql-protocol-port=12345 \
--drizzle-protocol.port=12346 \
--pid-file=pid &
sleep 3
failed=0
for x in 1 2
do
echo
echo "Test good login:"
echo "SELECT 'SUCCESS';" | ./client/drizzle -u user -Pdrizzlepass -p 12345
if [ $? -ne 0 ]
then
failed=1
fi
echo
echo "Test bad password:"
echo "SELECT 'FAIL';" | ./client/drizzle -u user -Pbadpass -p 12345
if [ $? -ne 1 ]
then
failed=1
fi
echo
echo "Test no password:"
echo "SELECT 'FAIL';" | ./client/drizzle -u user -p 12345
if [ $? -ne 1 ]
then
failed=1
fi
echo
echo "Test bad user:"
echo "SELECT 'FAIL';" | ./client/drizzle -u baduser -Pdrizzlepass -p 12345
if [ $? -ne 1 ]
then
failed=1
fi
echo
echo "Test bad user with no password:"
echo "SELECT 'FAIL';" | ./client/drizzle -u baduser -p 12345
if [ $? -ne 1 ]
then
failed=1
fi
# sleep here so ldap cache has time to clear
sleep 2
done
kill `cat $datadir/pid`
sleep 2
echo
if [ $failed -ne 0 ]
then
echo "At least one test failed, see error messages above"
exit 1
fi
echo "All tests passed successfully!"
exit 0
| jeppeter/drizzlebr | plugin/auth_ldap/test_ldap.sh | Shell | gpl-2.0 | 3,075 |
#!/bin/sh
#
# Copyright (c) 2006 Junio C Hamano
#
test_description='git grep various.
'
. ./test-lib.sh
cat >hello.c <<EOF
#include <stdio.h>
int main(int argc, const char **argv)
{
printf("Hello world.\n");
return 0;
/* char ?? */
}
EOF
test_expect_success setup '
{
echo foo mmap bar
echo foo_mmap bar
echo foo_mmap bar mmap
echo foo mmap bar_mmap
echo foo_mmap bar mmap baz
} >file &&
{
echo Hello world
echo HeLLo world
echo Hello_world
echo HeLLo_world
} >hello_world &&
{
echo "a+b*c"
echo "a+bc"
echo "abc"
} >ab &&
echo vvv >v &&
echo ww w >w &&
echo x x xx x >x &&
echo y yy >y &&
echo zzz > z &&
mkdir t &&
echo test >t/t &&
echo vvv >t/v &&
mkdir t/a &&
echo vvv >t/a/v &&
{
echo "line without leading space1"
echo " line with leading space1"
echo " line with leading space2"
echo " line with leading space3"
echo "line without leading space2"
} >space &&
git add . &&
test_tick &&
git commit -m initial
'
test_expect_success 'grep should not segfault with a bad input' '
test_must_fail git grep "("
'
for H in HEAD ''
do
case "$H" in
HEAD) HC='HEAD:' L='HEAD' ;;
'') HC= L='in working tree' ;;
esac
test_expect_success "grep -w $L" '
{
echo ${HC}file:1:foo mmap bar
echo ${HC}file:3:foo_mmap bar mmap
echo ${HC}file:4:foo mmap bar_mmap
echo ${HC}file:5:foo_mmap bar mmap baz
} >expected &&
git -c grep.linenumber=false grep -n -w -e mmap $H >actual &&
test_cmp expected actual
'
test_expect_success "grep -w $L" '
{
echo ${HC}file:1:foo mmap bar
echo ${HC}file:3:foo_mmap bar mmap
echo ${HC}file:4:foo mmap bar_mmap
echo ${HC}file:5:foo_mmap bar mmap baz
} >expected &&
git -c grep.linenumber=true grep -w -e mmap $H >actual &&
test_cmp expected actual
'
test_expect_success "grep -w $L" '
{
echo ${HC}file:foo mmap bar
echo ${HC}file:foo_mmap bar mmap
echo ${HC}file:foo mmap bar_mmap
echo ${HC}file:foo_mmap bar mmap baz
} >expected &&
git -c grep.linenumber=true grep --no-line-number -w -e mmap $H >actual &&
test_cmp expected actual
'
test_expect_success "grep -w $L (w)" '
: >expected &&
test_must_fail git grep -n -w -e "^w" >actual &&
test_cmp expected actual
'
test_expect_success "grep -w $L (x)" '
{
echo ${HC}x:1:x x xx x
} >expected &&
git grep -n -w -e "x xx* x" $H >actual &&
test_cmp expected actual
'
test_expect_success "grep -w $L (y-1)" '
{
echo ${HC}y:1:y yy
} >expected &&
git grep -n -w -e "^y" $H >actual &&
test_cmp expected actual
'
test_expect_success "grep -w $L (y-2)" '
: >expected &&
if git grep -n -w -e "^y y" $H >actual
then
echo should not have matched
cat actual
false
else
test_cmp expected actual
fi
'
test_expect_success "grep -w $L (z)" '
: >expected &&
if git grep -n -w -e "^z" $H >actual
then
echo should not have matched
cat actual
false
else
test_cmp expected actual
fi
'
test_expect_success "grep $L (t-1)" '
echo "${HC}t/t:1:test" >expected &&
git grep -n -e test $H >actual &&
test_cmp expected actual
'
test_expect_success "grep $L (t-2)" '
echo "${HC}t:1:test" >expected &&
(
cd t &&
git grep -n -e test $H
) >actual &&
test_cmp expected actual
'
test_expect_success "grep $L (t-3)" '
echo "${HC}t/t:1:test" >expected &&
(
cd t &&
git grep --full-name -n -e test $H
) >actual &&
test_cmp expected actual
'
test_expect_success "grep -c $L (no /dev/null)" '
! git grep -c test $H | grep /dev/null
'
test_expect_success "grep --max-depth -1 $L" '
{
echo ${HC}t/a/v:1:vvv
echo ${HC}t/v:1:vvv
echo ${HC}v:1:vvv
} >expected &&
git grep --max-depth -1 -n -e vvv $H >actual &&
test_cmp expected actual
'
test_expect_success "grep --max-depth 0 $L" '
{
echo ${HC}v:1:vvv
} >expected &&
git grep --max-depth 0 -n -e vvv $H >actual &&
test_cmp expected actual
'
test_expect_success "grep --max-depth 0 -- '*' $L" '
{
echo ${HC}t/a/v:1:vvv
echo ${HC}t/v:1:vvv
echo ${HC}v:1:vvv
} >expected &&
git grep --max-depth 0 -n -e vvv $H -- "*" >actual &&
test_cmp expected actual
'
test_expect_success "grep --max-depth 1 $L" '
{
echo ${HC}t/v:1:vvv
echo ${HC}v:1:vvv
} >expected &&
git grep --max-depth 1 -n -e vvv $H >actual &&
test_cmp expected actual
'
test_expect_success "grep --max-depth 0 -- t $L" '
{
echo ${HC}t/v:1:vvv
} >expected &&
git grep --max-depth 0 -n -e vvv $H -- t >actual &&
test_cmp expected actual
'
test_expect_success "grep --max-depth 0 -- . t $L" '
{
echo ${HC}t/v:1:vvv
echo ${HC}v:1:vvv
} >expected &&
git grep --max-depth 0 -n -e vvv $H -- . t >actual &&
test_cmp expected actual
'
test_expect_success "grep --max-depth 0 -- t . $L" '
{
echo ${HC}t/v:1:vvv
echo ${HC}v:1:vvv
} >expected &&
git grep --max-depth 0 -n -e vvv $H -- t . >actual &&
test_cmp expected actual
'
test_expect_success "grep $L with grep.extendedRegexp=false" '
echo "ab:a+bc" >expected &&
git -c grep.extendedRegexp=false grep "a+b*c" ab >actual &&
test_cmp expected actual
'
test_expect_success "grep $L with grep.extendedRegexp=true" '
echo "ab:abc" >expected &&
git -c grep.extendedRegexp=true grep "a+b*c" ab >actual &&
test_cmp expected actual
'
done
cat >expected <<EOF
file
EOF
test_expect_success 'grep -l -C' '
git grep -l -C1 foo >actual &&
test_cmp expected actual
'
cat >expected <<EOF
file:5
EOF
test_expect_success 'grep -c -C' '
git grep -c -C1 foo >actual &&
test_cmp expected actual
'
test_expect_success 'grep -L -C' '
git ls-files >expected &&
git grep -L -C1 nonexistent_string >actual &&
test_cmp expected actual
'
cat >expected <<EOF
file:foo mmap bar_mmap
EOF
test_expect_success 'grep -e A --and -e B' '
git grep -e "foo mmap" --and -e bar_mmap >actual &&
test_cmp expected actual
'
cat >expected <<EOF
file:foo_mmap bar mmap
file:foo_mmap bar mmap baz
EOF
test_expect_success 'grep ( -e A --or -e B ) --and -e B' '
git grep \( -e foo_ --or -e baz \) \
--and -e " mmap" >actual &&
test_cmp expected actual
'
cat >expected <<EOF
file:foo mmap bar
EOF
test_expect_success 'grep -e A --and --not -e B' '
git grep -e "foo mmap" --and --not -e bar_mmap >actual &&
test_cmp expected actual
'
test_expect_success 'grep should ignore GREP_OPTIONS' '
GREP_OPTIONS=-v git grep " mmap bar\$" >actual &&
test_cmp expected actual
'
test_expect_success 'grep -f, non-existent file' '
test_must_fail git grep -f patterns
'
cat >expected <<EOF
file:foo mmap bar
file:foo_mmap bar
file:foo_mmap bar mmap
file:foo mmap bar_mmap
file:foo_mmap bar mmap baz
EOF
cat >pattern <<EOF
mmap
EOF
test_expect_success 'grep -f, one pattern' '
git grep -f pattern >actual &&
test_cmp expected actual
'
cat >expected <<EOF
file:foo mmap bar
file:foo_mmap bar
file:foo_mmap bar mmap
file:foo mmap bar_mmap
file:foo_mmap bar mmap baz
t/a/v:vvv
t/v:vvv
v:vvv
EOF
cat >patterns <<EOF
mmap
vvv
EOF
test_expect_success 'grep -f, multiple patterns' '
git grep -f patterns >actual &&
test_cmp expected actual
'
test_expect_success 'grep, multiple patterns' '
git grep "$(cat patterns)" >actual &&
test_cmp expected actual
'
cat >expected <<EOF
file:foo mmap bar
file:foo_mmap bar
file:foo_mmap bar mmap
file:foo mmap bar_mmap
file:foo_mmap bar mmap baz
t/a/v:vvv
t/v:vvv
v:vvv
EOF
cat >patterns <<EOF
mmap
vvv
EOF
test_expect_success 'grep -f, ignore empty lines' '
git grep -f patterns >actual &&
test_cmp expected actual
'
test_expect_success 'grep -f, ignore empty lines, read patterns from stdin' '
git grep -f - <patterns >actual &&
test_cmp expected actual
'
cat >expected <<EOF
y:y yy
--
z:zzz
EOF
test_expect_success 'grep -q, silently report matches' '
>empty &&
git grep -q mmap >actual &&
test_cmp empty actual &&
test_must_fail git grep -q qfwfq >actual &&
test_cmp empty actual
'
# Create 1024 file names that sort between "y" and "z" to make sure
# the two files are handled by different calls to an external grep.
# This depends on MAXARGS in builtin-grep.c being 1024 or less.
c32="0 1 2 3 4 5 6 7 8 9 a b c d e f g h i j k l m n o p q r s t u v"
test_expect_success 'grep -C1, hunk mark between files' '
for a in $c32; do for b in $c32; do : >y-$a$b; done; done &&
git add y-?? &&
git grep -C1 "^[yz]" >actual &&
test_cmp expected actual
'
test_expect_success 'grep -C1 hunk mark between files' '
git grep -C1 "^[yz]" >actual &&
test_cmp expected actual
'
test_expect_success 'log grep setup' '
echo a >>file &&
test_tick &&
GIT_AUTHOR_NAME="With * Asterisk" \
GIT_AUTHOR_EMAIL="[email protected]" \
git commit -a -m "second" &&
echo a >>file &&
test_tick &&
git commit -a -m "third" &&
echo a >>file &&
test_tick &&
GIT_AUTHOR_NAME="Night Fall" \
GIT_AUTHOR_EMAIL="[email protected]" \
git commit -a -m "fourth"
'
test_expect_success 'log grep (1)' '
git log --author=author --pretty=tformat:%s >actual &&
( echo third ; echo initial ) >expect &&
test_cmp expect actual
'
test_expect_success 'log grep (2)' '
git log --author=" * " -F --pretty=tformat:%s >actual &&
( echo second ) >expect &&
test_cmp expect actual
'
test_expect_success 'log grep (3)' '
git log --author="^A U" --pretty=tformat:%s >actual &&
( echo third ; echo initial ) >expect &&
test_cmp expect actual
'
test_expect_success 'log grep (4)' '
git log --author="frotz\.com>$" --pretty=tformat:%s >actual &&
( echo second ) >expect &&
test_cmp expect actual
'
test_expect_success 'log grep (5)' '
git log --author=Thor -F --pretty=tformat:%s >actual &&
( echo third ; echo initial ) >expect &&
test_cmp expect actual
'
test_expect_success 'log grep (6)' '
git log --author=-0700 --pretty=tformat:%s >actual &&
>expect &&
test_cmp expect actual
'
test_expect_success 'log --grep --author implicitly uses all-match' '
# grep matches initial and second but not third
# author matches only initial and third
git log --author="A U Thor" --grep=s --grep=l --format=%s >actual &&
echo initial >expect &&
test_cmp expect actual
'
test_expect_success 'log with multiple --author uses union' '
git log --author="Thor" --author="Aster" --format=%s >actual &&
{
echo third && echo second && echo initial
} >expect &&
test_cmp expect actual
'
test_expect_success 'log with --grep and multiple --author uses all-match' '
git log --author="Thor" --author="Night" --grep=i --format=%s >actual &&
{
echo third && echo initial
} >expect &&
test_cmp expect actual
'
test_expect_success 'log with --grep and multiple --author uses all-match' '
git log --author="Thor" --author="Night" --grep=q --format=%s >actual &&
>expect &&
test_cmp expect actual
'
test_expect_success 'grep with CE_VALID file' '
git update-index --assume-unchanged t/t &&
rm t/t &&
test "$(git grep test)" = "t/t:test" &&
git update-index --no-assume-unchanged t/t &&
git checkout t/t
'
cat >expected <<EOF
hello.c=#include <stdio.h>
hello.c: return 0;
EOF
test_expect_success 'grep -p with userdiff' '
git config diff.custom.funcname "^#" &&
echo "hello.c diff=custom" >.gitattributes &&
git grep -p return >actual &&
test_cmp expected actual
'
cat >expected <<EOF
hello.c=int main(int argc, const char **argv)
hello.c: return 0;
EOF
test_expect_success 'grep -p' '
rm -f .gitattributes &&
git grep -p return >actual &&
test_cmp expected actual
'
cat >expected <<EOF
hello.c-#include <stdio.h>
hello.c=int main(int argc, const char **argv)
hello.c-{
hello.c- printf("Hello world.\n");
hello.c: return 0;
EOF
test_expect_success 'grep -p -B5' '
git grep -p -B5 return >actual &&
test_cmp expected actual
'
cat >expected <<EOF
hello.c=int main(int argc, const char **argv)
hello.c-{
hello.c- printf("Hello world.\n");
hello.c: return 0;
hello.c- /* char ?? */
hello.c-}
EOF
test_expect_success 'grep -W' '
git grep -W return >actual &&
test_cmp expected actual
'
cat >expected <<EOF
hello.c= printf("Hello world.\n");
hello.c: return 0;
hello.c- /* char ?? */
EOF
test_expect_success 'grep -W with userdiff' '
test_when_finished "rm -f .gitattributes" &&
git config diff.custom.xfuncname "(printf.*|})$" &&
echo "hello.c diff=custom" >.gitattributes &&
git grep -W return >actual &&
test_cmp expected actual
'
test_expect_success 'grep from a subdirectory to search wider area (1)' '
mkdir -p s &&
(
cd s && git grep "x x x" ..
)
'
test_expect_success 'grep from a subdirectory to search wider area (2)' '
mkdir -p s &&
(
cd s || exit 1
( git grep xxyyzz .. >out ; echo $? >status )
! test -s out &&
test 1 = $(cat status)
)
'
cat >expected <<EOF
hello.c:int main(int argc, const char **argv)
EOF
test_expect_success 'grep -Fi' '
git grep -Fi "CHAR *" >actual &&
test_cmp expected actual
'
test_expect_success 'outside of git repository' '
rm -fr non &&
mkdir -p non/git/sub &&
echo hello >non/git/file1 &&
echo world >non/git/sub/file2 &&
{
echo file1:hello &&
echo sub/file2:world
} >non/expect.full &&
echo file2:world >non/expect.sub &&
(
GIT_CEILING_DIRECTORIES="$(pwd)/non/git" &&
export GIT_CEILING_DIRECTORIES &&
cd non/git &&
test_must_fail git grep o &&
git grep --no-index o >../actual.full &&
		test_cmp ../expect.full ../actual.full &&
cd sub &&
test_must_fail git grep o &&
git grep --no-index o >../../actual.sub &&
test_cmp ../../expect.sub ../../actual.sub
) &&
echo ".*o*" >non/git/.gitignore &&
(
GIT_CEILING_DIRECTORIES="$(pwd)/non/git" &&
export GIT_CEILING_DIRECTORIES &&
cd non/git &&
test_must_fail git grep o &&
git grep --no-index --exclude-standard o >../actual.full &&
test_cmp ../expect.full ../actual.full &&
{
echo ".gitignore:.*o*"
cat ../expect.full
} >../expect.with.ignored &&
git grep --no-index --no-exclude o >../actual.full &&
test_cmp ../expect.with.ignored ../actual.full
)
'
test_expect_success 'inside git repository but with --no-index' '
rm -fr is &&
mkdir -p is/git/sub &&
echo hello >is/git/file1 &&
echo world >is/git/sub/file2 &&
echo ".*o*" >is/git/.gitignore &&
{
echo file1:hello &&
echo sub/file2:world
} >is/expect.unignored &&
{
echo ".gitignore:.*o*" &&
cat is/expect.unignored
} >is/expect.full &&
: >is/expect.empty &&
echo file2:world >is/expect.sub &&
(
cd is/git &&
git init &&
test_must_fail git grep o >../actual.full &&
test_cmp ../expect.empty ../actual.full &&
git grep --untracked o >../actual.unignored &&
test_cmp ../expect.unignored ../actual.unignored &&
git grep --no-index o >../actual.full &&
test_cmp ../expect.full ../actual.full &&
git grep --no-index --exclude-standard o >../actual.unignored &&
test_cmp ../expect.unignored ../actual.unignored &&
cd sub &&
test_must_fail git grep o >../../actual.sub &&
test_cmp ../../expect.empty ../../actual.sub &&
git grep --no-index o >../../actual.sub &&
test_cmp ../../expect.sub ../../actual.sub &&
git grep --untracked o >../../actual.sub &&
test_cmp ../../expect.sub ../../actual.sub
)
'
test_expect_success 'setup double-dash tests' '
cat >double-dash <<EOF &&
--
->
other
EOF
git add double-dash
'
cat >expected <<EOF
double-dash:->
EOF
test_expect_success 'grep -- pattern' '
git grep -- "->" >actual &&
test_cmp expected actual
'
test_expect_success 'grep -- pattern -- pathspec' '
git grep -- "->" -- double-dash >actual &&
test_cmp expected actual
'
test_expect_success 'grep -e pattern -- path' '
git grep -e "->" -- double-dash >actual &&
test_cmp expected actual
'
cat >expected <<EOF
double-dash:--
EOF
test_expect_success 'grep -e -- -- path' '
git grep -e -- -- double-dash >actual &&
test_cmp expected actual
'
cat >expected <<EOF
hello.c:int main(int argc, const char **argv)
hello.c: printf("Hello world.\n");
EOF
test_expect_success LIBPCRE 'grep --perl-regexp pattern' '
git grep --perl-regexp "\p{Ps}.*?\p{Pe}" hello.c >actual &&
test_cmp expected actual
'
test_expect_success LIBPCRE 'grep -P pattern' '
git grep -P "\p{Ps}.*?\p{Pe}" hello.c >actual &&
test_cmp expected actual
'
test_expect_success 'grep pattern with grep.extendedRegexp=true' '
>empty &&
test_must_fail git -c grep.extendedregexp=true \
grep "\p{Ps}.*?\p{Pe}" hello.c >actual &&
test_cmp empty actual
'
test_expect_success LIBPCRE 'grep -P pattern with grep.extendedRegexp=true' '
git -c grep.extendedregexp=true \
grep -P "\p{Ps}.*?\p{Pe}" hello.c >actual &&
test_cmp expected actual
'
test_expect_success LIBPCRE 'grep -P -v pattern' '
{
echo "ab:a+b*c"
echo "ab:a+bc"
} >expected &&
git grep -P -v "abc" ab >actual &&
test_cmp expected actual
'
test_expect_success LIBPCRE 'grep -P -i pattern' '
cat >expected <<-EOF &&
hello.c: printf("Hello world.\n");
EOF
git grep -P -i "PRINTF\([^\d]+\)" hello.c >actual &&
test_cmp expected actual
'
test_expect_success LIBPCRE 'grep -P -w pattern' '
{
echo "hello_world:Hello world"
echo "hello_world:HeLLo world"
} >expected &&
git grep -P -w "He((?i)ll)o" hello_world >actual &&
test_cmp expected actual
'
test_expect_success 'grep -G invalidpattern properly dies ' '
test_must_fail git grep -G "a["
'
test_expect_success 'grep -E invalidpattern properly dies ' '
test_must_fail git grep -E "a["
'
test_expect_success LIBPCRE 'grep -P invalidpattern properly dies ' '
test_must_fail git grep -P "a["
'
test_expect_success 'grep -G -E -F pattern' '
echo "ab:a+b*c" >expected &&
git grep -G -E -F "a+b*c" ab >actual &&
test_cmp expected actual
'
test_expect_success 'grep -E -F -G pattern' '
echo "ab:a+bc" >expected &&
git grep -E -F -G "a+b*c" ab >actual &&
test_cmp expected actual
'
test_expect_success 'grep -F -G -E pattern' '
echo "ab:abc" >expected &&
git grep -F -G -E "a+b*c" ab >actual &&
test_cmp expected actual
'
test_expect_success 'grep -G -F -P -E pattern' '
>empty &&
test_must_fail git grep -G -F -P -E "a\x{2b}b\x{2a}c" ab >actual &&
test_cmp empty actual
'
test_expect_success LIBPCRE 'grep -G -F -E -P pattern' '
echo "ab:a+b*c" >expected &&
git grep -G -F -E -P "a\x{2b}b\x{2a}c" ab >actual &&
test_cmp expected actual
'
test_config() {
git config "$1" "$2" &&
test_when_finished "git config --unset $1"
}
cat >expected <<EOF
hello.c<RED>:<RESET>int main(int argc, const char **argv)
hello.c<RED>-<RESET>{
<RED>--<RESET>
hello.c<RED>:<RESET> /* char ?? */
hello.c<RED>-<RESET>}
<RED>--<RESET>
hello_world<RED>:<RESET>Hello_world
hello_world<RED>-<RESET>HeLLo_world
EOF
test_expect_success 'grep --color, separator' '
test_config color.grep.context normal &&
test_config color.grep.filename normal &&
test_config color.grep.function normal &&
test_config color.grep.linenumber normal &&
test_config color.grep.match normal &&
test_config color.grep.selected normal &&
test_config color.grep.separator red &&
git grep --color=always -A1 -e char -e lo_w hello.c hello_world |
test_decode_color >actual &&
test_cmp expected actual
'
cat >expected <<EOF
hello.c:int main(int argc, const char **argv)
hello.c: /* char ?? */
hello_world:Hello_world
EOF
test_expect_success 'grep --break' '
git grep --break -e char -e lo_w hello.c hello_world >actual &&
test_cmp expected actual
'
cat >expected <<EOF
hello.c:int main(int argc, const char **argv)
hello.c-{
--
hello.c: /* char ?? */
hello.c-}
hello_world:Hello_world
hello_world-HeLLo_world
EOF
test_expect_success 'grep --break with context' '
git grep --break -A1 -e char -e lo_w hello.c hello_world >actual &&
test_cmp expected actual
'
cat >expected <<EOF
hello.c
int main(int argc, const char **argv)
/* char ?? */
hello_world
Hello_world
EOF
test_expect_success 'grep --heading' '
git grep --heading -e char -e lo_w hello.c hello_world >actual &&
test_cmp expected actual
'
cat >expected <<EOF
<BOLD;GREEN>hello.c<RESET>
2:int main(int argc, const <BLACK;BYELLOW>char<RESET> **argv)
6: /* <BLACK;BYELLOW>char<RESET> ?? */
<BOLD;GREEN>hello_world<RESET>
3:Hel<BLACK;BYELLOW>lo_w<RESET>orld
EOF
test_expect_success 'mimic ack-grep --group' '
test_config color.grep.context normal &&
test_config color.grep.filename "bold green" &&
test_config color.grep.function normal &&
test_config color.grep.linenumber normal &&
test_config color.grep.match "black yellow" &&
test_config color.grep.selected normal &&
test_config color.grep.separator normal &&
git grep --break --heading -n --color \
-e char -e lo_w hello.c hello_world |
test_decode_color >actual &&
test_cmp expected actual
'
cat >expected <<EOF
space: line with leading space1
space: line with leading space2
space: line with leading space3
EOF
test_expect_success LIBPCRE 'grep -E "^ "' '
git grep -E "^ " space >actual &&
test_cmp expected actual
'
test_expect_success LIBPCRE 'grep -P "^ "' '
git grep -P "^ " space >actual &&
test_cmp expected actual
'
test_done
|
TextusData/Mover
|
thirdparty/git-1.7.11.3/t/t7810-grep.sh
|
Shell
|
gpl-3.0
| 20,792 |
#!/bin/sh
# $Id: z3950-daemon-shell.sh,v 1.5 2003/11/05 23:33:45 slef Exp $
# Script to start Koha background Z39.50 search daemon
# Part of the Koha Library Mgmt System - www.koha.org
# Licensed under the GPL
#----------------------------
# Do NOT run this script directly from system startup-- this should not run as root
# Call z3950-daemon-launch.sh instead
#----------------------------
. $(dirname $0)/z3950-daemon-options
#----------------------------
LOGFILE=$LogDir/z3950-daemon-`date +%Y%m%d-%H%M`.log
touch $LOGFILE
if [ ! -w $LOGFILE ]
then
echo ERROR: Cannot write to log file $LOGFILE
exit 1
fi
KohaZ3950Script=$KohaZ3950Dir/processz3950queue
if [ ! -x $KohaZ3950Script ]
then
echo ERROR: Cannot find Koha Z39.50 daemon script $KohaZ3950Script
exit 1
fi
PERL5LIB=$KohaModuleDir
export PERL5LIB
KOHA_CONF=$KohaConf
export KOHA_CONF
exec $KohaZ3950Script $LogDir >>$LOGFILE 2>&1
#-------------------
# $Log: z3950-daemon-shell.sh,v $
# Revision 1.5 2003/11/05 23:33:45 slef
# Now figures out what directory the scripts are in
#
# Revision 1.4 2003/10/20 19:16:50 slef
# Work on install bugs (see bug 632)
#
# Revision 1.3 2003/10/06 09:10:39 slef
# Removing config info from z3950*sh and using C4::Context in processz3950queue (Fixed bug 39)
#
# Revision 1.2 2003/04/29 16:48:25 tipaul
# really proud of this commit :-)
# z3950 search and import seems to works fine.
# Let me explain how :
# * a "search z3950" button is added in the addbiblio template.
# * when clicked, a popup appears and z3950/search.pl is called
# * z3950/search.pl calls addz3950search in the DB
# * the z3950 daemon retrieve the records and stores them in z3950results AND in marc_breeding table.
# * as long as there as searches pending, the popup auto refresh every 2 seconds, and says how many searches are pending.
# * when the user clicks on a z3950 result => the parent popup is called with the requested biblio, and auto-filled
#
# Note :
# * character encoding support : (It's a nightmare...) In the z3950servers table, a "encoding" column has been added. You can put "UNIMARC" or "USMARC" in this column. Depending on this, the char_decode in C4::Biblio.pm replaces marc-char-encode by an iso 8859-1 encoding. Note that in the breeding import this value has been added too, for a better support.
# * the marc_breeding and z3950* tables have been modified : they have an encoding column and the random z3950 number is stored too for convenience => it's the key I use to list only requested biblios in the popup.
#
# Revision 1.1 2002/11/22 10:15:22 tipaul
# moving z3950 related scripts to specific dir
#
# Revision 1.3 2002/07/02 22:08:50 tonnesen
# merging changes from rel-1-2
#
# Revision 1.1.2.3 2002/06/28 17:45:39 tonnesen
# z3950queue now listens for a -HUP signal before processing the queue. Z3950.pm
# sends the -HUP signal when queries are added to the queue.
#
# Revision 1.1.2.2 2002/06/26 16:25:51 amillar
# Make directory variable name more explanatory
#
|
Desarrollo-CeSPI/meran
|
intranet/scripts/z3950daemon/z3950-daemon-shell.sh
|
Shell
|
gpl-3.0
| 3,004 |
#!/bin/sh
cd "$(dirname "$0")"
# This script uses PhantomJS to update the descriptions with the DOM generated by WebKit.
# It thus replaces seriously broken HTML with what is actually rendered in the browser.
# Use like this:
# ./fix_descriptions.sh > update_descriptions.sql
# Read what gets written on stderr, perform any manual adjustments if needed, and then run:
# psql < update_descriptions.sql
# PhantomJS binary to use. It can be overridden by setting the variable in your environment.
[ -z "$PHANTOMJS" ] && PHANTOMJS="phantomjs"
# When remote debugging is enabled, connect to http://localhost:9001/ with Chromium to get the console
# When running in debugging mode phantomjs won't exit and you'll have to kill it.
#PHANTOM_FLAGS="--remote-debugger-port=9001 --remote-debugger-autorun=yes"
python generate_processing_page.py | "$PHANTOMJS" $PHANTOM_FLAGS fix_descriptions_phantom.js
|
nop33/indico
|
bin/utils/descriptions/fix_descriptions.sh
|
Shell
|
gpl-3.0
| 844 |
#!/bin/bash
rm -rf html/
mkdir html
asciidoctor *.adoc
mv *.html html/
|
joshuagn/ANPR
|
docs/render_docs.sh
|
Shell
|
apache-2.0
| 72 |
#!/bin/bash
#Aqueduct - Compliance Remediation Content
#Copyright (C) 2011,2012 Vincent C. Passaro ([email protected])
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor,
#Boston, MA 02110-1301, USA.
######################################################################
#By Tummy a.k.a Vincent C. Passaro #
#Vincent[.]Passaro[@]gmail[.]com #
#www.vincentpassaro.com #
######################################################################
#_____________________________________________________________________
#| Version | Change Information | Author | Date |
#|__________|_______________________|____________________|____________|
#| 1.0 | Initial Script | Vincent C. Passaro | 20-oct-2011|
#| | Creation | | |
#|__________|_______________________|____________________|____________|
#
#
# - Updated by Shannon Mitchell([email protected])
# on 04-Feb-2012 to check group ownership before running chgrp.
#######################DISA INFORMATION###############################
#Group ID (Vulid): V-22435
#Group Title: GEN003930
#Rule ID: SV-26675r1_rule
#Severity: CAT II
#Rule Version (STIG-ID): GEN003930
#Rule Title: The printers.conf file must be group-owned by lp.
#
#Vulnerability Discussion: Failure to give group-ownership of the
#printers.conf file to lp provides the members of the owning group
#and possible unauthorized users, with the potential to modify the
#printers.conf file. Unauthorized modifications could disrupt access
#to local printers from authorized remote hosts or permit
#unauthorized remote access to local printers.
#
#Responsibility: System Administrator
#IAControls: ECLP-1
#
#Check Content:
#Check the group ownership of the /etc/cups/printers.conf file.
#
#Procedure:
# ls -lL /etc/cups/printers.conf
#
#If the file is not group-owned by lp, this is a finding.
#
#Fix Text: Change the group-owner of the printers.conf file.
#
#Procedure:
# chgrp lp /etc/cups/printers.conf
#######################DISA INFORMATION###############################
#Global Variables#
PDI=GEN003930
#Start-Lockdown
if [ -a "/etc/cups/printers.conf" ]
then
CURGROUP=`stat -c %G /etc/cups/printers.conf`;
if [ "$CURGROUP" != "lp" ]
then
chgrp lp /etc/cups/printers.conf
fi
fi
|
quark-pat/CLIP
|
packages/aqueduct/aqueduct/compliance/Bash/STIG/rhel-5-beta/prod/GEN003930.sh
|
Shell
|
apache-2.0
| 2,980 |
#!/usr/bin/env bash
# Copyright 2017 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
#
# This script will build licenseok and run it on all
# source files to check licence
set -e
go build ./hack/licenseok
find . -path ./vendor -prune -o -regex ".+\.pb\.go$" -prune -o -type f -regex ".*\.\(go\|proto\)$"\
-printf '%P\n' | xargs ./licenseok
|
ibrasho-forks/dep
|
hack/validate-licence.bash
|
Shell
|
bsd-3-clause
| 433 |
#!/bin/bash -e
#===============================================================================
# FILE: package_a52dec_source.sh
# USAGE: ./package_a52dec_source.sh
# DESCRIPTION: Retrieve a52dec source
# OPTIONS: ---
# LICENSE: AGPLv3+
# REQUIREMENTS: ---
# BUGS: ---
# NOTES: ---
# AUTHOR: Tan-Tan, <[email protected]>
# ORGANIZATION: Kaltura, inc.
# CREATED: 12/21/14
# REVISION: ---
#===============================================================================
set -o nounset # Treat unset variables as an error
if [ ! -x "`which wget 2>/dev/null`" ];then
echo "Need to install wget."
exit 2
fi
SOURCES_RC=`dirname $0`/sources.rc
if [ ! -r $SOURCES_RC ];then
echo "Could not find $SOURCES_RC"
exit 1
fi
. $SOURCES_RC
wget $A52DEC_URI -O $RPM_SOURCES_DIR/a52dec-$A52DEC_VERSION.tar.gz
if [ $? -eq 0 ];then
echo "Packaged to a52dec-$A52DEC_VERSION.tar.gz"
else
echo "Unable to download $A52DEC_URI" >&2
exit 1
fi
if [ -x "`which rpmbuild 2>/dev/null`" ];then
rpmbuild -ba $RPM_SPECS_DIR/kaltura-a52dec.spec
fi
|
littlefisher112291/platform-install-packages
|
build/package_a52dec_source.sh
|
Shell
|
agpl-3.0
| 1,135 |
#!/bin/bash
# Requires bash version >= 4.
# This script updates pom.xml, README.md and other relevant files to the next version number.
# This script is meant to be run manually (not by Travis)
#
# This script should be ALWAYS executed from the repo root directory: ./utilities/update_versions.sh
#
# Arguments (all are optional):
# --modules= specifies a comma-separated list of modules to update (spaces are not allowed),
# for example --modules=google-cloud-logging,google-cloud-vision
# --alpha= specifies new version for alpha modules in format
# "major.minor.incremental.qualifier[-SNAPSHOT]",
# for example --alpha=0.12.2-alpha-SNAPSHOT
# --beta= specifies new version for beta modules in format
# "major.minor.incremental.qualifier[-SNAPSHOT]",
# for example --beta=0.12.2-beta
# --rc= specifies new version for release candidate modules in format
# "major.minor.incremental.qualifier[-SNAPSHOT]",
# for example --rc=0.12.2-rc1
# --ga= specifies new version for GA modules in format
# "major.minor.incremental.qualifier[-SNAPSHOT]",
# for example --ga=0.12.2
#
# If at least one argument is provided it means that all the other modules, which do not belong
# to the argument's qualifier(s) ("alpha", "beta", "rc" and/or "ga"), will NOT be updated.
#
# Providing no argument defaults to incrementing revision number in ALL modules to
# major.minor.incremental+1-qualifier-SNAPSHOT if the current version is
# major.minor.incremental-qualifier OR to major.minor.incremental-qualifier if the
# current version is major.minor.incremental-qualifier-SNAPSHOT.
#
# This script assumes that all project modules have an explicit
# <version>major.minor.incremental.qualifier[-SNAPSHOT]</version> element specified in each
# (parent and child) module's pom.xml
#
# Note: README.md files are updated only if the new version is NOT a SNAPSHOT one.
#
# Examples:
# ~$ ./utilities/update_versions.sh --modules=google-cloud-logging,google-cloud-speech
# will update only modules google-cloud-logging and google-cloud-speech, ignoring others, using
# default version bumping described above;
#
# ~$ ./utilities/update_versions.sh --alpha=0.12.3-alpha
# will update all alpha modules to the specified version, ignoring others (beta, rc and ga);
#
# ~$ ./utilities/update_versions.sh --alpha=0.12.3-alpha-SNAPSHOT --rc=0.12.3-rc2-SNAPSHOT
# will update only the alpha and rc modules to the specified versions respectively; as a result, all
# alpha and rc modules within the repo will be of version 0.12.3-alpha-SNAPSHOT and
# 0.12.3-rc2-SNAPSHOT respectively;
#
# ~$ ./utilities/update_versions.sh --modules=google-cloud-logging,google-cloud-speech --beta=0.12.3-beta --ga=1.1.0
# assuming that at the moment of execution google-cloud-logging is in GA and google-cloud-speech
# is in beta, the above command will update google-cloud-logging to 1.1.0 and google-cloud-speech
# to 0.12.3-beta, ignoring all other modules.
#
# ~$ ./utilities/update_versions.sh --modules=google-cloud-speech --beta=1.0.0
# assuming google-cloud-speech is in beta, the above command can be executed to move it to
# GA stage.
set -e
RED='\033[1;31m'
GREEN='\033[1;32m'
BOLD='\033[1m'
NC='\033[0m'
MODULES=""
ALPHA_VERSION=""
BETA_VERSION=""
RC_VERSION=""
GA_VERSION=""
# uncomment module for GAE testing apps to enable detection (re-commented below)
sed -i -e 's:<!--<module>google-cloud-testing</module>-->:<module>google-cloud-testing</module>:' pom.xml
for i in "$@"
do
case $i in
--modules=*)
MODULES="${i#*=}"
shift
;;
--alpha=*)
ALPHA_VERSION="${i#*=}"
shift
;;
--beta=*)
BETA_VERSION="${i#*=}"
shift
;;
--rc=*)
RC_VERSION="${i#*=}"
shift
;;
--ga=*)
GA_VERSION="${i#*=}"
shift
;;
*)
;;
esac
done
echo -e "\n${BOLD}Parameters passed:${NC} --modules=${MODULES} --alpha=${ALPHA_VERSION}, --beta=${BETA_VERSION}, --rc=${RC_VERSION}, --ga=${GA_VERSION}"
# Necessary step for the next "mvn dependency:tree" command to complete successfully
echo -e "\n${BOLD}Executing${NC} mvn -q clean install -DskipTests -Dmaven.javadoc.skip=true"
mvn -q clean install -DskipTests -Dmaven.javadoc.skip=true
echo -e "\n${BOLD}Checking modules${NC}"
modules=$(mvn -B dependency:tree | grep "\[INFO\] com\.google\." | sed -r "s/.*:(.*):(.*):(.*)/\1:\3/g")
declare -A module_version_map
root_module=""
for item in ${modules[*]}
do
echo "Module: ${item}"
module_version_map[${item%:*}]=${item#*:}
if [ "${root_module}" = "" ];then
root_module=${item%:*}
fi
done
echo -e "\n${BOLD}Validating explicitly specified modules${NC}"
declare -A specified_modules_map
if [ "${MODULES}" != "" ]; then
specified_modules_arr=(${MODULES//,/ })
for item in ${specified_modules_arr[*]}; do
if [ "${module_version_map[${item}]}" != "" ]; then
specified_modules_map[${item}]=${module_version_map[${item}]}
else
echo -e "${RED}WARNING:${NC} Module \"${item}\" module, specified in --modules parameter could not be found, ignoring \"${item}\""
fi
done
fi
echo -e "\n${BOLD}Checking module folders${NC}"
module_folders=($(find . -maxdepth 2 -type d | sed -E -n "/^\.\/(google-cloud-contrib\/|google-cloud-testing\/)?google-cloud(-[a-z0-9]+)+$/p") . ./google-cloud)
declare -A module_folder_map
module_folder_map[${root_module}]="."
for item in ${module_folders[*]}
do
echo "Module Folder: ${item}"
module_folder_map[${item##*/}]=${item}
done
echo -e "\n${BOLD}Validating module folder mapping${NC}"
for item in ${!module_folder_map[@]}
do
if [ "${item}" != "." ] && [ "${module_version_map[${item}]}" == "" ]; then
echo -e "${RED}WARNING:${NC} Ignoring \"${module_folder_map[${item}]}\" folder, as there is no corresponding module found in the root pom"
fi
done
echo -e "\n${BOLD}Updating versions${NC}"
for item in ${modules[*]}; do
module="${item%:*}"
folder="${module_folder_map[${module}]}"
old_version="${module_version_map[${module}]}"
# Check if we need to update only the specified modules, and if we do, ignore the other modules
if [ "${MODULES}" != "" ]; then
if [ "${specified_modules_map[${module}]}" == "" ]; then
echo -e "${RED}WARNING:${NC} Ignoring \"${item}\" module, as it is not specified in --modules= non-empty parameter"
continue
fi
fi
# Determine current base_version (e.g. "0.12.1"), qualifier (e.g. "-alpha" or "-rc1") and snapshot (e.g. "-SNAPSHOT" or "")
base_version=${old_version}
qualifier=""
snapshot=""
if [ "${base_version##*-}" == "SNAPSHOT" ]; then
snapshot="-SNAPSHOT"
base_version="${base_version%-*}"
fi
if [[ "${base_version##*-}" =~ [a-z]+[0-9]* ]]; then
qualifier="-${base_version##*-}"
base_version="${base_version%-*}"
fi
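  # Illustrative example (not part of the script's logic): an old_version of
  # "0.12.2-alpha-SNAPSHOT" is split by the steps above into base_version="0.12.2",
  # qualifier="-alpha" and snapshot="-SNAPSHOT"; a GA version such as "1.1.0" keeps
  # base_version="1.1.0" with an empty qualifier and snapshot.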
# Determine new_version value
new_version=""
if [ "${ALPHA_VERSION}" != "" ] && [[ "${qualifier}" =~ -alpha[0-9]* ]]; then
new_version=${ALPHA_VERSION}
fi
if [ "${BETA_VERSION}" != "" ] && [[ "${qualifier}" =~ -beta[0-9]* ]]; then
new_version=${BETA_VERSION}
fi
if [ "${RC_VERSION}" != "" ] && [[ "${qualifier}" =~ -rc[0-9]* ]]; then
new_version=${RC_VERSION}
fi
if [ "${GA_VERSION}" != "" ] && [ "${qualifier}" == "" ]; then
new_version=${GA_VERSION}
fi
# echo -e "base_version=${base_version}, qualifier=${qualifier}, snapshot=${snapshot}"
if [ "${ALPHA_VERSION}" == "" ] && [ "${BETA_VERSION}" == "" ] && [ "${RC_VERSION}" == "" ] && [ "${GA_VERSION}" == "" ]; then
if [ "${snapshot}" == "" ]; then
new_snapshot="-SNAPSHOT"
new_base_version="${base_version%.*}.$((${base_version##*.}+1))"
else
new_snapshot=""
new_base_version="${base_version}"
fi
new_version="${new_base_version}${qualifier}${new_snapshot}"
fi
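  # Illustrative example (not part of the script's logic): with no --alpha/--beta/--rc/--ga
  # arguments, a current version of "0.12.2-beta" becomes "0.12.3-beta-SNAPSHOT", while
  # "0.12.2-beta-SNAPSHOT" becomes "0.12.2-beta".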
if [ "${new_version}" == "" ]; then
echo -e "${RED}WARNING:${NC} Ignoring module ${BOLD}${module}${NC}, as its qualifier ${BOLD}${qualifier}${NC} does not match command line arguments"
continue
fi
# Determine new base_version (e.g. "0.13.1"), new qualifier (e.g. "-alpha" or "-rc2") and new snapshot (e.g. "-SNAPSHOT" or "")
new_base_version=${new_version}
new_qualifier=""
new_snapshot=""
if [ "${new_base_version##*-}" == "SNAPSHOT" ]; then
new_snapshot="-SNAPSHOT"
new_base_version="${new_base_version%-*}"
fi
if [[ "${new_base_version##*-}" =~ [a-z]+[0-9]* ]]; then
new_qualifier="-${new_base_version##*-}"
new_base_version="${new_base_version%-*}"
fi
# echo -e "new_base_version=${new_base_version}, new_qualifier=${new_qualifier}, new_snapshot=${new_snapshot}"
echo -e "Updating module ${BOLD}${module}${NC} in folder ${folder} from version ${RED}${old_version}${NC} to ${GREEN}${new_version}${NC}"
if [ "${module}" == "google-cloud" ]; then
module_suffix="cloud"
else
module_suffix="${module##google-cloud-}"
fi
# Where the actual version changing happens (the only mutative operations in the script).
# 1) Update version properties (used to define dependencies between google-cloud modules).
echo -e " Updating ${module_suffix}.version property in root pom.xml"
sed -ri "s/(<${module_suffix}.version>\s*)((\w|-|\.)+)(\s*<\/${module_suffix}.version>)/\1${new_version}\4/" pom.xml
# 2) Update version of the module. If the module is a parent of other modules
  # (like the root module or the google-cloud-contrib), then the parent section of its child modules
# will be updated too.
echo -e " Updating version in ${folder}/pom.xml and the parent version in the corresponding children modules if exist"
mvn -q versions:set -DartifactId=${module} -DnewVersion=${new_version} -DprocessPlugins=false -DgenerateBackupPoms=false
# 3) Update Google App Engine application dockerfile, if it exists.
if [ -w ${folder}/src/main/docker/Dockerfile ]; then
old_version="${module_version_map[${module}]}"
echo -e " Updating ${folder}/src/main/docker/Dockerfile"
sed -ri "s/${old_version}/${new_version}/" ${folder}/src/main/docker/Dockerfile
fi
# 4) Update README.md
if [ -f ${folder}/README.md ] && [ "${new_snapshot}" == "" ]; then
readme_version="${new_version}"
if [ "${module}" != "google-cloud-nio-examples" ]; then
echo -e " Updating ${folder}/README.md to the version ${readme_version}"
sed -ri "s/<version>[0-9]+\.[0-9]+\.[0-9]+(-[a-z]+[0-9]*)?<\/version>/<version>${readme_version}<\/version>/g" ${folder}/README.md
sed -ri "s/:[0-9]+\.[0-9]+\.[0-9]+(-[a-z]+[0-9]*)?'/:${readme_version}'/g" ${folder}/README.md
sed -ri "s/\"[0-9]+\.[0-9]+\.[0-9]+(-[a-z]+[0-9]*)?\"/\"${readme_version}\"/g" ${folder}/README.md
else
readme_version="${new_base_version%.*}.$((${new_base_version##*.}+1))${new_qualifier}-SNAPSHOT"
echo -e " Updating ${folder}/README.md to the version ${readme_version}"
sed -ri "s/google-cloud-nio-[0-9]+\.[0-9]+\.[0-9]+(-[a-z]+[0-9]*)?-SNAPSHOT/google-cloud-nio-${readme_version}/g" ${folder}/README.md
sed -ri "s/google-cloud-nio-examples-[0-9]+\.[0-9]+\.[0-9]+(-[a-z]+[0-9]*)?-SNAPSHOT/google-cloud-nio-examples-${readme_version}/g" ${folder}/README.md
fi
fi
done
# re-comment module for GAE testing apps
sed -i -e 's:<module>google-cloud-testing</module>:<!--<module>google-cloud-testing</module>-->:' pom.xml
|
rborer/google-cloud-java
|
utilities/update_versions.sh
|
Shell
|
apache-2.0
| 11,259 |
#!/bin/sh
../../bin/pyjsbuild $@ Scope
|
certik/pyjamas
|
examples/scoping/build.sh
|
Shell
|
apache-2.0
| 39 |
mvn archetype:create -DarchetypeGroupId=org.ops4j.pax.exam -DarchetypeArtifactId=maven-archetype-paxexam-junit -DarchetypeVersion=1.2.0-SNAPSHOT -DgroupId=com.company -DartifactId=company-osgitests
|
ops4j/org.ops4j.pax.exam1
|
maven-archetype-paxexam-junit/src/test/resources/testcreate.sh
|
Shell
|
apache-2.0
| 198 |
ipsec auto --up westnet--eastnet-ikev2
ipsec look
ping -c1 -I 192.0.1.254 192.0.2.254
echo done
|
y-trudeau/openswan-patch-meraki
|
testing/pluto/interop-ikev2-racoon-02-psk-responder/westrun.sh
|
Shell
|
gpl-2.0
| 97 |
#!/usr/bin/env bash
ci_dir="$(dirname "$0")"
source ${ci_dir}/ci-common.sh
Flocq_CI_DIR=${CI_BUILD_DIR}/flocq
git_checkout ${Flocq_CI_BRANCH} ${Flocq_CI_GITURL} ${Flocq_CI_DIR}
( cd ${Flocq_CI_DIR} && ./autogen.sh && ./configure && ./remake -j${NJOBS} )
|
amintimany/coq
|
dev/ci/ci-flocq.sh
|
Shell
|
lgpl-2.1
| 258 |
inkscape --export-png ../icons/16x16/vlc-qt.png -w 16 vlc-qt.svg
inkscape --export-png ../icons/16x16/[email protected] -w 32 vlc-qt.svg
inkscape --export-png ../icons/24x24/vlc-qt.png -w 24 vlc-qt.svg
inkscape --export-png ../icons/24x24/[email protected] -w 48 vlc-qt.svg
inkscape --export-png ../icons/32x32/vlc-qt.png -w 32 vlc-qt.svg
inkscape --export-png ../icons/32x32/[email protected] -w 64 vlc-qt.svg
inkscape --export-png ../icons/48x48/vlc-qt.png -w 48 vlc-qt.svg
inkscape --export-png ../icons/48x48/[email protected] -w 96 vlc-qt.svg
inkscape --export-png ../icons/64x64/vlc-qt.png -w 64 vlc-qt.svg
inkscape --export-png ../icons/64x64/[email protected] -w 128 vlc-qt.svg
inkscape --export-png ../icons/128x128/vlc-qt.png -w 128 vlc-qt.svg
inkscape --export-png ../icons/128x128/[email protected] -w 256 vlc-qt.svg
inkscape --export-png ../icons/256x256/vlc-qt.png -w 256 vlc-qt.svg
inkscape --export-png ../icons/256x256/[email protected] -w 512 vlc-qt.svg
|
Danielmachon/vlc-qt
|
resources/src/inkscape-gen_vlc-qt.sh
|
Shell
|
lgpl-3.0
| 950 |
#!/bin/bash
if [ -z "$BROOKLYN_APP_CLASS" ] ; then
BROOKLYN_APP_CLASS=brooklyn.demo.WebClusterDatabaseExample
fi
if [ -z "$JAVA" ] ; then
JAVA=`which java`
fi
if [ ! -z "$JAVA_HOME" ] ; then
JAVA=$JAVA_HOME/bin/java
else
JAVA=`which java`
fi
if [ ! -x "$JAVA" ] ; then
echo Cannot find java. Set JAVA_HOME or add java to path.
exit 1
fi
if [[ ! `ls *.jar 2> /dev/null` || ! `ls lib/*.jar 2> /dev/null` ]] ; then
echo Command must be run from the directory where it is installed.
exit 1
fi
$JAVA -Xms256m -Xmx1024m -XX:MaxPermSize=1024m -classpath "*:lib/*" $BROOKLYN_APP_CLASS "$@"
|
rhodgin/brooklyn
|
examples/simple-web-cluster/src/main/assembly/scripts/start.sh
|
Shell
|
apache-2.0
| 613 |
#!/bin/bash
# ensure that the specified version of protoc is installed in
# /tmp/proto$PROTO_VERSION/bin/protoc, which may be cached
# bash tools/travis-install-protoc.sh 3.0.0-beta-3.1
#
# make bash more robust.
set -beEux -o pipefail
if [ $# != 1 ] ; then
echo "wrong # of args: $0 protoversion" >&2
exit 1
fi
PROTO_VERSION="$1"
PROTO_DIR="/tmp/proto$PROTO_VERSION"
# Can't check for presence of directory as cache auto-creates it.
if [ ! -f "$PROTO_DIR/bin/protoc" ]; then
wget -O - "https://github.com/google/protobuf/archive/v${PROTO_VERSION}.tar.gz" | tar xz -C /tmp
cd "/tmp/protobuf-${PROTO_VERSION}"
./autogen.sh
./configure --prefix="$PROTO_DIR" --disable-shared
make -j 4
make install
fi
|
ga4gh/schemas
|
tools/travis-install-protoc.sh
|
Shell
|
apache-2.0
| 728 |
#!/bin/bash
# Generate input if none exists
mkdir -p input
EXISTING_INPUTS=$(ls -la input/ | wc -l)
if [[ $EXISTING_INPUTS != 14 ]]; then
rm -rf input/*
./generate.sh
fi
# Place input in HDFS
./convert.sh
|
agrippa/spark-swat
|
functional-tests/tuple-output/init.sh
|
Shell
|
bsd-3-clause
| 215 |
source $stdenv/setup
set -x
sources_=($sources)
targets_=($targets)
echo $objects
objects=($objects)
symlinks=($symlinks)
# Remove the initial slash from a path, since genisofs likes it that way.
stripSlash() {
res="$1"
if test "${res:0:1}" = /; then res=${res:1}; fi
}
touch pathlist
# Add the individual files.
for ((i = 0; i < ${#targets_[@]}; i++)); do
stripSlash "${targets_[$i]}"
mkdir -p "$(dirname "$res")"
cp -a "${sources_[$i]}" "$res"
done
# Add the closures of the top-level store objects.
mkdir -p nix/store
storePaths=$(perl $pathsFromGraph closure-*)
for i in $storePaths; do
cp -a "$i" "${i:1}"
done
# TODO tar ruxo
# Also include a manifest of the closures in a format suitable for
# nix-store --load-db.
printRegistration=1 perl $pathsFromGraph closure-* > nix-path-registration
# Add symlinks to the top-level store objects.
for ((n = 0; n < ${#objects[*]}; n++)); do
object=${objects[$n]}
symlink=${symlinks[$n]}
if test "$symlink" != "none"; then
mkdir -p $(dirname ./$symlink)
ln -s $object ./$symlink
fi
done
ensureDir $out/tarball
tar cvJf $out/tarball/$fileName.tar.xz *
ensureDir $out/nix-support
echo $system > $out/nix-support/system
echo "file system-tarball $out/tarball/$fileName.tar.xz" > $out/nix-support/hydra-build-products
|
nbp/nixos
|
lib/make-system-tarball.sh
|
Shell
|
mit
| 1,331 |
# This file is part of Relax-and-Recover,
# licensed under the GNU General Public License.
# Refer to the included COPYING for full text of license.
# Save the current disk usage (in POSIX output format) in the rescue image
# excluding possibly mounted ReaR target USB data and USB ESP partitions:
local original_disk_space_usage_file="$VAR_DIR/layout/config/df.txt"
local rear_USB_data_partition="$( readlink -f "/dev/disk/by-label/$USB_DEVICE_FILESYSTEM_LABEL" )"
local rear_USB_ESP_partition="$( readlink -f /dev/disk/by-label/REAR-EFI )"
# Careful with "egrep -v" patterns because with an empty pattern egrep -v '' discards all lines:
local egrep_pattern=""
test "$rear_USB_data_partition" && egrep_pattern="^$rear_USB_data_partition"
if test "$rear_USB_ESP_partition" ; then
test "$egrep_pattern" && egrep_pattern="$egrep_pattern|^$rear_USB_ESP_partition" || egrep_pattern="^$rear_USB_ESP_partition"
fi
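# Illustrative example (assumed device names, not part of the original logic): if only
# the ReaR data partition resolves to /dev/sdb1, egrep_pattern becomes "^/dev/sdb1";
# with an ESP at /dev/sdb2 it becomes "^/dev/sdb1|^/dev/sdb2". When neither exists the
# pattern stays empty and the plain "df" branch below is used, because egrep -v ''
# would discard every line.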
# The disk usage must be in MiB units '-BM' (and not in arbitrary human readable units via '-h')
# because the values are used in 420_autoresize_last_partitions.sh to calculate whether or not
# the current disk usage still fits on a smaller disk when the last partition must be shrunk:
if test "$egrep_pattern" ; then
df -Pl -BM -x encfs -x tmpfs -x devtmpfs | egrep -v "$egrep_pattern" >$original_disk_space_usage_file
else
df -Pl -BM -x encfs -x tmpfs -x devtmpfs >$original_disk_space_usage_file
fi
|
phracek/rear
|
usr/share/rear/layout/save/GNU/Linux/510_current_disk_usage.sh
|
Shell
|
gpl-3.0
| 1,426 |
#! /bin/sh
./lib/mk/std-autogen.sh ./lib
|
jaiminpan/pgbouncer
|
autogen.sh
|
Shell
|
isc
| 44 |
#!/bin/bash
DEVDIR=python-codec-dev
VERSION=`PYTHONPATH=$DEVDIR/src/ python get_version.py`
DEVZIP=dist/$DEVDIR-$VERSION.tar.gz
python ../googlecode_upload.py -s "Developer version of the RL-Glue Python Codec $VERSION" -p rl-glue-ext --labels=Type-Installer,OpSys-All,Language-Python,Audience-Dev $DEVZIP
|
GarethNelson/rl-glue-ext
|
projects/distribution_tools/Python-Codec/upload-fromsource-dev.bash
|
Shell
|
apache-2.0
| 307 |
#!/bin/bash
echo "Assumed that you ran 'sbt +clean +update dependencyUpdates evicted +test:compile +test' first!"
echo ""
if grep -q SNAP build.sbt
then
echo "There are SNAPSHOTs in the build! Aborting."
exit 1
fi
sbt "; + rootJVM/publishSigned ; + rootJS/publishSigned ; sonatypeBundleRelease"
|
Sciss/ScalaCollider
|
scripts/publishSigned.sh
|
Shell
|
lgpl-2.1
| 302 |
#!/bin/bash
source "$(dirname "${BASH_SOURCE}")/lib/init.sh"
SCRIPT_ROOT=$(dirname ${BASH_SOURCE})/..
CODEGEN_PKG=${CODEGEN_PKG:-$(cd ${SCRIPT_ROOT}; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../../../k8s.io/code-generator)}
go install ./${CODEGEN_PKG}/cmd/conversion-gen
function codegen::join() { local IFS="$1"; shift; echo "$*"; }
# enumerate group versions
ALL_FQ_APIS=(
github.com/openshift/origin/pkg/build/controller/build/apis/defaults/v1
github.com/openshift/origin/pkg/apps/apis/apps/v1
github.com/openshift/origin/pkg/authorization/apis/authorization/v1
github.com/openshift/origin/pkg/build/apis/build/v1
github.com/openshift/origin/pkg/image/apis/image/v1
github.com/openshift/origin/pkg/network/apis/network/v1
github.com/openshift/origin/pkg/oauth/apis/oauth/v1
github.com/openshift/origin/pkg/project/apis/project/v1
github.com/openshift/origin/pkg/quota/apis/quota/v1
github.com/openshift/origin/pkg/route/apis/route/v1
github.com/openshift/origin/pkg/security/apis/security/v1
github.com/openshift/origin/pkg/template/apis/template/v1
github.com/openshift/origin/pkg/user/apis/user/v1
)
ALL_PEERS=(
k8s.io/apimachinery/pkg/api/resource
k8s.io/apimachinery/pkg/apis/meta/v1
k8s.io/apimachinery/pkg/apis/meta/internalversion
k8s.io/apimachinery/pkg/runtime
k8s.io/apimachinery/pkg/conversion
k8s.io/apimachinery/pkg/types
k8s.io/api/core/v1
k8s.io/kubernetes/pkg/apis/core
k8s.io/kubernetes/pkg/apis/core/v1
)
echo "Generating conversions"
${GOPATH}/bin/conversion-gen --input-dirs $(codegen::join , "${ALL_FQ_APIS[@]}") --extra-peer-dirs $(codegen::join , "${ALL_PEERS[@]}") --build-tag=ignore_autogenerated_openshift -O zz_generated.conversion --go-header-file ${SCRIPT_ROOT}/hack/boilerplate.txt --v=8 "$@"
|
php-coder/origin
|
hack/update-generated-conversions.sh
|
Shell
|
apache-2.0
| 1,839 |
#!/bin/sh
# production binary with bootloader
#/usr/local/stlink/st-flash --reset write /tmp/ArduCopter.build/f4light_Revolution.bin 0x08010000
#bare metal binary
/usr/local/stlink/st-flash --reset read eeprom.bin 0x08004000 0xc000 && \
/usr/local/stlink/st-flash --reset write ../../../../../ArduPlane/f4light_Revolution_bl.bin 0x08000000 && \
/usr/local/stlink/st-flash --reset write eeprom.bin 0x08004000 && \
/usr/local/stlink/st-util -m
|
lekston/ardupilot
|
libraries/AP_HAL_F4Light/boards/f4light_Revolution/support/bl/UPLOAD-plane-STLINK.sh
|
Shell
|
gpl-3.0
| 453 |
#!/bin/sh
cd `dirname $0`
java -classpath "${CLASSPATH}:.:bin/classes:BuiltIn:bin/lib/Hack.jar:bin/lib/HackGUI.jar:bin/lib/Simulators.jar:bin/lib/SimulatorsGUI.jar:bin/lib/Compilers.jar" HardwareSimulatorMain $*
|
Sl0vi/nand2tetris
|
tools/HardwareSimulator.sh
|
Shell
|
gpl-3.0
| 212 |
#!/bin/sh -x
echo "Writing to /tmp/$bar"
echo $foo > /tmp/$bar
echo -n "The file /tmp/$bar contains `cat /tmp/$bar` for server $deploy_server_id during $deploy_action" > $heat_outputs_path.result
echo "Written to /tmp/$bar"
echo "Output to stderr" 1>&2
|
hardys/demo_templates
|
resource_group_nested_config/config-scripts/example-script.sh
|
Shell
|
apache-2.0
| 252 |
#!/bin/bash
#
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Tests the examples provided in Bazel
#
# Load test environment
source $(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/test-setup.sh \
|| { echo "test-setup.sh not found!" >&2; exit 1; }
function set_up() {
copy_examples
}
#
# Native rules
#
function test_cpp() {
assert_build "//examples/cpp:hello-world"
assert_bazel_run "//examples/cpp:hello-world foo" "Hello foo"
assert_test_ok "//examples/cpp:hello-success_test"
assert_test_fails "//examples/cpp:hello-fail_test"
}
# An assertion that executes a binary from a subdirectory (to test runfiles)
function assert_binary_run_from_subdir() {
( # Needed to make execution from a different path work.
export PATH=${bazel_javabase}/bin:"$PATH" &&
mkdir -p x &&
cd x &&
unset JAVA_RUNFILES &&
unset TEST_SRCDIR &&
assert_binary_run "../$1" "$2" )
}
function test_java() {
local java_pkg=examples/java-native/src/main/java/com/example/myproject
assert_build_output ./bazel-bin/${java_pkg}/libhello-lib.jar ${java_pkg}:hello-lib
assert_build_output ./bazel-bin/${java_pkg}/libcustom-greeting.jar ${java_pkg}:custom-greeting
assert_build_output ./bazel-bin/${java_pkg}/hello-world ${java_pkg}:hello-world
assert_build_output ./bazel-bin/${java_pkg}/hello-resources ${java_pkg}:hello-resources
assert_binary_run_from_subdir "bazel-bin/${java_pkg}/hello-world foo" "Hello foo"
}
function test_java_test() {
setup_javatest_support
local java_native_tests=//examples/java-native/src/test/java/com/example/myproject
local java_native_main=//examples/java-native/src/main/java/com/example/myproject
assert_build "-- //examples/java-native/... -${java_native_main}:hello-error-prone"
assert_build_fails "${java_native_main}:hello-error-prone" \
"Did you mean 'result = b == -1;'?"
assert_test_ok "${java_native_tests}:hello"
assert_test_ok "${java_native_tests}:custom"
assert_test_fails "${java_native_tests}:fail"
assert_test_fails "${java_native_tests}:resource-fail"
}
function test_java_test_with_workspace_name() {
local java_pkg=examples/java-native/src/main/java/com/example/myproject
# Use named workspace and test if we can still execute hello-world
bazel clean
rm -f WORKSPACE
cat >WORKSPACE <<'EOF'
workspace(name = "toto")
EOF
assert_build_output ./bazel-bin/${java_pkg}/hello-world ${java_pkg}:hello-world
assert_binary_run_from_subdir "bazel-bin/${java_pkg}/hello-world foo" "Hello foo"
}
function test_genrule_and_genquery() {
# The --javabase flag is to force the tools/jdk:jdk label to be used
# so it appears in the dependency list.
assert_build_output ./bazel-bin/examples/gen/genquery examples/gen:genquery --javabase=//tools/jdk
local want=./bazel-genfiles/examples/gen/genrule.txt
assert_build_output $want examples/gen:genrule --javabase=//tools/jdk
diff $want ./bazel-bin/examples/gen/genquery \
|| fail "genrule and genquery output differs"
grep -qE "^//tools/jdk:jdk$" $want || {
cat $want
fail "//tools/jdk:jdk not found in genquery output"
}
}
if [ "${PLATFORM}" = "darwin" ]; then
function test_objc() {
setup_objc_test_support
# https://github.com/bazelbuild/bazel/issues/162
# prevents us from running iOS tests.
# TODO(bazel-team): Execute iOStests here when this issue is resolved.
assert_build_output ./bazel-bin/examples/objc/PrenotCalculator.ipa \
//examples/objc:PrenotCalculator
}
fi
function test_native_python() {
assert_build //examples/py_native:bin --python2_path=python
assert_test_ok //examples/py_native:test --python2_path=python
assert_test_fails //examples/py_native:fail --python2_path=python
}
#
# Skylark rules
#
function test_python() {
assert_build "//examples/py:bin"
./bazel-bin/examples/py/bin >& $TEST_log \
|| fail "//examples/py:bin execution failed"
expect_log "Fib(5)=8"
# Mutate //examples/py:bin so that it needs to build again.
echo "print('Hello')" > ./examples/py/bin.py
  # Ensure that we can rebuild //examples/py:bin without error.
assert_build "//examples/py:bin"
./bazel-bin/examples/py/bin >& $TEST_log \
|| fail "//examples/py:bin 2nd build execution failed"
expect_log "Hello"
}
function test_java_skylark() {
local java_pkg=examples/java-skylark/src/main/java/com/example/myproject
assert_build_output ./bazel-bin/${java_pkg}/libhello-lib.jar ${java_pkg}:hello-lib
assert_build_output ./bazel-bin/${java_pkg}/hello-data ${java_pkg}:hello-data
assert_build_output ./bazel-bin/${java_pkg}/hello-world ${java_pkg}:hello-world
# we built hello-world but hello-data is still there.
want=./bazel-bin/${java_pkg}/hello-data
test -x $want || fail "executable $want not found"
assert_binary_run_from_subdir "bazel-bin/${java_pkg}/hello-data foo" "Heyo foo"
}
function test_java_test_skylark() {
setup_skylark_javatest_support
javatests=examples/java-skylark/src/test/java/com/example/myproject
assert_build //${javatests}:pass
assert_test_ok //${javatests}:pass
assert_test_fails //${javatests}:fail
}
function test_protobuf() {
setup_protoc_support
local jar=bazel-bin/examples/proto/libtest_proto.jar
assert_build_output $jar //examples/proto:test_proto
unzip -v $jar | grep -q 'KeyVal\.class' \
|| fail "Did not find KeyVal class in proto jar."
}
run_suite "examples"
|
d/bazel
|
src/test/shell/bazel/bazel_example_test.sh
|
Shell
|
apache-2.0
| 5,925 |
#!/usr/bin/env bash
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Vars assumed:
# NUM_NODES
function get-master-size {
local suggested_master_size=1
if [[ "${NUM_NODES}" -gt "5" ]]; then
suggested_master_size=2
fi
if [[ "${NUM_NODES}" -gt "10" ]]; then
suggested_master_size=4
fi
if [[ "${NUM_NODES}" -gt "100" ]]; then
suggested_master_size=8
fi
if [[ "${NUM_NODES}" -gt "250" ]]; then
suggested_master_size=16
fi
if [[ "${NUM_NODES}" -gt "500" ]]; then
suggested_master_size=32
fi
if [[ "${NUM_NODES}" -gt "3000" ]]; then
suggested_master_size=64
fi
echo "${suggested_master_size}"
}
# Vars assumed:
# NUM_NODES
function get-master-root-disk-size() {
local suggested_master_root_disk_size="20GB"
if [[ "${NUM_NODES}" -gt "1000" ]]; then
suggested_master_root_disk_size="50GB"
fi
if [[ "${NUM_NODES}" -gt "2000" ]]; then
suggested_master_root_disk_size="100GB"
fi
echo "${suggested_master_root_disk_size}"
}
# Vars assumed:
# NUM_NODES
function get-master-disk-size() {
local suggested_master_disk_size="20GB"
if [[ "${NUM_NODES}" -gt "1000" ]]; then
suggested_master_disk_size="100GB"
fi
if [[ "${NUM_NODES}" -gt "2000" ]]; then
suggested_master_disk_size="200GB"
fi
echo "${suggested_master_disk_size}"
}
function get-node-ip-range {
if [[ -n "${NODE_IP_RANGE:-}" ]]; then
>&2 echo "Using user provided NODE_IP_RANGE: ${NODE_IP_RANGE}"
echo "${NODE_IP_RANGE}"
return
fi
local suggested_range="10.40.0.0/22"
if [[ "${NUM_NODES}" -gt 1000 ]]; then
suggested_range="10.40.0.0/21"
fi
if [[ "${NUM_NODES}" -gt 2000 ]]; then
suggested_range="10.40.0.0/20"
fi
if [[ "${NUM_NODES}" -gt 4000 ]]; then
suggested_range="10.40.0.0/19"
fi
echo "${suggested_range}"
}
function get-cluster-ip-range {
local suggested_range="10.64.0.0/14"
if [[ "${NUM_NODES}" -gt 1000 ]]; then
suggested_range="10.64.0.0/13"
fi
if [[ "${NUM_NODES}" -gt 2000 ]]; then
suggested_range="10.64.0.0/12"
fi
if [[ "${NUM_NODES}" -gt 4000 ]]; then
suggested_range="10.64.0.0/11"
fi
echo "${suggested_range}"
}
# Calculate ip alias range based on max number of pods.
# Let pow be the smallest integer which is bigger than log2($1 * 2).
# (32 - pow) will be returned.
#
# $1: The number of max pods limitation.
function get-alias-range-size() {
for pow in {0..31}; do
if (( 1 << $pow > $1 * 2 )); then
echo $((32 - pow))
return 0
fi
done
}
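# Example (hypothetical limit): for a max-pods value of 110, the smallest pow
# with 2^pow > 220 is 8, so the function echoes 32 - 8 = 24 (a /24 alias range).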
# NOTE: Avoid giving nodes empty scopes, because kubelet needs a service account
# in order to initialize properly.
NODE_SCOPES="${NODE_SCOPES:-monitoring,logging-write,storage-ro}"
|
wjiangjay/origin
|
vendor/k8s.io/kubernetes/cluster/gce/config-common.sh
|
Shell
|
apache-2.0
| 3,227 |
#!/bin/sh
[ -n "$INCLUDE_ONLY" ] || {
. /lib/functions.sh
. /lib/functions/network.sh
. ../netifd-proto.sh
init_proto "$@"
}
vti_generic_setup() {
local cfg="$1"
local mode="$2"
local local="$3"
local remote="$4"
local link="$5"
	local mtu zone ikey okey
json_get_vars mtu zone ikey okey
proto_init_update "$link" 1
proto_add_tunnel
json_add_string mode "$mode"
json_add_int mtu "${mtu:-1280}"
json_add_string local "$local"
json_add_string remote "$remote"
[ -n "$tunlink" ] && json_add_string link "$tunlink"
json_add_object 'data'
[ -n "$ikey" ] && json_add_int ikey "$ikey"
[ -n "$okey" ] && json_add_int okey "$okey"
json_close_object
proto_close_tunnel
proto_add_data
[ -n "$zone" ] && json_add_string zone "$zone"
proto_close_data
proto_send_update "$cfg"
}
vti_setup() {
local cfg="$1"
local mode="$2"
local ipaddr peeraddr
json_get_vars df ipaddr peeraddr tunlink
[ -z "$peeraddr" ] && {
proto_notify_error "$cfg" "MISSING_ADDRESS"
proto_block_restart "$cfg"
exit
}
( proto_add_host_dependency "$cfg" "$peeraddr" "$tunlink" )
[ -z "$ipaddr" ] && {
local wanif="$tunlink"
if [ -z $wanif ] && ! network_find_wan wanif; then
proto_notify_error "$cfg" "NO_WAN_LINK"
exit
fi
if ! network_get_ipaddr ipaddr "$wanif"; then
proto_notify_error "$cfg" "NO_WAN_LINK"
exit
fi
}
vti_generic_setup $cfg $mode $ipaddr $peeraddr "vti-$cfg"
}
proto_vti_setup() {
local cfg="$1"
vti_setup $cfg "vtiip"
}
vti6_setup() {
local cfg="$1"
local mode="$2"
local ip6addr peer6addr weakif
json_get_vars ip6addr peer6addr tunlink weakif
[ -z "$peer6addr" ] && {
proto_notify_error "$cfg" "MISSING_ADDRESS"
proto_block_restart "$cfg"
exit
}
( proto_add_host_dependency "$cfg" "$peer6addr" "$tunlink" )
[ -z "$ip6addr" ] && {
local wanif="$tunlink"
if [ -z $wanif ] && ! network_find_wan6 wanif; then
proto_notify_error "$cfg" "NO_WAN_LINK"
exit
fi
if ! network_get_ipaddr6 ip6addr "$wanif"; then
[ -z "$weakif" ] && weakif="lan"
if ! network_get_ipaddr6 ip6addr "$weakif"; then
proto_notify_error "$cfg" "NO_WAN_LINK"
exit
fi
fi
}
vti_generic_setup $cfg $mode $ip6addr $peer6addr "vti6-$cfg"
}
proto_vti6_setup() {
local cfg="$1"
vti6_setup $cfg "vtiip6"
}
proto_vti_teardown() {
local cfg="$1"
}
proto_vti6_teardown() {
local cfg="$1"
}
vti_generic_init_config() {
no_device=1
available=1
proto_config_add_int "mtu"
proto_config_add_string "tunlink"
proto_config_add_string "zone"
proto_config_add_int "ikey"
proto_config_add_int "okey"
}
proto_vti_init_config() {
vti_generic_init_config
proto_config_add_string "ipaddr"
proto_config_add_string "peeraddr"
}
proto_vti6_init_config() {
vti_generic_init_config
proto_config_add_string "ip6addr"
proto_config_add_string "peer6addr"
proto_config_add_string "weakif"
}
[ -n "$INCLUDE_ONLY" ] || {
[ -d /sys/module/ip_vti ] && add_protocol vti
[ -d /sys/module/ip6_vti ] && add_protocol vti6
}
|
plntyk/openwrt
|
package/network/config/vti/files/vti.sh
|
Shell
|
gpl-2.0
| 2,984 |
#!/bin/sh
# Unit tests for vc-list-files
# Copyright (C) 2008-2013 Free Software Foundation, Inc.
# This file is part of the GNUlib Library.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
: ${srcdir=.}
. "$srcdir/init.sh"; path_prepend_ "$abs_aux_dir" .
tmpdir=vc-git-$$
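# Clear any inherited git environment so the test repository created below is
# isolated from the caller's repository.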
GIT_DIR= GIT_WORK_TREE=; unset GIT_DIR GIT_WORK_TREE
fail=1
mkdir $tmpdir && cd $tmpdir &&
# without git, skip the test
# The double use of 'exit' is needed for the reference to $? inside the trap.
{ ( git init -q ) > /dev/null 2>&1 \
|| skip_ "git not found in PATH"; } &&
mkdir d &&
touch d/a b c &&
git config user.email "[email protected]" &&
git config user.name "Your Name" &&
git add . > /dev/null &&
git commit -q -a -m log &&
printf '%s\n' b c d/a > expected &&
vc-list-files > actual &&
compare expected actual &&
fail=0
Exit $fail
|
DDTChen/CookieVLC
|
vlc/contrib/android/gnutls/gl/tests/test-vc-list-files-git.sh
|
Shell
|
gpl-2.0
| 1,438 |
#!/bin/bash
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
set -o xtrace
export REPO_DIR=${REPO_DIR:-$(pwd)}
# Produce a JUnit-style XML test report for Jenkins.
export KUBE_JUNIT_REPORT_DIR=${WORKSPACE}/_artifacts
# Run the kubekins container, mapping in docker (so we can launch containers),
# the repo directory, and the artifacts output directory.
#
# Note: We pass in the absolute path to the repo on the host as an env var in case
# any tests that get run need to launch containers that also map volumes.
# This is required because if you do
#
# $ docker run -v $PATH:/container/path ...
#
# From _inside_ a container that has the host's docker mapped in, the $PATH
# provided must be resolvable on the *HOST*, not the container.
docker run --rm=true \
-v /var/run/docker.sock:/var/run/docker.sock \
-v "$(which docker)":/bin/docker \
-v "${REPO_DIR}":/go/src/k8s.io/kubernetes \
-v "${KUBE_JUNIT_REPORT_DIR}":/workspace/artifacts \
--env REPO_DIR="${REPO_DIR}" \
-i gcr.io/google_containers/kubekins-test:0.3 \
bash -c "cd kubernetes && ./hack/jenkins/test-dockerized.sh"
|
lleszczu/kubernetes
|
hack/jenkins/gotest-dockerized.sh
|
Shell
|
apache-2.0
| 1,704 |
#!/bin/sh
test_description='git log with invalid commit headers'
. ./test-lib.sh
test_expect_success 'setup' '
test_commit foo &&
git cat-file commit HEAD |
sed "/^author /s/>/>-<>/" >broken_email.commit &&
git hash-object -w -t commit broken_email.commit >broken_email.hash &&
git update-ref refs/heads/broken_email $(cat broken_email.hash)
'
test_expect_success 'fsck notices broken commit' '
test_must_fail git fsck 2>actual &&
test_i18ngrep invalid.author actual
'
test_expect_success 'git log with broken author email' '
{
echo commit $(cat broken_email.hash)
echo "Author: A U Thor <[email protected]>"
echo "Date: Thu Apr 7 15:13:13 2005 -0700"
echo
echo " foo"
} >expect.out &&
git log broken_email >actual.out 2>actual.err &&
test_cmp expect.out actual.out &&
test_must_be_empty actual.err
'
test_expect_success 'git log --format with broken author email' '
echo "A U [email protected]+Thu Apr 7 15:13:13 2005 -0700" >expect.out &&
git log --format="%an+%ae+%ad" broken_email >actual.out 2>actual.err &&
test_cmp expect.out actual.out &&
test_must_be_empty actual.err
'
munge_author_date () {
git cat-file commit "$1" >commit.orig &&
sed "s/^\(author .*>\) [0-9]*/\1 $2/" <commit.orig >commit.munge &&
git hash-object -w -t commit commit.munge
}
test_expect_success 'unparsable dates produce sentinel value' '
commit=$(munge_author_date HEAD totally_bogus) &&
echo "Date: Thu Jan 1 00:00:00 1970 +0000" >expect &&
git log -1 $commit >actual.full &&
grep Date <actual.full >actual &&
test_cmp expect actual
'
test_expect_success 'unparsable dates produce sentinel value (%ad)' '
commit=$(munge_author_date HEAD totally_bogus) &&
echo >expect &&
git log -1 --format=%ad $commit >actual &&
test_cmp expect actual
'
# date is 2^64 + 1
test_expect_success 'date parser recognizes integer overflow' '
commit=$(munge_author_date HEAD 18446744073709551617) &&
echo "Thu Jan 1 00:00:00 1970 +0000" >expect &&
git log -1 --format=%ad $commit >actual &&
test_cmp expect actual
'
# date is 2^64 - 2
test_expect_success 'date parser recognizes time_t overflow' '
commit=$(munge_author_date HEAD 18446744073709551614) &&
echo "Thu Jan 1 00:00:00 1970 +0000" >expect &&
git log -1 --format=%ad $commit >actual &&
test_cmp expect actual
'
# date is within 2^63-1, but enough to choke glibc's gmtime
test_expect_success 'absurdly far-in-future date' '
commit=$(munge_author_date HEAD 999999999999999999) &&
git log -1 --format=%ad $commit
'
test_done
|
devzero2000/git-core
|
t/t4212-log-corrupt.sh
|
Shell
|
gpl-2.0
| 2,523 |
R CMD REMOVE --library=$PREFIX/lib/R/library/ maizeprobe
|
joachimwolff/bioconda-recipes
|
recipes/bioconductor-maizeprobe/pre-unlink.sh
|
Shell
|
mit
| 57 |
#!/bin/sh
#
# Portions copyright (c) 2007, 2009 Sam Vilain
# Portions copyright (c) 2011 Bryan Jacobs
#
test_description='git-svn svn mergeinfo propagation'
. ./lib-git-svn.sh
test_expect_success 'load svn dump' "
svnadmin load -q '$rawsvnrepo' \
< '$TEST_DIRECTORY/t9161/branches.dump' &&
git svn init --minimize-url -R svnmerge \
-T trunk -b branches '$svnrepo' &&
git svn fetch --all
"
test_expect_success 'propagate merge information' '
git config svn.pushmergeinfo yes &&
git checkout svnb1 &&
git merge --no-ff svnb2 &&
git svn dcommit
'
test_expect_success 'check svn:mergeinfo' '
mergeinfo=$(svn_cmd propget svn:mergeinfo "$svnrepo"/branches/svnb1)
test "$mergeinfo" = "/branches/svnb2:3,8"
'
test_expect_success 'merge another branch' '
git merge --no-ff svnb3 &&
git svn dcommit
'
test_expect_success 'check primary parent mergeinfo respected' '
mergeinfo=$(svn_cmd propget svn:mergeinfo "$svnrepo"/branches/svnb1)
test "$mergeinfo" = "/branches/svnb2:3,8
/branches/svnb3:4,9"
'
test_expect_success 'merge existing merge' '
git merge --no-ff svnb4 &&
git svn dcommit
'
test_expect_success "check both parents' mergeinfo respected" '
mergeinfo=$(svn_cmd propget svn:mergeinfo "$svnrepo"/branches/svnb1)
test "$mergeinfo" = "/branches/svnb2:3,8
/branches/svnb3:4,9
/branches/svnb4:5-6,10-12
/branches/svnb5:6,11"
'
test_expect_success 'make further commits to branch' '
git checkout svnb2 &&
touch newb2file &&
git add newb2file &&
git commit -m "later b2 commit" &&
touch newb2file-2 &&
git add newb2file-2 &&
git commit -m "later b2 commit 2" &&
git svn dcommit
'
test_expect_success 'second forward merge' '
git checkout svnb1 &&
git merge --no-ff svnb2 &&
git svn dcommit
'
test_expect_success 'check new mergeinfo added' '
mergeinfo=$(svn_cmd propget svn:mergeinfo "$svnrepo"/branches/svnb1)
test "$mergeinfo" = "/branches/svnb2:3,8,16-17
/branches/svnb3:4,9
/branches/svnb4:5-6,10-12
/branches/svnb5:6,11"
'
test_expect_success 'reintegration merge' '
git checkout svnb4 &&
git merge --no-ff svnb1 &&
git svn dcommit
'
test_expect_success 'check reintegration mergeinfo' '
mergeinfo=$(svn_cmd propget svn:mergeinfo "$svnrepo"/branches/svnb4)
test "$mergeinfo" = "/branches/svnb1:2-4,7-9,13-18
/branches/svnb2:3,8,16-17
/branches/svnb3:4,9
/branches/svnb5:6,11"
'
test_expect_success 'dcommit a merge at the top of a stack' '
git checkout svnb1 &&
touch anotherfile &&
git add anotherfile &&
git commit -m "a commit" &&
git merge svnb4 &&
git svn dcommit
'
test_done
|
twitter/git
|
t/t9161-git-svn-mergeinfo-push.sh
|
Shell
|
gpl-2.0
| 2,556 |
#!/usr/bin/env bash
set -o pipefail -eux
declare -a args
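# The argument is expected in the form cloud/python/group (for example a
# hypothetical "aws/3.6/1"), split on '/' and ':' below.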
IFS='/:' read -ra args <<< "$1"
cloud="${args[0]}"
python="${args[1]}"
group="${args[2]}"
target="shippable/${cloud}/group${group}/"
stage="${S:-prod}"
changed_all_target="shippable/${cloud}/smoketest/"
if [ "${group}" == "1" ]; then
# only run smoketest tests for group1
changed_all_mode="include"
if ! ansible-test integration "${changed_all_target}" --list-targets > /dev/null 2>&1; then
# no smoketest tests are available for this cloud
changed_all_target="none"
fi
else
# smoketest tests already covered by group1
changed_all_mode="exclude"
fi
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--remote-terminate always --remote-stage "${stage}" \
--docker --python "${python}" --changed-all-target "${changed_all_target}" --changed-all-mode "${changed_all_mode}"
|
simonwydooghe/ansible
|
test/utils/shippable/cloud.sh
|
Shell
|
gpl-3.0
| 1,001 |
#!/bin/bash
set -e
PARENT_DIR="$(dirname "$0")"
jasmine-node "$PARENT_DIR"/spec/
|
romiem/angular.js
|
i18n/run-tests.sh
|
Shell
|
mit
| 82 |
#!/bin/bash
mkdir -p test/temp
echo '<script src="bundle.js"></script>' > test/temp/test.html
browserify -t brfs test/*.js -o test/temp/bundle.js
|
toulon/trestle
|
templates/app/public/vendor/forms/node_modules/browserify/node_modules/insert-module-globals/node_modules/lexical-scope/test/testling.sh
|
Shell
|
mit
| 146 |
#!/bin/bash
if [ "$EUID" -ne 0 ]; then
echo "Error: This script must be run as root"
exit 1
fi
# Setup apt-get for grabbing mono snapshot builds
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF
echo "deb http://jenkins.mono-project.com/repo/debian sid main" | tee /etc/apt/sources.list.d/mono-jenkins.list
apt-get update > /dev/null
apt-get -y install mono-snapshot-latest
. mono-snapshot mono
if [ ! -d "$MONO_PREFIX" ]; then
echo "Error: Mono snapshot did not load correctly"
exit 1
fi
# Now install the PCL assemblies on the snapshot
source setup-pcl $MONO_PREFIX
|
KamalRathnayake/roslyn
|
build/linux/setup-snapshot.sh
|
Shell
|
apache-2.0
| 638 |
#!/bin/bash
#
# Copyright © 2012-2013 Sergio Arroutbi Braojos <[email protected]>
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided that
# the above copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
# OR PERFORMANCE OF THIS SOFTWARE.
#
# This script gets all the bicycles from
# the King Barcelona store!
#
URL="www.kingbarcelona.com"
ONLY_DOMAIN="kingbarcelona.com"
MAX_TRIES=10
MAX_TIMEOUT=10
. ./common_get
MTB_FIX_29_BIKES_BASE="${URL}/es/rigida-aluminio-c-644_646.html?page="
MTB_FIX_29_BIKES_PAGES="$(seq 1 1)"
MTB_FIX_29_CARBON_BIKES_BASE="${URL}/es/rigida-carbono-c-644_645.html?page="
MTB_FIX_29_CARBON_BIKES_PAGES="$(seq 1 2)"
MTB_FIX_275_BIKES_BASE="${URL}/es/rigida-aluminio-c-644_648.html?page="
MTB_FIX_275_BIKES_PAGES="$(seq 1 1)"
MTB_FIX_275_CARBON_BIKES_BASE="${URL}/es/rigida-carbono-c-644_647.html?page="
MTB_FIX_275_CARBON_BIKES_PAGES="$(seq 1 1)"
URBAN_ELECTRIC_BIKES_BASE="${URL}/es/ebikes-c-644_656.html?page="
URBAN_ELECTRIC_BIKES_PAGES="$(seq 1 1)"
MTB_DOUBLE_29_BIKES_BASE="${URL}/es/doble-suspension-29-aluminio-c-644_650.html?page="
MTB_DOUBLE_29_BIKES_PAGES="$(seq 1 1)"
MTB_DOUBLE_29_CARBON_BIKES_BASE="${URL}/es/doble-suspension-29-carbono-c-644_649.html?page="
MTB_DOUBLE_29_CARBON_BIKES_PAGES="$(seq 1 1)"
MTB_DOUBLE_275_BIKES_BASE="${URL}/es/doble-suspension-275-aluminio-c-644_652.html?page="
MTB_DOUBLE_275_BIKES_PAGES="$(seq 1 1)"
MTB_DOUBLE_275_CARBON_BIKES_BASE="${URL}/es/doble-suspension-275-carbono-c-644_651.html?page="
MTB_DOUBLE_275_CARBON_BIKES_PAGES="$(seq 1 1)"
ROAD_BIKES_BASE="${URL}/es/carretera-c-644_653.html?page="
ROAD_BIKES_PAGES="$(seq 1 2)"
ROAD_DISC_BIKES_BASE="${URL}/es/carretera-disco-c-644_654.html?page="
ROAD_DISC_BIKES_PAGES="$(seq 1 1)"
ROAD_CICLOCROSS_BIKES_BASE="${URL}/es/ciclo-cross-c-644_658.html?page="
ROAD_CICLOCROSS_BIKES_PAGES="$(seq 1 1)"
URBAN_BIKES_BASE="${URL}/es/trekking-touring-c-644_673.html?page="
URBAN_BIKES_PAGES="$(seq 1 1)"
URBAN_FOLDING_BIKES_BASE="${URL}/es/plegable-c-644_678.html?page="
URBAN_FOLDING_BIKES_PAGES="$(seq 1 1)"
URBAN_FAT_BIKES_BASE="${URL}/es/fatbike-c-644_659.html?page="
URBAN_FAT_BIKES_PAGES="$(seq 1 1)"
KIDS_BIKES_BASE="${URL}/es/bicicletas-infantiles-c-644_655.html?page="
KIDS_BIKES_PAGES="$(seq 1 1)"
KIDS_BTT_BIKES_BASE="${URL}/es/bicicletas-26-c-644_675.html?page="
KIDS_BTT_BIKES_PAGES="$(seq 1 1)"
bubic_get_page_outfile "${MTB_FIX_29_BIKES_BASE}" "${MTB_FIX_29_BIKES_PAGES}" mtb-fix-29
bubic_get_page_outfile "${MTB_FIX_29_CARBON_BIKES_BASE}" "${MTB_FIX_29_CARBON_BIKES_PAGES}" mtb-fix-29-carbon
bubic_get_page_outfile "${MTB_FIX_275_BIKES_BASE}" "${MTB_FIX_275_BIKES_PAGES}" mtb-fix-275
bubic_get_page_outfile "${MTB_FIX_275_CARBON_BIKES_BASE}" "${MTB_FIX_275_CARBON_BIKES_PAGES}" mtb-fix-275-carbon
bubic_get_page_outfile "${MTB_DOUBLE_29_BIKES_BASE}" "${MTB_DOUBLE_29_BIKES_PAGES}" mtb-double-29
bubic_get_page_outfile "${MTB_DOUBLE_29_CARBON_BIKES_BASE}" "${MTB_DOUBLE_29_CARBON_BIKES_PAGES}" mtb-double-29-carbon
bubic_get_page_outfile "${MTB_DOUBLE_275_BIKES_BASE}" "${MTB_DOUBLE_275_BIKES_PAGES}" mtb-double-275
bubic_get_page_outfile "${MTB_DOUBLE_275_CARBON_BIKES_BASE}" "${MTB_DOUBLE_275_CARBON_BIKES_PAGES}" mtb-double-275-carbon
bubic_get_page_outfile "${ROAD_BIKES_BASE}" "${ROAD_BIKES_PAGES}" road
bubic_get_page_outfile "${ROAD_DISC_BIKES_BASE}" "${ROAD_DISC_BIKES_PAGES}" road-disc
bubic_get_page_outfile "${ROAD_CICLOCROSS_BIKES_BASE}" "${ROAD_CICLOCROSS_BIKES_PAGES}" road-ciclocross
bubic_get_page_outfile "${URBAN_ELECTRIC_BIKES_BASE}" "${URBAN_ELECTRIC_BIKES_PAGES}" urban-electric
bubic_get_page_outfile "${URBAN_BIKES_BASE}" "${URBAN_BIKES_PAGES}" urban
bubic_get_page_outfile "${URBAN_FOLDING_BIKES_BASE}" "${URBAN_FOLDING_BIKES_PAGES}" urban-folding
bubic_get_page_outfile "${URBAN_FAT_BIKES_BASE}" "${URBAN_FAT_BIKES_PAGES}" urban-fat
bubic_get_page_outfile "${KIDS_BIKES_BASE}" "${KIDS_BIKES_PAGES}" kids
bubic_get_page_outfile "${KIDS_BTT_BIKES_BASE}" "${KIDS_BTT_BIKES_PAGES}" kids-btt
|
sarroutbi/buscobici
|
getscripts/kingbarcelona_get.sh
|
Shell
|
isc
| 4,535 |
#!/bin/bash
# if no arguments are given, we just want to build
if [ "$1" == "" ]; then
echo "Building Vektor..."
bam $BAMFLAGS -a Mode=Debug
elif [ "$1" == "install" ]; then
cp build/usr/local/lib/* /usr/local/lib/ &&
cp build/usr/local/include/* /usr/local/include/ &&
cp build/usr/local/bin/* /usr/local/bin/ &&
cp -af build/usr/local/share/vektor /usr/local/share/ || exit 1
echo "build/usr/local/lib/* -> /usr/local/lib/"
echo "build/usr/local/include/* -> /usr/local/lib/"
echo "build/usr/local/bin/* -> /usr/local/bin/"
echo "build/usr/local/share/vektor -> /usr/local/share/"
elif [ "$1" == "clean" ]; then
echo "removing build/"
rm -fR build
else
echo "Usage:"
echo -e "\t$0\t\tBuild"
echo -e "\t$0 install\tInstall to /usr/local/lib, /usr/local/include, and /usr/local/bin"
echo -e "\t$0 clean\t\tClean build outputs"
fi
|
Potent-Code/vektor
|
make.sh
|
Shell
|
isc
| 845 |
#!/usr/bin/env bash
# Bundle components/modules
gulp aws-1
pm2 delete all
# Boot up the app!
npm run pm2-aws-1
|
russll/analytics-suite
|
tools/start-aws-1.sh
|
Shell
|
mit
| 113 |
#!/bin/sh
sudo apt install cockpit cockpit-doc cockpit-docker cockpit-machines cockpit-packagekit
|
locoesso/install
|
ubuntu/cockpit-docker-machines.sh
|
Shell
|
mit
| 99 |
#!/bin/sh
NOA_HOME=${HOME}/.noa
if [ ! -d ${NOA_HOME}/pgdc/pgdata ];
then
echo "Looks like Noa is not setup."
echo "Run ./noa_docker_setup.sh to create a new Noa environment."
exit 1
fi
echo "> Bringing down Noa Containers"
sudo docker-compose down
echo "> Twiddling a bit before wiping off ${NOA_HOME}"
sleep 6
echo "> rm -rf ${NOA_HOME}"
sudo rm -rf ${NOA_HOME}
echo "> Done! You can start all over by running ./noa_docker_setup.sh"
|
handnot2/noa
|
noa_docker_cleanup.sh
|
Shell
|
mit
| 447 |
#!/bin/sh
path=`dirname $0`
# start an instance of the PHP built-in web server
# needed to execute the w3c tests in a browser
php -S localhost:8000 -t ${path}/w3c/ &
|
dperini/nwsapi
|
test/w3c-test.sh
|
Shell
|
mit
| 170 |
git pull origin master
touch coins_uwsgi.ini
|
desecho/coins
|
deploy.sh
|
Shell
|
mit
| 44 |
#!/usr/bin/env bash
# Flexibility training|2014-10-01
python topswim.py 1649 19 2
# Mastering swimming's inner skill -- the "super straight" position|2014-10-02
python topswim.py 11424 33 2
# Common backstroke problems and how to correct them|2014-10-03
python topswim.py 2039 1 2
# Dryland drills for the "super straight" position|2014-10-04
python topswim.py 73347 11 2
# An anatomical explanation of kicking with power from the waist!|2014-10-05
python topswim.py 84268 7 2
# Center of gravity and floating|2014-10-06
python topswim.py 39910 63 2
# Using shoulder drive to create the sideways body roll|2014-10-07
python topswim.py 40260 7 2
# Introductory backstroke kick drills (with pictures)|2014-10-08
python topswim.py 50582 7 2
# What effect does swimming have on the skin?|2014-10-09
python topswim.py 53979 1 2
# The most complete discussion of freestyle breathing technique!|2014-10-10
python topswim.py 55985 7 2
# Tips for overcoming obstacles in swimming|2014-10-11
python topswim.py 30513 2 2
# (Caitou's notes) Developing a feel for the water -- thighs, hips and abdomen|2014-10-12
python topswim.py 58796 2 2
# [Video] Freestyle technique -- tips beginners must know|2014-10-13
python topswim.py 75657 1 2
# Illustrated causes of swimming cramps and how to relieve them|2014-10-14
python topswim.py 73682 12 2
# Basic body positions in exercise|2014-10-15
python topswim.py 73607 2 2
# How to relieve muscle soreness after exercise|2014-10-16
python topswim.py 714 3 2
# The effects of exercise on bone structure and metabolism|2014-10-17
python topswim.py 4652 1 2
# Q&A on the shoulder joint|2014-10-18
python topswim.py 71777 3 2
# Sports nutrition supplements|2014-10-20
python topswim.py 764 1 2
# The role of the waist (core) in swimming|2014-10-22
python topswim.py 75568 1 2
# Key points for practicing the freestyle kick|2014-10-23
python topswim.py 39917 29 2
# 5. Water ball, water ball, round and slippery -- a brief talk on feel for the water|2014-10-26
python topswim.py 40007 1 2
# How to improve your position in the water. Translation: Seahiker, How to Stop Swimming Like Gollum in 5 Easy Steps|2014-11-12
python topswim.py 78540 12 2
# Breaststroke arm-pull technique (illustrated)|2014-11-16
python topswim.py 1541 8 2
# The biomechanics of swimming -- a great article worth savoring and revisiting!|2014-11-20
python topswim.py 28669 1 2
# Breaststroke arm-pull drills|2014-11-22
python topswim.py 73401 3 2
# Moxi's breaststroke technique explained in 15 pictures|2014-12-16
python topswim.py 33965 15 2
# Great! Illustrated teaching material for all four strokes (small GIF animations)|2014-12-17
python topswim.py 82328 1 2
# Kick, kick, kick|2014-12-19
python topswim.py 77449 2 2
# How to protect your eyes, ears, hair and skin when swimming in a pool|2014-12-20
python topswim.py 41420 4 2
# Stretching before and after swimming|2014-12-21
python topswim.py 111722 1 2
# Xiaoyu's freestyle insights|2015-01-17
python topswim.py 23370 1 2
# A fresh and complete discussion of the freestyle "catch" and "pull"|2015-04-12
python topswim.py 56365 1 2
# Breathing in three steps|2015-08-14
python topswim.py 786 5 2
|
reverland/topswim
|
rebuild.sh
|
Shell
|
mit
| 2,637 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2642-2
#
# Security announcement date: 2015-06-21 00:00:00 UTC
# Script generation date: 2017-01-01 21:04:38 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fixed in version:
#   - linux-image-3.13.0-55-generic-lpae:3.13.0-55.94~precise1
#   - linux-image-3.13.0-55-generic:3.13.0-55.94~precise1
#
# Last versions recommended by the security team:
# - linux-image-3.13.0-55-generic-lpae:3.13.0-55.94~precise1
# - linux-image-3.13.0-55-generic:3.13.0-55.94~precise1
#
# CVE List:
# - CVE-2015-1328
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade linux-image-3.13.0-55-generic-lpae=3.13.0-55.94~precise1 -y
sudo apt-get install --only-upgrade linux-image-3.13.0-55-generic=3.13.0-55.94~precise1 -y
|
Cyberwatch/cbw-security-fixes
|
Ubuntu_12.04_LTS/x86_64/2015/USN-2642-2.sh
|
Shell
|
mit
| 922 |
#!/usr/bin/env bash
if which nvim > /dev/null ; then
echo "########## Neovim already installed. Skipping ..."
else
echo "########## Installing neovim ..."
sudo apt-get install software-properties-common
sudo add-apt-repository --remove -y ppa:neovim-ppa/unstable
sudo add-apt-repository -y ppa:neovim-ppa/unstable
sudo apt-get update
sudo apt-get install -y --force-yes neovim
sudo update-alternatives --install /usr/bin/vi vi /usr/bin/nvim 60
sudo update-alternatives --install /usr/bin/editor editor /usr/bin/nvim 60
sudo update-alternatives --install /usr/bin/vim vim /usr/bin/nvim 60
fi
|
eendroroy/loki-bootstrap
|
recipes/neovim.sh
|
Shell
|
mit
| 610 |
#!/bin/bash
cd utils
nosetests -v --with-coverage --cover-erase
if [ $? -eq 1 ]; then
echo "utils tests failed"
exit 1
fi
cd ../kafka-monitor
nosetests -v --with-coverage --cover-erase --cover-package=../kafka-monitor/
if [ $? -eq 1 ]; then
echo "kafka-monitor tests failed"
exit 1
fi
cd ../redis-monitor
nosetests -v --with-coverage --cover-erase --cover-package=../redis-monitor/
if [ $? -eq 1 ]; then
echo "redis-monitor tests failed"
exit 1
fi
cd ../crawler
nosetests -v --with-coverage --cover-erase --cover-package=crawling/
if [ $? -eq 1 ]; then
echo "crawler tests failed"
exit 1
fi
cd ../rest
nosetests -v --with-coverage --cover-erase --cover-package=../rest/
if [ $? -eq 1 ]; then
echo "rest tests failed"
exit 1
fi
cd ../
coverage combine crawler/.coverage kafka-monitor/.coverage redis-monitor/.coverage utils/.coverage rest/.coverage
|
istresearch/scrapy-cluster
|
run_offline_tests.sh
|
Shell
|
mit
| 889 |
#!/bin/bash
if [ $# -lt 1 ]; then
echo You need to specify \"on\" or \"off\" as an argument.
exit 1
fi
NUM_PROC=`cat /sys/devices/system/cpu/present | sed 's/-/ /g' | awk '{print $2}'`
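# /sys/devices/system/cpu/present reads like "0-7"; the sed/awk above extract
# the highest CPU index.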
if [ "$1" == "off" ]; then
echo 'CPU 0 cannot be disabled. (always enabled)'
fi
for ((i=1;i<=$NUM_PROC;i++)); do
ENABLED=`cat /sys/devices/system/cpu/cpu$i/online`
if [ "$1" == "on" -a "$ENABLED" == "0" ]; then
echo Enabling CPU $i ..
echo 1 > /sys/devices/system/cpu/cpu$i/online
fi
if [ "$1" == "off" -a "$ENABLED" == "1" ]; then
echo Disabling CPU $i ..
echo 0 > /sys/devices/system/cpu/cpu$i/online
fi
done
|
ANLAB-KAIST/NBA
|
scripts/turn_allothercpu.sh
|
Shell
|
mit
| 632 |
sudo docker run -p 49160:8080 -d yoshuawuyts/frontend-server
|
yoshuawuyts/frontend-server
|
sh/run.sh
|
Shell
|
mit
| 61 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2955-1
#
# Security announcement date: 2016-04-27 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:22 UTC
#
# Operating System: Ubuntu 15.10
# Architecture: x86_64
#
# Vulnerable packages fixed in version:
#   - liboxideqtcore0:1.14.7-0ubuntu0.15.10.1
#
# Last versions recommended by the security team:
# - liboxideqtcore0:1.15.8-0ubuntu0.15.10.1
#
# CVE List:
# - CVE-2016-1578
# - CVE-2016-1646
# - CVE-2016-1647
# - CVE-2016-1649
# - CVE-2016-1653
# - CVE-2016-1654
# - CVE-2016-1655
# - CVE-2016-1659
# - CVE-2016-3679
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade liboxideqtcore0=1.15.8-0ubuntu0.15.10.1 -y
|
Cyberwatch/cbw-security-fixes
|
Ubuntu_15.10/x86_64/2016/USN-2955-1.sh
|
Shell
|
mit
| 820 |
#!/bin/bash
STANFORD_SEGMENTER_JAR='/home/wlzhuang/stanford-segmenter-2015-04-20/stanford-segmenter-3.5.2.jar'
pip3 install Jpype1
javac -cp "$STANFORD_SEGMENTER_JAR" \
-d chineseseg chineseseg/DemoSeg.java
|
iamalbert/chinese-segmenter
|
build.sh
|
Shell
|
mit
| 213 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2458-1
#
# Security announcement date: 2015-01-14 00:00:00 UTC
# Script generation date: 2017-01-01 21:04:12 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: i686
#
# Vulnerable packages fixed in version:
#   - firefox:35.0+build3-0ubuntu0.14.04.2
#
# Last versions recommended by the security team:
# - firefox:50.0+build2-0ubuntu0.14.04.2
#
# CVE List:
# - CVE-2014-8634
# - CVE-2014-8635
# - CVE-2014-8636
# - CVE-2014-8637
# - CVE-2014-8638
# - CVE-2014-8639
# - CVE-2014-8640
# - CVE-2014-8641
# - CVE-2014-8642
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade firefox=50.0+build2-0ubuntu0.14.04.2 -y
|
Cyberwatch/cbw-security-fixes
|
Ubuntu_14.04_LTS/i686/2015/USN-2458-1.sh
|
Shell
|
mit
| 813 |
#!/bin/bash
docker run \
--name=proxy \
--detach=true \
--publish=80:80 \
--volume=/var/run/docker.sock:/tmp/docker.sock \
jwilder/nginx-proxy:latest
|
badsyntax/docker-project
|
start-proxy.sh
|
Shell
|
mit
| 156 |
#!/bin/bash
. env.sh
vcf_del -c ${chr} -i 50 -f union.2010_06.deletions.sites.vcf format.site.vcf
DonorSim -D -v format.site.vcf -n ${number} -c ${chr} -r $ref/ncbi36/${chr}.fa -p 0.5
|
zz-zigzag/bioinformatics
|
scripts/simulate/old/0.0.pre.sh
|
Shell
|
mit
| 187 |
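# Fetch Bing's image-of-the-day metadata, build the 1920x1080 URL from <urlBase>,
# download it to /tmp/bg.jpg and set it as the wallpaper with feh.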
sh -c "wget -qO /tmp/bg.jpg `wget -qO - bing.com/HPImageArchive.aspx?format=xml\\&n=1\\&idx=$1 |sed -e 's/.*<urlBase>\\(.*\\)<\\/url.*/bing.com\\1_1920x1080.jpg/'` && feh /tmp/bg.jpg --bg-scale"
|
phaezah7/misc
|
bing_wallpaper.sh
|
Shell
|
mit
| 195 |
pkg_name=caddy-proxy
pkg_origin=jarvus
pkg_version=1.0.4
pkg_maintainer="Chris Alfano <[email protected]>"
pkg_license=("MIT")
pkg_description="Service and config wrapper around Caddy for providing a simple proxy to a web application"
pkg_deps=(
"core/caddy/${pkg_version}"
)
pkg_svc_run="caddy -agree -conf ${pkg_svc_config_path}/Caddyfile"
pkg_svc_user="root"
pkg_binds=(
[backend]="port"
)
do_build() {
return 0
}
do_install() {
return 0
}
do_strip() {
return 0
}
|
JarvusInnovations/habitat-plans
|
caddy-proxy/plan.sh
|
Shell
|
mit
| 478 |
pkg_prereqs=('apt-get')
pkg_extract_path=~/
pkg_description='docker - lightweight linux env isolation containers'
function install_package() {
b.system.pretend_super
# install the backported kernel
sudo apt-get update > /dev/null
sudo apt-get install -y linux-image-extra-`uname -r`
sudo sh -c 'echo deb http://get.docker.io/ubuntu docker main > /etc/apt/sources.list.d/docker.list'
sudo apt-get update > /dev/null
sudo apt-get install -y lxc-docker
# Install Docker
sudo curl -s https://get.docker.io/ubuntu/ | sudo sh
}
|
smileart/omg
|
packages/debian/docker/docker.sh
|
Shell
|
mit
| 544 |
alias reload!='. ~/.zshrc'
alias caskrepo="$(brew --repository)"/Library/Taps/caskroom/homebrew-cask
alias dev="~/Development"
alias weather="wego"
alias hosts="sudo vim /etc/hosts"
alias todos="~/Development/Cliist/cliist.py"
alias killrails='kill -9 $(lsof -i tcp:3000 -t)'
alias killpostgre="launchctl unload -w ~/Library/LaunchAgents/homebrew.mxcl.postgresql.plist && pg_ctl restart -D /usr/local/var/postgres && launchctl load -w ~/Library/LaunchAgents/homebrew.mxcl.postgresql.plist"
alias gu='function _guetzli(){ guetzli --quality 84 $1 $1 }; _guetzli'
alias wt="cd wp-content/themes"
alias p="cd ~/Projects"
alias vim="nvim"
alias tmux='tmux -2' # for 256color
alias tmux='tmux -u' # to get rid of unicode rendering problem
alias fixperms='find . -type f ! -path "*node_modules*" ! -path "*Makefile*" -exec chmod 644 {} \; && find . -type d ! -path "*node_modules*" -exec chmod 755 {} \;'
alias bfg="java -jar ~/.bin/bfg.jar"
alias newsite="sudo vim /etc/hosts /etc/apache2/httpd.conf"
|
twooton61/dotfiles
|
zsh/aliases.zsh
|
Shell
|
mit
| 999 |
#!/bin/bash
docker run --link elasticsearch:elasticsearch --name kibana -p 5601:5601 -d kibana
|
Usualme/profile
|
docker/kibana.sh
|
Shell
|
mit
| 95 |
#!/bin/bash
HOSTNAME=$(hostname)
echo "This script is running on ${HOSTNAME}"
|
riccardotommasini/shell-scripting-course
|
section-two/exe3.sh
|
Shell
|
mit
| 80 |
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
  # use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "Pods-NQLibrary_Example/NQLibrary.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-NQLibrary_Example/NQLibrary.framework"
fi
|
quannguyen90/NQLibrary
|
Example/Pods/Target Support Files/Pods-NQLibrary_Example/Pods-NQLibrary_Example-frameworks.sh
|
Shell
|
mit
| 3,556 |
#!/bin/bash
# ---------------------------------------------------------------------
#
# Copyright (c) 2012 University of Oxford
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# ---------------------------------------------------------------------
#
# Run from new VM console or SSH session
echo =============================
echo "Install packages"
echo =============================
apt-get install acpid
apt-get install unattended-upgrades
apt-get install denyhosts
apt-get install ufw
apt-get install samba-doc
apt-get install krb5-user
apt-get install ntp
apt-get install libapache2-webauth
apt-get install libapache2-mod-auth-kerb
apt-get install cadaver
apt-get install nano
apt-get install slapd
apt-get install manpages
apt-get install man-db
apt-get install locate
apt-get install kernel-package
apt-get install lvm2
apt-get install acl
apt-get install git-core
apt-get install python-pip
apt-get install wget
cd /mnt/data/tool
wget http://redis.googlecode.com/files/redis-2.4.6.tar.gz
tar xzf redis-2.4.6.tar.gz
cd redis-2.4.6
make
rm /mnt/data/tool/redis*.gz
apt-get install libxml2-dev
apt-get install libxslt1-dev
apt-get install libacl1-dev
apt-get install python-dev
apt-get install python-psycopg2
apt-get install postgresql
apt-get install libapache2-mod-wsgi
/etc/init.d/apache2 restart
sudo a2enmod wsgi
apt-get update
echo =============================
echo "Next step: postboot_4.sh"
echo =============================
|
dataflow/DataStage
|
datastage-base/postboot-3.sh
|
Shell
|
mit
| 2,485 |
# nix-env -i fzf
# nix-env -i fd
# Setting fd as the default source for fzf
export FZF_DEFAULT_COMMAND='fd --type f'
# To apply the command to CTRL-T as well
export FZF_CTRL_T_COMMAND="$FZF_DEFAULT_COMMAND"
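# fgst: fuzzy-pick one or more files from `git status -s` and print their paths
# (used by fvg below).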
fgst() {
eval "command git status -s" | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_CTRL_T_OPTS" fzf -m "$@" | while read -r item; do
printf "$item" | cut -c3-
done
}
fvg() {
vim -p $(fgst)
}
frg() {
files=$(git status -s | rg spec | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_CTRL_T_OPTS" fzf -m "$@" | cut -c3- | tr -s " ")
echo $files
rspec $(printf $files | paste -s -d " ")
}
fbr() {
local branches branch
branches=$(git --no-pager branch -vv) &&
branch=$(echo "$branches" | fzf-tmux +m) &&
git checkout $(echo "$branch" | awk '{print $1}' | sed "s/.* //")
}
fshow() {
git log --graph --color=always \
--format="%C(auto)%h%d %s %C(black)%C(bold)%cr" "$@" |
fzf --ansi --no-sort --reverse --tiebreak=index --bind=ctrl-s:toggle-sort \
--bind "ctrl-m:execute:
(grep -o '[a-f0-9]\{7\}' | head -1 |
xargs -I % sh -c 'git show --color=always % | less -R') << 'FZF-EOF'
{}
FZF-EOF"
}
fuzzyrg() {
eval "command rg -l '$*'" | FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} --reverse $FZF_DEFAULT_OPTS $FZF_CTRL_T_OPTS" fzf -m | while read -r item; do
printf "$item\n"
done
}
fv() {
vim -p $(fuzzyrg $*)
}
|
bernardeli/dotfiles
|
shell/fzf.sh
|
Shell
|
mit
| 1,502 |
#!/bin/bash
dbname=zabbix
daily_retention='31 days'
monthly_retention='12 months'
cleanup_log_retention=31 # days
#####################################################################
# daily_partition_cleanup.sh
# Drop old PostgreSQL history and trend partitions from the Zabbix DB
#
# This script will drop old history and trend partitions from
# the zabbix database based on retentions set above.
#
# Adapted from:
# https://www.zabbix.org/wiki/Docs/howto/zabbix2_postgresql_autopartitioning
#
# - Must be run as DB super-user
# - Should be run daily from cron, but *NOT* during a DB backup
#
# A "logs" directory will be automatically created below the directory
# where this cleanup script is executed. The logs will be purged
# automatically based on the above log retention.
#
#####################################################################
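# Example crontab entry (hypothetical schedule and path; set MAILTO so cron can
# mail errors, and keep it clear of the DB backup window):
#   MAILTO=dba@example.com
#   30 2 * * * /var/lib/pgsql/scripts/daily_partition_cleanup.sh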
main() {
# clean up old logs
find ${logdir} -name "${sscript}.*.log" -mtime +${cleanup_log_retention} -delete
# begin run
echo '================================================================'
date +"%Y-%m-%d %H:%M:%S %Z"
echo "Script..: ${abspath}/${script}"
echo "Hostname: `hostname`"
echo "Logfile.: ${logfile}"
echo "Settings:"
echo " dbname=${dbname}"
echo " daily_retention='${daily_retention}'"
echo " monthly_retention='${monthly_retention}'"
echo " cleanup_log_retention=${cleanup_log_retention}"
echo
drop_old_partitions || exit 1
}
drop_old_partitions () {
echo 'drop_old_partitions --------------------------------------------'
date +"%Y-%m-%d %H:%M:%S %Z"
echo
psql -Xqte -v ON_ERROR_STOP=on ${dbname} <<EOF
SELECT zbx_part_cleanup_func('${daily_retention}', 'day');
SELECT zbx_part_cleanup_func('${monthly_retention}', 'month');
EOF
rc=$?
# error encountered?
if [[ ${rc} -ne 0 ]]; then
# force cron to email someone (be sure to set "MAILTO" in crontab)
# add two blanks to beginning of every line to prevent MS Outlook automatic word-wrapping
echo "
************* ERROR encountered!
ERROR in Zabbix partition maintenance script!
Script..: ${abspath}/${script}
Host....: `hostname`
Database: ${dbname}
Log file: ${logfile}
Date....: `date +'%Y-%m-%d %H:%M:%S %Z'`
Please investigate!!!
Tail of log:
============
`tail ${logfile}|sed -e 's/^/ /'`
" >&3
# write to log
echo '************* ERROR encountered! Exiting...'
fi
echo "Ended: `date +'%Y-%m-%d %H:%M:%S %Z'`"
echo
return $rc
}
#########
# SETUP #
#########
abspath=`cd ${0%/*};pwd` # get absolute path of script directory
logdir=${abspath}/logs # log directory under script directory
script=${0##*/} # script name
sscript=${script%.*} # script name without ".sh"
logfile=${logdir}/${sscript}.$(date "+%Y-%m-%d").log
# create log subdirectory if does not exist
if [[ ! -d ${logdir} ]]; then
mkdir -p ${logdir}
if [[ $? -ne 0 ]]; then
echo "`hostname` ${0} ERROR: unable to create log directory" >&2
exit 2
fi
fi
# non-interactive?
if [[ $(tty) == "not a tty" ]]; then # run non-interactively (i.e. cron)
exec 3>&2 # save stderr descriptor to send error emails from cron
main >> ${logfile} 2>&1 # everything else to log file
else # run interactively (i.e. human)
exec 3>/dev/null # no need to send email errors
main 2>&1 | tee -a ${logfile} # send to both stdout and logfile
fi
exec 3>&- # close descriptor
|
robbrucks/zabbix-postgresql-auto-partitioning
|
daily_partition_cleanup.sh
|
Shell
|
mit
| 3,507 |
#!/usr/bin/env bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
PHP_VER=$(php -v | head -n 1 | cut -d ' ' -f 2);
SDK_VER=$(grep -oiP '(?<="version": ")([a-zA-Z0-9\-.]+)(?=")' composer.json)
bash ${DIR}/allocate_test_cloud.sh "PHP ${PHP_VER} SDK ${SDK_VER}"
|
cloudinary/cloudinary_php
|
tools/get_test_cloud.sh
|
Shell
|
mit
| 289 |
#!/usr/bin/env bash
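# Turn long options of the form "--name value" into shell variables before
# calling the Python helper (e.g. a hypothetical "--username alice" sets username=alice).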
while echo $1 | grep -q ^--; do
eval $( echo $1 | sed 's/^--//' )=$2
shift
shift
done
python ./get_tasks.py $username $password $request
|
renatobenks/ToDoListSoftExpert
|
cli/tasks/import-tasks-to-github.sh
|
Shell
|
mit
| 171 |