| code (string, 2-1.05M chars) | repo_name (string, 5-110 chars) | path (string, 3-922 chars) | language (1 class) | license (15 classes) | size (int64, 2-1.05M) |
---|---|---|---|---|---|
# This script is called from release_one_time.sh, after the modules have been stopped.
# TAKE CARE: use relative paths. Will only work from ${NIVIS_FIRMWARE}
db_path=${NIVIS_ACTIVITY_FILES}
db_file=${db_path}/Monitor_Host.db3
# The scripts inside must be named exactly updateDB_to_${sql_script_version}.sql
sql_script_path=DbUpdate/
echo "path to db is -> $db_path"
db_schema_version=$(echo "select Value from Properties where Key = 'SchemaVersion';" | sqlite3 "$db_file")
if [ $? != 0 ]; then
echo "Cannot retrieve installed database schema version."
exit 1
fi
echo "current db version is '${db_schema_version}'"
#echo "delete from DeviceChannelsHistory;"
#echo "delete from DeviceChannelsHistory;" | sqlite3 $db_file
# db_schema_version is equal to the version of MH that needs that change in the db
#Version of initial DB (released with the initial image)
PREV_DB_VERSION="15.0.00"
#List with all successive DB versions, in the order to be applied
DB_VERSION_LIST="2.0.08 2.0.12 2.0.13 2.0.15 2.0.17 2.0.18 2.0.24 2.0.33 2.0.50"
for crt_ver in $DB_VERSION_LIST; do
if [ "$db_schema_version" == "$PREV_DB_VERSION" ]; then
sql_script_version=$crt_ver
sql_script_file=${sql_script_path}/updateDB_to_${sql_script_version}.sql
echo "Upgrading DB ver $PREV_DB_VERSION -> $sql_script_version ..."
sqlite3 "$db_file" < "$sql_script_file"
if [ $? != 0 ]; then
echo "ERROR upgrading DB to version $sql_script_version"
log2flash "ERROR upgrading DB to version $sql_script_version"
else
echo "DB upgrade OK to version $sql_script_version"
log2flash "DB upgrade OK to version $sql_script_version"
fi
#next schema version for cascaded upgrade
db_schema_version=$sql_script_version
fi
PREV_DB_VERSION=$crt_ver
done
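# For reference, a hypothetical upgrade script such as
# DbUpdate/updateDB_to_2.0.08.sql (sketch; the table/column in the ALTER are
# assumed, only Properties/SchemaVersion comes from this script) would apply
# its change and bump the stored version so the database matches the cascade:
#   ALTER TABLE Devices ADD COLUMN LastRead INTEGER;
#   UPDATE Properties SET Value = '2.0.08' WHERE Key = 'SchemaVersion';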
|
irares/ISA100.11a-Gateway
|
AccessNode/config/FW_mesh_HW_any/release_isa/updateDB.sh
|
Shell
|
gpl-3.0
| 1,730 |
#!/tools/bin/sh
mkdir -pv /{bin,boot,etc/{opt,sysconfig},home,lib/firmware,mnt,opt}
mkdir -pv /{media/{floppy,cdrom},sbin,srv,var}
install -dv -m 0750 /root
install -dv -m 1777 /tmp /var/tmp
mkdir -pv /usr/{,local/}{bin,include,lib,sbin,src}
mkdir -pv /usr/{,local/}share/{color,dict,doc,info,locale,man}
mkdir -v /usr/{,local/}share/{misc,terminfo,zoneinfo}
mkdir -v /usr/libexec
mkdir -pv /usr/{,local/}share/man/man{1..8}
case $(uname -m) in
x86_64) ln -sv lib /lib64
ln -sv lib /usr/lib64
ln -sv lib /usr/local/lib64 ;;
esac
mkdir -v /var/{log,mail,spool}
ln -sv /run /var/run
ln -sv /run/lock /var/lock
mkdir -pv /var/{opt,cache,lib/{color,misc,locate},local}
ln -sv /tools/bin/{bash,cat,echo,pwd,stty} /bin
ln -sv /tools/bin/perl /usr/bin
ln -sv /tools/lib/libgcc_s.so{,.1} /usr/lib
ln -sv /tools/lib/libstdc++.so{,.6} /usr/lib
sed 's/tools/usr/' /tools/lib/libstdc++.la > /usr/lib/libstdc++.la
ln -sv bash /bin/sh
ln -sv /proc/self/mounts /etc/mtab
cat > /etc/passwd << "EOF"
root:x:0:0:root:/root:/bin/bash
bin:x:1:1:bin:/dev/null:/bin/false
daemon:x:6:6:Daemon User:/dev/null:/bin/false
messagebus:x:18:18:D-Bus Message Daemon User:/var/run/dbus:/bin/false
nobody:x:99:99:Unprivileged User:/dev/null:/bin/false
EOF
cat > /etc/group << "EOF"
root:x:0:
bin:x:1:daemon
sys:x:2:
kmem:x:3:
tape:x:4:
tty:x:5:
daemon:x:6:
floppy:x:7:
disk:x:8:
lp:x:9:
dialout:x:10:
audio:x:11:
video:x:12:
utmp:x:13:
usb:x:14:
cdrom:x:15:
adm:x:16:
messagebus:x:18:
systemd-journal:x:23:
input:x:24:
mail:x:34:
nogroup:x:99:
users:x:999:
EOF
touch /var/log/{btmp,lastlog,faillog,wtmp}
chgrp -v utmp /var/log/lastlog
chmod -v 664 /var/log/lastlog
chmod -v 600 /var/log/btmp
# export PS1 before exec so the login shell inherits it (exec never returns,
# so a line placed after it would be unreachable)
export PS1='\w\$'
exec /tools/bin/bash --login +h
|
uraymeiviar/artos
|
build/crossbuild/chroot-scripts/init-chroot.sh
|
Shell
|
gpl-3.0
| 1,741 |
#!/bin/bash
sudo apt install pandoc ttf-ubuntu-font-family
mkdir fake
cd fake
# you can replace $HOME with any dir
sed -i 's@\$TEXLIVEHOME@'"$HOME"'@' ../texlive.profile
wget http://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz
tar zxf install-tl-unx.tar.gz
./install-tl*/install-tl -profile ../texlive.profile
# texlive.tar.gz is a portable and full TeXLive package
tar zcf texlive.tar.gz -C $HOME texlive
# symlink TeXLive executables to /usr/local/bin/
sudo $HOME/texlive/bin/x86_64-linux/tlmgr path add
# inform apt that all TeXLive dependencies are satisfied
sudo apt-get install equivs
wget https://github.com/scottkosty/install-tl-ubuntu/raw/master/debian-control-texlive-in.txt
equivs-build debian-control-texlive-in.txt
sudo dpkg -i texlive-local*.deb
cd ..
rm -r fake
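# For reference, a minimal texlive.profile compatible with the sed above might
# look like this (sketch; the scheme and option values are assumptions, only
# the $TEXLIVEHOME placeholder is taken from this script):
#   selected_scheme scheme-full
#   TEXDIR $TEXLIVEHOME/texlive
#   TEXMFLOCAL $TEXLIVEHOME/texlive/texmf-local
#   option_doc 0
#   option_src 0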
|
bugra9/bugra9.github.io
|
_travis-ci/pdf/install.sh
|
Shell
|
gpl-3.0
| 799 |
# -f: don't error when a file is absent
rm -f *.acn
rm -f *.acr
rm -f *.alg
find . -name \*.aux -delete
rm -f *.bbl
find . -name \*.DS_store -delete
rm -f *.glg
rm -f *.glo
rm -f *.gls
rm -f *.glsdefs
rm -f *.idx
rm -f *.ilg
rm -f *.ind
rm -f *.ist
rm -f *.log
rm -f *.out
rm -f *.ptc
rm -f *.run.xml
find . -name \*.swp -delete
rm -f *.toc
rm -f log.md
rm -f oq-manual-blx.bib
|
gem/oq-engine
|
doc/manual/clean.sh
|
Shell
|
agpl-3.0
| 281 |
#! /bin/sh
./checker.sh "CAnDL test suite" "$TEST_FILES"
|
Ced/candl
|
tests/old/check_suite.sh
|
Shell
|
lgpl-3.0
| 59 |
#!/bin/sh
# do we have enough arguments?
if [ $# -lt 2 ]; then
echo "Usage:"
echo
echo "./release.sh <release version> <development version>"
exit 1
fi
# pick arguments
release=$1
devel=$2
# get current branch
branch=$(git status -bs | awk '{ print $2 }' | awk -F'.' '{ print $1 }' | head -n 1)
# manually edit and commit changelog changes
#./scripts/changelog.sh $1 | tee CHANGES.md
#git commit -a -m "Modifying changelog."
commit=$(git log --pretty=format:"%H" | head -n 1)
echo "releasing from ${commit} on branch ${branch}"
git push origin ${branch}
# do scala 2.10 release
git checkout -b maint_2.10-${release} ${branch}
mvn --batch-mode \
-P distribution \
-Dresume=false \
-Dtag=adam-parent_2.10-${release} \
-DreleaseVersion=${release} \
-DdevelopmentVersion=${devel} \
-DbranchName=adam_2.10-${release} \
release:clean \
release:prepare \
release:perform
if [ $? != 0 ]; then
echo "Releasing Spark 1, Scala 2.10 version failed."
exit 1
fi
# create spark packages zip file
cp adam-cli/target/adam-cli_2.10-${release}.jar adam-${release}.jar
cp adam-cli/pom.xml adam-${release}.pom
zip -r adam-${release}.zip adam-${release}.jar adam-${release}.pom
# do scala 2.11 release
git checkout -b maint_2.11-${release} ${branch}
./scripts/move_to_scala_2.11.sh
git commit -a -m "Modifying pom.xml files for Spark 1, Scala 2.11 release."
mvn --batch-mode \
-P distribution \
-Dresume=false \
-Dtag=adam-parent_2.11-${release} \
-DreleaseVersion=${release} \
-DdevelopmentVersion=${devel} \
-DbranchName=adam_2.11-${release} \
release:clean \
release:prepare \
release:perform
if [ $? != 0 ]; then
echo "Releasing Spark 1, Scala 2.11 version failed."
exit 1
fi
# do spark 2, scala 2.10 release
git checkout -b maint_spark2_2.10-${release} ${branch}
./scripts/move_to_spark_2.sh
git commit -a -m "Modifying pom.xml files for Spark 2, Scala 2.10 release."
mvn --batch-mode \
-P distribution \
-Dresume=false \
-Dtag=adam-parent-spark2_2.10-${release} \
-DreleaseVersion=${release} \
-DdevelopmentVersion=${devel} \
-DbranchName=adam-spark2_2.10-${release} \
release:clean \
release:prepare \
release:perform
if [ $? != 0 ]; then
echo "Releasing Spark 2, Scala 2.10 version failed."
exit 1
fi
# do spark 2, scala 2.11 release
git checkout -b maint_spark2_2.11-${release} ${branch}
./scripts/move_to_spark_2.sh
./scripts/move_to_scala_2.11.sh
git commit -a -m "Modifying pom.xml files for Spark 2, Scala 2.11 release."
mvn --batch-mode \
-P distribution \
-Dresume=false \
-Dtag=adam-parent-spark2_2.11-${release} \
-DreleaseVersion=${release} \
-DdevelopmentVersion=${devel} \
-DbranchName=adam-spark2_2.11-${release} \
release:clean \
release:prepare \
release:perform
if [ $? != 0 ]; then
echo "Releasing Spark 2, Scala 2.11 version failed."
exit 1
fi
# publish docs
./scripts/publish-scaladoc.sh ${release}
if [ $branch = "master" ]; then
# if original branch was master, update versions on original branch
git checkout ${branch}
mvn versions:set -DnewVersion=${devel} \
-DgenerateBackupPoms=false
git commit -a -m "Modifying pom.xml files for new development after ${release} release."
git push origin ${branch}
fi
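# Sketch of a possible refactor (defined but not invoked; parameter names are
# assumed): the four mvn invocations above differ only in their tag and branch
# prefixes, so they could be driven by a single helper.
do_release() {
local tag_prefix=$1 branch_prefix=$2 label=$3
mvn --batch-mode \
-P distribution \
-Dresume=false \
-Dtag=${tag_prefix}-${release} \
-DreleaseVersion=${release} \
-DdevelopmentVersion=${devel} \
-DbranchName=${branch_prefix}-${release} \
release:clean \
release:prepare \
release:perform
if [ $? != 0 ]; then
echo "Releasing ${label} version failed."
exit 1
fi
}
# e.g.: do_release adam-parent_2.10 adam_2.10 "Spark 1, Scala 2.10"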
|
massie/adam
|
scripts/release/release.sh
|
Shell
|
apache-2.0
| 3,246 |
#!/bin/sh
set -xe
./dci/worker/worker.py &
|
enovance/dci-control-server
|
scripts/start_worker.sh
|
Shell
|
apache-2.0
| 44 |
#!/usr/bin/env bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
function test_module() {
module="$FLINK_PYTHON_DIR/pyflink/$1"
echo "test module $module"
pytest --durations=20 ${module}
if [[ $? -ne 0 ]]; then
echo "test module $module failed"
exit 1
fi
}
# CURRENT_DIR is "flink/flink-python/dev/"
CURRENT_DIR="$(cd "$( dirname "$0" )" && pwd)"
# FLINK_PYTHON_DIR is "flink/flink-python"
FLINK_PYTHON_DIR=$(dirname "$CURRENT_DIR")
# test common module
test_module "common"
# test dataset module
test_module "dataset"
# test datastream module
test_module "datastream"
# test fn_execution module
test_module "fn_execution"
# test metrics module
test_module "metrics"
# test table module
test_module "table"
|
rmetzger/flink
|
flink-python/dev/integration_test.sh
|
Shell
|
apache-2.0
| 1,665 |
#!/bin/bash
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# TODO(mtomasz): Remove this package, once a new upstream release of libarchive
# is available.
AutogenStep() {
ChangeDir ${SRC_DIR}
export MAKE_LIBARCHIVE_RELEASE="1"
./build/autogen.sh
PatchConfigure
cd -
}
if [ "${NACL_SHARED}" = "0" ]; then
EXTRA_CONFIGURE_ARGS+=" --enable-shared=no"
fi
EnableGlibcCompat
ConfigureStep() {
AutogenStep
EXTRA_CONFIGURE_ARGS="--disable-bsdtar --disable-bsdcpio"
EXTRA_CONFIGURE_ARGS+=" --without-iconv"
# xml2 support cannot be added for now because the patch used in
# ports/libarchive doesn't apply cleanly here: the configure file is not
# present in the GitHub repository and is only created after AutogenStep.
# TODO(cmihail): Remove this once nacl.patch is applied correctly.
EXTRA_CONFIGURE_ARGS+=" --without-xml2"
NACLPORTS_CPPFLAGS+=" -Dtimezone=_timezone"
DefaultConfigureStep
}
BuildHost() {
HOST_BUILD_DIR=${WORK_DIR}/build_host
HOST_INSTALL_DIR=${WORK_DIR}/install_host
if [ ! -d ${HOST_INSTALL_DIR} ]; then
Banner "Build host version"
MakeDir ${HOST_BUILD_DIR}
ChangeDir ${HOST_BUILD_DIR}
LogExecute ${SRC_DIR}/configure --without-lzma
LogExecute make -j${OS_JOBS}
LogExecute make install DESTDIR=${HOST_INSTALL_DIR}
cd -
fi
}
BuildStep() {
BuildHost
DefaultBuildStep
}
|
GoogleChromeLabs/chromeos_smart_card_connector
|
third_party/webports/src/src/ports/libarchive-dev/build.sh
|
Shell
|
apache-2.0
| 1,504 |
#!/bin/bash
#Get swarm master IP address
source .chimera_env
mstr_ip=$(docker-machine ip $SWARM_MASTER)
sed -i s/SWARM_MASTER_IP_ADDRES/$mstr_ip/g apps/app1/docker-compose.yml
echo -e "\n $mstr_ip wordpress.wordpress.mysite.com" | sudo tee -a /etc/hosts > /dev/null
eval $(docker-machine env --swarm $SWARM_MASTER)
cd apps/app1 && docker-compose up -d
eval $(docker-machine env -u)
docker run -d -p 8080:9101 --name PROM_HAPROXY_EXP --add-host wordpress.wordpress.mysite.com:$mstr_ip prom/haproxy-exporter -haproxy.scrape-uri='http://stats:[email protected]:8010/haproxy?stats;csv' -haproxy.server-metric-fields="1,2,3,4,7,12"
tmp_ip=$(/sbin/ifconfig eth0 | grep 'inet addr:' | cut -d: -f2 | awk '{ print $1}')
cur=$(grep targets.*: prometheus.yml)
if [[ "$nt" != "m" ]]; then
new=$(echo $cur | sed s/]/", \'$tmp_ip:9104\']"/g)
else
new=$(echo $cur | sed s/]/", \'$tmp_ip:9104\', \'$tmp_ip:9101\' ]"/g)
fi
sed -i s/"-.*targets:.*]"/"$new"/g prometheus.yml
sed -i s/"targets:.*\[,"/"targets: ["/g prometheus.yml
docker restart `docker ps -q`
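# Illustration of the target rewriting above (hypothetical addresses): given a
# prometheus.yml line like
#   - targets: ['localhost:9090']
# the seds yield, when $nt is "m",
#   - targets: ['localhost:9090', '10.0.0.5:9104', '10.0.0.5:9101' ]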
|
CenturyLinkLabs/chimera
|
example/create_haproxy_test.sh
|
Shell
|
apache-2.0
| 1,060 |
#!/bin/bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e
set -x
source tensorflow/tools/ci_build/release/common.sh
install_ubuntu_16_pip_deps pip2.7
# Update bazel
update_bazel_linux
# Export required variables for running pip.sh
export OS_TYPE="UBUNTU"
export CONTAINER_TYPE="GPU"
export TF_PYTHON_VERSION='python2.7'
# Run configure.
export TF_NEED_GCP=1
export TF_NEED_HDFS=1
export TF_NEED_S3=1
export TF_NEED_CUDA=1
export TF_CUDA_VERSION=10.1
export TF_CUDNN_VERSION=7
export TF_NEED_TENSORRT=1
export TENSORRT_INSTALL_PATH=/usr/local/tensorrt
export CC_OPT_FLAGS='-mavx'
export PYTHON_BIN_PATH=$(which ${TF_PYTHON_VERSION})
export PROJECT_NAME="tensorflow_gpu"
export LD_LIBRARY_PATH="/usr/local/cuda:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64:$TENSORRT_INSTALL_PATH/lib"
export TF_CUDA_COMPUTE_CAPABILITIES=3.5,3.7,5.2,6.0,6.1,7.0
yes "" | "$PYTHON_BIN_PATH" configure.py
# Get the default test targets for bazel.
source tensorflow/tools/ci_build/build_scripts/PRESUBMIT_BUILD_TARGETS.sh
# Export optional variables for running pip.sh
export TF_TEST_FILTER_TAGS='gpu,requires-gpu,-no_gpu,-no_oss,-oss_serial'
export TF_BUILD_FLAGS="--config=opt --config=v2 --config=cuda --distinct_host_configuration=false \
--action_env=TF_CUDA_VERSION --action_env=TF_CUDNN_VERSION --crosstool_top=//third_party/toolchains/preconfig/ubuntu16.04/gcc7_manylinux2010-nvcc-cuda10.1:toolchain "
export TF_TEST_FLAGS="--test_tag_filters=${TF_TEST_FILTER_TAGS} --build_tag_filters=${TF_TEST_FILTER_TAGS} --distinct_host_configuration=false \
--action_env=TF_CUDA_VERSION --action_env=TF_CUDNN_VERSION --test_env=TF2_BEHAVIOR=1 \
--config=cuda --test_output=errors --local_test_jobs=4 --test_lang_filters=py \
--verbose_failures=true --keep_going --define=no_tensorflow_py_deps=true \
--run_under=//tensorflow/tools/ci_build/gpu_build:parallel_gpu_execute "
export TF_TEST_TARGETS="${DEFAULT_BAZEL_TARGETS} -//tensorflow/lite/..."
export TF_PIP_TESTS="test_pip_virtualenv_non_clean test_pip_virtualenv_clean"
export IS_NIGHTLY=0 # Not nightly
export TF_PROJECT_NAME=${PROJECT_NAME}
export TF_PIP_TEST_ROOT="pip_test"
# To build both tensorflow and tensorflow-gpu pip packages
export TF_BUILD_BOTH_GPU_PACKAGES=1
./tensorflow/tools/ci_build/builds/pip_new.sh
|
adit-chandra/tensorflow
|
tensorflow/tools/ci_build/release/ubuntu_16/gpu_py2_full/pip.sh
|
Shell
|
apache-2.0
| 2,918 |
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -e
set -x
# install libraries for mxnet's python package on ubuntu
apt-get update && apt-get install -y python-dev python3-dev
# the version of pip shipped with ubuntu may be too old; install a recent version here
cd /tmp && wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py && python2 get-pip.py
pip2 install nose pylint numpy nose-timer requests h5py scipy
pip3 install nose pylint numpy nose-timer requests h5py scipy
|
tornadomeet/mxnet
|
tests/ci_build/install/ubuntu_install_python.sh
|
Shell
|
apache-2.0
| 1,254 |
#!/usr/bin/env bash
function async_run() {
{
eval "$@" &> /dev/null
}&
}
function git_prompt_dir() {
# assume the gitstatus.sh is in the same directory as this script
# code thanks to http://stackoverflow.com/questions/59895
if [ -z "$__GIT_PROMPT_DIR" ]; then
local SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
local DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
__GIT_PROMPT_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
fi
}
function echoc() {
echo -e "${1}$2${ResetColor}" | sed 's/\\\]//g' | sed 's/\\\[//g'
}
function get_theme() {
local CUSTOM_THEME_FILE="${HOME}/.git-prompt-colors.sh"
local DEFAULT_THEME_FILE="${__GIT_PROMPT_DIR}/themes/Default.bgptheme"
if [[ -z ${GIT_PROMPT_THEME} ]]; then
if [[ -r $CUSTOM_THEME_FILE ]]; then
GIT_PROMPT_THEME="Custom"
__GIT_PROMPT_THEME_FILE=$CUSTOM_THEME_FILE
else
GIT_PROMPT_THEME="Default"
__GIT_PROMPT_THEME_FILE=$DEFAULT_THEME_FILE
fi
else
if [[ "${GIT_PROMPT_THEME}" = "Custom" ]]; then
GIT_PROMPT_THEME="Custom"
__GIT_PROMPT_THEME_FILE=$CUSTOM_THEME_FILE
if [[ ! (-r $__GIT_PROMPT_THEME_FILE) ]]; then
GIT_PROMPT_THEME="Default"
__GIT_PROMPT_THEME_FILE=$DEFAULT_THEME_FILE
fi
else
local theme=""
# use default theme, if theme was not found
for themefile in "${__GIT_PROMPT_DIR}/themes/"*.bgptheme; do
local basename=${themefile##*/}
if [[ "${basename%.bgptheme}" = "${GIT_PROMPT_THEME}" ]]; then
theme=$GIT_PROMPT_THEME
break
fi
done
if [[ "${theme}" = "" ]]; then
GIT_PROMPT_THEME="Default"
fi
__GIT_PROMPT_THEME_FILE="${__GIT_PROMPT_DIR}/themes/${GIT_PROMPT_THEME}.bgptheme"
fi
fi
}
function git_prompt_load_theme() {
get_theme
local DEFAULT_THEME_FILE="${__GIT_PROMPT_DIR}/themes/Default.bgptheme"
source "${DEFAULT_THEME_FILE}"
source "${__GIT_PROMPT_THEME_FILE}"
}
function git_prompt_list_themes() {
local oldTheme
local oldThemeFile
git_prompt_dir
get_theme
for themefile in "${__GIT_PROMPT_DIR}/themes/"*.bgptheme; do
local basename=${themefile##*/}
local theme="${basename%.bgptheme}"
if [[ "${GIT_PROMPT_THEME}" = "${theme}" ]]; then
echoc ${Red} "*${theme}"
else
echo $theme
fi
done
if [[ "${GIT_PROMPT_THEME}" = "Custom" ]]; then
echoc ${Magenta} "*Custom"
else
echoc ${Blue} "Custom"
fi
}
function git_prompt_make_custom_theme() {
if [[ -r "${HOME}/.git-prompt-colors.sh" ]]; then
echoc ${Red} "You alread have created a custom theme!"
else
git_prompt_dir
local base="Default"
if [[ -n $1 && -r "${__GIT_PROMPT_DIR}/themes/${1}.bgptheme" ]]; then
base=$1
echoc ${Green} "Using theme ${Magenta}\"${base}\"${Green} as base theme!"
else
echoc ${Green} "Using theme ${Magenta}\"Default\"${Green} as base theme!"
fi
if [[ "${base}" = "Custom" ]]; then
echoc ${Red} "You cannot use the custom theme as base"
else
echoc ${Green} "Creating new custom theme in \"${HOME}/.git-prompt-colors.sh\""
echoc ${DimYellow} "Please add ${Magenta}\"GIT_PROMPT_THEME=Custom\"${DimYellow} to your .bashrc to use this theme"
if [[ "${base}" == "Default" ]]; then
cp "${__GIT_PROMPT_DIR}/themes/Custom.bgptemplate" "${HOME}/.git-prompt-colors.sh"
else
cp "${__GIT_PROMPT_DIR}/themes/${base}.bgptheme" "${HOME}/.git-prompt-colors.sh"
fi
fi
fi
}
# gp_set_file_var ENVAR SOMEFILE
#
# If ENVAR is set, check that its value exists as a readable file. Otherwise,
# set ENVAR to the path to SOMEFILE, based on $HOME, $__GIT_PROMPT_DIR, and the
# directory of the current script. SOMEFILE can be prefixed with '.', or
# not.
#
# Return 0 (success) if ENVAR was validated or newly set, 1 (failure) otherwise.
function gp_set_file_var() {
local envar="$1"
local file="$2"
if eval "[[ -n \"\$$envar\" && -r \"\$$envar\" ]]" ; then # is envar set to a readable file?
local basefile
eval "basefile=\"\`basename \\\"\$$envar\\\"\`\"" # assign basefile
if [[ "$basefile" = "$file" || "$basefile" = ".$file" ]]; then
return 0
fi
else # envar is not set, or it's set to a different file than requested
eval "$envar=" # set empty envar
gp_maybe_set_envar_to_path "$envar" "$HOME/.$file" "$HOME/$file" "$HOME/lib/$file" && return 0
git_prompt_dir
gp_maybe_set_envar_to_path "$envar" "$__GIT_PROMPT_DIR/$file" "${0##*/}/$file" && return 0
fi
return 1
}
# gp_maybe_set_envar_to_path ENVAR FILEPATH ...
#
# return 0 (true) if any FILEPATH is readable, set ENVAR to it
# return 1 (false) if not
function gp_maybe_set_envar_to_path() {
local envar="$1"
shift
local file
for file in "$@" ; do
if [[ -r "$file" ]]; then
eval "$envar=\"$file\""
return 0
fi
done
return 1
}
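# Example (sketch; the variable name is illustrative): pick the first readable
# candidate file and remember it:
#   gp_maybe_set_envar_to_path MY_THEME_FILE \
#     "$HOME/.git-prompt-colors.sh" \
#     "$__GIT_PROMPT_DIR/themes/Default.bgptheme" && source "$MY_THEME_FILE"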
# git_prompt_reset
#
# unsets selected GIT_PROMPT variables, causing the next prompt callback to
# recalculate them from scratch.
git_prompt_reset() {
local var
for var in GIT_PROMPT_DIR __GIT_PROMPT_COLORS_FILE __PROMPT_COLORS_FILE __GIT_STATUS_CMD GIT_PROMPT_THEME_NAME; do
unset $var
done
}
# gp_format_exit_status RETVAL
#
# echoes the symbolic signal name represented by RETVAL if the process was
# signalled, otherwise echoes the original value of RETVAL
gp_format_exit_status() {
local RETVAL="$1"
local SIGNAL
# Suppress STDERR in case RETVAL is not an integer (in such cases, RETVAL
# is echoed verbatim)
if [ "${RETVAL}" -gt 128 ] 2>/dev/null; then
SIGNAL=$(( ${RETVAL} - 128 ))
kill -l "${SIGNAL}" 2>/dev/null || echo "${RETVAL}"
else
echo "${RETVAL}"
fi
}
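# Example: exit codes above 128 mean "killed by signal (code - 128)", so
#   gp_format_exit_status 130   # echoes "INT" (130 - 128 = 2 = SIGINT)
#   gp_format_exit_status 1     # echoes "1" unchanged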
function git_prompt_config() {
#Checking if root to change output
_isroot=false
[[ $UID -eq 0 ]] && _isroot=true
# There are two files related to colors:
#
# prompt-colors.sh -- sets generic color names suitable for bash 'PS1' prompt
# git-prompt-colors.sh -- sets the GIT_PROMPT color scheme, using names from prompt-colors.sh
if gp_set_file_var __PROMPT_COLORS_FILE prompt-colors.sh ; then
source "$__PROMPT_COLORS_FILE" # outsource the color defs
else
echo 1>&2 "Cannot find prompt-colors.sh!"
fi
# source the user's ~/.git-prompt-colors.sh file, or the one that should be
# sitting in the same directory as this script
git_prompt_load_theme
if is_function prompt_callback; then
prompt_callback="prompt_callback"
else
prompt_callback="prompt_callback_default"
fi
if [ $GIT_PROMPT_LAST_COMMAND_STATE = 0 ]; then
LAST_COMMAND_INDICATOR="$GIT_PROMPT_COMMAND_OK";
else
LAST_COMMAND_INDICATOR="$GIT_PROMPT_COMMAND_FAIL";
fi
# replace _LAST_COMMAND_STATE_ token with the actual state
GIT_PROMPT_LAST_COMMAND_STATE=$(gp_format_exit_status ${GIT_PROMPT_LAST_COMMAND_STATE})
LAST_COMMAND_INDICATOR="${LAST_COMMAND_INDICATOR//_LAST_COMMAND_STATE_/${GIT_PROMPT_LAST_COMMAND_STATE}}"
# Do this only once to define PROMPT_START and PROMPT_END
if [[ -z "$PROMPT_START" || -z "$PROMPT_END" ]]; then
if [[ -z "$GIT_PROMPT_START" ]] ; then
if $_isroot; then
PROMPT_START="$GIT_PROMPT_START_ROOT"
else
PROMPT_START="$GIT_PROMPT_START_USER"
fi
else
PROMPT_START="$GIT_PROMPT_START"
fi
if [[ -z "$GIT_PROMPT_END" ]] ; then
if $_isroot; then
PROMPT_END="$GIT_PROMPT_END_ROOT"
else
PROMPT_END="$GIT_PROMPT_END_USER"
fi
else
PROMPT_END="$GIT_PROMPT_END"
fi
fi
# set GIT_PROMPT_LEADING_SPACE to 0 if you want to have no leading space in front of the GIT prompt
if [[ "$GIT_PROMPT_LEADING_SPACE" = 0 ]]; then
PROMPT_LEADING_SPACE=""
else
PROMPT_LEADING_SPACE=" "
fi
if [[ "$GIT_PROMPT_ONLY_IN_REPO" = 1 ]]; then
EMPTY_PROMPT="$OLD_GITPROMPT"
else
local ps="$(gp_add_virtualenv_to_prompt)$PROMPT_START$($prompt_callback)$PROMPT_END"
EMPTY_PROMPT="${ps//_LAST_COMMAND_INDICATOR_/${LAST_COMMAND_INDICATOR}}"
fi
# fetch remote revisions every $GIT_PROMPT_FETCH_TIMEOUT (default 5) minutes
if [[ -z "$GIT_PROMPT_FETCH_TIMEOUT" ]]; then
GIT_PROMPT_FETCH_TIMEOUT="5"
fi
if [[ -z "$__GIT_STATUS_CMD" ]] ; then # if GIT_STATUS_CMD not defined..
git_prompt_dir
if ! gp_maybe_set_envar_to_path __GIT_STATUS_CMD "$__GIT_PROMPT_DIR/$GIT_PROMPT_STATUS_COMMAND" ; then
echo 1>&2 "Cannot find $GIT_PROMPT_STATUS_COMMAND!"
fi
# __GIT_STATUS_CMD defined
fi
}
function setLastCommandState() {
GIT_PROMPT_LAST_COMMAND_STATE=$?
}
function we_are_on_repo() {
if [[ -e "$(git rev-parse --git-dir 2> /dev/null)" ]]; then
echo 1
fi
echo 0
}
function update_old_git_prompt() {
local in_repo=$(we_are_on_repo)
if [[ $GIT_PROMPT_OLD_DIR_WAS_GIT = 0 ]]; then
OLD_GITPROMPT=$PS1
fi
GIT_PROMPT_OLD_DIR_WAS_GIT=$in_repo
}
function setGitPrompt() {
update_old_git_prompt
local repo=$(git rev-parse --show-toplevel 2> /dev/null)
if [[ ! -e "$repo" ]] && [[ "$GIT_PROMPT_ONLY_IN_REPO" = 1 ]]; then
# we do not permit bash-git-prompt outside git repos, so nothing to do
PS1="$OLD_GITPROMPT"
return
fi
local EMPTY_PROMPT
local __GIT_STATUS_CMD
git_prompt_config
if [[ ! -e "$repo" ]]; then
PS1="$EMPTY_PROMPT"
return
fi
local FETCH_REMOTE_STATUS=1
if [[ "$GIT_PROMPT_FETCH_REMOTE_STATUS" = 0 ]]; then
FETCH_REMOTE_STATUS=0
fi
unset GIT_PROMPT_IGNORE
if [[ -e "$repo/.bash-git-rc" ]]; then
source "$repo/.bash-git-rc"
fi
if [[ "$GIT_PROMPT_IGNORE" = 1 ]]; then
PS1="$EMPTY_PROMPT"
return
fi
if [[ "$FETCH_REMOTE_STATUS" = 1 ]]; then
checkUpstream
fi
updatePrompt
}
# some versions of find do not have -mmin
_have_find_mmin=1
function olderThanMinutes() {
local matches
local find_exit_code
if [[ -z "$_find_command" ]]; then
if command -v gfind > /dev/null; then
_find_command=gfind
else
_find_command=find
fi
fi
if [[ "$_have_find_mmin" = 1 ]]; then
matches=$("$_find_command" "$1" -mmin +"$2" 2> /dev/null)
find_exit_code="$?"
if [[ -n "$matches" ]]; then
return 0
else
if [[ "$find_exit_code" != 0 ]]; then
_have_find_mmin=0
else
return 1
fi
fi
fi
# try perl, solaris ships with perl
if command -v perl > /dev/null; then
perl -e '((time - (stat("'"$1"'"))[9]) / 60) > '"$2"' && exit(0) || exit(1)'
return "$?"
else
echo >&2
echo "[1;31mWARNING[0m: neither a find that supports -mmin (such as GNU find) or perl is available, disabling remote status checking. Install GNU find as gfind or perl to enable this feature, or set GIT_PROMPT_FETCH_REMOTE_STATUS=0 to disable this warning." >&2
echo >&2
GIT_PROMPT_FETCH_REMOTE_STATUS=0
return 1
fi
}
function checkUpstream() {
local GIT_PROMPT_FETCH_TIMEOUT
git_prompt_config
local FETCH_HEAD="$repo/.git/FETCH_HEAD"
# Fetch the repo if the local copy is stale for more than $GIT_PROMPT_FETCH_TIMEOUT minutes
if [[ ! -e "$FETCH_HEAD" ]] || olderThanMinutes "$FETCH_HEAD" "$GIT_PROMPT_FETCH_TIMEOUT"
then
if [[ -n $(git remote show) ]]; then
(
async_run "git fetch --quiet"
disown -h
)
fi
fi
}
function replaceSymbols() {
if [[ -z ${GIT_PROMPT_SYMBOLS_NO_REMOTE_TRACKING} ]]; then
GIT_PROMPT_SYMBOLS_NO_REMOTE_TRACKING=L
fi
local VALUE=${1//_AHEAD_/${GIT_PROMPT_SYMBOLS_AHEAD}}
local VALUE1=${VALUE//_BEHIND_/${GIT_PROMPT_SYMBOLS_BEHIND}}
local VALUE2=${VALUE1//_NO_REMOTE_TRACKING_/${GIT_PROMPT_SYMBOLS_NO_REMOTE_TRACKING}}
echo ${VALUE2//_PREHASH_/${GIT_PROMPT_SYMBOLS_PREHASH}}
}
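# Example (assumed symbol values): with GIT_PROMPT_SYMBOLS_AHEAD='↑' and
# GIT_PROMPT_SYMBOLS_PREHASH=':', replaceSymbols "_PREHASH_a1b2c3 _AHEAD_2"
# echoes ":a1b2c3 ↑2".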
function updatePrompt() {
local LAST_COMMAND_INDICATOR
local PROMPT_LEADING_SPACE
local PROMPT_START
local PROMPT_END
local EMPTY_PROMPT
local Blue="\[\033[0;34m\]"
git_prompt_config
export __GIT_PROMPT_IGNORE_STASH=${GIT_PROMPT_IGNORE_STASH}
export __GIT_PROMPT_SHOW_UPSTREAM=${GIT_PROMPT_SHOW_UPSTREAM}
local -a git_status_fields
git_status_fields=($("$__GIT_STATUS_CMD" 2>/dev/null))
local GIT_BRANCH=$(replaceSymbols ${git_status_fields[0]})
local GIT_REMOTE="$(replaceSymbols ${git_status_fields[1]})"
if [[ "." == "$GIT_REMOTE" ]]; then
unset GIT_REMOTE
fi
local GIT_UPSTREAM="${git_status_fields[2]}"
if [[ -z "${__GIT_PROMPT_SHOW_UPSTREAM}" || "^" == "$GIT_UPSTREAM" ]]; then
unset GIT_UPSTREAM
else
GIT_UPSTREAM="${GIT_PROMPT_UPSTREAM//_UPSTREAM_/${GIT_UPSTREAM}}"
fi
local GIT_STAGED=${git_status_fields[3]}
local GIT_CONFLICTS=${git_status_fields[4]}
local GIT_CHANGED=${git_status_fields[5]}
local GIT_UNTRACKED=${git_status_fields[6]}
local GIT_STASHED=${git_status_fields[7]}
local GIT_CLEAN=${git_status_fields[8]}
local NEW_PROMPT="$EMPTY_PROMPT"
if [[ -n "$git_status_fields" ]]; then
local STATUS="${PROMPT_LEADING_SPACE}${GIT_PROMPT_PREFIX}${GIT_PROMPT_BRANCH}${GIT_BRANCH}${ResetColor}"
# __chk_gitvar_status KIND VALEXPR [INSERT]
# eg: __chk_gitvar_status 'STAGED' '-ne 0'
__chk_gitvar_status() {
local v
if [[ "x$2" == "x-n" ]] ; then
v="$2 \"\$GIT_$1\""
else
v="\$GIT_$1 $2"
fi
if eval "test $v" ; then
if [[ $# -lt 2 || "$3" != '-' ]]; then
__add_status "\$GIT_PROMPT_$1\$GIT_$1\$ResetColor"
else
__add_status "\$GIT_PROMPT_$1\$ResetColor"
fi
fi
}
__add_gitvar_status() {
__add_status "\$GIT_PROMPT_$1\$GIT_$1\$ResetColor"
}
# __add_status SOMETEXT
__add_status() {
eval "STATUS=\"$STATUS$1\""
}
__add_status '$GIT_UPSTREAM'
__chk_gitvar_status 'REMOTE' '-n'
__add_status "$GIT_PROMPT_SEPARATOR"
__chk_gitvar_status 'STAGED' '-ne 0'
__chk_gitvar_status 'CONFLICTS' '-ne 0'
__chk_gitvar_status 'CHANGED' '-ne 0'
__chk_gitvar_status 'UNTRACKED' '-ne 0'
__chk_gitvar_status 'STASHED' '-ne 0'
__chk_gitvar_status 'CLEAN' '-eq 1' -
__add_status "$ResetColor$GIT_PROMPT_SUFFIX"
NEW_PROMPT="$(gp_add_virtualenv_to_prompt)$PROMPT_START$($prompt_callback)$STATUS$PROMPT_END"
else
NEW_PROMPT="$EMPTY_PROMPT"
fi
PS1="${NEW_PROMPT//_LAST_COMMAND_INDICATOR_/${LAST_COMMAND_INDICATOR}}"
}
# Helper function that returns virtual env information to be set in the prompt
# Honors virtualenv's own setting VIRTUAL_ENV_DISABLE_PROMPT
function gp_add_virtualenv_to_prompt {
local ACCUMULATED_VENV_PROMPT=""
local VENV=""
if [[ -n "$VIRTUAL_ENV" && -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ]]; then
VENV=$(basename "${VIRTUAL_ENV}")
ACCUMULATED_VENV_PROMPT="${ACCUMULATED_VENV_PROMPT}${GIT_PROMPT_VIRTUALENV//_VIRTUALENV_/${VENV}}"
fi
if [[ -n "$CONDA_DEFAULT_ENV" ]]; then
VENV=$(basename "${CONDA_DEFAULT_ENV}")
ACCUMULATED_VENV_PROMPT="${ACCUMULATED_VENV_PROMPT}${GIT_PROMPT_VIRTUALENV//_VIRTUALENV_/${VENV}}"
fi
echo "$ACCUMULATED_VENV_PROMPT"
}
# Use exit status from declare command to determine whether input argument is a
# bash function
function is_function {
declare -Ff "$1" >/dev/null;
}
#Helper function that truncates $PWD depending on window width
function gp_truncate_pwd {
local tilde="~"
local newPWD="${PWD/#${HOME}/${tilde}}"
local pwdmaxlen=$((${COLUMNS:-80}/3))
[ ${#newPWD} -gt $pwdmaxlen ] && newPWD="...${newPWD:3-$pwdmaxlen}"
echo -n "$newPWD"
}
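# Example: with COLUMNS=80 the limit is 80/3 = 26 characters, so a 33-character
# path like ~/projects/some/deeply/nested/dir is shown as "..." followed by its
# last 23 characters.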
#Sets the window title to the given argument string
function gp_set_window_title {
echo -ne "\033]0;"$@"\007"
}
function prompt_callback_default {
return
}
function gp_install_prompt {
if [ -z "$OLD_GITPROMPT" ]; then
OLD_GITPROMPT=$PS1
fi
if [ -z "$GIT_PROMPT_OLD_DIR_WAS_GIT" ]; then
GIT_PROMPT_OLD_DIR_WAS_GIT=$(we_are_on_repo)
fi
if [ -z "$PROMPT_COMMAND" ]; then
PROMPT_COMMAND=setGitPrompt
else
PROMPT_COMMAND=${PROMPT_COMMAND%% }; # remove trailing spaces
PROMPT_COMMAND=${PROMPT_COMMAND%\;}; # remove trailing semi-colon
local new_entry="setGitPrompt"
case ";$PROMPT_COMMAND;" in
*";$new_entry;"*)
# echo "PROMPT_COMMAND already contains: $new_entry"
:;;
*)
PROMPT_COMMAND="$PROMPT_COMMAND;$new_entry"
# echo "PROMPT_COMMAND does not contain: $new_entry"
;;
esac
fi
local setLastCommandStateEntry="setLastCommandState"
case ";$PROMPT_COMMAND;" in
*";$setLastCommandStateEntry;"*)
# echo "PROMPT_COMMAND already contains: $setLastCommandStateEntry"
:;;
*)
PROMPT_COMMAND="$setLastCommandStateEntry;$PROMPT_COMMAND"
# echo "PROMPT_COMMAND does not contain: $setLastCommandStateEntry"
;;
esac
git_prompt_dir
source "$__GIT_PROMPT_DIR/git-prompt-help.sh"
}
gp_install_prompt
|
jfojtl/bash-git-prompt
|
gitprompt.sh
|
Shell
|
bsd-2-clause
| 16,852 |
#!/bin/bash
unset CDPATH
cd "$( dirname "${BASH_SOURCE[0]}" )"
BB_LOG_TIME='date-mock'
BB_LOG_FORMAT='${PREFIX} ${TIME} [${LEVEL}] ${MESSAGE}'
source ../../bashbooster.sh
date-mock() {
echo '2014-07-02 13:26:41+07:00'
}
BB_LOG_LEVEL=$BB_LOG_DEBUG
bb-log-debug "Debug message"
bb-log-info "Info message"
bb-log-warning "Warning message"
bb-log-error "Error message"
BB_LOG_LEVEL=$BB_LOG_INFO
|
polydawn-ports/bash-booster
|
unit tests/log-time/test.sh
|
Shell
|
bsd-2-clause
| 400 |
#!/bin/sh
cd ../../lib
ln -fs libefsw.so.$1.$2.$3 libefsw.so.$1
ln -fs libefsw.so.$1 libefsw.so
if [ "$4" == "strip-symbols" ]; then
objcopy --only-keep-debug libefsw.so.$1.$2.$3 libefsw.debug
objcopy --strip-debug libefsw.so.$1.$2.$3
fi
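# Usage sketch (version digits are illustrative):
#   ./build.reldbginfo.sh 1 6 3 strip-symbols
# creates libefsw.so.1 -> libefsw.so.1.6.3 and libefsw.so -> libefsw.so.1,
# then splits the debug info out into libefsw.debug.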
|
septag/termite
|
deps/efsw/project/build.reldbginfo.sh
|
Shell
|
bsd-2-clause
| 241 |
#!/bin/sh
# source the common platform independent functionality and option parsing
script_location=$(dirname $(readlink --canonicalize $0))
. ${script_location}/common_setup.sh
# configuring apt for non-interactive environment
echo -n "configure package manager for non-interactive usage... "
export DEBIAN_FRONTEND=noninteractive
echo "done"
# update package manager cache
echo -n "updating package manager cache... "
sudo apt-get update > /dev/null || die "fail (apt-get update)"
echo "done"
# install package dependency resolve program
echo -n "installing gdebi-core... "
sudo apt-get install gdebi-core > /dev/null || die "fail (install gdebi-core)"
echo "done"
# install deb packages
echo "installing DEB packages... "
install_deb $CLIENT_PACKAGE
# setup environment
echo -n "setting up CernVM-FS environment... "
sudo cvmfs_config setup || die "fail (cvmfs_config setup)"
sudo mkdir -p /var/log/cvmfs-test || die "fail (mkdir /var/log/cvmfs-test)"
sudo chown sftnight:sftnight /var/log/cvmfs-test || die "fail (chown /var/log/cvmfs-test)"
attach_user_group fuse || die "fail (add fuse group to user)"
sudo service autofs restart > /dev/null || die "fail (restart autofs)"
sudo cvmfs_config chksetup > /dev/null || die "fail (cvmfs_config chksetup)"
echo "done"
# install test dependencies
echo "installing test dependencies..."
install_from_repo gcc || die "fail (installing gcc)"
install_from_repo make || die "fail (installing make)"
|
btovar/cvmfs
|
test/cloud_testing/platforms/ubuntu_x86_64_setup.sh
|
Shell
|
bsd-3-clause
| 1,536 |
#!/usr/bin/env bash
# in case we're run from outside the git repo
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
"$DIR"/keep_alive.sh & export PID_KEEP_ALIVE=$!
|
bmsherman/topology
|
etc/ci/travis_keep_alive.sh
|
Shell
|
mit
| 167 |
#! /bin/bash
make clean-objects
make -j3 Bootloader=1 SimpleCircuit=1 Platform=blueDev
make clean-objects
make -j3 Bootloader=1 Platform=blueDev
make clean-objects
make -j3 SimpleCircuit=1 Platform=blueDev
make clean-objects
make -j3 Platform=blueDev
make clean-objects
rm *.elf
rm *.map
|
j1rie/IRMP_STM32
|
STM32F401xE/make_all.sh
|
Shell
|
gpl-2.0
| 288 |
#!/bin/bash
function run { sdk=$1; arch=$2; shift 2; xcrun --sdk "${sdk}" g++ -arch "${arch}" "$@" -o a \
-isysroot "$(xcodebuild -sdk "${sdk}" -version Path)" \
-x c <(echo "void CYListenServer(short port); int main() { CYListenServer(6667); return 0; }") \
-framework Foundation -framework JavaScriptCore -framework Cycript; }
echo macosx
run macosx i386 -mmacosx-version-min=10.6 -F Cycript.osx "${flags[@]}"
run macosx x86_64 -mmacosx-version-min=10.6 -F Cycript.osx "${flags[@]}"
echo iphoneos
run iphoneos armv6 -miphoneos-version-min=4.0 -F Cycript.ios "${flags[@]}"
run iphoneos arm64 -miphoneos-version-min=7.0 -F Cycript.ios "${flags[@]}"
echo iphonesimulator
run iphonesimulator i386 -mios-simulator-version-min=4.0 -F Cycript.ios "${flags[@]}"
run iphonesimulator x86_64 -mios-simulator-version-min=4.0 -F Cycript.ios "${flags[@]}" -fobjc-abi-version=2 -fobjc-legacy-dispatch
|
Ju2ender/cycript
|
section.sh
|
Shell
|
gpl-3.0
| 900 |
#!/bin/sh
. "${TEST_SCRIPTS_DIR}/unit.sh"
define_test "empty config, reconfigure, NOOP"
setup
create_policy_routing_config 0
ok "Reconfiguring service \"${service_name}\"..."
simple_test_event "reconfigure"
check_routes 0
|
sathieu/samba
|
ctdb/tests/eventscripts/13.per_ip_routing.016.sh
|
Shell
|
gpl-3.0
| 228 |
#!/bin/bash -e
# The Gettext package contains utilities for internationalization and
# localization. These allow programs to be compiled with NLS (Native
# Language Support), enabling them to output messages in the user's
# native language.
. ../lfs.comm
build_src() {
version=0.19.8.1
srcfil=gettext-$version.tar.xz
srcdir=gettext-$version
tar -xf $LFSSRC/$srcfil && cd $srcdir/gettext-tools
EMACS="no" ./configure --prefix=/tools --disable-shared
make -C gnulib-lib
make -C intl pluralx.c
make -C src msgfmt
make -C src msgmerge
make -C src xgettext
cp -v src/{msgfmt,msgmerge,xgettext} /tools/bin
cd ../.. && rm -rf $srcdir
}
build
|
fangxinmiao/projects
|
Architeture/OS/Linux/Distributions/LFS/build-scripts/lfs-latest/1-build-tmp-system/20-gettext.sh
|
Shell
|
gpl-3.0
| 690 |
#!/bin/bash
# this script relies on having a template copy of the full production
# database available first -- use the dump_restore.sh script
COURSE_SHORT_NAME="TAM 212"
COURSE_INSTANCE_SHORT_NAME="Sp18"
OUTPUT_NAME="tam212sp18"
OUTPUT_DUMP="${OUTPUT_NAME}.dump"
OUTPUT_JSON_DIR="${OUTPUT_NAME}.tables"
echo "Dumping ${COURSE_SHORT_NAME} ${COURSE_INSTANCE_SHORT_NAME} into ${OUTPUT_DUMP} and ${OUTPUT_JSON_DIR}"
TEMPLATE_DB=proddb # database to copy (will not be changed)
TMP_DB=filterdb # temporary working database (will be destroyed)
echo "Reading data from DB ${TEMPLATE_DB} and using temporary DB ${TMP_DB}"
# list of tables to be included in the filtered output
OUTPUT_TABLE_LIST="\
'pl_courses', \
'users', \
'course_permissions', \
'administrators', \
'course_instances', \
'course_instance_access_rules', \
'assessment_sets', \
'assessments', \
'assessment_access_rules', \
'topics', \
'questions', \
'tags', \
'question_tags', \
'zones', \
'alternative_groups', \
'assessment_questions', \
'enrollments', \
'assessment_instances', \
'instance_questions', \
'variants', \
'submissions' \
"
CLEAN_OUTPUT_TABLE_LIST=$(echo "${OUTPUT_TABLE_LIST}" | tr "'," " ")
echo "Output table list: ${OUTPUT_TABLE_LIST}"
set -v -e
dropdb ${TMP_DB} || true # don't stop on error
createdb --template=${TEMPLATE_DB} ${TMP_DB}
# drop pg_stat_statements
# Dropping pg_stat_statements...
psql --dbname=${TMP_DB} --file=- <<EOF
drop extension pg_stat_statements;
EOF
# drop all unnecessary tables
# Dropping all tables not in output table list...
psql --dbname=${TMP_DB} --file=- <<EOF
select format('drop table %I cascade', tablename)
from pg_catalog.pg_tables
where schemaname = 'public'
and tablename not in (${OUTPUT_TABLE_LIST});
\gexec
EOF
# drop all views
# Dropping all views...
psql --dbname=${TMP_DB} --file=- <<EOF
select format('drop view %I cascade', viewname)
from pg_catalog.pg_views
where schemaname = 'public'
\gexec
EOF
# Copy out the data we care about from large tables. This is needed to
# speed up the later DELETE commands. It's fine to leave the small
# tables in place and let them be cleaned up by the automatic CASCADE
# on the foreign key constraints.
# Saving required assessment_instances to ai_tmp...
psql --dbname=${TMP_DB} --file=- <<EOF
create table ai_tmp as
select ai.*
from assessment_instances as ai
join assessments as a on (a.id = ai.assessment_id)
join course_instances as ci on (ci.id = a.course_instance_id)
join pl_courses as c on (c.id = ci.course_id)
join enrollments as e on (e.user_id = ai.user_id and e.course_instance_id = ci.id)
where c.short_name = '${COURSE_SHORT_NAME}'
and ci.short_name = '${COURSE_INSTANCE_SHORT_NAME}';
EOF
# Saving required instance_questions to iq_tmp...
psql --dbname=${TMP_DB} --file=- <<EOF
create table iq_tmp as
select iq.*
from instance_questions as iq
join assessment_instances as ai on (ai.id = iq.assessment_instance_id)
join assessments as a on (a.id = ai.assessment_id)
join course_instances as ci on (ci.id = a.course_instance_id)
join pl_courses as c on (c.id = ci.course_id)
join enrollments as e on (e.user_id = ai.user_id and e.course_instance_id = ci.id)
where c.short_name = '${COURSE_SHORT_NAME}'
and ci.short_name = '${COURSE_INSTANCE_SHORT_NAME}';
EOF
# Saving required variants to v_tmp...
psql --dbname=${TMP_DB} --file=- <<EOF
create table v_tmp as
select v.*
from variants as v
join instance_questions as iq on (iq.id = v.instance_question_id)
join assessment_instances as ai on (ai.id = iq.assessment_instance_id)
join assessments as a on (a.id = ai.assessment_id)
join course_instances as ci on (ci.id = a.course_instance_id)
join pl_courses as c on (c.id = ci.course_id)
join enrollments as e on (e.user_id = ai.user_id and e.course_instance_id = ci.id)
where c.short_name = '${COURSE_SHORT_NAME}'
and ci.short_name = '${COURSE_INSTANCE_SHORT_NAME}';
EOF
# Saving required submissions to s_tmp...
psql --dbname=${TMP_DB} --file=- <<EOF
create table s_tmp as
select s.*
from submissions as s
join variants as v on (v.id = s.variant_id)
join instance_questions as iq on (iq.id = v.instance_question_id)
join assessment_instances as ai on (ai.id = iq.assessment_instance_id)
join assessments as a on (a.id = ai.assessment_id)
join course_instances as ci on (ci.id = a.course_instance_id)
join pl_courses as c on (c.id = ci.course_id)
join enrollments as e on (e.user_id = ai.user_id and e.course_instance_id = ci.id)
where c.short_name = '${COURSE_SHORT_NAME}'
and ci.short_name = '${COURSE_INSTANCE_SHORT_NAME}';
EOF
# delete all the data from the tables that we copied
# Deleting all submissions, variants, instance_questions, and assessment_instances...
psql --dbname=${TMP_DB} --command="TRUNCATE submissions, variants, instance_questions, assessment_instances;"
# do the actual filtering
# Deleting all courses except the one we want...
psql --dbname=${TMP_DB} --command="DELETE FROM pl_courses WHERE short_name != '${COURSE_SHORT_NAME}';"
# Deleting all courses_instances except the one we want...
psql --dbname=${TMP_DB} --command="DELETE FROM course_instances WHERE short_name != '${COURSE_INSTANCE_SHORT_NAME}';"
# Deleting all users except ones used by other retained tables...
psql --dbname=${TMP_DB} --file=- <<EOF
delete from users
where user_id not in (
select distinct user_id
from (
(select user_id from enrollments)
union
(select user_id from ai_tmp)
union
(select auth_user_id as user_id from ai_tmp)
union
(select authn_user_id as user_id from iq_tmp)
union
(select user_id from v_tmp)
union
(select authn_user_id as user_id from v_tmp)
union
(select auth_user_id as user_id from s_tmp)
) as tmp_user_ids
)
EOF
# copy back the saved data
# Copying back saved data for assessment_instances, instance_questions, variants, and submissions...
psql --dbname=${TMP_DB} --command="insert into assessment_instances select * from ai_tmp;"
psql --dbname=${TMP_DB} --command="insert into instance_questions select * from iq_tmp;"
psql --dbname=${TMP_DB} --command="insert into variants select * from v_tmp;"
psql --dbname=${TMP_DB} --command="insert into submissions select * from s_tmp;"
# drop the temporary tables used for saving
# Dropping the temporary tables ai_tmp, iq_tmp, v_tmp, and s_tmp...
psql --dbname=${TMP_DB} --command="drop table ai_tmp, iq_tmp, v_tmp, s_tmp;"
# anonymize
# Anonymizing data...
psql --dbname=${TMP_DB} --command="UPDATE users AS u SET uid = 'user' || u.user_id || '@example.com', name = 'User Name';"
psql --dbname=${TMP_DB} --command="UPDATE users AS u SET uin = NULL;"
psql --dbname=${TMP_DB} --command="update course_instance_access_rules as ar \
set uids = (select array_agg('user' || u.user_id || '@example.com') \
from unnest(ar.uids) as tmp (tmp_uid) join users as u on (u.uid = tmp_uid));"
psql --dbname=${TMP_DB} --command="update assessment_access_rules as ar \
set uids = (select array_agg('user' || u.user_id || '@example.com') \
from unnest(ar.uids) as tmp (tmp_uid) join users as u on (u.uid = tmp_uid));"
# dump everything that's left
# Dumping data to ${OUTPUT_DUMP}...
pg_dump -Fc --file="${OUTPUT_DUMP}" ${TMP_DB}
# output all tables as JSON
# Writing JSON data to ${OUTPUT_JSON_DIR}...
mkdir -p "${OUTPUT_JSON_DIR}"
for table in ${CLEAN_OUTPUT_TABLE_LIST} ; do
psql --tuples-only --dbname=${TMP_DB} --file=- <<EOF
\pset format unaligned
select json_agg(t) from ${table} as t
\g ${OUTPUT_JSON_DIR}/${table}.json
EOF
gzip ${OUTPUT_JSON_DIR}/${table}.json
done
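# To load the filtered dump into a fresh database later (sketch, standard
# PostgreSQL tooling; the database name is illustrative):
#   createdb tam212sp18_restore
#   pg_restore --dbname=tam212sp18_restore tam212sp18.dump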
|
PrairieLearn/PrairieLearn
|
tools/dump_filter.sh
|
Shell
|
agpl-3.0
| 7,793 |
# -------------------------------------------------------------------------------------------------
# Copyright (c) 2015 zsh-syntax-highlighting contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted
# provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the zsh-syntax-highlighting contributors nor the names of its contributors
# may be used to endorse or promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -------------------------------------------------------------------------------------------------
# -*- mode: zsh; sh-indentation: 2; indent-tabs-mode: nil; sh-basic-offset: 2; -*-
# vim: ft=zsh sw=2 ts=2 et
# -------------------------------------------------------------------------------------------------
alias alias1="ls"
alias -s alias2="echo"
function alias1() {} # to check that it's highlighted as an alias, not as a function
BUFFER='x.alias2; alias1; alias2'
# Set expected_region_highlight as a function of zsh version.
#
# Highlight of suffix alias requires zsh-5.1.1 or newer; see issue #126,
# and commit 36403 to zsh itself. Therefore, check if the requisite zsh
# functionality is present, and skip verifying suffix-alias highlighting
# if it isn't.
expected_region_highlight=()
if zmodload -e zsh/parameter || [[ "$(type -w x.alias2)" == *suffix* ]]; then
expected_region_highlight+=(
"1 8 suffix-alias" # x.alias2
)
fi
expected_region_highlight+=(
"9 9 commandseparator" # ;
"11 16 alias" # alias1
"11 16 command" # alias1 (ls)
"17 17 commandseparator" # ;
"19 24 unknown-token" # alias2
)
|
codeprimate/arid
|
zsh/zsh-syntax-highlighting/highlighters/main/test-data/alias.zsh
|
Shell
|
bsd-2-clause
| 2,813 |
#!/bin/sh
echo "#!/bin/sh
cd MPlayer-1.0rc3/
make -j \$NUM_CPU_JOBS 2>&1" > time-compile-mplayer
chmod +x time-compile-mplayer
|
mugglmenzel/BalloonRace
|
src/main/resources/test-profiles/pts/build-mplayer-1.3.0/install.sh
|
Shell
|
bsd-3-clause
| 130 |
rm app/data/test.db
app/yiic migrate --connectionID=dbTest --interactive=0 > /dev/null
vendor/bin/codecept run unit
rm app/data/test.db
app/yiic migrate --connectionID=dbTest --interactive=0 > /dev/null
vendor/bin/codecept run functional
rm app/data/test.db
app/yiic migrate --connectionID=dbTest --interactive=0 > /dev/null
vendor/bin/codecept run acceptance
|
motin/giic-gtc-hybrid-template-demo
|
test.sh
|
Shell
|
bsd-3-clause
| 361 |
POMICONS_GLYPHS_LOADED=1
POMICONS_GLYPHS_VERSION='1.0.0'
CODEPOINT_OF_POMICONS_CLEAN_CODE='e000'
CODEPOINT_OF_POMICONS_POMODORO_DONE='e001'
CODEPOINT_OF_POMICONS_POMODORO_ESTIMATED='e002'
CODEPOINT_OF_POMICONS_POMODORO_TICKING='e003'
CODEPOINT_OF_POMICONS_POMODORO_SQUASHED='e004'
CODEPOINT_OF_POMICONS_LONG_PAUSE='e005'
CODEPOINT_OF_POMICONS_AWAY='e006'
CODEPOINT_OF_POMICONS_PAIR_PROGRAMMING='e007'
|
gabrielelana/awesome-terminal-fonts
|
fonts/pomicons-regular.sh
|
Shell
|
mit
| 401 |
#!/bin/bash
source environ.sh
BROKEN
SRC=ftp://ftp.freedesktop.org/pub/mesa/11.0.4/mesa-11.0.4.tar.xz
DIR=mesa-11.0.4
CONFIGURE_ARGS="--host=i386-elf-redox --prefix=$PREFIX --enable-osmesa --disable-driglx-direct --disable-dri --with-gallium-drivers=swrast"
autoconf_template $*
|
stryan/redox
|
libc/ports/mesa.sh
|
Shell
|
mit
| 282 |
#!/bin/sh
cp -r src/files/* dest/files/
cp -r extra dest/
cp src/.htaccess dest/
|
snscltt/cosmofox.net
|
scripts/insert-files.sh
|
Shell
|
cc0-1.0
| 82 |
#!/bin/bash -e
# Stable versions contain only 3 groups of digits separated by a dot,
# i.e. no "dev", "alpha", "beta", "rc", ... keyword.
STABLE_REGEX="^[0-9]+\.[0-9]+\.[0-9]+$"
# Validate composer config
# Cannot use `--strict` mode due to following warning: "Defining autoload.psr-4 with an empty namespace prefix is a bad idea for performance"
composer validate
if [[ "$PHP_VERSION" =~ $STABLE_REGEX ]]; then
composer check-platform-reqs;
fi
# Install dependencies
if ! [[ "$PHP_VERSION" =~ $STABLE_REGEX ]]; then
COMPOSER_ADD_OPTS=--ignore-platform-reqs;
fi
bin/console dependencies install --composer-options="$COMPOSER_ADD_OPTS --prefer-dist --no-progress"
# Compile translation files
php bin/console locales:compile
|
orthagh/glpi
|
.github/actions/init_build.sh
|
Shell
|
gpl-2.0
| 731 |
java -cp JPCApplication.jar:Tools.jar tools.Tools
make application
java -cp JPCApplication.jar:Tools.jar tools.Tools -decoder > src/org/jpc/emulator/execution/decoder/ExecutableTables.java
make application
|
ianopolous/JPC
|
regenerate_decoder.sh
|
Shell
|
gpl-2.0
| 205 |
#!/bin/sh
#
# Set ip and subnet for shorewall
#
/usr/bin/perl /opt/sark/scripts/setip.pl
#
# Run the generator
#
echo Running the Generator
/bin/sh /opt/sark/scripts/srkgenAst
echo Done
|
aelintra/sail
|
sail-4.1.0/opt/sark/scripts/setip.sh
|
Shell
|
gpl-2.0
| 187 |
#! /bin/sh -e
# tup - A file-based build system
#
# Copyright (C) 2009-2015 Mike Shal <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Make sure an empty input variable doesn't generate a rule, but a blank input
# pattern does.
. ./tup.sh
cat > Tupfile << HERE
: foreach \$(srcs) |> nope |> %B.o
: \$(objs) |> not gonna work |> prog
: \$(objs) | order.h |> also not gonna work |> prog2
: | order.h |> this should work |> prog3
: |> echo foo > %o |> bar
HERE
tup touch Tupfile order.h
tup parse
tup_object_no_exist . "nope"
tup_object_no_exist . "not gonna work"
tup_object_no_exist . "also not gonna work"
tup_object_exist . "this should work"
tup_object_exist . "echo foo > bar"
eotup
|
p2rkw/tup
|
test/t2026-empty-input-var.sh
|
Shell
|
gpl-2.0
| 1,305 |
#!/bin/bash
# This script should be run whenever new packages are installed to ensure things are
# set for future runs, and of course to set up a new virtualenv
pushd $(dirname $0)
HOMEDIR=$(pwd)
LOG_DIR="$HOMEDIR/log"
if [ -n "$1" ]; then
REQ="$HOMEDIR/$1"
echo "Using pip requirements file $1"
else
REQ="$HOMEDIR/requirements.txt"
fi
EGG_CACHE="$HOMEDIR/wsgi/egg-cache"
PG_CONFIG=$(locate --regex "bin/pg_config$")
PATH=$(dirname $PG_CONFIG):$PATH
if [ -z $VIRTUAL_ENV ]; then
echo "Need to be in your virtual environment."
exit 1
else
VENV_NAME=$(basename $VIRTUAL_ENV)
VENV_DIR="$HOMEDIR/$VENV_NAME"
fi
# Required to install the hdf5 libraries
echo "Need to sudo to install hdf5 packages..."
sudo yum -y install hdf5 hdf5-devel
if [ $? -ne 0 ] ; then
echo "Exiting $0"
exit
fi
sudo yum -y remove numpy
# Install GDAL in venv
CONFIG=$(which gdal-config)
if [ $? -ne 0 ]; then
echo "gdal-config is not in PATH"
rm -rf $VENV_DIR
exit 1
fi
ln -s $CONFIG $VENV_DIR/bin/
# Make sure we have pip in the virtualenv
easy_install pip
# Install numpy
NUMPY=$(grep numpy requirements.txt)
echo "Installing numpy (${NUMPY:=numpy})"
pip install $NUMPY
if [ $? -ne 0 ] ; then
echo "*** pip install $NUMPY failed. ***"
exit 1
fi
# Install Matplotlib from GIT
pip install -e git+https://github.com/matplotlib/matplotlib.git#egg=matplotlib
if [ $? -ne 0 ] ; then
echo "*** pip install -e git+https://github.com/matplotlib/matplotlib.git#egg=matplotlib failed. ***"
exit 1
fi
# Install everything in $REQ
if [ -f "$REQ" ]; then
pip install -r $REQ
if [ $? -ne 0 ] ; then
echo "*** pip install -r $REQ failed. ***"
exit 1
fi
fi
# Save config and give apache access
pip freeze | grep -v pysqlite | grep -v ga_ows | grep -v matplotlib > requirements_installed.txt
if [ ! -d $EGG_CACHE ]; then
echo "Creating the egg cache"
mkdir -p $EGG_CACHE
fi
sudo chown apache $EGG_CACHE
mkdir -p $LOG_DIR
sudo chgrp apache $LOG_DIR
sudo chmod g+s $LOG_DIR
touch $LOG_DIR/django.log
chmod g+w log/django.log
##################################################################
# Apply the django patch to escape unicode strings properly
# Required for specific versions of psycopg and postgis as of 2013
pushd venv-stoqs/lib/python*/site-packages
patch django/contrib/gis/db/backends/postgis/adapter.py << __EOT__
--- django/contrib/gis/db/backends/postgis/adapter.py.orig 2011-09-09 11:51:27.769648151 +0100
+++ django/contrib/gis/db/backends/postgis/adapter.py 2011-09-09 11:51:38.279842827 +0100
@@ -3,7 +3,7 @@
"""
from psycopg2 import Binary
-from psycopg2.extensions import ISQLQuote
+from psycopg2.extensions import ISQLQuote, adapt
class PostGISAdapter(object):
def __init__(self, geom):
@@ -12,6 +12,7 @@
# the adaptor) and the SRID from the geometry.
self.ewkb = str(geom.ewkb)
self.srid = geom.srid
+ self._adapter = Binary(self.ewkb)
def __conform__(self, proto):
# Does the given protocol conform to what Psycopg2 expects?
@@ -26,10 +27,15 @@
def __str__(self):
return self.getquoted()
+ def prepare(self, conn):
+ # Pass the connection to the adapter: this allows escaping the binary
+ # in the style required by the server's standard_conforming_string setting.
+ self._adapter.prepare(conn)
+
def getquoted(self):
"Returns a properly quoted string for use in PostgreSQL/PostGIS."
- # Want to use WKB, so wrap with psycopg2 Binary() to quote properly.
- return 'ST_GeomFromEWKB(E%s)' % Binary(self.ewkb)
+ # psycopg will figure out whether to use E'\\000' or '\000'
+ return 'ST_GeomFromEWKB(%s)' % adapt(self._adapter)
def prepare_database_save(self, unused):
return self
__EOT__
popd
echo "$0 finished."
|
mikemccann/stoqs
|
setup.sh
|
Shell
|
gpl-3.0
| 3,855 |
#!/bin/bash
#set -x
source $LIBMESH_DIR/examples/run_common.sh
example_name=fem_system_ex5
example_dir=examples/fem_system/$example_name
common_options="write_interval=1 solver_quiet=false relative_step_tolerance=1e-3 relative_residual_tolerance=1.e-3"
# Note: Too much ILU fails badly on this problem in single precision.
# We force simple Jacobi to be safe.
petsc_options="-ksp_type cg -pc_type jacobi"
# Note: Use 25 timesteps to simulate approximately three periods of oscillation.
options="deltat=0.25 n_timesteps=5 time_solver=newmark $common_options $petsc_options"
run_example_no_extra_options "$example_name" "$options"
options="time_solver=steady n_timesteps=1 $common_options $petsc_options"
run_example_no_extra_options "$example_name" "$options"
# With first order solvers, the Jacobian is no longer symmetric
petsc_options="-ksp_type gmres -pc_type bjacobi -sub_pc_type ilu"
options="deltat=0.25 n_timesteps=5 time_solver=euler theta=0.5 $common_options $petsc_options"
run_example_no_extra_options "$example_name" "$options"
options="deltat=0.25 n_timesteps=5 time_solver=euler2 theta=0.5 $common_options $petsc_options"
run_example_no_extra_options "$example_name" "$options"
# No benchmarks here - we don't do IGA refinement yet so it's hard to
# scale this up.
|
dschwen/libmesh
|
examples/fem_system/fem_system_ex5/run.sh
|
Shell
|
lgpl-2.1
| 1,291 |
#!/bin/bash
# get directory of script
DIR="$( cd "$( dirname "$0" )" && pwd )"
# assuming a local guacamole version is located properly
LOCAL_GUACAMOLE="$DIR/../../../guacamole"
LOCAL_AVANGO="$DIR/../../../avango"
# if not, this path will be used
GUACAMOLE=/opt/guacamole/master
AVANGO=/opt/avango/master
# third party libs
export LD_LIBRARY_PATH=/opt/boost/boost_1_55_0/lib:/opt/zmq/current/lib
# schism
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/schism/current/lib/linux_x86
# avango
export LD_LIBRARY_PATH="$LOCAL_AVANGO/lib":$AVANGO/lib:$LD_LIBRARY_PATH:/opt/pbr/inst_cb/lib:/opt/Awesomium/lib
export PYTHONPATH="$LOCAL_AVANGO/lib/python3.4":"$LOCAL_AVANGO/examples":$AVANGO/lib/python3.4:$AVANGO/examples
# guacamole
export LD_LIBRARY_PATH="$LOCAL_GUACAMOLE/lib":$GUACAMOLE/lib:$LD_LIBRARY_PATH
# run daemon
if [ -f "$LOCAL_AVANGO/examples/examples_common/daemon.py" ]
then
python3 $LOCAL_AVANGO/examples/examples_common/daemon.py > /dev/null &
else
python3 $AVANGO/examples/examples_common/daemon.py > /dev/null &
fi
# run program
if [[ $* == *-d* ]]
then
cd "$DIR" && gdb --args python3.4 ./main.py
else
cd "$DIR" && python3.4 ./main.py
fi
# kill daemon
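# %1 is the first background job of this shell, i.e. the daemon started above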
kill %1
|
yaroslav-tarasov/avango
|
examples/shadows/start.sh
|
Shell
|
lgpl-3.0
| 1,188 |
#!/bin/bash
# Building gatekeeper components
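# note: if several gtk*.sh scripts match, bash runs only the first and passes the rest as arguments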
bash gtk*.sh
bash son-sec-gw.sh
|
dang03/son-gkeeper
|
tests/integration/build/build-all.sh
|
Shell
|
apache-2.0
| 76 |
# chmod 0600 /opt/couchdb-search/etc/jmxremote.password
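# exec replaces this shell with the JVM so signals are delivered straight to Clouseau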
exec -c "java -server \
-Xmx2G \
-Dsun.net.inetaddr.ttl=30 \
-Dsun.net.inetaddr.negative.ttl=30 \
-Dlog4j.configuration=file:/opt/couchdb-search/etc/log4j.properties \
-XX:OnOutOfMemoryError="kill -9 %p" \
-XX:+UseConcMarkSweepGC \
-XX:+CMSParallelRemarkEnabled \
-classpath '/opt/couchdb-search/lib/*' \
com.cloudant.clouseau.Main \
/opt/couchdb-search/etc/clouseau.ini"
|
apache/couchdb-docker
|
3.2.0-ubi-clouseau/resources/clouseau/clouseau.sh
|
Shell
|
apache-2.0
| 469 |
#!/bin/sh
#
# Copyright (c) 1980, 1993
# The Regents of the University of California. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 4. Neither the name of the University nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# @(#)vgrind.sh 8.1 (Berkeley) 6/6/93
#
# $FreeBSD$
#
voptions=""
options=""
files=""
f=""
head=""
vf="/usr/libexec/vfontedpr"
tm="/usr/share/tmac"
postproc="psroff"
# Parse args
while test $# -gt 0; do
case $1 in
-f)
f="filter"
options="$options -f"
;;
-t)
voptions="$voptions -t"
;;
-o*)
voptions="$voptions $1"
;;
-W)
voptions="$voptions -W"
;;
-d)
if test $# -lt 2; then
echo "$0: option $1 must have argument" >&2
exit 1
fi
options="$options $1 $2"
shift
;;
-h)
if test $# -lt 2; then
echo "$0: option $1 must have argument" >&2
exit 1
fi
head="$2"
shift
;;
-p)
if test $# -lt 2; then
echo "$0: option $1 must have argument" >&2
exit 1
fi
postproc="$2"
shift
;;
-*)
options="$options $1"
;;
*)
files="$files $1"
;;
esac
shift
done
if test -r index; then
echo > nindex
for i in $files; do
# make up a sed delete command for filenames
# being careful about slashes.
echo "? $i ?d" | sed -e "s:/:\\/:g" -e "s:?:/:g" >> nindex
done
sed -f nindex index > xindex
if test "x$f" = xfilter; then
if test "x$head" != x; then
$vf $options -h "$head" $files
else
$vf $options $files
fi | cat $tm/tmac.vgrind -
else
if test "x$head" != x; then
$vf $options -h "$head" $files
else
$vf $options $files
fi | sh -c "$postproc -rx1 $voptions -i -mvgrind 2>> xindex"
fi
sort -df -k 1,2 xindex > index
rm nindex xindex
else
if test "x$f" = xfilter; then
if test "x$head" != x; then
$vf $options -h "$head" $files
else
$vf $options $files
fi | cat $tm/tmac.vgrind -
else
if test "x$head" != x; then
$vf $options -h "$head" $files
else
$vf $options $files
fi | $postproc -i $voptions -mvgrind
fi
fi
|
jhbsz/OSI-OS
|
usr.bin/vgrind/vgrind.sh
|
Shell
|
bsd-3-clause
| 3,321 |
#!/bin/sh
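# Probe the compiler once to pick a Keccak build flavor (64opt on x86_64, 64compact on other 64-bit, 32BI on 32-bit) and cache the choice in ./arch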
if [ ! -f arch ]; then
echo '#include <stdio.h>
int main(){printf("%d", sizeof(long));return 0;}' | cc -x c -
SIZEOF_LONG=$(./a.out)
rm -f a.out
case $SIZEOF_LONG in
8)
echo 'int main() {
#ifdef __x86_64__
return 0;
#endif
return 1;}' | cc -x c -
./a.out
x86_64=$?
rm -f a.out
if [ $x86_64 -eq 0 ]; then
echo 64opt > arch
else
echo 64compact > arch
fi
;;
4)
echo 32BI > arch
;;
esac
fi
rm -f SnP-interface.h
ln -s $(cat arch)/SnP-interface.h SnP-interface.h
make clean
make CVMFS_CASE_C_FLAGS="$CVMFS_BASE_C_FLAGS" ARCH=$(cat arch)
strip -S libsha3.a
cp -v *.h $EXTERNALS_INSTALL_LOCATION/include/
cp -v libsha3.a $EXTERNALS_INSTALL_LOCATION/lib/
|
Gangbiao/cvmfs
|
externals/sha3/src/makeHook.sh
|
Shell
|
bsd-3-clause
| 755 |
#!/bin/bash
set -e # abort on error
if [[ `pwd` == */test/cfg ]] ; then # we are in test/cfg
CPPCHECK="../../cppcheck"
DIR=""
else # assume we are in repo root
CPPCHECK="./cppcheck"
DIR=./test/cfg/
fi
# posix.c
gcc -fsyntax-only ${DIR}posix.c
${CPPCHECK} --check-library --library=posix --enable=information --enable=style --error-exitcode=1 --suppress=missingIncludeSystem --inline-suppr ${DIR}posix.c
# gnu.c
gcc -fsyntax-only -D_GNU_SOURCE ${DIR}gnu.c
${CPPCHECK} --check-library --library=gnu --enable=information --enable=style --error-exitcode=1 --suppress=missingIncludeSystem --inline-suppr ${DIR}gnu.c
# windows.cpp
#g++ -fsyntax-only ${DIR}windows.cpp
${CPPCHECK} --check-library --library=windows --enable=information --enable=style --error-exitcode=1 --inline-suppr ${DIR}windows.cpp
# std.c
gcc -fsyntax-only ${DIR}std.c
${CPPCHECK} --check-library --enable=information --error-exitcode=1 --enable=style --suppress=missingIncludeSystem --inline-suppr ${DIR}std.c
|
jctampere/cppcheck
|
test/cfg/runtests.sh
|
Shell
|
gpl-3.0
| 985 |
cite about-plugin
about-plugin 'manage your jekyll site'
editpost() {
about 'edit a post'
param '1: site directory'
group 'jekyll'
unset SITE
if [ -z "$1" ]
then
echo "Error: no site specified."
echo "The site is the name of the directory your project is in."
return 1
fi
for site in ${SITES[@]}
do
if [ "$(basename $site)" = "$1" ]
then
SITE=$site
break
fi
done
if [ -z "$SITE" ]
then
echo "No such site."
return 1
fi
builtin cd "$SITE/_posts"
COUNTER=1
NUMBER="$RANDOM"
TMPFILE="/tmp/editpost-$NUMBER"
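# build a numbered "N) date title" menu of all posts in a temp file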
for POST in *
do
DATE=`echo $POST | grep -oE "[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}"`
TITLE=`cat $POST | grep -oE "title: (.+)"`
TITLE=`echo $TITLE | sed 's/title: //'`
echo "$COUNTER) $DATE $TITLE" >> "$TMPFILE"
POSTS[$COUNTER]=$POST
COUNTER=`expr $COUNTER + 1`
done
less $TMPFILE
read -p "Number of post to edit: " POST_TO_EDIT
if [ -z "$JEKYLL_EDITOR" ]
then
nano "${POSTS[$POST_TO_EDIT]}"
else
"$JEKYLL_EDITOR" "${POSTS[$POST_TO_EDIT]}"
fi
}
newpost() {
about 'create a new post'
param '1: site directory'
group 'jekyll'
unset SITE
if [ -z "$1" ]
then
echo "Error: no site specified."
echo "The site is the name of the directory your project is in."
return 1
fi
if [ -z "$SITE" ]
then
echo "No such site."
return 1
fi
loc=0
for site in ${SITES[@]}
do
if [ "$(basename $site)" = "$1" ]
then
SITE=$site
JEKYLL_FORMATTING=${MARKUPS[$loc]}
break
fi
loc=$(($loc+1))
done
# 'builtin cd' into the local jekyll root
builtin cd "$SITE/_posts"
# Get the date for the new post's filename
FNAME_DATE=$(date "+%Y-%m-%d")
# If the user is using markdown or textile formatting, let them choose what type of post they want. Sort of like Tumblr.
OPTIONS="Text Quote Image Audio Video Link"
if [ "$JEKYLL_FORMATTING" = "markdown" ] || [ "$JEKYLL_FORMATTING" = "textile" ]
then
select OPTION in $OPTIONS
do
# any valid menu choice is the post type itself
if [ -n "$OPTION" ]
then
POST_TYPE="$OPTION"
break
fi
done
fi
# Get the title for the new post
read -p "Enter title of the new post: " POST_TITLE
# Convert the spaces in the title to hyphens for use in the filename
FNAME_POST_TITLE=`echo $POST_TITLE | tr ' ' "-"`
# Now, put it all together for the full filename
FNAME="$FNAME_DATE-$FNAME_POST_TITLE.$JEKYLL_FORMATTING"
# And, finally, create the actual post file. But we're not done yet...
touch "$FNAME"
# Write a little stuff to the file for the YAML Front Matter
echo "---" >> $FNAME
# Now we have to get the date, again. But this time for in the header (YAML Front Matter) of
# the file
YAML_DATE=$(date "+%B %d %Y %X")
# Echo the YAML Formatted date to the post file
echo "date: $YAML_DATE" >> $FNAME
# Echo the original post title to the YAML Front Matter header
echo "title: $POST_TITLE" >> $FNAME
# And, now, echo the "post" layout to the YAML Front Matter header
echo "layout: post" >> $FNAME
# Close the YAML Front Matter Header
echo "---" >> $FNAME
echo >> $FNAME
# Generate template text based on the post type
if [[ $JEKYLL_FORMATTING = "markdown" ]]
then
if [[ $POST_TYPE = "Text" ]]
then
true
fi
if [[ $POST_TYPE = "Quote" ]]
then
echo "> Quote" >> $FNAME
echo >> $FNAME
echo "— Author" >> $FNAME
fi
if [[ $POST_TYPE = "Image" ]]
then
echo "" >> $FNAME
fi
if [[ $POST_TYPE = "Audio" ]]
then
echo "<html><audio src=\"/path/to/audio/file\" controls=\"controls\"></audio></html>" >> $FNAME
fi
if [[ $POST_TYPE = "Video" ]]
then
echo "<html><video src=\"/path/to/video\" controls=\"controls\"></video></html>" >> $FNAME
fi
if [[ $POST_TYPE = "Link" ]]
then
echo "[link][1]" >> $FNAME
echo >> $FNAME
echo "> Quote" >> $FNAME
echo >> $FNAME
echo "[1]: url" >> $FNAME
fi
fi
if [[ $JEKYLL_FORMATTING = "textile" ]]
then
if [[ $POST_TYPE = "Text" ]]
then
true
fi
if [[ $POST_TYPE = "Quote" ]]
then
echo "bq. Quote" >> $FNAME
echo >> $FNAME
echo "— Author" >> $FNAME
fi
if [[ $POST_TYPE = "Image" ]]
then
echo "!url(alt text)" >> $FNAME
fi
if [[ $POST_TYPE = "Audio" ]]
then
echo "<html><audio src=\"/path/to/audio/file\" controls=\"controls\"></audio></html>" >> $FNAME
fi
if [[ $POST_TYPE = "Video" ]]
then
echo "<html><video src=\"/path/to/video\" controls=\"controls\"></video></html>" >> $FNAME
fi
if [[ $POST_TYPE = "Link" ]]
then
echo "\"Site\":url" >> $FNAME
echo >> $FNAME
echo "bq. Quote" >> $FNAME
fi
fi
# Open the file in your favorite editor
"$JEKYLL_EDITOR" $FNAME
}
function testsite() {
about 'launches local jekyll server'
param '1: site directory'
group 'jekyll'
unset SITE
if [ -z "$1" ]
then
echo "Error: no site specified."
echo "The site is the name of the directory your project is in."
return 1
fi
for site in ${SITES[@]}
do
if [ "$(basename $site)" = "$1" ]
then
SITE=$site
break
fi
done
if [ -z "$SITE" ]
then
echo "No such site."
return 1
fi
builtin cd $SITE
jekyll --server --auto
}
function buildsite() {
about 'builds site'
param '1: site directory'
group 'jekyll'
unset SITE
if [ -z "$1" ]
then
echo "Error: no site specified."
echo "The site is the name of the directory your project is in."
return 1
fi
for site in ${SITES[@]}
do
if [ "$(basename $site)" = "$1" ]
then
SITE=$site
break
fi
done
if [ -z "$SITE" ]
then
echo "No such site."
return 1
fi
builtin cd $SITE
rm -rf _site
jekyll --no-server
}
function deploysite() {
about 'rsyncs site to remote host'
param '1: site directory'
group 'jekyll'
unset SITE
if [ -z "$1" ]
then
echo "Error: no site specified."
echo "The site is the name of the directory your project is in."
return 1
fi
loc=0
for site in ${SITES[@]}
do
if [ "$(basename $site)" = "$1" ]
then
SITE=$site
REMOTE=${REMOTES[$loc]}
break
fi
loc=$(($loc+1))
done
if [ -z "$SITE" ]
then
echo "No such site."
return 1
fi
builtin cd $SITE
# push the built site to the configured remote
rsync -rz _site/ $REMOTE
}
|
prajnak/bash_it
|
plugins/available/jekyll.plugin.bash
|
Shell
|
mit
| 6,984 |
#!/bin/bash
# Config
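# Arguments: <DirectorY> <OutDir> <FileName> <BuiltDLP> <MD5Sum>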
DirectorY="$1"
OutDir="$2"
FileName="$3"
BuiltDLP="$4"
MD5Sum="$5"
BackupDLP="http://wz2100.net/~dak180/BuildTools/Mac/"
# Make sure we are in the right place
cd "${SRCROOT}"
if [ ! -d "external" ]; then
mkdir external
fi
if [ ! -d "prebuilt" ]; then
mkdir prebuilt
fi
# Checks
if [ "${ACTION}" = "clean" ]; then
# Force cleaning when directed
rm -fRv "prebuilt/${DirectorY}" "external/${OutDir}"
MD5SumLoc="$(md5 -q "prebuilt/${FileName}")"
if [ "${MD5SumLoc}" != "${MD5Sum}" ]; then
rm -fRv "prebuilt/${FileName}"
fi
exit 0
elif [ -d "prebuilt/${DirectorY}" ]; then
# Clean if dirty
echo "error: ${DirectorY} exists, probably from an earlier failed run" >&2
# rm -frv "prebuilt/${DirectorY}"
exit 1
elif [[ -d "external/${OutDir}" ]] && [[ ! -f "prebuilt/${FileName}" ]]; then
# Clean up when updating versions
echo "warning: Cached file is outdated or incomplete, removing" >&2
rm -fR "prebuilt/${DirectorY}" "external/${OutDir}"
elif [[ -d "external/${OutDir}" ]] && [[ -f "prebuilt/${FileName}" ]]; then
# Check to make sure we have the right file
MD5SumLoc="$(cat "external/${OutDir}/.MD5SumLoc" 2>/dev/null || echo "")"
if [ "${MD5SumLoc}" != "${MD5Sum}" ]; then
echo "warning: Cached file is outdated or incorrect, removing" >&2
rm -fR "prebuilt/${DirectorY}" "external/${OutDir}"
MD5SumFle=`md5 -q "prebuilt/${FileName}"`
if [ "${MD5SumFle}" != "${MD5Sum}" ]; then
rm -fR "prebuilt/${FileName}"
fi
else
# Do not do more work then we have to
echo "${OutDir} already exists, skipping" >&2
exit 0
fi
fi
# Fetch
cd prebuilt
if [ ! -f "${FileName}" ]; then
echo "Fetching ${FileName}"
if ! curl -LfO --connect-timeout "30" "${BuiltDLP}"; then
if ! curl -LfOC - --connect-timeout "30" "${BackupDLP}${FileName}"; then
echo "error: Unable to fetch ${SourceDLP}" >&2
exit 1
fi
fi
else
echo "${FileName} already exists, skipping" >&2
fi
# MD5 check
MD5SumLoc="$(md5 -q "${FileName}")"
if [ -z "${MD5SumLoc}" ]; then
echo "error: Unable to compute md5 for ${FileName}" >&2
exit 1
elif [ "${MD5SumLoc}" != "${MD5Sum}" ]; then
echo "error: MD5 does not match for ${FileName}" >&2
exit 1
fi
# Unpack
if ! tar -xzf "${FileName}"; then
echo "error: Unpacking $FileName failed" >&2
exit 1
fi
# Save the sum
echo "${MD5SumLoc}" > "${DirectorY}/.MD5SumLoc"
# Move
if [ ! -d "${DirectorY}" ]; then
echo "error: Can't find $DirectorY to rename" >&2
exit 1
else
cd ..
mv "prebuilt/${DirectorY}" "external/${OutDir}"
touch external/${OutDir}/*
fi
exit 0
|
omgbebebe/warzone2100
|
macosx/configs/FetchPrebuilt.sh
|
Shell
|
gpl-2.0
| 2,537 |
MAKE=make
unm=$(uname -n);
if [ $unm = "ol-collab1" ];
then
MAKE=gmake
fi;
if [ $# -le 1 ]; # pass any param to avoid compilation
then
LATENCY=1 INIT=one GRANULARITY=GLOBAL_LOCK $MAKE -k tas
LATENCY=1 INIT=one $MAKE -k tas
LATENCY=1 INIT=one $MAKE -k lockfree
fi
inits="256 1024 2048 8192 65536"
duration=1000;
source ./scripts/heatmap.config
if [ $# -ge 1 ];
then
source "$1";
else
source ./scripts/executables.config
fi;
for initial in ${inits}
do
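# key range of twice the initial size, likely so the structure stays near its initial size under a balanced update mix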
range=$((2*${initial}));
echo "## initial: $initial";
unm=$(uname -n);
rm -f data/${unm}_*_heatmap_uc_lat_*_${initial}.csv
if [ $do_ll -eq 1 ];
then
echo "# ll (${lb_ll} vs. ${lf_ll})";
./scripts/heatmap_lat.sh "${lb_ll}" "${lf_ll}" u c -i${initial} -r${range} -d$duration
cp data/lat_put_lb.txt data/${unm}_ll_heatmap_uc_lat_put_lb_${initial}.csv
cp data/lat_put_lf.txt data/${unm}_ll_heatmap_uc_lat_put_lf_${initial}.csv
cp data/lat_put_ratio.txt data/${unm}_ll_heatmap_uc_lat_put_ratio_${initial}.csv
cp data/lat_get_lb.txt data/${unm}_ll_heatmap_uc_lat_get_lb_${initial}.csv
cp data/lat_get_lf.txt data/${unm}_ll_heatmap_uc_lat_get_lf_${initial}.csv
cp data/lat_get_ratio.txt data/${unm}_ll_heatmap_uc_lat_get_ratio_${initial}.csv
cp data/lat_rem_lb.txt data/${unm}_ll_heatmap_uc_lat_rem_lb_${initial}.csv
cp data/lat_rem_lf.txt data/${unm}_ll_heatmap_uc_lat_rem_lf_${initial}.csv
cp data/lat_rem_ratio.txt data/${unm}_ll_heatmap_uc_lat_rem_ratio_${initial}.csv
fi
if [ $do_ht -eq 1 ];
then
echo "# ht (${lb_ht} vs. ${lf_ht})";
./scripts/heatmap_lat.sh "${lb_ht}" "${lf_ht}" u c -i${initial} -r${range} -d$duration
cp data/lat_put_lb.txt data/${unm}_ht_heatmap_uc_lat_put_lb_${initial}.csv
cp data/lat_put_lf.txt data/${unm}_ht_heatmap_uc_lat_put_lf_${initial}.csv
cp data/lat_put_ratio.txt data/${unm}_ht_heatmap_uc_lat_put_ratio_${initial}.csv
cp data/lat_get_lb.txt data/${unm}_ht_heatmap_uc_lat_get_lb_${initial}.csv
cp data/lat_get_lf.txt data/${unm}_ht_heatmap_uc_lat_get_lf_${initial}.csv
cp data/lat_get_ratio.txt data/${unm}_ht_heatmap_uc_lat_get_ratio_${initial}.csv
cp data/lat_rem_lb.txt data/${unm}_ht_heatmap_uc_lat_rem_lb_${initial}.csv
cp data/lat_rem_lf.txt data/${unm}_ht_heatmap_uc_lat_rem_lf_${initial}.csv
cp data/lat_rem_ratio.txt data/${unm}_ht_heatmap_uc_lat_rem_ratio_${initial}.csv
fi
if [ $do_sl -eq 1 ];
then
echo "# sl (${lb_sl} vs. ${lf_sl})";
./scripts/heatmap_lat.sh "${lb_sl}" "${lf_sl}" u c -i${initial} -r${range} -d$duration
cp data/lat_put_lb.txt data/${unm}_sl_heatmap_uc_lat_put_lb_${initial}.csv
cp data/lat_put_lf.txt data/${unm}_sl_heatmap_uc_lat_put_lf_${initial}.csv
cp data/lat_put_ratio.txt data/${unm}_sl_heatmap_uc_lat_put_ratio_${initial}.csv
cp data/lat_get_lb.txt data/${unm}_sl_heatmap_uc_lat_get_lb_${initial}.csv
cp data/lat_get_lf.txt data/${unm}_sl_heatmap_uc_lat_get_lf_${initial}.csv
cp data/lat_get_ratio.txt data/${unm}_sl_heatmap_uc_lat_get_ratio_${initial}.csv
cp data/lat_rem_lb.txt data/${unm}_sl_heatmap_uc_lat_rem_lb_${initial}.csv
cp data/lat_rem_lf.txt data/${unm}_sl_heatmap_uc_lat_rem_lf_${initial}.csv
cp data/lat_rem_ratio.txt data/${unm}_sl_heatmap_uc_lat_rem_ratio_${initial}.csv
fi
if [ $do_bst -eq 1 ];
then
echo "# bst (${lb_bst} vs. ${lf_bst})";
./scripts/heatmap_lat.sh "${lb_bst}" "${lf_bst}" u c -i${initial} -r${range} -d$duration
cp data/lat_put_lb.txt data/${unm}_bst_heatmap_uc_lat_put_lb_${initial}.csv
cp data/lat_put_lf.txt data/${unm}_bst_heatmap_uc_lat_put_lf_${initial}.csv
cp data/lat_put_ratio.txt data/${unm}_bst_heatmap_uc_lat_put_ratio_${initial}.csv
cp data/lat_get_lb.txt data/${unm}_bst_heatmap_uc_lat_get_lb_${initial}.csv
cp data/lat_get_lf.txt data/${unm}_bst_heatmap_uc_lat_get_lf_${initial}.csv
cp data/lat_get_ratio.txt data/${unm}_bst_heatmap_uc_lat_get_ratio_${initial}.csv
cp data/lat_rem_lb.txt data/${unm}_bst_heatmap_uc_lat_rem_lb_${initial}.csv
cp data/lat_rem_lf.txt data/${unm}_bst_heatmap_uc_lat_rem_lf_${initial}.csv
cp data/lat_rem_ratio.txt data/${unm}_bst_heatmap_uc_lat_rem_ratio_${initial}.csv
fi
done
#remove data for u0 from put and rem files
sed -i '2d' data/${unm}_*_heatmap_uc_lat_rem_*.csv
sed -i '2d' data/${unm}_*_heatmap_uc_lat_put_*.csv
#remove data for u100 from get files
sed -i '7d' data/${unm}_*_heatmap_uc_lat_get_*.csv
|
egeyar/ASCYLIB
|
scripts/heatmap_lat_all.sh
|
Shell
|
gpl-2.0
| 4,357 |
#!/bin/sh
set -e
GREETING=${GREETING:-Goodbye}
echo "$GREETING World! from $0"
|
kaday/cylc
|
examples/tutorial/oneoff/external/bin/hello.sh
|
Shell
|
gpl-3.0
| 81 |
#!/bin/bash
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
wget https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
tar zxf cifar-10-python.tar.gz
rm cifar-10-python.tar.gz
rm -rf cifar-out/*
echo Converting CIFAR data to images.....
python process_cifar.py ./cifar-10-batches-py ./cifar-out
|
emailweixu/Paddle
|
demo/image_classification/data/download_cifar.sh
|
Shell
|
apache-2.0
| 865 |
#!/bin/sh
#check JAVA_HOME & java
noJavaHome=false
if [ -z "$JAVA_HOME" ] ; then
noJavaHome=true
fi
if [ ! -e "$JAVA_HOME/bin/java" ] ; then
noJavaHome=true
fi
if $noJavaHome ; then
echo
echo "Error: JAVA_HOME environment variable is not set."
echo
exit 1
fi
#==============================================================================
#set JAVA_OPTS
JAVA_OPTS="-server -Xms2G -Xmx2G -XX:MaxPermSize=64M -XX:+AggressiveOpts -XX:MaxDirectMemorySize=2G"
#JAVA_OPTS="-server -Xms4G -Xmx4G -XX:MaxPermSize=64M -XX:+AggressiveOpts -XX:MaxDirectMemorySize=6G"
#performance Options
#JAVA_OPTS="$JAVA_OPTS -Xss256k"
#JAVA_OPTS="$JAVA_OPTS -XX:+AggressiveOpts"
#JAVA_OPTS="$JAVA_OPTS -XX:+UseBiasedLocking"
#JAVA_OPTS="$JAVA_OPTS -XX:+UseFastAccessorMethods"
#JAVA_OPTS="$JAVA_OPTS -XX:+DisableExplicitGC"
#JAVA_OPTS="$JAVA_OPTS -XX:+UseParNewGC"
#JAVA_OPTS="$JAVA_OPTS -XX:+UseConcMarkSweepGC"
#JAVA_OPTS="$JAVA_OPTS -XX:+CMSParallelRemarkEnabled"
#JAVA_OPTS="$JAVA_OPTS -XX:+UseCMSCompactAtFullCollection"
#JAVA_OPTS="$JAVA_OPTS -XX:+UseCMSInitiatingOccupancyOnly"
#JAVA_OPTS="$JAVA_OPTS -XX:CMSInitiatingOccupancyFraction=75"
#JAVA_OPTS="$JAVA_OPTS -XX:CMSInitiatingOccupancyFraction=75"
#GC Log Options
#JAVA_OPTS="$JAVA_OPTS -XX:+PrintGCApplicationStoppedTime"
#JAVA_OPTS="$JAVA_OPTS -XX:+PrintGCTimeStamps"
#JAVA_OPTS="$JAVA_OPTS -XX:+PrintGCDetails"
#debug Options
#JAVA_OPTS="$JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=8065,server=y,suspend=n"
#==============================================================================
#set HOME
CURR_DIR=`pwd`
cd `dirname "$0"`/..
MYCAT_HOME=`pwd`
cd $CURR_DIR
if [ -z "$MYCAT_HOME" ] ; then
echo
echo "Error: MYCAT_HOME environment variable is not defined correctly."
echo
exit 1
fi
#==============================================================================
#set CLASSPATH
MYCAT_CLASSPATH="$MYCAT_HOME/conf:$MYCAT_HOME/lib/classes"
for i in "$MYCAT_HOME"/lib/*.jar
do
MYCAT_CLASSPATH="$MYCAT_CLASSPATH:$i"
done
#==============================================================================
#startup Server
RUN_CMD="\"$JAVA_HOME/bin/java\""
RUN_CMD="$RUN_CMD -DMYCAT_HOME=\"$MYCAT_HOME\""
RUN_CMD="$RUN_CMD -classpath \"$MYCAT_CLASSPATH\""
RUN_CMD="$RUN_CMD $JAVA_OPTS"
RUN_CMD="$RUN_CMD org.opencloudb.util.rehasher.RehashLauncher "
#to specify the following main args
#RUN_CMD="$RUN_CMD -jdbcDriver="
#RUN_CMD="$RUN_CMD -jdbcUrl="
#RUN_CMD="$RUN_CMD -host="
#RUN_CMD="$RUN_CMD -user="
#RUN_CMD="$RUN_CMD -database="
#RUN_CMD="$RUN_CMD -password="
#RUN_CMD="$RUN_CMD -tablesFile="
#RUN_CMD="$RUN_CMD -shardingField="
#RUN_CMD="$RUN_CMD -rehashHostsFile="
#RUN_CMD="$RUN_CMD -hashType="
#RUN_CMD="$RUN_CMD -seed="
#RUN_CMD="$RUN_CMD -virtualBucketTimes="
#RUN_CMD="$RUN_CMD -weightMapFile="
#RUN_CMD="$RUN_CMD -rehashNodeDir="
echo $RUN_CMD
eval $RUN_CMD
#==============================================================================
|
wenerme/Mycat-Server
|
src/main/assembly/bin/rehash.sh
|
Shell
|
apache-2.0
| 2,947 |
#!/usr/bin/env bash
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
# Enter the gRPC repo root
cd $(dirname $0)/../../..
source tools/internal_ci/helper_scripts/prepare_build_linux_rc
python tools/run_tests/run_tests.py \
--use_docker \
-t \
-l all \
-c gcov \
-x report.xml \
-j 16
|
daniel-j-born/grpc
|
tools/internal_ci/linux/grpc_coverage.sh
|
Shell
|
apache-2.0
| 954 |
#!/bin/bash
. ../setup_bash
dc_shell-xg-t -f step1.tcl
|
anderson1008/NOCulator
|
hring/hw/synthesis/scripts/calf_energy/step1.sh
|
Shell
|
mit
| 57 |
#!/bin/bash
telnet nyancat.dakko.us
|
ultranaut/illacceptanything
|
code/nyan.sh
|
Shell
|
mit
| 36 |
#!/bin/bash
FN="TxDb.Celegans.UCSC.ce6.ensGene_3.2.2.tar.gz"
URLS=(
"http://bioconductor.org/packages/3.6/data/annotation/src/contrib/TxDb.Celegans.UCSC.ce6.ensGene_3.2.2.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-txdb.celegans.ucsc.ce6.ensgene/bioconductor-txdb.celegans.ucsc.ce6.ensgene_3.2.2_src_all.tar.gz"
)
MD5="215c2edd440a3df8229fe6a75b431aa8"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $STAGING
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in ${URLS[@]}; do
wget -O- -q $URL > $TARBALL
[[ $? == 0 ]] || continue
# Platform-specific md5sum checks.
if [[ $(uname -s) == "Linux" ]]; then
if md5sum -c <<<"$MD5 $TARBALL"; then
SUCCESS=1
break
fi
elif [[ $(uname -s) == "Darwin" ]]; then
if [[ $(md5 $TARBALL | cut -f4 -d " ") == "$MD5" ]]; then
SUCCESS=1
break
fi
fi
done
if [[ $SUCCESS != 1 ]]; then
echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
printf '%s\n' "${URLS[@]}"
exit 1
fi
# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL
rm $TARBALL
rmdir $STAGING
|
dmaticzka/bioconda-recipes
|
recipes/bioconductor-txdb.celegans.ucsc.ce6.ensgene/post-link.sh
|
Shell
|
mit
| 1,317 |
#!/bin/sh
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
verbose=1
set -e
# Setup
PASS=1
# Define the .cdl files to test
CLASSIC="small ref_tst_nans ref_tst_utf8"
EXTENDED="ref_nc_test_netcdf4 ref_tst_comp ref_tst_opaque_data"
rm -fr ./results_tst_inmemory_nc4
mkdir ./results_tst_inmemory_nc4
# Dump classic files two ways and compare
dotest() {
K=$1
for f in $2 ; do
echo "Testing ${f}"
${NCGEN} -$K -o ./results_tst_inmemory_nc4/${f}.nc ${srcdir}/${f}.cdl
${NCDUMP} ./results_tst_inmemory_nc4/${f}.nc > ./results_tst_inmemory_nc4/${f}.cdl
${NCDUMP} -Xm ./results_tst_inmemory_nc4/${f}.nc > ./results_tst_inmemory_nc4/${f}.cdx
diff -w ./results_tst_inmemory_nc4/${f}.cdl ./results_tst_inmemory_nc4/${f}.cdx > ./results_tst_inmemory_nc4/${f}.diff 2>&1
if test -s ./results_tst_inmemory_nc4/${f}.diff ; then
echo "***FAIL: $f"
PASS=0
fi
done
}
dotest "3" "$CLASSIC"
dotest "5" "$EXTENDED5"
if test -f ${top_builddir}/config.h ; then
if fgrep -e '#define USE_NETCDF4 1' ${top_builddir}/config.h >/dev/null ; then
dotest "4" "$EXTENDED4"
fi
fi
# Cleanup
rm -fr results_tst_inmemory_nc4
if test "x$PASS" = x1 ; then
echo "*** PASS all tests"
CODE=0
else
CODE=1
fi
exit $CODE
|
Unidata/netcdf-c
|
ncdump/tst_inmemory_nc4.sh
|
Shell
|
bsd-3-clause
| 1,235 |
#!/bin/bash
cd ../modules/flowable-ui-task
./start.sh
|
gro-mar/flowable-engine
|
scripts/start-ui.sh
|
Shell
|
apache-2.0
| 54 |
#!/bin/bash
cd `dirname $0`
cd ..
find . | grep -f scripts/count_lines.include | grep -v -f scripts/count_lines.exclude | xargs cat | wc -l
|
jasonkajita/Espruino
|
scripts/count_lines.sh
|
Shell
|
mpl-2.0
| 142 |
#!/bin/bash
# Copyright 2015-01-29 by Tim Eifler
# This file is part of the rsyslog project, released under ASL 2.0
# The configuration test should fail because of the invalid config file.
echo ===============================================================================
echo \[abort-uncleancfg-badcfg_1.sh\]: testing abort on unclean configuration
echo "testing a bad Configuration verification run"
. $srcdir/diag.sh init
../tools/rsyslogd -C -N1 -f$srcdir/testsuites/abort-uncleancfg-badcfg_1.conf -M../runtime/.libs:../.libs
if [ $? == 0 ]; then
echo "Error: config check should fail"
exit 1
fi
. $srcdir/diag.sh exit
|
RomeroMalaquias/rsyslog
|
tests/abort-uncleancfg-badcfg-check_1.sh
|
Shell
|
gpl-3.0
| 635 |
# Install Ruby from source in /opt so that users of Vagrant
# can install their own Rubies using packages or however.
# We must install the 1.8.x series since Puppet doesn't support
# Ruby 1.9 yet.
wget http://ftp.ruby-lang.org/pub/ruby/1.8/ruby-1.8.7-p334.tar.gz
tar xvzf ruby-1.8.7-p334.tar.gz
cd ruby-1.8.7-p334
./configure --prefix=/opt/ruby
make
make install
cd ..
rm -rf ruby-1.8.7-p334*
# Install RubyGems 1.8.17
wget http://production.cf.rubygems.org/rubygems/rubygems-1.8.17.tgz
tar xzf rubygems-1.8.17.tgz
cd rubygems-1.8.17
/opt/ruby/bin/ruby setup.rb
cd ..
rm -rf rubygems-1.8.17*
# Add /opt/ruby/bin to the global path as the last resort so
# Ruby, RubyGems, and Chef/Puppet are visible
echo 'PATH=$PATH:/opt/ruby/bin/'> /etc/profile.d/vagrantruby.sh
|
Gamevy/veewee
|
templates/ubuntu-10.04.4-server-i386/ruby.sh
|
Shell
|
mit
| 767 |
#!/bin/sh
# Blackbox test for wbinfo
if [ $# -lt 4 ]; then
cat <<EOF
Usage: test_wbinfo.sh DOMAIN USERNAME PASSWORD TARGET
EOF
exit 1;
fi
DOMAIN=$1
USERNAME=$2
PASSWORD=$3
TARGET=$4
shift 4
failed=0
samba4bindir="$BINDIR"
wbinfo="$VALGRIND $samba4bindir/wbinfo"
. `dirname $0`/../../testprogs/blackbox/subunit.sh
testfail() {
name="$1"
shift
cmdline="$*"
echo "test: $name"
$cmdline
status=$?
if [ x$status = x0 ]; then
echo "failure: $name"
else
echo "success: $name"
fi
return $status
}
knownfail() {
name="$1"
shift
cmdline="$*"
echo "test: $name"
$cmdline
status=$?
if [ x$status = x0 ]; then
echo "failure: $name [unexpected success]"
status=1
else
echo "knownfail: $name"
status=0
fi
return $status
}
# List users
testit "wbinfo -u against $TARGET" $wbinfo -u || failed=`expr $failed + 1`
# List groups
testit "wbinfo -g against $TARGET" $wbinfo -g || failed=`expr $failed + 1`
# Convert netbios name to IP
# Does not work yet
testit "wbinfo -N against $TARGET" $wbinfo -N $NETBIOSNAME || failed=`expr $failed + 1`
# Convert IP to netbios name
# Does not work yet
testit "wbinfo -I against $TARGET" $wbinfo -I $SERVER_IP || failed=`expr $failed + 1`
# Convert name to SID
testit "wbinfo -n against $TARGET" $wbinfo -n "$DOMAIN/$USERNAME" || failed=`expr $failed + 1`
admin_sid=`$wbinfo -n "$DOMAIN/$USERNAME" | cut -d " " -f1`
echo "$DOMAIN/$USERNAME resolved to $admin_sid"
testit "wbinfo -s $admin_sid against $TARGET" $wbinfo -s $admin_sid || failed=`expr $failed + 1`
admin_name=`$wbinfo -s $admin_sid | cut -d " " -f1| tr a-z A-Z`
echo "$admin_sid resolved to $admin_name"
tested_name=`echo $DOMAIN/$USERNAME | tr a-z A-Z`
echo "test: wbinfo -s check for sane mapping"
if test x$admin_name != x$tested_name; then
echo "$admin_name does not match $tested_name"
echo "failure: wbinfo -s check for sane mapping"
failed=`expr $failed + 1`
else
echo "success: wbinfo -s check for sane mapping"
fi
testit "wbinfo -n on the returned name against $TARGET" $wbinfo -n $admin_name || failed=`expr $failed + 1`
test_sid=`$wbinfo -n $tested_name | cut -d " " -f1`
echo "test: wbinfo -n check for sane mapping"
if test x$admin_sid != x$test_sid; then
echo "$admin_sid does not match $test_sid"
echo "failure: wbinfo -n check for sane mapping"
failed=`expr $failed + 1`
else
echo "success: wbinfo -n check for sane mapping"
fi
testit "wbinfo -U against $TARGET" $wbinfo -U 30000 || failed=`expr $failed + 1`
echo "test: wbinfo -U check for sane mapping"
sid_for_30000=`$wbinfo -U 30000`
if test x$sid_for_30000 != "xS-1-22-1-30000"; then
echo "uid 30000 mapped to $sid_for_30000, not S-1-22-1-30000"
echo "failure: wbinfo -U check for sane mapping"
failed=`expr $failed + 1`
else
echo "success: wbinfo -U check for sane mapping"
fi
admin_uid=`$wbinfo -S $admin_sid`
testit "wbinfo -G against $TARGET" $wbinfo -G 30000 || failed=`expr $failed + 1`
echo "test: wbinfo -G check for sane mapping"
sid_for_30000=`$wbinfo -G 30000`
if test x$sid_for_30000 != "xS-1-22-2-30000"; then
echo "gid 30000 mapped to $sid_for_30000, not S-1-22-2-30000"
echo "failure: wbinfo -G check for sane mapping"
failed=`expr $failed + 1`
else
echo "success: wbinfo -G check for sane mapping"
fi
testit "wbinfo -S against $TARGET" $wbinfo -S "S-1-22-1-30000" || failed=`expr $failed + 1`
echo "test: wbinfo -S check for sane mapping"
uid_for_sid=`$wbinfo -S S-1-22-1-30000`
if test 0$uid_for_sid -ne 30000; then
echo "S-1-22-1-30000 mapped to $uid_for_sid, not 30000"
echo "failure: wbinfo -S check for sane mapping"
failed=`expr $failed + 1`
else
echo "success: wbinfo -S check for sane mapping"
fi
testfail "wbinfo -S against $TARGET using invalid SID" $wbinfo -S "S-1-22-2-30000" && failed=`expr $failed + 1`
testit "wbinfo -Y against $TARGET" $wbinfo -Y "S-1-22-2-30000" || failed=`expr $failed + 1`
echo "test: wbinfo -Y check for sane mapping"
gid_for_sid=`$wbinfo -Y S-1-22-2-30000`
if test 0$gid_for_sid -ne 30000; then
echo "S-1-22-2-30000 mapped to $gid_for_sid, not 30000"
echo "failure: wbinfo -Y check for sane mapping"
failed=`expr $failed + 1`
else
echo "success: wbinfo -Y check for sane mapping"
fi
testfail "wbinfo -Y against $TARGET using invalid SID" $wbinfo -Y "S-1-22-1-30000" && failed=`expr $failed + 1`
testit "wbinfo -t against $TARGET" $wbinfo -t || failed=`expr $failed + 1`
#didn't really work anyway
testit "wbinfo --trusted-domains against $TARGET" $wbinfo --trusted-domains || failed=`expr $failed + 1`
testit "wbinfo --all-domains against $TARGET" $wbinfo --all-domains || failed=`expr $failed + 1`
testit "wbinfo --own-domain against $TARGET" $wbinfo --own-domain || failed=`expr $failed + 1`
echo "test: wbinfo --own-domain against $TARGET check output"
own_domain=`$wbinfo --own-domain`
if test x$own_domain = x$DOMAIN; then
echo "success: wbinfo --own-domain against $TARGET check output"
else
echo "Own domain reported as $own_domain instead of $DOMAIN"
echo "failure: wbinfo --own-domain against $TARGET check output"
failed=`expr $failed + 1`
fi
# this does not work
knownfail "wbinfo --sequence against $TARGET" $wbinfo --sequence
# this is stubbed out now
testit "wbinfo -D against $TARGET" $wbinfo -D $DOMAIN || failed=`expr $failed + 1`
testit "wbinfo -i against $TARGET" $wbinfo -i "$DOMAIN/$USERNAME" || failed=`expr $failed + 1`
echo "test: wbinfo --group-info against $TARGET"
gid=`$wbinfo --group-info "$DOMAIN/Domain users" | cut -d: -f3`
if test x$? = x0; then
echo "success: wbinfo --group-info against $TARGET"
else
echo "failure: wbinfo --group-info against $TARGET"
failed=`expr $failed + 1`
fi
test_name="wbinfo -i against $TARGET"
subunit_start_test "$test_name"
passwd_line=`$wbinfo -i "$DOMAIN/$USERNAME"`
if test x$? = x0; then
subunit_pass_test "$test_name"
else
subunit_fail_test "$test_name"
failed=`expr $failed + 1`
fi
test_name="confirm output of wbinfo -i against $TARGET"
subunit_start_test "$test_name"
# The full name (GECOS) is based on name (the RDN, in this case CN)
# and displayName in winbindd_ads, and is based only on displayName in
# winbindd_msrpc and winbindd_rpc. Allow both versions.
expected_line="$DOMAIN/administrator:*:$admin_uid:$gid:Administrator:/home/$DOMAIN/administrator:/bin/false"
expected2_line="$DOMAIN/administrator:*:$admin_uid:$gid::/home/$DOMAIN/administrator:/bin/false"
if test x$passwd_line = x"$expected_line" -o x$passwd_line = x"$expected2_line"; then
subunit_pass_test "$test_name"
else
echo "expected '$expected_line' or '$expected2_line' got '$passwd_line'" | subunit_fail_test "$test_name"
failed=`expr $failed + 1`
fi
test_name="wbinfo --uid-info against $TARGET"
subunit_start_test "$test_name"
passwd_line=`$wbinfo --uid-info=$admin_uid`
if test x$? = x0; then
subunit_pass_test "$test_name"
else
subunit_fail_test "$test_name"
failed=`expr $failed + 1`
fi
test_name="confirm output of wbinfo --uid-info against $TARGET"
subunit_start_test "$test_name"
if test x$passwd_line = x"$expected_line" -o x$passwd_line = x"$expected2_line"; then
subunit_pass_test "$test_name"
else
echo "expected '$expected_line' or '$expected2_line' got '$passwd_line'" | subunit_fail_test "$test_name"
failed=`expr $failed + 1`
fi
testfail "wbinfo --group-info against $TARGET with $USERNAME" $wbinfo --group-info $USERNAME && failed=`expr $failed + 1`
testit "wbinfo --gid-info against $TARGET" $wbinfo --gid-info $gid || failed=`expr $failed + 1`
testit "wbinfo -r against $TARGET" $wbinfo -r "$DOMAIN/$USERNAME" || failed=`expr $failed + 1`
testit "wbinfo --user-domgroups against $TARGET" $wbinfo --user-domgroups $admin_sid || failed=`expr $failed + 1`
testit "wbinfo --user-sids against $TARGET" $wbinfo --user-sids $admin_sid || failed=`expr $failed + 1`
testit "wbinfo -a against $TARGET with domain creds" $wbinfo -a "$DOMAIN/$USERNAME"%"$PASSWORD" || failed=`expr $failed + 1`
testit "wbinfo --getdcname against $TARGET" $wbinfo --getdcname=$DOMAIN
testit "wbinfo -p against $TARGET" $wbinfo -p || failed=`expr $failed + 1`
testit "wbinfo -K against $TARGET with domain creds" $wbinfo -K "$DOMAIN/$USERNAME"%"$PASSWORD" || failed=`expr $failed + 1`
testit "wbinfo --separator against $TARGET" $wbinfo --separator || failed=`expr $failed + 1`
exit $failed
|
sYnfo/samba-1
|
nsswitch/tests/test_wbinfo.sh
|
Shell
|
gpl-3.0
| 8,420 |
#!/bin/sh
cd /usr/share/ppsspp
exec ./PPSSPPSDL "$@"
|
radare/void-packages
|
srcpkgs/ppsspp/files/ppsspp-sdl.sh
|
Shell
|
bsd-2-clause
| 51 |
#!/bin/sh
iptables --list --table filter
iptables --list --table mangle
iptables --list --table nat
|
wangybgit/Chameleon
|
wifidog-1.2.1/contrib/dump_fw.sh
|
Shell
|
apache-2.0
| 96 |
read -r -p "Are you sure you want to remove Oh My Zsh? [y/N] " confirmation
if [ "$confirmation" != y ] && [ "$confirmation" != Y ]; then
echo "Uninstall cancelled"
exit
fi
echo "Removing ~/.oh-my-zsh"
if [ -d ~/.oh-my-zsh ]; then
rm -rf ~/.oh-my-zsh
fi
echo "Looking for original zsh config..."
ZSHRC_ORIG=~/.zshrc.pre-oh-my-zsh
if [ -e "$ZSHRC_ORIG" ]; then
echo "Found $ZSHRC_ORIG -- Restoring to ~/.zshrc"
if [ -e ~/.zshrc ]; then
ZSHRC_SAVE=~/.zshrc.omz-uninstalled-$(date +%Y-%m-%d_%H-%M-%S)
echo "Found ~/.zshrc -- Renaming to ${ZSHRC_SAVE}"
mv ~/.zshrc "${ZSHRC_SAVE}"
fi
mv "$ZSHRC_ORIG" ~/.zshrc
echo "Your original zsh config was restored."
fi
if hash chsh >/dev/null 2>&1; then
if [ -f ~/.shell.pre-oh-my-zsh ]; then
old_shell=$(cat ~/.shell.pre-oh-my-zsh)
else
old_shell=/bin/bash
fi
echo "Switching your shell back to '$old_shell':"
if chsh -s "$old_shell"; then
rm -f ~/.shell.pre-oh-my-zsh
else
echo "Could not change default shell. Change it manually by running chsh"
echo "or editing the /etc/passwd file."
fi
fi
echo "Thanks for trying out Oh My Zsh. It's been uninstalled."
echo "Don't forget to restart your terminal!"
|
mikeatkins/oh-my-zsh
|
tools/uninstall.sh
|
Shell
|
mit
| 1,203 |
ffmpeg -loglevel verbose -re -i ~/movie.avi -f flv rtmp://localhost/myapp/mystream
|
wangfakang/ReadNginxSrc
|
third_modules/nginx-rtmp-module/test/ffstream.sh
|
Shell
|
bsd-2-clause
| 84 |
#!/bin/bash
#
# Copyright © 2012-2013 Sergio Arroutbi Braojos <[email protected]>
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided that
# the above copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
# OR PERFORMANCE OF THIS SOFTWARE.
#
# This script gets all the bicycles from
# Probikeshop store !
# URL: www.probikeshop.es
URL="http://www.probikeshop.es"
ONLY_DOMAIN="probikeshop.es"
EXCLUDE="-Rgif -Rpng -Rjpg"
MAX_TRIES=15
MAX_TIMEOUT=15
MAX_WGET_TRIES=10
# include common functions
. ./common_get
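# get_page BASE_URL [PAGES]: wget BASE_URL (or BASE_URL+page for each page), retrying failed downloads up to MAX_WGET_TRIES times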
function get_page()
{
let counter=0
let result=1
BASE_URL="$1"
PAGES="$2"
if [ "${PAGES}" = "" ];
then
echo "GETTING PAGE=>${BASE_URL}<="
while [ $counter -le ${MAX_WGET_TRIES} -a $result -ne 0 ]; do
wget --retry-connrefused -w5 --random-wait -e robots=off -U 'mozilla' --tries=${MAX_TRIES} --timeout=${MAX_TIMEOUT} "${BASE_URL}"
result=$?
echo "RESULT:${result},WGET_TRIES:${counter}<="
let counter=$counter+1
done
else
for page in ${PAGES};
do
echo "GETTING PAGE=>${BASE_URL}${page}<="
while [ $counter -le ${MAX_WGET_TRIES} -a $result -ne 0 ]; do
wget --retry-connrefused -w5 --random-wait -e robots=off -U 'mozilla' --tries=${MAX_TRIES} --timeout=${MAX_TIMEOUT} "${BASE_URL}${page}"
result=$?
echo "RESULT:${result},WGET_TRIES:${counter}<="
let counter=$counter+1
done
let counter=0
let result=1
done
fi
}
URBAN_BIKES_FILE="ciudad-bicicletas-urbanas-c3656.html"
URBAN_BIKES_BASE="${URL}/carretera/${URBAN_BIKES_FILE}"
MTB_BIKES_FILE="bicicletas-c517.html?page="
MTB_BIKES_BASE="${URL}/mtb/${MTB_BIKES_FILE}"
MTB_BIKES_PAGES="$(seq -s ' ' 1 7)"
MTB_WOMAN_FILE="bicicletas-c640.html"
MTB_WOMAN_BASE="${URL}/mujer/${MTB_WOMAN_FILE}"
ROAD_BIKES_FILE="bicicletas-c616.html?page="
ROAD_BIKES_BASE="${URL}/carretera/${ROAD_BIKES_FILE}"
ROAD_BIKES_PAGES="$(seq -s ' ' 1 3)"
BMX_BIKES_FILE="bicicletas-bmx-c673.html?page="
BMX_BIKES_BASE="${URL}/bmx/${BMX_BIKES_FILE}"
BMX_BIKES_PAGES="$(seq -s ' ' 1 10)"
KIDS00_BIKES_FILE="bicicletas-ninos-bicis-ninos-de-12-a-18-c739.html"
KIDS01_BIKES_FILE="bicicletas-ninos-c692.html"
KIDS01B_BIKES_FILE="bicis-de-aprendizaje-patinetes-c698.html"
KIDS02_BIKES_FILE="bicis-de-aprendizaje-sin-pedales-c693.html?page="
KIDS03_BIKES_FILE="bicis-de-aprendizaje-cuatriciclos-c696.html"
KIDS04_BIKES_FILE="bicis-de-aprendizaje-triciclos-c697.html"
KIDS05_BIKES_FILE="bicicletas-ninos-bmx-ninos-c3489.html"
KIDS06_BIKES_FILE="bicicletas-ninos-bicis-ninos-de-12-a-14-c739.html"
KIDS07_BIKES_FILE="bicicletas-ninos-bicis-ninos-de-16-a-18-c3490.html"
KIDS08_BIKES_FILE="bicicletas-ninos-bicis-ninos-de-20-c757.html"
KIDS09_BIKES_FILE="bicicletas-ninos-bicis-ninos-de-24-c3491.html"
KIDS00_BIKES_BASE="${URL}/ninos/${KIDS00_BIKES_FILE}"
KIDS01_BIKES_BASE="${URL}/ninos/${KIDS01_BIKES_FILE}"
KIDS01B_BIKES_BASE="${URL}/ninos/${KIDS01B_BIKES_FILE}"
KIDS02_BIKES_BASE="${URL}/ninos/${KIDS02_BIKES_FILE}"
KIDS02_BIKES_PAGES="$(seq -s ' ' 1 3)"
KIDS03_BIKES_BASE="${URL}/ninos/${KIDS03_BIKES_FILE}"
KIDS04_BIKES_BASE="${URL}/ninos/${KIDS04_BIKES_FILE}"
KIDS05_BIKES_BASE="${URL}/ninos/${KIDS05_BIKES_FILE}"
KIDS06_BIKES_BASE="${URL}/ninos/${KIDS06_BIKES_FILE}"
KIDS07_BIKES_BASE="${URL}/ninos/${KIDS07_BIKES_FILE}"
KIDS08_BIKES_BASE="${URL}/ninos/${KIDS08_BIKES_FILE}"
KIDS09_BIKES_BASE="${URL}/ninos/${KIDS09_BIKES_FILE}"
bubic_get_page_js "${URBAN_BIKES_BASE}" "${URBAN_BIKES_FILE}"
bubic_get_pages_js "${MTB_BIKES_BASE}" "${MTB_BIKES_FILE}" "${MTB_BIKES_PAGES}"
bubic_get_page_js "${MTB_WOMAN_BASE}" "${MTB_WOMAN_FILE}"
bubic_get_pages_js "${ROAD_BIKES_BASE}" "${ROAD_BIKES_FILE}" "${ROAD_BIKES_PAGES}"
bubic_get_pages_js "${BMX_BIKES_BASE}" "${BMX_BIKES_FILE}" "${BMX_BIKES_PAGES}"
bubic_get_page_js "${KIDS00_BIKES_BASE}" "${KIDS00_BIKES_FILE}"
bubic_get_page_js "${KIDS01_BIKES_BASE}" "${KIDS01_BIKES_FILE}"
bubic_get_page_js "${KIDS01B_BIKES_BASE}" "${KIDS01B_BIKES_FILE}"
bubic_get_pages_js "${KIDS02_BIKES_BASE}" "${KIDS02_BIKES_FILE}" "${KIDS02_BIKES_PAGES}"
bubic_get_page_js "${KIDS03_BIKES_BASE}" "${KIDS03_BIKES_FILE}"
bubic_get_page_js "${KIDS04_BIKES_BASE}" "${KIDS04_BIKES_FILE}"
bubic_get_page_js "${KIDS05_BIKES_BASE}" "${KIDS05_BIKES_FILE}"
bubic_get_page_js "${KIDS06_BIKES_BASE}" "${KIDS06_BIKES_FILE}"
bubic_get_page_js "${KIDS07_BIKES_BASE}" "${KIDS07_BIKES_FILE}"
bubic_get_page_js "${KIDS08_BIKES_BASE}" "${KIDS08_BIKES_FILE}"
bubic_get_page_js "${KIDS09_BIKES_BASE}" "${KIDS09_BIKES_FILE}"
|
sarroutbi/buscobici
|
getscripts/probikeshop_get.sh
|
Shell
|
isc
| 5,070 |
#!/usr/bin/env sh
#
# Parse input using Enju and convert it to PTB-format.
#
# Author: Pontus Stenetorp <pontus stenetorp se>
# Version: 2013-02-27
set -e
SCRIPT_DIR=`dirname "$0"`
TLS_DIR=${SCRIPT_DIR}/../tls
ENJU_DIR=${TLS_DIR}/ext/enju
STEPP_PATH=${ENJU_DIR}/bin/stepp
MEDLINE_MODEL_PATH=${ENJU_DIR}/share/stepp/models_medline
FLATPARSER_PATH=${TLS_DIR}/flatparser.py
TOKENISE_PATH=${TLS_DIR}/GTB-tokenize.pl
# Replace sentences that Enju failed to parse with a corresponding flat parse.
error_repl () {
LINE_NUM=1
while read LINE
do
LINE_START=`echo ${LINE} | cut -c -12`
if [ "${LINE_START}" = '(TOP (error ' ]
then
echo "WARNING: Parse error for line:\t${LINE_NUM}" 1>&2
# Note: A small oddity here is that Enju actually preserves the
# final punctuation mark when it has an error, but not upon
# success. We emulate the same behaviour and don't remove the
# final punctuation mark for the flat parses.
LINE=`echo "${LINE}" | ${FLATPARSER_PATH} -t`
fi
echo "${LINE}"
LINE_NUM=`expr ${LINE_NUM} + 1`
done
}
# Note: We kill blank lines and leave no trace of them, this is consistent
# with for example the Stanford Parser, but it breaks the converter.
# Note: We replace UNK (unknown) PoS-tags with NP, since NP is the majority
# class and thus a feasible candidate.
sed '/^$/d' \
| ${TOKENISE_PATH} \
| ${ENJU_DIR}/enju -A -genia -xml \
-t "${STEPP_PATH} -e -p -m ${MEDLINE_MODEL_PATH}" \
| ${ENJU_DIR}/share/enju2ptb/convert -genia \
| ${TLS_DIR}/postenju2ptb.prl \
| sed -e 's|(UNK |(NP |g' \
| error_repl
|
ninjin/bionlp_st_2013_supporting
|
src/enju_ptb.sh
|
Shell
|
isc
| 1,709 |
#!/bin/sh
export MBD_LISTEN="0.0.0.0:8080"
export MBD_MPD_SERVER="localhost:6600"
export MBD_MUSIC_DIRECTORY="/home/mastensg/lib/audio"
./02
|
mastensg/52
|
02/run.sh
|
Shell
|
isc
| 143 |
scriptdir=$(cd "$(dirname $0)"; pwd)
source ${scriptdir}/etcd.conf
# CLOUD SERVICE AND VMS
# ------------------------>
# Create etcd cloud service
azure service create --serviceName $CLOUD_SERVICE --location "$LOCATION"
# Prepare metadata
storagename=${CLOUD_SERVICE}store
storagekey=$(azure storage account keys list $storagename | grep -i Primary | cut -d':' -f3 | sed -e 's# ##')
storagepath=$(azure storage account show $storagename --json | grep ${storagename}.file | cut -d'"' -f2 | sed -e 's#https:##' | awk '{ print $1"cluster-files" }')
blobpath=$(azure storage account show $storagename --json | grep ${storagename}.blob | cut -d'"' -f2 | awk '{ print $1"vhd" }' )
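# to-initial-cluster.sh presumably renders the VM names/IPs into etcd's initial-cluster string (name1=ip1,name2=ip2,...)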
initialcluster=$(./bootstrap-files/to-initial-cluster.sh -n $VM_PREFIX ${VM_IPS[@]})
cat ./bootstrap-files/etcd-template.yaml ./bootstrap-files/etcd-template-azure-xtn.yaml > /tmp/etcd-template.yaml
# Create virtual machines (enabling ssh but deleting public endpoint)
for i in `seq ${#VM_IPS[@]}`; do
membername="$VM_PREFIX$i"
replaceexpr="s#<MEMBER-NAME>#$membername#;s#<INITIAL-CLUSTER>#$initialcluster#;s#<STORAGEACC>#$storagename#;s#<STORAGEKEY>#$storagekey#;s#<STORAGEPATH>#$storagepath#;"
sed -e $replaceexpr /tmp/etcd-template.yaml > "/tmp/${membername}-config.yaml"
azure vm create --vm-name=$membername --blob-url="$blobpath/$membername.vhd" --static-ip="${VM_IPS[i-1]}" \
--userName=$VM_USER --password=$VM_PASSWORD --virtual-network-name=$VNET --connect=$CLOUD_SERVICE \
--custom-data "/tmp/${membername}-config.yaml" \
--vm-size=Basic_A0 --ssh "1001$i" $VM_IMAGE \
&& azure vm endpoint delete $membername ssh;
done;
|
xynova/coreos-tls-secured
|
vagrant-builds/azure/scripts/etcd-peers.sh
|
Shell
|
mit
| 1,636 |
#!/bin/sh
readonly RESPONSE_FILE=/root/db2response.rsp
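# get_param KEY [DEFAULT]: look up KEY=value in the response file ('*' starts a trailing comment), falling back to DEFAULT when unset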
get_param() {
local PARAM=`grep "^$1" $RESPONSE_FILE | cut -d* -f1 | cut -d= -f2 | sed -e 's/ //g'`
    if [ -n "$PARAM" ]; then
echo $PARAM
else
echo $2
fi
}
DB2_FILEPATH=`get_param FILE`
DB2_INST_NAME=`get_param DB2_INST.NAME db2inst1`
DB2_INST_UID=`get_param DB2_INST.UID 1001`
DB2_INST_GROUP_NAME=`get_param DB2_INST.GROUP_NAME db2iadm1`
DB2_INST_GID=`get_param DB2_INST.GID 901`
DB2_INST_PASSWORD=`get_param DB2_INST.PASSWORD db2inst1`
DB2_INST_HOME_DIRECTORY=`get_param DB2_INST.HOME_DIRECTORY /opt/db2inst1`
DB2_INST_FENCED_USERNAME=`get_param DB2_INST.FENCED_USERNAME db2sdfe1`
DB2_INST_FENCED_UID=`get_param DB2_INST.FENCED_UID 1002`
DB2_INST_FENCED_GROUP_NAME=`get_param DB2_INST.FENCED_GROUP_NAME db2fsdm1`
DB2_INST_FENCED_GID=`get_param DB2_INST.FENCED_GID 902`
DB2_INST_FENCED_PASSWORD=`get_param DB2_INST.FENCED_PASSWORD db2sdfe1`
DB2_INST_FENCED_HOME_DIRECTORY=`get_param DB2_INST.FENCED_HOME_DIRECTORY /opt/db2sdfe1`
DB2_INST_SVCENAME=`get_param DB2_INST.SVCENAME db2c_db2inst1`
DB2_INST_PORT_NUMBER=`get_param DB2_INST.PORT_NUMBER 50000`
DATABASE=`get_param DATABASE`
DATABASE_NAME=`get_param $DATABASE.DATABASE_NAME`
DATABASE_USERNAME=`get_param $DATABASE.USERNAME`
DATABASE_PASSWORD=`get_param $DATABASE.PASSWORD`
echo "DB2_INST.NAME=$DB2_INST_NAME"
echo "DB2_INST.UID=$DB2_INST_UID"
echo "DB2_INST.GROUP_NAME=$DB2_INST_GROUP_NAME"
echo "DB2_INST.GID=$DB2_INST_GID"
echo "DB2_INST.PASSWORD=$DB2_INST_PASSWORD"
echo "DB2_INST.HOME_DIRECTORY=$DB2_INST_HOME_DIRECTORY"
echo "DB2_INST.FENCED_USERNAME=$DB2_INST_FENCED_USERNAME"
echo "DB2_INST.FENCED_UID=$DB2_INST_FENCED_UID"
echo "DB2_INST.FENCED_GROUP_NAME=$DB2_INST_FENCED_GROUP_NAME"
echo "DB2_INST.FENCED_GID=$DB2_INST_FENCED_GID"
echo "DB2_INST.FENCED_PASSWORD=$DB2_INST_FENCED_PASSWORD"
echo "DB2_INST.FENCED_HOME_DIRECTORY=$DB2_INST_FENCED_HOME_DIRECTORY"
echo "DB2_INST.SVCENAME=$DB2_INST_SVCENAME"
echo "DB2_INST.PORT_NUMBER=$DB2_INST_PORT_NUMBER"
echo "DATABASE=$DATABASE"
echo "$DATABASE.DATABASE_NAME=$DATABASE_NAME"
echo "$DATABASE.USERNAME=$DATABASE_USERNAME"
echo "$DATABASE.PASSWORD=$DATABASE_PASSWORD"
if [ -e $DB2_FILEPATH ]
then
groupadd -g $DB2_INST_GID $DB2_INST_GROUP_NAME
groupadd -g $DB2_INST_FENCED_GID $DB2_INST_FENCED_GROUP_NAME
useradd -u $DB2_INST_UID -g $DB2_INST_GROUP_NAME -M -d $DB2_INST_HOME_DIRECTORY $DB2_INST_NAME
useradd -u $DB2_INST_FENCED_UID -g $DB2_INST_FENCED_GROUP_NAME -M -d $DB2_INST_FENCED_HOME_DIRECTORY $DB2_INST_FENCED_USERNAME
echo $DB2_INST_PASSWORD | passwd --stdin $DB2_INST_NAME
echo $DB2_INST_FENCED_PASSWORD | passwd --stdin $DB2_INST_FENCED_USERNAME
useradd $DATABASE_USERNAME
echo $DATABASE_PASSWORD | passwd --stdin $DATABASE_USERNAME
echo "$DB2_INST_SVCENAME $DB2_INST_PORT_NUMBER/tcp" >> /etc/services
sudo -u $DB2_INST_NAME -i db2start
else
/usr/local/src/expc/db2setup -r $RESPONSE_FILE
useradd $DATABASE_USERNAME
echo $DATABASE_PASSWORD | passwd --stdin $DATABASE_USERNAME
fi
|
miraitechno/docker-db2
|
config/init.sh
|
Shell
|
mit
| 2,986 |
#!/bin/bash
node miubot.js | tee miu-con.log
|
yeputons/MiuBot
|
run.sh
|
Shell
|
mit
| 45 |
#!/bin/bash
# start a monitor on all machines
# -- these monitors will collect data on all machines
# run this script on the machine with HiBench installed
# input is which test to run
# this script deletes all log files at the start;
# the monitors will then continue writing
for i in "$@"
do
case $i in
-t=*|--test=*)
TEST="${i#*=}"
shift # past argument=value
;;
-r=*|--run=*)
RUN="${i#*=}"
shift # past argument=value
;;
-x=*|--x=*)
TYPE="${i#*=}"
shift # past argument=value
;;
*)
# unknown option
;;
esac
done
OUTFILE="HB$TEST-$RUN.result"
START="/root/mon/mon.sh"
GATHER="cat /root/mon/*.log > /root/mon/$OUTFILE"
KILL="/root/mon/killsar.sh"
echo $TYPE
if [ "$TYPE" == "mr" ]
then
TESTPATH="/root/HiBench/workloads/$TEST/mapreduce/bin/run.sh"
else
TESTPATH="/root/HiBench/workloads/$TEST/spark/java/bin/run.sh"
fi
machines=(
172.31.23.33
172.31.27.36
172.31.27.34
172.31.27.33
172.31.27.37
172.31.27.35
)
#echo $TEST
#echo $START
#echo $GATHER
#echo $KILL
#echo $TESTPATH
#Prep for test
echo "Preparing data"
time workloads/$TEST/prepare/prepare.sh
#start monitor on all machines
echo "stating monitor on all machines"
for i in "${machines[@]}"
do
ssh root@$i "rm -f /root/mon/*.log"
ssh root@$i $START
done
sleep 20
# Run Test
echo "Run test"
TIMEFILE="HB$TEST-$TYPE-$RUN.time"
echo "run $TESTPATH"
{ time $TESTPATH ;} 2>./results/$TIMEFILE
sleep 5
for i in "${machines[@]}"
do
OUTFILE="HB$TEST-$i-$TYPE-$RUN.result"
GATHER="cat /root/mon/*.log > /root/mon/$OUTFILE"
ssh root@$i "$KILL"
ssh root@$i "$GATHER"
scp root@$i:/root/mon/$OUTFILE ./results
done
ssh root@${machines[0]} "/root/ephemeral-hdfs/bin/hadoop fs -rmr /HiBench"
|
co-bri/quevedo
|
v1_scripts/run_HB1.sh
|
Shell
|
mit
| 1,789 |
#!/bin/bash
set -e
if [ "$1" = 'honcho' ]; then
chown -R nobody:nogroup $DATA_DIR $OUTPUT_DIR $STATICFILES_DIR
if ! test -f $DATA_DIR/default.sqlite; then
gosu nobody python3 manage.py syncdb --noinput
test -z "$DJANGO_ADMIN_USER" && (echo "DJANGO_ADMIN_USER env variable not set, aborting!"; exit 1)
echo "from django.contrib.auth.models import User;" \
"User.objects.create_superuser(\"$DJANGO_ADMIN_USER\", \"$DJANGO_ADMIN_EMAIL\", \"$DJANGO_ADMIN_PASS\")" | \
gosu nobody python3 manage.py shell
fi
if ! test -f $DATA_DIR/sessions.sqlite; then
gosu nobody python3 manage.py syncdb --database=sessions_db
fi
if ! test -f $DATA_DIR/ogrgeoconverter.sqlite; then
gosu nobody python3 manage.py syncdb --database=ogrgeoconverter_db
gosu nobody python3 manage.py loaddata ogr_formats.json --database=ogrgeoconverter_db
gosu nobody python3 manage.py loaddata global_shell_parameters.json --database=ogrgeoconverter_db
fi
if ! test -f $DATA_DIR/log.sqlite; then
gosu nobody python3 manage.py syncdb --database=ogrgeoconverter_log_db
fi
if ! test -f $DATA_DIR/conversionjobs.sqlite; then
gosu nobody python3 manage.py syncdb --database=ogrgeoconverter_conversion_jobs_db
fi
#python3 manage.py migrate && \
gosu nobody python3 manage.py collectstatic --noinput
eval exec gosu nobody "$@"
fi
exec "$@"
|
geometalab/geoconverter
|
docker/entrypoint.sh
|
Shell
|
mit
| 1,377 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2013:0807
#
# Security announcement date: 2013-05-09 18:18:55 UTC
# Script generation date: 2017-01-01 21:14:38 UTC
#
# Operating System: Red Hat 5
# Architecture: i386
#
# Vulnerable packages fixed in version:
# - hypervkvpd.i686:0-0.7.el5_9.3
# - hypervkvpd-debuginfo.i686:0-0.7.el5_9.3
#
# Last versions recommended by security team:
# - hypervkvpd.i686:0-0.7.el5_9.3
# - hypervkvpd-debuginfo.i686:0-0.7.el5_9.3
#
# CVE List:
# - CVE-2012-5532
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo yum install hypervkvpd.i686-0 -y
sudo yum install hypervkvpd-debuginfo.i686-0 -y
|
Cyberwatch/cbw-security-fixes
|
Red_Hat_5/i386/2013/RHSA-2013:0807.sh
|
Shell
|
mit
| 740 |
#!/bin/bash -xe
cp "$1" _posts/$(date "+%Y-%m-%d-")$(basename "$1")
|
sibson/sibson.github.io
|
publish.sh
|
Shell
|
mit
| 69 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2011:0436
#
# Security announcement date: 2011-04-12 18:24:11 UTC
# Script generation date: 2017-01-01 21:13:05 UTC
#
# Operating System: Red Hat 5
# Architecture: i386
#
# Vulnerable packages fixed in version:
# - avahi.i386:0.6.16-10.el5_6
# - avahi-compat-howl.i386:0.6.16-10.el5_6
# - avahi-compat-libdns_sd.i386:0.6.16-10.el5_6
# - avahi-debuginfo.i386:0.6.16-10.el5_6
# - avahi-glib.i386:0.6.16-10.el5_6
# - avahi-qt3.i386:0.6.16-10.el5_6
# - avahi-tools.i386:0.6.16-10.el5_6
# - avahi-compat-howl-devel.i386:0.6.16-10.el5_6
# - avahi-compat-libdns_sd-devel.i386:0.6.16-10.el5_6
# - avahi-devel.i386:0.6.16-10.el5_6
# - avahi-glib-devel.i386:0.6.16-10.el5_6
# - avahi-qt3-devel.i386:0.6.16-10.el5_6
#
# Last versions recommended by security team:
# - avahi.i386:0.6.16-10.el5_6
# - avahi-compat-howl.i386:0.6.16-10.el5_6
# - avahi-compat-libdns_sd.i386:0.6.16-10.el5_6
# - avahi-debuginfo.i386:0.6.16-10.el5_6
# - avahi-glib.i386:0.6.16-10.el5_6
# - avahi-qt3.i386:0.6.16-10.el5_6
# - avahi-tools.i386:0.6.16-10.el5_6
# - avahi-compat-howl-devel.i386:0.6.16-10.el5_6
# - avahi-compat-libdns_sd-devel.i386:0.6.16-10.el5_6
# - avahi-devel.i386:0.6.16-10.el5_6
# - avahi-glib-devel.i386:0.6.16-10.el5_6
# - avahi-qt3-devel.i386:0.6.16-10.el5_6
#
# CVE List:
# - CVE-2011-1002
# - CVE-2010-2244
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo yum install avahi.i386-0.6.16 -y
sudo yum install avahi-compat-howl.i386-0.6.16 -y
sudo yum install avahi-compat-libdns_sd.i386-0.6.16 -y
sudo yum install avahi-debuginfo.i386-0.6.16 -y
sudo yum install avahi-glib.i386-0.6.16 -y
sudo yum install avahi-qt3.i386-0.6.16 -y
sudo yum install avahi-tools.i386-0.6.16 -y
sudo yum install avahi-compat-howl-devel.i386-0.6.16 -y
sudo yum install avahi-compat-libdns_sd-devel.i386-0.6.16 -y
sudo yum install avahi-devel.i386-0.6.16 -y
sudo yum install avahi-glib-devel.i386-0.6.16 -y
sudo yum install avahi-qt3-devel.i386-0.6.16 -y
|
Cyberwatch/cbw-security-fixes
|
Red_Hat_5/i386/2011/RHSA-2011:0436.sh
|
Shell
|
mit
| 2,134 |
#!/bin/sh
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
set -e
ROOTDIR=dist
BUNDLE="${ROOTDIR}/DeepOnion-Qt.app"
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature-osx.tar.gz
OUTROOT=osx
if [ ! -n "$1" ]; then
echo "usage: $0 <codesign args>"
echo "example: $0 -s MyIdentity"
exit 1
fi
rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
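# Each signed Mach-O binary embeds its signature at an offset/size that
# pagestuff reports; dd carves just those bytes out into a detached .sign file.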
grep -v CodeResources < "${TEMPLIST}" | while read i; do
TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
SIZE=`pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
OFFSET=`pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
SIGNFILE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}.sign"
DIRNAME="`dirname "${SIGNFILE}"`"
mkdir -p "${DIRNAME}"
echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
dd if="$i" of="${SIGNFILE}" bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
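# CodeResources seals are plain files, so they are copied verbatim rather
# than carved out of a binary.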
grep CodeResources < "${TEMPLIST}" | while read i; do
TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
RESOURCE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}"
DIRNAME="`dirname "${RESOURCE}"`"
mkdir -p "${DIRNAME}"
echo "Adding resource for: \"${TARGETFILE}\""
cp "${i}" "${RESOURCE}"
done
rm ${TEMPLIST}
tar -C "${TEMPDIR}" -czf "${OUT}" .
rm -rf "${TEMPDIR}"
echo "Created ${OUT}"
|
deeponion/deeponion
|
contrib/macdeploy/detached-sig-create.sh
|
Shell
|
mit
| 1,533 |
#!/bin/bash
set -eu
COMMIT=0b77c93c69797fcc6aeb44844a2aa8c6f08f331d
GITHUBUSER=wipu
URL=https://raw.githubusercontent.com/$GITHUBUSER/iwant/$COMMIT
HERE=$(dirname "$0")
cd "$HERE/../../.."
fetch() {
local RELPATH=$1
rm -f "$RELPATH"
local PARENT=$(dirname "$RELPATH")
mkdir -p "$PARENT"
cd "$PARENT"
wget "$URL/essential/iwant-entry/as-some-developer/$RELPATH"
cd -
}
fetch with/ant/iw/build.xml
fetch with/bash/iwant/help.sh
chmod u+x with/bash/iwant/help.sh
fetch with/java/org/fluentjava/iwant/entry/Iwant.java
CONF=i-have/conf
mkdir -p "$CONF"
echo "iwant-from=https://github.com/$GITHUBUSER/iwant/archive/$COMMIT.zip" > "$CONF/iwant-from"
|
wipu/iwant
|
essential/iwant-entry/as-some-developer/with/bash/iwant/iwant.sh
|
Shell
|
mit
| 677 |
export DYLD_FRAMEWORK_PATH=../_mac
cc -I../_mac/SDL2.framework/Headers -I../_mac/SDL2_ttf.framework/Headers -F../_mac -framework SDL2 -framework SDL2_ttf -o tet tet.c && ./tet
|
superjer/tinyc.games
|
tet/run-mac.sh
|
Shell
|
mit
| 177 |
#!/bin/bash
#
# Simple Server Manager
#
# Script to load Main config, in line
#
# Parameter: config id
#
|
AndyPuettmann/sm_manager
|
core/config_load.sh
|
Shell
|
mit
| 119 |
NAME="le-tweeting"
FOLDER="application"
PACKAGE="$NAME.zip"
BUILD="build"
echo "# Building $NAME package"
mkdir $BUILD
mkdir $BUILD/dist
mkdir $BUILD/$FOLDER
echo "# Get source (local)"
cp dist/manifest.xml $BUILD/dist
cp dist/installer.php $BUILD/dist
cp dist/prepend.php $BUILD/dist
cp index.php $BUILD/$FOLDER
cp twitter.php $BUILD/$FOLDER
cp -R css $BUILD/$FOLDER
cp -R js $BUILD/$FOLDER
cp -R views $BUILD/$FOLDER
cp icon.png $BUILD/$FOLDER
cp ld-icon.png $BUILD/$FOLDER
# Remove some unwanted files (mac)
find . -name '*.DS_Store' -type f -delete
echo "# Packing $PACKAGE"
cd $BUILD
zip -r $PACKAGE $FOLDER dist -x \*.svn/\* \*.preserve
mv $PACKAGE ..
cd ..
# Clean
rm -rf $BUILD
|
ladistribution/le-tweeting
|
build.sh
|
Shell
|
mit
| 691 |
#!/bin/bash
if [[ -z "$SYMFONY_SECRET" ]]; then echo "SYMFONY_SECRET is unset" && exit 1; fi
if [[ -z "$MAILER_HOST" ]]; then echo "MAILER_HOST is unset" && exit 1; fi
if [[ -z "$MAILER_USER" ]]; then echo "MAILER_USER is unset" && exit 1; fi
if [[ -z "$MAILER_PASSWORD" ]]; then echo "MAILER_PASSWORD is unset" && exit 1; fi
if [[ -z "$APP_DOMAIN" ]]; then APP_DOMAIN='search-awesome.com'; fi
if [[ -z "$MAILER_TRANSPORT" ]]; then MAILER_TRANSPORT='smtp'; fi
if [[ -z "$MAILER_PORT" ]]; then MAILER_PORT=465; fi
if [[ -z "$MAILER_ENCRYPTION" ]]; then MAILER_ENCRYPTION='ssl'; fi
if [[ -z "$MONGO_DATABASE" ]]; then MONGO_DATABASE='search_awesome_app'; fi
if [[ -z "$RECAPTCHA_PUBLIC_KEY" ]]; then echo "RECAPTCHA_PUBLIC_KEY is unset" && exit 1; fi
if [[ -z "$RECAPTCHA_PRIVATE_KEY" ]]; then echo "RECAPTCHA_PRIVATE_KEY is unset" && exit 1; fi
if [[ -z "$RECAPTCHA_SESSION_KEY" ]]; then RECAPTCHA_SESSION_KEY='_recaptcha_valid'; fi
if [[ -z "$DELIVERY_ADDRESS" ]]; then DELIVERY_ADDRESS='null'; fi
sed -i "s@secret: ThisTokenIsNotSoSecretChangeIt@secret: ${SYMFONY_SECRET}@" /www/app/config/parameters_base.yml
sed -i "s@mailer_transport: smtp@mailer_transport: ${MAILER_TRANSPORT}@" /www/app/config/parameters_base.yml
sed -i "s@mailer_host: 127.0.0.1@mailer_host: ${MAILER_HOST}@" /www/app/config/parameters_base.yml
sed -i "s@mailer_user: null@mailer_user: ${MAILER_USER}@" /www/app/config/parameters_base.yml
sed -i "s@mailer_password: null@mailer_password: ${MAILER_PASSWORD}@" /www/app/config/parameters_base.yml
sed -i "s@mailer_encryption: ssl@mailer_encryption: ${MAILER_ENCRYPTION}@" /www/app/config/parameters_base.yml
sed -i "s@mailer_port: 465@mailer_port: ${MAILER_ENCRYPTION}@" /www/app/config/parameters_base.yml
sed -i "s@app_domain: null@app_domain: ${APP_DOMAIN}@" /www/app/config/parameters_base.yml
sed -i "s@mongo_database: search_awesome_app@mongo_database: ${MONGO_DATABASE}@" /www/app/config/parameters_base.yml
sed -i "s@delivery_address: null@delivery_address: ${DELIVERY_ADDRESS}@" /www/app/config/parameters_base.yml
sed -i "s@recaptcha_public_key: null@recaptcha_public_key: ${RECAPTCHA_PUBLIC_KEY}@" /www/app/config/parameters_base.yml
sed -i "s@recaptcha_private_key: null@recaptcha_private_key: ${RECAPTCHA_PRIVATE_KEY}@" /www/app/config/parameters_base.yml
sed -i "s@recaptcha_session_key: _recaptcha_valid@recaptcha_session_key: ${RECAPTCHA_SESSION_KEY}@" /www/app/config/parameters_base.yml
cp /www/app/config/parameters_base.yml /www/app/config/parameters.yml
# run HHVM
cd /www/web
sudo -u www-data hhvm --mode server -vServer.Port=9000 -vServer.Type=fastcgi
|
CodeLoversAt/docker-search-awesome-hhvm
|
run.sh
|
Shell
|
mit
| 2,609 |
#!/bin/bash
ln -sv "$(pwd)/bash_profile" ~/.bash_profile
ln -sv "$(pwd)/bashrc" ~/.bashrc
ln -sv "$(pwd)/zprofile" ~/.zprofile
ln -sv "$(pwd)/zshrc" ~/.zshrc
ln -sv "$(pwd)/tmux.conf" ~/.tmux.conf
ln -sv "$(pwd)/gitconfig" ~/.gitconfig
ln -sv "$(pwd)/gitignore" ~/.gitignore
ln -sv "$(pwd)/ideavimrc" ~/.ideavimrc
mkdir -v ~/.ssh && chmod 700 ~/.ssh
ln -sv "$(pwd)/ssh/config" ~/.ssh/config
mkdir -v ~/.gnupg && chmod 700 ~/.gnupg
ln -sv "$(pwd)/gnupg/gpg-agent.conf" ~/.gnupg/gpg-agent.conf
mkdir -v ~/.clojure
ln -sv "$(pwd)/clojure/deps.edn" ~/.clojure/deps.edn
mkdir -pv ~/.config/karabiner
ln -sv "$(pwd)/config/karabiner/karabiner.json" ~/.config/karabiner/karabiner.json
[ ! -d ~/Org ] && ln -sv ~/Library/Mobile\ Documents/com~apple~CloudDocs/Org ~/Org
|
rynffoll/envy
|
scripts/dotfiles.sh
|
Shell
|
mit
| 771 |
#!/bin/bash
KEYWORDS_MANGANESE="Manganese"
KEYWORDS_TECHNETIUM="Technetium"
KEYWORDS_RHENIUM="Rhenium"
KEYWORDS_BOHRIUM="Bohrium"
KEYWORDS_GROUP7_ELEMENTS="$KEYWORDS_MANGANESE|$KEYWORDS_TECHNETIUM|$KEYWORDS_RHENIUM|$KEYWORDS_BOHRIUM"
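# With no arguments, grep the new-pages list for each element name and hand
# the hits to categorize; debug_start/debug_end/categorize and $NEWPAGES are
# expected to be provided by the calling framework.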
if [ "$1" == "" ];
then
debug_start "Group 7 elements"
MANGANESE=$(egrep -i "$KEYWORDS_MANGANESE" "$NEWPAGES")
TECHNETIUM=$(egrep -i "$KEYWORDS_TECHNETIUM" "$NEWPAGES")
RHENIUM=$(egrep -i "$KEYWORDS_RHENIUM" "$NEWPAGES")
BOHRIUM=$(egrep -i "$KEYWORDS_BOHRIUM" "$NEWPAGES")
categorize "MANGANESE" "Manganese"
categorize "TECHNETIUM" "Technetium"
categorize "RHENIUM" "Rhenium"
categorize "BOHRIUM" "Bohrium"
debug_end "Group 7 elements"
fi
|
MW-autocat-script/MW-autocat-script
|
catscripts/Science/Chemistry/Elements/Group7.sh
|
Shell
|
mit
| 700 |
#!/usr/bin/env bash
trap 'dump_ERROR_LOG' ERR
set -o pipefail
msg "INSTALL" "CMAKE"
info "CC: ${CXX} | CXX: ${CXX} | CXXFLAGS: ${CXXFLAGS} | CFLAGS: ${CFLAGS} | LDFLAGS: ${LDFLAGS}"
POOL_DIR=${POOL:?"Need to set POOL to a non-empty path"}
POOL_JOBS_=${POOL_JOBS:?"Need to set POOL to number > 0"}
SRC_BASE=/tmp/src
BUILD_BASE=/tmp/build
PREFIX=${POOL_DIR}
DEBUG_PREFIX=${POOL_DIR}/debug
CMAKE_SRC=${SRC_BASE}/cmake
CMAKE_BUILD=${BUILD_BASE}/cmake
cd ${POOL_DIR}
mkdir -p ${SRC_BASE}
mkdir -p ${BUILD_BASE}
rm -rf ${CMAKE_SRC} || true
msg_ne "INFO" "Cloning CMake git repositories...\r"
export ERROR_LOG="git.log"
git clone --depth 1 https://cmake.org/cmake.git ${CMAKE_SRC} >${ERROR_LOG} 2>&1
unset ERROR_LOG
info "Cloning CMake git repositories... done."
# Normal build
rm -rf ${CMAKE_BUILD}
mkdir ${CMAKE_BUILD}
cd ${CMAKE_BUILD}
msg_ne "INFO" "Configuring CMake... (configure)\r"
export ERROR_LOG="configure.log"
cmake ${CMAKE_SRC} -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=${PREFIX} \
>${ERROR_LOG} 2>&1
unset ERROR_LOG
info "Configuring CMake... done."
msg_ne "INFO" "Building CMake...\r"
export ERROR_LOG="make.log"
make ${MAKE_OUTPUT_SYNC} -j ${POOL_JOBS} >${ERROR_LOG} 2>&1
unset ERROR_LOG
info "Building CMake... done."
msg_ne "INFO" "Installing CMake...\r"
export ERROR_LOG="make_install.log"
make install ${MAKE_OUTPUT_SYNC} -j ${POOL_JOBS} >${ERROR_LOG} 2>&1
unset ERROR_LOG
info "Installing CMake... done."
rm -rf ${CMAKE_BUILD}
rm -rf ${CMAKE_SRC}
cd ${PREFIX}
msg "DONE" "CMAKE installed!"
|
gnzlbg/unix_env
|
pool/install/cmake.sh
|
Shell
|
mit
| 1,543 |
#!/bin/bash
set -eu
CACHE_DIRECTORY=~/cache
mkdir -p "$CACHE_DIRECTORY";
DOCKER_IMAGES_FILE=$CACHE_DIRECTORY/dockerImages.tar
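# Restore pre-pulled images from the CI cache when the tarball exists;
# otherwise pull them and save one combined tar so the next build can skip
# the registry entirely.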
if [[ -e "$DOCKER_IMAGES_FILE" ]]; then
echo "Restoring docker images from $DOCKER_IMAGES_FILE"
docker load -i "$DOCKER_IMAGES_FILE"
else
echo "Pulling docker images"
docker pull ansi/mosquitto
docker pull node:4.4.2
echo "Saving docker images to $DOCKER_IMAGES_FILE"
docker save \
ansi/mosquitto \
node:4.4.2 \
> "$DOCKER_IMAGES_FILE"
fi
|
CanTireInnovations/mqtt-lambda
|
circle/cacheDockerImages.sh
|
Shell
|
mit
| 526 |
/tools/terrier-4.0/bin/trec_terrier.sh -i \
  -Dcollection.spec=/Volumes/ext/data/clef2015.collection.spec \
  -Dterrier.index.path=/Users/team1/index \
  -Dtermpipelines=Stopwords,PorterStemmer \
  -Dtrec.collection.class=SimpleFileCollection \
  -Dindexer.meta.forward.keys=filename \
  -Dindexer.meta.forward.keylens=512 \
  -Dindexing.simplefilecollection.extensionsparsers=html:TaggedDocument \
  -Dindexer.meta.reverse.keys=filename \
  -Dindexer.meta.reverse.keylens=512
|
lzfelix/inb344
|
project/tools/index_terrier.sh
|
Shell
|
mit
| 442 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2013:1090
#
# Security announcement date: 2013-07-17 19:24:14 UTC
# Script generation date: 2017-01-01 21:14:48 UTC
#
# Operating System: Red Hat 5
# Architecture: i386
#
# Vulnerable packages fixed in version:
# - ruby.i386:1.8.5-31.el5_9
# - ruby-debuginfo.i386:1.8.5-31.el5_9
# - ruby-docs.i386:1.8.5-31.el5_9
# - ruby-irb.i386:1.8.5-31.el5_9
# - ruby-libs.i386:1.8.5-31.el5_9
# - ruby-rdoc.i386:1.8.5-31.el5_9
# - ruby-ri.i386:1.8.5-31.el5_9
# - ruby-tcltk.i386:1.8.5-31.el5_9
# - ruby-devel.i386:1.8.5-31.el5_9
# - ruby-mode.i386:1.8.5-31.el5_9
#
# Last versions recommended by the security team:
# - ruby.i386:1.8.5-31.el5_9
# - ruby-debuginfo.i386:1.8.5-31.el5_9
# - ruby-docs.i386:1.8.5-31.el5_9
# - ruby-irb.i386:1.8.5-31.el5_9
# - ruby-libs.i386:1.8.5-31.el5_9
# - ruby-rdoc.i386:1.8.5-31.el5_9
# - ruby-ri.i386:1.8.5-31.el5_9
# - ruby-tcltk.i386:1.8.5-31.el5_9
# - ruby-devel.i386:1.8.5-31.el5_9
# - ruby-mode.i386:1.8.5-31.el5_9
#
# CVE List:
# - CVE-2013-4073
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo yum install ruby.i386-1.8.5 -y
sudo yum install ruby-debuginfo.i386-1.8.5 -y
sudo yum install ruby-docs.i386-1.8.5 -y
sudo yum install ruby-irb.i386-1.8.5 -y
sudo yum install ruby-libs.i386-1.8.5 -y
sudo yum install ruby-rdoc.i386-1.8.5 -y
sudo yum install ruby-ri.i386-1.8.5 -y
sudo yum install ruby-tcltk.i386-1.8.5 -y
sudo yum install ruby-devel.i386-1.8.5 -y
sudo yum install ruby-mode.i386-1.8.5 -y
|
Cyberwatch/cbw-security-fixes
|
Red_Hat_5/i386/2013/RHSA-2013:1090.sh
|
Shell
|
mit
| 1,625 |
# remove fat-client
[[ -d /opt/chef ]] && rm -rf /opt/chef
# drop symlink
[[ -f /opt/local/bin/chef-solo ]] && rm -f /opt/local/bin/chef-solo
|
tylerflint/caterer
|
lib/templates/provisioner/chef_solo/uninstall/smartos.sh
|
Shell
|
mit
| 142 |
#! /bin/sh
#
# This script generates configuration directives for runover.
# The machine script. This can probably be left alone.
echo "machinescript /etc/runover/machine-script.sh"
# If we can detect that we are running under DQS or another batch
# system, create a "jobname" directive to pass this information to
# runover.
|
RPI-HPC/runover
|
config-script.sh
|
Shell
|
mit
| 331 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DLA-742-1
#
# Security announcement date: 2016-12-13 00:00:00 UTC
# Script generation date: 2017-01-01 21:09:27 UTC
#
# Operating System: Debian 7 (Wheezy)
# Architecture: x86_64
#
# Vulnerable packages fixed in version:
# - chrony:1.24-3.1+deb7u4
#
# Last versions recommended by the security team:
# - chrony:1.24-3.1+deb7u4
#
# CVE List:
# - CVE-2016-1567
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade chrony=1.24-3.1+deb7u4 -y
|
Cyberwatch/cbw-security-fixes
|
Debian_7_(Wheezy)/x86_64/2016/DLA-742-1.sh
|
Shell
|
mit
| 613 |
#!/bin/bash
# {{ ansible_managed }}
MEDIAHOME="{{ media_home }}"
function debug_out {
[ ! -z "$VERBOSE" ] && echo $@
}
while getopts d:hv flag
do
case "${flag}" in
d) MEDIAHOME=${OPTARG};;
h) SHOWHELP=Help;;
v) VERBOSE=Yes
esac
done
if [[ ! -z $SHOWHELP ]];
then
echo "$0 [-d DIRECTORY]"
echo "Sync non-TV shows to their own libraries"
echo "Usage:"
echo " -h - This"
echo " -d - Directory to start at. Defaults to {{ media_home }}"
echo " -v - Be noisy about it"
exit 4
fi
if [[ -z $MEDIAHOME ]]
then
echo "Directory not set"
exit 5
fi;
if [[ ! -d $MEDIAHOME ]]
then
echo "Directory not found at $MEDIAHOME"
exit 5
fi;
debug_out "Hello $MEDIAHOME"
for dir in $MEDIAHOME/*;
do
if [[ $dir == $MEDIAHOME/TV ]];
then
debug_out "Ignoring TV dir";
continue;
fi;
if [[ $dir == $MEDIAHOME/Music ]];
then
debug_out "Ignoring Music dir";
continue;
fi;
debug_out "Process $dir";
find $dir -maxdepth 1 -mindepth 1 -type d | while read showpath;
do
show=`basename "$showpath"`
# debug_out " - Is there a " $show " in TV?";
if [[ -d "$MEDIAHOME/TV/$show" ]];
then
debug_out " - Syncing " TV/$show " -> " $dir/$show
rsync -aWhv --progress --remove-source-files "$MEDIAHOME/TV/$show" "$dir/"
find "$MEDIAHOME/TV/$show" -type d -exec rmdir --ignore-fail-on-non-empty "{}" \;
# else
# debug_out " - Nope. Moving on..."
fi
done
done
|
aquarion/autopelago
|
roles/water-gkhs-filebot/templates/sync_or_swim.sh
|
Shell
|
mit
| 1,577 |
mkdir -p data/biorxiv_altmetric
SUFFIX=`cat data/biorxiv_doi_urls.tsv | cut -f 5 -d /`
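# Field 5 of each DOI URL is the bioRxiv suffix; fetch its Altmetric record,
# pausing 0.5 s between requests to stay polite to the API.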
for DOI in $SUFFIX
do
curl -o data/biorxiv_altmetric/$DOI.json http://api.altmetric.com/v1/doi/10.1101/$DOI?key=992a953baa805c06d19db4c2b6bb1348
sleep 0.5s
done
|
SchlossLab/Schloss_PrePrints_mBio_2017
|
code/get_biorxiv_altmetric.sh
|
Shell
|
mit
| 252 |
for i in /mnt/data3/fan/rpf/April2016_ana/oid_html/fasta_files/metagenomes/*0
#for i in /mnt/data3/fan/rpf/April2016_ana/oid_html/fasta_files/test/*
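# Run hmmsearch with the SSU HMM against every GenBank file in each
# metagenome directory, writing one report per input file.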
do cd $i
for j in *.gbk
do hmmsearch /mnt/data3/fan/rpf/April2016_ana/Hmm.ssu.hmm $j > $j.hmmsearch.txt
done
done
|
fandemonium/code
|
seq_util/hmmsearch_command.sh
|
Shell
|
mit
| 269 |
# java -Djava.ext.dirs=jars clojure.main src/redwine.clj
# java -DDEBUG -cp $PWD -Djava.ext.dirs=jars clojure.main src/redwine/server.clj
JAVA=/System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home/bin/java
EXT="$(find $PWD/jars -mindepth 1 -maxdepth 1 -print0 | tr \\0 \:)"
CP=$EXT$PWD
CMD="$JAVA -cp $CP -DDEBUG clojure.main src/redwine/jetty.clj"
echo $CMD
$CMD
|
lancecarlson/redwine
|
run.sh
|
Shell
|
mit
| 373 |
# This file is used to generate Github releases, please ignore it.
version='v2.4.5'
name='js-stack-boilerplate-no-services-'$version
dir='dist/'$name
rm -rf dist
mkdir dist
mkdir $dir
cp -R public $dir
cp -R src $dir
cp .babelrc $dir
cp .eslintrc.json $dir
cp .flowconfig $dir
cp .gitignore $dir
cp package.json $dir
cp webpack.config.babel.js $dir
cp yarn.lock $dir
cd dist
zip -r -X $name'.zip' $name
rm -rf $name
|
sbs863/SBS.com
|
release.sh
|
Shell
|
mit
| 421 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for CESA-2012:0544
#
# Security announcement date: 2012-05-07 22:49:25 UTC
# Script generation date: 2017-01-01 21:10:22 UTC
#
# Operating System: CentOS 6
# Architecture: x86_64
#
# Vulnerable packages fixed in version:
# - ImageMagick.i686:6.5.4.7-6.el6_2
# - ImageMagick-devel.i686:6.5.4.7-6.el6_2
# - ImageMagick.x86_64:6.5.4.7-6.el6_2
# - ImageMagick-devel.x86_64:6.5.4.7-6.el6_2
# - ImageMagick-doc.x86_64:6.5.4.7-6.el6_2
# - ImageMagick-perl.x86_64:6.5.4.7-6.el6_2
#
# Last versions recommended by the security team:
# - ImageMagick.i686:6.7.2.7-5.el6_8
# - ImageMagick-devel.i686:6.7.2.7-5.el6_8
# - ImageMagick.x86_64:6.7.2.7-5.el6_8
# - ImageMagick-devel.x86_64:6.7.2.7-5.el6_8
# - ImageMagick-doc.x86_64:6.7.2.7-5.el6_8
# - ImageMagick-perl.x86_64:6.7.2.7-5.el6_8
#
# CVE List:
# - CVE-2010-4167
# - CVE-2012-0247
# - CVE-2012-0248
# - CVE-2012-0259
# - CVE-2012-0260
# - CVE-2012-1798
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo yum install ImageMagick.i686-6.7.2.7 -y
sudo yum install ImageMagick-devel.i686-6.7.2.7 -y
sudo yum install ImageMagick.x86_64-6.7.2.7 -y
sudo yum install ImageMagick-devel.x86_64-6.7.2.7 -y
sudo yum install ImageMagick-doc.x86_64-6.7.2.7 -y
sudo yum install ImageMagick-perl.x86_64-6.7.2.7 -y
|
Cyberwatch/cbw-security-fixes
|
CentOS_6/x86_64/2012/CESA-2012:0544.sh
|
Shell
|
mit
| 1,832 |
#!/bin/bash
if [ -n "$1" ]
then
destination="$1"
else
destination="`dirname \"$0\"`/../web_copy"
fi
destination="`realpath \"$destination\"`"
source="`dirname \"$0\"`"
source="`realpath \"$source\"/../web`"
echo "Copying files from \"$source\" to \"$destination\"."
IFS=$'\n'
cd "$source"
mkdir -p "$destination"
for file in `find -L -regextype posix-extended -regex '\.(/[^/.][^/]*)*' 2>>/dev/null`
do
echo "$file"
if [ -d "$file" ]
then
mkdir -p "$destination/$file"
else
# assuming file
cp -T "$file" "$destination/$file"
fi
done
|
cdpoffline/offline-material
|
bin/create_copy_of_web_folder.sh
|
Shell
|
mit
| 566 |
psql -U postgres -c 'create database lynx_test;'
psql -U postgres -d lynx_test -f tests/schemas/pqsql/lynx_test.sql
|
lynx/lynx
|
tests/ci/pqsql.sh
|
Shell
|
mit
| 116 |
#!/bin/sh
set -ev
cd ..
git clone https://github.com/rodjek/rspec-puppet.git
cd rspec-puppet
bundle install
gem build rspec-puppet.gemspec
gem install rspec-puppet-*.gem
cd ..
git clone https://github.com/rodjek/puppet-lint.git
cd puppet-lint
bundle install
gem build puppet-lint.gemspec
gem install puppet-lint-*.gem
cd ../puppet-redis
|
gsick/puppet-redis
|
fix_future_test.sh
|
Shell
|
mit
| 340 |
#!/bin/bash
readonly EXIT_FAILURE=1
function create_soft_links() {
local -r SCRIPT="$(readlink -f "${0}")"
local -r HOME_FILES="$(dirname "${SCRIPT}")/home"
local SRC
local DST
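  # The glob {.[!.],}* matches both dotfiles (excluding . and ..) and plain
  # names in home/; unmatched patterns are filtered out by the "*" test below.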
for SRC in "${HOME_FILES}"/{.[!.],}*
do
if [[ "${SRC: -1}" != "*" ]] ; then
DST="${HOME}/$(basename "${SRC}")"
# Remove existing symbolic links.
if [ -h "${DST}" ] ; then
if ! unlink "${DST}" ; then
exit "${EXIT_FAILURE}"
fi
fi
# Remove existing destination files.
if [ -f "${DST}" ] ; then
if ! rm -f "${DST}" ; then
exit "${EXIT_FAILURE}"
fi
fi
# Create symbolic link to file.
echo "${DST} -> ${SRC}"
if ! ln -s "${SRC}" "${DST}" ; then
exit "${EXIT_FAILURE}"
fi
fi
done
}
function create_vim_directories() {
local -r VIM_ROOT="${HOME}/.vim"
# Create directories used by VIM as specified in .vimrc file.
local -r VIM_DIRS=(
"${VIM_ROOT}/backup"
"${VIM_ROOT}/swap"
"${VIM_ROOT}/undo"
)
for VIM_DIR in "${VIM_DIRS[@]}" ; do
if [[ ! -d "${VIM_DIR}" ]] ; then
echo "Creating ${VIM_DIR}"
if ! mkdir -p "${VIM_DIR}" ; then
exit "${EXIT_FAILURE}"
fi
fi
done
}
function set_custom_login() {
local -r BASHRC="${HOME}/.bashrc"
local -r BASHRC_CUSTOM="${BASHRC}-custom"
local -r VORNERC="${HOME}/.vornerc"
if grep --no-messages --quiet "VORNE_USERNAME" "${BASHRC}" ; then
if ! echo "source ${BASHRC_CUSTOM}" > "${VORNERC}" ; then
exit "${EXIT_FAILURE}"
fi
elif ! grep --no-messages --quiet "${BASHRC_CUSTOM}" "${BASHRC}" ; then
if ! echo "source ${BASHRC_CUSTOM}" >> "${BASHRC}" ; then
exit "${EXIT_FAILURE}"
fi
fi
}
function main() {
create_soft_links
create_vim_directories
set_custom_login
}
main
|
JNiewiadomski/unix
|
setup.sh
|
Shell
|
mit
| 2,092 |
#!/bin/bash
# Argument: the homework number
# Edit these variables with your information
STUDENT_NAME="Student Name"
UTEID="stu1234"
CS_LOGIN="student"
EMAIL="[email protected]"
UNIQUE_NUM="12345"
# Do not edit below this line
HW=$1
if [ $# -lt 1 ]; then
echo 1>&2 "$0: not enough arguments"
exit 2
elif [ $# -gt 1 ]; then
echo 1>&2 "$0: too many arguments"
exit 2
fi
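# Append the standard header block to hw<N>_<unique>_<eid>.txt, creating the
# file if it does not yet exist.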
cat >> hw${HW}_${UNIQUE_NUM}_${UTEID}.txt << EOF
Homework #: $HW
Student Name: $STUDENT_NAME
EID: $UTEID
CS login: $CS_LOGIN
Email address: $EMAIL
Unique Number: $UNIQUE_NUM
EOF
|
pfista/hwgen
|
generate_homework.sh
|
Shell
|
mit
| 570 |
ROOT=~/C/CAA-DEV-APPS-SAMPLE-DATA/S/S4/INVALID_TIMESPAN
cefmdd_v1 -f $ROOT/C2_CQ_STA_CALIBRATION_CAVEATS__20100806_000000_20100806_235959_V170103.cef $1 $2 $3 $4
#cefmdd_v1 -f $ROOT/C4_CP_PEA_MOMENTS_INERT__20010106_000000_20010106_240000_V170103.cef $1 $2 $3 $4
|
caa-dev-apps/cefmdd_v1
|
tests/2017-00-00__v0.0.0/s40.sh
|
Shell
|
mit
| 264 |
#!/bin/bash
# require root
if [ "$(id -u)" != "0" ]; then
echo "This script must be run as root" 1>&2
exit 1
fi
# install necessary 3rd party pkg
echo "inv: updating Ubuntu"
apt-get update &>/dev/null
echo "inv: installing pkgs"
echo -e "$pw\n$pw" | apt-get install apache2 libapache2-mod-wsgi python-dev python-pip -y &>/dev/null
echo "inv: installing python pkgs"
pip install -r requirements.txt &>/dev/null
# setup dir
echo "inv: setting up inv directory"
mkdir server/db server/img server/log server/receipt
touch server/log/access.log server/log/error.log
touch server/db/deploy.db
cp -r `pwd`/server /var/inv
chown -R www-data /var/inv
chgrp -R www-data /var/inv
# setup ssl
echo "inv: setting up ssl"
a2enmod ssl &>/dev/null
mkdir /etc/apache2/ssl
openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout /etc/apache2/ssl/apache.key -out /etc/apache2/ssl/apache.crt
# setup conf
echo "inv: setting up apache conf"
cp conf/inv.conf /etc/apache2/sites-available
echo "<Directory /var/inv/>
Options Indexes FollowSymLinks
AllowOverride None
Require all granted
</Directory>" >> /etc/apache2/apache2.conf
# setup apache
echo "inv: initializing apache"
a2ensite inv &>/dev/null
service apache2 reload
|
briansan/inv
|
setup.sh
|
Shell
|
mit
| 1,235 |
#!/bin/bash
# Install 'nas' script in /usr/local/sbin for use by root/sudo
SCRIPT_FILE="nas"
TARGET_DIR="/usr/local/sbin"
cp -p ${SCRIPT_FILE} ${TARGET_DIR}
chmod a+rx ${TARGET_DIR}/${SCRIPT_FILE}
|
maxwax/nas
|
install.sh
|
Shell
|
mit
| 200 |