| code (string, 2–1.05M chars) | repo_name (string, 5–110 chars) | path (string, 3–922 chars) | language (1 class: Shell) | license (15 classes) | size (int64, 2–1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env bash
thisdir=$(dirname "$0")
PYTHONPATH=$thisdir/../../src epydoc pyflow.WorkflowRunner --no-private -o WorkflowRunner_API_html_doc
|
Illumina/HapMix
|
pyflow/doc/client_api/make_WorkflowRunner_API_html_doc.bash
|
Shell
|
gpl-3.0
| 148 |
sort -k1,1 -k2,2n Peaks_27samples.bed > Peaks_27samples.sorted.bed
bedToGenePred Peaks_27samples.sorted.bed Peaks_27samples.sorted.genePred
genePredToGtf file Peaks_27samples.sorted.genePred Peaks_27samples.sorted.gtf
bgzip Peaks_27samples.sorted.gtf
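# (Optional, hedged addition) the bgzipped GTF can be indexed for fast region
# queries with tabix; the gff preset also handles GTF:
#   tabix -p gff Peaks_27samples.sorted.gtf.gz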
|
CTLife/SomeRecords
|
useQTLtools/v3/bed_to_gtf.sh
|
Shell
|
gpl-3.0
| 282 |
#!/bin/bash
# Source: https://gist.github.com/ryin/3106801
# Script for installing tmux on systems where you don't have root access.
# tmux will be installed in $HOME/local/bin.
# It's assumed that wget and a C/C++ compiler are installed.
# exit on error
set -e
TMUX_VERSION=2.1
LIBEVENT_VERSION=2.0.22
NCURSES_VERSION=6.0
TMUX_ARCHIVE=tmux-${TMUX_VERSION}.tar.gz
LIBEVENT_ARCHIVE=libevent-${LIBEVENT_VERSION}-stable.tar.gz
NCURSES_ARCHIVE=ncurses-${NCURSES_VERSION}.tar.gz
TEMP_DIR=/tmp/tmux_install   # dedicated subdirectory: the cleanup step below removes ${TEMP_DIR} recursively
# create our directories
mkdir -p $HOME/local ${TEMP_DIR}
cd ${TEMP_DIR}
# download source files for tmux, libevent, and ncurses
wget -O ${TMUX_ARCHIVE} --no-check-certificate https://github.com/tmux/tmux/releases/download/${TMUX_VERSION}/${TMUX_ARCHIVE}
wget -O ${LIBEVENT_ARCHIVE} --no-check-certificate https://github.com/libevent/libevent/releases/download/release-${LIBEVENT_VERSION}-stable/${LIBEVENT_ARCHIVE}
wget -O ${NCURSES_ARCHIVE} ftp://ftp.gnu.org/gnu/ncurses/${NCURSES_ARCHIVE}
# extract files, configure, and compile
############
# libevent #
############
tar xvzf ${LIBEVENT_ARCHIVE}
cd libevent-${LIBEVENT_VERSION}-stable
./configure --prefix=$HOME/local --disable-shared
make
make install
cd ..
############
# ncurses #
############
tar xvzf ${NCURSES_ARCHIVE}
cd ncurses-${NCURSES_VERSION}
./configure --prefix=$HOME/local
make
make install
cd ..
############
# tmux #
############
tar xvzf ${TMUX_ARCHIVE}
cd tmux-${TMUX_VERSION}
./configure CFLAGS="-I$HOME/local/include -I$HOME/local/include/ncurses" LDFLAGS="-L$HOME/local/lib -L$HOME/local/include/ncurses -L$HOME/local/include"
CPPFLAGS="-I$HOME/local/include -I$HOME/local/include/ncurses" LDFLAGS="-static -L$HOME/local/include -L$HOME/local/include/ncurses -L$HOME/local/lib" make
cp tmux $HOME/local/bin
cd ..
# cleanup
rm -rf ${TEMP_DIR}
echo "$HOME/local/bin/tmux is now available. You can optionally add $HOME/local/bin to your PATH."
|
tom29739/dotfiles
|
tmux_local_install.sh
|
Shell
|
gpl-3.0
| 1,925 |
#!/bin/bash
rm -f Archaeologist.jar *.class
javac stub.java Archaeologist.java -Xlint:all
jar cfe Archaeologist.jar stub *.class
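# Hedged usage note (not in the original): jar cfe sets "stub" as the entry
# point, so the result can be run with:
#   java -jar Archaeologist.jar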
|
ranaldmiao/sg_noi_archive
|
2021_finals/tasks/archaeologist/attachments/compile_java.sh
|
Shell
|
gpl-3.0
| 129 |
#!/usr/bin/env bash
source ~/.bash_profile
full=n "$PATH_BIN/ํ์ผ๋ช
ํ๋ฒ์ ๊ณ ์น๊ธฐ ํ์ ๋ชจ๋ ์์ญ์์.sh" "$@"
exit
|
Thestar3Preservation/ScriptPack
|
Bash Shell Script/nautilus-scripts/파일명을 한번에 고치기/현재 영역에서만.sh
|
Shell
|
gpl-3.0
| 131 |
#!/bin/bash
# Set IFS so that it won't consider spaces as entry separators. Without this, spaces in file/folder names can make the loop go wacky.
IFS=$'\n'
# See if the Nautilus environment variable is empty
if [ -z "$NAUTILUS_SCRIPT_SELECTED_FILE_PATHS" ]; then
# If it's blank, set it equal to $1
NAUTILUS_SCRIPT_SELECTED_FILE_PATHS=$1
fi
# Loop through the list (from either Nautilus or the command line)
for ARCHIVE_FULLPATH in $NAUTILUS_SCRIPT_SELECTED_FILE_PATHS; do
NEWDIRNAME=${ARCHIVE_FULLPATH%.*}
FILENAME=${ARCHIVE_FULLPATH##*/}
NAME=${FILENAME%.*}   # file name without extension
"/home/$USER/.gnome2/nautilus-scripts/File Processing/Doc-Tools/Convert-To/.batch-convert-documents.sh" -f html -s -c "$ARCHIVE_FULLPATH"
done
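# Illustrative command-line invocation (hedged; relies on the converter
# script existing at the hard-coded path above):
#   ./2Html-To-Subfolder.sh "/path/to/My Document.odt"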
|
kernt/linuxtools
|
gnome3-shell/nautilus-scripts/System/File Processing/Doc-Tools/Convert-To/2Html-To-Subfolder.sh
|
Shell
|
gpl-3.0
| 742 |
#!/usr/bin/env bash
# Copyright 2017 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
#
# This script checks if we changed anything with regard to dependency management
# for our repo and makes sure that it was done in a valid way.
#
# This file is a copy of https://github.com/golang/dep/blob/master/hack/validate-vendor.bash
# with some comments added.
set -e -o pipefail
# Is VALIDATE_UPSTREAM empty?
if [ -z "$VALIDATE_UPSTREAM" ]; then
VALIDATE_REPO='https://github.com/status-im/status-go'
if [ -z "$VALIDATE_BRANCH" ]; then
VALIDATE_BRANCH='develop'
fi
VALIDATE_HEAD="$(git rev-parse --verify HEAD)"
git fetch -q "$VALIDATE_REPO" "refs/heads/$VALIDATE_BRANCH"
VALIDATE_UPSTREAM="$(git rev-parse --verify FETCH_HEAD)"
VALIDATE_COMMIT_DIFF="$VALIDATE_UPSTREAM...$VALIDATE_HEAD"
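# The three-dot range compares HEAD against its merge base with upstream,
# i.e. only the changes introduced on this branch.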
validate_diff() {
if [ "$VALIDATE_UPSTREAM" != "$VALIDATE_HEAD" ]; then
git diff "$VALIDATE_COMMIT_DIFF" "$@"
fi
}
fi
IFS=$'\n'
files=( $(validate_diff --diff-filter=ACMR --name-only -- 'Gopkg.toml' 'Gopkg.lock' 'vendor/' || true) )
unset IFS
# `${files[@]}` expands to the array elements (one per changed file);
# `${#files[@]}` is the number of elements.
if [ ${#files[@]} -gt 0 ]; then
dep ensure -vendor-only
# Let's see if the working directory is clean
diffs="$(git status --porcelain -- vendor Gopkg.toml Gopkg.lock 2>/dev/null)"
if [ "$diffs" ]; then
{
echo 'The contents of vendor differ after "dep ensure":'
echo
echo "$diffs"
echo
echo 'Make sure "dep ensure -vendor-only" has been run before committing.'
echo
} >&2
false
else
echo 'Congratulations! All vendoring changes are done the right way.'
fi
else
echo 'No vendor changes in diff.'
fi
|
status-im/status-go
|
_assets/ci/validate-vendor.sh
|
Shell
|
mpl-2.0
| 1,783 |
#!/usr/bin/env bash
sudo kill -9 `cat /var/run/decaf/vnf_manager_adapterd.pid`
sudo rm -rf /var/run/decaf/vnf_manager_adapterd.pid
|
CN-UPB/OpenBarista
|
scripts/vnf_manager_adapter_kill.sh
|
Shell
|
mpl-2.0
| 132 |
#! /usr/bin/env bash
cat > /etc/apt/sources.list <<EOF
deb http://cn.archive.ubuntu.com/ubuntu/ xenial main restricted universe multiverse
deb http://cn.archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
deb http://cn.archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe multiverse
deb http://cn.archive.ubuntu.com/ubuntu/ xenial-proposed main restricted universe multiverse
deb http://cn.archive.ubuntu.com/ubuntu/ xenial-backports main restricted universe multiverse
deb-src http://cn.archive.ubuntu.com/ubuntu/ xenial main restricted universe multiverse
deb-src http://cn.archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
deb-src http://cn.archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe multiverse
deb-src http://cn.archive.ubuntu.com/ubuntu/ xenial-proposed main restricted universe multiverse
deb-src http://cn.archive.ubuntu.com/ubuntu/ xenial-backports main restricted universe multiverse
deb http://repo.saltstack.com/apt/ubuntu/16.04/amd64/latest xenial main
EOF
# export http{,s}_proxy=http://10.94.97.161:8080/
wget -O - https://repo.saltstack.com/apt/ubuntu/16.04/amd64/latest/SALTSTACK-GPG-KEY.pub | apt-key add -
apt-get update
apt-get install -y python python-pip salt-minion
cat > /etc/salt/minion.d/minion.conf <<EOF
master: 192.168.88.101
EOF
echo minion > /etc/salt/minion_id
systemctl restart salt-minion
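# Hedged follow-up (not in the original): on the master, accept this
# minion's key with: salt-key -a minion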
|
xuguruogu/go-salt
|
salt-minion.sh
|
Shell
|
mpl-2.0
| 1,419 |
#!/bin/bash
CONFIG=`find . -type d -name ".bottlenose"`
COUNT=`echo $CONFIG | wc -w`
if [[ $COUNT -eq 0 ]]
then
echo "Falling back to old rule: Make exactly one Makefile"
MAKEFILE=`find . -type f -name "Makefile"`
MF_COUNT=`echo $MAKEFILE | wc -w`
if [[ $MF_COUNT -eq 1 ]]
then
(cd `dirname $MAKEFILE` && make)
HAX=`find . -type d -name ".bottlenose" | wc -l`
if [[ $HAX -gt 0 ]]
then
echo "Found a new .bottlenose directory in the student submission."
echo "HAX!"
exit 1
fi
else
echo "Bad submissions state - testing directory has multiple Makefiles:"
echo $MAKEFILE
fi
elif [[ $COUNT -eq 1 ]]
then
if [[ -e "$CONFIG/build" ]]
then
(cd `dirname $CONFIG` && .bottlenose/build `readlink -f ..`/sub.tar.gz)
fi
else
echo "Found too many .bottlenose directories:"
echo
echo $CONFIG
echo
echo "Skipping build step."
fi
|
marksherman/bottlenose
|
sandbox/scripts/build-assignment.sh
|
Shell
|
agpl-3.0
| 977 |
#!/bin/bash
## smartITSM Demo System
## Copyright (C) 2014 synetics GmbH <http://www.smartitsm.org/>
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
## Base Library
##
## Includes shell script.
## $1 Path to file
function includeShellScript {
loginfo "Including shell script..."
local file="$1"
if [ ! -r "$file" ]; then
logwarning "File '${file}' does not exist or is not readable."
logerror "Cannot include shell script."
return 1
fi
source "$1"
logdebug "File '${file}' included."
return 0
}
## Executes command.
## $1 Command
function exe {
logdebug "Executing command..."
logdebug "Execute '${1}'"
let "relevant = (($LOG_DEBUG & $VERBOSITY))"
if [ "$relevant" -gt 0 ]; then
eval $1
local status="$?"
else
logdebug "Suppress output."
eval $1 &> /dev/null
local status="$?"
fi
return $status
}
## Logs events to standard output and log file.
## $1 Log level
## $2 Log message
function log {
local level=""
case "$1" in
"$LOG_DEBUG") level="debug";;
"$LOG_INFO") level="info";;
"$LOG_NOTICE") level="notice";;
"$LOG_WARNING") level="warning";;
"$LOG_ERROR") level="error";;
"$LOG_FATAL") level="fatal";;
*) logwarning $"Unknown log event triggered.";;
esac
let "relevant = (($1 & $LOG_LEVEL))"
if [ "$relevant" -gt 0 ]; then
echo "[$level] $2" >> "$LOG_FILE"
fi
let "relevant = (($1 & $VERBOSITY))"
if [ "$relevant" -gt 0 ]; then
prntLn "[$level] $2"
fi
}
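## Example (assuming the LOG_* constants are distinct bit flags): with
## VERBOSITY=$((LOG_WARNING | LOG_ERROR | LOG_FATAL)) only those events are
## printed, while LOG_LEVEL independently filters what reaches the log file.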
# Logs debug message
## $1 Log message
function logdebug {
log "$LOG_DEBUG" "$1"
}
# Logs info message
function loginfo {
log "$LOG_INFO" "$1"
}
# Logs notice message
## $1 Log message
function lognotice {
log "$LOG_NOTICE" "$1"
}
# Logs warning message
## $1 Log message
function logwarning {
log "$LOG_WARNING" "$1"
}
# Logs error message
## $1 Log message
function logerror {
log "$LOG_ERROR" "$1"
}
# Logs fatal message
## $1 Log message
function logfatal {
log "$LOG_FATAL" "$1"
}
## Calculates spent time.
function calculateSpentTime {
loginfo "Calculating spent time..."
local now=`date +%s`
local sec=`expr $now - $START`
local duration=""
local div=0
if [ "$sec" -ge 3600 ]; then
div=`expr "$sec" \/ 3600`
sec=`expr "$sec" - "$div" \* 3600`
if [ "$div" = 1 ]; then
duration="$div hour"
elif [ "$div" -gt 1 ]; then
duration="$div hours"
fi
fi
if [ "$sec" -ge 60 ]; then
if [ -n "$duration" ]; then
duration="$duration and "
fi
div=`expr "$sec" \/ 60`
sec=`expr "$sec" - "$div" \* 60`
if [ "$div" = 1 ]; then
duration="${duration}${div} minute"
elif [ "$div" -gt 1 ]; then
duration="${duration}${div} minutes"
fi
fi
if [ "$sec" -ge 1 ]; then
if [ -n "$duration" ]; then
duration="$duration and "
fi
duration="${duration}${sec} second"
if [ "$sec" -gt 1 ]; then
duration="${duration}s"
fi
fi
if [ -z "$duration" ]; then
duration="0 seconds"
fi
logdebug "Spent time calculated."
lognotice "Everything done after ${duration}. Exiting."
return 0
}
## Runs clean finishing
function finishing {
loginfo "Finishing operation..."
calculateSpentTime
logdebug "Exit code: 0"
exit 0
}
## Runs clean abortion
## $1 Exit code
function abort {
loginfo "Aborting operation..."
calculateSpentTime
logdebug "Exit code: $1"
logfatal "Operation failed."
exit $1
}
## Prints line to standard output
## $1 string
function prntLn {
echo -e "$1" 1>&2
return 0
}
## Prints line without trailing new line to standard output
## $1 string
function prnt {
echo -e -n "$1" 1>&2
return 0
}
## Prints some information about this script
function printVersion {
loginfo "Printing some information about this script..."
prntLn "$PROJECT_SHORT_DESC $PROJECT_VERSION"
prntLn "Copyright (C) 2014 $PROJECT_COPYRIGHT"
prntLn "This program comes with ABSOLUTELY NO WARRANTY."
prntLn "This is free software, and you are welcome to redistribute it"
prntLn "under certain conditions. Type '--license' for details."
logdebug "Information printed."
return 0
}
## Prints license information
function printLicense {
loginfo "Printing license information..."
logdebug "Look for license text..."
licenses[0]="${BASE_DIR}/COPYING"
licenses[1]="/usr/share/common-licenses/AGPL-3"
licenses[2]="/usr/share/doc/licenses/agpl-3.0.txt"
for i in "${licenses[@]}"; do
if [ -r "$i" ]; then
logdebug "License text found under '${i}'."
cat "$i" 1>&2
logdebug "License information printed."
return 0
fi
done
logwarning "Cannot find any fitting license text on this system."
logerror "Failed to print license. But it's the AGPL3+."
return 1
}
|
bheisig/smartitsm
|
lib/base.sh
|
Shell
|
agpl-3.0
| 5,759 |
#!/bin/bash
# Copyright (C) 2014 - Sebastien Alaiwan
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
function opus_build {
host=$1
pushDir $WORK/src
lazy_git_clone "https://github.com/xiph/opus.git" opus
mkdir -p opus/build/$host
pushDir opus
./autogen.sh
popDir
mkdir -p opus/build/$host
pushDir opus/build/$host
../../configure \
--enable-static \
--disable-shared \
--host=$host \
--prefix=$PREFIX/$host
$MAKE
$MAKE install
popDir
popDir
}
function opus_get_deps {
echo sodium
}
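# Hypothetical invocation inside the zenbuild framework (the host triplet is
# only an example): opus_build x86_64-w64-mingw32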
|
Ace17/zenbuild
|
zen-opus.sh
|
Shell
|
agpl-3.0
| 1,190 |
#!/bin/bash
# (pushd/popd below are bash built-ins, so /bin/sh is not sufficient)
PREFIX=/gm
BUILD_INFO="`pwd`/build.info"
WAR_DIR="${PREFIX}/web/vhost/swdouglass.com/webapps"
NAME="joid-swd"
SWD_WAR="`pwd`/dist/${NAME}.war"
PATCHES="`pwd`/deploy"
PRIVATE="`pwd`/private"
pushd ${WAR_DIR}
if [ -d "$NAME" ]; then
rm -rf "$NAME"
rm -f ${PREFIX}/etc/tomcat-vhost/Catalina/swdouglass/${NAME}.xml
fi
mkdir "$NAME" && pushd "$NAME"
cp $BUILD_INFO .
jar xf $SWD_WAR
if [ -d "$PATCHES" ]; then
for PATCH in $PATCHES/*.patch; do
patch -p1 < $PATCH
done
fi
if [ -d "$PRIVATE" ]; then
for PATCH in $PRIVATE/*.patch; do
patch -p1 < $PATCH
done
fi
chown -R apache:apache .
popd
|
shanghaiscott/joid
|
deploy.sh
|
Shell
|
agpl-3.0
| 645 |
#!/bin/sh
#
# Copyright (C) 2009-2012:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
DIR="$(cd $(dirname "$0"); pwd)"
BIN="$DIR"/../bin
ETC="$DIR"/../etc
DEBUG_PATH="/tmp/receiver.debug"
echo "Launching receiver (which manages passive data) in debug mode to the file $DEBUG_PATH"
"$BIN"/shinken-receiver -d -c "$ETC"/receiverd.ini --debug "$DEBUG_PATH"
|
wbsavage/shinken
|
bin/launch_receiver_debug.sh
|
Shell
|
agpl-3.0
| 1,158 |
#!/bin/bash
rel=../..
if [ x"$TOP" == x ]; then TOP=`pwd`/$rel; fi
. $rel/linux-ow.sh
if [ "$1" == "clean" ]; then
do_clean
make clean
exit 0
fi
if [ "$1" == "disk" ]; then
true
fi
if [[ "$1" == "build" || "$1" == "" ]]; then
true
fi
|
joncampbell123/doslib
|
tool/opcc/make.sh
|
Shell
|
lgpl-2.1
| 258 |
#!/usr/bin/env bash
. $builddir/tests/test_common.sh
set -e -o pipefail
function test_probes_yamlfilecontent_offline_mode {
probecheck "yamlfilecontent" || return 255
local ret_val=0
local DF="${srcdir}/test_probes_yamlfilecontent_offline_mode.xml"
local RF="results.xml"
local YAML_FILE="openshift-logging.yaml"
[ -f $RF ] && rm -f $RF
tmpdir=$(make_temp_dir /tmp "test_offline_mode_yamlfilecontent")
# Setup chroot fs and host with test files in different states
mkdir $tmpdir/tmp
cp "${srcdir}/${YAML_FILE}" "${tmpdir}/tmp"
cp "${srcdir}/${YAML_FILE}" /tmp
xsed -i 's/name: instance/name: outstance/' "/tmp/${YAML_FILE}"
cp "${srcdir}/${YAML_FILE}" "/tmp/host-${YAML_FILE}"
set_chroot_offline_test_mode "$tmpdir"
$OSCAP oval eval --results $RF $DF
unset_chroot_offline_test_mode
if [ -f $RF ]; then
verify_results "def" $DF $RF 3 && verify_results "tst" $DF $RF 3
ret_val=$?
else
ret_val=1
fi
rm -f /tmp/$YAML_FILE
rm -rf ${tmpdir}
return $ret_val
}
test_probes_yamlfilecontent_offline_mode
|
OpenSCAP/openscap
|
tests/probes/yamlfilecontent/test_probes_yamlfilecontent_offline_mode.sh
|
Shell
|
lgpl-2.1
| 1,124 |
# Run with ./run-tests.sh
test_ocr_on_live_video() {
cat > test.py <<-EOF
import stbt
stbt.frames(timeout_secs=30).next() # wait 'til video pipeline playing
text = stbt.ocr()
assert text == "Hello there", "Unexpected text: %s" % text
text = stbt.ocr(region=stbt.Region(x=70, y=180, width=90, height=40))
assert text == "Hello", "Unexpected text: %s" % text
EOF
stbt run -v \
--source-pipeline="videotestsrc pattern=black ! \
textoverlay text=Hello\ there font-desc=Sans\ 48" \
test.py
}
|
wmanley/stb-tester
|
tests/test-ocr.sh
|
Shell
|
lgpl-2.1
| 540 |
#!/bin/bash
## Copyright (C) 2012 Kolibre
#
# This file is part of kolibre-clientcore.
#
# Kolibre-clientcore is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2.1 of the License, or
# (at your option) any later version.
#
# Kolibre-clientcore is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with kolibre-clientcore. If not, see <http://www.gnu.org/licenses/>.
#
PROMPTSFILE='prompts.csv'
usage()
{
echo "usage: $0 [ACTION]"
echo ""
echo "[ACTION]"
echo " --missing find and add missing prompts (default action)"
echo " --extra find prompts that are not used"
}
find_prompt_in_string()
{
local string=$1
# TODO: allow optional spaces between macro and parentheses e.g. _N ( "one hour" )
prompt=$(echo "$string" | sed 's/.*_N("\(.*\)".*).*/"\1"/')
# now prompt variable should store a prompt inside double quotations e.g. "one hour"
length_before=`expr length "$prompt"`
prompt=$(echo "$prompt" | tr -d '\"')
length_after=`expr length "$prompt"`
# if ( length_before == (length_after + 2) ) then
# the prompt variable store a valid prompt string
# otherwise we set prompt to an empty string
compare_value=`expr $length_after + 2`
if ! [ $length_before -eq $compare_value ]; then
prompt=""
fi
}
prompt_not_in_prompts_file()
{
local prompt=$1
if sed 's/#.*//' $PROMPTSFILE | grep "\"$prompt\"" &> /dev/null; then
return 1
else
return 0
fi
}
find_missing_prompts()
{
FOUNDMISSINGPROMPT=0
TMPFILE="/tmp/$(basename $0).$$.tmp"
# TODO: allow optional spaces between macro and parentheses e.g. _N ( "one hour" )
find ../src/ -name '*.cpp' -exec grep -Hn "_N(.*)" {} \; > $TMPFILE
while read line; do
file=`echo $line | sed 's/\(.*cpp:[0-9]\+:\)\(.*\)/\1/' | sed 's/:[0-9]\+://'`
code=`echo $line | sed 's/\(.*cpp:[0-9]\+:\)\(.*\)/\2/' | tr -d ' '`
# continue if file include substring /tests/
[[ "$file" =~ "/tests/" ]] && continue
# continue if code begins with substring //
[[ "$code" =~ "//" ]] && continue
find_prompt_in_string "$line"
# continue if string length of prompt is zero
if [ -z "$prompt" ]; then
continue
fi
if prompt_not_in_prompts_file "$prompt"; then
echo "prompt '$prompt' is missing in $PROMPTSFILE"
if [ $FOUNDMISSINGPROMPT -eq 0 ]; then
datestr=`date`
echo "" >> $PROMPTSFILE
echo "# prompts added on $datestr" >> $PROMPTSFILE
echo "" >> $PROMPTSFILE
FOUNDMISSINGPROMPT=1
fi
echo "\"$prompt\"" >> $PROMPTSFILE
fi
done < $TMPFILE
rm $TMPFILE
exit $FOUNDMISSINGPROMPT
}
find_extra_prompts()
{
FOUNDEXTRAPROMPT=0
while read line; do
# continue if line begins with #
[[ "$line" =~ "#" ]] && continue
# strip leading and trailing white space
prompt=$(echo "$line" | sed -e 's/^ *//g;s/ *$//g')
# continue if string length of prompt is zero
if [ -z "$prompt" ]; then
continue
fi
if ! grep -r "$prompt" ../src -q; then
if [ $FOUNDEXTRAPROMPT -eq 0 ]; then
echo "List of prompts which are not used"
FOUNDEXTRAPROMPT=1
fi
echo "$prompt"
fi
done < $PROMPTSFILE
}
if [ ! -f $PROMPTSFILE ]; then
echo "error: could not find file '$PROMPTSFILE'"
exit 1
fi
if [ $# -eq 0 ] || [ "$1" == "--missing" ]; then
find_missing_prompts
elif [ "$1" == "--extra" ]; then
find_extra_prompts
elif [ "$1" == "-h" ] || [ "$1" == "--help" ]; then
usage
exit 0
else
usage
exit 1
fi
|
kolibre/libkolibre-clientcore
|
prompts/prompthelper.sh
|
Shell
|
lgpl-2.1
| 4,216 |
#! /bin/bash
#
# Replace text with something else in a given directory
#
function usage(){
echo "replace.all.sh what_be_replaced replace_with_what where"
exit 1
}
if [ $# != 3 ]
then
usage
fi
what=$1
to=$2
where=$3
grep "$what" $where -Rl | xargs sed -i -e "s/$what/$to/g"
|
chfw/bash-cli-utils
|
bin/replace.all.sh
|
Shell
|
lgpl-3.0
| 289 |
#! /bin/bash
#
# verify json-ld encoding.
# reiterates sparql-protocol/media-types for json-ld
#
# json_diff:
# pip install json-delta
# when the documents match, it emits an empty array: []
curl_sparql_request \
-H "Accept: application/ld+json" \
-H "Content-Type:application/sparql-query" <<EOF \
| tee $ECHO_OUTPUT | json_pp | json_diff /dev/stdin /dev/fd/3 3<<TEST 2>&1 #| egrep -q '^\[\]$'
construct {
[ <http://example.org#value> ?o ]
}
where {
values ?o {
<http://example.org/aURI>
'2020-12-01'^^xsd:date
'2020-12-01T00:00:00'^^xsd:dateTime
'2020-12-01T00:00:00Z'^^xsd:dateTime
'2020-12-01T00:00:00-06:00'^^xsd:dateTime
'2020-12-01T00:00:00+06:00'^^xsd:dateTime
'true'^^xsd:boolean
'false'^^xsd:boolean
'1.1'^^xsd:decimal
'1'^^xsd:integer
'2.0'^^xsd:double
'3.0'^^xsd:float
'string'
'langstring'@en
}
}
order by ?o
EOF
{
"@graph" : [
{
"@id" : "_:g1",
"http://example.org#value" : "http://example.org/aURI"
},
{
"@id" : "_:g2",
"http://example.org#value" : {
"@value" : "langstring",
"@language" : "en"
}
},
{
"@id" : "_:g3",
"http://example.org#value" : "string"
},
{
"@id" : "_:g4",
"http://example.org#value" : false
},
{
"http://example.org#value" : true,
"@id" : "_:g5"
},
{
"@id" : "_:g6",
"http://example.org#value" : 1
},
{
"@id" : "_:g7",
"http://example.org#value" : 1.1
},
{
"@id" : "_:g8",
"http://example.org#value" : 2
},
{
"http://example.org#value" : 3,
"@id" : "_:g9"
},
{
"http://example.org#value" : "2020-12-01",
"@id" : "_:g10"
},
{
"@id" : "_:g11",
"http://example.org#value" : "2020-11-30T18:00:00Z"
},
{
"http://example.org#value" : "2020-12-01T00:00:00Z",
"@id" : "_:g12"
},
{
"@id" : "_:g13",
"http://example.org#value" : "2020-12-01T00:00:00"
},
{
"@id" : "_:g14",
"http://example.org#value" : "2020-12-01T06:00:00Z"
}
]
}
TEST
|
dydra/http-api-tests
|
tickets/release-202012/284-jsonld-compacted.sh
|
Shell
|
unlicense
| 2,298 |
echo "{
\"parent\":\"moregenerators:item/baseItem\",
\"textures\": {
\"layer0\":\"moregenerators:items/$1\"
}
}" > $1.json
|
adamsong/MoreGenerators
|
resources/assets/moregenerators/models/item/mktexture.sh
|
Shell
|
unlicense
| 132 |
ns=(5000 10000 20000 50000)
# The capacity values below lie between "10<n>" and "1000<n>" (the bound
# digits concatenated with the value of n; e.g. for n=5000, between 105000
# and 10005000). Generated by random.org. 500 instances are generated per
# combination, so we need four sets of 500 capacities.
declare -A cs
cs[5000]="5052835 3897377 9445422 5172202 7957284 945504 3023224 7129097 6701143 5081854
6746697 9741414 6348861 5984264 4983076 5241702 3795391 5488791 2452692 589971
4277527 4156282 760592 1320121 289111 295352 4784946 4924154 3244282 1100532
3528071 7557273 9214816 6034433 7638994 3962519 8093966 2884248 6354493 7875145
5024417 6678450 3877296 5628220 2014515 1766445 3579055 3637543 8258588 5197266
192100 2280742 9589035 9401492 8383254 3832351 5190132 8879894 9333188 1395982
7118461 2917193 1103941 7925587 931102 9880127 7943163 2539367 6703953 3462059
4242352 8280366 5876494 2319116 3821535 4055087 6610157 3545314 7070176 4994228
4147712 704800 5304125 1296865 6464625 9041821 399692 6663268 5024217 8534676
5649759 8860854 5662484 9044620 7900677 5722358 2325294 7010023 1604949 4183453
8739009 3602716 6929655 6025564 5214625 426261 1624859 781945 1390263 6566158
8503123 5642919 3333886 6496602 9062392 2437680 5521733 9870608 2282298 1532938
4741484 205285 2390844 5836897 3402745 5497075 9908634 6471546 6859533 2724542
6050355 1312716 7175107 4723154 9937690 2735182 9994523 6584063 9564497 1223852
9609036 4604639 6536876 1787475 7268559 486058 8028958 6876245 9183373 8652281
3802200 3205135 4215227 7465127 5473784 8726377 5783115 8479680 810121 1827804
2865787 4744515 564361 3173165 7343572 763193 5633168 4970401 1997287 2381969
750624 2672525 9809750 1807019 6987539 2596379 4003615 8110064 3928017 7185290
4902827 5966410 385066 1018780 1621867 2341834 4026290 172532 4248243 9777440
5416722 1840037 6019596 1719966 241702 9488730 2472338 2823788 9154189 6675866
5454693 9009494 7218736 6835586 4477214 7547207 1785483 2912463 2328490 8528025
2331404 4303780 1275108 9155556 3715611 5029329 6445041 311791 5638982 333169
481094 3976632 7798653 4174611 6060928 3807344 6776832 2169081 4705487 4055287
1421347 8713168 6938471 987670 3833765 1904194 1047009 973923 1984741 7454675
6662963 4927373 884832 812254 1296023 7514492 236748 3598813 2235698 7781968
5698356 1220326 1817429 4753908 4231738 438436 1836766 4873798 8847073 6109183
1910660 4904088 863684 3026004 5044432 8113183 4487433 2961817 7468046 1523292
586926 9201628 7202544 7071725 5492503 1160915 5046564 9452552 9290651 4021294
870904 9066197 8792909 9650898 8828235 3210675 9677867 7770367 4771071 1552971
8673995 8575336 8186875 3130207 2824619 9194612 6346480 6734168 9484981 5376652
3236401 7380818 6984282 1454522 3520595 4355573 7500671 6649204 7115160 8491738
6518923 3039709 3624655 3967138 7870170 9811788 6878434 2006966 1948539 1597948
3172217 4513848 3329501 3452613 847798 7335268 1143730 1444653 3365508 5964651
1208881 2563958 1167491 253605 1446262 7246530 910577 9117405 4167307 6539973
7801239 5506961 4662957 3056498 148537 1622113 3557977 8038279 3356493 2254967
3497645 2351527 5378797 8637834 3957722 3161965 2053171 9110142 6725396 5381030
4410851 4520921 161385 6131577 6135028 1637945 4329792 9672339 5845631 4756834
8694772 3420575 7380526 9087456 3545087 9816839 7815102 3003322 8752609 6175729
8834721 9024951 4352861 2256221 7967360 6156535 8851581 6596333 4836116 8779853
782644 8710115 2217853 3294607 5210159 6776687 2653458 7552202 2046538 2651146
260346 9980013 7863311 1858306 1071853 8747204 3632894 3795425 454666 7493231
6458611 4593393 8774726 5153321 8406329 2496359 9712767 2805826 2695422 9052030
2265695 6661646 5592252 7396308 5776668 4429236 6680412 578241 4056591 3102931
8635466 4426277 4627185 4711363 2123994 5634552 7794145 2797913 7046114 3180683
7173676 9857164 1376079 2664875 6994714 7324377 6491429 8784980 1044606 5673598
2409016 2549843 7907514 3490958 5282893 1971778 6605694 7412599 6410335 4174573
9547163 276866 4763568 4676318 7805074 9166267 2674068 3992549 7176247 4113054
299685 5494901 9637936 7658994 8950445 8618519 6689067 7566511 9265905 9290359
509698 4583760 9464983 9311050 3762785 2145848 6695285 6923487 8225632 6861099
8928363 3683925 2143112 9884120 6911947 4231857 4373156 7161943 8772358 1019453"
cs[10000]="45667812 52926330 67603969 65617717 48752651 27721806 16372434 93158297 34964751 6311802
69682757 87027021 69330407 51012814 72410003 76269221 6712104 76272157 63036883 61166829
73621875 96279181 92425089 54812232 83848093 76710035 8247071 72723498 77307903 54711483
56574382 77889933 8388832 7867079 48504579 70201258 59442236 73320246 90683806 48918775
95312321 74110651 82121499 33929358 80958891 34021658 64789316 11037950 85762730 59878474
21065435 80721916 7539408 24000855 36017953 65303731 21781331 72886391 58633254 28696807
73816185 6649016 13393044 26103694 65554581 55420795 66211103 84762560 17768833 97623503
36013126 35140134 75494204 22996436 26511344 48746888 87539808 42013430 95406154 49660146
37273903 56667075 67739320 5741489 24791156 18813217 84973943 30139045 31355039 55306961
58162833 7786125 54993951 99947939 73340584 60604500 23649002 15613807 97959883 99840518
53704595 4166538 6389019 10388797 45518851 45458930 42536160 75126340 53352688 75456123
21087652 3362862 88769444 45420750 29391418 18555434 72714935 20581375 62289029 50409463
78856406 15235637 16342441 93358101 42815789 8803729 93220260 37264059 16123217 20350242
57131242 64359857 11054168 71860733 71177761 42011865 36686216 23772910 15085263 4988260
21267747 80980341 42869767 71175596 14985291 37925648 7240187 56069316 2623462 2293788
74513131 44163683 73949523 20180366 74781762 5590253 32032990 69314154 8367345 96021846
95575771 39179680 71822450 49571060 75412060 53265396 63442624 47139048 29041453 56205560
57725132 71501062 39545255 72807244 43232789 65804643 11059993 46470724 30601366 66754892
16268264 43038225 79139351 82045239 20049058 7148219 12253773 48545940 66914613 83973295
32729394 81446325 4818762 73862405 69997274 99055211 70863228 28501327 41143872 65245038
49433417 58475891 11358565 53143976 41101871 88191831 90794670 81724154 95848138 2567875
75430404 11248850 75672542 74480053 61205659 63376904 10374469 56263099 4309866 59690451
87751611 94793942 54210410 95875310 39336239 78916401 35725875 6870624 90961818 39146777
63032838 91224172 84487498 16920172 69031267 51844230 17228310 72856050 11566888 82157453
18453917 49749995 21419484 7868539 85127195 94609986 15688767 29033077 68626132 80154435
83022319 89185377 7731270 38443197 34384721 9133038 58106216 21909975 60158684 23150543
12472909 52768400 3616274 41520764 27992676 57162510 32638555 46484320 74428427 52055497
39132583 98487068 72084240 24285250 2186961 60626353 75255976 50481442 60792332 45331828
6468045 14487138 10235569 2826473 88976062 88817447 90168948 49285745 70334635 78950639
5980217 96512683 3205558 63973753 5325948 10442827 74835473 9517459 28050446 73671980
50458012 96690566 62623458 41158354 38018343 60935070 73917375 68171942 61131058 83540015
34617283 50840071 46495603 51897618 66834737 58833585 60545638 97122954 84334033 12780232
51259982 80292247 66644421 38032118 80409554 81991499 81936904 16657131 15423561 77743926
61533391 29402241 98972500 3598908 76519692 98140622 32905362 42443650 84433578 37551242
51289687 5221854 88591286 37587582 12780880 77235193 35443532 94828394 11742257 24508818
56477119 82263976 52359572 29362429 84771813 27987881 98870925 94212476 42244064 78312231
47822444 10464637 49092236 6068614 61521125 67350401 5591557 68688348 73726433 25778973
58229587 11551498 8266762 98905068 10592318 85428792 96034242 73663377 52364034 36906316
45767429 86476214 96268822 97824125 63594575 34004790 40489326 48810185 40803683 37631849
36779224 3773069 49615641 55319044 54863187 81618731 82493360 43762165 70399479 62794320
40340840 20966660 41340661 18011113 49374815 13967694 46087227 68630324 66736253 82854889
76801257 67990413 99162216 97066001 59042793 69658761 70721608 58819820 27751917 34419441
57119738 93883514 32654845 64968112 9814318 99268884 97854042 80742346 35698147 86845957
63620386 22339365 99037658 18791359 46583871 65541970 15821718 28917125 74271426 41809555
51590486 54968277 26024155 25120970 68749197 73644494 30706850 20595199 15422392 63386953
73469410 83105179 13124297 75016539 22377212 15234430 16445217 70210825 30407503 80295612
83177864 53185047 86160398 76482734 45009088 45397408 1067363 38396871 53511820 71873681
92466068 1537919 12026819 17059545 78918657 31401664 59750218 36794569 26682848 38988509
87387586 67790333 52044968 68063458 96441889 56033282 63766493 82948557 70782042 93842309
67014713 16697385 8077600 27999417 31012781 84240510 93061083 46901795 98031202 63183139"
cs[20000]="13130598 61666657 91206311 56418527 46428515 51592665 8780715 28033333 88368892 11465474
47897810 21445624 55461121 77271005 57605881 17795343 31287203 39687653 60094940 78733090
66621104 80951944 66089141 74433867 99252240 12031782 44036015 55453527 67192471 92125949
82162365 69379204 74104370 21861015 40842012 63466318 47872978 48079947 52694607 94443202
12675341 55170783 86648320 19041897 31401096 13553251 63853327 7402537 53293071 75371478
78627470 8954775 89418814 38475405 51790152 33376285 59259383 51502063 5587967 22098410
30779780 49811343 8887790 12450408 58454045 9234188 5953438 28041912 31396782 1435117
12869185 38682412 60494055 54695884 53156575 76903042 51360799 4033052 25199892 75136703
95020882 49847613 52179337 46466066 13045986 59287308 79134496 3358570 56117159 24331496
4933606 53485801 23256876 95022402 61439148 84957257 39415323 16615051 2303128 92469375
8724677 88389327 17832305 35073562 58803419 80248273 21905435 22644552 77209306 98184707
1838818 18359342 71038052 30739230 25351319 27402514 31878551 99221590 41560252 51560789
88187848 82162199 36428636 58076374 65265801 93677312 31828338 91555197 83031137 4744867
61669346 76073681 68627178 70427462 62494305 17370746 58356449 48187338 47106099 16886386
75414573 94345854 65167671 49419603 38023634 87663745 52329650 83325383 57746670 31817418
97240215 89340071 45630688 4884406 23032025 71976917 44747234 86072934 46256516 18534626
35558105 14946789 78627240 30412928 92744426 88919574 43020606 36799110 59078873 14304379
70721795 13704348 84783165 73075292 95688643 66272779 17058728 11754811 65919825 42070997
88578646 85388357 91130381 16014702 28737418 30717855 12174602 44909208 38227553 83855307
23925585 66737995 36276295 76126382 72024873 37972509 26086675 57211689 44348262 95014983
17945451 5011469 28540646 83532511 28953057 66780896 53429549 25684139 58162545 37381231
26432624 33338406 1193533 12207178 73816611 33712050 85820352 4187183 78183876 31557812
53260098 46701980 38689140 71797329 13017855 37818144 28344025 54309226 26687509 7079662
90428381 25861629 64238054 26801676 64354645 33316553 95452116 73851195 4587927 58377371
9740317 40276727 11420225 99499717 16962349 82998273 78873022 3107085 35596499 58218097
9042490 45913699 73367502 54476551 60672780 96496790 54180664 61599291 46038046 10049128
84193482 65965696 64144179 39098858 43606912 98178529 89758410 13011232 87787330 28262911
82072651 11759899 79954243 69316121 73530463 60409837 56831821 3527901 73842356 39817293
92043069 34539024 75935940 4386174 76476388 86880327 82425148 26394075 33873554 10711944
78929114 85285147 36744122 46185044 66779590 37853335 87201581 41311687 67084978 17716614
51822310 61207265 31202891 5454436 95333595 13616178 44967278 92907831 69988505 21648477
41709486 56751841 41644153 16583595 21960982 29515701 1601935 9388844 32384154 23370627
22913143 62689914 35734121 95898160 90752609 49031488 51998429 91101281 32571604 53792760
95710192 30270618 24585723 39955951 45583142 91919703 51379242 55855748 70469229 51779895
73559665 64595744 61569579 93491535 9479163 42573896 52604025 80594115 30699449 4046248
34389104 82862049 33857980 27145169 46337608 31471260 22296794 1186950 99666825 41356492
27952686 42902259 47394244 89309930 50169945 81336674 30565166 6848383 80744801 97902462
98458522 21817967 72296227 84516568 34609441 96939870 34506118 73061430 30412968 48914481
84484246 40777168 54093827 2821539 78848759 51213083 31485764 83817117 19827194 93237588
75492111 57698194 49900605 51379837 48048269 90957190 33817984 79813072 87936951 24134565
79016090 22872926 17847878 70749847 60242472 76309495 45683072 95331646 68405468 55436182
74837752 10625345 48953814 23894207 99476258 56950114 14033169 57914625 26398723 7504006
34592297 31429311 31465281 43545213 17975316 51425276 44922271 65742465 17777632 77788146
69910068 86005580 91987958 72978115 51117179 89619250 34931085 70116163 93140984 71511619
27078353 90123722 44498186 17105220 10755847 5294876 97423674 56543236 95652199 94807786
80683282 79087991 53670995 70357223 69984600 90741616 43191425 36505331 13692860 16902880
8111534 90480409 12074994 59777829 45245317 32959614 5244439 61171644 37928261 75551672
14702902 76912671 54111035 31097504 66181640 89911693 57964152 99771879 82919278 14731399
56294101 62161370 56901266 56442366 78615586 65813462 10046071 59219188 71440435 38817990
7134744 38658058 46341943 48357006 45789592 44422753 30872091 11829306 87277856 87409125"
cs[50000]="74736455 53791268 50146896 58496380 27011999 52985829 71971621 24692675 5172209 88886832
2381940 37192439 11457020 65982738 28750683 5684250 4342551 38562037 15725846 41622293
54992286 19149666 25572452 35373194 94209554 19155432 31212485 87763824 87480129 83030003
90211289 5184644 38630208 89138604 41676479 39186005 8269526 57280825 81942907 36882487
55171512 62864809 27074738 20206604 74810341 59507496 64815374 21809652 8772196 93577815
46209855 40364203 55566386 37344181 12709874 11377422 28478111 97195157 8483983 51843160
13444398 92369739 23946082 40297241 80352764 43966896 72945319 87891343 73210230 95186569
48593938 88706404 58667599 56988426 19854492 58709211 10344355 56745423 19167481 95756884
72892714 61652913 80875050 46722756 6003373 26657187 60876515 96886442 79235699 49297191
45953168 21370040 48228257 51173456 32838569 31665439 22764011 89435820 43975076 86128637
71646670 9644556 73441712 34351298 28155703 83569257 36322248 26148173 46788992 3173356
87353318 33946088 33227108 64216379 90874067 42121290 47800736 46578165 73399203 40861026
77295751 47557536 21875231 55621054 28963618 85145269 72878727 40342070 85306004 39775972
34171165 49704463 26042092 35827064 91937967 45222185 1988650 32320297 85626095 33286195
12954038 59711619 91926696 15965127 46490384 66258570 62672863 42100972 13055270 2735104
84465154 45567939 14312308 85849648 24693476 63718488 11854710 21545065 61622435 11777490
38955764 59886533 28627351 94732614 6943341 9509004 59966167 84856064 73518041 75866728
43620066 46596229 97157472 99997134 52800283 87942205 37108800 76239580 94827254 5561353
87349171 81684592 80728327 34224451 55069219 48159588 90250596 76849630 10150992 2370775
85469516 83114302 78890623 5848305 50864837 28356806 99932916 78009484 58517006 22012757
6256825 19384666 95211147 12345348 20199167 85606132 47586775 2371539 70223585 73029890
37983366 26349126 76128856 96590462 84871339 32154675 15275720 83661615 14029395 15951790
15481608 63469158 5803211 7508433 73264621 19008312 97558332 25164160 52092738 54282542
92914493 69028392 37675694 20999642 37027466 94185085 91365489 51695617 74357523 14098896
25906798 65004251 3114824 56431537 69880208 63461056 45582554 77372040 2551248 50623986
60468039 77349319 24975539 40362067 99188385 6197212 19925659 16464549 52113577 39587651
73153081 47095118 74859565 88244106 29968997 63936272 61750732 51264571 82026274 71739734
45496672 33325504 29937925 57927334 39371843 6455721 86520137 60494327 30786873 83529215
87498669 70887075 15566859 74512220 44719849 51798520 31366487 90549517 35112083 15956194
18584022 22516024 58222428 89962425 95832073 28053698 97188943 46374074 21391072 4827164
19899175 31099542 32571531 31483130 21107341 96125477 32806790 79790161 31934220 46654758
79208336 63747180 54348724 82284801 82335164 31076287 50356763 23661677 6926528 62938855
12502616 70941477 21391264 59239811 78912298 86974018 72456682 22654111 82889175 53846997
9887500 13060366 61825724 34084722 97647555 89102322 86949385 21416369 74627295 70266461
30037614 10155778 8748032 83744303 21078474 26367984 50360706 49494774 39735629 94221710
7307565 11679823 43920202 77930219 7434740 70429389 47818384 11990004 87674129 9241120
59562386 40433107 97447755 28110804 26090204 40638026 49655841 29324973 5679530 86150538
44269350 41940959 56732105 17075585 57677488 97440430 24435140 27064914 3868694 48565799
89078114 54997611 55446700 64472124 96669073 79427278 86824683 82967885 94667685 84665482
65139496 76168547 78194388 17571881 59965920 85342653 46567863 28134908 14088953 87157707
9876806 35519583 94122816 57714109 16956627 81003433 47335105 98329081 51948557 24534858
17445009 37814978 75759463 44116573 84140267 93878694 67312155 25398079 12535833 33310968
2493304 5284055 43381825 85597685 64416589 48035418 1296483 43734727 21172030 95414099
86280682 58022892 3384996 62876015 25018429 28003602 43073048 64992510 15946180 7090197
16560005 68230375 97477572 38926787 80548789 44900743 88464721 99780015 23608673 71669205
74304113 91435039 45426308 53180300 33108959 38451209 32504934 6434790 70573906 53762094
2664634 11959906 31366898 72200331 11386531 45187085 15902727 56125515 69190893 93196404
11560973 62521531 1821853 18276686 94225029 54678209 45030394 1955938 86025236 49632885
15122955 92190284 97264876 21187018 98234239 95543962 83522026 19845768 85500707 31432216
96979810 79810092 33147568 30437174 86657759 51587535 97137681 4085935 71245345 5880065"
# 500 instances are generated for each combination, so we need
# 500 seeds
seeds=(731232778 594914841 324673955 300958364 182130978 554525798 398797429 586106970 67683560 915258292
199607488 895832870 420935988 204947608 318339058 750851024 221755733 994385767 575639252 808826180
742053853 669712253 434860255 605460062 320356631 199811851 90924128 532584774 882501307 443600954
418727292 410031365 674362184 126192256 467641010 889293313 442208726 252959003 527753500 713413380
534548386 262685617 261146317 325989053 539788685 19378828 956143877 442730424 888315654 147981886
814994716 857022904 183617890 174653861 539601603 760758831 943575858 578238354 757232490 178019452
660566353 891640290 131770940 457237483 878190975 76284724 751246846 942314578 591038534 171832189
177962211 191284186 914558283 36435194 726744628 377669997 437409015 736725116 269229181 642347878
10284050 81032338 7449337 185217113 379401505 981659194 5313477 438663387 292252175 222856866
962238163 580479505 102695567 727623794 9656937 155870397 622778615 932484623 475574108 190469618
707331854 84621548 172246721 178264529 327926759 142189552 848304836 714494527 95037918 463136216
212281133 586238258 192471812 992980497 455861864 379824190 239745902 248366140 155606980 735284445
942541845 115898593 353625363 787365902 393407547 167176164 916049162 214618533 358804843 504746787
213005048 871698101 874105839 569207249 669547585 851758828 474992288 784804938 695516247 853540834
158609986 676413851 445508302 648685815 886468679 156741052 691792251 665348674 158763026 749672123
284805461 718716699 95933711 212229957 898053813 228047780 338119343 491707671 394447196 889351749
795342803 357785697 993255606 498052849 976819494 562005415 986141003 503963159 568785642 918500202
865173399 982002864 332240810 898334592 902437174 690859146 749117416 12157337 399402953 512244761
497842916 597147704 551254315 334933134 675142562 594945988 781864574 665806388 679641345 759786854
709804088 436160055 571431776 98451920 589607845 468077220 623091554 733451078 358578213 702592739
244653983 562427866 856354096 203350890 185719001 455315464 331409890 396803529 397415542 933939819
809677775 796012404 630649375 713186253 232256585 375173258 79856705 620046486 237779488 637974412
32041991 125393811 128158014 303164619 233995869 266680480 143998406 792636241 461982901 430465835
927753013 687055819 640375662 814183802 363583529 8562352 152942261 301795704 123616711 462789496
213174395 805600152 149558978 325290391 892829231 40353095 368872582 9199696 569030747 552268100
173859084 268449167 51132700 43310200 432746252 379254110 142613265 340363402 229317703 941705476
659470201 966708025 792039649 825928988 956618993 343126569 358801221 837911274 530664442 132278514
130765173 506311923 876785217 618044498 478569737 720039518 989609714 289188356 818372142 183931098
813425575 125668799 85427437 491803659 921541777 913104367 54059702 15834020 543662160 682002986
611057533 244607098 931912657 510834339 171630872 64565003 756653619 125514086 521709984 168042333
726001211 777137311 730277963 574476872 227337887 338216231 91067200 896730166 432774422 479378978
688536648 159071442 329959058 913802786 773924804 741879402 734079079 612149826 72714954 803803347
851423352 137237919 528686676 526816235 999192827 180680115 90048873 730437088 545189497 620558410
16136692 363501936 829921210 435115918 798298455 935632244 118607476 789959996 684896905 390142488
85004012 373038940 644582044 730267981 224170978 245844219 729743919 802152580 97045152 206981756
285463174 225507421 118454554 834407151 208448807 441143757 383859101 78596203 166672639 46944898
920056164 10177486 659945289 183358641 195387934 77820708 904341899 114859999 169616393 235930013
659803863 888531767 726307672 997278557 890735735 134917378 224319647 657896393 578113070 289613875
360265767 349395051 662703693 91594350 313927832 985161383 363374108 447645624 725950929 863176603
344204701 356074706 927015049 52174714 78094432 697826265 685946136 29049563 133251444 19578528
527938850 949050623 577881003 421267430 698153803 986435278 651738470 115684993 231573541 240120430
550957023 191359717 395191829 241841235 173350832 308291223 106334440 920986475 727403291 384707603
530564065 589477672 205756437 268622270 79166218 203306834 425004306 692094263 111763732 286513221
453065461 624559068 174617445 687201656 344071035 151468172 442513933 238429703 781201605 137305354
102994897 603869953 242289138 513173851 540209456 120106892 852721236 108509831 566177757 911451083
723629950 72680581 139986500 622472121 764661701 984814964 291807947 896083696 100358347 591626342
251685855 386597034 10096367 578849134 504944976 346438026 494524491 647172791 683894430 160863513
217303292 350731266 359051883 368525918 971450924 686216765 473841069 213205243 955986354 832284478
818213356 321777396 544590797 788052237 162391755 939900702 133188677 184991892 729971223 536731580
229998815 993158591 733097480 704476488 629839208 159240154 380744672 825468163 909110308 861354474)
for n in ${ns[@]}; do
pmin=$n
wmin=$n
wmax="10$n"
eval "cs2=(${cs[$n]})"
for ((i=0; i < 500; ++i)); do
pyasukp -nosolve -form hi -save "hi_n${n}-${i}-s${seeds[$i]}c${cs2[$i]}.ukp" -seed ${seeds[$i]} -cap ${cs2[$i]} -pmin $pmin -wmin $wmin -wmax $wmax -n $n
#pyasukp -nosolve -form hi2 -save "hi2_n${n}-${i}-s${seeds[$i]}c${cs2[$i]}.ukp" -seed ${seeds[$i]} -cap ${cs2[$i]} -pmin $pmin -wmin $wmin -wmax $wmax -n $n
done
done
|
henriquebecker91/masters
|
codes/sh/pyasukp_paper_bench/hi.sh
|
Shell
|
unlicense
| 23,087 |
#!/bin/bash
# Execute this from the commandline like this:
# bash <(curl -s https://raw.githubusercontent.com/rezitech/letsencrypt-unifi-nvr/master/init.bash) DOMAIN-NAME-HERE.COM
# This script will install the package in one step
apt-get install -y git
git clone https://github.com/rezitech/letsencrypt-unifi-nvr.git /usr/local/bin/letsencrypt-unifi-nvr/
/usr/local/bin/letsencrypt-unifi-nvr/letsencrypt-unifi-nvr.bash -i -d "$1"
|
rezitech/letsencrypt-unifi-nvr
|
init.bash
|
Shell
|
apache-2.0
| 431 |
#!/bin/bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This setup script sets up the project as it starts in the exercise.
# It's recommended to run this on a separate VM.
# Packages that need to be installed (tested on a Debian 9 VM)
sudo DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends \
python3-django python3-djangorestframework
# Files are taken from this dir
ORIGIN_DIR=~
DATA_DIR=/var/www/data
# Copy feedback files
sudo mkdir -p ${DATA_DIR}/feedback
sudo cp ${ORIGIN_DIR}/feedback/* ${DATA_DIR}/feedback
# Setup Django and the project
PROJECT_DIR=/var/www/projects
PROJECT_NAME=corpweb
APP_NAME=feedback
sudo mkdir -p ${PROJECT_DIR}
cd ${PROJECT_DIR}
sudo django-admin startproject ${PROJECT_NAME}
cd ${PROJECT_NAME}
sudo python3 manage.py startapp ${APP_NAME}
sudo cp ${ORIGIN_DIR}/app-files/settings.py ${PROJECT_NAME}/settings.py
sudo cp ${ORIGIN_DIR}/app-files/urls.py ${PROJECT_NAME}/urls.py
sudo cp ${ORIGIN_DIR}/app-files/models.py ${APP_NAME}/models.py
sudo cp ${ORIGIN_DIR}/app-files/serializers.py ${APP_NAME}/serializers.py
sudo cp ${ORIGIN_DIR}/app-files/views.py ${APP_NAME}/views.py
sudo mkdir ${APP_NAME}/templates
sudo cp ${ORIGIN_DIR}/app-files/feedback_index.html ${APP_NAME}/templates/feedback_index.html
sudo python3 manage.py makemigrations ${APP_NAME}
sudo python3 manage.py migrate
sudo cp ${ORIGIN_DIR}/app-files/corpweb.service /etc/systemd/system/corpweb.service
sudo systemctl start ${PROJECT_NAME}
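# Hedged sanity check (not part of the original setup):
#   sudo systemctl status corpweb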
|
google/it-cert-automation-project
|
Part2/setup.sh
|
Shell
|
apache-2.0
| 2,005 |
#!/usr/bin/env bash
# The name of the ~/.ssh/public_key to transfer to hosts to enable password free ssh
SSH_KEY=""
COPY_BOOT=""
while getopts "K:BH:" opt; do
case $opt in
K)
SSH_KEY=$OPTARG
;;
B)
COPY_BOOT="true"
;;
H)
SCANNERS=$OPTARG
;;
\?)
echo "Invalid option: -$OPTARG" >&2
;;
esac
done
# include the DEFAULT_SCANNERS
. bin/hosts.conf
# The scanner ssh hosts to sync to, override with SCANNERS env variable
SCANNERS=${SCANNERS:-${DEFAULT_SCANNERS}}
echo "SCANNERS=${SCANNERS}"
function copy() {
for host in $SCANNERS;
do
rsync -rz -e ssh $1 root@${host}:$2
done
}
function copy_systemd() {
for host in $SCANNERS;
do
rsync -rz -e ssh $1 root@${host}:$2
if [ $? -eq 0 ]; then
ssh root@${host} systemctl daemon-reload
fi
done
}
# First setup the authorized_keys on hosts if -K given
if [ -n "${SSH_KEY}" ]; then
for host in $SCANNERS;
do
rsync -rz -e ssh -p --chmod=u=r,og-rw ~/.ssh/${SSH_KEY} root@${host}:/root/.ssh/authorized_keys
done
fi
# This needs a shared ssh private key in order to avoid having to enter password for each host
copy_systemd systemd/ /lib/systemd/system
if [ -n "${COPY_BOOT}" ]; then
copy boot/ /boot
fi
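# Illustrative invocation (host names and key file are hypothetical):
#   SCANNERS="pi1 pi2" ./scanners-rsync.sh -K id_rsa.pub -B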
|
starksm64/RaspberryPiBeaconParser
|
ConfigManager/src/main/resources/scanners-rsync.sh
|
Shell
|
apache-2.0
| 1,299 |
#----------- using incremental import parameters -----------#
#--- the data for this export is produced by the import script -> bigdata/sqoop/import/06-custom-import-for-export-demo.sh
#--- copy data to export dir
hdfs dfs -mkdir -p /user/cloudera/staging/sq_export/retail_db/customers_custom/{01..05}
hdfs dfs -cp staging/sq_import/retail_db/customers_custom/01/* staging/sq_export/retail_db/customers_custom/01
hdfs dfs -ls -R /user/cloudera/staging/sq_export/retail_db/customers_custom/
#--- create target and staging table in mysql
#--- tgt table is modeled after retail_db and stg table has no key constraint
mysql -u root -pcloudera
CREATE TABLE sq_export.customers_inc LIKE retail_db.customers;
CREATE TABLE sq_export.customers_inc_stg AS SELECT * FROM retail_db.customers LIMIT 0;
ALTER TABLE sq_export.customers_inc_stg MODIFY customer_id int NOT NULL;
DESC sq_export.customers_inc;
DESC sq_export.customers_inc_stg;
#----------- initial insert ----------#
#--- now the tgt table is empty all the records will be exported
sqoop export \
--connect jdbc:mysql://quickstart.cloudera:3306/sq_export \
--username root \
--password cloudera \
--staging-table customers_inc_stg \
--table customers_inc \
--export-dir /user/cloudera/staging/sq_export/retail_db/customers_custom/01 \
--num-mappers 1
#--- check data in table
SELECT * FROM sq_export.customers_inc;
#----------- update only mode ----------#
#--- prepare data for update in hdfs
#--- let's change city, state and zip for the customer with id 1
#--- "Brownsville,TX,78521" will become "Farmville,FB,00000"
#--- the modified data will be written to hdfs
hdfs dfs -cat /user/cloudera/staging/sq_export/retail_db/customers_custom/01/* \
| sed 's/Brownsville,TX,78521/Farmville,FB,00000/' \
| hdfs dfs -put - /user/cloudera/staging/sq_export/retail_db/customers_custom/02/data.csv
#--- now run export with update key as customer id in update only mode
sqoop export \
--connect jdbc:mysql://quickstart.cloudera:3306/sq_export \
--username root \
--password cloudera \
--table customers_inc \
--update-key customer_id \
--update-mode updateonly \
--export-dir /user/cloudera/staging/sq_export/retail_db/customers_custom/02 \
--num-mappers 1
#--- let's check the data in the table
SELECT * FROM sq_export.customers_inc;
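#--- (aside, hedged) to update matching rows AND insert new ones in a single
#--- export, sqoop offers "--update-mode allowinsert" instead of updateonly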
|
nixphix/bigdata
|
sqoop/export/03-incremental-by-update-key-hdfs.sh
|
Shell
|
apache-2.0
| 2,293 |
#! /bin/sh
sudo rm -R .tmp/
sudo rm -R dist
gulp;
node dev-menu > app/templates/includes/dev-menu.njk;
|
chrisplatts01/protect
|
build.sh
|
Shell
|
apache-2.0
| 103 |
#!/bin/bash -eu
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
echo "mode: atomic" > coverage.txt
go test -coverprofile=profile.out -covermode=atomic github.com/apid/apidApigeeSync
if [ -f profile.out ]; then
tail -n +2 profile.out >> coverage.txt
rm profile.out
fi
go tool cover -html=coverage.txt -o cover.html
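# The HTML report can then be opened locally, e.g. (assumes a desktop session):
#   xdg-open cover.html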
|
apid/apidApigeeSync
|
cover.sh
|
Shell
|
apache-2.0
| 882 |
#!/bin/bash
# Author:Tyson
# E-mail:admin#svipc.com
# Website:http://www.svipc.com
# Version:1.0.0 Aug-16-2015-12:28:58
# Notes:Autoscripts for CentOS/RedHat 5+ Debian 6+ and Ubuntu 12+
for Package in apache2 apache2-doc apache2-utils apache2.2-common apache2.2-bin apache2-mpm-prefork apache2-doc apache2-mpm-worker mysql-client mysql-server mysql-common libmysqlclient18 php5 php5-common php5-cgi php5-mysql php5-curl php5-gd libmysql* mysql-*
do
apt-get -y remove --purge $Package
done
dpkg -l | grep ^rc | awk '{print $2}' | xargs dpkg -P
apt-get -y update
# check upgrade OS
[ "$upgrade_yn" == 'y' ] && apt-get -y upgrade
# Install needed packages
for Package in gcc g++ make cmake autoconf libjpeg8 libjpeg8-dev libjpeg-dev libpng12-0 libpng12-dev libpng3 libfreetype6 libfreetype6-dev libxml2 libxml2-dev zlib1g zlib1g-dev libc6 libc6-dev libglib2.0-0 libglib2.0-dev bzip2 libzip-dev libbz2-1.0 libncurses5 libncurses5-dev libaio1 libaio-dev libreadline-dev curl libcurl3 libcurl4-openssl-dev e2fsprogs libkrb5-3 libkrb5-dev libltdl-dev libidn11 libidn11-dev openssl libssl-dev libtool libevent-dev bison re2c libsasl2-dev libxslt1-dev locales libcloog-ppl0 patch vim zip unzip tmux htop wget bc expect rsync git
do
apt-get -y install $Package
done
# PS1
[ -z "`cat ~/.bashrc | grep ^PS1`" ] && echo "PS1='\${debian_chroot:+(\$debian_chroot)}\\[\\e[1;32m\\]\\u@\\h\\[\\033[00m\\]:\\[\\033[01;34m\\]\\w\\[\\033[00m\\]\\$ '" >> ~/.bashrc
# history size
[ -z "`cat ~/.bashrc | grep ^HISTSIZE`" ] && echo 'HISTSIZE=100' >> ~/.bashrc
[ -z "`cat ~/.bashrc | grep history-timestamp`" ] && echo "export PROMPT_COMMAND='{ msg=\$(history 1 | { read x y; echo \$y; });user=\$(whoami); echo \$(date \"+%Y-%m-%d %H:%M:%S\"):\$user:\`pwd\`/:\$msg ---- \$(who am i); } >> /tmp/\`hostname\`.\`whoami\`.history-timestamp'" >> ~/.bashrc
# /etc/security/limits.conf
[ -z "`cat /etc/security/limits.conf | grep 'nproc 65535'`" ] && cat >> /etc/security/limits.conf <<EOF
* soft nproc 65535
* hard nproc 65535
* soft nofile 65535
* hard nofile 65535
EOF
[ -z "`cat /etc/rc.local | grep 'ulimit -SH 65535'`" ] && echo "ulimit -SH 65535" >> /etc/rc.local
# /etc/hosts
[ "$(hostname -i | awk '{print $1}')" != "127.0.0.1" ] && sed -i "s@^127.0.0.1\(.*\)@127.0.0.1 `hostname` \1@" /etc/hosts
# Set timezone
rm -rf /etc/localtime
ln -s /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
# Set DNS
#cat > /etc/resolv.conf << EOF
#nameserver 114.114.114.114
#nameserver 8.8.8.8
#EOF
# alias vi
[ -z "`cat ~/.bashrc | grep 'alias vi='`" ] && sed -i "s@^alias l=\(.*\)@alias l=\1\nalias vi='vim'@" ~/.bashrc
[ -z "`cat /etc/vim/vimrc | grep 'syntax on'`" ] && echo 'syntax on' >> /etc/vim/vimrc
sed -i 's@^# export LS_OPTIONS@export LS_OPTIONS@' ~/.bashrc
sed -i 's@^# alias@alias@g' ~/.bashrc
# /etc/sysctl.conf
[ -z "`cat /etc/sysctl.conf | grep 'fs.file-max'`" ] && cat >> /etc/sysctl.conf << EOF
fs.file-max=65535
net.ipv4.tcp_syncookies = 1
net.ipv4.tcp_fin_timeout = 30
net.ipv4.tcp_tw_reuse = 1
net.ipv4.tcp_tw_recycle = 1
net.ipv4.ip_local_port_range = 1024 65000
net.ipv4.tcp_max_syn_backlog = 65536
net.ipv4.tcp_max_tw_buckets = 20000
net.ipv4.route.gc_timeout = 100
net.ipv4.tcp_syn_retries = 1
net.ipv4.tcp_synack_retries = 1
net.core.somaxconn = 65535
net.core.netdev_max_backlog = 262144
net.ipv4.tcp_timestamps = 0
net.ipv4.tcp_max_orphans = 262144
EOF
sysctl -p
sed -i 's@^ACTIVE_CONSOLES.*@ACTIVE_CONSOLES="/dev/tty[1-2]"@' /etc/default/console-setup
sed -i 's@^[3-6]:23:respawn@#&@g' /etc/inittab
sed -i "s@^ctrlaltdel@#&@" /etc/inittab
sed -i 's@^# en_US.UTF-8@en_US.UTF-8@' /etc/locale.gen
init q
# Update time
ntpdate pool.ntp.org
[ -z "`grep 'pool.ntp.org' /var/spool/cron/crontabs/root`" ] && { echo "*/20 * * * * `which ntpdate` pool.ntp.org > /dev/null 2>&1" >> /var/spool/cron/crontabs/root;chmod 600 /var/spool/cron/crontabs/root; }
service cron restart
# iptables
if [ -e '/etc/iptables.up.rules' ] && [ -n "`grep ':INPUT DROP' /etc/iptables.up.rules`" -a -n "`grep 'NEW -m tcp --dport 22 -j ACCEPT' /etc/iptables.up.rules`" -a -n "`grep 'NEW -m tcp --dport 80 -j ACCEPT' /etc/iptables.up.rules`" ];then
IPTABLES_STATUS=yes
else
IPTABLES_STATUS=no
fi
if [ "$IPTABLES_STATUS" == 'no' ];then
cat > /etc/iptables.up.rules << EOF
# Firewall configuration written by system-config-securitylevel
# Manual customization of this file is not recommended.
*filter
:INPUT DROP [0:0]
:FORWARD ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
:syn-flood - [0:0]
-A INPUT -i lo -j ACCEPT
-A INPUT -m state --state RELATED,ESTABLISHED -j ACCEPT
-A INPUT -p tcp -m state --state NEW -m tcp --dport 22 -j ACCEPT
-A INPUT -p tcp -m state --state NEW -m tcp --dport 80 -j ACCEPT
-A INPUT -p tcp -m state --state NEW -m tcp --dport 443 -j ACCEPT
-A INPUT -p icmp -m limit --limit 100/sec --limit-burst 100 -j ACCEPT
-A INPUT -p icmp -m limit --limit 1/s --limit-burst 10 -j ACCEPT
-A INPUT -p tcp -m tcp --tcp-flags FIN,SYN,RST,ACK SYN -j syn-flood
-A INPUT -j REJECT --reject-with icmp-host-prohibited
-A syn-flood -p tcp -m limit --limit 3/sec --limit-burst 6 -j RETURN
-A syn-flood -j REJECT --reject-with icmp-port-unreachable
COMMIT
EOF
fi
FW_PORT_FLAG=`grep -ow "dport $SSH_PORT" /etc/iptables.up.rules`
[ -z "$FW_PORT_FLAG" -a "$SSH_PORT" != '22' ] && sed -i "s@dport 22 -j ACCEPT@&\n-A INPUT -p tcp -m state --state NEW -m tcp --dport $SSH_PORT -j ACCEPT@" /etc/iptables.up.rules
iptables-restore < /etc/iptables.up.rules
echo 'pre-up iptables-restore < /etc/iptables.up.rules' >> /etc/network/interfaces
service ssh restart
. ~/.bashrc
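# (hedged) after a re-login, the limits and kernel settings above can be
# spot-checked with standard tools, e.g.:
# ulimit -n                            # expect 65535
# sysctl net.core.somaxconn            # expect 65535
# iptables -L INPUT -n --line-numbers  # expect the rules from iptables.up.rules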
|
LongTaiJun/Autoscripts
|
include/init_Debian.sh
|
Shell
|
apache-2.0
| 5,559 |
#!/bin/bash
#
#
# uCodev ELK Deployment Scripts v0.01
#
# Date: 19/08/2015
#
# Copyright 2015 Pedro A. Hortas ([email protected])
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
PREFIX="${1}"
KB_VER="4.4.1-linux-x64"
mkdir -p /opt/${PREFIX}-elk-solution-v1.0
ln -s /opt/${PREFIX}-elk-solution-v1.0 /opt/elk-solution
cd /opt/elk-solution
wget https://download.elastic.co/kibana/kibana/kibana-${KB_VER}.tar.gz
groupadd kibana
useradd kibana -g kibana
tar zxvf kibana-${KB_VER}.tar.gz
rm -f *.gz
chown -R kibana:kibana kibana*
ln -s /opt/elk-solution/kibana-${KB_VER} /opt/elk-solution/kibana
mkdir -p /opt/elk-solution/config
|
ucodev/elastictools
|
testing/deploy/install/kb_deploy.sh
|
Shell
|
apache-2.0
| 1,148 |
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
PACKER=${PACKER:-packer}
PACKER_ARGS="${*}"
INTERNAL=${INTERNAL:-true}
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
SCRIPTDIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
  [[ $SOURCE != /* ]] && SOURCE="$SCRIPTDIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
SCRIPTDIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
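# (hedged aside) on systems with GNU coreutils the symlink-resolving loop above
# is equivalent to the one-liner below; kept as a comment because readlink -f
# is not portable to all BSD/macOS environments:
# SCRIPTDIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"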
pushd ${SCRIPTDIR}
if [[ -n "${CONCOURSE_GCP_KEY}" ]]; then
dd of=credentials.json <<< "${CONCOURSE_GCP_KEY}"
export GOOGLE_APPLICATION_CREDENTIALS=${SCRIPTDIR}/credentials.json
fi
GCP_NETWORK="default"
GCP_SUBNETWORK="default"
MY_NAME=$(curl -s "http://metadata.google.internal/computeMetadata/v1/instance/name" -H "Metadata-Flavor: Google")
if [[ -n "${MY_NAME}" ]]; then
MY_ZONE=$(curl -s "http://metadata.google.internal/computeMetadata/v1/instance/zone" -H "Metadata-Flavor: Google")
MY_ZONE=${MY_ZONE##*/}
NETWORK_INTERFACE_INFO="$(gcloud compute instances describe ${MY_NAME} --zone ${MY_ZONE} --format="json(networkInterfaces)")"
GCP_NETWORK=$(echo ${NETWORK_INTERFACE_INFO} | jq -r '.networkInterfaces[0].network')
GCP_NETWORK=${GCP_NETWORK##*/}
GCP_SUBNETWORK=$(echo ${NETWORK_INTERFACE_INFO} | jq -r '.networkInterfaces[0].subnetwork')
GCP_SUBNETWORK=${GCP_SUBNETWORK##*/}
fi
if [[ -z "${GCP_PROJECT}" ]]; then
echo "GCP_PROJECT is unset. Cowardly refusing to continue."
exit 1
fi
HASHED_PIPELINE_PREFIX="i$(uuidgen -n @dns -s -N "${PIPELINE_PREFIX}")-"
echo "Running packer"
PACKER_LOG=1 ${PACKER} build ${PACKER_ARGS} \
--var "base_family=${BASE_FAMILY}" \
--var "geode_docker_image=${GEODE_DOCKER_IMAGE}" \
--var "pipeline_prefix=${PIPELINE_PREFIX}" \
--var "hashed_pipeline_prefix=${HASHED_PIPELINE_PREFIX}" \
--var "java_build_version=${JAVA_BUILD_VERSION}" \
--var "gcp_project=${GCP_PROJECT}" \
--var "gcp_network=${GCP_NETWORK}" \
--var "gcp_subnetwork=${GCP_SUBNETWORK}" \
--var "use_internal_ip=${INTERNAL}" \
windows-packer.json
|
davebarnes97/geode
|
ci/images/google-windows-geode-builder/build_image.sh
|
Shell
|
apache-2.0
| 2,900 |
#!/bin/bash
# Variables
path_website='/Users/Nyx/Sites/only24/templating'
theme='only24h'
## Delete old
# Root
sudo rm $path_website/flashsalesoffer.php
sudo rm $path_website/flashsalescatalog.php
# Controllers
sudo rm $path_website/controllers/FlashSalesOfferController.php
sudo rm $path_website/controllers/FlashSalesCatalogController.php
# TPL
sudo rm $path_website/themes/$theme/flashsalesoffer.tpl
sudo rm $path_website/themes/$theme/flashsalescatalog.tpl
## Create Symlinks
# Root
sudo ln -s $path_website/modules/flashsales/frontend/flashsalesoffer.php $path_website/flashsalesoffer.php
sudo ln -s $path_website/modules/flashsales/frontend/flashsalescatalog.php $path_website/flashsalescatalog.php
# Controllers
sudo ln -s $path_website/modules/flashsales/frontend/controllers/FlashSalesOfferController.php $path_website/controllers/FlashSalesOfferController.php
sudo ln -s $path_website/modules/flashsales/frontend/controllers/FlashSalesCatalogController.php $path_website/controllers/FlashSalesCatalogController.php
# TPL
sudo ln -s $path_website/modules/flashsales/frontend/flashsalesoffer.tpl $path_website/themes/$theme/flashsalesoffer.tpl
sudo ln -s $path_website/modules/flashsales/frontend/flashsalescatalog.tpl $path_website/themes/$theme/flashsalescatalog.tpl
## CHMOD
# Root
sudo chmod 777 $path_website/flashsalesoffer.php $path_website/flashsalescatalog.php
# Controllers
sudo chmod 777 $path_website/controllers/FlashSalesOfferController.php $path_website/controllers/FlashSalesCatalogController.php
# TPL
sudo chmod 777 $path_website/themes/$theme/flashsalesoffer.tpl $path_website/themes/$theme/flashsalescatalog.tpl
|
prcaen/prestashop-module-flashsales
|
flashsales.sh
|
Shell
|
apache-2.0
| 1,643 |
#!/bin/bash
## postgres healthcheck
##dev-generated
set -eo pipefail
sleeptime=59
# Since docker doesn't provide a lazy period for startup,
# we track health. If the last check showed healthy
# as determined by existence of /tmp/healthy, then
# sleep at startup. This requires the timeout to be set
# higher than the sleeptime used here.
if [ -f /tmp/healthy ]; then
printf "container was previously healthy, so sleeping ${sleeptime} seconds before continuing healthcheck... "
sleep ${sleeptime}
fi
# If we can now access the server, we're healthy and ready
if pg_isready >/dev/null; then
printf "pg_isready: healthy"
touch /tmp/healthy
exit 0
fi
rm -f /tmp/healthy
exit 1
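# (hedged usage sketch) the lazy-start trick above only works if the
# orchestrator's healthcheck timeout exceeds $sleeptime; a hypothetical
# docker-compose stanza honoring that contract:
# healthcheck:
#   test: ["CMD", "/postgres_healthcheck.sh"]
#   interval: 10s
#   timeout: 2m
#   retries: 3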
|
drud/ddev
|
pkg/ddevapp/healthcheck/postgres_healthcheck.sh
|
Shell
|
apache-2.0
| 702 |
#!/usr/bin/env bash
moonc -l .
|
acleverpun/oneofthesedays
|
scripts/lint.sh
|
Shell
|
apache-2.0
| 32 |
#!/bin/bash
script=`dirname $0;`
basedir=`cd -P $script/..; echo $PWD`
XPONENTS=$basedir
export PYTHONPATH=$XPONENTS/python:$XPONENTS/piplib
export PYTHONUNBUFFERED=1
do_test=0
while [ "$1" != "" ]; do
case $1 in
'test')
do_test=1
shift
;;
*)
echo "Bad argument"
exit 1
;;
esac
done
# GLOB NationalFile*
USGS_FILE=`ls ./tmp/NationalFile_202*`
for f in $USGS_FILE \
"./tmp/Countries.txt" \
"./tmp/allCountries.txt" \
"./tmp/ne_10m_admin_1_states_provinces" \
"./tmp/wordstats.sqlite"; do
if [ -e "$f" ]; then
echo "All is good, resource exists: $f"
else
echo "Missing resource, $f"
echo "See README on sources and wordstats"
exit 1
fi
done
if [ "$do_test" -eq 1 ] ; then
DB=./tmp/test.sqlite
python3 ./script/gaz_popstats.py --db $DB
python3 ./script/gaz_usgs.py ${USGS_FILE} --max 100000 --db $DB
python3 ./script/gaz_nga.py ./tmp/Countries.txt --max 100000 --db $DB
python3 ./script/gaz_geonames.py ./tmp/allCountries.txt --max 100000 --db $DB
python3 ./script/gaz_administrative_codes.py ./tmp/ne_10m_admin_1_states_provinces/ne_10m_admin_1_states_provinces.shp --db $DB --max 100000
python3 ./script/gaz_fix_country_coding.py "US" --db $DB
python3 ./script/gaz_generate_variants.py --db $DB
python3 ./script/gaz_country_meta.py --db $DB
python3 ./script/gaz_finalize.py adjust-id --db $DB
python3 ./script/gaz_finalize.py adjust-bias --db $DB
python3 ./script/gaz_finalize.py dedup --optimize --db $DB
else
# PRODUCTION -- RAW SOURCES
# ==========================
# start fresh:
DB=./tmp/master_gazetteer.sqlite
rm -f $DB
datekey=`date +%Y%m%d`
echo GEONAMES POPULATION STATS
python3 ./script/gaz_popstats.py
# Geonames is FIRST, because it has better consistent coverage for all names
echo GEONAMES `date`
LOG=./tmp/gaz_geonames_${datekey}.log
python3 ./script/gaz_geonames.py ./tmp/allCountries.txt > $LOG
# Note - USGS does not provide ADM1 names for US States in NationalFile.
echo USGS `date`
LOG=./tmp/gaz_usgs_${datekey}.log
python3 ./script/gaz_usgs.py $USGS_FILE > $LOG
datekey=`date +%Y%m%d`
echo NGA GNIS `date`
LOG=./tmp/gaz_nga_${datekey}.log
python3 ./script/gaz_nga.py ./tmp/Countries.txt > $LOG
echo ADMIN CODES `date`
LOG=./tmp/gaz_administrative_codes_${datekey}.log
python3 ./script/gaz_administrative_codes.py ./tmp/ne_10m_admin_1_states_provinces/ne_10m_admin_1_states_provinces.shp > $LOG
# OMISSIONS
echo OMISSIONS: Remove selected uncommon/problematic name variations
python3 ./script/gaz_exclusions.py ./etc/gazetteer/filters/exclude-features.csv
# DERIVATIONS
# ==========================
echo US STATE CODES `date`
LOG=./tmp/gaz_fix_country_coding_${datekey}.log
python3 ./script/gaz_fix_country_coding.py "US" > $LOG
echo VARIANTS `date`
LOG=./tmp/gaz_generate_variants_${datekey}.log
python3 ./script/gaz_generate_variants.py > $LOG
echo COUNTRIES `date`
LOG=./tmp/gaz_country_meta${datekey}.log
python3 ./script/gaz_country_meta.py > $LOG
echo ADJUST `date`
LOG=./tmp/gaz_adjustments_${datekey}.log
python3 ./script/gaz_finalize.py adjust-id > $LOG
python3 ./script/gaz_finalize.py adjust-bias >> $LOG
echo `date`
echo OPTIMIZE/DEDUP `date`
LOG=./tmp/gaz_dedup_${datekey}.log
python3 ./script/gaz_finalize.py dedup --optimize > $LOG
echo `date`
fi
|
OpenSextant/Xponents
|
solr/build-sqlite-master.sh
|
Shell
|
apache-2.0
| 3,430 |
#!/usr/bin/env bash
#############################################################################
##
## Logtalk uninstall script
## Last updated on May 4, 2021
##
## This file is part of Logtalk <https://logtalk.org/>
## Copyright 1998-2022 Paulo Moura <[email protected]>
## SPDX-License-Identifier: Apache-2.0
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
#############################################################################
if ! [ "$LOGTALKHOME" ]; then
echo "The environment variable LOGTALKHOME should be defined first!"
echo "Trying default Logtalk installation directories..."
if [ -d "/usr/local/share/logtalk" ]; then
LOGTALKHOME=/usr/local/share/logtalk
echo "Using Logtalk installation at \"/usr/local/share/logtalk\""
elif [ -d "/usr/share/logtalk" ]; then
LOGTALKHOME=/usr/share/logtalk
echo "Using Logtalk installation at \"/usr/share/logtalk\""
elif [ -d "/opt/local/share/logtalk" ]; then
LOGTALKHOME=/opt/local/share/logtalk
echo "Using Logtalk installation at \"/opt/local/share/logtalk\""
elif [ -d "/opt/share/logtalk" ]; then
LOGTALKHOME=/opt/share/logtalk
echo "Using Logtalk installation at \"/opt/share/logtalk\""
else
echo "Unable to locate Logtalk installation directory!" >&2
echo
exit 1
fi
elif ! [ -d "$LOGTALKHOME" ]; then
echo "The environment variable LOGTALKHOME points to a non-existing directory!" >&2
echo "Its current value is: $LOGTALKHOME" >&2
echo "The variable must be set to your Logtalk installation directory!" >&2
echo
exit 1
fi
version=$(cat "$LOGTALKHOME/VERSION.txt")
directory=logtalk-$version
echo
echo "Uninstalling Logtalk $version system-level files..."
echo
cd "$LOGTALKHOME"/.. || exit 1
rm -rf "$directory"
rm -f logtalk
cd ../bin || exit 1
rm -f bplgt
rm -f ciaolgt
rm -f cxlgt
rm -f eclipselgt
rm -f gplgt
rm -f jiplgt
rm -f lgt2svg
rm -f lgt2html
rm -f lgt2pdf
rm -f lgt2xml
rm -f lgt2md
rm -f lgt2rst
rm -f lgt2txt
rm -f logtalk_allure_report
rm -f logtalk_backend_select
rm -f logtalk_tester
rm -f logtalk_doclet
rm -f logtalk_user_setup
rm -f logtalk_version_select
rm -f lvmlgt
rm -f quintuslgt
rm -f scryerlgt
rm -f sicstuslgt
rm -f swilgt
rm -f taulgt
rm -f tplgt
rm -f xsblgt
rm -f yaplgt
cd ../share/man/man1 || exit 1
rm -f bplgt.1.gz
rm -f cxlgt.1.gz
rm -f eclipselgt.1.gz
rm -f gplgt.1.gz
rm -f jiplgt.1.gz
rm -f lgt2svg.1.gz
rm -f lgt2html.1.gz
rm -f lgt2pdf.1.gz
rm -f lgt2xml.1.gz
rm -f lgt2md.1.gz
rm -f lgt2rst.1.gz
rm -f lgt2txt.1.gz
rm -f logtalk_allure_report.1.gz
rm -f logtalk_backend_select.1.gz
rm -f logtalk_tester.1.gz
rm -f logtalk_doclet.1.gz
rm -f logtalk_user_setup.1.gz
rm -f logtalk_version_select.1.gz
rm -f lvmlgt.1.gz
rm -f quintuslgt.1.gz
rm -f scryerlgt.1.gz
rm -f sicstuslgt.1.gz
rm -f swilgt.1.gz
rm -f taulgt.1.gz
rm -f tplgt.1.gz
rm -f xsblgt.1.gz
rm -f yaplgt.1.gz
echo "Logtalk $version system-level uninstall completed. For uninstalling user-level"
echo "Logtalk files simply delete the LOGTALKUSER directories."
echo
|
LogtalkDotOrg/logtalk3
|
scripts/uninstall.sh
|
Shell
|
apache-2.0
| 3,536 |
#!/bin/bash
for TYPE in dmrs eds; do
MRS_DIR="deepbank-${TYPE}"
MRS_WDIR=${MRS_DIR}-working
mkdir -p $MRS_WDIR
# Construct lexicon.
python $HOME/DeepDeepParser/mrs/extract_erg_lexicon.py $ERG_DIR $MRS_WDIR
python $HOME/DeepDeepParser/mrs/extract_data_lexicon.py $MRS_DIR $MRS_WDIR
# Runs Stanford NLP tools over input.
printf "$MRS_DIR/train.raw\n$MRS_DIR/dev.raw\n$MRS_DIR/test.raw\n" > FILELIST
$JAVA -cp "$STANFORD_NLP/*" -Xmx16g \
edu.stanford.nlp.pipeline.StanfordCoreNLP \
-annotators tokenize,ssplit,pos,lemma,ner \
-ssplit.eolonly \
-filelist FILELIST \
-outputFormat text -outputDirectory $MRS_WDIR \
-tokenize.options "normalizeCurrency=False,normalizeFractions=False"\
"normalizeParentheses=False,normalizeOtherBrackets=False,"\
"latexQuotes=False,unicodeQuotes=True,"\
"ptb3Ellipsis=False,unicodeEllipsis=True,"\
"escapeForwardSlashAsterisk=False"
rm FILELIST
# Processes Stanford NLP output.
python $HOME/DeepDeepParser/mrs/stanford_to_linear.py $MRS_DIR $MRS_WDIR $MRS_WDIR
# Converts MRS graphs to multiple linearizations.
python $HOME/DeepDeepParser/mrs/read_mrs.py $MRS_DIR $MRS_WDIR $TYPE
# Copies data for parser training.
LIN_DIR=${TYPE}-parse-data-deepbank
mkdir -p $LIN_DIR
ORACLE=dmrs.ae.ao # Arc-eager parser, alignment-ordered oracle
for SET in train dev test; do
cp $MRS_WDIR/${SET}.en $MRS_WDIR/${SET}.pos $MRS_WDIR/${SET}.ne $LIN_DIR/
cp $MRS_WDIR/${SET}.${ORACLE}.nospan.unlex.lin $LIN_DIR/${SET}.parse
cp $MRS_WDIR/${SET}.${ORACLE}.point.lin $LIN_DIR/${SET}.att
cp $MRS_WDIR/${SET}.${ORACLE}.endpoint.lin $LIN_DIR/${SET}.endatt
done
python $HOME/DeepDeepParser/scripts/find_bucket_sizes.py $LIN_DIR/train.en $LIN_DIR/train.parse > $LIN_DIR/buckets
done
|
janmbuys/DeepDeepParser
|
scripts/preprocess.sh
|
Shell
|
apache-2.0
| 1,833 |
#!/bin/bash
source ./ci/functions.sh
runBuild=false
echo "Reviewing changes that might affect the Gradle build..."
currentChangeSetAffectsTests
retval=$?
if [ "$retval" == 0 ]
then
echo "Found changes that require the build to run test cases."
runBuild=true
else
echo "Changes do NOT affect project test cases."
runBuild=false
fi
if [ "$runBuild" = false ]; then
exit 0
fi
prepCommand="echo 'Running command...'; "
gradle="./gradlew $@"
gradleBuild=""
gradleBuildOptions="--stacktrace --build-cache --configure-on-demand --no-daemon -DtestCategoryType=MEMCACHED "
echo -e "***********************************************"
echo -e "Gradle build started at `date`"
echo -e "***********************************************"
./ci/tests/memcached/run-memcached-server.sh
gradleBuild="$gradleBuild testMemcached jacocoRootReport -x test -x javadoc -x check \
-DskipGradleLint=true --parallel \
-DskipNestedConfigMetadataGen=true "
if [[ "${TRAVIS_COMMIT_MESSAGE}" == *"[show streams]"* ]]; then
gradleBuild="$gradleBuild -DshowStandardStreams=true "
fi
if [[ "${TRAVIS_COMMIT_MESSAGE}" == *"[rerun tasks]"* ]]; then
gradleBuild="$gradleBuild --rerun-tasks "
fi
if [[ "${TRAVIS_COMMIT_MESSAGE}" == *"[refresh dependencies]"* ]]; then
gradleBuild="$gradleBuild --refresh-dependencies "
fi
if [ -z "$gradleBuild" ]; then
echo "Gradle build will be ignored since no commands are specified to run."
else
tasks="$gradle $gradleBuildOptions $gradleBuild"
echo -e "***************************************************************************************"
echo $prepCommand
echo $tasks
echo -e "***************************************************************************************"
waitloop="while sleep 9m; do echo -e '\n=====[ Gradle build is still running ]====='; done &"
eval $waitloop
waitRetVal=$?
eval $prepCommand
eval $tasks
retVal=$?
echo -e "***************************************************************************************"
echo -e "Gradle build finished at `date` with exit code $retVal"
echo -e "***************************************************************************************"
if [ $retVal == 0 ]; then
echo "Gradle build finished successfully."
else
echo "Gradle build did NOT finish successfully."
exit $retVal
fi
fi
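# (editor's suggestion, hedged) the 9-minute keep-alive loop above is started
# in the background and never reaped; for local runs one could kill it
# explicitly once the build returns, e.g.:
# kill %% 2>/dev/null || true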
|
rrenomeron/cas
|
ci/tests/memcached/run-tests-memcached.sh
|
Shell
|
apache-2.0
| 2,384 |
#!/usr/bin/env bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
echo "### "
echo "### Port forwarding for prometheus, grafana and kiali"
echo "### "
# Expose PROMETHEUS POD on Port 9090 (gcp) and 9091 (onprem)
PROM_PORT_1=9090
PROM_PORT_2=9091
PROM_POD_1=$(kubectl get po --namespace istio-system -l "app=prometheus" \
-o jsonpath="{.items[0].metadata.name}" --context gcp)
PROM_POD_2=$(kubectl get po --namespace istio-system -l "app=prometheus" \
-o jsonpath="{.items[0].metadata.name}" --context onprem)
EXISTING_PID_9090=$(sudo netstat -nlp | grep $PROM_PORT_1 | awk '{print $7}' | cut -f1 -d '/')
EXISTING_PID_9091=$(sudo netstat -nlp | grep $PROM_PORT_2 | awk '{print $7}' | cut -f1 -d '/')
if [ -n "$EXISTING_PID_9090" ]; then
echo "PID $EXISTING_PID_9090 already listening... restarting port-forward"
kill $EXISTING_PID_9090
sleep 5
fi
if [ -n "$EXISTING_PID_9091" ]; then
echo "PID $EXISTING_PID_9091 already listening... restarting port-forward"
kill $EXISTING_PID_9091
sleep 5
fi
kubectl port-forward $PROM_POD_1 $PROM_PORT_1:9090 -n istio-system --context gcp >> /dev/null &
echo "Prometheus Port opened on $PROM_PORT_1 for gcp"
kubectl port-forward $PROM_POD_2 $PROM_PORT_2:9090 -n istio-system --context onprem >> /dev/null &
echo "Prometheus Port opened on $PROM_PORT_2 for onprem"
# Expose GRAFANA POD on Port 3000 (gcp) and 3001 (onprem)
GRAFANA_PORT_1=3000
GRAFANA_PORT_2=3001
GRAFANA_POD_1=$(kubectl get po --namespace istio-system -l "app=grafana" \
-o jsonpath="{.items[0].metadata.name}" --context gcp)
GRAFANA_POD_2=$(kubectl get po --namespace istio-system -l "app=grafana" \
-o jsonpath="{.items[0].metadata.name}" --context onprem)
EXISTING_PID_3000=$(sudo netstat -nlp | grep $GRAFANA_PORT_1 | awk '{print $7}' | cut -f1 -d '/')
EXISTING_PID_3001=$(sudo netstat -nlp | grep $GRAFANA_PORT_2 | awk '{print $7}' | cut -f1 -d '/')
if [ -n "$EXISTING_PID_3000" ]; then
echo "PID $EXISTING_PID_3000 already listening... restarting port-forward"
kill $EXISTING_PID_3000
sleep 5
fi
if [ -n "$EXISTING_PID_3001" ]; then
echo "PID $EXISTING_PID_3001 already listening... restarting port-forward"
kill $EXISTING_PID_3001
sleep 5
fi
kubectl port-forward $GRAFANA_POD_1 $GRAFANA_PORT_1:3000 -n istio-system --context gcp >> /dev/null &
echo "Grafana Port opened on $GRAFANA_PORT_1 for gcp"
kubectl port-forward $GRAFANA_POD_2 $GRAFANA_PORT_2:3000 -n istio-system --context onprem >> /dev/null &
echo "Grafana Port opened on $GRAFANA_PORT_2 for onprem"
# Expose KIALI POD on Port 20001 (gcp) and 20002 (onprem)
KIALI_PORT_1=20001
KIALI_PORT_2=20002
KIALI_POD_1=$(kubectl get po --namespace istio-system -l "app=kiali" \
-o jsonpath="{.items[0].metadata.name}" --context gcp)
KIALI_POD_2=$(kubectl get po --namespace istio-system -l "app=kiali" \
-o jsonpath="{.items[0].metadata.name}" --context onprem)
EXISTING_PID_20001=$(sudo netstat -nlp | grep $KIALI_PORT_1 | awk '{print $7}' | cut -f1 -d '/')
EXISTING_PID_20002=$(sudo netstat -nlp | grep $KIALI_PORT_2 | awk '{print $7}' | cut -f1 -d '/')
if [ -n "$EXISTING_PID_20001" ]; then
echo "PID $EXISTING_PID_20001 already listening... restarting port-forward"
kill $EXISTING_PID_20001
sleep 5
fi
if [ -n "$EXISTING_PID_20002" ]; then
echo "PID $EXISTING_PID_20002 already listening... restarting port-forward"
kill $EXISTING_PID_20002
sleep 5
fi
kubectl port-forward $KIALI_POD_1 $KIALI_PORT_1:20001 -n istio-system --context gcp >> /dev/null &
echo "Kiali Port opened on $KIALI_PORT_1 for gcp"
kubectl port-forward $KIALI_POD_2 $KIALI_PORT_2:20001 -n istio-system --context onprem >> /dev/null &
echo "Kiali Port opened on $KIALI_PORT_2 for onprem"
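# (hedged refactor sketch) the prometheus/grafana/kiali blocks above repeat the
# same kill-then-forward dance; a reusable helper along these lines could
# replace them. The function name and arguments are illustrative, not from the
# source, and defining it here does not change the script's behavior:
forward_port() {
  local app=$1 local_port=$2 remote_port=$3 context=$4
  local pod existing_pid
  pod=$(kubectl get po --namespace istio-system -l "app=${app}" \
    -o jsonpath="{.items[0].metadata.name}" --context ${context})
  existing_pid=$(sudo netstat -nlp | grep ${local_port} | awk '{print $7}' | cut -f1 -d '/')
  if [ -n "${existing_pid}" ]; then
    echo "PID ${existing_pid} already listening... restarting port-forward"
    kill ${existing_pid}
    sleep 5
  fi
  kubectl port-forward ${pod} ${local_port}:${remote_port} -n istio-system --context ${context} >> /dev/null &
  echo "${app} port opened on ${local_port} for ${context}"
}
# e.g.: forward_port prometheus 9090 9090 gcp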
|
GoogleCloudPlatform/bank-of-anthos-scripts
|
install/istio/istio-connect.sh
|
Shell
|
apache-2.0
| 4,213 |
#!/bin/bash
# Opens an interactive bash terminal to the target container
#
# Example call
# ./container-bash.sh stack-path/mysql-1
# Get the current working directory
WORKDIR="`dirname \"$0\"`"
cd "${WORKDIR}" || exit 1
# Process the container name
TARGET_CONTAINER="$1"
TARGET_CONTAINER=$(echo $TARGET_CONTAINER | sed 's|/|-|g');
# Open the bash terminal to the respective container
./rancher.sh exec -it $TARGET_CONTAINER bash
|
picoded/rancher-container-backup-via-cli
|
container-bash.sh
|
Shell
|
apache-2.0
| 432 |
#!/bin/bash
set -e
if [[ $EUID -eq 0 ]]; then
# running as root - don't need sudo
SUDO=
else
SUDO=sudo
fi
set -x
FLOCKER_BRANCH=$1
source /etc/os-release
OPSYS=${ID}-${VERSION_ID}
case "${OPSYS}" in
centos-7 | fedora-20)
DEFAULT_REPO=https://s3.amazonaws.com/clusterhq-archive/${ID}/clusterhq-release$(rpm -E %dist).noarch.rpm
# Add ClusterHQ repository
# Ignore status, as it may be yum having nothing to do if repo was installed previously.
${SUDO} yum install -y "${DEFAULT_REPO}" || true
if [ "${FLOCKER_BRANCH}" ]; then
BUILDBOT_REPO=http://build.clusterhq.com/results/omnibus/${FLOCKER_BRANCH}/${OPSYS}
cat > /tmp/repo <<EOF
[clusterhq-build]
name=clusterhq-build
baseurl=${BUILDBOT_REPO}
gpgcheck=0
enabled=0
EOF
${SUDO} mv /tmp/repo /etc/yum.repos.d/clusterhq-build.repo
branch_opt=--enablerepo=clusterhq-build
else
branch_opt=
fi
# Add ClusterHQ packages
# Install cli package to get flocker-ca command
${SUDO} yum -y install ${branch_opt} clusterhq-flocker-node
;;
ubuntu-14.04)
# Add ClusterHQ repository
${SUDO} apt-get -y install apt-transport-https software-properties-common
${SUDO} add-apt-repository -y ppa:james-page/docker
${SUDO} add-apt-repository -y 'deb https://clusterhq-archive.s3.amazonaws.com/ubuntu-testing/14.04/$(ARCH) /'
if [ "${FLOCKER_BRANCH}" ]; then
BUILDBOT_REPO=http://build.clusterhq.com/results/omnibus/${FLOCKER_BRANCH}/${OPSYS}
${SUDO} add-apt-repository -y "deb ${BUILDBOT_REPO} /"
cat > /tmp/apt-pref <<EOF
Package: *
Pin: origin build.clusterhq.com
Pin-Priority: 900
EOF
${SUDO} mv /tmp/apt-pref /etc/apt/preferences.d/buildbot-900
fi
${SUDO} apt-get update
# Unauthenticated packages need --force-yes
# Install cli package to get flocker-ca command
${SUDO} apt-get -y --force-yes install clusterhq-flocker-node
;;
esac
# Install control certificates
${SUDO} mkdir -p /etc/flocker
${SUDO} chmod u=rwX,g=,o= /etc/flocker
${SUDO} cp cluster.crt /etc/flocker/cluster.crt
${SUDO} mv control-service.crt /etc/flocker/control-service.crt
${SUDO} mv control-service.key /etc/flocker/control-service.key
${SUDO} chmod 600 /etc/flocker/control-service.key
# Enable Flocker Control
case "${OPSYS}" in
centos-7 | fedora-20)
# Setup firewall
if [ "$(which firewall-cmd)" -a "$(firewall-cmd --state)" = 'running' ]; then
${SUDO} firewall-cmd --add-service flocker-control-api --permanent
${SUDO} firewall-cmd --add-service flocker-control-agent --permanent
${SUDO} firewall-cmd --reload
fi
# Start control service
${SUDO} systemctl enable flocker-control
${SUDO} systemctl start flocker-control
;;
ubuntu-14.04 | ubuntu-15.04)
# Setup firewall
cp /etc/services /tmp/services
printf 'flocker-control-api\t4523/tcp\t\t\t# Flocker Control API port\n' >> /tmp/services
printf 'flocker-control-agent\t4524/tcp\t\t\t# Flocker Control Agent port\n' >> /tmp/services
${SUDO} cp /tmp/services /etc/services
if [ "$(which ufw)" -a "$(${SUDO} ufw status)" != 'Status: inactive' ]; then
${SUDO} ufw allow flocker-control-api
${SUDO} ufw allow flocker-control-agent
fi
# Start control service
cat > /tmp/upstart.override <<EOF
start on runlevel [2345]
stop on runlevel [016]
EOF
${SUDO} mv /tmp/upstart.override /etc/init/flocker-control.override
${SUDO} service flocker-control start
;;
esac
echo "Flocker Control installed."
|
jongiddy/flocker-install
|
bin/install-control.sh
|
Shell
|
apache-2.0
| 3,347 |
#!/bin/bash -eu
#
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
echo "Getting current deployment"
kubectl get deployments spectrogram -o wide
if [ -z "$1" ]
then
echo "Need to supply a version number like 'v38'"
else
echo "Building spectrogram:$1"
cd ../
gcloud docker -- build -t gcr.io/gweb-deepblue/spectrogram:$1 -f spectrogram/Dockerfile .
cd -
gcloud docker -- push gcr.io/gweb-deepblue/spectrogram:$1
kubectl set image deployments spectrogram spectrogram=gcr.io/gweb-deepblue/spectrogram:$1
kubectl apply -f hpa.yaml
echo "Deployed $1"
fi
|
googlecreativelab/pattern-radio
|
kubernetes/spectrogram/deploy.sh
|
Shell
|
apache-2.0
| 1,130 |
#!/usr/bin/env bash
# Requires:
# - Xcode 9.3
# - Ruby 2.5
# - Cocoapods 1.5.2
# - EarlGrey gem 1.13.0
#
# See getting started guide:
# - https://github.com/google/EarlGrey/tree/master/Demo/EarlGreyExample
REPO_NAME="EarlGrey"
if [ ! -d "$REPO_NAME" ]; then
git clone https://github.com/google/${REPO_NAME}.git
fi
DIR=$(pwd)
cd "$DIR/$REPO_NAME/Demo/EarlGreyExample"
pod install
DD_PATH="$DIR/xctestrun/"
rm -rf "$DD_PATH"
echo "open $DIR/$REPO_NAME/Demo/EarlGreyExample/EarlGreyExample.xcworkspace"
echo "Manually update with a valid Apple id."
echo "[Press Enter to continue]"
read
xcodebuild build-for-testing \
-workspace EarlGreyExample.xcworkspace \
-scheme "EarlGreyExampleSwiftTests" \
-destination "generic/platform=iOS" \
-derivedDataPath "$DD_PATH"
FIXTURES_PATH="$DIR/src/main/kotlin/xctest/fixtures/swift"
cp "$DIR/xctestrun/Build/Products/Debug-iphoneos/EarlGreyExampleSwift.app/PlugIns/EarlGreyExampleSwiftTests.xctest/EarlGreyExampleSwiftTests" \
"$FIXTURES_PATH"
|
Flank/flank
|
test_runner/build_earlgrey_example.sh
|
Shell
|
apache-2.0
| 1,006 |
#!/usr/bin/env bash
jps | grep Server | cut -f1 -d" " | xargs kill
|
kerumai/chunking
|
src/dist/bin/shutdown.sh
|
Shell
|
apache-2.0
| 68 |
#!/usr/bin/env bash
# This script identifies the unit test modules that do not correspond
# directly with a module in the code tree. See TESTING.rst for the
# intended structure.
neutron_path=$(cd "$(dirname "$0")/.." && pwd)
base_test_path=neutron/tests/unit
test_path=$neutron_path/$base_test_path
test_files=$(find ${test_path} -iname 'test_*.py')
ignore_regexes=(
# The following vendor plugins are not required to conform to the
# structural requirements.
"^plugins/ibm.*$"
# The following open source plugin tests are not actually unit
# tests and are ignored pending their relocation to the functional
# test tree.
"^plugins/ml2/drivers/mech_sriov/mech_driver/test_mech_sriov_nic_switch.py$"
"^plugins/ml2/test_security_group.py$"
"^plugins/ml2/test_port_binding.py$"
"^plugins/ml2/test_extension_driver_api.py$"
"^plugins/ml2/test_ext_portsecurity.py$"
"^plugins/ml2/test_agent_scheduler.py$"
"^plugins/ml2/drivers/openvswitch/agent/test_agent_scheduler.py$"
"^plugins/ml2/drivers/openvswitch/agent/test_ovs_tunnel.py$"
"^plugins/openvswitch/test_agent_scheduler.py$"
)
error_count=0
ignore_count=0
total_count=0
for test_file in ${test_files[@]}; do
relative_path=${test_file#$test_path/}
expected_path=$(dirname $neutron_path/neutron/$relative_path)
test_filename=$(basename "$test_file")
expected_filename=${test_filename#test_}
# Module filename (e.g. foo/bar.py -> foo/test_bar.py)
filename=$expected_path/$expected_filename
# Package dir (e.g. foo/ -> test_foo.py)
package_dir=${filename%.py}
if [ ! -f "$filename" ] && [ ! -d "$package_dir" ]; then
for ignore_regex in ${ignore_regexes[@]}; do
if [[ "$relative_path" =~ $ignore_regex ]]; then
((ignore_count++))
continue 2
fi
done
echo "Unexpected test file: $base_test_path/$relative_path"
((error_count++))
fi
((total_count++))
done
if [ "$ignore_count" -ne 0 ]; then
echo "$ignore_count unmatched test modules were ignored"
fi
if [ "$error_count" -eq 0 ]; then
echo 'Success! All test modules match targets in the code tree.'
exit 0
else
echo "Failure! $error_count of $total_count test modules do not match targets in the code tree."
exit 1
fi
|
apporc/neutron
|
tools/check_unit_test_structure.sh
|
Shell
|
apache-2.0
| 2,333 |
#!/bin/bash
#example:
#./uploadFile.sh /home/$USER/Downloads/files/ http://$USER:[email protected]:8082/lobcder/dav/ wlan0 multithread
# ----------------------------Parse args---------------------------
srcFilePath=$1
destURL=$2
interface=$3
multithread=$4
# extract the protocol
proto="`echo $destURL | grep '://' | sed -e's,^\(.*://\).*,\1,g'`"
# remove the protocol
url=`echo $destURL| sed -e s,$proto,,g`
# extract the user and password (if any)
userpass="`echo $url | grep @ | cut -d@ -f1`"
pass=`echo $userpass | grep : | cut -d: -f2`
if [ -n "$pass" ]; then
user=`echo $userpass | grep : | cut -d: -f1`
else
user=$userpass
fi
# extract the host -- updated
hostport=`echo $url | sed -e s,$userpass@,,g | cut -d/ -f1`
port=`echo $hostport | grep : | cut -d: -f2`
if [ -n "$port" ]; then
host=`echo $hostport | grep : | cut -d: -f1`
else
host=$hostport
fi
# extract the path (if any)
path="`echo $url | grep / | cut -d/ -f2-`"
echo "machine $host" > $HOME/.netrc
echo " login $user" >> $HOME/.netrc
echo " password $pass" >> $HOME/.netrc
chmod 600 $HOME/.netrc
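# (hedged worked example) given a URL of the form
# http://alice:[email protected]:8082/dav/, the sed/cut pipeline above yields:
# proto=http://  user=alice  pass=secret  host=host.example  port=8082  path=dav/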
# --------------------------check if we upload file or directory ---------------------
if [[ -d $srcFilePath ]]; then
#echo "$srcFilePath is a directory"
files=$srcFilePath/*
if [ -n "$multithread" ]; then
#echo "it's multithread"
i=0
for f in $files
do
i=`expr $i + 1`
echo open $destURL > cadaver$i.script
echo put $f >> cadaver$i.script
echo quit >> cadaver$i.script
done
else
echo open $destURL > cadaver.script
for f in $files
do
echo put $f >> cadaver.script
done
echo quit >> cadaver.script
fi
elif [[ -f $srcFilePath ]]; then
#echo "$srcFilePath is a file"
echo open $destURL > cadaver.script
echo put $srcFilePath >> cadaver.script
echo quit >> cadaver.script
else
echo "$srcFilePath is not valid"
exit 1
fi
#----------------------start monitor-----------------------------------
bwm-ng -o csv rate -t 1000 > monitor.csv &
bwmPID=$!
#----------------------start upload-----------------------------------
#start="$(date +%s)"
startNano="$(date +%s%N)"
if [ -n "$multithread" ] && [[ -d $srcFilePath ]] ; then
#echo "it's multithread"
i=0
for f in $files
do
i=`expr $i + 1`
/usr/bin/time --format="%e" cadaver < cadaver$i.script >/dev/null &
#time cadaver < cadaver$i.script &
cadaverPID[$i]=$!
done
for job in `jobs -p`
do
if [ $job -ne $bwmPID ]; then
wait $job || let "FAIL+=1"
fi
done
else
/usr/bin/time --format="%e" cadaver < cadaver.script >/dev/null
#time cadaver < cadaver.script
fi
elapsed="$(($(date +%s%N)-$startNano))"
elapsedMil=$(echo "$elapsed / 1000000.0" |bc -l)
#elapsedMil="$(($elapsed/1000000))"
elapsedSec=$(echo "$elapsed / 1000000000.0" |bc -l)
#echo "$elapsed 1000000000.0" | awk '{printf "%f", $1 / $2}'
#elapsedSec="$(($elapsed/1000000000))"
#end="$(date +%s)"
startMil=$(echo "$startNano/ 1000000.0" |bc -l)
#elapsedMil="$(($elapsed/1000000))"
startSec=$(echo "$startNano / 1000000000.0" |bc -l)
sleep 1.5
kill $bwmPID
#elapsed="$(expr $end - $start)"
#-----------------------Build report------------------------------------
# report format: one row per monitor sample, columns per $header below
header="Time(sec);ifaceName;MBitsOut;MBitsIn;MBitsTotal;elapsed(sec);fileSize(MBit);speed(MBit/sec)"
grep $interface monitor.csv > tmp.csv
echo $header > report.csv
if [[ -d $srcFilePath ]]; then
size=`du -sb $srcFilePath | awk '{print $1}'`
else
size=$(stat -c%s "$srcFilePath")
fi
fileSize=$(echo "($size * 8) / (1000.0 * 1000.0)" |bc -l)
speed=$(echo "$fileSize / $elapsedSec" |bc -l)
awk -v s=$startSec -v e=$elapsedSec -v fs=$fileSize -v sp=$speed -F "\"*;\"*" '{ print $1-s ";" $2 ";" ($3*8)/(1000*1000) ";" ($4*8)/(1000*1000) ";" ($5*8)/(1000*1000) ";" e ";" fs ";" sp}' tmp.csv >> report.csv
rm monitor.csv tmp.csv cadaver*.script
|
skoulouzis/lobcder
|
lobcder-tests/scripts/uploadFile.sh
|
Shell
|
apache-2.0
| 3,970 |
#!/bin/bash
pwd
for tool in \
  cmy2cmy cmy2cmyk cmy2gray cmy2hsl cmy2hsv cmy2rgb cmy2yiq cmy2yuv \
  gray2cmy gray2rgb \
  rgb2cmy rgb2cmyk rgb2gray rgb2hsl rgb2hsv rgb2rgb rgb2yiq rgb2yuv
do
  g++ $gccBaseSwitch $gccExtraSwitch -I../../../src $tool.cpp -o $tool
done
|
dmilos/color
|
example/less-than-1k/convert/make.sh
|
Shell
|
apache-2.0
| 1,446 |
if [ -z $1 ]; then
echo "file containing POST body not provided, Usage: $0 <filename>"
exit 1
fi
if [ ! -f $1 ]; then
echo "$1 not found!"
exit 1
fi
echo "Sending contents of $1 as the _search request body"
curl -i -v -XGET 'http://localhost:9200/denver_crime/report/_search?pretty=true' -d @$1 --header "Content-Type: application/json"
|
FJbob/denver_data
|
example_aggs/executeAggregation.sh
|
Shell
|
apache-2.0
| 325 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# included in all the hadoop scripts with source command
# should not be executable directly
# also should not be passed any arguments, since we need original $*
# resolve links - $0 may be a softlink
export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64
export HADOOP_HOME=/home/ubuntu/hadoop
this="$0"
while [ -h "$this" ]; do
ls=`ls -ld "$this"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '.*/.*' > /dev/null; then
this="$link"
else
this=`dirname "$this"`/"$link"
fi
done
# convert relative path to absolute path
bin=`dirname "$this"`
script=`basename "$this"`
bin=`cd "$bin"; pwd`
this="$bin/$script"
# the root of the Hadoop installation
export HADOOP_HOME=`dirname "$this"`/..
#check to see if the conf dir is given as an optional argument
if [ $# -gt 1 ]
then
if [ "--config" = "$1" ]
then
shift
confdir=$1
shift
HADOOP_CONF_DIR=$confdir
fi
fi
# Allow alternate conf dir location.
HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
#check to see it is specified whether to use the slaves or the
# masters file
if [ $# -gt 1 ]
then
if [ "--hosts" = "$1" ]
then
shift
slavesfile=$1
shift
export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$slavesfile"
fi
fi
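# (hedged usage note) per the header comments this file is sourced, not run;
# a calling script would typically do something like:
# bin=`dirname "$0"`; . "$bin"/hadoop-config.sh --config "$HADOOP_CONF_DIR"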
|
daniarherikurniawan/Chameleon512
|
bin/hadoop-config.sh
|
Shell
|
apache-2.0
| 2,057 |
#!/bin/bash
echo "** Create key and trust stores **"
if [ -z ${JAVA_HOME} ]
then
echo "JAVA_HOME is not set. Please set and re-run this script."
exit 1
fi
echo "Checking for keytool..."
keytool -help > /dev/null 2>&1
if [ $? != 0 ]
then
echo "Error: keytool is missing from the path, please correct this, then retry"
exit 1
fi
echo "Checking for openssl..."
openssl version > /dev/null 2>&1
if [ $? != 0 ]
then
echo "Error: openssl is missing from the path, please correct this, then retry"
exit 1
fi
if [ $# -lt 2 ]; then
echo "Must specify two parameters:
1) the source directory (can be read-only, contains cert.pem)
2) the target directory (must be read-write)
An optional third parameter can specify additional environment checks.
Known values:
local: verify certificate/keystore volume contents for local dev
keystore.jks and truststore.jks will be created in the specified target directory.
The truststore should be specified as the JVM default using:
-Djavax.net.ssl.trustStore=${target_dir}/truststore.jks \
-Djavax.net.ssl.trustStorePassword=gameontext-trust
"
exit 1
fi
src_dir=$1
target_dir=$2
if [ ! -f ${src_dir}/cert.pem ]; then
echo "Missing certificate: ${src_dir}/cert.pem"
exit 1
fi
if [ "$3" == "local" ]; then
touch ${target_dir}/.local.volume
if [ ! -f ${src_dir}/server.pem ]; then
echo "Missing server certificate: ${src_dir}/server.pem"
exit 1
fi
if [ ! -f ${src_dir}/private.pem ]; then
echo "Missing key certificate: ${src_dir}/private.pem"
exit 1
fi
if [ ! -f ${src_dir}/ltpa.keys ]; then
echo "Missing ltpa.keys: ${src_dir}/ltpa.keys"
exit 1
fi
elif [ -f ${target_dir}/.local.volume ]; then
echo "### Using keystores created in a shared volume"
exit 0
fi
inspect() {
if [ -f ${target_dir}/.local.volume ] ; then
echo "inspect $1 $2"
keytool -list \
-keystore $1 -storepass $2 -storetype PKCS12
echo ""
fi
}
inspect_jks() {
if [ -f ${target_dir}/.local.volume ] ; then
echo "inspect $1 $2"
keytool -list \
-keystore $1 -storepass $2
echo ""
fi
}
cp -f ${src_dir}/*.pem ${target_dir}
cp -f ${src_dir}/*.keys ${target_dir}
echo "### Building keystore/truststore from cert.pem"
echo " # cd ${target_dir}"
cd ${target_dir}
# Clear previous files
rm -f cert.pkcs12
rm -f key.pkcs12
rm -f truststore.jks
echo " # converting pem to pkcs12"
openssl pkcs12 -passin pass:keystore -passout pass:keystore -export -out cert.pkcs12 -in cert.pem
echo " # importing cert.pkcs12 to key.pkcs12"
keytool -v -importkeystore -alias 1 -noprompt \
-srckeystore cert.pkcs12 -srckeypass keystore -srcstorepass keystore -srcstoretype PKCS12 -srcalias 1 \
-destkeystore key.pkcs12 -destkeypass gameontext-keys -deststorepass gameontext-keys -deststoretype PKCS12 -destalias default
echo " # importing jvm truststore to server truststore"
cacerts=$JAVA_HOME/jre/lib/security/cacerts
if [ -e $JAVA_HOME/lib/security/cacerts ]; then
cacerts=$JAVA_HOME/lib/security/cacerts
fi
keytool -importkeystore \
-srckeystore ${cacerts} -srcstorepass changeit \
-destkeystore truststore.jks -deststorepass gameontext-trust
echo " # Add cert.pm to truststore"
keytool -import -alias gameontext -v -noprompt \
-trustcacerts \
-file cert.pem \
-keystore truststore.jks -storepass gameontext-trust
if [ -f ${target_dir}/.local.volume ]; then
echo "** Contents of ${target_dir}"
ls -al ${target_dir}
echo "** Keystore"
inspect ${target_dir}/key.pkcs12 gameontext-keys
fi
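# (hedged example invocation; directory paths below are hypothetical)
# ./bin/gen-keystore.sh ~/git/gameon/certs /opt/gameon/keystores local
# then point the JVM at the generated truststore, as the usage text says:
# -Djavax.net.ssl.trustStore=/opt/gameon/keystores/truststore.jks \
# -Djavax.net.ssl.trustStorePassword=gameontext-trust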
|
gameontext/gameon
|
bin/gen-keystore.sh
|
Shell
|
apache-2.0
| 3,562 |
#!/bin/bash
docker build -t nealhardesty/sshbastion .
|
nealhardesty/dockerfiles
|
sshbastion/build.sh
|
Shell
|
apache-2.0
| 54 |
set -e
FILE1="$1"
javac -Xlint kata/"$FILE1".java
java kata."$FILE1"
|
hermantai/kata
|
java/runone.sh
|
Shell
|
apache-2.0
| 63 |
#! /bin/bash
#Plots every *.csv in the folder.
echo "about to plot each *.CSV in the folder."
echo "script will ask you for target delays to plot"
echo "enter it or enter 's' (without quotes ofc) to skip this file"
for f in *.csv
do
echo "enter target delay for $f"
read targetdelay
if [ "$targetdelay" != "s" ];
then
echo "
reset
set terminal postscript eps enhanced solid 'Helvetica' 10
set datafile separator ';'
set yrange [0:600000]
set y2range [0:120000]
set title 'Comparison between MaxWindow and OurDelay'
set xlabel 'time [{/Symbol m}s]'
set ylabel 'MaxWindow [B]'
set y2label 'OurDelay [{/Symbol m}s]'
set y2tics
set format x '%15.0f'
set key left top
set output '$f.window.delays.eps'
f(x) = $targetdelay
plot '$f' every 5::1 using 1:6 lc rgb 'red' axes x1y2 title 'our delay', '$f' every 5::1 using 1:18 lc rgb 'green' axes x1y1 title 'max window', f(x) lc rgb 'blue' axes x1y2 title 'target delay'
set output '$f.delays.rtt.eps'
set ylabel 'RTT estimaded [ms]'
set title 'Comparison between Estimated RTT and OurDelay'
set yrange [0:200]
plot '$f' every 5::1 using 1:6 lc rgb 'red' axes x1y2 title 'our delay', f(x) lc rgb 'blue' axes x1y2 title 'target delay', '$f' every 5::1 using 1:16 lc rgb 'green' axes x1y1 title 'RTT'
" > plot.gp && gnuplot plot.gp
fi
done
mkdir -p plots
mv *.eps plots/
|
iiljkic/utp4j
|
src/bash/plotscript.sh
|
Shell
|
apache-2.0
| 1,387 |
#!/bin/bash
set -eo pipefail
defaultAlpineVersion='3.6'
declare -A alpineVersion=(
[17.03]='3.5'
[17.03-rc]='3.5'
[17.05]='3.5'
)
cd "$(dirname "$(readlink -f "$BASH_SOURCE")")"
source '.architectures-lib'
versions=( "$@" )
if [ ${#versions[@]} -eq 0 ]; then
versions=( */ )
fi
versions=( "${versions[@]%/}" )
# see http://stackoverflow.com/a/2705678/433558
sed_escape_lhs() {
echo "$@" | sed -e 's/[]\/$*.^|[]/\\&/g'
}
sed_escape_rhs() {
echo "$@" | sed -e 's/[\/&]/\\&/g' | sed -e ':a;N;$!ba;s/\n/\\n/g'
}
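# (hedged usage sketch) sed_escape_lhs/sed_escape_rhs are meant to be paired on
# the two sides of a substitution so arbitrary strings survive sed, e.g.:
# sed -e "s/$(sed_escape_lhs "$needle")/$(sed_escape_rhs "$replacement")/g" file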
# "tac|tac" for http://stackoverflow.com/a/28879552/433558
dindLatest="$(curl -fsSL 'https://github.com/docker/docker/commits/master/hack/dind.atom' | tac|tac | awk -F '[[:space:]]*[<>/]+' '$2 == "id" && $3 ~ /Commit/ { print $4; exit }')"
# TODO once "Supported Docker versions" minimums at Docker 1.8+ (1.6 at time of this writing), bring this back again
#sed -r -e 's/^(ENV DIND_COMMIT) .*/\1 '"$dindLatest"'/' Dockerfile-dind.template
dockerVersions="$(
{
git ls-remote --tags https://github.com/docker/docker-ce.git
# TODO remove-me (17.06+ live exclusively in docker-ce)
git ls-remote --tags https://github.com/docker/docker.git
} \
| cut -d$'\t' -f2 \
| grep '^refs/tags/v[0-9].*$' \
| sed 's!^refs/tags/v!!; s!\^{}$!!' \
| sort -ruV
)"
travisEnv=
for version in "${versions[@]}"; do
rcGrepV='-v'
rcVersion="${version%-rc}"
if [ "$rcVersion" != "$version" ]; then
rcGrepV=
fi
fullVersion="$(echo "$dockerVersions" | grep $rcGrepV -- '-rc' | grep "^$rcVersion[.]" | head -n1)"
if [ -z "$fullVersion" ]; then
echo >&2 "warning: cannot find full version for $version"
continue
fi
channel='edge'
if [ "$rcVersion" != "$version" ]; then
channel='test'
elif \
minorVersion="${rcVersion##*.}" \
&& minorVersion="${minorVersion#0}" \
&& [ "$(( minorVersion % 3 ))" = '0' ] \
; then
channel='stable'
fi
archCase='apkArch="$(apk --print-arch)"; '$'\\\n'
archCase+=$'\t''case "$apkArch" in '$'\\\n'
for apkArch in $(apkArches "$version"); do
dockerArch="$(apkToDockerArch "$version" "$apkArch")"
archCase+=$'\t\t'"$apkArch) dockerArch='$dockerArch' ;; "$'\\\n'
done
archCase+=$'\t\t''*) echo >&2 "error: unsupported architecture ($apkArch)"; exit 1 ;;'$'\\\n'
archCase+=$'\t''esac'
alpine="${alpineVersion[$version]:-$defaultAlpineVersion}"
sed -r \
-e 's!%%DOCKER-CHANNEL%%!'"$channel"'!g' \
-e 's!%%DOCKER-VERSION%%!'"$fullVersion"'!g' \
-e 's!%%ALPINE-VERSION%%!'"$alpine"'!g' \
-e 's!%%ARCH-CASE%%!'"$(sed_escape_rhs "$archCase")"'!g' \
Dockerfile.template > "$version/Dockerfile"
cp -a docker-entrypoint.sh "$version/"
cp -a dockerd-entrypoint.sh "$version/dind/"
for variant in git dind; do
sed -r \
-e 's!%%VERSION%%!'"$version"'!g' \
"Dockerfile-$variant.template" > "$version/$variant/Dockerfile"
done
travisEnv='\n - VERSION='"$version$travisEnv"
done
travis="$(awk -v 'RS=\n\n' '$1 == "env:" { $0 = "env:'"$travisEnv"'" } { printf "%s%s", $0, RS }' .travis.yml)"
echo "$travis" > .travis.yml
|
skoblenick/docker
|
update.sh
|
Shell
|
apache-2.0
| 3,010 |
#!/bin/bash
EXEDIR="../../../build/bin"
EXECUTION="cc"
DATADIR="/data/gunrock_dataset/large"
DATAHUGE="/data/gunrock_dataset/huge"
SETTING[0]="--iteration-num=10 --in-sizing=1.1 --quick"
NAME[0]="cit-Patents"
NAME[1]="soc-LiveJournal1"
NAME[2]="soc-twitter-2010"
NAME[3]="uk-2002"
NAME[4]="uk-2005"
NAME[5]="kron_g500-logn21"
NAME[6]="twitter_rv.net"
NAME[7]="rmat_n24_e16"
GRAPH[0]="market $DATADIR/${NAME[0]}/${NAME[0]}.mtx"
GRAPH[1]="market $DATADIR/${NAME[1]}/${NAME[1]}.mtx"
GRAPH[2]="market $DATADIR/${NAME[2]}/${NAME[2]}.mtx"
GRAPH[3]="market $DATADIR/${NAME[3]}/${NAME[3]}.mtx"
GRAPH[4]="market $DATADIR/${NAME[4]}/${NAME[4]}.mtx"
GRAPH[5]="market $DATADIR/${NAME[5]}/${NAME[5]}.mtx"
GRAPH[6]="market $DATAHUGE/${NAME[6]}/${NAME[6]}.mtx --64bit-SizeT"
GRAPH[7]="grmat --rmat_scale=24 --rmat_edgefactor=16"
mkdir -p eval
DEVICE="0"
for j in {0..0}
do
for i in {0..7}
do
echo $EXECUTION ${GRAPH[$i]} ${SETTING[$j]} --device=$DEVICE --jsondir=./eval/
$EXEDIR/$EXECUTION ${GRAPH[$i]} ${SETTING[$j]} --device=$DEVICE --jsondir=./eval/ > ./eval/${NAME[$i]}.$EXECUTION.output.txt
sleep 1
done
done
|
gunrock/gunrock
|
scripts/v0.5.x/performance/cc-test.sh
|
Shell
|
apache-2.0
| 1,117 |
#!/usr/bin/env bash
source $(dirname $0)/common.sh
if [ $# -eq 2 ]; then
yes | pv -qL $2 | $kafka_home/bin/kafka-console-producer.sh --topic $1 --broker-list $all_brokers
else
echo "Usage: "$(basename $0)" <topic> <rate eg 4,1k>"
fi
|
imarios/spark-steaming-with-kafka
|
scripts/auto_producer.sh
|
Shell
|
apache-2.0
| 241 |
#!/bin/sh
mvn dependency:build-classpath -Dmdep.outputFile=classpath
export CLASSPATH=target/classes:`cat classpath`
export log4jfile=$(pwd)/src/main/resources/stand-alone-log4j.xml
echo $log4jfile
java -cp $CLASSPATH -Dlog4j.configuration=file://$log4jfile -Xmx2024m -Xms2024m com.outbrain.gruffalo.StandaloneGruffaloServer
|
outbrain/gruffalo
|
gruffalo-standalone.sh
|
Shell
|
apache-2.0
| 326 |
#!/bin/bash
for i in {1..9}
do
    python testyida9at.py > yrun$i
done
|
dariox2/CADL
|
test/ygena.sh
|
Shell
|
apache-2.0
| 277 |
#!/usr/bin/env bash
#---------------------------------------------------------------------------
# Copyright 2011-2017 The Open Source Electronic Health Record Agent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
# Make sure we are root
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" 1>&2
exit 1
fi
# Are we running on a local repo? If so, don't clone the "VistA" repo again!
currentDir=$(dirname "$(readlink -f "$0")")
parentDir=$(dirname $currentDir)
parentdirname=$(basename $parentDir)
if [ "$parentdirname" = "Install" ]; then
localVistARepo="true"
fi
# Options
# instance = name of instance
# used http://rsalveti.wordpress.com/2007/04/03/bash-parsing-arguments-with-getopts/
# for guidance
usage()
{
cat << EOF
usage: $0 options
This script will automatically create a VistA instance for GT.M on
Ubuntu
DEFAULTS:
Alternate VistA-M repo = https://github.com/OSEHRA/VistA-M.git
Install EWD.js = false
Create Development Directories = false
Instance Name = OSEHRA
Post Install hook = none
Skip Testing = false
OPTIONS:
-h Show this message
-a Alternate VistA-M repo (zip or git format) (Must be in OSEHRA format)
-r Alternate VistA-M repo branch (git format only)
-b Skip bootstrapping system (used for docker)
-d Create development directories (s & p)
-e Install EWD.js (assumes development directories)
-i Instance name
-p Post install hook (path to script)
-s Skip testing
-y Install YottaDB
EOF
}
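# (hedged example; flag names per the usage text above, the script filename is
# hypothetical since it is not shown here)
# sudo ./install-vista.sh -d -i demo -y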
while getopts ":ha:bedi:p:sr:y" option
do
case $option in
h)
usage
exit 1
;;
a)
repoPath=$OPTARG
;;
b)
bootstrap=false
;;
d)
developmentDirectories=true
;;
e)
installEWD=true
developmentDirectories=true
;;
i)
instance=$(echo $OPTARG |tr '[:upper:]' '[:lower:]')
;;
p)
postInstall=true
postInstallScript=$OPTARG
;;
r)
branch=$OPTARG
;;
s)
skipTests=true
;;
y)
installYottaDB=true
;;
esac
done
# Set defaults for options
if [[ -z $repoPath ]]; then
repoPath="https://github.com/OSEHRA/VistA-M.git"
fi
if [[ -z $bootstrap ]]; then
bootstrap=true
fi
if [[ -z $developmentDirectories ]]; then
developmentDirectories=false
fi
if [[ -z $installEWD ]]; then
installEWD=false
fi
if [[ -z $instance ]]; then
instance=osehra
fi
if [[ -z $postInstall ]]; then
postInstall=false
fi
if [ -z $skipTests ]; then
skipTests=false
fi
if [ -z $installYottaDB ]; then
installYottaDB=false
fi
# Summarize options
echo "Using $repoPath for routines and globals"
echo "Create development directories: $developmentDirectories"
echo "Installing an instance named: $instance"
echo "Installing EWD.js: $installEWD"
echo "Post install hook: $postInstall"
echo "Skip Testing: $skipTests"
echo "Skip bootstrap: $bootstrap"
echo "Install YottaDB: $installYottaDB"
echo "Running on local repo: $localVistARepo"
# Get primary username if using sudo, default to $username if not sudo'd
if $bootstrap; then
if [[ -n "$SUDO_USER" ]]; then
primaryuser=$SUDO_USER
elif [[ -n "$USERNAME" ]]; then
primaryuser=$USERNAME
else
echo Cannot find a suitable username to add to VistA group
exit 1
fi
else
primaryuser="root"
fi
echo This script will add $primaryuser to the VistA group
# Abort provisioning if it appears that an instance is already installed.
test -d /home/$instance/g &&
{ echo "VistA already Installed. Aborting."; exit 0; }
# control interactivity of debian tools
export DEBIAN_FRONTEND="noninteractive"
# extra utils - used for cmake and dashboards and initial clones
# Note: Amazon EC2 requires two apt-get update commands to get everything
if $bootstrap; then
echo "Updating operating system"
apt-get update -qq > /dev/null
apt-get update -qq > /dev/null
apt-get install -qq -y build-essential cmake-curses-gui git dos2unix daemon unzip > /dev/null
fi
# Clone repos - Dashboard
if ! $skipTests; then
cd /usr/local/src
rm -rf VistA-Dashboard
git clone -q https://github.com/OSEHRA/VistA -b dashboard VistA-Dashboard
fi
# See if vagrant folder exists if it does use it. if it doesn't clone the repo
if [ -d /vagrant ]; then
scriptdir=/vagrant
# Fix line endings
  find /vagrant -name "*.sh" -type f -print0 | xargs -0 dos2unix > /dev/null 2>&1
dos2unix /vagrant/EWD/etc/init.d/ewdjs > /dev/null 2>&1
dos2unix /vagrant/GTM/etc/init.d/vista > /dev/null 2>&1
dos2unix /vagrant/GTM/etc/xinetd.d/vista-rpcbroker > /dev/null 2>&1
dos2unix /vagrant/GTM/etc/xinetd.d/vista-vistalink > /dev/null 2>&1
dos2unix /vagrant/GTM/gtminstall_SHA1 > /dev/null 2>&1
else
if $bootstrap; then
if $localVistARepo; then
scriptdir=$parentDir
else
git clone -q https://github.com/OSEHRA/VistA
scriptdir=/usr/local/src/VistA/Scripts/Install
fi
else
scriptdir=/opt/vista
fi
fi
# bootstrap the system
if $bootstrap; then
cd $scriptdir
./Ubuntu/bootstrapUbuntuServer.sh
else
# move back to the /opt/vista directory
cd /opt/vista
fi
# Ensure scripts know if we are RHEL like or Ubuntu like
export ubuntu=true;
# Install GT.M or YottaDB (only option for Ubuntu)
cd GTM
if $bootstrap; then
if $installYottaDB; then
./install.sh -y
else
./install.sh
fi
else
if $installYottaDB; then
./install.sh -s -y
else
./install.sh -s
fi
fi
# Create the VistA instance
if $bootstrap; then
if $installYottaDB; then
./createVistaInstance.sh -i $instance -y
else
./createVistaInstance.sh -i $instance
fi
else
if $installYottaDB; then
./createVistaInstance.sh -i $instance -f -y
else
./createVistaInstance.sh -i $instance -f
fi
fi
# Modify the primary user to be able to use the VistA instance
usermod -a -G $instance $primaryuser
chmod g+x /home/$instance
# Setup environment variables so the dashboard can build
# we have to assume $basedir, since sourcing this script defines it for the
# commands that follow
source /home/$instance/etc/env
# Get running user's home directory
# http://stackoverflow.com/questions/7358611/bash-get-users-home-directory-when-they-run-a-script-as-root
if $bootstrap; then
USER_HOME=$(getent passwd $SUDO_USER | cut -d: -f6)
else
USER_HOME=/root
fi
# source env script during running user's login
echo "source $basedir/etc/env" >> $USER_HOME/.bashrc
# Build a dashboard and run the tests to verify installation
# These use the Dashboard branch of the VistA repository
# The dashboard will clone VistA and VistA-M repos
# run this as the $instance user
if $skipTests; then
# Clone VistA-M repo
cd /usr/local/src
if [[ $repoPath == *.git ]]; then
if ! [ -z $branch ]; then
git clone --depth 1 $repoPath -b $branch VistA-Source
else
git clone --depth 1 $repoPath VistA-Source
fi
else
curl -fsSL --progress-bar $repoPath -o VistA-M-master.zip
unzip -q VistA-M-master.zip
rm VistA-M-master.zip
dir=$(ls -1)
mv $dir VistA-Source
fi
# Go back to the $basedir
cd $basedir
# Perform the import
su $instance -c "source $basedir/etc/env && $scriptdir/GTM/importVistA.sh"
# Run ZTMGRSET accepting the defaults
su $instance -c "mumps -run %XCMD 'D ^ZTMGRSET' << EOF
8
Y
EOF"
else
# Attempt to bypass huge git clone by getting the zip files and unzipping them where they go
su $instance -c "source $basedir/etc/env && mkdir -p $basedir/Dashboard"
cd $basedir/Dashboard
echo "Downloading OSEHRA VistA"
curl -fsSL --progress-bar https://github.com/OSEHRA/VistA/archive/master.zip -o VistA-master.zip
unzip -q VistA-master.zip
rm VistA-master.zip
mv VistA-master VistA
echo "Downloading OSEHRA VistA-M"
curl -fsSL --progress-bar https://github.com/OSEHRA/VistA-M/archive/master.zip -o VistA-M-master.zip
unzip -q VistA-M-master.zip
rm VistA-M-master.zip
mv VistA-M-master VistA-M
# create random string for build identification
# source: http://ubuntuforums.org/showthread.php?t=1775099&p=10901169#post10901169
export buildid=`tr -dc "[:alpha:]" < /dev/urandom | head -c 8`
# Import VistA and run tests using OSEHRA automated testing framework
su $instance -c "source $basedir/etc/env && ctest -S $scriptdir/Ubuntu/test.cmake -V"
# Tell users of their build id
echo "Your build id is: $buildid you will need this to identify your build on the VistA dashboard"
# Compile routines
echo "Compiling routines"
cd $basedir/r/$gtmver
for routine in $basedir/r/*.m; do
mumps ${routine} >> $basedir/log/compile.log 2>&1
done
echo "Done compiling routines"
fi
# Enable journaling
su $instance -c "source $basedir/etc/env && $basedir/bin/enableJournal.sh"
# if we are running on docker we must shutdown gracefully or else corruption will occur
# there is also no need to restart xinetd if we are running in docker as we are going to
# shut it down
if $bootstrap; then
# Restart xinetd
service xinetd restart
else
service ${instance}vista stop
fi
# Add p and s directories to gtmroutines environment variable
if $developmentDirectories; then
su $instance -c "mkdir $basedir/{p,p/$gtmver,s,s/$gtmver}"
perl -pi -e 's#export gtmroutines=\"#export gtmroutines=\"\$basedir/p/\$gtmver\(\$basedir/p\) \$basedir/s/\$gtmver\(\$basedir/s\) #' $basedir/etc/env
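# e.g. assuming etc/env contains a line like
#   export gtmroutines="$basedir/r/$gtmver($basedir/r) ..."
# the substitution above prepends the new directories, giving (sketch):
#   export gtmroutines="$basedir/p/$gtmver($basedir/p) $basedir/s/$gtmver($basedir/s) $basedir/r/$gtmver($basedir/r) ..."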
fi
# Install EWD.js
if $installEWD; then
cd $scriptdir/EWD
./ewdjs.sh
cd $basedir
fi
# Post install hook
if $postInstall; then
su $instance -c "source $basedir/etc/env && $postInstallScript"
fi
# Ensure group permissions are correct
chmod -R g+rw /home/$instance
|
shabiel/VistA
|
Scripts/Install/Ubuntu/autoInstaller.sh
|
Shell
|
apache-2.0
| 10,749 |
#!/bin/sh
spec=`ls tools/tizen/*.spec`
version=`rpm --query --queryformat '%{version}\n' --specfile $spec`
name=iotivity
name=`echo $name|cut -d" " -f 1`
version=`echo $version|cut -d" " -f 1`
rm -rf $name-$version
builddir=`pwd`
sourcedir=`pwd`
echo `pwd`
mkdir ./tmp
mkdir ./tmp/extlibs/
mkdir ./tmp/packaging
cp -R ./build_common $sourcedir/tmp
cp -R ./examples $sourcedir/tmp
cp -R ./extlibs/cereal $sourcedir/tmp/extlibs
cp -R ./extlibs/cjson $sourcedir/tmp/extlibs
cp -R ./extlibs/tinydtls $sourcedir/tmp/extlibs
cp -R ./extlibs/timer $sourcedir/tmp/extlibs
cp -R ./extlibs/rapidxml $sourcedir/tmp/extlibs
cp -R ./resource $sourcedir/tmp
cp -R ./service $sourcedir/tmp
cp ./extra_options.scons $sourcedir/tmp
cp ./tools/tizen/*.spec ./tmp/packaging
cp ./SConstruct ./tmp
cp ./tools/tizen/*.rpm ./tmp
cp ./tools/tizen/.gbs.conf ./tmp
cd $sourcedir/tmp
echo `pwd`
rm -rf ./extlibs/cereal/cereal/.git*
# Initialize Git repository
if [ ! -d .git ]; then
git init ./
git config user.email "[email protected]"
git config user.name "Your Name"
git add ./
git commit -m "Initial commit"
fi
echo "Calling core gbs build command"
gbscommand="gbs build -A armv7l -B ~/GBS-ROOT --include-all --repository ./"
echo $gbscommand
if eval $gbscommand; then
echo "Build is successful"
else
echo "Build failed!"
fi
cd $sourcedir
rm -rf $sourcedir/tmp
|
kartben/iotivity
|
gbsbuild.sh
|
Shell
|
apache-2.0
| 1,368 |
# Sample script to finetune RAG using Ray for distributed retrieval.
# Add parent directory to python path to access lightning_base.py
export PYTHONPATH="../":"${PYTHONPATH}"
# Creates the custom knowledge base
python use_own_knowledge_dataset.py \
--csv_path /DIR/SQUAD-KB/squad-kb.csv \
--output_dir /DIR/SQUAD-KB
# Start a single-node Ray cluster.
ray start --head
# A sample finetuning run, you need to specify data_dir, output_dir and model_name_or_path
# run ./examples/rag/finetune_rag_ray.sh --help to see all the possible options
python finetune_rag.py \
--data_dir /DIR/squad-training-data \
--output_dir /DIR/model_checkpoints \
--model_name_or_path facebook/rag-token-base \
--model_type rag_token \
--fp16 \
--gpus 2 \
--profile \
--do_train \
--end2end \
--do_predict \
--n_val -1 \
--train_batch_size 4 \
--eval_batch_size 1 \
--max_source_length 128 \
--max_target_length 25 \
--val_max_target_length 25 \
--test_max_target_length 25 \
--label_smoothing 0.1 \
--dropout 0.1 \
--attention_dropout 0.1 \
--weight_decay 0.001 \
--adam_epsilon 1e-08 \
--max_grad_norm 0.1 \
--lr_scheduler polynomial \
--learning_rate 3e-05 \
--num_train_epochs 10 \
--warmup_steps 500 \
--gradient_accumulation_steps 8 \
--distributed_retriever ray \
--num_retrieval_workers 4 \
--passages_path /DIR/SQUAD-KB/my_knowledge_dataset \
--index_path /DIR/SQUAD-KB/my_knowledge_dataset_hnsw_index.faiss \
--index_name custom \
--context_encoder_name facebook/dpr-ctx_encoder-multiset-base \
--csv_path /DIR/SQUAD-KB/squad-kb.csv \
--index_gpus 1 \
--gpu_order [5,6,7,8,9,0,1,2,3,4] \
--shard_dir ./test_dir/kb-shards \
--indexing_freq 500
# Stop the Ray cluster.
ray stop
# This script was used to test the SQuAD data.
# Change the dir parameter according to your preference.
# Please use the same device order when running CUDA_VISIBLE_DEVICES=5,6,7,8,9,0,1,2,3,4 sh finetune_rag_ray_end2end.sh
|
huggingface/transformers
|
examples/research_projects/rag-end2end-retriever/finetune_rag_ray_end2end.sh
|
Shell
|
apache-2.0
| 2,069 |
#!/bin/sh
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.52819428378e-05 --simlength 8000 --debug 0 --seed 979045008 --reps 1 --samplefraction 0.5 --migrationfraction 0.0980338783279 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 2.80438669959e-05 --simlength 8000 --debug 0 --seed 329909555 --reps 1 --samplefraction 0.5 --migrationfraction 0.087691307476 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 5.22954410842e-05 --simlength 8000 --debug 0 --seed 1407159156 --reps 1 --samplefraction 0.5 --migrationfraction 0.0862254706715 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 2.11933679666e-05 --simlength 8000 --debug 0 --seed 1976083982 --reps 1 --samplefraction 0.5 --migrationfraction 0.0624053173014 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 9.78700749198e-05 --simlength 8000 --debug 0 --seed 273477123 --reps 1 --samplefraction 0.5 --migrationfraction 0.0944413079363 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 9.41150458684e-05 --simlength 8000 --debug 0 --seed 899989959 --reps 1 --samplefraction 0.5 --migrationfraction 0.0887194010124 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 2.41789355362e-05 --simlength 8000 --debug 0 --seed 1519913915 --reps 1 --samplefraction 0.5 --migrationfraction 0.0848190588831 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 2.90824644623e-05 --simlength 8000 --debug 0 --seed 1489841398 --reps 1 --samplefraction 0.5 --migrationfraction 0.0782513882297 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 8.39520033158e-05 --simlength 8000 --debug 0 --seed 1788362892 --reps 1 --samplefraction 0.5 --migrationfraction 0.0741358210314 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 8.00585134962e-05 --simlength 8000 --debug 0 --seed 596944272 --reps 1 --samplefraction 0.5 --migrationfraction 0.0632857285539 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 4.93497731882e-05 --simlength 8000 --debug 0 --seed 230187714 --reps 1 --samplefraction 0.5 --migrationfraction 0.0762050480282 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.624653527e-05 --simlength 8000 --debug 0 --seed 782092527 --reps 1 --samplefraction 0.5 --migrationfraction 0.0801744814483 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.96513128333e-05 --simlength 8000 --debug 0 --seed 13317663 --reps 1 --samplefraction 0.5 --migrationfraction 0.0619286327935 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 8.3046080159e-05 --simlength 8000 --debug 0 --seed 1982430117 --reps 1 --samplefraction 0.5 --migrationfraction 0.0665086782404 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.97674387759e-05 --simlength 8000 --debug 0 --seed 980114757 --reps 1 --samplefraction 0.5 --migrationfraction 0.0516750954888 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 3.36687036736e-05 --simlength 8000 --debug 0 --seed 1570321283 --reps 1 --samplefraction 0.5 --migrationfraction 0.0713938297741 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 8.70101709125e-05 --simlength 8000 --debug 0 --seed 1749465345 --reps 1 --samplefraction 0.5 --migrationfraction 0.0895476141861 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 3.44153187808e-05 --simlength 8000 --debug 0 --seed 619639116 --reps 1 --samplefraction 0.5 --migrationfraction 0.064835223559 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.60875076904e-05 --simlength 8000 --debug 0 --seed 1899323474 --reps 1 --samplefraction 0.5 --migrationfraction 0.0978080454895 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 9.67814466947e-05 --simlength 8000 --debug 0 --seed 384444696 --reps 1 --samplefraction 0.5 --migrationfraction 0.0513842509452 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 6.50849775403e-05 --simlength 8000 --debug 0 --seed 658088976 --reps 1 --samplefraction 0.5 --migrationfraction 0.0768184241233 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 1.45426592124e-05 --simlength 8000 --debug 0 --seed 7594921 --reps 1 --samplefraction 0.5 --migrationfraction 0.0644657332637 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.84307089045e-05 --simlength 8000 --debug 0 --seed 735260366 --reps 1 --samplefraction 0.5 --migrationfraction 0.0764123815516 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 5.65910215027e-05 --simlength 8000 --debug 0 --seed 75464785 --reps 1 --samplefraction 0.5 --migrationfraction 0.0763575676691 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 9.33746475376e-05 --simlength 8000 --debug 0 --seed 877426760 --reps 1 --samplefraction 0.5 --migrationfraction 0.0930510430348 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 3.33880479439e-05 --simlength 8000 --debug 0 --seed 364303987 --reps 1 --samplefraction 0.5 --migrationfraction 0.0962436688739 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.4181693641e-05 --simlength 8000 --debug 0 --seed 1930247629 --reps 1 --samplefraction 0.5 --migrationfraction 0.0548762862397 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 3.56492357823e-05 --simlength 8000 --debug 0 --seed 1945441712 --reps 1 --samplefraction 0.5 --migrationfraction 0.0795648206574 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 8.4966400587e-05 --simlength 8000 --debug 0 --seed 1240529158 --reps 1 --samplefraction 0.5 --migrationfraction 0.0921056984078 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 8.31869682288e-05 --simlength 8000 --debug 0 --seed 1555520994 --reps 1 --samplefraction 0.5 --migrationfraction 0.0545483518957 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 5.41988333656e-05 --simlength 8000 --debug 0 --seed 2086853854 --reps 1 --samplefraction 0.5 --migrationfraction 0.0509336088914 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.6478950426e-05 --simlength 8000 --debug 0 --seed 1809075261 --reps 1 --samplefraction 0.5 --migrationfraction 0.0723032961777 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.66847394582e-05 --simlength 8000 --debug 0 --seed 298687735 --reps 1 --samplefraction 0.5 --migrationfraction 0.0505108298011 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 4.34600768267e-05 --simlength 8000 --debug 0 --seed 1827859891 --reps 1 --samplefraction 0.5 --migrationfraction 0.0701058853729 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 8.7058055388e-05 --simlength 8000 --debug 0 --seed 1772459712 --reps 1 --samplefraction 0.5 --migrationfraction 0.0881818740342 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 8.10405344833e-05 --simlength 8000 --debug 0 --seed 179173669 --reps 1 --samplefraction 0.5 --migrationfraction 0.0589882279233 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 4.81699519265e-05 --simlength 8000 --debug 0 --seed 698446729 --reps 1 --samplefraction 0.5 --migrationfraction 0.094427416051 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 6.758423731e-05 --simlength 8000 --debug 0 --seed 185672281 --reps 1 --samplefraction 0.5 --migrationfraction 0.0526111475646 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 4.58296405791e-05 --simlength 8000 --debug 0 --seed 763754230 --reps 1 --samplefraction 0.5 --migrationfraction 0.0543972038232 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 3.27839572313e-05 --simlength 8000 --debug 0 --seed 473070479 --reps 1 --samplefraction 0.5 --migrationfraction 0.0639236942542 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 5.84528264435e-05 --simlength 8000 --debug 0 --seed 778667019 --reps 1 --samplefraction 0.5 --migrationfraction 0.0509283510611 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 3.05304330718e-05 --simlength 8000 --debug 0 --seed 1001211919 --reps 1 --samplefraction 0.5 --migrationfraction 0.0639841403288 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 7.85683383151e-05 --simlength 8000 --debug 0 --seed 29888403 --reps 1 --samplefraction 0.5 --migrationfraction 0.0923118100689 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 9.81600055064e-05 --simlength 8000 --debug 0 --seed 1298984835 --reps 1 --samplefraction 0.5 --migrationfraction 0.0880982057094 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 9.28899125636e-05 --simlength 8000 --debug 0 --seed 1151617042 --reps 1 --samplefraction 0.5 --migrationfraction 0.0577801571779 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 3.37014323443e-05 --simlength 8000 --debug 0 --seed 110026343 --reps 1 --samplefraction 0.5 --migrationfraction 0.0775241234051 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 5.97484343108e-05 --simlength 8000 --debug 0 --seed 1440534251 --reps 1 --samplefraction 0.5 --migrationfraction 0.0743896911261 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 4.28163291977e-05 --simlength 8000 --debug 0 --seed 401302541 --reps 1 --samplefraction 0.5 --migrationfraction 0.052123163016 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 9.96413408892e-05 --simlength 8000 --debug 0 --seed 818587329 --reps 1 --samplefraction 0.5 --migrationfraction 0.0963638518834 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
sim-seriationct-networkmodel.py --dbhost localhost --dbport 27017 --experiment gc-late-lineage-split-2 --popsize 250 --maxinittraits 5 --numloci 3 --innovrate 1.12122317563e-05 --simlength 8000 --debug 0 --seed 398645963 --reps 1 --samplefraction 0.5 --migrationfraction 0.076548899738 --devel 0 --networkmodel network-split-late/gc-late-lineage-split-2-networkmodel.zip
|
mmadsen/experiment-seriation-classification
|
experiments/sc-2/jobs/job-gc-late-lineage-split-2-c09b50c1-43e2-4b4d-8033-e1c22ea5b5cb.sh
|
Shell
|
apache-2.0
| 18,663 |
#!/bin/bash
# Update the server
sudo apt-get update && sudo apt-get -qy upgrade
# Install the main services
sudo apt-get -qy install nginx-full php5-fpm php5-cli php5-mcrypt git mysql-server php5-mysql php5-curl php5-imagick php5-intl php5-memcache php-pear php5-dev php5-xdebug mcrypt
# Will prompt for the MySQL root password - provide it and save it
sudo mysql_install_db
sudo mysql_secure_installation
# 1 - Prompts for the root password
# 2 - Asks whether to change it; no point, since you just set it. Answer n
# 3 - Asks to remove anonymous access to MySQL. Answer Y
# 4 - Asks to disallow remote login with the root account. Answer Y
# 5 - Asks to remove the test database and access to it. Answer Y
# 6 - Asks to reload the MySQL privilege tables. Answer Y
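# Note: the same hardening can also be done non-interactively; a minimal
# sketch (assumes the root password chosen above is in $MYSQL_ROOT_PASS):
#
# sudo mysql -u root -p"$MYSQL_ROOT_PASS" <<'SQL'
# DELETE FROM mysql.user WHERE User='';
# DELETE FROM mysql.user WHERE User='root' AND Host NOT IN ('localhost','127.0.0.1','::1');
# DROP DATABASE IF EXISTS test;
# FLUSH PRIVILEGES;
# SQL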
# Tune the nginx server
# first, back up the original file
sudo cp /etc/nginx/nginx.conf /etc/nginx/nginx.conf.original
# Now we can remove the original
sudo rm -f /etc/nginx/nginx.conf
# Check the number of processors on the machine
NUM_PROC=`grep ^processor /proc/cpuinfo | wc -l`
WORKER_PROCESS=$((NUM_PROC * 1024))
sudo sed -i -e 's/pm.max_children = 5/pm.max_children = 20/' /etc/php5/fpm/pool.d/www.conf
sudo sed -i -e 's/;log_level = notice/log_level = warning/' /etc/php5/fpm/pool.d/www.conf
TEXTO='\n
emergency_restart_threshold\t10\n
emergency_restart_interval\t1m\n
process_control_timeout\t10s\n'
echo -e $TEXTO | sudo tee -a /etc/php5/fpm/pool.d/www.conf &> /dev/null
TEXTO='user www-data www-data;\n
worker_processes '$NUM_PROC';\n
pid /run/nginx.pid;\n
\n
error_log /var/log/nginx/error.log error;\n
access_log /var/log/nginx/access.log;\n
\n
events {\n
\tworker_connections '$WORKER_PROCESS';\n
\tmulti_accept on;\n
}\n
\n
worker_rlimit_nofile 40000;\n
\n
location ~* \.(jpg|jpeg|gif|png|css|js|ico|xml)$ {\n
\taccess_log off;\n
\tlog_not_found off;\n
\texpires 30d;\n
}\n
\n
http {\n
\tinclude\t/etc/nginx/mime.types;\n
\tdefault_type\tapplication/octet-stream;\n
\n
\tclient_body_timeout\t3m;\n
\tclient_header_buffer_size\t1k;\n
\tclient_body_buffer_size\t16K;\n
\tclient_max_body_size\t8m;\n
\tlarge_client_header_buffers\t4 4k;\n
\tsend_timeout\t3m;\n
\n
\tgzip\ton;\n
\tgzip_comp_level\t2;\n
\tgzip_min_length\t1024;\n
\tgzip_proxied\texpired no-cache no-store private auth;\n
\tgzip_types\ttext/plain application/x-javascript text/xml text/css application/xml;\n
\tgzip_buffers\t4 8k;\n
\tgzip_disable "MSIE [1-6]\.";
\n
\toutput_buffers\t1 32k;\n
\tpostpone_output\t1460;\n
\tsendfile\ton;\n
\ttcp_nopush\ton;\n
\ttcp_nodelay\ton;\n
\tsend_lowat\t12000;\n
\tkeepalive_timeout\t75 20;\n
\ttypes_hash_max_size\t2048;\n
\tserver_tokens\toff;\n
\n
\tinclude /etc/nginx/conf.d/*.conf;\n
\tinclude /etc/nginx/sites-enabled/*;\n
}\n
\n
location ~ .php$ {\n
\t# connect to a unix domain-socket:\n
\tfastcgi_pass unix:/var/run/php5-fpm.sock;\n
\tfastcgi_index index.php;\n
\n
\tfastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;\n
\tfastcgi_param SCRIPT_NAME $fastcgi_script_name;\n
\n
\tfastcgi_buffer_size 128k;\n
\tfastcgi_buffers 256 16k;\n
\tfastcgi_busy_buffers_size 256k;\n
\tfastcgi_temp_file_write_size 256k;\n
\n
\t# This file is present on Debian systems..\n
\tinclude fastcgi_params;\n
}'
echo -e $TEXTO | sudo tee /etc/nginx/nginx.conf &> /dev/null
#
# # Install the SSH server
# sudo apt-get install -qy openssh-server
# # Back up the original configuration file
# sudo mv /etc/ssh/sshd_config /etc/ssh/sshd_config.original
#
# # Configure the SSH server
# TEXTO='Port 22\n
# PermitRootLogin no\n
# PermitEmptyPasswords no\n
# Protocol 2\n
# \n
# HostKey /etc/ssh/ssh_host_rsa_key\n
# HostKey /etc/ssh/ssh_host_dsa_key\n
# HostKey /etc/ssh/ssh_host_ecdsa_key\n
# HostKey /etc/ssh/ssh_host_ed25519_key\n
# \n
# UsePrivilegeSeparation yes\n
# \n
# KeyRegenerationInterval 3600\n
# ServerKeyBits 1024\n
# \n
# SyslogFacility AUTH\n
# LogLevel INFO\n
# \n
# Banner none\n
# PrintLastLog yes\n
# AllowGroups ssh\n
# LoginGraceTime 120\n
# StrictModes yes\n
# \n
# RSAAuthentication yes\n
# PubkeyAuthentication yes\n
# \n
# IgnoreRHosts yes\n
# RhostsRSAAuthentication no\n
# HostbasedAuthentication no\n
# IgnoreUserKnownHosts no\n
# \n
# ChallengeResponseAuthentication no\n
# X11Forwarding yes\n
# X11DisplayOffset 10\n
# PrintMotd no\n
# TCPKeepAlive yes\n
# AcceptEnv LANG LC_*\n
# \n
# Subsytem sftp /usr/lib/openssh/sftp-server\n
# \n
# UsePAM yes\n'
#
# echo -e $TEXTO | sudo tee /etc/ssh/sshd_config &> /dev/null
#
# # Install Composer
# curl -sS https://getcomposer.org/installer | php
# sudo mv composer.phar /usr/local/bin/composer
#
# composer global require 'laravel/installer=~1.1'
# sudo ln -s ~/.composer/vendor/laravel/installer/laravel /usr/local/bin/laravel
#
#
# # Now let's harden the server
#
# # Create a maintenance user - WE WILL NOT USE ROOT
# # User name = peduser
# sudo useradd -G www-data,sudo,adm,ssh -m peduser
# # Set a password for the user
# sudo passwd peduser
#
# # Add some firewall settings and protection against attacks
# echo -e '\nnet.ipv4.conf.all.accept_redirects = 0\n' | sudo tee -a /etc/sysctl.conf &> /dev/null
|
rscorrea/montaServerDevWeb
|
script-install.sh
|
Shell
|
apache-2.0
| 5,410 |
#! /bin/bash
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
#=================================================================#
# System Required: CentOS 6,7, Debian, Ubuntu #
# Description: One click Install ShadowsocksR Server #
# Author: 91yun #
# Thanks: @breakwa11 <https://twitter.com/breakwa11> #
# Thanks: @Teddysun <[email protected]> #
# Intro: https://shadowsocks.be/9.html #
#=================================================================#
rm -f SSR
clear
yum -y install git
echo -e "\033[34m================================================================\033[0m
\033[31m Welcome to the SSR one-click install script \033[0m
\033[31m Yaohuo forum: http://yaohuo.me - I'M Ximen Chuixue \033[0m
\033[31m About to start the installation... (for Yaohuo forum members) \033[0m
\033[34m================================================================\033[0m";
echo
echo
#Current folder
cur_dir=`pwd`
# Get public IP address
IP=$(ip addr | egrep -o '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | egrep -v "^192\.168|^172\.1[6-9]\.|^172\.2[0-9]\.|^172\.3[0-2]\.|^10\.|^127\.|^255\.|^0\." | head -n 1)
if [[ "$IP" = "" ]]; then
IP=$(wget -qO- -t1 -T2 ipv4.icanhazip.com)
fi
# Make sure only root can run our script
function rootness(){
if [[ $EUID -ne 0 ]]; then
echo "Error:This script must be run as root!" 1>&2
exit 1
fi
}
# Check OS
function checkos(){
if [ -f /etc/redhat-release ];then
OS='CentOS'
elif [ ! -z "`cat /etc/issue | grep bian`" ];then
OS='Debian'
elif [ ! -z "`cat /etc/issue | grep Ubuntu`" ];then
OS='Ubuntu'
else
echo "Not support OS, Please reinstall OS and retry!"
exit 1
fi
}
# Get version
function getversion(){
if [[ -s /etc/redhat-release ]];then
grep -oE "[0-9.]+" /etc/redhat-release
else
grep -oE "[0-9.]+" /etc/issue
fi
}
# CentOS version
function centosversion(){
local code=$1
local version="`getversion`"
local main_ver=${version%%.*}
if [ $main_ver == $code ];then
return 0
else
return 1
fi
}
# Disable selinux
function disable_selinux(){
if [ -s /etc/selinux/config ] && grep 'SELINUX=enforcing' /etc/selinux/config; then
sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/selinux/config
setenforce 0
fi
}
# Pre-installation settings
function pre_install(){
# Not support CentOS 5
if centosversion 5; then
echo "Not support CentOS 5, please change OS to CentOS 6+/Debian 7+/Ubuntu 12+ and retry."
exit 1
fi
# Set ShadowsocksR config password
echo "่ฏท่พๅ
ฅSSR่ฟๆฅๅฏ็ :"
read -p "(้ป่ฎคๅฏ็ : yaohuo520):" shadowsockspwd
[ -z "$shadowsockspwd" ] && shadowsockspwd="yaohuo520"
echo
echo "---------------------------"
echo "password = $shadowsockspwd"
echo "---------------------------"
echo
# Set ShadowsocksR config port
while true
do
echo -e "่ฏท่พๅ
ฅSSR่ฟๆฅ็ซฏๅฃ,ไธ่ฎพ็ฝฎๅฐ้ป่ฎค138็ซฏๅฃ:"
read -p "(้ป่ฎค่ชๅจ่ฎพ็ฝฎSSๅ
ๆต็ซฏๅฃไธบ138):" shadowsocksport
[ -z "$shadowsocksport" ] && shadowsocksport="138"
expr $shadowsocksport + 0 &>/dev/null
if [ $? -eq 0 ]; then
if [ $shadowsocksport -ge 1 ] && [ $shadowsocksport -le 65535 ]; then
echo
echo "---------------------------"
echo "port = $shadowsocksport"
echo "---------------------------"
echo
break
else
echo "่พๅ
ฅ้่ฏฏ๏ผ่ฏท่พๅ
ฅ1-65535ไน้ด็ๆฐๅญ๏ผ"
fi
else
echo "่พๅ
ฅ้่ฏฏ๏ผ่ฏท่พๅ
ฅ1-65535ไน้ด็ๆฐๅญ๏ผ"
fi
done
get_char(){
SAVEDSTTY=`stty -g`
stty -echo
stty cbreak
dd if=/dev/tty bs=1 count=1 2> /dev/null
stty -raw
stty echo
stty $SAVEDSTTY
}
echo
echo "Press any key to start...or Press Ctrl+C to cancel"
char=`get_char`
# Install necessary dependencies
if [ "$OS" == 'CentOS' ]; then
yum install -y wget unzip openssl-devel gcc swig python python-devel python-setuptools autoconf libtool libevent git ntpdate
yum install -y m2crypto automake make curl curl-devel zlib-devel perl perl-devel cpio expat-devel gettext-devel
else
apt-get -y update
apt-get -y install python python-dev python-pip python-m2crypto curl wget unzip gcc swig automake make perl cpio build-essential git ntpdate
fi
cd $cur_dir
}
# Download files
function download_files(){
# Download libsodium file
if ! wget --no-check-certificate -O libsodium-1.0.10.tar.gz https://github.com/jedisct1/libsodium/releases/download/1.0.10/libsodium-1.0.10.tar.gz; then
echo "Failed to download libsodium file!"
exit 1
fi
# Download ShadowsocksR file
# if ! wget --no-check-certificate -O manyuser.zip https://github.com/breakwa11/shadowsocks/archive/manyuser.zip; then
# echo "Failed to download ShadowsocksR file!"
# exit 1
# fi
# Download ShadowsocksR chkconfig file
if [ "$OS" == 'CentOS' ]; then
if ! wget --no-check-certificate https://raw.githubusercontent.com/91yun/shadowsocks_install/master/shadowsocksR -O /etc/init.d/shadowsocks; then
echo "Failed to download ShadowsocksR chkconfig file!"
exit 1
fi
else
if ! wget --no-check-certificate https://raw.githubusercontent.com/91yun/shadowsocks_install/master/shadowsocksR-debian -O /etc/init.d/shadowsocks; then
echo "Failed to download ShadowsocksR chkconfig file!"
exit 1
fi
fi
}
# firewall set
function firewall_set(){
echo "firewall set start..."
if centosversion 6; then
/etc/init.d/iptables status > /dev/null 2>&1
if [ $? -eq 0 ]; then
iptables -L -n | grep "${shadowsocksport}" | grep 'ACCEPT' > /dev/null 2>&1
if [ $? -ne 0 ]; then
iptables -I INPUT -m state --state NEW -m tcp -p tcp --dport ${shadowsocksport} -j ACCEPT
iptables -I INPUT -m state --state NEW -m udp -p udp --dport ${shadowsocksport} -j ACCEPT
/etc/init.d/iptables save
/etc/init.d/iptables restart
else
echo "port ${shadowsocksport} has been set up."
fi
else
echo "WARNING: iptables looks like shutdown or not installed, please manually set it if necessary."
fi
elif centosversion 7; then
systemctl status firewalld > /dev/null 2>&1
if [ $? -eq 0 ];then
firewall-cmd --permanent --zone=public --add-port=${shadowsocksport}/tcp
firewall-cmd --permanent --zone=public --add-port=${shadowsocksport}/udp
firewall-cmd --reload
else
echo "Firewalld looks like not running, try to start..."
systemctl start firewalld
if [ $? -eq 0 ];then
firewall-cmd --permanent --zone=public --add-port=${shadowsocksport}/tcp
firewall-cmd --permanent --zone=public --add-port=${shadowsocksport}/udp
firewall-cmd --reload
else
echo "WARNING: Try to start firewalld failed. please enable port ${shadowsocksport} manually if necessary."
fi
fi
fi
echo "firewall set completed..."
}
# Config ShadowsocksR
function config_shadowsocks(){
cat > /etc/shadowsocks.json<<-EOF
{
"server": "0.0.0.0",
"server_ipv6": "::",
"server_port": ${shadowsocksport},
"local_address": "127.0.0.1",
"local_port": 1081,
"password": "${shadowsockspwd}",
"timeout": 120,
"udp_timeout": 60,
"method": "chacha20",
"protocol": "auth_sha1_compatible",
"protocol_param": "",
"obfs": "http_simple_compatible",
"obfs_param": "",
"dns_ipv6": false,
"connect_verbose_info": 0,
"redirect": "",
"fast_open": false,
"workers": 1
}
EOF
}
# Install ShadowsocksR
function install_ss(){
# Install libsodium
tar zxf libsodium-1.0.10.tar.gz
cd $cur_dir/libsodium-1.0.10
./configure && make && make install
echo "/usr/local/lib" > /etc/ld.so.conf.d/local.conf
ldconfig
# Install ShadowsocksR
cd $cur_dir
# unzip -q manyuser.zip
# mv shadowsocks-manyuser/shadowsocks /usr/local/
git clone -b manyuser https://github.com/breakwa11/shadowsocks.git /usr/local/shadowsocks
if [ -f /usr/local/shadowsocks/server.py ]; then
chmod +x /etc/init.d/shadowsocks
# Add run on system start up
if [ "$OS" == 'CentOS' ]; then
chkconfig --add shadowsocks
chkconfig shadowsocks on
else
update-rc.d -f shadowsocks defaults
fi
# Run ShadowsocksR in the background
/etc/init.d/shadowsocks start
clear
echo
echo "ๆญๅไฝ ๏ผshadowsocksrๅฎ่ฃ
ๅฎๆ๏ผ"
echo -e "ๆๅกๅจIP: \033[41;37m ${IP} \033[0m"
echo -e "่ฟ็จ่ฟๆฅ็ซฏๅฃ: \033[41;37m ${shadowsocksport} \033[0m"
echo -e "่ฟ็จ่ฟๆฅๅฏ็ : \033[41;37m ${shadowsockspwd} \033[0m"
echo -e "ๆฌๅฐ็ๅฌIP: \033[41;37m 127.0.0.1 \033[0m"
echo -e "ๆฌๅฐ็ๅฌ็ซฏๅฃ: \033[41;37m 1080 \033[0m"
echo -e "่ฎค่ฏๆนๅผ: \033[41;37m auth_sha1 \033[0m"
echo -e "ๅ่ฎฎ: \033[41;37m http_simple \033[0m"
echo -e "ๅ ๅฏๆนๅผ: \033[41;37m chacha20 \033[0m"
echo
echo "ๆฌข่ฟๆฅ่ฎฟๅฆ็ซ่ฎบๅ:http://yaohuo.me Or https://yaohw.com"
echo "ๅฆๆไฝ ๆณๆนๅ่ฎค่ฏๆนๅผๅๅ่ฎฎ๏ผ่ฏทๅ่็ฝๅ"
echo "https://github.com/breakwa11/shadowsocks-rss/wiki/Server-Setup"
echo
echo "ๅฎ่ฃ
ๅฎๆฏ๏ผๅปไบซๅ่ฟ็งๆๆฆๆๆ๏ผI'm your old friend ่ฅฟ้จๅน้ช"
echo
else
echo "Shadowsocksๅฎ่ฃ
ๅคฑ่ดฅ!"
install_cleanup
exit 1
fi
}
# Switch the system time to Beijing time
function check_datetime(){
rm -rf /etc/localtime
ln -s /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
ntpdate 1.cn.pool.ntp.org
}
# Install cleanup
function install_cleanup(){
cd $cur_dir
rm -f manyuser.zip
rm -rf shadowsocks-manyuser
rm -f libsodium-1.0.10.tar.gz
rm -rf libsodium-1.0.10
}
# Uninstall ShadowsocksR
function uninstall_shadowsocks(){
printf "ไฝ ็กฎๅฎๅธ่ฝฝshadowsocksr๏ผ (y/n) "
printf "\n"
read -p "(Default: n):" answer
if [ -z $answer ]; then
answer="n"
fi
if [ "$answer" = "y" ]; then
/etc/init.d/shadowsocks status > /dev/null 2>&1
if [ $? -eq 0 ]; then
/etc/init.d/shadowsocks stop
fi
checkos
if [ "$OS" == 'CentOS' ]; then
chkconfig --del shadowsocks
else
update-rc.d -f shadowsocks remove
fi
rm -f /etc/shadowsocks.json
rm -f /etc/init.d/shadowsocks
rm -rf /usr/local/shadowsocks
echo "ShadowsocksR uninstall success!"
else
echo "uninstall cancelled, Nothing to do"
fi
}
# Install ShadowsocksR
function install_shadowsocks(){
checkos
rootness
disable_selinux
pre_install
download_files
config_shadowsocks
install_ss
if [ "$OS" == 'CentOS' ]; then
firewall_set > /dev/null 2>&1
fi
check_datetime
install_cleanup
}
# Initialization step
action=$1
[ -z $1 ] && action=install
case "$action" in
install)
install_shadowsocks
;;
uninstall)
uninstall_shadowsocks
;;
*)
echo "Arguments error! [${action} ]"
echo "Usage: `basename $0` {install|uninstall}"
;;
esac
|
bycarl/mypro
|
shadowsockR.sh
|
Shell
|
apache-2.0
| 11,871 |
#!/bin/bash
#
# Copyright (c) 2001-2016 Primeton Technologies, Ltd.
# All rights reserved.
#
# author: ZhongWen Li (mailto:[email protected])
#
CONTEXT_PATH=$(cd $(dirname ${0}); pwd)
source ${CONTEXT_PATH}/../common/env.sh
IMAGE_VERSION="1.0.0"
IMAGE_NAME="oaapp"
#
# main code
#
main() {
echo "${IMAGE_NAME}:${IMAGE_VERSION}"
}
# docker build/tag/push
source ${CONTEXT_PATH}/../common/template.sh
|
Primeton-External/euler-chidi
|
installer/docker/oaapp/build.sh
|
Shell
|
apache-2.0
| 408 |
#! /bin/sh
srcdir="`dirname $0`"
echo >Makefile "SRCDIR := $srcdir"
echo >>Makefile "HOST_GCC := $HOST_GCC"
echo >>Makefile "HOST_AS := $HOST_AS"
echo >>Makefile "SYSROOT := $GLIDIX_SYSROOT"
echo >>Makefile "CFLAGS := -Wall -Werror -fPIC -ggdb -I\$(SRCDIR)/../libddi -O3 -I\$(SRCDIR)/../libgl/include"
# Get the list of drivers
driver_list_tmp="`ls -l $srcdir | grep '^d' | awk 'NF>1{print $NF}'`"
driver_list="`echo $driver_list_tmp`"
# Generate makefile list
so_list=""
for driver in $driver_list
do
so_list="$so_list ${driver}.so"
done
echo >>Makefile "DRIVERS := $so_list"
echo >>Makefile ".PHONY: all install"
echo >>Makefile "all: \$(DRIVERS)"
echo >>Makefile "install:"
echo >>Makefile " @mkdir -p \$(DESTDIR)/usr/lib/ddidrv"
for driver in $driver_list
do
echo >>Makefile " cp ${driver}.so \$(DESTDIR)/usr/lib/ddidrv/${driver}.so"
done
for driver in $driver_list
do
cat $srcdir/driver.mk.in | sed "s/DRIVER_NAME/$driver/g" >> Makefile
done
cat $srcdir/driver.mk.end >> Makefile
|
madd-games/glidix
|
ddi-drivers/subconf.sh
|
Shell
|
bsd-2-clause
| 995 |
#!/bin/sh
export HOME="/home/$USER"
cat /webapp/templates/php-fpm-pool.conf.template | envsubst '$USER' > /etc/php5/fpm/pool.d/www.conf
service php5-fpm start
cat /webapp/templates/nginx.conf.template | envsubst '$JPY_BASE_USER_URL $URL_ID' > /webapp/nginx.conf
nginx -c /webapp/nginx.conf
|
simphony/simphony-remote-docker
|
app_images/filetransfer/container-files/webapp.sh
|
Shell
|
bsd-2-clause
| 290 |
#!/bin/bash
qemu-system-xtensa -M lx60 -m 96M -drive if=pflash,format=raw,file=lx60.flash -serial stdio
|
windelbouwman/ppci-mirror
|
examples/xtensa/wasm_fac/run.sh
|
Shell
|
bsd-2-clause
| 105 |
#!/bin/sh
cd /var/www
exec hhvm -u www -m server
|
ecc12/docker
|
hhvm-server/start.sh
|
Shell
|
bsd-2-clause
| 52 |
#!/bin/bash
pushd ../../VRLib
./build.sh $1
pushd
export BUILD_MODULE=Oculus360VideosSDK
echo "========================== Update "${BUILD_MODULE}" Project ==========================="
android update project -t android-19 -p . -s
if [ -z "$ANDROID_NDK" ]; then
ANDROID_NDK=~/ndk
fi
if [ "$1" == "" ]; then
echo "========================== Build "${BUILD_MODULE}" ==========================="
$ANDROID_NDK/ndk-build -j16 NDK_DEBUG=1 OVR_DEBUG=1
fi
if [ "$1" == "debug" ]; then
echo "========================== Build "${BUILD_MODULE} $1 " ==========================="
$ANDROID_NDK/ndk-build -j16 NDK_DEBUG=1 OVR_DEBUG=1
fi
if [ "$1" == "release" ]; then
echo "========================== Build "${BUILD_MODULE} $1 " ==========================="
$ANDROID_NDK/ndk-build -j16 NDK_DEBUG=0 OVR_DEBUG=0
fi
if [ "$1" == "clean" ]; then
echo "========================== Build "${BUILD_MODULE} $1 " ==========================="
$ANDROID_NDK/ndk-build clean NDK_DEBUG=1
$ANDROID_NDK/ndk-build clean NDK_DEBUG=0
ant clean
fi
|
kexplo/Oculus360VideosSDK
|
build.sh
|
Shell
|
bsd-2-clause
| 1,065 |
#!/bin/bash
PID_FILE=rstat.pid
if [ -f "$PID_FILE" ]; then
while read PID; do
if [ "$(ps -p $PID -o comm=)" = 'ssh' ]; then
kill $PID
fi
done < $PID_FILE
rm -f $PID_FILE
fi
|
sh2/rstat
|
rstat_stop.sh
|
Shell
|
bsd-3-clause
| 221 |
#!/bin/bash
# Create a dashboard using the JSON file given in $1 in the project ID $2
curl -X POST "https://monitoring.googleapis.com/v1/projects/$2/dashboards" \
-d @$1 \
--header "Authorization: Bearer $(gcloud auth print-access-token)" \
--header "Content-Type: application/json"
|
all-of-us/workbench
|
ops/bash/create-dashboard.sh
|
Shell
|
bsd-3-clause
| 292 |
#!/usr/bin/env bash
ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export TARGET=i386-atomos
export PREFIX="$ROOT/../src/Build/Local"
export SOURCES="$ROOT/../src/Build/Temp"
export TARBALLS="$ROOT/../tarballs"
export PATCHFILES="$ROOT/../toolchain"
export PATH=$PREFIX/bin:/usr/bin:$PATH
LIB_URL=https://cairographics.org/releases/cairo-1.12.18.tar.xz
LIB_FOLDER=cairo-1.12.18
bail()
{
echo -e "\033[1;31mBuild failed. Please check the logs above to see what went wrong.\033[0m"
exit 1
}
if [ ! -d $LIB_FOLDER ]; then
if [ ! -f "$TARBALLS/$LIB_FOLDER.tar.gz" ]; then
wget -O "$TARBALLS/$LIB_FOLDER.tar.gz" $LIB_URL || bail
fi
tar -xvf "$TARBALLS/$LIB_FOLDER.tar.gz" -C $ROOT >> Setup.log 2>&1
pushd $ROOT/$LIB_FOLDER || bail
patch -p1 -f -i "$PATCHFILES/$LIB_FOLDER.diff" || bail
popd
fi
pushd "$ROOT/../src/Build/Bin" || bail
if [ -d $LIB_FOLDER ]; then
rm -rf $LIB_FOLDER
fi
mkdir $LIB_FOLDER || bail
pushd $LIB_FOLDER || bail
CPPFLAGS="-I$PREFIX/include" LDFLAGS="-L$PREFIX/lib" PKG_CONFIG_PATH=$PREFIX/lib/pkgconfig $ROOT/$LIB_FOLDER/configure --prefix=$PREFIX --host=$TARGET --enable-ps=no --enable-pdf=no --enable-interpreter=no --disable-xlib --disable-xcb --enable-fc=no --disable-gobject >> Setup.log 2>&1 || bail
cp "$PATCHFILES/cairo-Makefile" test/Makefile || bail
cp "$PATCHFILES/cairo-Makefile" perf/Makefile || bail
echo -e "\n\n#define CAIRO_NO_MUTEX 1" >> config.h || bail
make -j4 >> Setup.log || bail
make -j4 install >> Setup.log || bail
popd
popd
|
amaneureka/AtomOS
|
libs/Build-cairo.sh
|
Shell
|
bsd-3-clause
| 1,521 |
#!/bin/bash
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
source pkg_info
source ../../build_tools/common.sh
TestStep() {
Banner "Testing ${PACKAGE_NAME}"
ChangeDir ${NACL_PACKAGES_REPOSITORY}/${PACKAGE_NAME}/${NACL_BUILD_SUBDIR}
if [ "${NACL_GLIBC}" = "1" ]; then
local exe_dir=.libs
else
local exe_dir=
fi
export SEL_LDR_LIB_PATH=$PWD/src/${exe_dir}
pushd UnitTests/BulletUnitTests/${exe_dir}
RunSelLdrCommand AppBulletUnitTests${NACL_EXEEXT}
popd
pushd Demos/HelloWorld/${exe_dir}
RunSelLdrCommand AppHelloWorld${NACL_EXEEXT}
popd
}
AutogenStep() {
ChangeDir ${NACL_PACKAGES_REPOSITORY}/${PACKAGE_NAME}
# Remove \r\n from the shell script.
# The default sed on Mac is broken. Work around it by using $'...' to have
# bash convert \r to a carriage return.
sed -i.bak $'s/\r//g' ./autogen.sh
/bin/sh ./autogen.sh
# install-sh is extracted without the execute bit set; for some reason this
# works OK on Linux, but fails on Mac.
chmod +x install-sh
PatchConfigure
PatchConfigSub
}
ConfigureStep() {
AutogenStep
DefaultConfigureStep
}
PackageInstall
exit 0
|
adlr/naclports
|
ports/bullet/build.sh
|
Shell
|
bsd-3-clause
| 1,249 |
#!/bin/bash
if [ ! $# -eq 2 ]
then
echo "Usage: $0 <clonetreevisualization_executable> <rf_executable>" >&2
exit 1
fi
if [ ! -e result_m8 ]
then
mkdir result_m8
fi
echo "pattern,seed,method,RF" > results_m8.txt
for p in {mS,S,M,R}
do
for f in ../../../data/sims/m8/$p/reads_seed*.tsv
do
s=$(basename $f .tsv | sed -e s/reads_seed//g)
echo Running neighbor joining for seed $s pattern $p...
if [ ! -e result_m8/${p}_seed${s}.txt ]
then
python convert_reads_to_nj.py ../../../data/sims/m8/$p/reads_seed$s.tsv > result_m8/${p}_seed${s}.txt
fi
if [ ! -e result_m8/${p}_seed${s}.newick ]
then
Rscript run_nj.R result_m8/${p}_seed${s}.txt > result_m8/${p}_seed${s}.newick
fi
if [ ! -e result_m8/${p}_seed${s}.tree ]
then
python newickToTree.py result_m8/${p}_seed${s}.newick > result_m8/${p}_seed${s}.tree 2> result_m8/${p}_seed${s}.labeling
fi
if [ ! -e result_m8/${p}_seed${s}.dot ]
then
$1 -c ../../../data/sims/coloring.txt result_m8/${p}_seed${s}.tree result_m8/${p}_seed${s}.labeling > result_m8/${p}_seed${s}.dot
fi
$2 ../../../data/sims/m8/$p/T_seed$s.tree ../../../data/sims/m8/$p/T_seed$s.labeling result_m8/${p}_seed${s}.tree result_m8/${p}_seed${s}.labeling > result_m8/${p}_seed${s}.RF.txt
echo -n $p,$s,Neighbor joining, >> results_m8.txt
tail -n 1 result_m8/${p}_seed${s}.RF.txt | cut -d' ' -f3 >> results_m8.txt
done
done
|
raphael-group/machina
|
result/sims/neighbor_joining/process_m8.sh
|
Shell
|
bsd-3-clause
| 1,373 |
#!/bin/bash
if [[ $# -ne 4 ]]
then
echo -e "\nusage: $0 input-file.emp library-name.mod <start> <end>. 'start' and 'end' are integer (1,2,3...) sizes in mm (metric)\n"
exit
fi
INPUT_FILE=$1
LIB_NAME=$2
RANGE_START=$3
RANGE_END=$4
LIB_FOLDER="./lib"
if [[ ! -d $LIB_FOLDER ]]
then
echo -e "\n creating '$LIB_FOLDER' folder\n"
mkdir $LIB_FOLDER
fi
echo -e "\n working...\n"
for number in `seq $RANGE_START $RANGE_END`
do
TMP_FILE=`mktemp`
./scale.pl $INPUT_FILE $TMP_FILE 21 ${number}.0mm
perl -pi -e "s/LOGO/${LIB_NAME/%.mod/}_silkscreen-front_${number}mm/" $TMP_FILE
cat $TMP_FILE >> ./$LIB_NAME
rm $TMP_FILE
./scale.pl $INPUT_FILE $TMP_FILE 20 ${number}.0mm
perl -pi -e "s/LOGO/${LIB_NAME/%.mod/}_silkscreen-back_${number}mm/" $TMP_FILE
cat $TMP_FILE >> ./$LIB_NAME
rm $TMP_FILE
done
for number in `seq $RANGE_START $RANGE_END`
do
TMP_FILE=`mktemp`
./scale.pl $INPUT_FILE $TMP_FILE 15 ${number}.0mm
perl -pi -e "s/LOGO/${LIB_NAME/%.mod/}_copper-front_${number}mm/" $TMP_FILE
cat $TMP_FILE >> ./$LIB_NAME
./scale.pl $INPUT_FILE $TMP_FILE 0 ${number}.0mm
perl -pi -e "s/LOGO/${LIB_NAME/%.mod/}_copper-back_${number}mm/" $TMP_FILE
cat $TMP_FILE >> ./$LIB_NAME
rm $TMP_FILE
done
mv $LIB_NAME $LIB_FOLDER
echo -e "\n done.\n"
|
KaiserSoft/OpenSimButtonBox
|
Misc Circuit Boards/OSHW_logo_KiCad_scalable/make_lib.sh
|
Shell
|
bsd-3-clause
| 1,260 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
TOP=`pwd`
CND_CONF=default
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/pic18f97j60-family.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=pic18f97j60-family.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=pic18f97j60-family/
# Functions
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/pic18f97j60-family/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/pic18f97j60-family.tar
cd ${TMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/pic18f97j60-family.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
|
zzuzpb/contiki-2.7-xc8
|
platform/pic18f97j60-family/nbproject/Package-default.bash
|
Shell
|
bsd-3-clause
| 1,423 |
#!/bin/bash
if (( $# > 2 || $# < 1 )); then
echo "bad build_racer.sh call: $@" 1>&2
exit 1
fi
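# Example (hypothetical user directory and commit sha):
#   ./build_racer.sh alice 0f3c9aa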
user=$1
sha=$2
if [[ ! -d $user ]]; then
echo "build_user user doesn't exist: $user" 1>&2
exit 2
fi
echo "starting build for user $user for $sha" 1>&2
set -x
set -e
cd $user
if [[ $2 ]]; then
git checkout $sha
fi
mkdir -p build
cd build
rm -rf ./*
cmake ..
set +x
make 2>&1
|
theNerd247/ariaRacer
|
scripts/build_racer.sh
|
Shell
|
bsd-3-clause
| 384 |
#!/bin/bash
php gerar_configuracao.php
cd ..
cd models
php ../../bin/classmap_generator.php
|
frf/fsipedidos
|
vendor/propel/script/config.sh
|
Shell
|
bsd-3-clause
| 95 |
#!/bin/bash
export PYTHONPATH=`pwd`
make html
|
Cymmetria/mazerunner_sdk_python
|
make.sh
|
Shell
|
bsd-3-clause
| 47 |
#!/bin/sh
# cs.sh
find . -name "*.h" -o -name "*.c" -o -name "*.cpp" -o -name "*.cc" > cscope.files
cscope -bq
ctags -R --languages=c
|
chrisju/script
|
cs.sh
|
Shell
|
bsd-3-clause
| 134 |
#!/usr/bin/env sh
# generated from catkin/cmake/template/setup.sh.in
# Sets various environment variables and sources additional environment hooks.
# It tries its best to undo changes from a previously sourced setup file first.
# Supported command line options:
# --extend: skips the undoing of changes from a previously sourced setup file
# since this file is sourced either use the provided _CATKIN_SETUP_DIR
# or fall back to the destination set at configure time
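# e.g. to keep the currently sourced environment and overlay this workspace:
#   . /home/chonhyon/hydro_workspace/itomp/kuka_description/devel/setup.sh --extend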
: ${_CATKIN_SETUP_DIR:=/home/chonhyon/hydro_workspace/itomp/kuka_description/devel}
_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py"
unset _CATKIN_SETUP_DIR
if [ ! -f "$_SETUP_UTIL" ]; then
echo "Missing Python script: $_SETUP_UTIL"
return 22
fi
# detect if running on Darwin platform
_UNAME=`uname -s`
_IS_DARWIN=0
if [ "$_UNAME" = "Darwin" ]; then
_IS_DARWIN=1
fi
unset _UNAME
# make sure to export all environment variables
export CMAKE_PREFIX_PATH
export CPATH
if [ $_IS_DARWIN -eq 0 ]; then
export LD_LIBRARY_PATH
else
export DYLD_LIBRARY_PATH
fi
unset _IS_DARWIN
export PATH
export PKG_CONFIG_PATH
export PYTHONPATH
# remember type of shell if not already set
if [ -z "$CATKIN_SHELL" ]; then
CATKIN_SHELL=sh
fi
# invoke Python script to generate necessary exports of environment variables
_SETUP_TMP=`mktemp /tmp/setup.sh.XXXXXXXXXX`
if [ $? -ne 0 -o ! -f "$_SETUP_TMP" ]; then
echo "Could not create temporary file: $_SETUP_TMP"
return 1
fi
CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ > $_SETUP_TMP
_RC=$?
if [ $_RC -ne 0 ]; then
if [ $_RC -eq 2 ]; then
echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': may be the disk if full?"
else
echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC"
fi
unset _RC
unset _SETUP_UTIL
rm -f $_SETUP_TMP
unset _SETUP_TMP
return 1
fi
unset _RC
unset _SETUP_UTIL
. $_SETUP_TMP
rm -f $_SETUP_TMP
unset _SETUP_TMP
# source all environment hooks
_i=0
while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do
eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i
unset _CATKIN_ENVIRONMENT_HOOKS_$_i
eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
# set workspace for environment hook
CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace
. "$_envfile"
unset CATKIN_ENV_HOOK_WORKSPACE
_i=$((_i + 1))
done
unset _i
unset _CATKIN_ENVIRONMENT_HOOKS_COUNT
|
Chpark/itomp
|
kuka_description/devel/setup.sh
|
Shell
|
bsd-3-clause
| 2,413 |
#!/bin/bash
# This puts a fresh-built vm into /Applications without removing what was already there,
# by overwriting only the parts you've just built.
# Copy to /Applications, excluding any .svn subdirs.
tar cf - --exclude .svn --exclude 'Squeak*icns' ./Teleplace.app | (cd /Applications; tar xf -)
# All the sub-content frameworks and the exe need to be executable;
# we just do everything so we don't miss anything.
cd /Applications/Teleplace.app
chmod -R a+x ./*
|
zecke/old-pharo-vm-sctp
|
macbuild/install.sh
|
Shell
|
mit
| 473 |
#!/bin/bash
set -Eeuxo pipefail
|
milleniumbug/gdbplz
|
ci/20-run_tests_win.sh
|
Shell
|
mit
| 32 |
#!/bin/bash
# __ __| ___| _)
# | __ `__ \ | | \ \ / \___ \ _ \ __| __| | _ \ __ \ __|
# | | | | | | ` < | __/ \__ \ \__ \ | ( | | | \__ \
# _| _| _| _| \__,_| _/\_\ _____/ \___| ____/ ____/ _| \___/ _| _| ____/
#
####################################################################################
# save the windows, panes and layouts
# of all running tmux sessions to a bash script
construct_panes() {
initial_window=true
initial_pane=true
session=$1
window_index=$2
window_name=$4
layout=$5
shift 5
while [ $# -gt 2 ] ; do
# get child process of pane
child=$(pgrep -P $1)
if [ -z "$child" ]
then
command=$(ps -o 'args=' -p $1)
else
command=$(ps -o 'args=' -p $child)
fi
if [ "$command" == "-bash" ]; then
command=""
else
command="$command"
fi
[ "$session" = "$last_session" ] && initial_window=false
[ "$window_index" = "$last_window" ] && initial_pane=false
if [ "$initial_window" == "true" ] && [ "$initial_pane" = "true" ]; then
echo "tmux new-session -d -n $window_name -s $session -c "$2""
initial_session=false
elif [ "$initial_window" == "true" ] || [ "$initial_pane" = "true" ]; then
echo "tmux new-window -d -n $window_name -t $session:$window_index -c "$2""
else
echo "tmux split-window -d -t $session:$window_index -c "$2""
fi
# $3 - pane index
[ "$command" ] && echo tmux send-keys -t $session:$window_index.$3 \"$command\" C-m
echo tmux select-layout -t $session:$window_index \"$layout\" \> /dev/null
last_session=$session
last_window=$window_index
shift 3
done >> ./$filename
}
construct_window() {
#tmux list-panes -t $1:$2
session=$1
window_index=$2
name=$3
nr_of_panes=$4
layout=$5
panes=$(tmux list-panes -t $1:$2 -F "#{pane_pid} #{pane_current_path} #{pane_index}")
construct_panes $session $window_index $nr_of_panes $name $layout $panes
}
setup() {
if ! $(tmux has-session 2>/dev/null); then
echo No Sessions exist, exiting.
exit
fi
filename=./tmux-sessions-`date "+%Y%m%d-%H%M%S"`.sh
sessions=$(tmux list-sessions -F "#{session_name}")
echo $sessions
touch $filename
echo '#!/bin/bash' >> $filename
echo 'if $(tmux has-session 2>/dev/null); then tmux att; exit; fi' >> $filename
}
teardown() {
echo 'tmux att' >> $filename
chmod +x $filename
}
save_sessions() {
windows=$(tmux list-windows -a -F "#{session_name} #{window_index} #{window_name} #{window_panes} #{window_layout}")
while read window; do
construct_window $window
done <<< "$windows"
}
setup
save_sessions
teardown
|
DavidMcEwan/ucfg
|
bin/tmux-save-session.sh
|
Shell
|
mit
| 2,738 |
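A usage sketch for the session saver above; the generated filename is timestamped, so the one shown is illustrative:

./tmux-save-session.sh # writes ./tmux-sessions-20240101-120000.sh
tmux kill-server # discard the live sessions
./tmux-sessions-20240101-120000.sh # replays new-session/new-window/split-window, then attaches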
#!/bin/bash
# Adapted from Gregory Pakosz's amazing tmux config at https://github.com/gpakosz/.tmux
_urlview() {
tmux capture-pane -J -S - -E - -b "urlview-$1" -t "$1"
tmux split-window "tmux show-buffer -b urlview-$1 | urlview || true; tmux delete-buffer -b urlview-$1"
}
_urlview
|
58bits/dotfiles
|
tmux/urlview.sh
|
Shell
|
mit
| 288 |
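The helper above expects a pane target as $1. A hypothetical .tmux.conf binding for it (the key choice and script path are assumptions):

# In ~/.tmux.conf:
bind-key U run-shell "~/dotfiles/tmux/urlview.sh #{pane_id}"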
#!/bin/bash
# @raycast.title Search IMDB
# @raycast.author Lucas Costa
# @raycast.authorURL https://github.com/lucasrcosta
# @raycast.description Search IMDB.
# @raycast.icon images/imdb.png
# @raycast.mode silent
# @raycast.packageName Web Searches
# @raycast.schemaVersion 1
# @raycast.argument1 { "type": "text", "placeholder": "Title", "percentEncoded": true }
open "https://www.imdb.com/find?q=${1}"
|
loganlinn/dotfiles
|
tag-darwin/raycast/search-imdb.sh
|
Shell
|
mit
| 409 |
# for sbt: http://code.google.com/p/simple-build-tool/
_sbt_complete() {
local cur goals
COMPREPLY=()
cur=${COMP_WORDS[COMP_CWORD]}
goals="clean clean-cache clean-lib clean-plugins compile console console-quick"
goals="$goals copy-resources copy-test-resources deliver deliver-local"
goals="$goals doc doc-all doc-test exec graph-pkg graph-src increment-version"
goals="$goals javap make-pom package package-all package-docs package-project"
goals="$goals package-src package-test package-test-src publish publish-local"
goals="$goals release run sh test test-compile test-failed test-javap test-only"
goals="$goals test-quick test-run update"
cur=`echo $cur | sed 's/\\\\//g'`
COMPREPLY=($(compgen -W "${goals}" "${cur}" | sed 's/\\\\//g') )
}
complete -F _sbt_complete -o filenames sbt
|
relaynetwork/profile
|
scripts/sbt-completion.sh
|
Shell
|
mit
| 815 |
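To try the completion above in an interactive shell (a sketch; compgen requires bash):

source scripts/sbt-completion.sh
# Typing: sbt test-<TAB><TAB>
# should offer: test-compile test-failed test-javap test-only test-quick test-run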
#!/bin/bash
############################################################################
#
# Author: Nil Portuguรฉs Calderรณ <[email protected]>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
#
############################################################################
if [ "$UID" -ne "0" ]
then
echo ""
echo "You must be sudoer or root. To run this script enter:"
echo ""
echo "sudo chmod +x $0; sudo ./$0"
echo ""
exit 1
fi
## INSTALL
if [ -z "$(which varnish)" ];
then
sudo apt-get update
sudo apt-get install -y varnish
fi
|
nilopc/bash-scripts
|
varnish.sh
|
Shell
|
mit
| 635 |
#!/bin/bash
set -v # echo commands
redis-server --bind 127.0.0.1 --appendonly yes --appendfsync always
|
jeffbski/microservices
|
redis-queue/start-redis.bash
|
Shell
|
mit
| 103 |
#!/bin/bash -eux
echo '==> Configuring settings for vagrant'
SSH_USER=${SSH_USERNAME:-vagrant}
SSH_USER_HOME=${SSH_USER_HOME:-/home/${SSH_USER}}
VAGRANT_INSECURE_KEY="ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NOTd0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcWyLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key"
# Add vagrant user (if it doesn't already exist)
if ! id -u $SSH_USER >/dev/null 2>&1; then
echo "==> Creating ${SSH_USER}"
/usr/sbin/groupadd $SSH_USER
/usr/sbin/useradd $SSH_USER -g $SSH_USER -G wheel
echo "==> Giving ${SSH_USER} sudo powers"
echo "${SSH_USER}"|passwd --stdin $SSH_USER
echo "${SSH_USER} ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
fi
echo '==> Installing Vagrant SSH key'
mkdir -pm 700 ${SSH_USER_HOME}/.ssh
# https://raw.githubusercontent.com/hashicorp/vagrant/master/keys/vagrant.pub
echo "${VAGRANT_INSECURE_KEY}" > $SSH_USER_HOME/.ssh/authorized_keys
chmod 0600 ${SSH_USER_HOME}/.ssh/authorized_keys
chown -R ${SSH_USER}:${SSH_USER} ${SSH_USER_HOME}/.ssh
|
mitchellh/vagrant-installers
|
packer/vagrant/scripts/centos/vagrant.sh
|
Shell
|
mit
| 1,294 |
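A quick, hypothetical post-provision check that the key landed where the script above put it (needs a reasonably recent OpenSSH for -l -f on an authorized_keys file):

# Print the fingerprint of each installed key; the Vagrant insecure key is well known.
ssh-keygen -l -f /home/vagrant/.ssh/authorized_keys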
#! /bin/sh
. ../../testenv.sh
analyze_failure bug1.vhdl
analyze_failure bug2.vhdl
analyze_failure bug7.vhdl
analyze_failure bug8.vhdl
clean
echo "Test successful"
|
tgingold/ghdl
|
testsuite/gna/bug069/testsuite.sh
|
Shell
|
gpl-2.0
| 167 |
restore:# mysql -u <username> -p[userpassword] [database] < dump.sql
|
andrewjstringer/EnterpriseWebDNS
|
cgi-bin/restore.sh
|
Shell
|
gpl-2.0
| 70 |
#!/bin/bash
summary() {
echo "add cloned genomic inserts from a GFF file"
}
loaderHelp() {
cat <<HELP
Will accept a file with GFF3 style tab-delimited columns (9), where the last column must contain only an ID. e.g.
berg14 pbg clone_genomic_insert 231580 239153 - 0 - ID=PbG01-2349f12
The d
HELP
}
loaderUsage() {
cat <<USAGE
Usage: `basename $0` clonedgenomicinsert [-d deleteall] [-x] <file>
Options:
-d deleteall
If set, will delete all features of type 'cloned_genomic_insert'.
-x delete
If set, will delete the features specified in the file.
USAGE
standard_options
echo
}
doLoad() {
delimiter='\t'
delete=false
OPTIND=0
while getopts ":d:x$stdopts" option; do
case "$option" in
d) deleteall="$OPTARG"
;;
x) delete=true
;;
*) process_standard_options "$option"
;;
esac
done
shift $[ $OPTIND - 1 ]
read_password
java $database_properties -Dlog4j.configuration=log4j.loader.properties \
org.genedb.db.loading.auxiliary.Load clonedGenomicInsertLoader \
--delete="$delete" --deleteall="$deleteall" "$@"
}
|
satta/GeneDB
|
ng/scripts/loaders/clonedgenomicinsert.sh
|
Shell
|
gpl-3.0
| 1,185 |
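An invocation sketch for the loader above; the genedb-loader wrapper name is an assumption (this file only defines the subcommand sourced by the surrounding dispatch script):

# Load cloned genomic inserts from a GFF3-style file:
./genedb-loader clonedgenomicinsert inserts.gff3
# Delete exactly the features named in the file instead of loading them:
./genedb-loader clonedgenomicinsert -x inserts.gff3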
#!/bin/bash
# This script tests the high level end-to-end functionality demonstrated
# as part of the examples/sample-app
set -o errexit
set -o nounset
set -o pipefail
STARTTIME=$(date +%s)
OS_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${OS_ROOT}/hack/lib/init.sh"
echo "[INFO] Starting containerized end-to-end test"
unset KUBECONFIG
os::util::environment::setup_all_server_vars "test-end-to-end-docker/"
os::util::environment::use_sudo
reset_tmp_dir
function cleanup()
{
out=$?
echo
if [ $out -ne 0 ]; then
echo "[FAIL] !!!!! Test Failed !!!!"
else
echo "[INFO] Test Succeeded"
fi
echo
set +e
dump_container_logs
# pull information out of the server log so that we can get failure management in jenkins to highlight it and
# really have it smack people in their logs. This is a severe correctness problem
grep -a5 "CACHE.*ALTERED" ${LOG_DIR}/container-origin.log
echo "[INFO] Dumping etcd contents to ${ARTIFACT_DIR}/etcd_dump.json"
set_curl_args 0 1
ETCD_PORT="${ETCD_PORT:-4001}"
curl ${clientcert_args} -L "${API_SCHEME}://${API_HOST}:${ETCD_PORT}/v2/keys/?recursive=true" > "${ARTIFACT_DIR}/etcd_dump.json"
echo
if [[ -z "${SKIP_TEARDOWN-}" ]]; then
echo "[INFO] remove the openshift container"
docker stop origin
docker rm origin
echo "[INFO] Stopping k8s docker containers"; docker ps | awk 'index($NF,"k8s_")==1 { print $1 }' | xargs -l -r docker stop
if [[ -z "${SKIP_IMAGE_CLEANUP-}" ]]; then
echo "[INFO] Removing k8s docker containers"; docker ps -a | awk 'index($NF,"k8s_")==1 { print $1 }' | xargs -l -r docker rm
fi
set -u
fi
journalctl --unit docker.service --since -15minutes > "${LOG_DIR}/docker.log"
delete_empty_logs
truncate_large_logs
set -e
echo "[INFO] Exiting"
ENDTIME=$(date +%s); echo "$0 took $(($ENDTIME - $STARTTIME)) seconds"
exit $out
}
trap "cleanup" EXIT INT TERM
os::log::start_system_logger
out=$(
set +e
docker stop origin 2>&1
docker rm origin 2>&1
set -e
)
# Setup
echo "[INFO] openshift version: `openshift version`"
echo "[INFO] oc version: `oc version`"
echo "[INFO] Using images: ${USE_IMAGES}"
echo "[INFO] Starting OpenShift containerized server"
oc cluster up --server-loglevel=4 --version="${TAG}" \
--host-data-dir="${VOLUME_DIR}/etcd" \
--host-volumes-dir="${VOLUME_DIR}"
IMAGE_WORKING_DIR=/var/lib/origin
docker cp origin:${IMAGE_WORKING_DIR}/openshift.local.config ${BASETMPDIR}
export ADMIN_KUBECONFIG="${MASTER_CONFIG_DIR}/admin.kubeconfig"
export CLUSTER_ADMIN_CONTEXT=$(oc config view --config=${ADMIN_KUBECONFIG} --flatten -o template --template='{{index . "current-context"}}')
sudo chmod -R a+rwX "${ADMIN_KUBECONFIG}"
export KUBECONFIG="${ADMIN_KUBECONFIG}"
echo "[INFO] To debug: export KUBECONFIG=$ADMIN_KUBECONFIG"
${OS_ROOT}/test/end-to-end/core.sh
|
tracyrankin/origin
|
hack/test-end-to-end-docker.sh
|
Shell
|
apache-2.0
| 2,822 |
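cleanup() above saves the etcd v2 keyspace as JSON; a hypothetical way to skim that artifact, assuming jq is available:

# List the top-level etcd keys captured in the dump:
jq -r '.node.nodes[].key' "${ARTIFACT_DIR}/etcd_dump.json"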
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -eux
export ARROW_BUILD_TOOLCHAIN=$CONDA_PREFIX
mkdir -p /build/lint
pushd /build/lint
cmake -GNinja \
-DARROW_FLIGHT=ON \
-DARROW_GANDIVA=ON \
-DARROW_PARQUET=ON \
-DARROW_PYTHON=ON \
-DCMAKE_CXX_FLAGS='-D_GLIBCXX_USE_CXX11_ABI=0' \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
/arrow/cpp
popd
export IWYU_COMPILATION_DATABASE_PATH=/build/lint
/arrow/cpp/build-support/iwyu/iwyu.sh all
|
tebeka/arrow
|
dev/lint/run_iwyu.sh
|
Shell
|
apache-2.0
| 1,235 |
#!/bin/bash
source ../comm/build.sh
./bin/deploy.sh -pd -s logistics-platform -e test
|
sdgdsffdsfff/athena-rest
|
athena-rest-build/src/main/jenkins-bin/logistics-platform/build-athena-example-test.sh
|
Shell
|
apache-2.0
| 89 |
#!/bin/sh
#This script is assumed to be run on the host where ODL controller is running.
#To run the scripts from a remote machine, change the controller IP accordingly.
controller_ip=192.168.1.4
#Node ID: When OpenDayLight controller starts, this is the switch ID known
#to the controller. Check "printNodes" at the OSGI interface
openflow_node=openflow:123638415924954
echo "l2-interface-group_port2_vlan100_untag.xml"
curl -v -H "Content-Type: application/xml" \
-H "Accept: application/xml" \
-H "Authorization: Basic YWRtaW46YWRtaW4=" \
-X PUT \
--data "@./l2-interface-group_port2_vlan100_untag.xml" \
http://$controller_ip:8181/restconf/config/opendaylight-inventory:nodes/node/$openflow_node/group/6553602
|
macauleycheng/AOS_OF_Example-ODL
|
Customer/CertusNet/case2/test.sh
|
Shell
|
apache-2.0
| 742 |
#!/bin/sh
################################################################################
##
## Licensed to the Apache Software Foundation (ASF) under one or more
## contributor license agreements. See the NOTICE file distributed with
## this work for additional information regarding copyright ownership.
## The ASF licenses this file to You under the Apache License, Version 2.0
## (the "License"); you may not use this file except in compliance with
## the License. You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
################################################################################
set -e
# Download the collection of files associated with an Apache Edgent
# Release or Release Candidate from the Apache Distribution area:
# https://dist.apache.org/repos/dist/release/incubator/edgent
# or https://dist.apache.org/repos/dist/dev/incubator/edgent
# respectively.
#
# Prompts before taking actions unless "--nquery"
# Prompts to perform signature validation (using buildTools/check_sigs.sh)
# unless --nvalidate or --validate is specified.
. `dirname $0`/common.sh
setUsage "`basename $0` [--nquery] [--validate|--nvalidate] <version> [<rc-num>]"
handleHelp "$@"
BUILDTOOLS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
NQUERY=
if [ "$1" == "--nquery" ]; then
NQUERY="--nquery"; shift
fi
VALIDATE=-1 # query
if [ "$1" == "--validate" ]; then
VALIDATE=1; shift
elif [ "$1" == "--nvalidate" ]; then
VALIDATE=0; shift
fi
requireArg "$@"
VER=$1; shift
checkVerNum $VER || usage "Not a X.Y.Z version number \"$VER\""
RC_NUM=
if [ $# -gt 0 ]; then
RC_NUM=$1; shift
checkRcNum ${RC_NUM} || usage "Not a release candidate number \"${RC_NUM}\""
fi
noExtraArgs "$@"
# Release or Release Candidate mode
IS_RC=
if [ ${RC_NUM} ]; then
IS_RC=1
fi
BASE_URL=${EDGENT_ASF_SVN_RELEASE_URL}
if [ ${IS_RC} ]; then
BASE_URL=${EDGENT_ASF_SVN_RC_URL}
fi
RC_SFX=
if [ ${IS_RC} ]; then
RC_SFX=rc${RC_NUM}
fi
DST_BASE_DIR=downloaded-edgent-${VER}${RC_SFX}
[ -d ${DST_BASE_DIR} ] && die "${DST_BASE_DIR} already exists"
[ ${NQUERY} ] || confirm "Proceed to download to ${DST_BASE_DIR} from ${BASE_URL}?" || exit
echo Downloading to ${DST_BASE_DIR} ...
function mywget() {
# OSX lacks wget by default
(set -x; curl -f -O $1)
}
function getSignedBundle() {
mywget ${1}
mywget ${1}.asc
mywget ${1}.md5
mywget ${1}.sha512
}
mkdir -p ${DST_BASE_DIR}
cd ${DST_BASE_DIR}
ABS_BASE_DIR=`pwd`
URL=${BASE_URL}
mywget ${URL}/KEYS
DST_VER_DIR=${VER}-incubating
URL=${BASE_URL}/${VER}-incubating
if [ ${IS_RC} ]; then
DST_VER_DIR=${DST_VER_DIR}/${RC_SFX}
URL=${URL}/${RC_SFX}
fi
mkdir -p ${DST_VER_DIR}
cd ${DST_VER_DIR}
mywget ${URL}/README
mywget ${URL}/RELEASE_NOTES
getSignedBundle ${URL}/apache-edgent-${VER}-incubating-source-release.tar.gz
getSignedBundle ${URL}/apache-edgent-${VER}-incubating-source-release.zip
#mkdir binaries
#cd binaries
#URL=${URL}/binaries
#getSignedBundle ${URL}/apache-edgent-${VER}-incubating-bin.tar.gz
echo
echo Done Downloading to ${DST_BASE_DIR}
[ "${VALIDATE}" = 0 ] && exit
[ "${VALIDATE}" = 1 ] || [ ${NQUERY} ] || confirm "Do you want to check the bundle signatures and compare source bundles?" || exit
cd ${ABS_BASE_DIR}
echo
echo "Verifying the tar.gz and zip have the same contents..."
(set -x; $BUILDTOOLS_DIR/compare_bundles.sh ${DST_VER_DIR}/apache-edgent-${VER}-incubating-source-release.tar.gz ${DST_VER_DIR}/apache-edgent-${VER}-incubating-source-release.zip)
echo
echo "If the following bundle gpg signature checks fail, you may need to"
echo "import the project's list of signing keys to your keyring"
echo " $ gpg ${DST_BASE_DIR}/KEYS # show the included keys"
echo " $ gpg --import ${DST_BASE_DIR}/KEYS"
echo
echo "Verifying the source bundle signatures..."
(set -x; $BUILDTOOLS_DIR/check_sigs.sh ${DST_VER_DIR})
#echo
#echo "Verifying the binary bundle signatures..."
#(set -x; $BUILDTOOLS_DIR/check_sigs.sh ${DST_VER_DIR}/binaries)
|
dlaboss/incubator-edgent
|
buildTools/download_edgent_asf.sh
|
Shell
|
apache-2.0
| 4,340 |
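The script above delegates verification to check_sigs.sh; the manual equivalent for one bundle looks roughly like this (version numbers and paths are illustrative):

gpg --import downloaded-edgent-1.2.0rc1/KEYS
gpg --verify apache-edgent-1.2.0-incubating-source-release.tar.gz.asc \
apache-edgent-1.2.0-incubating-source-release.tar.gz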
#!/bin/bash
#
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
# This script is meant to run as part of a Jenkins job such as
# https://builds.apache.org/job/hbase_generate_website/
#
# It needs to be built on a Jenkins server with the label git-websites
#
# It expects to have the hbase repo cloned to the directory hbase
#
# If there is a build error, the Jenkins job is configured to send an email
LOCAL_REPO=${WORKSPACE}/.m2/repo
# Nuke the local maven repo each time, to start with a known environment
rm -Rf "${LOCAL_REPO}"
mkdir -p "${LOCAL_REPO}"
# Clean any leftover files in case we are reusing the workspace
rm -Rf -- *.patch *.patch.zip hbase/target target *.txt hbase-site
# Set up the environment
export JAVA_HOME=$JDK_1_8_LATEST__HOME
export PATH=$JAVA_HOME/bin:$MAVEN_3_3_3_HOME/bin:$PATH
export MAVEN_OPTS="-XX:MaxPermSize=256m -Dmaven.repo.local=${LOCAL_REPO}"
# Verify the Maven version
mvn -version
# Save and print the SHA we are building
CURRENT_HBASE_COMMIT="$(git log --pretty=format:%H -n1)"
echo "Current HBase commit: $CURRENT_HBASE_COMMIT"
# Clone the hbase-site repo manually so it doesn't trigger spurious
# commits in Jenkins.
git clone --depth 1 --branch asf-site https://git-wip-us.apache.org/repos/asf/hbase-site.git
# Figure out the last commit we built the site from, and bail if the build
# still represents the SHA of HBase master
cd "${WORKSPACE}/hbase-site" || exit -1
git log --pretty=%s | grep ${CURRENT_HBASE_COMMIT}
PUSHED=$?
echo "PUSHED is $PUSHED"
if [ $PUSHED -eq 0 ]; then
echo "$CURRENT_HBASE_COMMIT is already mentioned in the hbase-site commit log. Not building."
exit 0
else
echo "$CURRENT_HBASE_COMMIT is not yet mentioned in the hbase-site commit log. Assuming we don't have it yet. $PUSHED"
fi
# Go to the hbase directory so we can build the site
cd "${WORKSPACE}/hbase" || exit -1
# This will only be set for builds that are triggered by SCM change, not manual builds
if [ "$CHANGE_ID" ]; then
echo -n " ($CHANGE_ID - $CHANGE_TITLE)"
fi
# Build and install HBase, then build the site
echo "Building HBase"
mvn \
-DskipTests \
-Dmaven.javadoc.skip=true \
--batch-mode \
-Dcheckstyle.skip=true \
-Dfindbugs.skip=true \
--log-file="${WORKSPACE}/hbase-build-log-${CURRENT_HBASE_COMMIT}.txt" \
clean install \
&& mvn clean site \
--batch-mode \
-DskipTests \
--log-file="${WORKSPACE}/hbase-install-log-${CURRENT_HBASE_COMMIT}.txt"
status=$?
if [ $status -ne 0 ]; then
echo "Failure: mvn clean site"
exit $status
fi
# Stage the site
echo "Staging HBase site"
mvn \
--batch-mode \
--log-file="${WORKSPACE}/hbase-stage-log-${CURRENT_HBASE_COMMIT}.txt" \
site:stage
status=$?
if [ $status -ne 0 ] || [ ! -d target/staging ]; then
echo "Failure: mvn site:stage"
exit $status
fi
# Get ready to update the hbase-site repo with the new artifacts
cd "${WORKSPACE}/hbase-site" || exit -1
#Remove previously-generated files
FILES_TO_REMOVE=("hbase-*"
"apidocs"
"devapidocs"
"testapidocs"
"testdevapidocs"
"xref"
"xref-test"
"*book*"
"*.html"
"*.pdf*"
"css"
"js"
"images")
for FILE in "${FILES_TO_REMOVE[@]}"; do
echo "Removing ${WORKSPACE}/hbase-site/$FILE"
rm -Rf "${FILE}"
done
# Copy in the newly-built artifacts
cp -au "${WORKSPACE}"/hbase/target/staging/* .
# If the index.html is missing, bail because this is serious
if [ ! -f index.html ]; then
echo "The index.html is missing. Aborting."
exit 1
else
# Add all the changes
echo "Adding all the files we know about"
git add .
# Create the commit message and commit the changes
WEBSITE_COMMIT_MSG="Published site at $CURRENT_HBASE_COMMIT."
echo "WEBSITE_COMMIT_MSG: $WEBSITE_COMMIT_MSG"
git commit -m "${WEBSITE_COMMIT_MSG}" -a
# Dump a little report
echo "This commit changed these files (excluding Modified files):"
git diff --name-status --diff-filter=ADCRTXUB origin/asf-site
# Create a patch, which Jenkins can save as an artifact and can be examined for debugging
git format-patch --stdout origin/asf-site > "${WORKSPACE}/${CURRENT_HBASE_COMMIT}.patch"
echo "Change set saved to patch ${WORKSPACE}/${CURRENT_HBASE_COMMIT}.patch"
# Push the real commit
git push origin asf-site || (echo "Failed to push to asf-site. Website not updated." && exit -1)
# Create an empty commit to work around INFRA-10751
git commit --allow-empty -m "INFRA-10751 Empty commit"
# Push the empty commit
git push origin asf-site || (echo "Failed to push the empty commit to asf-site. Website may not update. Manually push an empty commit to fix this. (See INFRA-10751)" && exit -1)
echo "Pushed the changes to branch asf-site. Refresh http://hbase.apache.org/ to see the changes within a few minutes."
git fetch origin
git reset --hard origin/asf-site
# Zip up the patch so Jenkins can save it
cd "${WORKSPACE}" || exit -1
zip website.patch.zip "${CURRENT_HBASE_COMMIT}.patch"
fi
#echo "Dumping current environment:"
#env
|
JingchengDu/hbase
|
dev-support/jenkins-scripts/generate-hbase-website.sh
|
Shell
|
apache-2.0
| 5,902 |
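The job above archives its change set as a format-patch file; a hypothetical way to inspect that artifact locally (the SHA-named patch is whatever the run produced):

unzip website.patch.zip
git -C hbase-site apply --stat <commit-sha>.patch # summary of the files the site build touched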