code        stringlengths   2 .. 1.05M
repo_name   stringlengths   5 .. 110
path        stringlengths   3 .. 922
language    stringclasses   1 value
license     stringclasses   15 values
size        int64           2 .. 1.05M
#!/bin/bash echo "Building leanix-sdk-csharp client example..." cd /var/samples/client nuget install ../../src/LeanIX/packages.config -OutputDirectory ../../src/packages xbuild LeanIX.sln /p:WarningLevel=0 echo "Running leanix-sdk-csharp client example..." echo "" echo "" cd SampleClient/bin/Debug mono SampleClient.exe echo "" echo "" echo "Done!"
leanix/leanix-sdk-csharp
samples/run_client.sh
Shell
mit
351
NAME=fatfs

SRC_DIR=$PWD/src/$NAME
INCLUDE_DIR=$PWD/include/$NAME
INTERFACE_DIR=$PWD/interface/$NAME

mkdir -p $SRC_DIR
mkdir -p $INCLUDE_DIR

DL_DIR=dl/$NAME
mkdir -p $DL_DIR
pushd $DL_DIR
wget http://elm-chan.org/fsw/ff/ff11.zip
yes A | unzip ff11.zip
rm ff11.zip
popd

# Move the library source into the src dir
mv $DL_DIR/src/* $SRC_DIR

# we link all headers to the include dir
ln -s $SRC_DIR/*.h $INCLUDE_DIR

# and lastly we link our interface sources to the library sources, overwriting
# existing files
ln -sf $INTERFACE_DIR/*.c $SRC_DIR
ln -sf $INTERFACE_DIR/*.h $SRC_DIR

# Cleanup
rm -r $DL_DIR
rm -r dl
UnicornRaceEngineering/g5-nodes
third_party/get_fatfs.sh
Shell
mit
619
#!/bin/bash
timestamp=$(date +%c)

echo "pulling from github"
git pull

echo "adding and committing changes"
git add --all
git commit -m "$timestamp"

echo "pushing to master"
git push origin master

unset timestamp
gpi-it/GuardiansX
deployment/commitandpush.sh
Shell
mit
214
DEBUG=myapp ./bin/www
jessecooper/EdPurp
debug.sh
Shell
mit
22
#!/bin/bash

# Change to parent of this script's directory.
GIT_SITE_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
cd $GIT_SITE_ROOT

PID_FILE=$GIT_SITE_ROOT/run/git.adamheins.com.pid

if [ -f "$PID_FILE" ]; then
  # Kill the process indicated by the PID file and remove it.
  kill -9 $(cat "$PID_FILE")
  rm -f "$PID_FILE"
  echo "Removed PID file."
else
  # If there isn't a PID file, either the process isn't running or something
  # has gone wrong in generating it.
  echo "No PID file found."
fi
adamheins/git.adamheins.com
sh/stop.bash
Shell
mit
494
#!/bin/bash

#JAVA 7 / 8
sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update
sudo apt-get install oracle-java8-installer
sudo apt-get install oracle-java7-installer
sudo update-java-alternatives -s java-8-oracle
sudo update-java-alternatives -s java-7-oracle
rrialq/MyPC-install-scripts
xwindowApplications/chrome.sh
Shell
mit
271
rm out*
rm NET*
rm top*
rm RT*
ronakshah725/bgp_ospf_final
rem.sh
Shell
mit
31
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2174-1
#
# Security announcement date: 2011-02-26 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:13 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: i386
#
# Vulnerable packages fix on version:
# - avahi:0.6.27-2+squeeze1
#
# Last versions recommended by security team:
# - avahi:0.6.27-2+squeeze1
#
# CVE List:
# - CVE-2011-1002
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

sudo apt-get install --only-upgrade avahi=0.6.27-2+squeeze1 -y
Cyberwatch/cbw-security-fixes
Debian_6_(Squeeze)/i386/2011/DSA-2174-1.sh
Shell
mit
616
#
# Movidius NCSDK Build Script
# @author Loreto Parisi (loretoparisi at gmail dot com)
# v1.0.0
# @2018 Loreto Parisi (loretoparisi at gmail dot com)
#

IMAGE=movidius

#docker run --rm -it --privileged --device=/dev/tty.usbserial $IMAGE bash
docker run --rm -it $IMAGE bash
loretoparisi/docker
movidius/run.sh
Shell
mit
277
#!/bin/bash
name=`echo $1 | cut -f1 -d'.'`
Rscript exportcitations.R $1 $name.bib
gadenbuie/getcitations
getcitations.sh
Shell
mit
82
$GOPATH/bin/primitive -i original.jpg -o 30.png -n 30 -m 5 -r 256 -s 1024 -a 255 -v
$GOPATH/bin/primitive -i original.jpg -o 40.png -n 40 -m 5 -r 256 -s 1024 -a 255 -v
$GOPATH/bin/primitive -i original.jpg -o 60.png -n 60 -m 5 -r 256 -s 1024 -a 255 -v
$GOPATH/bin/primitive -i original.jpg -o 120.png -n 120 -m 5 -r 256 -s 1024 -a 255 -v
zns18/zns18.github.io
assets/images/beach/run.sh
Shell
mit
337
python makeatlas.py
cp *.png *.json ../javascript/assets/sprites/
paulscottrobson/atari-cosmos
sprites-js/build.sh
Shell
mit
66
if [[ ! -o interactive ]]; then
    return
fi

compctl -K _base base

_base() {
  local word words completions
  read -cA words
  word="${words[2]}"

  if [ "${#words}" -eq 2 ]; then
    completions="$(base commands)"
  else
    completions="$(base completions "${word}")"
  fi

  reply=("${(ps:\n:)completions}")
}
capotej/base
completions/base.zsh
Shell
mit
316
#!/bin/bash
GEM_HOME="$(ruby -e 'puts Gem.user_dir')"
PATH="$PATH:$GEM_HOME/bin"
jekyll s --draft --config _config.yml,_config_local.yml -H 0.0.0.0 -l
Settis/settis.github.io
runLocally.sh
Shell
mit
152
#!/bin/bash

IMAGES=(centos_python_pip scientific_python image_python bioformats)

for IMAGE in ${IMAGES[@]}; do
  echo "Building jicscicomp/$IMAGE"
  cd $IMAGE
  docker build -t jicscicomp/$IMAGE .
  cd ..
done
JIC-CSB/scicomp_docker
image_descriptions/make_images.sh
Shell
mit
220
TEST=test/axioms/axiom_gen/TreeSet
cd ../../../../
./jsk.sh ${TEST}/TreeSetTester$1.java ${TEST}/Object.java ${TEST}/Integer.java ${TEST}/TreeSet$1.java --no-lib
cd ${TEST}
rm *~
plum-umd/java-sketch
test/axioms/axiom_gen/TreeSet/run.sh
Shell
mit
181
#!/bin/bash

# Set up helper files that let vim know where to find important files while working
# under a project path. This is customized to whatever project I'm currently working on,
# so it will likely need modification for future projects.
#
# Usage: This script should be symlinked into the root directory of a project
# and called any time the directory layout changes in a meaningful way.

pushd `dirname "$0"` > /dev/null
SCRIPTPATH=$(pwd)
popd > /dev/null

to_absolute() {
  (cd "$1"; pwd)
}

cd $SCRIPTPATH

# Set up '.include_paths.txt' hints file.
# It is used to set up vim's path variable and neomake's cpp linter.
(
  to_search=('library' 'apps' '3rd-party')
  for dir in "${to_search[@]}"; do
    ( find $dir | grep -i '/include$' )
  done
) > .include_paths.txt

# Set up '.alternate_paths.txt' hints files.
# It is used by vim's `a.vim` plugin to find headers related to .cpp files.
cat .include_paths.txt | while read path; do
  abs_path=$(to_absolute $path)
  cur_path=$abs_path
  while [[ "$cur_path" != $SCRIPTPATH ]]; do
    cur_path=$(dirname $cur_path)
    if [[ -d "$cur_path/src" ]]; then
      found_header=$(find $abs_path | grep '\.h$' | head -1)
      if [[ ! -z "$found_header" ]]; then
        alt_path=$(dirname $found_header)
        echo "$alt_path" > "$cur_path/src/.alternate_paths.txt"
        break
      fi
    fi
  done
done

if [[ -f .include_paths_other.txt ]]; then
  cat .include_paths_other.txt >> .include_paths.txt
fi
ksimek/dotfiles
.scripts/setup_project_vimfiles.sh
Shell
mit
1,554
#!/bin/bash
sleep 100
timjwright/godoit
test_wrapper_sleep.sh
Shell
mit
21
#!/bin/bash

export ASK_SUDO_PASSWORD=1
export SERV_USER=ubuntu
export SERV_NAME=example-ubuntu-server.net
export APP_NAME=YesodApp
export APP_PORT=3000

bundle exec rake spec
cosmo0920/ansible-playbook-for-keter-deploy
start-spec-example.bash
Shell
mit
175
sudo apt-get -y install dnstracer
sslavov93/kali-linux-tools
scripts/dnstracer.sh
Shell
mit
34
#!/bin/sh echo "Type the REPODIR (default is private), followed by [ENTER]:" read REPODIR if [ -z "$REPODIR" ]; then REPODIR="private" fi echo "The REPODIR is $REPODIR" echo "Type the CODEDIR (default is src), followed by [ENTER]:" read CODEDIR if [ -z "$CODEDIR" ]; then CODEDIR="src" fi echo "The CODEDIR is $CODEDIR" if [ -z "$1" ]; then echo "You are missing paramater. Right use is ./genhook.sh <repo-name>" exit 1 fi if [ ! -d "./$REPODIR" ] || [ ! -d "./$CODEDIR" ]; then echo "The directory structure is not correct. Please check whether you are in the correct directory and directories ./$REPODIR and ./$CODEDIR exist." exit 1 fi if [ -d "./$REPODIR/$1.git" ]; then echo "Repository directory $1.git already exists." exit 1 fi if [ "$(ls -A ./$CODEDIR)" ]; then while true; do read -p "WARNING: Directory ./$CODEDIR is not empty! Genhook will attempt to delete all files there, ok? (y/n): " yn case $yn in [Yy]* ) echo "Deleting files in ./$CODEDIR..."; rm -rd ./$CODEDIR/*; break;; [Nn]* ) echo "Files in ./$CODEDIR were not deleted."; break;; * ) echo "Please answer \"y\" or \"n\".";; esac done fi echo "Type in the IP address or DNS pointed to server, followed by [ENTER]:" read EXTERNALDNS echo "The EXTERNALDNS is $EXTERNALDNS" echo "Type in the environment (prod, stage, test..). Usually its the branch you are deploying in our case, followed by [ENTER]:" read ENVIRONMENT echo "The ENVIRONMENT is $ENVIRONMENT" git init --bare ./$REPODIR/$1.git echo "Creating post-update hook..." cat << EOF > ./$REPODIR/$1.git/hooks/post-update #!/bin/sh git --work-tree=$PWD/$CODEDIR --git-dir=$PWD/$REPODIR/$1.git checkout $ENVIRONMENT -f cd ../../$CODEDIR ################# # CUSTOM COMMANDS # uncomment anything you need or add more: ################# # npm install # npm run build # php app/console assetic:dump --env=prod # php app/console c:c --env=prod # php app/console doctrine:schema:update --force # sudo chmod -R 777 app/cache # sudo chmod -R 777 app/logs # gulp # rm web/config.php # rm web/app_dev.php # ... EOF chmod a+x ./$REPODIR/$1.git/hooks/post-update vi ./$REPODIR/$1.git/hooks/post-update cat << EOF Add this to your local .git/config: [remote "$1"] url = ssh://$USER@$EXTERNALDNS$PWD/$REPODIR/$1.git EOF
Symphony9/Git-bare-repository-generator
genhook.sh
Shell
mit
2,293
./test.sh 10 10 locale
./test.sh 10 100 locale
./test.sh 10 1000 locale
./test.sh 20 10 locale
./test.sh 20 100 locale
./test.sh 20 1000 locale
./test.sh 50 10 locale
./test.sh 50 100 locale
./test.sh 50 1000 locale
./test.sh 100 10 locale
./test.sh 100 100 locale
./test.sh 100 1000 locale
./test.sh 200 10 locale
./test.sh 200 100 locale
./test.sh 200 1000 locale
./test.sh 500 10 locale
./test.sh 500 100 locale
./test.sh 500 1000 locale
./test.sh 1000 10 locale
./test.sh 1000 100 locale
./test.sh 1000 1000 locale
Zihui-Zhong/RecycledNotebook
fullTestAmelioration.sh
Shell
mit
518
#!/bin/bash

echo 'Run this script as sudo...'

# exit on error
set -e

INSTALLER_PATH=/opt/installers/nnn
mkdir -p $INSTALLER_PATH

DESTINATION_PATH=/usr/local/bin
VERSION=3.4

wget -O $INSTALLER_PATH/nnn-${VERSION}.tar.gz https://github.com/jarun/nnn/archive/v${VERSION}.tar.gz
cd $INSTALLER_PATH && tar xfzv nnn-${VERSION}.tar.gz
cd $INSTALLER_PATH/nnn-${VERSION} && make
cp $INSTALLER_PATH/nnn-${VERSION}/nnn $DESTINATION_PATH

echo "nnn successfully installed at $DESTINATION_PATH."
tiagoprn/devops
shellscripts/installers/install_nnn.sh
Shell
mit
491
## Replaces the first occurrence of a given string with another in a string variable
## @param varname - the name of the variable
## @param search - the string to be searched
## @param replace - the string to be replaced by
function b.str.replace {
  local varname="$(eval echo \$$1)" search="$2" replace="$3"
  echo ${varname/$search/$replace}
}

## Replaces all occurrences of a given string with another in a string variable
## @param varname - the name of the variable
## @param search - the string to be searched
## @param replace - the string to be replaced by
function b.str.replace_all {
  local varname="$(eval echo \$$1)" search="$2" replace="$3"
  echo ${varname//$search/$replace}
}

## Returns a part of the string. If no length is given,
## it will return until the last char of the string. Negative
## lengths are relative from the back of the string
## @param varname - the name of the variable
## @param offset - the starting offset
## @param length - the length of chars to include
function b.str.part {
  local varname="$(eval echo \$$1)"
  if [ $# -eq 3 ]; then
    echo ${varname: $2:$3}
  elif [ $# -eq 2 ]; then
    echo ${varname: $2}
  else
    b.raise InvalidArgumentsException
  fi
}

## Trims spaces and tabs from the beginning and the end of a string
## @param string - string to be trimmed
function b.str.trim () {
  local arg="$*"
  [ -z "$arg" ] && read arg
  echo "$arg" | sed -E 's/^[ \t]*//g ; s/[ \t]*$//g'
}
bangsh/bangsh
modules/str.sh
Shell
mit
1,433
#!/bin/bash

# Backup MySQL database using mysqldump, storing in AWS S3.
# SEE: http://aws.amazon.com/cli/
#      This must be configured (aws configure).
# Set the following environment variables:
# - BACKUP_DATABASE
# - BACKUP_S3

set -e

tmpdir=`mktemp -d -t backup_mysql_tmpdir.XXXXXX`
trap "{ rm -rf $tmpdir; }" EXIT
cd $tmpdir

dumplocz="mysql_${BACKUP_DATABASE}_`date -u '+%Y-%m-%dT%H:%M:%S%z'`.sql.bz2"
time mysqldump $BACKUP_DATABASE | bzip2 --best > "$dumplocz"

s3path="s3://$BACKUP_S3/$dumplocz"
time /usr/local/bin/aws s3 cp "$dumplocz" "$s3path"
tiredpixel/dev-tiredpixel
2014-05-06-mysql-backup-aws-s3/mysql_backup_aws_s3.sh
Shell
mit
563
config() {
  NEW="$1"
  OLD="$(dirname $NEW)/$(basename $NEW .new)"
  # If there's no config file by that name, mv it over:
  if [ ! -r $OLD ]; then
    mv $NEW $OLD
  elif [ "$(cat $OLD | md5sum)" = "$(cat $NEW | md5sum)" ]; then
    # toss the redundant copy
    rm $NEW
  fi
  # Otherwise, we leave the .new copy for the admin to consider...
}

preserve_perms() {
  NEW="$1"
  OLD="$(dirname $NEW)/$(basename $NEW .new)"
  if [ -e $OLD ]; then
    cp -a $OLD ${NEW}.incoming
    cat $NEW > ${NEW}.incoming
    mv ${NEW}.incoming $NEW
  fi
  config $NEW
}

preserve_perms etc/vpnc/vpnc-script.new
panosmdma/SlackOnly-SlackBuilds
network/openconnect/doinst.sh
Shell
mit
600
#!/bin/bash
#SBATCH -n 1
#SBATCH --cpus-per-task=1
#SBATCH --qos=highio
#SBATCH --time=24:00:00

# Download a bunch of primate data from the NCBI Trace archive
TRACE_URL=ftp://ftp-private.ncbi.nlm.nih.gov/pub/TraceDB/
DATALIST=${TOPDIR}/trace_datasets_round2.txt
TRACEDIR=${TOPDIR}/trace

# Make a directory
if [[ ! -d ${TRACEDIR} ]]; then
    mkdir ${TRACEDIR}
fi

cd ${TRACEDIR}
while read LINE; do
    DIRNAME=`echo ${LINE} | awk '{print $1}'`
    FTPF=`echo ${LINE} | awk '{print $2}'`
    echo "Downloading ${FTPF} into ${DIRNAME}"
    DIRNAME=${TRACEDIR}/${DIRNAME}
    if [[ ! -d ${DIRNAME} ]]; then
        mkdir ${DIRNAME}
    fi
    cd ${DIRNAME}
    wget --quiet ${TRACE_URL}/${FTPF}/fasta*
    cd ${TRACEDIR}
done < ${DATALIST}
echo "JOB DONE."
sivakasinathan/cenpb
code/get_trace_data.sh
Shell
mit
769
#!/bin/bash

if [ -f windows_2008_r2_virtualbox.box ]; then
    rm windows_2008_r2_virtualbox.box
fi

packer build -only=virtualbox-iso windows_2008_r2.json

if [ -f windows_2008_r2_virtualbox.box ]; then
    vagrant box add windows_2008_r2 windows_2008_r2_virtualbox.box --force
    #rm windows_2008_r2_virtualbox.box
fi
seal-mis/packer-windows
build_windows_2008_r2_virtualbox.sh
Shell
mit
316
#!/bin/bash -e

export VODAFONE_USERNAME="<SET TO YOUR USERNAME/EMAIL>"
export VODAFONE_PASSWORD="<SET TO YOUR PASSWORD>"

export DISPLAY=:99
Xvfb $DISPLAY -ac &
XVFB_PID=$!

vodafone-scraper alert --minutes=540 --megabytes=450

kill $XVFB_PID
paulfurley/vodafone-scraper
examples/vodafone-alert.sh
Shell
mit
244
#!/bin/bash
#
# control.sh
#
# A control script to run SusumuTakuan,
# the L5R DiscordBot
#

cmd=$1
branch=`git branch | cut -d " " -f2`

if [ "$branch" = "master" ]; then
    #master branch
    susumu="takuan"
elif [ "$branch" = "develop" ]; then
    #develop branch
    susumu="takuantest"
fi

if [ "$cmd" = "start" ]; then
    #Start SusumuTakuan
    supervisorctl start $susumu
elif [ "$cmd" = "stop" ]; then
    #Stop SusumuTakuan
    supervisorctl stop $susumu
elif [ "$cmd" = "restart" ]; then
    #Restart SusumuTakuan
    supervisorctl restart $susumu
elif [ "$cmd" = "refresh" ]; then
    #Refresh Code
    git pull
fi
gryffon/SusumuTakuan
control.sh
Shell
mit
608
#!/bin/sh
rm -v *.o *.elf *.hex *.ihex *.oct *.bin *.raw *.motorola
jeremiedecock/snippets
atmel_mega_avr/blink_led_pololu_3pi_PD1/clean.sh
Shell
mit
69
DIR=${1:-$(dirname $0)}
for f in $(find $DIR -name '*.ltz'); do
    $DIR/../check.sh $f || exit -2
done
fab13n/lamtez
test/run-tests.sh
Shell
mit
103
#!/bin/bash
# (bash, not sh: the script uses BASH_SOURCE and [[ ]])

#ensure we are executing from the containing directory
# http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in

# get the directory of this script
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
  DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
  SOURCE="$(readlink "$SOURCE")"
  # if $SOURCE was a relative symlink, we need to resolve it relative to the
  # path where the symlink file was located
  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

cd $DIR
echo "now in $(pwd)"

ansible-galaxy install -r requirements.yml --force
kraveio/aws-bootstrap
blueprint/vpc/ansible/install-requirements.sh
Shell
mit
691
#!/bin/sh
#
# download and install the BEAM toolbox
#
# Copyright (C) 2014 EOX IT Services GmbH
#======================================================================

. `dirname $0`/../lib_logging.sh
#. `dirname $0`/lib_logging.sh

info "Installing BEAM-Toolbox ... "

#======================================================================

[ -z "$ODAOSHOSTNAME" ] && error "Missing the required ODAOSHOSTNAME variable!"
[ -z "$ODAOS_BEAM_HOME" ] && error "Missing the required ODAOS_BEAM_HOME variable!"
[ -z "$CONTRIB" ] && error "Missing the required CONTRIB variable!"
[ -z "$ODAOSUSER" ] && error "Missing the required ODAOSUSER variable!"
[ -z "$ODAOSGROUP" ] && error "Missing the required ODAOSGROUP variable!"

BEAM_TMPDIR='/tmp/beam'

# setup automatic cleanup
on_exit() {
    [ ! -d "$BEAM_TMPDIR" ] || rm -fR "$BEAM_TMPDIR"
}
trap on_exit EXIT

#======================================================================

#BEAM_URL_BC="http://www.brockmann-consult.de/cms/web/beam/dlsurvey?p_p_id=downloadportlet_WAR_beamdownloadportlet10&what=software/beam/4.11/beam_4.11_linux64_installer.sh"
#BEAM_URL_S3="http://org.esa.beam.s3.amazonaws.com/software/beam/4.11/beam_4.11_linux64_installer.sh"
#JAVA_HOME=
#BEAM_URL_BC="http://www.brockmann-consult.de/cms/web/beam/dlsurvey?p_p_id=downloadportlet_WAR_beamdownloadportlet10&what=software/beam/4.11/beam_4.11_unix_installer.sh"
#BEAM_URL_S3="http://org.esa.beam.s3.amazonaws.com/software/beam/4.11/beam_4.11_unix_installer.sh"
#JAVA_HOME="/usr/lib/jvm/java-1.6.0-openjdk-1.6.0.0.x86_64"
BEAM_URL_BC="http://www.brockmann-consult.de/cms/web/beam/dlsurvey?p_p_id=downloadportlet_WAR_beamdownloadportlet10&what=software/beam/5.0.0/beam_5.0_unix_installer.sh&amp;submit=Proceed"
BEAM_URL_S3="http://org.esa.beam.s3.amazonaws.com/software/beam/5.0.0/beam_5.0_unix_installer.sh"
#JAVA_HOME="/srv/odaos/data-quality/q2/local/jdk1.7.0_51"
JAVA_HOME="/usr/lib/jvm/java-1.7.0-openjdk-1.7.0.55.x86_64"

#======================================================================
# Check if there is already a version available in the contrib
# directory. If there is no BEAM installer available download it.

BEAM_FILE="`find "$CONTRIB" -name 'beam_*installer.sh' | sort | tail -n 1 `"

if [ -z "$BEAM_FILE" ]
then
    BEAM_FILE="$CONTRIB/`basename "$BEAM_URL_S3"`"
    info "Downloading from: $BEAM_URL_BC"
    info "Downloading from: $BEAM_URL_S3"
    info "Saving to: $BEAM_FILE"
    curl -s -S -e "$BEAM_URL_BC" "$BEAM_URL_S3" -o "$BEAM_FILE"
    [ -f "$BEAM_FILE" ] || { error "Failed to download the BEAM Toolbox installer." ; exit 1 ; }
    info "BEAM Toolbox downloaded."
else
    info "Installer found: $BEAM_FILE"
    info "Using the existing local copy of the BEAM Toolbox installer."
fi

#======================================================================
# run the BEAM installer

BEAM_INSTAL="$BEAM_TMPDIR/`basename "$BEAM_FILE"`"

# cleanup old stuff
[ ! -d "$BEAM_TMPDIR" -a ! -f "$BEAM_TMPDIR" ] || rm -fR "$BEAM_TMPDIR"
[ ! -d "$ODAOS_BEAM_HOME" -a ! -f "$ODAOS_BEAM_HOME" ] || rm -fR "$ODAOS_BEAM_HOME"

mkdir -p "$BEAM_TMPDIR"

info "Fixing the BEAM Toolbox installer ..."

# last line of the leading script
LNUM=`grep -a -n "^exit" "$BEAM_FILE" | tail -n 1 | cut -f 1 -d ":"`
[ 0 -le "$LNUM" ] || { error "Failed to fix the installer!" ; exit 1 ; }

# extract script
head -n "$LNUM" "$BEAM_FILE" > "$BEAM_INSTAL"

# get total file size
FSIZE=`stat --format="%s" "$BEAM_FILE"`
# size of the script part
SSIZE=`stat --format="%s" "$BEAM_INSTAL"`
# size of the binary payload
let BSIZE=FSIZE-SSIZE
[ 0 -le "$BSIZE" ] || { error "Wrong file-size!" ; exit 1 ; }

#----------------------------------------------------
# fix the installation script

# fix the text part
ex "$BEAM_INSTAL" <<END
2i
INSTALL4J_JAVA_HOME_OVERRIDE="$JAVA_HOME"
.
wq
END

# append the binary payload
tail -c "$BSIZE" "$BEAM_FILE" >> "$BEAM_INSTAL"

#----------------------------------------------------
# run the installation script

info "Installing the BEAM Toolbox ..."
sudo -u "$ODAOSUSER" sh "$BEAM_INSTAL" -q -dir "$ODAOS_BEAM_HOME"
DREAM-ODA-OS/ODA-OS_subsystem
scripts/install.d/25_beam_install.sh
Shell
mit
4,107
#!/usr/bin/env bash

##
# Deploys current application instance.
#
# The "app deploy" action triggers by default the "app update" action on given
# remote instance.
#
# @see cwt/app/update.sh
#
# @example
#   # Deploy target defaults to the 'prod' remote instance.
#   make app-deploy
#   # Or :
#   cwt/extensions/remote/app/deploy.sh
#
#   # Deploy to the 'dev' remote instance.
#   make app-deploy 'dev'
#   # Or :
#   cwt/extensions/remote/app/deploy.sh 'dev'
#

p_remote_id="$1"

if [[ -z "$p_remote_id" ]]; then
  p_remote_id='prod'
fi

cwt/extensions/remote/remote/exec.sh "$p_remote_id" \
  'cwt/app/update.sh'
Paulmicha/common-web-tools
cwt/extensions/remote/app/deploy.sh
Shell
mit
618
#!/bin/bash
max=10
for i in `seq 2 $max`
do
    curl -H "Content-Type: application/json" -X POST -d @edgeCases/11.json 127.0.0.1:3003
done
hitripod/HighChartsMailSender
stressTest.sh
Shell
mit
139
## perform imputation repeatedly to evaluate the accuracy
module load parallel

SCRIPT=`echo $0`
SCRIPT_PATH=$(perl -MCwd -MFile::Basename -e '$s=shift; $abs= Cwd::abs_path($s); print dirname($abs), "\n"' $SCRIPT)

ORIG_VCF=$1
PROBABILITY=$2
beagle=$3
REFPANEL=$4

cycles=10

chrs=$(perl -ne 'next if /\#/; $h{$1}=1 if /^(\S+)/; END{print join(":", sort{$a cmp $b} keys %h), "\n"}' ${ORIG_VCF})

# only use one chr. Comment the following three lines if you want to use ALL chrs.
chrs=$(perl -e '$f=shift; print ((split(/:/, $f))[0], "\n")' $chrs)
perl -e '($vcf, $chr)=@ARGV; open(IN, $vcf) or die $!; while(<IN>){if(/\#/){print $_ ;next} $f=$1 if /^(\S+)/; print $_ if $f eq $chr}' $ORIG_VCF $chrs >${ORIG_VCF}_chr.vcf
ORIG_VCF=${ORIG_VCF}_chr.vcf
###

if [ ! -d imp_eval ]; then mkdir imp_eval; fi

for i in `seq 1 $cycles`; do
    date
    echo $i

    # generate random missing genotypes
    perl $SCRIPT_PATH/generate_random_na.pl $ORIG_VCF $PROBABILITY 1>na.vcf 2>na.loci.txt

    # run beagle
    rm ./na.imputed*
    perl -e '$beagle = shift; $chr_str=shift; $refpanel=shift; @chrs=split /:/, $chr_str; foreach $chr(@chrs){$cmd="java -jar $beagle gtgl=na.vcf chrom=$chr nthreads=3 gprobs=true out=na.imputed_$chr"; $cmd .= " ref=$refpanel" if $refpanel=~/\S/; print $cmd, "\n"}' $beagle $chrs $REFPANEL |parallel -j 3
    perl -e '$chr_str=shift; @chrs=split /:/, $chr_str; foreach $chr(@chrs){$f="na.imputed_${chr}.vcf.gz"; open(F, "zcat $f |") or die; while(<F>){print $_ unless /^\#/} close F}' $chrs >na.imputed.vcf
    ##java -jar $beagle gtgl=na.vcf nthreads=10 gprobs=true out=na.imputed
    ## unzip the output
    ##gunzip na.imputed.vcf.gz

    # evaluate the imputed genotype
    perl $SCRIPT_PATH/evaluate_concordancy.pl $ORIG_VCF na.vcf na.loci.txt na.imputed.vcf >imp_eval/imputation_eval.${i}.tsv
done
swang8/Perl_scripts_misc
agseq/imputation/run.sh
Shell
mit
1,803
#!/bin/sh
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns don't throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}


if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/AFNetworking.root-NSURLSession-Reachability-Security-Serialization/AFNetworking.framework"
  install_framework "$BUILT_PRODUCTS_DIR/SignalR-ObjC-OSX/SignalR_ObjC.framework"
  install_framework "$BUILT_PRODUCTS_DIR/SocketRocket-OSX/SocketRocket.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/AFNetworking.root-NSURLSession-Reachability-Security-Serialization/AFNetworking.framework"
  install_framework "$BUILT_PRODUCTS_DIR/SignalR-ObjC-OSX/SignalR_ObjC.framework"
  install_framework "$BUILT_PRODUCTS_DIR/SocketRocket-OSX/SocketRocket.framework"
fi
maheshverma/SamplePod
Example/Pods/Target Support Files/Pods-OS X Example/Pods-OS X Example-frameworks.sh
Shell
mit
4,061
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2015:1439
#
# Security announcement date: 2015-07-22 06:38:01 UTC
# Script generation date: 2017-01-01 21:16:28 UTC
#
# Operating System: Red Hat 6
# Architecture: i386
#
# Vulnerable packages fix on version:
# - wpa_supplicant.i686:0.7.3-6.el6
# - wpa_supplicant-debuginfo.i686:0.7.3-6.el6
#
# Last versions recommended by security team:
# - wpa_supplicant.i686:0.7.3-6.el6
# - wpa_supplicant-debuginfo.i686:0.7.3-6.el6
#
# CVE List:
# - CVE-2015-4142
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

sudo yum install wpa_supplicant.i686-0.7.3 -y
sudo yum install wpa_supplicant-debuginfo.i686-0.7.3 -y
Cyberwatch/cbw-security-fixes
Red_Hat_6/i386/2015/RHSA-2015:1439.sh
Shell
mit
764
#!/usr/bin/env bash

# Install command-line tools using Homebrew.

# Ask for the administrator password upfront.
sudo -v

# Keep-alive: update existing `sudo` time stamp until the script has finished.
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &

# Make sure we’re using the latest Homebrew.
brew update

# Upgrade any already-installed formulae.
brew upgrade --all

# Install GNU core utilities (those that come with OS X are outdated).
# Don’t forget to add `$(brew --prefix coreutils)/libexec/gnubin` to `$PATH`.
# brew install coreutils
# sudo ln -s /usr/local/bin/gsha256sum /usr/local/bin/sha256sum

# Install some other useful utilities like `sponge`.
# brew install moreutils

# Install GNU `find`, `locate`, `updatedb`, and `xargs`, `g`-prefixed.
# brew install findutils

# Install GNU `sed`, overwriting the built-in `sed`.
# brew install gnu-sed --with-default-names

# Install Bash 4.
# Note: don’t forget to add `/usr/local/bin/bash` to `/etc/shells` before
# running `chsh`.
# brew install bash
# brew tap homebrew/versions
# brew install bash-completion2

# Install `wget` with IRI support.
brew install wget --with-iri

# Install RingoJS and Narwhal.
# Note that the order in which these are installed is important;
# see http://git.io/brew-narwhal-ringo.
# brew install ringojs
# brew install narwhal

# Install more recent versions of some OS X tools.
brew install vim --override-system-vi
# brew install homebrew/dupes/grep
# brew install homebrew/dupes/openssh
# brew install homebrew/dupes/screen
# brew install homebrew/php/php55 --with-gmp

# Install font tools.
# brew tap bramstein/webfonttools
# brew install sfnt2woff
# brew install sfnt2woff-zopfli
# brew install woff2

# Install some CTF tools; see https://github.com/ctfs/write-ups.
# brew install aircrack-ng
# brew install bfg
# brew install binutils
# brew install binwalk
# brew install cifer
# brew install dex2jar
# brew install dns2tcp
# brew install fcrackzip
# brew install foremost
# brew install hashpump
# brew install hydra
# brew install john
# brew install knock
# brew install netpbm
# brew install nmap
# brew install pngcheck
# brew install socat
# brew install sqlmap
# brew install tcpflow
# brew install tcpreplay
# brew install tcptrace
# brew install ucspi-tcp # `tcpserver` etc.
# brew install xpdf
# brew install xz

# Install other useful binaries.
# brew install ack
# brew install dark-mode
#brew install exiv2
# brew install git
# brew install git-lfs
# brew install imagemagick --with-webp
# brew install lua
# brew install lynx
# brew install p7zip
# brew install pigz
# brew install pv
# brew install rename
# brew install rhino
# brew install speedtest_cli
# brew install ssh-copy-id
# brew install tree
# brew install webkit2png
# brew install zopfli

brew install caskroom/cask/brew-cask
brew cask install google-chrome brackets slack sourcetree java silverlight flash iterm2 virtualbox fabric
brew install node

# Remove outdated versions from the cellar.
brew cleanup
johnbaker/dotfiles
brew.sh
Shell
mit
3,026
#!/bin/bash

# get source directory from arguments
SOURCE_DIR=$1

# get destination file path from arguments
DEST_FILE=$2

# initialize output file with xml declaration
echo '<?xml version="1.0" encoding="UTF-8"?>' > $DEST_FILE

# append opening docSet tag
echo '<d:docSet' >> $DEST_FILE
echo 'xmlns:d="http://lib.ncsu.edu/schema/doc_set"' >> $DEST_FILE
echo 'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"' >> $DEST_FILE
echo 'xsi:schemaLocation="http://lib.ncsu.edu/schema/doc_set https://raw.githubusercontent.com/NCSU-Libraries/doc_set_schema/master/doc_set.xsd">' >> $DEST_FILE
echo '' >> $DEST_FILE

# for each file in source directory,
# write contents to $DEST_FILE, excluding xml declaration
for file in $SOURCE_DIR/*
do
  while IFS= read -r line || [[ -n "$line" ]]; do
    if [[ $line != '<?xml'* ]]
    then
      echo $line >> $DEST_FILE
    fi
  done < "$file"
done

# append closing docSet tag
echo '</d:docSet>' >> $DEST_FILE
NCSU-Libraries/fits_aggregator
fits_aggregator.sh
Shell
mit
950
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2913-1
#
# Security announcement date: 2016-02-24 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:13 UTC
#
# Operating System: Ubuntu 15.10
# Architecture: i686
#
# Vulnerable packages fix on version:
# - ca-certificates:20160104ubuntu0.15.10.1
#
# Last versions recommended by security team:
# - ca-certificates:20160104ubuntu0.15.10.1
#
# CVE List:
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

sudo apt-get install --only-upgrade ca-certificates=20160104ubuntu0.15.10.1 -y
Cyberwatch/cbw-security-fixes
Ubuntu_15.10/i686/2016/USN-2913-1.sh
Shell
mit
638
#!/bin/sh -f
xv_path="/home/huchao/vivado/Vivado/2015.2"
ExecStep()
{
    "$@"
    RETVAL=$?
    if [ $RETVAL -ne 0 ]
    then
        exit $RETVAL
    fi
}
echo "xvlog -m64 --relax -prj lab1_1_2_tb_vlog.prj"
ExecStep $xv_path/bin/xvlog -m64 --relax -prj lab1_1_2_tb_vlog.prj 2>&1 | tee compile.log
chaohu/Daily-Learning
Verilog/lab2/lab2_1/lab1_1_2/lab1_2.sim/sim_1/behav/compile.sh
Shell
mit
271
#!/bin/sh
make -C /Users/pro/Desktop/Fall2015/COMS557/project_557_2015_tg/Homework_3 -f /Users/pro/Desktop/Fall2015/COMS557/project_557_2015_tg/Homework_3/CMakeScripts/ZERO_CHECK_cmakeRulesBuildPhase.make$CONFIGURATION all
tsgao/project_557_2015_tg
Homework_3/HCI557_ass3_p1.build/Debug/ZERO_CHECK.build/Script-2BB5E9F9A1504E4EAE06489A.sh
Shell
mit
223
git clone https://github.com/wesbos/Cobalt2-Alfred-Theme.git
ccollins/dotfiles
alfred/install.sh
Shell
mit
61
_gen_fzf_default_opts() {
  local base03="234"
  local base02="235"
  local base01="240"
  local base00="241"
  local base0="244"
  local base1="245"
  local base2="254"
  local base3="230"
  local yellow="136"
  local orange="166"
  local red="160"
  local magenta="125"
  local violet="61"
  local blue="33"
  local cyan="37"
  local green="64"

  export FZF_DEFAULT_OPTS="
    --color fg:-1,bg:-1,hl:$blue,fg+:$base2,bg+:$base02,hl+:$blue
    --color info:$yellow,prompt:$yellow,pointer:$base3,marker:$base3,spinner:$yellow
  "
}

_gen_fzf_default_opts
dnlserrano/dotfiles
scripts/fzf.sh
Shell
mit
556
DIR=$(cd "$(dirname $0)" && pwd) # Apply $DIR/apply.sh # Install vim-plug curl -fLo ~/.vim/autoload/plug.vim --create-dirs \ https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim # Install plugs vim +PlugInstall +qall
shoebillk/dotfiles
vim/install.sh
Shell
mit
242
#!/bin/sh
##PJM -L rscgrp=regular-cache
#PJM -L rscgrp=debug-flat
#PJM -L elapse=00:30:00
#PJM -g xg18i004
#PJM -N findBound
#PJM -j
#PJM -L node=96
#PJM --mpi proc=3072
#PJM --omp thread=1

echo "../r004m/run.b1.00_h030-060_0.60_h010-100/time010.00-050.00/" > input.list
echo "./hoge" >> input.list
echo "10 10" >> input.list
echo "0" >> input.list
echo "0" >> input.list
echo "1" >> input.list
# idir
# odir
# tbgn tend
# searchmode(0:bound, 1:unbound)
# printmode(0:all, 1:summary only)
# excludedID(-1/0/1)

odir=`awk '{if(NR==2) print $0}' input.list`
if ! test -e $odir
then
    mkdir -p $odir
fi

mpiexec.hydra -n ${PJM_MPI_PROC} ./run input.list
atrtnkw/sph
tool.hgas/findBound/ofp.sh
Shell
mit
656
#!/bin/bash -e

# install fisher plugin manager
[ "$(which fish)" ] || exit 1

# Install fisher plugin manager for fish
if [ ! -e ~/.config/fish/functions/fisher.fish ] ; then
    fish -c "curl -sL https://git.io/fisher | source && fisher install jorgebucaran/fisher"
fi

# More Fish plugins:
# https://awesomeopensource.com/projects/fish-plugin/fisher
fish_install_plugins=(
    jorgebucaran/fisher
    jethrokuan/z
)

if [ -e ~/.config/fish/functions/fisher.fish ]; then
    for plugin in ${fish_install_plugins[*]}; do
        echo "processing fish plugin: $plugin"
        fish -c "fisher install $plugin"
    done
fi
dvidelabs/config
.config/setup/fish.sh
Shell
mit
622
#!/bin/bash
# build, test and generate docs in this phase

set -ex

. "$(dirname $0)/utils.sh"

main() {
    # Test a normal debug build.
    cargo build --target "$TARGET" --verbose --all

    # Show the output of the most recent build.rs stderr.
    set +x
    stderr="$(find "target/$TARGET/debug" -name stderr -print0 | xargs -0 ls -t | head -n1)"
    if [ -s "$stderr" ]; then
        echo "===== $stderr ====="
        cat "$stderr"
        echo "====="
    fi
    set -x

    # sanity check the file type
    file target/"$TARGET"/debug/rg

    # Check that we've generated man page and other shell completions.
    outdir="$(cargo_out_dir "target/$TARGET/debug")"
    file "$outdir/rg.bash"
    file "$outdir/rg.fish"
    file "$outdir/_rg.ps1"
    file "$outdir/rg.1"

    # Apparently tests don't work on arm, so just bail now. I guess we provide
    # ARM releases on a best effort basis?
    if is_arm; then
        return 0
    fi

    # Test that zsh completions are in sync with ripgrep's actual args.
    "$(dirname "${0}")/test_complete.sh"

    # Run tests for ripgrep and all sub-crates.
    cargo test --target "$TARGET" --verbose --all
}

main
nrc/rustc-perf
collector/benchmarks/ripgrep/ci/script.sh
Shell
mit
1,158
for f in *.cl; do
    /opt/SPIRV-LLVM/build/bin/clang -cc1 -triple spir-unknown-unknown -O3 -cl-std=CL1.2 -cl-kernel-arg-info -cl-single-precision-constant -Wno-unused-parameter -Wno-undefined-inline -Wno-unused-local-typedef -Wno-gcc-compat -DNO_SCALE -DCT=float4 -DFT=float -DWT=float -DST=int -DT=int -DLOCAL_SIZE=8 -DBORDER_CONSTANT -Ddepth=4 -DsrcT=float -DdstT=float -DconvertToDT -DrowsPerWI=4 -DBLOCK_ROWS=4 -DTILE_DIM=1 -DDECLARE_INPUT_MAT_N -DDECLARE_OUTPUT_MAT_N -DDECLARE_INDEX_N -DPROCESS_ELEM_N - -include /home/daniel/workspace/VC4CLStdLib/include/VC4CLStdLib.h -x cl -S -emit-llvm -o $f.ir $f
done
doe300/VC4C
testing/OpenCV/compile_all.sh
Shell
mit
613
#!/bin/bash
# $1 = branch
# $2+ = args

bash run.sh $2 $3 $4 $5 $6 $7 $8 $9 | tee log

if [[ $REGRESSION_ENV -eq 1 ]]; then
    pass_line=`cat log | grep "PASS"`
    if [[ ${pass_line} = *": 1"* ]]; then
        pass=1
    elif [[ ${pass_line} = *": 0"* ]]; then
        pass=0
    else
        pass="?"
    fi

    timeout_wc=`cat log | grep "TIMEOUT" | wc -l`
    runtime_string=`cat log | grep "Design ran for" | sed "s/Design ran for //g" | sed "s/ cycles.*//g"`
    if [[ ${timeout_wc} -gt 0 ]]; then
        runtime="TIMEOUT"
    else
        runtime=$runtime_string
    fi

    # Hacky go back until $SPATIAL_HOME
    hash=`cat ../../../../hash`
    ahash=`cat ../../../../ahash`
    appname=`basename \`pwd\``
    properties=`cat chisel/IOModule.scala | grep "App Characteristics" | sed "s/^.*App Characteristics: //g" | sed "s/ //g"`

    if [[ $1 = "Zynq" ]]; then
        REGRESSION_HOME="/home/mattfel/regression/synth/zynq"
    elif [[ $1 = "ZCU" ]]; then
        REGRESSION_HOME="/home/mattfel/regression/synth/zcu"
    elif [[ $1 = "AWS" ]]; then
        REGRESSION_HOME="/home/mattfel/regression/synth/aws"
    elif [[ $1 = "Arria10" ]]; then
        REGRESSION_HOME="/home/mattfel/regression/synth/arria10"
    fi

    python3 ../../../../utilities/gdocs.py "report_regression_results" $1 $appname $pass $runtime $hash $ahash "$properties" "$2 $3 $4 $5 $6 $7 $8 $9"
fi
stanford-ppl/spatial-lang
spatial/core/resources/chiselgen/app-level/scripts/regression_run.sh
Shell
mit
1,273
#! /usr/bin/env bash
./node_modules/.bin/intern-client config=tests/intern
graphicore/obtainJS
runtest.sh
Shell
mit
75
#!/bin/bash
#################################################################
# Copyright (C) 2016 Sean Guo. All rights reserved.
#
# > File Name:    < 2_usb.sh >
# > Author:       < Sean Guo >
# > Mail:         < [email protected] >
# > Created Time: < 2016/02/28 >
# > Last Changed:
# > Description:  Configure the kernel to add drivers for other USB devices
#################################################################

usb_dir=usb
kernel_dir=linux-2.6.38

# Add the definition of void s3c_otg_phy_config(int enable);
cp $usb_dir/mach-mini6410.c $kernel_dir/arch/arm/mach-s3c64xx/mach-mini6410.c
cp $usb_dir/ohci-s3c2410.c $kernel_dir/drivers/usb/host/ohci-s3c2410.c
cp $usb_dir/tiny6410_config $kernel_dir/.config
SeanXP/ARM-Tiny6410
linux/kernel/2_usb.sh
Shell
mit
750
#!/usr/bin/env bash
set -ex

cd /data2/malmo_1/latest/Minecraft/
./launchClient.sh -port 11100 &

cd /data2/malmo_2/latest/Minecraft/
./launchClient.sh -port 11200 &

cd /data2/malmo_3/latest/Minecraft/
./launchClient.sh -port 11300 &
matpalm/malmomo
start_3_malmos.sh
Shell
mit
235
# http://bchavez.bitarmory.com/archive/2013/01/16/compiling-kernel-modules-for-raspberry-pi.aspx

#git clone https://github.com/raspberrypi/linux.git
#git clone https://github.com/raspberrypi/tools.git

export CCPREFIX=/home/topic/rasp/kernel_new/tools/arm-bcm2708/arm-bcm2708-linux-gnueabi/bin/arm-bcm2708-linux-gnueabi-
export KERNEL_SRC=/home/topic/rasp/kernel_new/linux
willemwouters/linux-device-drivers
raspberry_pi/scripts/prepareEnv.sh
Shell
mit
373
#!/bin/bash
####################################################################
# Name: export_databases.sh
# Author: Kenny Robinson, @almostengr
# Usage: export_databases.sh
# Description: Export all the databases to a file.
####################################################################

source ./config.sh

# default to localhost:3306 when config.sh leaves these unset
if [ "${HOSTNAME}" == "" ]; then
    HOSTNAME="localhost"
fi

if [ "${PORT}" == "" ]; then
    PORT="3306"
fi

/usr/bin/mysqldump --all-databases -u "${USERNAME}" -p"${PASSWORD}" -h "${HOSTNAME}" > ${SQLFILE}
bitsecondal/ubuntu-automation
mysql/export_databases.sh
Shell
mit
522
#!/usr/bin/env bash
#---------------------------------------------------------------------------------------------------
# Deploys the latest Docker image for Codekvast Login to the staging environment
#---------------------------------------------------------------------------------------------------

source $(dirname $0)/.check-requirements.sh

aws ecs update-service --cluster=codekvast-staging --service=login --force-new-deployment | jq .service.taskDefinition | xargs
crispab/codekvast
deploy/deploy-login-to-staging.sh
Shell
mit
476
#!/usr/bin/env bash

###############################################################################
# Google Chrome & Google Chrome Canary                                        #
###############################################################################

# # Disable backswipe
# defaults write com.google.Chrome AppleEnableSwipeNavigateWithScrolls -bool false
# defaults write com.google.Chrome.canary AppleEnableSwipeNavigateWithScrolls -bool false

# Use the system-native print preview dialog
defaults write com.google.Chrome DisablePrintPreview -bool true
defaults write com.google.Chrome.canary DisablePrintPreview -bool true

# Expand the print dialog by default
defaults write com.google.Chrome PMPrintingExpandedStateForPrint2 -bool true
defaults write com.google.Chrome.canary PMPrintingExpandedStateForPrint2 -bool true
drvdijk/dotfiles
macos/apps/google-chrome.sh
Shell
mit
796
#!/usr/bin/env bash

# TODO: change configuration
# https://docs.influxdata.com/influxdb/v1.2/administration/config/
# default
#   cache-max-memory-size = 1G
#   cache-snapshot-memory-size = 24MB
# The cache snapshot memory size is the size at which the engine will snapshot
# the cache and write it to a TSM file, freeing up memory
# INFLUXDB_DATA_CACHE_SNAPSHOT_MEMORY_SIZE

# TODO: don't use a hard-coded path
docker run --name tsdb-influxdb -v /home/at15/workspace/ssd/lib/influxdb:/var/lib/influxdb -p 8083:8083 -p 8086:8086 -d influxdb:1.2.4
sleep 5
curl -XPOST 'http://localhost:8086/query?u=myusername&p=mypassword' --data-urlencode 'q=CREATE DATABASE "xb"'
xephonhq/xephon-k
_legacy/script/influxdb/pull_influxdb.sh
Shell
mit
653
#!/bin/bash

LATEXMK_OPT="-pdf -xelatex -shell-escape -8bit -f"
DOCUMENT="spec"

if [ ! -f './rail/rail' ]; then
    # Compile the rail utility
    make -sC ./rail
    if [ ! $? -eq 0 ]; then
        echo failed to compile rail
        echo please ensure that both flex and bison are present
        exit 1
    fi
fi

# make page
latexmk $LATEXMK_OPT $DOCUMENT

# transform the rail spec
./rail/rail $DOCUMENT 2>/dev/null 1>/dev/null
if [ ! $? -eq 0 ]; then
    echo 'rail utility execution failed, which is generally caused by a syntax error within the rail section'
    exit 1
fi

rm $DOCUMENT.pdf
latexmk $LATEXMK_OPT $DOCUMENT
Jack-Q/picol
doc/specification/rail-compile.sh
Shell
mit
615
#!/bin/bash
# $Id$ $Revision$

# where everything is
graphviz_host=www.graphviz.org

SRCDIR=CURRENT
if test .$1 != . ;then
    SRCDIR=$1
fi
if test .$SRCDIR = .CURRENT ; then
    GRAPHVIZ_PUB_PATH=/data/pub/graphviz/development/
else
    GRAPHVIZ_PUB_PATH=/data/pub/graphviz/stable/
fi

work=$HOME/tmp/gviz
PREFIX=$HOME/FIX/Lion.x86_64
export PREFIX
PATH=$PREFIX/bin:$PATH
export PATH

SOURCES=$GRAPHVIZ_PUB_PATH/SOURCES
PKGS=$GRAPHVIZ_PUB_PATH/macos/lion

# search for last graphviz tarball in the public sources
source=
for file in `ssh gviz@$graphviz_host ls -t $SOURCES`; do
    source=`expr $file : '\(graphviz-[0-9.]*\).tar.gz$'`
    if test -n "$source"; then
        break
    fi
done

if test -n "$source"
then
    LOG=$source-log.txt

    # clean up previous builds
    mkdir -p $work
    rm -rf $work/*
    cd $work

    # get the sources
    scp gviz@$graphviz_host:$SOURCES/$source.tar.gz . 2>$LOG

    # build the package
    tar xzf $source.tar.gz
    (cd $source/macosx/graphviz.xcodeproj; cp lion.project.pbxproj project.pbxproj)
    (cd $source/macosx/build; cp Makefile.lion Makefile)
    (cd $source/macosx/graphviz.help; cp ../build/graphviz.help.helpindex.lion graphviz.help.helpindex)
    make -C $source/macosx/build >>$LOG 2>&1

    # put the package
    scp $source/macosx/build/graphviz.pkg gviz@$graphviz_host:$PKGS/$source.pkg 2>>$LOG
    scp $LOG gviz@$graphviz_host:$PKGS/$LOG
fi
ellson/graphviz-build
macosx/graphviz-lion-bin-pkg.sh
Shell
epl-1.0
1,385
#!/usr/bin/env bash
###############################################################################
# Copyright (c) 2016, 2021 Red Hat Inc and others
#
# This program and the accompanying materials are made
# available under the terms of the Eclipse Public License 2.0
# which is available at https://www.eclipse.org/legal/epl-2.0/
#
# SPDX-License-Identifier: EPL-2.0
#
# Contributors:
#     Red Hat Inc - initial API and implementation
#     Eurotech
###############################################################################

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

docker_common() {
    #shellcheck source=./docker-common.sh
    . "${SCRIPT_DIR}"/docker-common.sh
}

docker_compose() {
    declare -a COMPOSE_FILES;

    if [[ -n "${KAPUA_BROKER_DEBUG_PORT}" ]]; then
        if [[ "${KAPUA_BROKER_DEBUG_SUSPEND}" == "true" ]]; then
            KAPUA_BROKER_DEBUG_SUSPEND="y"
        else
            KAPUA_BROKER_DEBUG_SUSPEND="n"
        fi
        COMPOSE_FILES+=(-f "${SCRIPT_DIR}/../compose/extras/docker-compose.broker-debug.yml")
    fi

    if [[ -n "${KAPUA_REST_DEBUG_PORT}" ]]; then
        if [[ "${KAPUA_REST_DEBUG_SUSPEND}" == "true" ]]; then
            KAPUA_REST_DEBUG_SUSPEND="y"
        else
            KAPUA_REST_DEBUG_SUSPEND="n"
        fi
        COMPOSE_FILES+=(-f "${SCRIPT_DIR}/../compose/extras/docker-compose.rest-debug.yml")
    fi

    if [[ -n "${KAPUA_ELASTICSEARCH_DATA_DIR}" ]]; then
        COMPOSE_FILES+=(-f "${SCRIPT_DIR}/../compose/extras/docker-compose.es-storage-dir.yml")
    fi

    docker-compose -f "${SCRIPT_DIR}/../compose/docker-compose.yml" "${COMPOSE_FILES[@]}" up -d
}

check_if_docker_logs() {
    if [[ "$1" == '--logs' ]]; then
        #shellcheck source=./docker-logs.sh
        . "${SCRIPT_DIR}/docker-logs.sh"
    else
        echo "Unrecognised parameter: ${1}"
        print_usage_deploy
    fi
}

print_usage_deploy() {
    echo "Usage: $(basename "$0") [--logs]" >&2
}

docker_common

echo "Deploying Eclipse Kapua..."
docker_compose || {
    echo "Deploying Eclipse Kapua... ERROR!"
    exit 1
}
echo "Deploying Eclipse Kapua... DONE!"

if [[ -z "$1" ]]; then
    echo "Run \"docker-compose -f ${SCRIPT_DIR}/../compose/docker-compose.yml logs -f\" for container logs"
else
    check_if_docker_logs "$1"
fi
stzilli/kapua
deployment/docker/unix/docker-deploy.sh
Shell
epl-1.0
2,306
#!/sbin/busybox sh

if [ -f /system/bin/bootanimation.bin ]; then
    /system/bin/bootanimation.bin
elif [ -f /data/local/bootanimation.zip ] || [ -f /system/media/bootanimation.zip ]; then
    /sbin/bootanimation
else
    /system/bin/samsungani
fi;
simone201/neak-kernel-sgs2
aosp-initramfs/sbin/bootanimation.sh
Shell
gpl-2.0
244
#!/bin/sh

# Abort provisioning if some select items are already installed. We'll assume if
# these are present, everything is. Test for modules directory in /data/claysoil and
# for nginx. This is mainly meant for Vagrant.
which apache2ctl >/dev/null && {
    echo "Vagrant test setup already installed. If you are running on vagrant and need to test the installation script, please first issue a vagrant destroy."
    exit 0
}

export DEBIAN_FRONTEND=noninteractive
apt-get update
ln -sf /usr/share/zoneinfo/Europe/Amsterdam /etc/localtime

# Apache / PHP
apt-get install -y apache2 libapache2-mod-php5 php-apc
sed -i 's/\/var\/www/\/data\/app/g' /etc/apache2/sites-enabled/000-default
service apache2 restart

# Memcached
apt-get install -y memcached php5-memcached

# Set swappiness to 0. This will prevent swapping as much as possible.
# This setting is highly recommended when running Cassandra.
sysctl vm.swappiness=0
echo "vm.swappiness=0" >> /etc/sysctl.conf

# Cassandra
echo "deb http://www.apache.org/dist/cassandra/debian 12x main" >> /etc/apt/sources.list
echo "deb-src http://www.apache.org/dist/cassandra/debian 12x main" >> /etc/apt/sources.list
gpg --keyserver pgp.mit.edu --recv-keys F758CE318D77295D
gpg --export --armor F758CE318D77295D | sudo apt-key add -
gpg --keyserver pgp.mit.edu --recv-keys 2B5C1B00
gpg --export --armor 2B5C1B00 | apt-key add -
apt-get update
apt-get install -y cassandra
sed -i 's/listen_address: localhost/listen_address: 33.33.33.30/g' /etc/cassandra/cassandra.yaml
sed -i 's/rpc_address: localhost/rpc_address: 127.0.0.1/g' /etc/cassandra/cassandra.yaml
sed -i 's/seeds: "127.0.0.1"/seeds: "33.33.33.30"/g' /etc/cassandra/cassandra.yaml
service cassandra restart

# Chkconfig
apt-get -y install chkconfig
chkconfig cassandra on
mauritsl/vagrant-cassandra-php
vagrant/provision.sh
Shell
gpl-2.0
1,774
#! /bin/sh
# Copyright (C) 2002-2017 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# Tests that Automake understands suffix rules with subdir objects.
# Reported by John Ratliff.

required=cc
. test-init.sh

cat >>configure.ac <<EOF
AC_PROG_CC
AC_OUTPUT
EOF

cat >Makefile.am << 'END'
AUTOMAKE_OPTIONS = subdir-objects
SUFFIXES = .baz .o
# We fake here:
.baz.o:
## Account for VPATH issues on weaker make implementations.
	cp `test -f '$<' || echo $(srcdir)/`$< $@
bin_PROGRAMS = foo
foo_SOURCES = foo.c sub/bar.baz
.PHONY: test-fake test-real
test-fake:
	echo $(foo_OBJECTS) | grep '^foo\.quux sub/bar\.quux$$'
test-real:
	echo $(foo_OBJECTS) | grep '^foo\.$(OBJEXT) sub/bar\.$(OBJEXT)$$'
END

mkdir sub
: > sub/bar.baz
: > foo.c

$ACLOCAL
$AUTOCONF
$AUTOMAKE -a

./configure
run_make OBJEXT=quux test-fake
$MAKE test-real

:
Starlink/automake
t/suffix-custom-subobj.sh
Shell
gpl-2.0
1,437
# calculate and visualize a trajectory
./nuco \
  --Ap=80 --Zp=36 --At=197 --Zt=79 \
  --E=10 \
  --b=10 \
  --method=relativistic --accuracy=1e-7

cat << EOF > visualize.gnuplot
set xlabel "x [fm]"
set ylabel "y [fm]"
set grid
set term wxt size 500,1000
set key top left box opaque spacing 1.5 width 1
set multiplot layout 2,1
set title "^{80}Kr on ^{197}Au at b = 10fm and E_{kin} = 10 MeV/u"
plot \
  "steps.dat" using 1:2 lt 1 w l title "projectile", \
  "steps.dat" using 3:4 lt 2 w l title "target", \
  "steps.dat" using 1:2 lt 1 pt 7 title "projectile", \
  "steps.dat" using 3:4 lt 2 pt 7 title "target"
unset title
plot[-40:40][-40:40] \
  "steps.dat" using 1:2 lt 1 w l title "projectile", \
  "steps.dat" using 3:4 lt 2 w l title "target", \
  "steps.dat" using 1:2 lt 1 pt 7 title "projectile", \
  "steps.dat" using 3:4 lt 2 pt 7 title "target"
unset multiplot
EOF

gnuplot --persist visualize.gnuplot
miree/nuco
examples/trajectory_Kr_on_Au_01.sh
Shell
gpl-2.0
908
#!/bin/bash
cd `dirname $0`
. ./common.sh

usage="
Prepares softether vpn server

Usage:

$(basename $0) <server-ip> --username <username> --password <plaintext password>
               [--ip <static_ip>] [--vpn-hub <hub name>] [--connection_name <name>]
               [--nicname <vpn adapter name>] [--port <Server's IP port>]

where

 <server-ip>        - Server address
 --port             - Port number. Defaults to 992
 --vpn-hub          - Name of the virtual hub to connect to, defaults to 'VPN'
 --username         - User name (defaults to hostname)
 --ip               - Static ip to use (good for dhcp servers)
 --connection_name  - Connection name. Defaults to user name
 --password         - User password. In plaintext, so make sure this command is not placed in the history
 --nicname          - Name of the network adapter. Defaults to vpn0.
 --debug            - Flag that sets debugging mode.
 --service          - Add as a system service under name 'softether-client-{connection-name}'
 --log              - Path to the log file that will log all meaningful commands

Example:

./$(basename $0) 172.104.148.166 --username adam --password 12345
"

if [ "$1" == "" ]; then
	echo "$usage" >&2
	exit 1
fi

if [ "$1" == "--help" ]; then
	echo "$usage" >&2
	exit 1
fi

server_address=$1
shift

nicname=vpn
password=""
username="$(hostname)"
ip="dhcp"
vpn_hub=VPN
port=992
connection_name=""

while [[ $# > 0 ]]
do
	key="$1"
	shift
	case $key in
		--debug)
			debug=1
			;;
		--log)
			log=$1
			shift
			;;
		--help)
			echo "$usage"
			exit 0
			;;
		--password)
			password="$1"
			shift
			;;
		--connection_name)
			connection_name="$1"
			shift
			;;
		--username)
			username="$1"
			shift
			;;
		--vpn-hub)
			vpn_hub="$1"
			shift
			;;
		--ip)
			ip="$1"
			shift
			;;
		--nicname)
			nicname="$1"
			shift
			;;
		--port)
			port="$1"
			shift
			;;
		--)
			break
			;;
		-*)
			echo "Error: Unknown option: $1" >&2
			echo "$usage" >&2
			exit 1
			;;
	esac
done

if [ -n "$debug" ]; then
	opts="$opts --debug"
	if [ -z "$log" ]; then
		log=/dev/stdout
	else
		opts="$opts --log $log"
	fi
fi

if [ -z "$username" ]; then
	errcho "You must specify user name!"
	exit 1
fi

if [ -z "$connection_name" ]; then
	connection_name=$username
fi

if [ ! "${ip}" == "dhcp" ]; then
	pattern='^([[:digit:]]+)\.([[:digit:]]+)\.([[:digit:]]+)\.([[:digit:]]+)$'
	if [[ ! "$ip" =~ $pattern ]]; then
		errcho "Wrong format of the ip!"
		exit 1
	fi
fi

add_ppa paskal-07/softethervpn
install_apt_package softether-vpnclient
logexec sudo vpnclient start

if ! vpncmd localhost /CLIENT /CMD AccountList | grep -q "VPN Connection Setting Name |${connection_name}"; then
	# Create the connection
	if ! vpncmd localhost /CLIENT /CMD NicList | grep -q "Virtual Network Adapter Name|${nicname}"; then
		logexec vpncmd localhost /CLIENT /CMD NicCreate ${nicname}
	fi
	logexec vpncmd localhost /CLIENT /CMD AccountCreate ${connection_name} /SERVER:"${server_address}:${port}" /HUB:${vpn_hub} /USERNAME:${username} /NICNAME:${nicname}
	logexec vpncmd localhost /CLIENT /CMD AccountPasswordSet ${connection_name} /PASSWORD:${password} /TYPE:standard
fi

#logexec sudo vpncmd localhost /CLIENT /CMD accountconnect ${connection_name}

textfile /etc/systemd/system/softether_${connection_name}_client.service "[Unit]
Description=SoftEther ${connection_name} Client
After=softether_client.service
Requires=softether_client.service

[Service]
Type=oneshot
ExecStart=/bin/bash /usr/local/lib/softether/start_${connection_name}_vpn.sh
ExecStop=/usr/bin/vpncmd localhost /CLIENT /CMD accountdisconnect ${connection_name}
ExecStop=/bin/bash -c \"ifconfig vpn_${nicname} down\"
RemainAfterExit=yes

[Install]
WantedBy=multi-user.target" root

textfile /etc/systemd/system/softether_client.service "[Unit]
Description=SoftEther Client service
After=network.target auditd.service

[Service]
Type=forking
ExecStart=/usr/bin/vpnclient start
ExecStop=/usr/bin/vpnclient stop
KillMode=process
Restart=on-failure

[Install]
WantedBy=multi-user.target" root

#install_file files/softether_svc /etc/systemd/system/softether_${connection_name}_client.service root

logmkdir /usr/local/lib/softether root

if [ "${ip}" == "dhcp" ]; then
	textfile /usr/local/lib/softether/start_${connection_name}_vpn.sh "#!/bin/sh
sudo /usr/bin/vpncmd localhost /CLIENT /CMD accountconnect ${connection_name}
sudo dhclient vpn_${nicname}" root
else
	textfile /usr/local/lib/softether/start_${connection_name}_vpn.sh "#!/bin/sh
sudo /usr/bin/vpncmd localhost /CLIENT /CMD accountconnect ${connection_name}
sudo ifconfig vpn_${nicname} ${ip}
if service --status-all | grep -Fq 'isc-dhcp-server'; then
	sudo systemctl restart isc-dhcp-server.service
fi" root
fi

logexec sudo systemctl daemon-reload
logexec sudo systemctl enable softether_client.service
logexec sudo systemctl enable softether_${connection_name}_client.service
logexec sudo systemctl stop softether_client.service
logexec sudo systemctl start softether_client.service
logexec sudo systemctl stop softether_${connection_name}_client.service
logexec sudo systemctl start softether_${connection_name}_client.service

#install_apt_package curl
#last_version=$(get_latest_github_release_name SoftEtherVPN/SoftEtherVPN)
#link="https://github.com/SoftEtherVPN/SoftEtherVPN/archive/${last_version}.tar.gz"
#get_git_repo https://github.com/SoftEtherVPN/SoftEtherVPN.git /opt SoftEther
#install_apt_packages curl cmake build-essential libssl-dev zlib1g-dev libreadline-dev
#if ! which vpncmd>/dev/null; then
#	logmkdir "/opt/SoftEther" adam
#	logmkdir "/opt/SoftEther/build" adam
#	pushd "/opt/SoftEther/build"
#	logexec cmake ..
#	logexec make -j
#	logexec sudo make install
#fi

exit 1
adamryczkowski/puppet-bootstrap
softether-client.sh
Shell
gpl-2.0
5,832
#! /bin/bash
set -e
pip install -r requirements.txt
python ./chatbot.py
bbriggs/sithmail-tipbot
sithmail-tipbot/legobot-entrypoint.sh
Shell
gpl-2.0
73
#!/bin/bash -x

#
# Generated - do not edit!
#

# Macros
TOP=`pwd`
CND_PLATFORM=GNU-Linux-x86
CND_CONF=Debug
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=so
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/exercici14_modular
OUTPUT_BASENAME=exercici14_modular
PACKAGE_TOP_DIR=exercici14modular/

# Functions
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}

# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}

# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/exercici14modular/bin"
copyFileToTmpDir "${OUTPUT_PATH}" "${NBTMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755

# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/exercici14modular.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/exercici14modular.tar *
checkReturnCode

# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
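# Usage sketch for the helpers above (hypothetical arguments, for illustration only):
#   makeDirectory "${NBTMPDIR}/example/dir" 0755                 # mkdir -p, then chmod; exits on failure
#   copyFileToTmpDir "README" "${NBTMPDIR}/example/README" 0644  # cp, then chmod; exits on failure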
pdavila13/Programacion_C
Modular/Exercici14_Modular/nbproject/Package-Debug.bash
Shell
gpl-2.0
1,505
#!/bin/bash

###############################################################################
# To all DEV around the world :)                                              #
# to build this kernel you need to be ROOT and to have bash as script loader  #
# do this:                                                                    #
# cd /bin                                                                     #
# rm -f sh                                                                    #
# ln -s bash sh                                                               #
#                                                                             #
# Now you can build my kernel.                                                #
# using bash will make your life easy. so it's best that way.                 #
# Have fun and update me if something nice can be added to my source.         #
#                                                                             #
# Original scripts by halaszk & various sources throughout gitHub             #
# modified by UpInTheAir for SkyHigh kernels                                  #
# very very slightly modified by The Sickness for his Twisted S6 kernel       #
#                                                                             #
###############################################################################

############################################ SETUP ############################################

# Time of build startup
res1=$(date +%s.%N)

echo
echo "${bldcya}***** Setting up Environment *****${txtrst}";
echo

. ./env_setup.sh ${1} || exit 1;

if [ ! -f $KERNELDIR/.config ]; then
	echo
	echo "${bldcya}***** Writing Config *****${txtrst}";
	cp $KERNELDIR/arch/arm64/configs/$KERNEL_CONFIG .config;
	make ARCH=arm64 $KERNEL_CONFIG;
fi;

. $KERNELDIR/.config

########################################### CLEAN UP ##########################################

echo
echo "${bldcya}***** Clean up first *****${txtrst}"

find . -type f -name "*~" -exec rm -f {} \;
find . -type f -name "*orig" -exec rm -f {} \;
find . -type f -name "*rej" -exec rm -f {} \;

# cleanup previous Image files
if [ -e $KERNELDIR/dt.img ]; then
	rm $KERNELDIR/dt.img;
fi;
if [ -e $KERNELDIR/arch/arm64/boot/Image ]; then
	rm $KERNELDIR/arch/arm64/boot/Image;
fi;
if [ -e $KERNELDIR/arch/arm64/boot/dt.img ]; then
	rm $KERNELDIR/arch/arm64/boot/dt.img;
fi;

# cleanup variant ramdisk files
find . -type f -name "EMPTY_DIRECTORY" -exec rm -f {} \;

if [ -e $BK/$TARGET/boot.img ]; then
	rm -rf $BK/$TARGET/boot.img
fi;
if [ -e $BK/$TARGET/Image ]; then
	rm -rf $BK/$TARGET/Image
fi;
if [ -e $BK/$TARGET/ramdisk.gz ]; then
	rm -rf $BK/$TARGET/ramdisk.gz
fi;
if [ -e $BK/$TARGET/ramdisk/lib/modules/ ]; then
	cd ${KERNELDIR}/$BK/$TARGET
	find . -type f -name "*.ko" -exec rm -f {} \;
	cd ${KERNELDIR}
fi;
if [ -e $BK/system/lib/modules/ ]; then
	cd ${KERNELDIR}/$BK/system
	find . -type f -name "*.ko" -exec rm -f {} \;
fi;
cd ${KERNELDIR}

# cleanup old output files
rm -rf ${KERNELDIR}/output/$TARGET/*

# cleanup old dtb files
rm -rf $KERNELDIR/arch/arm64/boot/dts/*.dtb;

echo "Done"

####################################### COMPILE IMAGES #######################################

echo
echo "${bldcya}***** Compiling kernel *****${txtrst}"

if [ $USER != "root" ]; then
	make CONFIG_DEBUG_SECTION_MISMATCH=y -j10 Image ARCH=arm64
else
	make -j10 Image ARCH=arm64
fi;

if [ -e $KERNELDIR/arch/arm64/boot/Image ]; then
	echo
	echo "${bldcya}***** Final Touch for Kernel *****${txtrst}"
	stat $KERNELDIR/arch/arm64/boot/Image || exit 1;
	mv ./arch/arm64/boot/Image ./$BK/$TARGET
	echo
	echo "--- Creating custom dt.img ---"
	./utilities/dtbtool -o dt.img -s 2048 -p ./scripts/dtc/dtc ./arch/arm64/boot/dts/
else
	echo "${bldred}Kernel STUCK in BUILD!${txtrst}"
	exit 0;
fi;

echo
echo "Done"

###################################### RAMDISK GENERATION #####################################

echo
echo "${bldcya}***** Make ramdisk *****${txtrst}"

# make modules
make -j10 modules ARCH=arm64 || exit 1;

# find modules
for i in $(find "$KERNELDIR" -name '*.ko'); do
	cp -av "$i" ./$BK/system/lib/modules/;
done;

if [ -f "./$BK/system/lib/modules/*" ]; then
	chmod 0755 ./$BK/system/lib/modules/*
	${CROSS_COMPILE}strip --strip-debug ./$BK/system/lib/modules/*.ko
	${CROSS_COMPILE}strip --strip-unneeded ./$BK/system/lib/modules/*
fi;

# fix ramdisk permissions
cd ${KERNELDIR}/$BK
cp ./ramdisk_fix_permissions.sh ./$TARGET/ramdisk/ramdisk_fix_permissions.sh
cd ${KERNELDIR}/$BK/$TARGET/ramdisk
chmod 0777 ramdisk_fix_permissions.sh
./ramdisk_fix_permissions.sh 2>/dev/null
rm -f ramdisk_fix_permissions.sh

# make ramdisk
cd ${KERNELDIR}/$BK
./mkbootfs ./$TARGET/ramdisk | gzip > ./$TARGET/ramdisk.gz

echo
echo "Done"

##################################### BOOT.IMG GENERATION #####################################

echo
echo "${bldcya}***** Make boot.img *****${txtrst}"

read -p "Do you want to use a stock (s) or custom generated (c) dt.img? (s/c) > " dt
echo
if [ "$dt" = "c" -o "$dt" = "C" ]; then
	./mkbootimg --kernel ./$TARGET/Image --dt ${KERNELDIR}/dt.img --ramdisk ./$TARGET/ramdisk.gz --base 0x10000000 --kernel_offset 0x00008000 --ramdisk_offset 0x01000000 --tags_offset 0x00000100 --pagesize 2048 -o ./$TARGET/boot.img
fi
if [ "$dt" = "s" -o "$dt" = "S" ]; then
	./mkbootimg --kernel ./$TARGET/Image --dt ./$TARGET/dt.img --ramdisk ./$TARGET/ramdisk.gz --base 0x10000000 --kernel_offset 0x00008000 --ramdisk_offset 0x01000000 --tags_offset 0x00000100 --pagesize 2048 -o ./$TARGET/boot.img
fi

echo -n "SEANDROIDENFORCE" >> ./$TARGET/boot.img

echo "Done"

###################################### ARCHIVE GENERATION #####################################

echo
echo "${bldcya}***** Make archives *****${txtrst}"

cp -R ./maqical ${KERNELDIR}/output/$TARGET/
cp -R ./supersu ${KERNELDIR}/output/$TARGET/
cp -R ./busybox ${KERNELDIR}/output/$TARGET/
cp ./$TARGET/boot.img ${KERNELDIR}/output/$TARGET/maqical
cp -R ./system ${KERNELDIR}/output/$TARGET/
cp -R ./META-INF ${KERNELDIR}/output/$TARGET/

cd ${KERNELDIR}/output/$TARGET

GETVER=`grep 'S6_MM_*v' ${KERNELDIR}/.config | sed 's/.*".//g' | sed 's/-S.*//g'`

# Without Clearwater audio mod
AUDIO=`grep '# CONFIG_SND_SOC_ARIZONA_CONTROL*' ${KERNELDIR}/.config | sed 's/.*".//g' | sed 's/-S.*//g'`
if [ "$AUDIO" == "# CONFIG_SND_SOC_ARIZONA_CONTROL is not set" ]; then
	AUDIO="no-audio"
else
	AUDIO=""
fi

zip -r SM-$TARGET-$AUDIO-kernel-${GETVER}-`date +[%d-%m-%y]`.zip .
tar -H ustar -c ${KERNELDIR}/output/$TARGET/maqical/boot.img > SM-$TARGET-$AUDIO-kernel-${GETVER}-`date +[%d-%m-%y]`.tar
md5sum -t SM-$TARGET-$AUDIO-kernel-${GETVER}-`date +[%d-%m-%y]`.tar >> SM-$TARGET-$AUDIO-kernel-${GETVER}-`date +[%d-%m-%y]`.tar
mv SM-$TARGET-$AUDIO-kernel-${GETVER}-`date +[%d-%m-%y]`.tar SM-$TARGET-$AUDIO-kernel-${GETVER}-`date +[%d-%m-%y]`.tar.md5

echo
echo "Done"

#################################### OPTIONAL SOURCE CLEAN ####################################

echo
echo "${bldcya}***** Clean source *****${txtrst}"

cd ${KERNELDIR}
read -p "Do you want to Clean the source? (y/n) > " mc
if [ "$mc" = "Y" -o "$mc" = "y" ]; then
	xterm -e make clean
	xterm -e make mrproper
fi

echo
echo "Build completed"
echo
echo "${txtbld}***** Flashable zip found in output directory *****${txtrst}"
echo

# build script ends
Maqical/Firestorm
build_kernel.sh
Shell
gpl-2.0
7,274
#!/bin/bash
set -e

if [[ $EUID -ne 0 ]]; then
    echo "Permission denied, should be run as root"
    exit 1
fi

cwd=$(pwd)

# internal variables
red=$(tput setaf 1)
green=$(tput setaf 2)
restcor=$(tput sgr0)

# Update
apt-get update && apt-get upgrade -y
apt-get install -y jq rng-tools shadowsocks-libev

# Clone repo
cd /tmp
git clone https://github.com/d0u9/scripts.git
cd scripts
export SCRIPTS_DIR=$(pwd)

# Basic server setup
source "$SCRIPTS_DIR/install_scripts/cloud_basic_setup.sh"

# Install fail2ban
bash "$SCRIPTS_DIR/install_scripts/fail2ban/install_fail2ban.sh"

# Create config files
echo -e "\nConfig files will be placed in /etc/trident"
mkdir -p /etc/trident

read -p "${green}Shadowsocks port:${restcor} " ss_port
echo "${green}Shadowsocks password:${restcor}"
read -s ss_pass
read -p "${green}KcpTun port:${restcor} " kcp_port
echo "${green}KcpTun password:${restcor}"
read -s kcp_pass

jq '.server_port='"$ss_port"' | .password="'"$ss_pass"'"' \
    "$SCRIPTS_DIR/one_key_ss/ss_config_template.json" > /etc/trident/ss_config.json

jq '.target="localhost:'"$ss_port"'" | .listen=":'"$kcp_port"'" | .key="'"$kcp_pass"'"' \
    "$SCRIPTS_DIR/one_key_ss/kcp_config_template.json" > /etc/trident/kcp_config.json

# Install kcptun
cd /tmp
kcptun_version="20171201"
wget "https://github.com/xtaci/kcptun/releases/download/v20171113/kcptun-linux-amd64-$kcptun_version.tar.gz" -O kcptun.tar.gz
tar -xf kcptun.tar.gz
mv server_linux_amd64 /usr/bin/kcp-server
mv client_linux_amd64 /usr/bin/kcp-client
cp "$SCRIPTS_DIR/one_key_ss/kcp-server.service" /etc/systemd/system/

# Install SS
cp "$SCRIPTS_DIR/one_key_ss/ss-server.service" /etc/systemd/system/

# Autostart
systemctl daemon-reload
systemctl enable ss-server.service
systemctl enable kcp-server.service

# Start
systemctl start ss-server.service
systemctl start kcp-server.service
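# Illustration of the jq templating above (assumed template shape, not the real file):
# given  {"server":"0.0.0.0","server_port":8388,"password":"","method":"aes-256-gcm"}
# the filter  .server_port=1234 | .password="secret"  produces
#        {"server":"0.0.0.0","server_port":1234,"password":"secret","method":"aes-256-gcm"}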
d0u9/scripts
one_key_ss/setup.sh
Shell
gpl-2.0
1,853
#!/bin/bash

#go through all the combination

rm -rf out
mkdir -p out

#Create cell 4x2x1
supercell -s 4x2x1 -i PZT-PbZr05Ti05O3.cif -m -o out/PZT421 > /dev/null

#Create table
for i in {0..9}
do
  echo $((i+1))
  for y in 0.25000 0.75000
  do
    for x in 0.12500 0.37500 0.62500 0.87500
    do
      vl=`grep -E "(Zr1|Ti1).+$x\s+$y\s+0.56490" out/PZT421_i0${i}_w*.cif | sed -r 's/\s+([ZT])[ri]1.*/\1/g'`
      echo -n $vl
    done
    echo ""
  done
  echo ""
done
orex/supercell
data/examples/PZT/df_cfg.bash
Shell
gpl-2.0
466
#!/bin/sh

distdir="$1"
top_srcdir="$2"

if ( cd $top_srcdir; svn status ) 2>&1 | fgrep -v "$distdir" | grep .; then
    echo bad svn status >&2
    exit 1
else
    :
fi

find $distdir \
	-name autom4te.cache -prune -o \
	-name .svn -prune -o \
	-type f \
	\! -name '*~' \
	\! -name Makefile.in \
	\! -name configure \
	\! -name config.h.in \
	\! -name aclocal.m4 \
	\! -name INSTALL \
	\! -name COPYING \
	\! -name depcomp \
	\! -name compile \
	\! -name install-sh \
	\! -name missing \
	\! -name Makefile.in.in \
	\! -name ja.po \
	\! -name ja.gmo \
	\! -name config.guess \
	\! -name config.sub \
	\! -name mkinstalldirs \
	\! -name intltool-extract.in \
	\! -name intltool-merge.in \
	\! -name intltool-update.in \
	\! -name ltmain.sh \
	\! -name xmms2_applet.pot \
	-print | while read path; do
    file=`echo $path | sed 's,.*/,,'`

    case "$file" in
    *.[ch])
	if ! grep '^[^"]*Copyright (C) 2005-2006 Yuuki Harano[^"]*$' $path > /dev/null; then
	    echo $path: No copyright. >&2
	    echo false
	fi
	;;
    esac

    case "$path" in
    */common.h)
	if grep '#include <common.h>' $path > /dev/null; then
	    echo $path: common.h included. >&2
	    echo false
	fi
	;;
    *.[ch])
	if ! grep '#include <common.h>' $path > /dev/null; then
	    echo $path: common.h not included. >&2
	    echo false
	fi
	;;
    esac

    case "$path" in
    */common.h)
	if ! grep '#include ["<]config\.h[>"]' $path > /dev/null; then
	    echo $path: config.h not included. >&2
	    echo false
	fi
	;;
    *.[ch])
	if grep '#include ["<]config\.h[>"]' $path > /dev/null; then
	    echo $path: config.h included. >&2
	    echo false
	fi
	;;
    esac

#    case "$path" in
#    *.[ch])
#	if ! grep '#include ["<]config\.h[>"]' $path > /dev/null; then
#	    echo $path: config.h not included. >&2
#	    echo false
#	fi
#	;;
#    esac
done | grep false > /dev/null

if [ $? -eq 0 ]; then
    exit 1
else
    exit 0
fi
masm11/xmms2_applet
check.sh
Shell
gpl-2.0
1,971
#!/bin/bash

# This script updates the dolphin-emu.pot file to match the strings in
# the source code.

cd "$(dirname "$0")/.."

# Scan the source code for strings and put them in dolphin-emu.pot
SRCDIR=Source
find $SRCDIR -name '*.cpp' -o -name '*.h' -o -name '*.c' | \
	xgettext -s -p ./Languages/po -o dolphin-emu.pot --package-name="Dolphin Emulator" \
	--keyword=_ --keyword=wxTRANSLATE --keyword=SuccessAlertT --keyword=PanicAlertT \
	--keyword=PanicYesNoT --keyword=AskYesNoT --keyword=CriticalAlertT --keyword=GetStringT \
	--keyword=_trans --keyword=tr:1,1t --keyword=tr:1,2c --keyword=QT_TR_NOOP --keyword=FmtFormatT \
	--add-comments=i18n --from-code=utf-8 -f -

# Copy strings from qt-strings.pot to dolphin-emu.pot
xgettext -s -p ./Languages/po -o dolphin-emu.pot --package-name="Dolphin Emulator" \
	-j ./Languages/po/qt-strings.pot

sed -i "s/SOME DESCRIPTIVE TITLE\./Translation of dolphin-emu.pot to LANGUAGE/" Languages/po/dolphin-emu.pot
sed -i "s/YEAR THE PACKAGE'S COPYRIGHT HOLDER/2003-2013/" Languages/po/dolphin-emu.pot
sed -i "s/license as the PACKAGE package/license as the dolphin-emu package/" Languages/po/dolphin-emu.pot
BlueSplash/dolphin
Languages/update-source-strings.sh
Shell
gpl-2.0
1,152
#! /bin/sh

#export PREFIX=$HOME/opt/nest
export PREFIX=/opt/nest
export PATH=$PATH:$PREFIX/bin
export PYTHONPATH=$PREFIX/lib/python2.7/site-packages:$PYTHONPATH
echo $PYTHONPATH
echo $PREFIX
magnastrazh/NEUCOGAR
nest/dopamine/test_files/env_setup.sh
Shell
gpl-2.0
191
#!/bin/bash

# Copyright (c) 2013 Lawrence Livermore National Laboratory
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# Author: Peter D. Barnes, Jr. <[email protected]>
#

# Do a coverity build and submit report

me=`basename $0`

# echo commands and output to a log file
logf=coverity/coverity-build.log
echo | tee $logf

function say () {
    echo "$me:" $* | tee -a $logf
}
blank () {
    echo | tee -a $logf
}
function doo () {
    say "$ "$*
    $* 2>&1 | tee -a $logf
}

say $(date)
blank

doo ./ns3 clean
blank
doo ./ns3 configure $NS3CONFIG
blank

cov=coverity/cov-int
doo cov-build --dir $cov ./ns3 build
blank

tarf=coverity/ns-3.tgz
doo tar cvzf $tarf -C coverity cov-int
blank

useremail=$(hg showconfig ui.username | \
    egrep -o "\b[a-zA-Z0-9.-]+@[a-zA-Z0-9.-]+\.[a-zA-Z0-9.-]+\b")
repoversion="$(basename $(dirname $PWD))@$(hg id -i)"

# curl complains if this contains white space
description="Coverity-mods"

doo curl \
    --form file=@$tarf \
    --form project=ns-3 \
    --form password=4jk2BVX9 \
    --form email="$useremail" \
    --form version="$repoversion" \
    --form description="$description" \
    http://scan5.coverity.com/cgi-bin/upload.py
blank

say $(date)
blank
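# Usage sketch for the timeit-style logging helpers above: doo echoes the command
# into the log and runs it, so a hypothetical extra step would look like
#   doo ./ns3 build
#   say "build finished"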
nsnam/ns-3-dev-git
utils/coverity-report.sh
Shell
gpl-2.0
1,818
#!/bin/sh

ifconfig eth0 addr `./nfeth-config --get-atari-ip eth0` netmask `./nfeth-config --get-netmask eth0` up
route add default eth0 gw `./nfeth-config --get-host-ip eth0`
aranym/aranym
atari/network/ethernet/nfeth-config/eth0-config.sh
Shell
gpl-2.0
175
#!/bin/bash

# BK_EXAMINATION: it is a string that identifies your "examination"
# BK_INPUT: it is a string that identifies your test (used to build the name of the directory where you execute)

#export BK_EXAMINATION=ReachabilityDeadlock

export PATH="$PATH:$HOME/BenchKit/bin/"

debug () {
	echo "bkh:" "$*" >&2
}

info () {
	echo "bkh:" "$*" >&2
}

timeit () {
	/usr/bin/time -f '%e\n%M' -o bk.time "$@"
	EXITST=$?
	WALLTIME=$(head -n1 bk.time)s
	MAXRSS=$(($(tail -n1 bk.time) / 1024))MB
}

mycat () {
	#cat "$@"
	cat "$@" >&2
}

do_verif () {
	#debug "Translating the formula ..."
	#timeit mcc2cunf < "$BK_EXAMINATION.xml" > "cunf.spec"
	#info "mcc2cunf: time $WALLTIME maxrss $MAXRSS" | tee -a ri.times
	#if [ "$EXITST" != 0 ]; then
	if [ "$?" != 0 ]; then
		info "Error: mcc2cunf returns error state, aborting"
		echo "DO_NOT_COMPETE"
		exit 0
	fi

	#debug "Translating PNML into PEP (+ place-replication encoding)..."
	#timeit sh -c '
	pnml2pep_mcc14.py < model.pnml 2> model.err | cont2pr.pl > model.ll_net
	#info "pnml2pep: time $WALLTIME maxrss $MAXRSS" | tee -a ri.times
	if [ "$(cat model.err)" ]; then
		info "Error: problems while translating PNML to PEP"
		info "Error: quite probably the net is not 1-safe"
		echo "DO_NOT_COMPETE"
		exit 0
	fi

	#debug "Running Cunf ..."
	#timeit cunf model.ll_net cunf.spec > cunf.out 2> cunf.err
	#info "cunf : time $WALLTIME maxrss $MAXRSS" | tee -a ri.times
	#if [ "$EXITST" != 0 ]; then
	if [ "$?" != 0 ]; then
		if grep -q 'is not safe' cunf.err; then
			info "Error: cunf seems to detect the net is not safe, aborting"
			mycat cunf.out
			mycat cunf.err
			echo "DO_NOT_COMPETE"
			exit 0
		fi
		info "Error: cunf returns an error state, aborting"
		mycat cunf.out
		mycat cunf.err
		echo "CANNOT_COMPUTE"
		exit 0
	fi

	#debug "Mixing formula ids and verification results"
	grep '^#' cunf.spec > tmp.ids
	grep '^Result :' cunf.out > tmp.results
	#echo "Result : UNSAT" > tmp.results
	#> tmp.results

	let "n = $(wc -l < tmp.ids)"
	let "m = $(wc -l < tmp.results)"
	#debug "$n lines in tmp.ids; $m lines in tmp.results"

	if [ "$n" != "$m" ]; then
		info "WARNING: mismatch between # of formula ids and # of result lines"
		info "Specification file:"
		mycat cunf.spec
		info "Results file:"
		mycat cunf.out
	fi

	exec 3< tmp.ids
	exec 4< tmp.results

	for ((i = 1; i <= n; i++))
	do
		read -u 3 lineid
		read -u 4 lineres
		negate=${lineid:2:1}
		id=${lineid:4}
		res=${lineres:11}
		#debug "negate '$negate' id '$id' result '$res'"

		if [ "$negate" == "Y" ]; then
			if [ "$res" == "SAT" ]; then
				res=UNSAT
			elif [ "$res" == "UNSAT" ]; then
				res=SAT
			fi
		fi

		if [ "$res" == "SAT" ]; then
			echo "FORMULA $id TRUE TECHNIQUES NET_UNFOLDING SAT_SMT"
		elif [ "$res" == "UNSAT" ]; then
			echo "FORMULA $id FALSE TECHNIQUES NET_UNFOLDING SAT_SMT"
		else
			echo "FORMULA $id CANNOT_COMPUTE"
		fi
	done

	#debug "cunf spec file:"
	#mycat cunf.spec >&2
	#debug "cunf stdout:"
	#mycat cunf.out >&2
	#debug "cunf stderr:"
	#mycat cunf.err >&2
}

function main () {
	#debug "PWD '$PWD'"
	#debug "BK_EXAMINATION '$BK_EXAMINATION'"
	#debug "iscolored `cat iscolored`"

	if [ "$(cat iscolored)" == "TRUE" ]; then
		echo "DO_NOT_COMPETE"
		exit 0
	fi

	case "$BK_EXAMINATION" in
	"ReachabilityDeadlock" | \
	"ReachabilityFireability" | \
	"ReachabilityFireabilitySimple" )
		do_verif
		;;
	*)
		info "cannot handle this examination"
		echo "DO_NOT_COMPETE"
		;;
	esac
	exit 0
}

main
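# Sketch of the paired-stream read pattern used in do_verif (hypothetical files):
#   exec 3< ids.txt
#   exec 4< results.txt
#   read -u 3 id && read -u 4 res   # consumes one line from each stream per call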
cesaro/cunf-mcc2014
scripts/BenchKit_head.sh
Shell
gpl-2.0
3,464
###############################################################################

#			UNIT TEST A19b FOR kocos.pl

###############################################################################

#	Test A19b	-	Checks if the program finds correct 2nd order
#				co-occurrences from Hindi transliterated data
#	Input		-	test-A19.count
#	Output		-	test-A19b.reqd

echo "UNIT Test A19b -";
echo "	For kth order co-occurrence program kocos.pl";
echo "Input - 	Source file from test-A19.count";
echo "Output - 	Destination file from test-A19b.reqd";
echo "Test -	Checks if the program finds correct 2nd order";
echo "		co-occurrences from Hindi data containing punctuations";

#=============================================================================
#				INPUT
#=============================================================================
set TestInput="test-A19.count";
set Actual="test-A19b.reqd";

#=============================================================================
#				RUN THE PROGRAM
#=============================================================================
kocos.pl --literal karanA --order 2 $TestInput > test-A19b.output

#=============================================================================
#			SORT THE RESULTS AND COMPARE
#=============================================================================
sort test-A19b.output > t1
sort $Actual > t2

diff -w t1 t2 > variance

#=============================================================================
#				RESULTS OF TESTA19b
#=============================================================================
if(-z variance) then
	echo "STATUS :	OK Test Results Match.....";
else
	echo "STATUS :	ERROR Test Results don't Match....";
	echo "		When Tested for --literal karanA- ";
	cat variance
endif
echo ""
/bin/rm -f t1 t2 variance
#############################################################################
BjoernKW/TextNSP
Testing/kocos/unit/testA19b.sh
Shell
gpl-2.0
1,940
#!/bin/sh
# vim: set sw=4 et ts=4 sts=4 tw=80 :
# Copyright 2010 Ali Polatel <[email protected]>
# Distributed under the terms of the GNU General Public License v2

test_description='sandbox utimes(2)'
. ./test-lib.sh

test_done
alip/pandora
tests/t013-utimes.sh
Shell
gpl-2.0
228
#!/bin/bash

# 1m is as 1l, but having the dropout end at 0.1
# see run_tdnn_lstm_1k.sh for results.

# 1l is as 1k, but having the dropout end at the end of training.

# 1k is as 1e, but introducing a dropout schedule.

# 1e is as 1b, but reducing decay-time from 40 to 20.

# 1d is as 1b, but adding decay-time=40 to the fast-lstmp-layers.  note: it
# uses egs from 1b, remember to remove that before I commit.

# steps/info/chain_dir_info.pl exp/chain_cleaned/tdnn_lstm1a_sp_bi
# exp/chain_cleaned/tdnn_lstm1a_sp_bi: num-iters=253 nj=2..12 num-params=9.5M dim=40+100->3607 combine=-0.07->-0.07 xent:train/valid[167,252,final]=(-0.960,-0.859,-0.852/-1.05,-0.999,-0.997) logprob:train/valid[167,252,final]=(-0.076,-0.064,-0.062/-0.099,-0.092,-0.091)

# This is as run_lstm1e.sh except adding TDNN layers in between; also comparing below
# with run_lstm1d.sh which had a larger non-recurrent-projection-dim and which had
# better results.  Note: these results are not with the updated LM (the LM data-prep
# for this setup was changed in Nov 2016 but this was with an older directory).
#
# local/chain/compare_wer_general.sh exp/chain_cleaned/lstm1d_sp_bi exp/chain_cleaned/lstm1e_sp_bi exp/chain_cleaned/tdnn_lstm1a_sp_bi
# System                   lstm1d_sp_bi lstm1e_sp_bi tdnn_lstm1a_sp_bi
# WER on dev(orig)             10.3         10.7          9.7
# WER on dev(rescored)          9.8         10.1          9.3
# WER on test(orig)             9.7          9.8          9.1
# WER on test(rescored)         9.2          9.4          8.7
# Final train prob          -0.0812      -0.0862       -0.0625
# Final valid prob          -0.1049      -0.1047       -0.0910
# Final train prob (xent)   -1.1334      -1.1763       -0.8518
# Final valid prob (xent)   -1.2263      -1.2427       -0.9972

## how you run this (note: this assumes that the run_tdnn_lstm.sh soft link points here;
## otherwise call it directly in its location).
# by default, with cleanup:
# local/chain/run_tdnn_lstm.sh

# without cleanup:
# local/chain/run_tdnn_lstm.sh --train-set train --gmm tri3 --nnet3-affix "" &

# note, if you have already run one of the non-chain nnet3 systems
# (e.g. local/nnet3/run_tdnn.sh), you may want to run with --stage 14.

# run_tdnn_lstm_1a.sh was modified from run_lstm_1e.sh, which is a fairly
# standard, LSTM, except that some TDNN layers were added in between the
# LSTM layers.  I was looking at egs/ami/s5b/local/chain/tuning/run_tdnn_lstm_1i.sh, but
# this isn't exactly copied from there.

set -e -o pipefail

# First the options that are passed through to run_ivector_common.sh
# (some of which are also used in this script directly).
stage=0
nj=30
decode_nj=30
min_seg_len=1.55
label_delay=5
xent_regularize=0.1
train_set=train_cleaned
gmm=tri3_cleaned  # the gmm for the target data
num_threads_ubm=32
nnet3_affix=_cleaned  # cleanup affix for nnet3 and chain dirs, e.g. _cleaned

# training options
chunk_left_context=40
chunk_right_context=0
chunk_left_context_initial=0
chunk_right_context_final=0
# decode options
extra_left_context=50
extra_right_context=0
extra_left_context_initial=0
extra_right_context_final=0
frames_per_chunk=140,100,160
frames_per_chunk_primary=140

# The rest are configs specific to this script.  Most of the parameters
# are just hardcoded at this level, in the commands below.
train_stage=-10
tree_affix=  # affix for tree directory, e.g. "a" or "b", in case we change the configuration.
tdnn_lstm_affix=1m  #affix for TDNN-LSTM directory, e.g. "a" or "b", in case we change the configuration.
common_egs_dir=exp/chain_cleaned/tdnn_lstm1b_sp_bi/egs  # you can set this to use previously dumped egs.

# End configuration section.
echo "$0 $@"  # Print the command line for logging

. cmd.sh
. ./path.sh
. ./utils/parse_options.sh

if ! cuda-compiled; then
  cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi

local/nnet3/run_ivector_common.sh --stage $stage \
                                  --nj $nj \
                                  --min-seg-len $min_seg_len \
                                  --train-set $train_set \
                                  --gmm $gmm \
                                  --num-threads-ubm $num_threads_ubm \
                                  --nnet3-affix "$nnet3_affix"

gmm_dir=exp/$gmm
ali_dir=exp/${gmm}_ali_${train_set}_sp_comb
tree_dir=exp/chain${nnet3_affix}/tree_bi${tree_affix}
lat_dir=exp/chain${nnet3_affix}/${gmm}_${train_set}_sp_comb_lats
dir=exp/chain${nnet3_affix}/tdnn_lstm${tdnn_lstm_affix}_sp_bi
train_data_dir=data/${train_set}_sp_hires_comb
lores_train_data_dir=data/${train_set}_sp_comb
train_ivector_dir=exp/nnet3${nnet3_affix}/ivectors_${train_set}_sp_hires_comb

for f in $gmm_dir/final.mdl $train_data_dir/feats.scp $train_ivector_dir/ivector_online.scp \
    $lores_train_data_dir/feats.scp $ali_dir/ali.1.gz $gmm_dir/final.mdl; do
  [ ! -f $f ] && echo "$0: expected file $f to exist" && exit 1
done

if [ $stage -le 14 ]; then
  echo "$0: creating lang directory with one state per phone."
  # Create a version of the lang/ directory that has one state per phone in the
  # topo file. [note, it really has two states.. the first one is only repeated
  # once, the second one has zero or more repeats.]
  if [ -d data/lang_chain ]; then
    if [ data/lang_chain/L.fst -nt data/lang/L.fst ]; then
      echo "$0: data/lang_chain already exists, not overwriting it; continuing"
    else
      echo "$0: data/lang_chain already exists and seems to be older than data/lang..."
      echo " ... not sure what to do.  Exiting."
      exit 1;
    fi
  else
    cp -r data/lang data/lang_chain
    silphonelist=$(cat data/lang_chain/phones/silence.csl) || exit 1;
    nonsilphonelist=$(cat data/lang_chain/phones/nonsilence.csl) || exit 1;
    # Use our special topology... note that later on may have to tune this
    # topology.
    steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >data/lang_chain/topo
  fi
fi

if [ $stage -le 15 ]; then
  # Get the alignments as lattices (gives the chain training more freedom).
  # use the same num-jobs as the alignments
  steps/align_fmllr_lats.sh --nj 100 --cmd "$train_cmd" ${lores_train_data_dir} \
    data/lang $gmm_dir $lat_dir
  rm $lat_dir/fsts.*.gz # save space
fi

if [ $stage -le 16 ]; then
  # Build a tree using our new topology.  We know we have alignments for the
  # speed-perturbed data (local/nnet3/run_ivector_common.sh made them), so use
  # those.
  if [ -f $tree_dir/final.mdl ]; then
    echo "$0: $tree_dir/final.mdl already exists, refusing to overwrite it."
    exit 1;
  fi
  steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \
      --context-opts "--context-width=2 --central-position=1" \
      --leftmost-questions-truncate -1 \
      --cmd "$train_cmd" 4000 ${lores_train_data_dir} data/lang_chain $ali_dir $tree_dir
fi

if [ $stage -le 17 ]; then
  mkdir -p $dir
  echo "$0: creating neural net configs using the xconfig parser";

  num_targets=$(tree-info $tree_dir/tree |grep num-pdfs|awk '{print $2}')
  learning_rate_factor=$(echo "print 0.5/$xent_regularize" | python)

  # note: the value of the dropout-proportion is not important, as it's
  # controlled by the dropout schedule; what's important is that we set it.
  lstmp_opts="decay-time=20 dropout-proportion=0.0 dropout-per-frame=true"

  mkdir -p $dir/configs
  cat <<EOF > $dir/configs/network.xconfig
  input dim=100 name=ivector
  input dim=40 name=input
  # please note that it is important to have input layer with the name=input
  # as the layer immediately preceding the fixed-affine-layer to enable
  # the use of short notation for the descriptor
  fixed-affine-layer name=lda input=Append(-2,-1,0,1,2,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat
  # the first splicing is moved before the lda layer, so no splicing here
  relu-renorm-layer name=tdnn1 dim=512
  relu-renorm-layer name=tdnn2 dim=512 input=Append(-1,0,1)
  fast-lstmp-layer name=lstm1 cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 $lstmp_opts
  relu-renorm-layer name=tdnn3 dim=512 input=Append(-3,0,3)
  relu-renorm-layer name=tdnn4 dim=512 input=Append(-3,0,3)
  fast-lstmp-layer name=lstm2 cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 $lstmp_opts
  relu-renorm-layer name=tdnn5 dim=512 input=Append(-3,0,3)
  relu-renorm-layer name=tdnn6 dim=512 input=Append(-3,0,3)
  fast-lstmp-layer name=lstm3 cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 $lstmp_opts
  ## adding the layers for chain branch
  output-layer name=output input=lstm3 output-delay=$label_delay include-log-softmax=false dim=$num_targets max-change=1.5
  # adding the layers for xent branch
  # This block prints the configs for a separate output that will be
  # trained with a cross-entropy objective in the 'chain' models... this
  # has the effect of regularizing the hidden parts of the model.  we use
  # 0.5 / args.xent_regularize as the learning rate factor- the factor of
  # 0.5 / args.xent_regularize is suitable as it means the xent
  # final-layer learns at a rate independent of the regularization
  # constant; and the 0.5 was tuned so as to make the relative progress
  # similar in the xent and regular final layers.
  output-layer name=output-xent input=lstm3 output-delay=$label_delay dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5
EOF
  steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/
fi

if [ $stage -le 18 ]; then
  if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
    utils/create_split_dir.pl \
     /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage
  fi

  steps/nnet3/chain/train.py --stage $train_stage \
    --cmd "$decode_cmd" \
    --feat.online-ivector-dir $train_ivector_dir \
    --feat.cmvn-opts "--norm-means=false --norm-vars=false" \
    --trainer.dropout-schedule='0,0@0.20,0.5@0.50,0.1' \
    --trainer.optimization.combine-sum-to-one-penalty=0.001 \
    --chain.xent-regularize 0.1 \
    --chain.leaky-hmm-coefficient 0.1 \
    --chain.l2-regularize 0.00005 \
    --chain.apply-deriv-weights false \
    --chain.lm-opts="--num-extra-lm-states=2000" \
    --egs.dir "$common_egs_dir" \
    --egs.opts "--frames-overlap-per-eg 0" \
    --egs.chunk-width "$frames_per_chunk" \
    --egs.chunk-left-context "$chunk_left_context" \
    --egs.chunk-right-context "$chunk_right_context" \
    --egs.chunk-left-context-initial "$chunk_left_context_initial" \
    --egs.chunk-right-context-final "$chunk_right_context_final" \
    --trainer.num-chunk-per-minibatch 128,64 \
    --trainer.frames-per-iter 1500000 \
    --trainer.max-param-change 2.0 \
    --trainer.num-epochs 4 \
    --trainer.deriv-truncate-margin 10 \
    --trainer.optimization.shrink-value 0.99 \
    --trainer.optimization.num-jobs-initial 2 \
    --trainer.optimization.num-jobs-final 12 \
    --trainer.optimization.initial-effective-lrate 0.001 \
    --trainer.optimization.final-effective-lrate 0.0001 \
    --trainer.optimization.momentum 0.0 \
    --cleanup.remove-egs true \
    --feat-dir $train_data_dir \
    --tree-dir $tree_dir \
    --lat-dir $lat_dir \
    --dir $dir \
    --cleanup=false  # --cleanup=false is temporary while debugging.
fi

if [ $stage -le 19 ]; then
  # Note: it might appear that this data/lang_chain directory is mismatched, and it is as
  # far as the 'topo' is concerned, but this script doesn't read the 'topo' from
  # the lang directory.
  utils/mkgraph.sh --self-loop-scale 1.0 data/lang $dir $dir/graph
fi

if [ $stage -le 20 ]; then
  rm $dir/.error 2>/dev/null || true
  for dset in dev test; do
      (
      steps/nnet3/decode.sh --num-threads 4 --nj $decode_nj --cmd "$decode_cmd" \
          --acwt 1.0 --post-decode-acwt 10.0 \
          --extra-left-context $extra_left_context \
          --extra-right-context $extra_right_context \
          --extra-left-context-initial $extra_left_context_initial \
          --extra-right-context-final $extra_right_context_final \
          --frames-per-chunk "$frames_per_chunk_primary" \
          --online-ivector-dir exp/nnet3${nnet3_affix}/ivectors_${dset}_hires \
          --scoring-opts "--min-lmwt 5 " \
         $dir/graph data/${dset}_hires $dir/decode_${dset} || exit 1;
      steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" data/lang data/lang_rescore \
        data/${dset}_hires ${dir}/decode_${dset} ${dir}/decode_${dset}_rescore || exit 1
    ) || touch $dir/.error &
  done
  wait
  if [ -f $dir/.error ]; then
    echo "$0: something went wrong in decoding"
    exit 1
  fi
fi

if [ $stage -le 21 ]; then
  # 'looped' decoding.  we didn't write a -parallel version of this program yet,
  # so it will take a bit longer as the --num-threads option is not supported.
  # we just hardcode the --frames-per-chunk option as it doesn't have to
  # match any value used in training, and it won't affect the results (unlike
  # regular decoding).
  rm $dir/.error 2>/dev/null || true
  for dset in dev test; do
      (
      steps/nnet3/decode_looped.sh --nj $decode_nj --cmd "$decode_cmd" \
          --acwt 1.0 --post-decode-acwt 10.0 \
          --extra-left-context-initial $extra_left_context_initial \
          --frames-per-chunk 30 \
          --online-ivector-dir exp/nnet3${nnet3_affix}/ivectors_${dset}_hires \
          --scoring-opts "--min-lmwt 5 " \
         $dir/graph data/${dset}_hires $dir/decode_looped_${dset} || exit 1;
      steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" data/lang data/lang_rescore \
        data/${dset}_hires ${dir}/decode_looped_${dset} ${dir}/decode_looped_${dset}_rescore || exit 1
    ) || touch $dir/.error &
  done
  wait
  if [ -f $dir/.error ]; then
    echo "$0: something went wrong in decoding"
    exit 1
  fi
fi
exit 0
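# Note on --trainer.dropout-schedule (sketch, not from the original script): the
# value is a piecewise-linear schedule of proportion[@data-fraction] points, so
# '0,0@0.20,0.5@0.50,0.1' keeps dropout at 0 until 20% of training, ramps it to
# 0.5 at 50%, then decays it to 0.1 by the end.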
michellemorales/OpenMM
kaldi/egs/tedlium/s5_r2/local/chain/tuning/run_tdnn_lstm_1m.sh
Shell
gpl-2.0
14,028
#!/bin/bash
set -ev

pre-commit run --all-files || ( git status --short ; git diff ; echo "** The files above do not have the correct indentation. **" ; echo "To fix, see instructions at https://github.com/wannier-developers/wannier90/wiki/ContributorsGuide#automatic-pre-commits-spaces-indentation-" ; exit 1 )
paulatz/wannier90
test-suite/tools/run_precommit.sh
Shell
gpl-2.0
313
#!/bin/sh

test_description='tests remote-svn'

. ./test-lib.sh

MARKSPATH=.git/info/fast-import/remote-svn

if ! test_have_prereq PYTHON
then
	skip_all='skipping remote-svn tests, python not available'
	test_done
fi

# Override svnrdump with our simulator
PATH="$HOME:$PATH"
export PATH PYTHON_PATH GIT_BUILD_DIR

write_script "$HOME/svnrdump" <<\EOF
exec "$PYTHON_PATH" "$GIT_BUILD_DIR/contrib/svn-fe/svnrdump_sim.py" "$@"
EOF

init_git () {
	rm -fr .git &&
	git init &&
	#git remote add svnsim testsvn::sim:///$TEST_DIRECTORY/t9020/example.svnrdump
	# let's reuse an existing dump file!?
	git remote add svnsim "testsvn::sim://$TEST_DIRECTORY/t9154/svn.dump"
	git remote add svnfile "testsvn::file://$TEST_DIRECTORY/t9154/svn.dump"
}

if test -e "$GIT_BUILD_DIR/git-remote-testsvn"
then
	test_set_prereq REMOTE_SVN
fi

test_debug '
	git --version
	type git
	type svnrdump
'

test_expect_success REMOTE_SVN 'simple fetch' '
	init_git &&
	git fetch svnsim &&
	test_cmp .git/refs/svn/svnsim/master .git/refs/remotes/svnsim/master &&
	cp .git/refs/remotes/svnsim/master master.good
'

test_debug '
	git show-ref -s refs/svn/svnsim/master
	git show-ref -s refs/remotes/svnsim/master
'

test_expect_success REMOTE_SVN 'repeated fetch, nothing shall change' '
	git fetch svnsim &&
	test_cmp master.good .git/refs/remotes/svnsim/master
'

test_expect_success REMOTE_SVN 'fetch from a file:// url gives the same result' '
	git fetch svnfile
'

test_expect_failure REMOTE_SVN 'the sha1 differ because the git-svn-id line in the commit msg contains the url' '
	test_cmp .git/refs/remotes/svnfile/master .git/refs/remotes/svnsim/master
'

test_expect_success REMOTE_SVN 'mark-file regeneration' '
	# filter out any other marks, that can not be regenerated. Only up to 3 digit revisions are allowed here
	grep ":[0-9]\{1,3\} " $MARKSPATH/svnsim.marks > $MARKSPATH/svnsim.marks.old &&
	rm $MARKSPATH/svnsim.marks &&
	git fetch svnsim &&
	test_cmp $MARKSPATH/svnsim.marks.old $MARKSPATH/svnsim.marks
'

test_expect_success REMOTE_SVN 'incremental imports must lead to the same head' '
	SVNRMAX=3 &&
	export SVNRMAX &&
	init_git &&
	git fetch svnsim &&
	test_cmp .git/refs/svn/svnsim/master .git/refs/remotes/svnsim/master &&
	unset SVNRMAX &&
	git fetch svnsim &&
	test_cmp master.good .git/refs/remotes/svnsim/master
'

test_debug 'git branch -a'

test_done
brunosantiagovazquez/git
t/t9020-remote-svn.sh
Shell
gpl-2.0
2,349
#!/bin/bash

##################################################
#                                                #
#   Script to test optirg and graphirg           #
#   Generate 4 jpeg files :                      #
#   optimized vs non-optimized                   #
#   canvas vs tree-displayed                     #
#                                                #
#   Just call with ./test.sh                     #
#                                                #
##################################################
#                                                #
#   IRIT - TRACES                                #
#                                                #
#   R. Dubot - [email protected]                    #
#   Y. Ndongo - [email protected]                  #
#                                                #
#   July 2009                                    #
#                                                #
##################################################

# Path to gliss2 :
GL=../..
# Path to the NMP file to treat :
PNMP=.
# NMP file to treat :
NMP=arm.nmp

N=arm
NOPT=$N"opt"

# Call make in gliss2 folder
(cd $GL ; make)

echo Preprocessing
$GL/gep/gliss-nmp2nml.pl $NMP > $N.nml

echo Make the irg file
$GL/irg/mkirg $N.nml $N.irg

echo Optimize the IRG
$GL/optirg/optirg $N.irg $NOPT.irg

echo Generate graphviz file
$GL/graphirg/graphirg $N.irg $N.dot
$GL/graphirg/graphirg $NOPT.irg $NOPT.dot
$GL/graphirg/graphirg -t $N.irg t$N.dot
$GL/graphirg/graphirg -t $NOPT.irg t$NOPT.dot

echo Generate JPEG files with dot.
dot -Tjpg -o $N.jpg $N.dot
dot -Tjpg -o $NOPT.jpg $NOPT.dot
dot -Tjpg -o t$N.jpg t$N.dot
dot -Tjpg -o t$NOPT.jpg t$NOPT.dot
Lycania/CacheSim
application/arm_sim/gliss2/test/optirg/test.sh
Shell
gpl-2.0
1,680
TOPDIR=..
NAME=codecgraph
VERSION=`git log -1 --pretty=format:%ci|cut -f1 -d' '|sed 's!-!!g'`
PKG=$NAME-$VERSION
cmatsuoka/codecgraph
page/build-common.sh
Shell
gpl-2.0
113
#!/bin/bash
set -e

EXACT=1
CPUS=`grep -c processor /proc/cpuinfo`

# build the code
CROSS_COMPILE=/home/ian/toolchain/android-toolchain-eabi/bin/arm-linux-androideabi-
[ -d out ] || mkdir out
make -j${CPUS} O=out ARCH=arm CROSS_COMPILE=$CROSS_COMPILE uImage modules
mkdir out/modules_for_android
make O=out ARCH=arm modules_install INSTALL_MOD_PATH=modules_for_android
fards/ainol_elfii_common
linaro_kernel_build_cmds.sh
Shell
gpl-2.0
374
#!/bin/sh

# Copyright (C) 2015 Red Hat, Inc. All rights reserved.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# check if 'dmsetup --noflush' will work properly for mounted snapshot

SKIP_WITH_CLVMD=1
SKIP_WITH_LVMETAD=1
SKIP_WITH_LVMPOLLD=1

. lib/inittest

which mkfs.ext2 || skip

aux prepare_vg 5

# Create stacked device
lvcreate --type snapshot -s -L10 -n $lv1 $vg --virtualsize 100M
aux extend_filter_LVMTEST

vgcreate $vg1 "$DM_DEV_DIR"/$vg/$lv1
lvcreate -L20 -n $lv1 $vg1
lvcreate -L10 -n snap -s $vg1/$lv1

mkfs.ext2 "$DM_DEV_DIR/$vg1/snap"
mkdir mnt
mount -o errors=remount-ro "$DM_DEV_DIR/$vg1/snap" mnt
sync

# intentionally suspend the layer below
dmsetup suspend $vg-$lv1

# now this should pass without blocking
#dmsetup suspend --noflush --nolockfs $vg1-snap &
dmsetup suspend $vg1-snap &

sleep .5

dmsetup info --noheadings -c -o suspended $vg1-snap | tee out
should grep -i suspend out

# unlock device below
dmsetup resume $vg-$lv1
# so this will pass without blocking on udev
# otherwise --noudevsync would be needed
dmsetup resume $vg1-snap

# Try how force removal works
dmsetup suspend $vg-$lv1

# needs to fail as device is still open
not dmsetup remove --force $vg1-snap &

sleep .5

dmsetup table $vg1-snap | tee out
should grep -i error out

dmsetup resume $vg-$lv1

wait

# check it really is now 'error' target
dmsetup table $vg1-snap | tee out
grep error out

umount mnt || true

lvremove -f $vg1
vgremove -ff $vg1
vgremove -ff $vg
shehbazj/DyRe
test/shell/snapshot-remove-dmsetup.sh
Shell
gpl-2.0
1,813
#!/bin/bash

# this script is a modified version of run_tdnn_5g.sh. It uses
# the new transition model and the python version of training scripts.

set -e

# configs for 'chain'
stage=0
train_stage=-10
get_egs_stage=-10
dir=exp/chain/tdnn_5n

# training options
num_epochs=12
initial_effective_lrate=0.005
final_effective_lrate=0.0005
max_param_change=2.0
final_layer_normalize_target=0.5
num_jobs_initial=2
num_jobs_final=4
minibatch_size=128
frames_per_eg=150
remove_egs=false
#common_egs_dir=exp/chain/tdnn_5g/egs/
common_egs_dir=

# End configuration section.
echo "$0 $@"  # Print the command line for logging

. cmd.sh
. ./path.sh
. ./utils/parse_options.sh

if ! cuda-compiled; then
  cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi

# The iVector-extraction and feature-dumping parts are the same as the standard
# nnet2 setup, and you can skip them by setting "--stage 4" if you have already
# run those things.

ali_dir=exp/tri3b_ali
treedir=exp/chain/tri4_5n_tree
lang=data/lang_chain_5n

local/online/run_nnet2_common.sh --stage $stage || exit 1;

if [ $stage -le 4 ]; then
  # Get the alignments as lattices (gives the chain training more freedom).
  # use the same num-jobs as the alignments
  nj=$(cat exp/tri3b_ali/num_jobs) || exit 1;
  steps/align_fmllr_lats.sh --nj $nj --cmd "$train_cmd" data/train \
    data/lang exp/tri3b exp/tri3b_lats
  rm exp/tri3b_lats/fsts.*.gz # save space
fi

if [ $stage -le 5 ]; then
  # Create a version of the lang/ directory that has one state per phone in the
  # topo file. [note, it really has two states.. the first one is only repeated
  # once, the second one has zero or more repeats.]
  rm -rf $lang
  cp -r data/lang $lang
  silphonelist=$(cat $lang/phones/silence.csl) || exit 1;
  nonsilphonelist=$(cat $lang/phones/nonsilence.csl) || exit 1;
  # Use our special topology... note that later on may have to tune this
  # topology.
  steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >$lang/topo
fi

if [ $stage -le 6 ]; then
  # Build a tree using our new topology.
  steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \
    --cmd "$train_cmd" 1200 data/train $lang $ali_dir $treedir
fi

if [ $stage -le 7 ]; then
  mkdir -p $dir
  echo "$0: creating neural net configs";

  steps/nnet3/tdnn/make_configs.py \
    --self-repair-scale-nonlinearity 0.00001 \
    --feat-dir data/train \
    --ivector-dir exp/nnet2_online/ivectors \
    --tree-dir $treedir \
    --relu-dim 450 \
    --splice-indexes "-1,0,1 -2,-1,0,1 -3,0,3 -6,-3,0 0" \
    --use-presoftmax-prior-scale false \
    --xent-regularize 0.1 \
    --xent-separate-forward-affine true \
    --include-log-softmax false \
    --final-layer-normalize-target 1.0 \
    $dir/configs || exit 1;
fi

if [ $stage -le 8 ]; then
  steps/nnet3/chain/train.py --stage $train_stage \
    --cmd "$decode_cmd" \
    --feat.online-ivector-dir exp/nnet2_online/ivectors \
    --feat.cmvn-opts "--norm-means=false --norm-vars=false" \
    --chain.xent-regularize 0.1 \
    --chain.leaky-hmm-coefficient 0.1 \
    --chain.l2-regularize 0.00005 \
    --chain.apply-deriv-weights false \
    --chain.lm-opts="--num-extra-lm-states=200" \
    --egs.dir "$common_egs_dir" \
    --egs.opts "--frames-overlap-per-eg 0" \
    --egs.chunk-width $frames_per_eg \
    --trainer.num-chunk-per-minibatch $minibatch_size \
    --trainer.frames-per-iter 1000000 \
    --trainer.num-epochs $num_epochs \
    --trainer.optimization.num-jobs-initial $num_jobs_initial \
    --trainer.optimization.num-jobs-final $num_jobs_final \
    --trainer.optimization.initial-effective-lrate $initial_effective_lrate \
    --trainer.optimization.final-effective-lrate $final_effective_lrate \
    --trainer.max-param-change $max_param_change \
    --cleanup.remove-egs $remove_egs \
    --feat-dir data/train \
    --tree-dir $treedir \
    --lat-dir exp/tri3b_lats \
    --dir $dir
fi

if [ $stage -le 9 ]; then
  steps/online/nnet2/extract_ivectors_online.sh --cmd "$train_cmd" --nj 4 \
    data/test exp/nnet2_online/extractor exp/nnet2_online/ivectors_test || exit 1;
fi

if [ $stage -le 10 ]; then
  # Note: it might appear that this $lang directory is mismatched, and it is as
  # far as the 'topo' is concerned, but this script doesn't read the 'topo' from
  # the lang directory.
  utils/mkgraph.sh --self-loop-scale 1.0 data/lang $dir $dir/graph
  steps/nnet3/decode.sh --acwt 1.0 --post-decode-acwt 10.0 \
    --scoring-opts "--min-lmwt 1" \
    --nj 20 --cmd "$decode_cmd" \
    --online-ivector-dir exp/nnet2_online/ivectors_test \
    $dir/graph data/test $dir/decode || exit 1;
fi

if [ $stage -le 11 ]; then
  utils/mkgraph.sh --self-loop-scale 1.0 data/lang_ug $dir $dir/graph_ug
  steps/nnet3/decode.sh --acwt 1.0 --post-decode-acwt 10.0 \
    --nj 20 --cmd "$decode_cmd" \
    --online-ivector-dir exp/nnet2_online/ivectors_test \
    $dir/graph_ug data/test $dir/decode_ug || exit 1;
fi

wait;
exit 0;
michellemorales/OpenMM
kaldi/egs/rm/s5/local/chain/run_tdnn_5n.sh
Shell
gpl-2.0
5,100
#
# Copyright (c) 2013 Qualcomm Atheros, Inc..
#
# All Rights Reserved.
# Qualcomm Atheros Confidential and Proprietary.
#

if [ "$ACTION" = "pressed" -a "$BUTTON" = "wps" ]; then
	for dir in /var/run/hostapd-*; do
		[ -d "$dir" ] || continue
		for vap_dir in $dir/ath*; do
			[ -r "$vap_dir" ] || continue
			hostapd_cli -i "${vap_dir#"$dir/"}" -p "$dir" wps_pbc
		done
	done
fi
itgb/opCloudRouter
qca/feeds/wlan_10_2/qca-hostap/files/wps-hotplug.sh
Shell
gpl-2.0
378
#!/usr/bin/env bash

build() {
    # this appears to be required to make sure we pull in correct cache
    yum clean --disablerepo=* --enablerepo=local all

    # build RPMs
    cd $BUILD_PATH
    fedpkg clone -a $1
    cd $1
    fedpkg sources
    fedpkg prep
    yum-builddep -y $1.spec
    fedpkg --dist=$DIST_TAG local
    if [ $? != 0 ]; then
        exit;
    fi
    mv $BUILD_PATH/$1/x86_64/* $BUILD_PATH/localrepo
    cd $BUILD_PATH/localrepo
    /usr/bin/createrepo .
}

# install development tools
$PKG_APP groupinstall "Development Tools" -y

# build speex
build speex

# build speexdsp
build speexdsp

# build dahdi
build dahdi-tools

# build libpri
build libpri

# build libresample
build libresample

#build libss7
build libss7

# build asterisk
build asterisk

# end the script
exit 0
leifmadsen/asterisk-docker-builder
buildit.sh
Shell
gpl-2.0
801
#!/bin/bash

tfdir="tf"
halofitdir="halofit"
frankendir="FrankenEmu"
cambdir="camb"
classdir="class"

echo "Downloading of cosmological codes started"

if [ ! -d "$tfdir" ]; then
    mkdir -p $tfdir
    echo "Downloading Eisenstein & Hu's transfer function code..."
    wget http://background.uchicago.edu/~whu/transfer/tf_fit.c -q -O $tfdir/tf_fit.c
    echo "Downloading Eisenstein & Hu's power spectra code..."
    wget http://background.uchicago.edu/~whu/transfer/power.c -q -O $tfdir/power.c
else
    echo "Eisenstein & Hu's ($tfdir) folder already exists"
fi

if [ ! -d "$halofitdir" ]; then
    echo "Downloading Robert E. Smith's et al. halofit code..."
    mkdir -p $halofitdir
    wget http://www.roe.ac.uk/~jap/haloes/halofit+.tar -q -O - | tar x -C $halofitdir
else
    echo "Robert E. Smith's et al. ($halofitdir) folder already exists"
fi

if [ ! -d "$frankendir" ]; then
    echo "Downloading FrankenEmulator..."
    mkdir -p $frankendir
    wget http://www.hep.anl.gov/cosmology/CosmicEmu/CosmicEmu_v2.tar.gz -q -O - | tar xz -C "$frankendir" --strip-components=1
else
    echo "FrankenEmulator ($frankendir) folder already exists."
fi

# wget http://www.lanl.gov/projects/cosmology/CosmicEmu/CosmicEmu_v1.1.tar.gz -q -O - | tar x -C CosmicEmulator

read -p "Download and clone CAMB git repository (y/n)? " answer
case ${answer:0:1} in
    y|Y )
        echo "removing camb/ folder if existent"
        if [ -d "$cambdir" ]; then
            rm -rv $cambdir
        fi
        echo "Cloning CAMB git repo"
        git clone https://github.com/cmbant/CAMB.git
        sleep 2s
        echo "Renaming CAMB to $cambdir"
        mv CAMB/ "$cambdir/"
        ;;
    * )
        echo No CAMB repo downloaded. You can manually add your own. Make sure the wrapper is still valid.
        ;;
esac

read -p "Download and clone CLASS git repository (y/n)? " answer
case ${answer:0:1} in
    y|Y )
        echo "removing class/ folder if existent"
        if [ -d "$classdir" ]; then
            rm -rv $classdir
        fi
        echo "Cloning CLASS git repo"
        git clone https://github.com/lesgourg/class_public.git
        sleep 2s
        echo "Renaming class_public to $classdir"
        mv class_public/ "$classdir/"
        ;;
    * )
        echo No CLASS repo downloaded. You can manually add your own. Make sure the wrapper is still valid.
        ;;
esac

echo "Downloading of cosmological codes finished"
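# The download blocks above stream each archive straight into tar without a
# temporary file; a generic sketch of the same pattern (hypothetical URL):
#   wget https://example.org/pkg.tar.gz -q -O - | tar xz -C target/ --strip-components=1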
santiagocasas/cosmomathica
ext/download.sh
Shell
gpl-2.0
2,261
command="python /home/fernandosjp/Desktop/Fernando/GitHub/tesouro-investor/alert.py" job="*/2 * * * * $command" cat <(fgrep -i -v "$command" <(crontab -l)) <(echo "$job") | crontab -
fernandosjp/tesouro-investor
cron.sh
Shell
gpl-2.0
184
#! /bin/sh
$EXTRACTRC *.ui *.rc >> rc.cpp || exit 11
$XGETTEXT *.cpp -o $podir/tablessolver.pot
wyuka/calligra
sheets/plugins/solver/Messages.sh
Shell
gpl-2.0
96
#! /bin/sh
$EXTRACTRC --tag-group=koffice functions/*.xml > xml_doc.cpp
$EXTRACTRC chart/*.ui dialogs/*.ui part/dialogs/*.ui *.kcfg *.rc >> rc.cpp
$EXTRACTATTR --attr=optionWidget,name ui/CellToolOptionWidgets.xml --context="Option widget title" >> rc.cpp
$XGETTEXT *.cpp chart/*.cpp commands/*.cpp database/*.cpp dialogs/*.cpp functions/*.cpp part/AboutData.h part/*.cpp part/commands/*.cpp part/dialogs/*.cpp ui/*.cpp -o $podir/sheets.pot
rm xml_doc.cpp rc.cpp
wyuka/calligra
sheets/Messages.sh
Shell
gpl-2.0
464
LOG=wip-2712-fail

rm -rf /tmp/$LOG

PYTHONPATH=/home/owasserm/ceph-qa-suite \
./virtualenv/bin/teuthology -v --archive /tmp/$LOG --owner [email protected] mine.yaml $LOG.yaml
oritwas/scripts
run_failed_teu.sh
Shell
gpl-2.0
190
convert images/OCS-153-A.png -crop 1509x537+0+0 +repage images/OCS-153-A-0.png
convert -append images/OCS-152-B-11.png images/OCS-153-A-0.png images/OCS-152-B-11.png
rm images/OCS-153-A-0.png
convert images/OCS-153-A.png -crop 1509x397+0+544 +repage images/OCS-153-A-1.png
convert images/OCS-153-A.png -crop 1509x465+0+950 +repage images/OCS-153-A-2.png
convert images/OCS-153-A.png -crop 1509x387+0+1418 +repage images/OCS-153-A-3.png
convert images/OCS-153-A.png -crop 1509x67+0+1822 +repage images/OCS-153-A-4.png
convert images/OCS-153-A.png -crop 1509x232+0+1902 +repage images/OCS-153-A-5.png
convert images/OCS-153-A.png -crop 1509x484+0+2125 +repage images/OCS-153-A-6.png
convert images/OCS-153-A.png -crop 1509x783+0+2618 +repage images/OCS-153-A-7.png
convert images/OCS-153-A.png -crop 1509x471+0+3410 +repage images/OCS-153-A-8.png
convert images/OCS-153-A.png -crop 1509x309+0+3884 +repage images/OCS-153-A-9.png
convert images/OCS-153-A.png -crop 1509x309+0+4208 +repage images/OCS-153-A-10.png
#
#/OCS-153.png
convert images/OCS-153-B.png -crop 1569x149+0+0 +repage images/OCS-153-B-0.png
convert -append images/OCS-153-A-10.png images/OCS-153-B-0.png images/OCS-153-A-10.png
rm images/OCS-153-B-0.png
convert images/OCS-153-B.png -crop 1569x311+0+156 +repage images/OCS-153-B-1.png
convert images/OCS-153-B.png -crop 1569x387+0+478 +repage images/OCS-153-B-2.png
convert images/OCS-153-B.png -crop 1569x135+0+876 +repage images/OCS-153-B-3.png
convert images/OCS-153-B.png -crop 1569x605+0+1034 +repage images/OCS-153-B-4.png
convert images/OCS-153-B.png -crop 1569x383+0+1664 +repage images/OCS-153-B-5.png
convert images/OCS-153-B.png -crop 1569x135+0+2058 +repage images/OCS-153-B-6.png
convert images/OCS-153-B.png -crop 1569x1507+0+2208 +repage images/OCS-153-B-7.png
convert images/OCS-153-B.png -crop 1569x375+0+3724 +repage images/OCS-153-B-8.png
convert images/OCS-153-B.png -crop 1569x397+0+4118 +repage images/OCS-153-B-9.png
#
#/OCS-153.png
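# -crop geometry reads WIDTHxHEIGHT+XOFFSET+YOFFSET and +repage discards the
# retained canvas offset; an illustrative sketch (hypothetical file names):
#   convert page.png -crop 1509x537+0+100 +repage slice.png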
jonnymwalker/Staroslavjanskij-Slovar
scripts/findindents.OCS-153.sh
Shell
gpl-2.0
1,970
#!/bin/sh

# Copyright Roger Meier <[email protected]>
# SPDX-License-Identifier:	Apache-2.0 BSD-2-Clause GPL-2.0+ MIT WTFPL

echo "Providing R libraries"

sudo DEBIAN_FRONTEND=noninteractive apt-get -qqy install r-base r-base-dev \
	r-cran-zoo r-cran-xts \
	r-cran-xtable r-cran-reshape r-cran-stringr r-cran-scales \
	r-cran-scales r-cran-rmysql r-cran-rcurl r-cran-mgcv \
	r-cran-rjson r-cran-testthat libx11-dev libssl-dev libssh2-1-dev

sudo Rscript packages.r
dauer-afk/codeface
integration-scripts/install_codeface_R.sh
Shell
gpl-2.0
471
export KERNEL_PATH=`pwd`
export JET_PATH=`pwd`/../..
export MYDROID=${JET_PATH}/mydroid
export PATH=$PATH:${MYDROID}/prebuilt/linux-x86/toolchain/arm-eabi-4.4.3/bin/
export CROSS_COMPILE=${MYDROID}/prebuilt/linux-x86/toolchain/arm-eabi-4.4.3/bin/arm-eabi-
export PATH=${MYDROID}/../u-boot/tools:$PATH

#make ARCH=arm distclean
#make ARCH=arm jet_defconfig
make -j$(egrep '^processor' /proc/cpuinfo | wc -l) ARCH=arm $kernel_config uImage 2>&1 | tee ${JET_PATH}/logs/kernel_make.out

cd ${KERNEL_PATH}
ReconInstruments/jet_kernel
build_kernel.sh
Shell
gpl-2.0
501
#!/bin/sh

if [ $# -eq 0 ]; then
	echo "Usage: $0 library [compiler]"
	exit 0
fi

if [ $# -eq 1 ]; then
	CXX=g++
else
	CXX=$2
fi

FILE=$1

echo "int main(){}" | $CXX -l$FILE -o /dev/null -x c++ - > /dev/null 2>&1

RES=$?

exit $RES
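# Usage sketch (hypothetical library names): exit status 0 means the library links.
#   ./testLib.sh jpeg            # probes: echo "int main(){}" | g++ -ljpeg ...
#   ./testLib.sh jpeg clang++    # same probe with clang++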
Frontier789/Flib
src/scripts/testLib.sh
Shell
gpl-2.0
233