code
stringlengths 2
1.05M
| repo_name
stringlengths 5
110
| path
stringlengths 3
922
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 2
1.05M
|
---|---|---|---|---|---|
#!/bin/bash
# Start the local development databases (MongoDB + Redis) in the background.
echo "Launching MongoDB server..."
# mongod refuses to start when --dbpath / the log directory do not exist,
# so make sure they are there first.
mkdir -p ./db/data ./db/logs
mongod --dbpath ./db/data --logpath ./db/logs/mongodb.log --logappend &
echo "Launching redis server..."
redis-server ./db/conf/redis.conf &
|
kardigen/devcrowd_node_workshop
|
servers.sh
|
Shell
|
mit
| 189 |
#! /bin/sh
# Clone (or reuse a cached checkout of) eglibrary, force it onto the latest
# 'develop' branch and run its sync script.
if [ -e eglibrary/.git/ ]; then
echo "eglibrary has cache"
else
rm -rf ./eglibrary
git clone [email protected]:eaglesakura/eglibrary.git || exit 1
fi
# Abort when the checkout is missing: running `git clean -f` / `git checkout -f`
# in the wrong directory would destroy unrelated files.
cd eglibrary/ || exit 1
git clean -f .
git checkout -f develop
git pull origin develop
chmod 755 ./script/sync-develop.sh
./script/sync-develop.sh
cd ../
|
eaglesakura/andriders-central-engine-v3
|
script/developer-sync-eglibrary.sh
|
Shell
|
mit
| 298 |
#!/bin/bash
# Derive a Kafka broker id from the pod hostname (e.g. "kafka-2" -> 2),
# render server.properties from its template and start the broker.
HOST=$(hostname -s)
DOMAIN=$(hostname -d)   # currently unused; kept for parity with the original
if [[ $HOST =~ (.*)-([0-9]+)$ ]]; then
NAME=${BASH_REMATCH[1]}
ORD=${BASH_REMATCH[2]}
else
echo "Failed to extract ordinal from hostname $HOST" >&2
exit 1
fi
BROKER_ID=$((ORD))
# $ZK (ZooKeeper connect string) is expected from the container environment —
# TODO confirm it is always set by the deployment.
# Single sed invocation replaces the original `cat | sed | sed` pipeline.
sed -e "s/{{zookeeper.connect}}/$ZK/g" \
    -e "s/{{broker.id}}/$BROKER_ID/g" \
    /etc/kafka/server.properties.template > /etc/kafka/server.properties
kafka-server-start.sh /etc/kafka/server.properties
|
reza-rahim/microservice
|
ansible/kube-app/docker/kafka/kafka-start.sh
|
Shell
|
mit
| 463 |
#!/usr/bin/env bash
# CI helper: log in to quay.io and push the acoustid images, tagged with the
# version extracted from GITHUB_REF (refs/tags/<version> -> <version>).
set -eu
echo "$QUAY_PASSWORD" | docker login quay.io --username "$QUAY_USERNAME" --password-stdin
set -x
VERSION=$(echo "$GITHUB_REF" | cut -d/ -f3-)
# docker push quay.io/acoustid/acoustid-index:$VERSION
docker push "quay.io/acoustid/acoustid-index-updater:$VERSION"
docker push "quay.io/acoustid/acoustid-index-proxy:$VERSION"
# docker push quay.io/acoustid/acoustid-server:$VERSION
# docker push quay.io/acoustid/acoustid-server-api:$VERSION
|
acoustid/go-acoustid
|
ci/push-images.sh
|
Shell
|
mit
| 467 |
#! /usr/bin/env bash
# client.sh <server:addr> <do>
#   $1 - server address the perf client connects to
#   $2 - operation passed to -do (post, request, ...)
if [ $# -lt 2 ]; then
    echo "usage: $0 <server:addr> <do>" >&2
    exit 1
fi
echo "building ..."
go build
# -do can be post, request
time ./perf -profile -c -addr "$1" -do "$2" -payload 64 -batchsize 200 -conns 16 -routines 200 -count 100000
# streamrx, streamtx
# time ./perf -c -addr $1 -do $2 -batchsize 1000 -payload 16000 -buffersize 17000 -conns 1 -routines 1 -count 1 -stream 100000
# verify
# time ./perf -c -addr $1 -do verify -batchsize 200 -conns 16 -routines 200 -count 200000 -stream 100
echo
echo "building client profile information ..."
go tool pprof -svg perf client.pprof > client.pprof.svg
go tool pprof -inuse_space -svg perf client.mprof > client.ispace.svg
go tool pprof -inuse_objects -svg perf client.mprof > client.iobjs.svg
go tool pprof -alloc_space -svg perf client.mprof > client.aspace.svg
go tool pprof -alloc_objects -svg perf client.mprof > client.aobjs.svg
echo
|
prataprc/gofast
|
perf/client.sh
|
Shell
|
mit
| 870 |
#!/usr/bin/env bash
###
### Released under the MIT License (MIT) --- see ../LICENSE
### Copyright (c) 2014 Ankit Singla, Sangeetha Abdu Jyothi,
### Chi-Yao Hong, Lucian Popa, P. Brighten Godfrey,
### Alexandra Kolla, Simon Kassing
###
### Runs the TopoBench throughput benchmark NRUNS times for a fat-tree
### topology, solves the generated LP, and appends the mean/stddev of the
### achieved flow to results/control/control.txt.
###################################
### SCRIPT PARAMETERS
###
NRUNS=1 # Reduced for sake of running time
MYMAINPATH="../../"
###################################
### GENERAL PARAMETERS
###
seed=1 # Set to 0 for random
lpt=SIMPLE # SIMPLE is fast for dense traffic matrices
# MCFFC is fast for sparse traffic matrices
###################################
### TOPOLOGY SPECIFIC PARAMETERS
###
topology=FT # JF = JellyFish,
# FT = fat-tree,
# XP = Xpander
kft=8
###################################
### PATH EVALUATION PARAMETERS
###
patheval=SLACK # SLACK = Use slack such that flow cannot deviate more than SLACK from shortest path src-dst
# NEIGH = Use neighbor's shortest path to destination
# KSHRT = K-shortest paths
# VALIA = K-valiant load balancing
slack=0 # Slack value [0,inf], set -1 for infinite slack
#kvlb=20 # K-value for either k-shortest path or k-valiant load balancing
###################################
### TRAFFIC GENERATOR SPECIFIC PARAMETERS
###
tmode=AT1 # RPP = Rand. Permutation Pairs,
# ATA = All-to-All,
# AT1 = All-to-One,
# STR = Stride,
# MIWP = Min. Weight Pairs
# MAWP = Max. Weight Pairs
trafficFrac=0.4
###################################
### EXECUTE RUNS
###
# Clock start
before="$(date +%s)"
# Executing NRUNS times, store each resulting flow into flowtmp_c
rm -rf flowtmp_c
for (( i=0 ; i < $NRUNS ; i++ ))
do
cd $MYMAINPATH
# Generate linear program and additional information in temp/
# There are five parts to the command: general, selectors, topology parameters, path evaluator parameters, and traffic parameters
# NOTE(review): the trailing backslash after "-slack $slack" joins the next
# line onto this command, where "#-tfr $trafficFrac" is then parsed as an
# inline comment — i.e. -tfr is effectively disabled. Do not insert lines
# inside this continuation without removing the backslash.
java -jar TopoBench.jar \
-mode PRODUCE -seed $seed -lpt $lpt \
-gt $topology -pe $patheval -tm $tmode \
-kft $kft \
-slack $slack \
#-tfr $trafficFrac
# Execute solver
sh scripts/localLpRun.sh # Local: scripts/localLpRun.sh, Remote: scripts/remoteLpRun.sh
# Run analysis (result will be in analysis/<time-specific-folder-name>/)
java -jar TopoBench.jar \
-mode ANALYZE -seed $seed -lpt $lpt \
-tm $tmode
cd -
# Add to list of received flow values
flowVal=$(cat ../../temp/objective.txt)
echo "$flowVal" >> flowtmp_c
done
# Clock end
after="$(date +%s)"
time_taken=`expr $after - $before`
time_taken=`expr $time_taken / 60`
# Calculate average and standard deviation
avgstdflow=`cat flowtmp_c | awk 'BEGIN{sum=0; count=0}{thr=$1; sum+=thr; val[count]=thr; count++}END{mean=sum/count; sq_sum=0; for (i=0; i < count; i++) sq_sum+=(val[i] - mean)*(val[i] - mean); variance=sqrt(sq_sum/count)/mean; rnd_mean=int(mean * 100000) / 100000; rnd_variance=int(variance*100000)/100000; print rnd_mean, rnd_variance}'`
# Write result to file
# NOTE(review): $switches, $totport, $netports and $svrports are never set in
# this script — presumably exported by a calling wrapper, otherwise they
# expand to empty fields in the result line. Confirm against the callers.
echo "$patheval SL=$slack $topology $switches $totport $netports $svrports $tmode $trafficFrac $avgstdflow $time_taken" >> ../../results/control/control.txt
|
ndal-eth/topobench
|
scripts/control/fat_tree.sh
|
Shell
|
mit
| 3,140 |
#!/bin/bash
# create multiresolution windows icon
# Bundle the 16/32/48 px belcoin PNGs into a single multi-resolution .ico.
ICON_SRC_DIR=../../src/qt/res/icons
ICON_DST="${ICON_SRC_DIR}/belcoin.ico"
convert "${ICON_SRC_DIR}/belcoin-16.png" \
        "${ICON_SRC_DIR}/belcoin-32.png" \
        "${ICON_SRC_DIR}/belcoin-48.png" \
        "${ICON_DST}"
|
Nephatiu/belcoin
|
share/qt/make_windows_icon.sh
|
Shell
|
mit
| 229 |
#!/bin/bash
# HLH gene panel for MAS diagnostics
# Usage: cre.gemini.hlh_panel.sh <gemini.db>
# Reports rare (max_aaf_all <= 0.05) variants in the HLH panel genes for all
# samples in the GEMINI database passed as $1 (now quoted so paths with
# spaces work).
gemini query --header -q "select s.name as sample,v.chrom,v.start+1 as pos,v.ref,v.alt,v.gene,v.rs_ids,v.impact,v.max_aaf_all
from variants v, samples s
where v.max_aaf_all <= 0.05 and
v.gene in ('AP3B1','BLOC1S6','CD27','GATA2','ITK','LYST','NLRC4','PRF1','RAB27A','SH2D1A','SLC7A7','STX11','STXBP2','UNC13D','XIAP')" "$1"
|
naumenko-sa/cre
|
cre.gemini.hlh_panel.sh
|
Shell
|
mit
| 390 |
#!/usr/bin/env bash
# Print a tagged status message in bold blue, padded with blank lines.
function pretty_print() {
  local message="[macos-confs] ${1}…"
  printf '\n\033[1m\033[34m%s\033[0m\n\n' "${message}"
}
# Install Homebrew if the prerequisites are met and it is not present yet.
# Exits non-zero when the command line tools are missing or brew already
# exists (the latter kept as-is so callers observe the same exit status).
function install_brew() {
  if ! make --version > /dev/null; then pretty_print "❗️ Commandline tools (xcode-select) not installed" && exit 1; fi
  if command -v brew > /dev/null; then pretty_print "🍻 Brew already installed" && exit 1; fi
  pretty_print "🍻 Installing brew"
  # Fix: the legacy ruby-based installer was removed upstream; Homebrew now
  # ships an official bash install script.
  /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
  brew doctor
}
install_brew
|
thibmaek/worker
|
installers/install-brew.sh
|
Shell
|
mit
| 546 |
#!/bin/bash
# Convert microscope stacks (*.lsm / *.tif) from the inbox to NRRD files via
# an ImageJ macro, preprocess the channels, then move results onward.
# The whitespace-separated settings file is loaded into the positional
# parameters; the numbered fields below are picked from it.
set -- $(<./AlignmentPipe/settings.var)
Ij=${10}        # field 10: executable run under xvfb-run with -macro — presumably ImageJ; confirm field layout
lsm2nrrd=${20}  # field 20: macro performing the LSM/TIF -> NRRD conversion
inbox=${2}      # field 2: incoming image directory, used as a glob prefix below
proc=${3}       # field 3: destination for converted NRRD files
ppro=${21}      # field 21: python preprocessing script
chn=${30}       # field 30: channel suffix used to detect a successful conversion
log=${4}        # field 4: destination for the per-image log files
for f in $inbox*.{lsm,tif}
do
echo Processing $f
# Headless ImageJ run; xvfb-run supplies the X display the macro needs.
nice xvfb-run ${Ij} -macro ${lsm2nrrd} ${f} -batch
# Length of the inbox prefix; wc -c counts the trailing newline, which makes
# the later `cut -c $inl-` start right after the prefix.
inl=`echo $inbox | wc -c`
# Strip the 4-character extension (rev|cut -c 5-|rev) and the inbox prefix,
# leaving the bare image name used for the output files.
fr=`echo $f | rev | cut -c 5- |rev | cut -c $inl-`
if [ -e $fr*$chn.nrrd ]
then
echo Successfully converted $f to `ls $fr*.nrrd | wc -l` NRRD files
echo 'PreProccessing image channels:'
python $ppro $fr-PP_C1.nrrd $fr-PP_C2.nrrd ZXYC 10
# The preprocessing script is expected to emit a *BG.nrrd file on success.
if [ -e $fr*BG.nrrd ]
then
echo 'Cleaning...'
rm $f
mv $fr*.nrrd $proc
mv $fr*.log $log
echo 'Pushed for processing...'
else
echo Error preprocessing $fr!
fi
else
echo Error converting $f into NRRD files!
fi
done
|
Robbie1977/AlignmentPipe
|
LoadImagesXYZ.sh
|
Shell
|
mit
| 809 |
#!/bin/bash
# Script for convergence analysis on transport model for different methods
#-------------------------------------------------------------------------
source inc_vars.sh

# Replace line 15 of par/trans.par (the method selector) with the given
# value, keeping the rewritten file in place.
set_method() {
  awk -v method="$1" '{ if ( NR == 15 ) { print method;} else {print $0;} }' par/trans.par > par/trans2.par
  cp par/trans2.par par/trans.par
}

set_method "lsqtrc"
./runntimesgrids7.sh

set_method "+1perhx+2lintrv+3lsqhxe+"
./runntimesgrids7.sh
|
pedrospeixoto/iModel
|
sh/runtranptests.sh
|
Shell
|
mit
| 499 |
# shellcheck shell=bash disable=1090
# Source the shell-specific "export" snippets from ~/.dotfiles/exports.
# Delegates to __dotfiles_profile_includes (defined elsewhere in the
# dotfiles) to source each listed file relative to $base_dir.
function __dotfiles_exports() {
local base_dir="${HOME}/.dotfiles/exports"
local sources=(
cli-colors.sh
z.sh
pyenv.interactive.sh
)
# Bash: add the bash-only prompt snippet and pass the array element-wise.
if [ -n "$BASH" ]; then
sources+=(
bash-git-prompt.sh
)
__dotfiles_profile_includes "$base_dir" "${sources[@]}"
fi
# Zsh: $sources is deliberately left unquoted — in zsh a bare array name
# expands to all elements (hence the shellcheck disables). Do not quote it.
if [ -n "$ZSH_NAME" ]; then
sources+=(
zsh-git-prompt.sh
)
# shellcheck disable=2086,2128
__dotfiles_profile_includes "$base_dir" $sources
fi
}
__dotfiles_exports
|
wesm87/dotfiles
|
exports.interactive.sh
|
Shell
|
mit
| 520 |
#!/bin/bash
#ubuntuReady
# One-shot Ubuntu workstation setup: registers package repositories,
# upgrades the system and installs desktop + development tooling.
#ADD ALL REPOSITORIES WE NEED
##BASIC REPOSITORIES
##GIMP
sudo apt-add-repository ppa:otto-kesselgulasch/gimp -y
##Nvidia Graphics Driver
sudo add-apt-repository ppa:graphics-drivers/ppa -y
##Google Chrome
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
sudo sh -c 'echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list'
##Ubuntu Restricted Extras
sudo apt-add-repository ppa:mc3man/trusty-media -y
sudo apt-add-repository ppa:gnome3-team/gnome3 -y
##Skype
sudo apt-add-repository "deb http://archive.canonical.com/ubuntu/ xenial partner" -y
##Ubuntu optional packages
sudo apt-add-repository ppa:maarten-baert/simplescreenrecorder -y
##Numix Themes & Icons
sudo add-apt-repository ppa:numix/ppa -y
sudo add-apt-repository ppa:noobslab/themes -y
## Add Steam repository
sudo add-apt-repository multiverse
##DEV STUFF REPOSITORIES
##Git
sudo add-apt-repository ppa:git-core/ppa -y
##Docker
sudo apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D
sudo apt-add-repository 'deb https://apt.dockerproject.org/repo ubuntu-xenial main'
#UPDATE THE SYSTEM
sudo apt-get update && sudo apt-get dist-upgrade -y
#INSTALL BASICS
##Install Nvidia Graphics Driver
sudo apt-get install nvidia-361 -y
##Install Google Chrome
sudo apt-get install google-chrome-stable -y
##Fix chrome duplicate plank
sudo sed -i "s/\/usr\/bin\/google-chrome-stable/\/usr\/bin\/google-chrome/g" /usr/share/applications/google-chrome.desktop
sudo sed -i "s/StartupWMClass=Google-chrome-stable//g" /usr/share/applications/google-chrome.desktop
##Install File Compression Libs
sudo apt-get install unace unrar zip unzip xz-utils p7zip-full p7zip-rar -y
##FIX: the "install" verb was missing, so this line only printed apt usage
sudo apt-get install sharutils rar uudeview mpack arj cabextract file-roller -y
##Install Ubuntu Restricted Extras
sudo apt-get install ubuntu-restricted-extras ubuntu-restricted-addons ffmpeg -y
sudo apt-get install gstreamer0.10-plugins-ugly libavcodec-extra-54 libvdpau-va-gl1 -y
sudo apt-get install libmad0 mpg321 gstreamer1.0-libav gdebi curl flashplugin-installer -y
sudo apt-get install dconf-editor gnome-system-monitor -y
##Install common repositories manager
sudo apt-get install software-properties-common -y
##Enable DVD Playback
sudo /usr/share/doc/libdvdread4/install-css.sh
##Install Slack
sudo snap install slack --classic
##Install Skype
sudo apt-get install skype -y
sudo apt-get install gtk2-engines-murrine:i386 gtk2-engines-pixbuf:i386 -y
##Install GNOME Pomodoro Timer
sudo apt-get install gnome-shell-pomodoro -y
##Install Steam
sudo apt-get install steam -y
##Install more packages
##FIX: the prefix was duplicated ("sudo apt-get sudo apt-get install ...")
sudo apt-get install aptitude apt-file qbittorrent quiterss dconf-editor -y
##FIX: "install" was missing on the next three lines as well
sudo apt-get install gnome-system-monitor simplescreenrecorder quassel-qt4 -y
sudo apt-get install bleachbit kid3-qt calibre pinta unetbootin gnome-disk-utility -y
sudo apt-get install vlc browser-plugin-vlc build-essential jockey-gtk -y
##Royal theme & Numix circle icons
sudo apt-get install numix-icon-theme numix-icon-theme-circle royal-gtk-theme -y
##Install slurm (network traffic monitor)
sudo apt-get install slurm -y
##Install GIMP
sudo apt-get install gimp -y
#INSTALL DEVELOPMENT STUFF
##Install the latest git Version
sudo apt-get install git -y
##Install docker
sudo apt-get install \
apt-transport-https \
ca-certificates \
curl \
software-properties-common -y
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
sudo apt-get update
sudo apt-get install docker-ce -y
sudo usermod -aG docker $USER # Enable use docker without sudo
##Install Node Version Manager
wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.33.8/install.sh | bash
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
nvm install --lts
##Install Filezilla
sudo apt-get install filezilla -y
##Install Postman
wget https://dl.pstmn.io/download/latest/linux64 -O postman.tar.gz
sudo tar -xzf postman.tar.gz -C /opt
rm postman.tar.gz
sudo ln -s /opt/Postman/Postman /usr/bin/postman
###Create Postman Launcher
cat > ~/.local/share/applications/postman.desktop <<EOL
[Desktop Entry]
Encoding=UTF-8
Name=Postman
Exec=postman
Icon=/opt/Postman/resources/app/assets/icon.png
Terminal=false
Type=Application
Categories=Development;
EOL
##Create a $HOME/projects and $HOME/projects/lab directory for projects
mkdir -p projects/lab
# AUTOREMOVE TRASH FILES
sudo apt-get autoremove -y
# Open for downloads
firefox https://www.jetbrains.com/toolbox/download/download-thanks.html?platform=linux
firefox https://telegram.org/dl/desktop/linux
firefox https://code.visualstudio.com/docs/?dv=linux64_deb
|
ulisesantana/ubuntuReady
|
ubuntuReady.sh
|
Shell
|
mit
| 4,897 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2644-1
#
# Security announcement date: 2015-06-15 00:00:00 UTC
# Script generation date: 2017-01-01 21:04:37 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: x86_64
#
# CVE List:
# - CVE-2015-1328
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
#
# Upgrade every affected kernel image package to the version recommended by
# the security team (same packages, versions and order as before).
for package in \
  linux-image-3.16.0-41-powerpc-smp=3.16.0-41.55~14.04.1 \
  linux-image-3.16.0-41-powerpc64-smp=3.16.0-41.55~14.04.1 \
  linux-image-3.16.0-41-generic=3.16.0-41.57~14.04.1 \
  linux-image-3.16.0-41-powerpc-e500mc=3.16.0-41.55~14.04.1 \
  linux-image-3.16.0-41-generic-lpae=3.16.0-41.55~14.04.1 \
  linux-image-3.16.0-41-lowlatency=3.16.0-41.57~14.04.1 \
  linux-image-3.16.0-41-powerpc64-emb=3.16.0-41.55~14.04.1
do
  sudo apt-get install --only-upgrade "$package" -y
done
|
Cyberwatch/cbw-security-fixes
|
Ubuntu_14.04_LTS/x86_64/2015/USN-2644-1.sh
|
Shell
|
mit
| 2,014 |
#!/bin/bash
# strindex <haystack> <needle>
# Echo the 0-based index of the first occurrence of <needle> in <haystack>,
# or -1 when it does not occur. <needle> is treated as a glob pattern, as in
# the original implementation.
strindex() {
  local prefix="${1%%$2*}"   # everything before the first match
  if [[ $prefix = $1 ]]; then
    echo -1
  else
    echo "${#prefix}"
  fi
}
|
verdverm/pypge
|
experiments/post_process/scripts/utils.sh
|
Shell
|
mit
| 86 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2938-1
#
# Security announcement date: 2016-03-21 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:18 UTC
#
# Operating System: Ubuntu 15.10
# Architecture: i686
#
# Vulnerable packages fix on version:
# - git:1:2.5.0-1ubuntu0.2
#
# Last versions recommended by security team:
# - git:1:2.5.0-1ubuntu0.2
#
# CVE List:
# - CVE-2016-2315
# - CVE-2016-2324
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade git to the fixed version (no-op if already at or above it).
sudo apt-get install --only-upgrade git=1:2.5.0-1ubuntu0.2 -y
|
Cyberwatch/cbw-security-fixes
|
Ubuntu_15.10/i686/2016/USN-2938-1.sh
|
Shell
|
mit
| 627 |
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build phase: copies each Pod
# framework of the active configuration into the app's Frameworks folder,
# strips architectures the target does not use, and re-signs when required.
# NOTE(review): the shebang is /bin/sh but the script uses bashisms
# ([[ ]], local, PIPESTATUS) — presumably always executed by bash via
# Xcode on macOS; confirm before running under a strict POSIX sh.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copy the framework given as $1 into the Frameworks folder. The source is
# resolved from BUILT_PRODUCTS_DIR (full path, then basename) or taken as a
# literal path; symlinks are followed before copying.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
# Removes every architecture in the binary that is not listed in VALID_ARCHS.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the Pods of the active configuration (lists are identical here, but
# CocoaPods generates one branch per configuration).
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "Pods-HuutoNetKit_Example/Alamofire.framework"
install_framework "Pods-HuutoNetKit_Example/HuutoNetKit.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-HuutoNetKit_Example/Alamofire.framework"
install_framework "Pods-HuutoNetKit_Example/HuutoNetKit.framework"
fi
|
tsharju/HuutoNetKit
|
Example/Pods/Target Support Files/Pods-HuutoNetKit_Example/Pods-HuutoNetKit_Example-frameworks.sh
|
Shell
|
mit
| 3,698 |
#!/usr/bin/env bash
#
# Install JDK for Linux and Mac OS
#
# This script determines the most recent early-access build number,
# downloads the JDK archive to the user home directory and extracts
# it there.
#
# Exported environment variables (when sourcing this script)
#
# JAVA_HOME is set to the extracted JDK directory
# PATH is prepended with ${JAVA_HOME}/bin
#
# (C) 2018 Christian Stein
#
# https://github.com/sormuras/bach/blob/master/install-jdk.sh
#
# Abort on the first failing command; nounset and xtrace are kept available
# for debugging but disabled by default (see the linked Travis issue).
set -o errexit
#set -o nounset # https://github.com/travis-ci/travis-ci/issues/5434
#set -o xtrace
# Record script metadata and set the defaults for every option global that
# parse_options may later override.
function initialize() {
  readonly script_name="$(basename "${BASH_SOURCE[0]}")"
  readonly script_version='2019-01-18 II'

  # Behaviour flags
  dry=false
  silent=false
  verbose=false
  emit_java_home=false
  cacerts=false

  # JDK selection defaults ('?' means "determine automatically")
  feature='ea'
  license='GPL'
  os='?'
  url='?'
  workspace="${HOME}"
  target='?'
}
# Print the help text to stdout (callers exit right after).
# Heredoc is unquoted on purpose so ${script_name}/${script_version}/${HOME}
# expand into the message.
function usage() {
cat << EOF
Usage: ${script_name} [OPTION]...
Download and extract the latest-and-greatest JDK from java.net or Oracle.
Version: ${script_version}
Options:
-h|--help Displays this help
-d|--dry-run Activates dry-run mode
-s|--silent Displays no output
-e|--emit-java-home Print value of "JAVA_HOME" to stdout (ignores silent mode)
-v|--verbose Displays verbose output
-f|--feature 9|10|...|ea JDK feature release number, defaults to "ea"
-l|--license GPL|BCL License defaults to "GPL", BCL also indicates OTN-LA for Oracle Java SE
-o|--os linux-x64|osx-x64 Operating system identifier (works best with GPL license)
-u|--url "https://..." Use custom JDK archive (provided as .tar.gz file)
-w|--workspace PATH Working directory defaults to \${HOME} [${HOME}]
-t|--target PATH Target directory, defaults to first component of the tarball
-c|--cacerts Link system CA certificates (currently only Debian/Ubuntu is supported)
EOF
}
# Print a message and terminate the script.
#   script_exit MSG       -> literal message, exit status 0
#   script_exit MSG CODE  -> message with %b escapes expanded, exit CODE
# Any other call pattern is reported as an internal error (exit 2).
function script_exit() {
  if [[ $# -eq 1 ]]; then
    printf '%s\n' "$1"
    exit 0
  elif [[ $# -eq 2 && $2 =~ ^[0-9]+$ ]]; then
    printf '%b\n' "$1"
    exit "$2"
  fi
  script_exit 'Invalid arguments passed to script_exit()!' 2
}
# Echo the arguments unless silent mode is active.
function say() {
  [[ ${silent} == true ]] || echo "$@"
}
# Echo the arguments only when verbose mode is active. The function shares
# its name with the ${verbose} flag variable; bash keeps the two apart.
function verbose() {
  [[ ${verbose} != true ]] || echo "$@"
}
# Parse the command line into the globals set up by initialize(). Options
# are accepted in -x, -X and --long form; value-taking options consume the
# following argument. Unknown options abort via script_exit with status 2.
function parse_options() {
local option
while [[ $# -gt 0 ]]; do
option="$1"
shift
case ${option} in
-h|-H|--help)
usage
exit 0
;;
-v|-V|--verbose)
verbose=true
;;
-s|-S|--silent)
silent=true
verbose "Silent mode activated"
;;
-d|-D|--dry-run)
dry=true
verbose "Dry-run mode activated"
;;
-e|-E|--emit-java-home)
emit_java_home=true
verbose "Emitting JAVA_HOME"
;;
-f|-F|--feature)
feature="$1"
verbose "feature=${feature}"
shift
;;
-l|-L|--license)
license="$1"
verbose "license=${license}"
shift
;;
-o|-O|--os)
os="$1"
verbose "os=${os}"
shift
;;
-u|-U|--url)
url="$1"
verbose "url=${url}"
shift
;;
-w|-W|--workspace)
workspace="$1"
verbose "workspace=${workspace}"
shift
;;
-t|-T|--target)
target="$1"
verbose "target=${target}"
shift
;;
-c|-C|--cacerts)
cacerts=true
verbose "Linking system CA certificates"
;;
*)
script_exit "Invalid argument was provided: ${option}" 2
;;
esac
done
}
# Probe http://jdk.java.net/<N> for N = 9, 10, ... until a page is missing
# (HTTP >= 400) and store the last existing release number in the global
# `latest_jdk`.
function determine_latest_jdk() {
  local number
  local curl_result
  local url
  verbose "Determine latest JDK feature release number"
  number=9
  while [[ ${number} != 99 ]]
  do
    url="http://jdk.java.net/${number}"
    curl_result=$(curl -o /dev/null --silent --head --write-out '%{http_code}' "${url}")
    if [[ ${curl_result} -ge 400 ]]; then
      break
    fi
    verbose " Found ${url} [${curl_result}]"
    latest_jdk=${number}
    # Fix: `$[ ... ]` is deprecated bash syntax; use standard arithmetic.
    number=$((number + 1))
  done
  verbose "Latest JDK feature release number is: ${latest_jdk}"
}
# Resolve 'ea'/'?' to the latest release number and validate that the
# requested feature is in range and the target directory does not exist yet.
function perform_sanity_checks() {
  if [[ ${feature} == '?' ]] || [[ ${feature} == 'ea' ]]; then
    feature=${latest_jdk}
  fi
  if [[ ${feature} -lt 9 ]] || [[ ${feature} -gt ${latest_jdk} ]]; then
    script_exit "Expected feature release number in range of 9 to ${latest_jdk}, but got: ${feature}" 3
  fi
  if [[ -d "$target" ]]; then
    # Fix: ${target} was single-quoted, so `du` measured the literal string
    # '${target}' instead of the actual directory.
    script_exit "Target directory must not exist, but it does: $(du -hs "${target}")" 3
  fi
}
# Resolve the download url for the requested feature/license/os into the
# global `url`: known GA releases are hard-coded; anything else is scraped
# from the jdk.java.net page for that feature release.
function determine_url() {
local DOWNLOAD='https://download.java.net/java'
local ORACLE='http://download.oracle.com/otn-pub/java/jdk'
# Archived feature or official GA build?
case "${feature}-${license}" in
9-GPL) url="${DOWNLOAD}/GA/jdk9/9.0.4/binaries/openjdk-9.0.4_${os}_bin.tar.gz"; return;;
9-BCL) url="${ORACLE}/9.0.4+11/c2514751926b4512b076cc82f959763f/jdk-9.0.4_${os}_bin.tar.gz"; return;;
10-GPL) url="${DOWNLOAD}/GA/jdk10/10.0.2/19aef61b38124481863b1413dce1855f/13/openjdk-10.0.2_${os}_bin.tar.gz"; return;;
10-BCL) url="${ORACLE}/10.0.2+13/19aef61b38124481863b1413dce1855f/jdk-10.0.2_${os}_bin.tar.gz"; return;;
# Fall-through for 11-GPL as the build number, here 9, does change now and then...
# See https://github.com/sormuras/bach/issues/42 for details
# 11-GPL) url="${DOWNLOAD}/GA/jdk11/9/GPL/openjdk-11.0.2_${os}_bin.tar.gz"; return;;
11-BCL) url="${ORACLE}/11.0.2+9/f51449fcd52f4d52b93a989c5c56ed3c/jdk-11.0.2_${os}_bin.tar.gz"; return;;
esac
# EA or RC build?
# Scrape all hrefs from the release page, then pick the archive matching the
# requested license/os; `|| true` keeps errexit from firing on no match.
local JAVA_NET="http://jdk.java.net/${feature}"
local candidates=$(wget --quiet --output-document - ${JAVA_NET} | grep -Eo 'href[[:space:]]*=[[:space:]]*"[^\"]+"' | grep -Eo '(http|https)://[^"]+')
url=$(echo "${candidates}" | grep -Eo "${DOWNLOAD}/.+/jdk${feature}/.+/${license}/.*jdk-${feature}.+${os}_bin.tar.gz$" || true)
if [[ -z ${url} ]]; then
script_exit "Couldn't determine a download url for ${feature}-${license} on ${os}" 1
fi
}
# Fill in the os default from $OSTYPE, resolve the download url (unless one
# was given with --url), and derive the globals `archive` and `status`.
function prepare_variables() {
if [[ ${os} == '?' ]]; then
if [[ "$OSTYPE" == "darwin"* ]]; then
os='osx-x64'
else
os='linux-x64'
fi
fi
if [[ ${url} == '?' ]]; then
determine_latest_jdk
perform_sanity_checks
determine_url
else
feature='<overridden by custom url>'
license='<overridden by custom url>'
os='<overridden by custom url>'
fi
archive="${workspace}/$(basename ${url})"
# HEAD request only: records the HTTP status for the summary printout.
status=$(curl -o /dev/null --silent --head --write-out %{http_code} ${url})
}
# Dump the resolved settings to stdout (skipped in silent mode by main()).
function print_variables() {
cat << EOF
Variables:
feature = ${feature}
license = ${license}
os = ${os}
url = ${url}
status = ${status}
archive = ${archive}
EOF
}
# Download the JDK archive into the workspace, extract it, and set the
# global `target` to the resulting JDK home directory.
# NOTE: the option-string variables below are deliberately expanded unquoted
# so they word-split into individual wget/tar arguments.
function download_and_extract_and_set_target() {
local quiet='--quiet'; if [[ ${verbose} == true ]]; then quiet=''; fi
# `local` here is a variable *named* "local" — legal, but easy to misread.
local local="--directory-prefix ${workspace}"
local remote='--timestamping --continue'
local wget_options="${quiet} ${local} ${remote}"
local tar_options="--file ${archive}"
say "Downloading JDK from ${url}..."
verbose "Using wget options: ${wget_options}"
if [[ ${license} == 'GPL' ]]; then
wget ${wget_options} ${url}
else
# Oracle (BCL) downloads require the license-acceptance cookie.
wget ${wget_options} --header "Cookie: oraclelicense=accept-securebackup-cookie" ${url}
fi
verbose "Using tar options: ${tar_options}"
if [[ ${target} == '?' ]]; then
# No explicit target: extract into the workspace and derive the JDK home
# from the tarball listing (Linux: parent of bin/javac; macOS: the bundle's
# Contents/Home).
tar --extract ${tar_options} -C "${workspace}"
if [[ "$OSTYPE" != "darwin"* ]]; then
target="${workspace}"/$(tar --list ${tar_options} | grep 'bin/javac' | tr '/' '\n' | tail -3 | head -1)
else
target="${workspace}"/$(tar --list ${tar_options} | head -2 | tail -1 | cut -f 2 -d '/' -)/Contents/Home
fi
else
# Explicit target: strip the leading archive components so the JDK lands
# directly inside ${target}.
if [[ "$OSTYPE" != "darwin"* ]]; then
mkdir --parents "${target}"
tar --extract ${tar_options} -C "${target}" --strip-components=1
else
mkdir -p "${target}"
tar --extract ${tar_options} -C "${target}" --strip-components=4 # . / <jdk> / Contents / Home
fi
fi
if [[ ${verbose} == true ]]; then
echo "Set target to: ${target}"
echo "Content of target directory:"
ls "${target}"
echo "Content of release file:"
[[ ! -f "${target}/release" ]] || cat "${target}/release"
fi
# Link to system certificates
# http://openjdk.java.net/jeps/319
# https://bugs.openjdk.java.net/browse/JDK-8196141
# TODO: Provide support for other distributions than Debian/Ubuntu
if [[ ${cacerts} == true ]]; then
mv "${target}/lib/security/cacerts" "${target}/lib/security/cacerts.jdk"
ln -s /etc/ssl/certs/java/cacerts "${target}/lib/security/cacerts"
fi
}
# Entry point: initialize defaults, parse arguments, resolve/download the
# JDK, then export JAVA_HOME and prepend its bin to PATH (effective for the
# caller only when this script is sourced).
function main() {
initialize
say "$script_name $script_version"
parse_options "$@"
prepare_variables
if [[ ${silent} == false ]]; then print_variables; fi
if [[ ${dry} == true ]]; then exit 0; fi
download_and_extract_and_set_target
export JAVA_HOME=$(cd "${target}"; pwd)
export PATH=${JAVA_HOME}/bin:$PATH
if [[ ${silent} == false ]]; then java -version; fi
if [[ ${emit_java_home} == true ]]; then echo "${JAVA_HOME}"; fi
}
main "$@"
|
mark-lester/Gemini
|
install-jdk.sh
|
Shell
|
mit
| 10,031 |
#!/bin/bash
# Run `make test` in every directory that contains a Makefile; on failure,
# print the directory's path so broken projects are easy to spot.
# Fix: the original `for m in $(find ...)` word-splits and breaks on paths
# containing whitespace; NUL-delimited find + read is safe.
find * -name "Makefile" -print0 | while IFS= read -r -d '' m; do
  (
    cd "$(dirname "$m")" || exit
    make test || echo "$(pwd)"
  )
done
|
gestiweb/docker-base
|
test_all.sh
|
Shell
|
mit
| 121 |
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build phase: copies the Pod
# frameworks of the active configuration into the app bundle, strips unused
# architectures and re-signs when required.
# NOTE(review): shebang is /bin/sh but the script uses bashisms ([[ ]],
# arrays, local) — presumably always executed by bash via Xcode on macOS.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copy the framework given as $1 into the Frameworks folder; the source is
# resolved from BUILT_PRODUCTS_DIR (full path, then basename) or taken as a
# literal path, and symlinks are followed before copying.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies the dSYM of a vendored framework
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DWARF_DSYM_FOLDER_PATH}"
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
# Signing may be backgrounded (parallel mode); the final `wait` at the
# bottom of the script collects the jobs.
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# Removes every architecture in the binary that is not listed in ARCHS.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the Pods of the active configuration (lists are identical here, but
# CocoaPods generates one branch per configuration).
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
install_framework "${BUILT_PRODUCTS_DIR}/CoreStore/CoreStore.framework"
install_framework "${BUILT_PRODUCTS_DIR}/RSLoadingView/RSLoadingView.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
install_framework "${BUILT_PRODUCTS_DIR}/CoreStore/CoreStore.framework"
install_framework "${BUILT_PRODUCTS_DIR}/RSLoadingView/RSLoadingView.framework"
fi
# Collect any backgrounded codesign jobs before the build phase finishes.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
XSega/Words
|
Pods/Target Support Files/Pods-Words/Pods-Words-frameworks.sh
|
Shell
|
mit
| 4,978 |
#!/bin/bash
# Deploy helper: (re)start the revel application in prod mode and record
# its PID next to the project.
HOME_DIR="$HOME"   # fix: `eval echo ~` replaced by the plain $HOME variable
PROJECT_PATH="$HOME/gocode/src/neviovesic/"
PROJECT_NAME="neviovesic" # app name for ogs etc ...
# Activate whatever that we eventually need to activate in order to run deployment
source "$HOME_DIR/.profile"
echo -e "$(tput setaf 2)Entering $PROJECT_PATH"
# Never start the app from the wrong directory.
cd "$PROJECT_PATH" || exit 1
sleep 1
echo -e "$(tput setaf 2)Starting revel application back up ..."
# NOTE(review): the log path is hard-coded under /root rather than derived
# from $HOME — confirm this script always runs as root.
revel run neviovesic prod > /root/gocode/src/neviovesic/neviovesic.log 2>&1 &
PID=$!
echo "$PID" > "$PROJECT_PATH/$PROJECT_NAME.pid"
|
0x19/neviovesic.com
|
deployment/shell/start.sh
|
Shell
|
mit
| 520 |
# Case-insensitive globbing (used in pathname expansion)
shopt -s nocaseglob
# Check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
# NOTE(review): GREP_OPTIONS is deprecated and makes modern grep print a
# warning on every invocation; kept for compatibility with older greps.
export GREP_OPTIONS='--color=auto'
# Prevent less from clearing the screen while still showing colors.
export LESS=-XR
# Set the terminal's title bar.
function titlebar() {
  echo -n $'\e]0;'"$*"$'\a'
}
# SSH auto-completion based on entries in known_hosts.
if [[ -e ~/.ssh/known_hosts ]]; then
  # fix: dropped the useless `cat` and collapsed `sort | uniq` into sort -u
  complete -o default -W "$(sed 's/[, ].*//' ~/.ssh/known_hosts | sort -u | grep -v '[0-9]')" ssh scp sftp
fi
# Disable ansible cows }:]
export ANSIBLE_NOCOWS=1
# thefuck magic fuckery — only hook it up when the tool is installed, so
# sourcing this file doesn't print "command not found" on clean machines.
if command -v thefuck > /dev/null; then
  eval "$(thefuck --alias)"
fi
|
Jakobo/dotfiles
|
source/50_misc.sh
|
Shell
|
mit
| 723 |
#!/bin/sh
# Compile the cache module into cache.o with strict C99 warnings.
# set -e makes the script fail immediately if gcc reports an error.
set -e

echo 'Compile cache to cache.o ...'
gcc -c -std=c99 -Ofast -Wall -Werror -pedantic -o cache.o cache.c
|
korun/dcs-system
|
cache/compile_cache.sh
|
Shell
|
mit
| 102 |
#!/bin/sh
# Generate an Xcode project with CMake inside <script-dir>/Project.
# Fixes: pushd/popd are bash-only builtins and fail under a strict
# POSIX /bin/sh (dash, ash); plain cd is sufficient since the script
# exits right after cmake. Also quotes paths and avoids backticks.
set -e

BASEDIR=$(dirname "$0")
mkdir -p "$BASEDIR/Project"
cd "$BASEDIR/Project"
cmake -GXcode ..
|
k6project/MazeGen
|
SetupXCode.sh
|
Shell
|
mit
| 125 |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<link rel="dns-prefetch" href="https://assets-cdn.github.com">
<link rel="dns-prefetch" href="https://avatars0.githubusercontent.com">
<link rel="dns-prefetch" href="https://avatars1.githubusercontent.com">
<link rel="dns-prefetch" href="https://avatars2.githubusercontent.com">
<link rel="dns-prefetch" href="https://avatars3.githubusercontent.com">
<link rel="dns-prefetch" href="https://github-cloud.s3.amazonaws.com">
<link rel="dns-prefetch" href="https://user-images.githubusercontent.com/">
<link crossorigin="anonymous" media="all" integrity="sha512-qQ+v+W1uJYfDMrQ/cwCVI+AGTsn1yi4rCU6KX45obe52BoF+WiHNeQ11u63iJA05vyivY57xNbhAsyK4/j1ZIQ==" rel="stylesheet" href="https://assets-cdn.github.com/assets/frameworks-01356238c65ce56a395237b592b58668.css" />
<link crossorigin="anonymous" media="all" integrity="sha512-0VaGyxNOIzt/BrWt4GClaNhjNqwpdoLOfj73ICt2NuGGdt7TCEJTF+MOOu4GhB7v5qicSD4wX+vY5BIFg4rGXg==" rel="stylesheet" href="https://assets-cdn.github.com/assets/github-09debddb368dcf769c737ccced3fffb6.css" />
<link crossorigin="anonymous" media="all" integrity="sha512-YHvc7WRozgBgxyWsNXY72IZr4qNlk3TROCgCztYp+ZTcJ4AXUhw14lHDZFVhTWK8AIkaYqcwTbQPLH5OgtIdQw==" rel="stylesheet" href="https://assets-cdn.github.com/assets/site-55f1f061b00e9353264cb71e2adcc953.css" />
<meta name="viewport" content="width=device-width">
<title>kafka-docker/0.9.0.1.sh at master · wurstmeister/kafka-docker · GitHub</title>
<meta name="description" content="Dockerfile for Apache Kafka">
<link rel="search" type="application/opensearchdescription+xml" href="/opensearch.xml" title="GitHub">
<link rel="fluid-icon" href="https://github.com/fluidicon.png" title="GitHub">
<meta property="fb:app_id" content="1401488693436528">
<meta property="og:image" content="https://avatars2.githubusercontent.com/u/4301434?s=400&v=4" /><meta property="og:site_name" content="GitHub" /><meta property="og:type" content="object" /><meta property="og:title" content="wurstmeister/kafka-docker" /><meta property="og:url" content="https://github.com/wurstmeister/kafka-docker" /><meta property="og:description" content="kafka-docker - Dockerfile for Apache Kafka" />
<link rel="assets" href="https://assets-cdn.github.com/">
<meta name="pjax-timeout" content="1000">
<meta name="request-id" content="C4D6:0275:6981197:B6FD7CB:5B5B6817" data-pjax-transient>
<meta name="selected-link" value="repo_source" data-pjax-transient>
<meta name="google-site-verification" content="KT5gs8h0wvaagLKAVWq8bbeNwnZZK1r1XQysX3xurLU">
<meta name="google-site-verification" content="ZzhVyEFwb7w3e0-uOTltm8Jsck2F5StVihD0exw2fsA">
<meta name="google-site-verification" content="GXs5KoUUkNCoaAZn7wPN-t01Pywp9M3sEjnt_3_ZWPc">
<meta name="google-analytics" content="UA-3769691-2">
<meta name="octolytics-host" content="collector.githubapp.com" /><meta name="octolytics-app-id" content="github" /><meta name="octolytics-event-url" content="https://collector.githubapp.com/github-external/browser_event" /><meta name="octolytics-dimension-request_id" content="C4D6:0275:6981197:B6FD7CB:5B5B6817" /><meta name="octolytics-dimension-region_edge" content="iad" /><meta name="octolytics-dimension-region_render" content="iad" />
<meta name="analytics-location" content="/<user-name>/<repo-name>/blob/show" data-pjax-transient="true" />
<meta class="js-ga-set" name="dimension1" content="Logged Out">
<meta name="hostname" content="github.com">
<meta name="user-login" content="">
<meta name="expected-hostname" content="github.com">
<meta name="js-proxy-site-detection-payload" content="YzA4ZWUyMzc4YTBhMjQxMWNkMDU5OTBiYmQ1OTQ2MGZjYjEzZjA2YTBkMmM4Yzc2NmVjODQ5OTBkMDdmMzM1Ynx7InJlbW90ZV9hZGRyZXNzIjoiMTY0LjUyLjMzLjIxMCIsInJlcXVlc3RfaWQiOiJDNEQ2OjAyNzU6Njk4MTE5NzpCNkZEN0NCOjVCNUI2ODE3IiwidGltZXN0YW1wIjoxNTMyNzE3MDgwLCJob3N0IjoiZ2l0aHViLmNvbSJ9">
<meta name="enabled-features" content="DASHBOARD_V2_LAYOUT_OPT_IN,EXPLORE_DISCOVER_REPOSITORIES,UNIVERSE_BANNER,FREE_TRIALS,MARKETPLACE_INSIGHTS,MARKETPLACE_PLAN_RESTRICTION_EDITOR,MARKETPLACE_SEARCH,MARKETPLACE_INSIGHTS_CONVERSION_PERCENTAGES">
<meta name="html-safe-nonce" content="75812a5b79ebd55104060f93329e816a8e9bd9b4">
<meta http-equiv="x-pjax-version" content="624a84b10cb29b7cdc5027f93865761c">
<link href="https://github.com/wurstmeister/kafka-docker/commits/master.atom" rel="alternate" title="Recent Commits to kafka-docker:master" type="application/atom+xml">
<meta name="go-import" content="github.com/wurstmeister/kafka-docker git https://github.com/wurstmeister/kafka-docker.git">
<meta name="octolytics-dimension-user_id" content="4301434" /><meta name="octolytics-dimension-user_login" content="wurstmeister" /><meta name="octolytics-dimension-repository_id" content="15405607" /><meta name="octolytics-dimension-repository_nwo" content="wurstmeister/kafka-docker" /><meta name="octolytics-dimension-repository_public" content="true" /><meta name="octolytics-dimension-repository_is_fork" content="false" /><meta name="octolytics-dimension-repository_network_root_id" content="15405607" /><meta name="octolytics-dimension-repository_network_root_nwo" content="wurstmeister/kafka-docker" /><meta name="octolytics-dimension-repository_explore_github_marketplace_ci_cta_shown" content="false" />
<link rel="canonical" href="https://github.com/wurstmeister/kafka-docker/blob/master/overrides/0.9.0.1.sh" data-pjax-transient>
<meta name="browser-stats-url" content="https://api.github.com/_private/browser/stats">
<meta name="browser-errors-url" content="https://api.github.com/_private/browser/errors">
<link rel="mask-icon" href="https://assets-cdn.github.com/pinned-octocat.svg" color="#000000">
<link rel="icon" type="image/x-icon" class="js-site-favicon" href="https://assets-cdn.github.com/favicon.ico">
<meta name="theme-color" content="#1e2327">
<link rel="manifest" href="/manifest.json" crossOrigin="use-credentials">
</head>
<body class="logged-out env-production page-blob">
<div class="position-relative js-header-wrapper ">
<a href="#start-of-content" tabindex="1" class="px-2 py-4 bg-blue text-white show-on-focus js-skip-to-content">Skip to content</a>
<div id="js-pjax-loader-bar" class="pjax-loader-bar"><div class="progress"></div></div>
<header class="Header header-logged-out position-relative f4 py-3" role="banner" >
<div class="container-lg d-flex px-3">
<div class="d-flex flex-justify-between flex-items-center">
<a class="header-logo-invertocat my-0" href="https://github.com/" aria-label="Homepage" data-ga-click="(Logged out) Header, go to homepage, icon:logo-wordmark; experiment:site_header_dropdowns; group:control">
<svg height="32" class="octicon octicon-mark-github" viewBox="0 0 16 16" version="1.1" width="32" aria-hidden="true"><path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0 0 16 8c0-4.42-3.58-8-8-8z"/></svg>
</a>
</div>
<div class="HeaderMenu d-flex flex-justify-between flex-auto">
<nav class="mt-0">
<ul class="d-flex list-style-none">
<li class="ml-2">
<a class="js-selected-navigation-item HeaderNavlink px-0 py-2 m-0" data-ga-click="Header, click, Nav menu - item:features; experiment:site_header_dropdowns; group:control" data-selected-links="/features /features/project-management /features/code-review /features/project-management /features/integrations /features" href="/features">
Features
</a> </li>
<li class="ml-4">
<a class="js-selected-navigation-item HeaderNavlink px-0 py-2 m-0" data-ga-click="Header, click, Nav menu - item:business; experiment:site_header_dropdowns; group:control" data-selected-links="/business /business/security /business/customers /business" href="/business">
Business
</a> </li>
<li class="ml-4">
<a class="js-selected-navigation-item HeaderNavlink px-0 py-2 m-0" data-ga-click="Header, click, Nav menu - item:explore; experiment:site_header_dropdowns; group:control" data-selected-links="/explore /trending /trending/developers /integrations /integrations/feature/code /integrations/feature/collaborate /integrations/feature/ship showcases showcases_search showcases_landing /explore" href="/explore">
Explore
</a> </li>
<li class="ml-4">
<a class="js-selected-navigation-item HeaderNavlink px-0 py-2 m-0" data-ga-click="Header, click, Nav menu - item:marketplace; experiment:site_header_dropdowns; group:control" data-selected-links=" /marketplace" href="/marketplace">
Marketplace
</a> </li>
<li class="ml-4">
<a class="js-selected-navigation-item HeaderNavlink px-0 py-2 m-0" data-ga-click="Header, click, Nav menu - item:pricing; experiment:site_header_dropdowns; group:control" data-selected-links="/pricing /pricing/developer /pricing/team /pricing/business-hosted /pricing/business-enterprise /pricing" href="/pricing">
Pricing
</a> </li>
</ul>
</nav>
<div class="d-flex">
<div class="d-lg-flex flex-items-center mr-3">
<div class="header-search scoped-search site-scoped-search js-site-search position-relative js-jump-to"
role="search combobox"
aria-owns="jump-to-results"
aria-label="Search or jump to"
aria-haspopup="listbox"
aria-expanded="true"
>
<div class="position-relative">
<!-- '"` --><!-- </textarea></xmp> --></option></form><form class="js-site-search-form" data-scope-type="Repository" data-scope-id="15405607" data-scoped-search-url="/wurstmeister/kafka-docker/search" data-unscoped-search-url="/search" action="/wurstmeister/kafka-docker/search" accept-charset="UTF-8" method="get"><input name="utf8" type="hidden" value="✓" />
<label class="form-control header-search-wrapper header-search-wrapper-jump-to position-relative d-flex flex-justify-between flex-items-center js-chromeless-input-container">
<input type="text"
class="form-control header-search-input jump-to-field js-jump-to-field js-site-search-focus js-site-search-field is-clearable"
data-hotkey="s,/"
name="q"
value=""
placeholder="Search"
data-unscoped-placeholder="Search GitHub"
data-scoped-placeholder="Search"
autocapitalize="off"
aria-autocomplete="list"
aria-controls="jump-to-results"
data-jump-to-suggestions-path="/_graphql/GetSuggestedNavigationDestinations#csrf-token=IGCdG5FFe83paNRpMkIHsjqRaqax5hwwPNu49DfMUFOHY5Dp2mRZaEQ7gnlWp+/cTPgx8DGOg9pODd2AjhH4xg=="
spellcheck="false"
autocomplete="off"
>
<input type="hidden" class="js-site-search-type-field" name="type" >
<img src="https://assets-cdn.github.com/images/search-shortcut-hint.svg" alt="" class="mr-2 header-search-key-slash">
<div class="Box position-absolute overflow-hidden d-none jump-to-suggestions js-jump-to-suggestions-container">
<ul class="d-none js-jump-to-suggestions-template-container">
<li class="d-flex flex-justify-start flex-items-center p-0 f5 navigation-item js-navigation-item">
<a tabindex="-1" class="no-underline d-flex flex-auto flex-items-center p-2 jump-to-suggestions-path js-jump-to-suggestion-path js-navigation-open" href="">
<div class="jump-to-octicon js-jump-to-octicon mr-2 text-center d-none"></div>
<img class="avatar mr-2 flex-shrink-0 js-jump-to-suggestion-avatar" alt="" aria-label="Team" src="" width="28" height="28">
<div class="jump-to-suggestion-name js-jump-to-suggestion-name flex-auto overflow-hidden text-left no-wrap css-truncate css-truncate-target">
</div>
<div class="border rounded-1 flex-shrink-0 bg-gray px-1 text-gray-light ml-1 f6 d-none js-jump-to-badge-search">
<span class="js-jump-to-badge-search-text-default d-none" aria-label="in this repository">
In this repository
</span>
<span class="js-jump-to-badge-search-text-global d-none" aria-label="in all of GitHub">
All GitHub
</span>
<span aria-hidden="true" class="d-inline-block ml-1 v-align-middle">↵</span>
</div>
<div aria-hidden="true" class="border rounded-1 flex-shrink-0 bg-gray px-1 text-gray-light ml-1 f6 d-none d-on-nav-focus js-jump-to-badge-jump">
Jump to
<span class="d-inline-block ml-1 v-align-middle">↵</span>
</div>
</a>
</li>
<svg height="16" width="16" class="octicon octicon-repo flex-shrink-0 js-jump-to-repo-octicon-template" title="Repository" aria-label="Repository" viewBox="0 0 12 16" version="1.1" role="img"><path fill-rule="evenodd" d="M4 9H3V8h1v1zm0-3H3v1h1V6zm0-2H3v1h1V4zm0-2H3v1h1V2zm8-1v12c0 .55-.45 1-1 1H6v2l-1.5-1.5L3 16v-2H1c-.55 0-1-.45-1-1V1c0-.55.45-1 1-1h10c.55 0 1 .45 1 1zm-1 10H1v2h2v-1h3v1h5v-2zm0-10H2v9h9V1z"/></svg>
<svg height="16" width="16" class="octicon octicon-project flex-shrink-0 js-jump-to-project-octicon-template" title="Project" aria-label="Project" viewBox="0 0 15 16" version="1.1" role="img"><path fill-rule="evenodd" d="M10 12h3V2h-3v10zm-4-2h3V2H6v8zm-4 4h3V2H2v12zm-1 1h13V1H1v14zM14 0H1a1 1 0 0 0-1 1v14a1 1 0 0 0 1 1h13a1 1 0 0 0 1-1V1a1 1 0 0 0-1-1z"/></svg>
<svg height="16" width="16" class="octicon octicon-search flex-shrink-0 js-jump-to-search-octicon-template" title="Search" aria-label="Search" viewBox="0 0 16 16" version="1.1" role="img"><path fill-rule="evenodd" d="M15.7 13.3l-3.81-3.83A5.93 5.93 0 0 0 13 6c0-3.31-2.69-6-6-6S1 2.69 1 6s2.69 6 6 6c1.3 0 2.48-.41 3.47-1.11l3.83 3.81c.19.2.45.3.7.3.25 0 .52-.09.7-.3a.996.996 0 0 0 0-1.41v.01zM7 10.7c-2.59 0-4.7-2.11-4.7-4.7 0-2.59 2.11-4.7 4.7-4.7 2.59 0 4.7 2.11 4.7 4.7 0 2.59-2.11 4.7-4.7 4.7z"/></svg>
</ul>
<ul class="d-none js-jump-to-no-results-template-container">
<li class="d-flex flex-justify-center flex-items-center p-3 f5 d-none">
<span class="text-gray">No suggested jump to results</span>
</li>
</ul>
<ul id="jump-to-results" class="js-navigation-container jump-to-suggestions-results-container js-jump-to-suggestions-results-container" >
<li class="d-flex flex-justify-center flex-items-center p-0 f5">
<img src="https://assets-cdn.github.com/images/spinners/octocat-spinner-128.gif" alt="Octocat Spinner Icon" class="m-2" width="28">
</li>
</ul>
</div>
</label>
</form> </div>
</div>
</div>
<span class="d-inline-block">
<div class="HeaderNavlink px-0 py-2 m-0">
<a class="text-bold text-white no-underline" href="/login?return_to=%2Fwurstmeister%2Fkafka-docker%2Fblob%2Fmaster%2Foverrides%2F0.9.0.1.sh" data-ga-click="(Logged out) Header, clicked Sign in, text:sign-in; experiment:site_header_dropdowns; group:control">Sign in</a>
<span class="text-gray">or</span>
<a class="text-bold text-white no-underline" href="/join?source=header-repo" data-ga-click="(Logged out) Header, clicked Sign up, text:sign-up; experiment:site_header_dropdowns; group:control">Sign up</a>
</div>
</span>
</div>
</div>
</div>
</header>
</div>
<div id="start-of-content" class="show-on-focus"></div>
<div id="js-flash-container">
</div>
<div role="main" class="application-main ">
<div itemscope itemtype="http://schema.org/SoftwareSourceCode" class="">
<div id="js-repo-pjax-container" data-pjax-container >
<div class="pagehead repohead instapaper_ignore readability-menu experiment-repo-nav ">
<div class="repohead-details-container clearfix container">
<ul class="pagehead-actions">
<li>
<a href="/login?return_to=%2Fwurstmeister%2Fkafka-docker"
class="btn btn-sm btn-with-count tooltipped tooltipped-n"
aria-label="You must be signed in to watch a repository" rel="nofollow">
<svg class="octicon octicon-eye" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M8.06 2C3 2 0 8 0 8s3 6 8.06 6C13 14 16 8 16 8s-3-6-7.94-6zM8 12c-2.2 0-4-1.78-4-4 0-2.2 1.8-4 4-4 2.22 0 4 1.8 4 4 0 2.22-1.78 4-4 4zm2-4c0 1.11-.89 2-2 2-1.11 0-2-.89-2-2 0-1.11.89-2 2-2 1.11 0 2 .89 2 2z"/></svg>
Watch
</a>
<a class="social-count" href="/wurstmeister/kafka-docker/watchers"
aria-label="103 users are watching this repository">
103
</a>
</li>
<li>
<a href="/login?return_to=%2Fwurstmeister%2Fkafka-docker"
class="btn btn-sm btn-with-count tooltipped tooltipped-n"
aria-label="You must be signed in to star a repository" rel="nofollow">
<svg class="octicon octicon-star" viewBox="0 0 14 16" version="1.1" width="14" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M14 6l-4.9-.64L7 1 4.9 5.36 0 6l3.6 3.26L2.67 14 7 11.67 11.33 14l-.93-4.74L14 6z"/></svg>
Star
</a>
<a class="social-count js-social-count" href="/wurstmeister/kafka-docker/stargazers"
aria-label="1935 users starred this repository">
1,935
</a>
</li>
<li>
<a href="/login?return_to=%2Fwurstmeister%2Fkafka-docker"
class="btn btn-sm btn-with-count tooltipped tooltipped-n"
aria-label="You must be signed in to fork a repository" rel="nofollow">
<svg class="octicon octicon-repo-forked" viewBox="0 0 10 16" version="1.1" width="10" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M8 1a1.993 1.993 0 0 0-1 3.72V6L5 8 3 6V4.72A1.993 1.993 0 0 0 2 1a1.993 1.993 0 0 0-1 3.72V6.5l3 3v1.78A1.993 1.993 0 0 0 5 15a1.993 1.993 0 0 0 1-3.72V9.5l3-3V4.72A1.993 1.993 0 0 0 8 1zM2 4.2C1.34 4.2.8 3.65.8 3c0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2zm3 10c-.66 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2zm3-10c-.66 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2z"/></svg>
Fork
</a>
<a href="/wurstmeister/kafka-docker/network/members" class="social-count"
aria-label="1132 users forked this repository">
1,132
</a>
</li>
</ul>
<h1 class="public ">
<svg class="octicon octicon-repo" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M4 9H3V8h1v1zm0-3H3v1h1V6zm0-2H3v1h1V4zm0-2H3v1h1V2zm8-1v12c0 .55-.45 1-1 1H6v2l-1.5-1.5L3 16v-2H1c-.55 0-1-.45-1-1V1c0-.55.45-1 1-1h10c.55 0 1 .45 1 1zm-1 10H1v2h2v-1h3v1h5v-2zm0-10H2v9h9V1z"/></svg>
<span class="author" itemprop="author"><a class="url fn" rel="author" href="/wurstmeister">wurstmeister</a></span><!--
--><span class="path-divider">/</span><!--
--><strong itemprop="name"><a data-pjax="#js-repo-pjax-container" href="/wurstmeister/kafka-docker">kafka-docker</a></strong>
</h1>
</div>
<nav class="reponav js-repo-nav js-sidenav-container-pjax container"
itemscope
itemtype="http://schema.org/BreadcrumbList"
role="navigation"
data-pjax="#js-repo-pjax-container">
<span itemscope itemtype="http://schema.org/ListItem" itemprop="itemListElement">
<a class="js-selected-navigation-item selected reponav-item" itemprop="url" data-hotkey="g c" data-selected-links="repo_source repo_downloads repo_commits repo_releases repo_tags repo_branches repo_packages /wurstmeister/kafka-docker" href="/wurstmeister/kafka-docker">
<svg class="octicon octicon-code" viewBox="0 0 14 16" version="1.1" width="14" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M9.5 3L8 4.5 11.5 8 8 11.5 9.5 13 14 8 9.5 3zm-5 0L0 8l4.5 5L6 11.5 2.5 8 6 4.5 4.5 3z"/></svg>
<span itemprop="name">Code</span>
<meta itemprop="position" content="1">
</a> </span>
<span itemscope itemtype="http://schema.org/ListItem" itemprop="itemListElement">
<a itemprop="url" data-hotkey="g i" class="js-selected-navigation-item reponav-item" data-selected-links="repo_issues repo_labels repo_milestones /wurstmeister/kafka-docker/issues" href="/wurstmeister/kafka-docker/issues">
<svg class="octicon octicon-issue-opened" viewBox="0 0 14 16" version="1.1" width="14" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M7 2.3c3.14 0 5.7 2.56 5.7 5.7s-2.56 5.7-5.7 5.7A5.71 5.71 0 0 1 1.3 8c0-3.14 2.56-5.7 5.7-5.7zM7 1C3.14 1 0 4.14 0 8s3.14 7 7 7 7-3.14 7-7-3.14-7-7-7zm1 3H6v5h2V4zm0 6H6v2h2v-2z"/></svg>
<span itemprop="name">Issues</span>
<span class="Counter">38</span>
<meta itemprop="position" content="2">
</a> </span>
<span itemscope itemtype="http://schema.org/ListItem" itemprop="itemListElement">
<a data-hotkey="g p" itemprop="url" class="js-selected-navigation-item reponav-item" data-selected-links="repo_pulls checks /wurstmeister/kafka-docker/pulls" href="/wurstmeister/kafka-docker/pulls">
<svg class="octicon octicon-git-pull-request" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M11 11.28V5c-.03-.78-.34-1.47-.94-2.06C9.46 2.35 8.78 2.03 8 2H7V0L4 3l3 3V4h1c.27.02.48.11.69.31.21.2.3.42.31.69v6.28A1.993 1.993 0 0 0 10 15a1.993 1.993 0 0 0 1-3.72zm-1 2.92c-.66 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2zM4 3c0-1.11-.89-2-2-2a1.993 1.993 0 0 0-1 3.72v6.56A1.993 1.993 0 0 0 2 15a1.993 1.993 0 0 0 1-3.72V4.72c.59-.34 1-.98 1-1.72zm-.8 10c0 .66-.55 1.2-1.2 1.2-.65 0-1.2-.55-1.2-1.2 0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2zM2 4.2C1.34 4.2.8 3.65.8 3c0-.65.55-1.2 1.2-1.2.65 0 1.2.55 1.2 1.2 0 .65-.55 1.2-1.2 1.2z"/></svg>
<span itemprop="name">Pull requests</span>
<span class="Counter">7</span>
<meta itemprop="position" content="3">
</a> </span>
<a data-hotkey="g b" class="js-selected-navigation-item reponav-item" data-selected-links="repo_projects new_repo_project repo_project /wurstmeister/kafka-docker/projects" href="/wurstmeister/kafka-docker/projects">
<svg class="octicon octicon-project" viewBox="0 0 15 16" version="1.1" width="15" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M10 12h3V2h-3v10zm-4-2h3V2H6v8zm-4 4h3V2H2v12zm-1 1h13V1H1v14zM14 0H1a1 1 0 0 0-1 1v14a1 1 0 0 0 1 1h13a1 1 0 0 0 1-1V1a1 1 0 0 0-1-1z"/></svg>
Projects
<span class="Counter" >0</span>
</a>
<a class="js-selected-navigation-item reponav-item" data-hotkey="g w" data-selected-links="repo_wiki /wurstmeister/kafka-docker/wiki" href="/wurstmeister/kafka-docker/wiki">
<svg class="octicon octicon-book" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M3 5h4v1H3V5zm0 3h4V7H3v1zm0 2h4V9H3v1zm11-5h-4v1h4V5zm0 2h-4v1h4V7zm0 2h-4v1h4V9zm2-6v9c0 .55-.45 1-1 1H9.5l-1 1-1-1H2c-.55 0-1-.45-1-1V3c0-.55.45-1 1-1h5.5l1 1 1-1H15c.55 0 1 .45 1 1zm-8 .5L7.5 3H2v9h6V3.5zm7-.5H9.5l-.5.5V12h6V3z"/></svg>
Wiki
</a>
<a class="js-selected-navigation-item reponav-item" data-selected-links="repo_graphs repo_contributors dependency_graph pulse /wurstmeister/kafka-docker/pulse" href="/wurstmeister/kafka-docker/pulse">
<svg class="octicon octicon-graph" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M16 14v1H0V0h1v14h15zM5 13H3V8h2v5zm4 0H7V3h2v10zm4 0h-2V6h2v7z"/></svg>
Insights
</a>
</nav>
</div>
<div class="container new-discussion-timeline experiment-repo-nav ">
<div class="repository-content ">
<a class="d-none js-permalink-shortcut" data-hotkey="y" href="/wurstmeister/kafka-docker/blob/2cb6b76b377ce9423b18654735101b5e58be512c/overrides/0.9.0.1.sh">Permalink</a>
<!-- blob contrib key: blob_contributors:v21:9c7702fc9aa9c0cc9662e14161a438ec -->
<div class="signup-prompt-bg rounded-1">
<div class="signup-prompt p-4 text-center mb-4 rounded-1">
<div class="position-relative">
<!-- '"` --><!-- </textarea></xmp> --></option></form><form action="/site/dismiss_signup_prompt" accept-charset="UTF-8" method="post"><input name="utf8" type="hidden" value="✓" /><input type="hidden" name="authenticity_token" value="PLvmuz6TjcNc1l57z1P3Zt8D66NWA/xQ5MGfhNOh/XKbGMqH4gZtxlijpa2Z/8ed18oWytYsVPLA0oflljTiMw==" />
<button type="submit" class="position-absolute top-0 right-0 btn-link link-gray" data-ga-click="(Logged out) Sign up prompt, clicked Dismiss, text:dismiss">
Dismiss
</button>
</form> <h3 class="pt-2">Join GitHub today</h3>
<p class="col-6 mx-auto">GitHub is home to over 28 million developers working together to host and review code, manage projects, and build software together.</p>
<a class="btn btn-primary" href="/join?source=prompt-blob-show" data-ga-click="(Logged out) Sign up prompt, clicked Sign up, text:sign-up">Sign up</a>
</div>
</div>
</div>
<div class="file-navigation">
<div class="select-menu branch-select-menu js-menu-container js-select-menu float-left">
<button class=" btn btn-sm select-menu-button js-menu-target css-truncate" data-hotkey="w"
type="button" aria-label="Switch branches or tags" aria-expanded="false" aria-haspopup="true">
<i>Branch:</i>
<span class="js-select-button css-truncate-target">master</span>
</button>
<div class="select-menu-modal-holder js-menu-content js-navigation-container" data-pjax>
<div class="select-menu-modal">
<div class="select-menu-header">
<svg class="octicon octicon-x js-menu-close" role="img" aria-label="Close" viewBox="0 0 12 16" version="1.1" width="12" height="16"><path fill-rule="evenodd" d="M7.48 8l3.75 3.75-1.48 1.48L6 9.48l-3.75 3.75-1.48-1.48L4.52 8 .77 4.25l1.48-1.48L6 6.52l3.75-3.75 1.48 1.48L7.48 8z"/></svg>
<span class="select-menu-title">Switch branches/tags</span>
</div>
<div class="select-menu-filters">
<div class="select-menu-text-filter">
<input type="text" aria-label="Filter branches/tags" id="context-commitish-filter-field" class="form-control js-filterable-field js-navigation-enable" placeholder="Filter branches/tags">
</div>
<div class="select-menu-tabs">
<ul>
<li class="select-menu-tab">
<a href="#" data-tab-filter="branches" data-filter-placeholder="Filter branches/tags" class="js-select-menu-tab" role="tab">Branches</a>
</li>
<li class="select-menu-tab">
<a href="#" data-tab-filter="tags" data-filter-placeholder="Find a tag…" class="js-select-menu-tab" role="tab">Tags</a>
</li>
</ul>
</div>
</div>
<div class="select-menu-list select-menu-tab-bucket js-select-menu-tab-bucket" data-tab-filter="branches" role="menu">
<div data-filterable-for="context-commitish-filter-field" data-filterable-type="substring">
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/blob/0.8.2/overrides/0.9.0.1.sh"
data-name="0.8.2"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target js-select-menu-filter-text">
0.8.2
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/blob/0.9.x/overrides/0.9.0.1.sh"
data-name="0.9.x"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target js-select-menu-filter-text">
0.9.x
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/blob/0.10.0/overrides/0.9.0.1.sh"
data-name="0.10.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target js-select-menu-filter-text">
0.10.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/blob/CREATE_TOPIC/overrides/0.9.0.1.sh"
data-name="CREATE_TOPIC"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target js-select-menu-filter-text">
CREATE_TOPIC
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/blob/dev/overrides/0.9.0.1.sh"
data-name="dev"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target js-select-menu-filter-text">
dev
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/blob/gh-pages/overrides/0.9.0.1.sh"
data-name="gh-pages"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target js-select-menu-filter-text">
gh-pages
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open selected"
href="/wurstmeister/kafka-docker/blob/master/overrides/0.9.0.1.sh"
data-name="master"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target js-select-menu-filter-text">
master
</span>
</a>
</div>
<div class="select-menu-no-results">Nothing to show</div>
</div>
<div class="select-menu-list select-menu-tab-bucket js-select-menu-tab-bucket" data-tab-filter="tags">
<div data-filterable-for="context-commitish-filter-field" data-filterable-type="substring">
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/1.1.0/overrides/0.9.0.1.sh"
data-name="1.1.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="1.1.0">
1.1.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/1.0.1/overrides/0.9.0.1.sh"
data-name="1.0.1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="1.0.1">
1.0.1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/1.0.0/overrides/0.9.0.1.sh"
data-name="1.0.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="1.0.0">
1.0.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.11.0.1/overrides/0.9.0.1.sh"
data-name="0.11.0.1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.11.0.1">
0.11.0.1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.11.0.0/overrides/0.9.0.1.sh"
data-name="0.11.0.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.11.0.0">
0.11.0.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.2.1/overrides/0.9.0.1.sh"
data-name="0.10.2.1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.2.1">
0.10.2.1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.2.0/overrides/0.9.0.1.sh"
data-name="0.10.2.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.2.0">
0.10.2.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.2.0-1/overrides/0.9.0.1.sh"
data-name="0.10.2.0-1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.2.0-1">
0.10.2.0-1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.1.1/overrides/0.9.0.1.sh"
data-name="0.10.1.1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.1.1">
0.10.1.1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.1.0/overrides/0.9.0.1.sh"
data-name="0.10.1.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.1.0">
0.10.1.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.1.0-2/overrides/0.9.0.1.sh"
data-name="0.10.1.0-2"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.1.0-2">
0.10.1.0-2
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.1.0-1/overrides/0.9.0.1.sh"
data-name="0.10.1.0-1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.1.0-1">
0.10.1.0-1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.0.1/overrides/0.9.0.1.sh"
data-name="0.10.0.1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.0.1">
0.10.0.1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.0.1-2/overrides/0.9.0.1.sh"
data-name="0.10.0.1-2"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.0.1-2">
0.10.0.1-2
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.0.1-1/overrides/0.9.0.1.sh"
data-name="0.10.0.1-1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.0.1-1">
0.10.0.1-1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.10.0.0/overrides/0.9.0.1.sh"
data-name="0.10.0.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.10.0.0">
0.10.0.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.9.0.1/overrides/0.9.0.1.sh"
data-name="0.9.0.1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.9.0.1">
0.9.0.1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.9.0.1-1/overrides/0.9.0.1.sh"
data-name="0.9.0.1-1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.9.0.1-1">
0.9.0.1-1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.9.0.0/overrides/0.9.0.1.sh"
data-name="0.9.0.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.9.0.0">
0.9.0.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.9.0.0-1/overrides/0.9.0.1.sh"
data-name="0.9.0.0-1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.9.0.0-1">
0.9.0.0-1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.8.2.2/overrides/0.9.0.1.sh"
data-name="0.8.2.2"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.8.2.2">
0.8.2.2
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.8.2.2-1/overrides/0.9.0.1.sh"
data-name="0.8.2.2-1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.8.2.2-1">
0.8.2.2-1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.8.2.1/overrides/0.9.0.1.sh"
data-name="0.8.2.1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.8.2.1">
0.8.2.1
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.8.2.0/overrides/0.9.0.1.sh"
data-name="0.8.2.0"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.8.2.0">
0.8.2.0
</span>
</a>
<a class="select-menu-item js-navigation-item js-navigation-open "
href="/wurstmeister/kafka-docker/tree/0.8.1.1-1/overrides/0.9.0.1.sh"
data-name="0.8.1.1-1"
data-skip-pjax="true"
rel="nofollow">
<svg class="octicon octicon-check select-menu-item-icon" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M12 5l-8 8-4-4 1.5-1.5L4 10l6.5-6.5L12 5z"/></svg>
<span class="select-menu-item-text css-truncate-target" title="0.8.1.1-1">
0.8.1.1-1
</span>
</a>
</div>
<div class="select-menu-no-results">Nothing to show</div>
</div>
</div>
</div>
</div>
<div class="BtnGroup float-right">
<a href="/wurstmeister/kafka-docker/find/master"
class="js-pjax-capture-input btn btn-sm BtnGroup-item"
data-pjax
data-hotkey="t">
Find file
</a>
<clipboard-copy for="blob-path" class="btn btn-sm BtnGroup-item">
Copy path
</clipboard-copy>
</div>
<div id="blob-path" class="breadcrumb">
<span class="repo-root js-repo-root"><span class="js-path-segment"><a data-pjax="true" href="/wurstmeister/kafka-docker"><span>kafka-docker</span></a></span></span><span class="separator">/</span><span class="js-path-segment"><a data-pjax="true" href="/wurstmeister/kafka-docker/tree/master/overrides"><span>overrides</span></a></span><span class="separator">/</span><strong class="final-path">0.9.0.1.sh</strong>
</div>
</div>
<div class="commit-tease">
<span class="float-right">
<a class="commit-tease-sha" href="/wurstmeister/kafka-docker/commit/bf40f69383884e65a18b7de3f82df26d3c234867" data-pjax>
bf40f69
</a>
<relative-time datetime="2018-05-12T10:22:02Z">May 12, 2018</relative-time>
</span>
<div>
<a rel="contributor" data-skip-pjax="true" data-hovercard-user-id="1625151" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self" href="/sscaling"><img class="avatar" src="https://avatars2.githubusercontent.com/u/1625151?s=40&v=4" width="20" height="20" alt="@sscaling" /></a>
<a class="user-mention" rel="contributor" data-hovercard-user-id="1625151" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self" href="/sscaling">sscaling</a>
<a data-pjax="true" title="Remove listeners for 0.9.0.1 kafka config, so bootstrap process can be the same (#328)" class="message" href="/wurstmeister/kafka-docker/commit/bf40f69383884e65a18b7de3f82df26d3c234867">Remove listeners for 0.9.0.1 kafka config, so bootstrap process can b…</a>
</div>
<div class="commit-tease-contributors">
<details class="details-reset details-overlay details-overlay-dark lh-default text-gray-dark float-left mr-2" id="blob_contributors_box">
<summary class="btn-link" aria-haspopup="dialog" >
<span><strong>1</strong> contributor</span>
</summary>
<details-dialog class="Box Box--overlay d-flex flex-column anim-fade-in fast " aria-label="Users who have contributed to this file">
<div class="Box-header">
<button class="Box-btn-octicon btn-octicon float-right" type="button" aria-label="Close dialog" data-close-dialog>
<svg class="octicon octicon-x" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M7.48 8l3.75 3.75-1.48 1.48L6 9.48l-3.75 3.75-1.48-1.48L4.52 8 .77 4.25l1.48-1.48L6 6.52l3.75-3.75 1.48 1.48L7.48 8z"/></svg>
</button>
<h3 class="Box-title">Users who have contributed to this file</h3>
</div>
<ul class="list-style-none overflow-auto">
<li class="Box-row">
<a class="link-gray-dark no-underline" href="/sscaling">
<img class="avatar mr-2" alt="" src="https://avatars2.githubusercontent.com/u/1625151?s=40&v=4" width="20" height="20" />
sscaling
</a> </li>
</ul>
</details-dialog>
</details>
</div>
</div>
<div class="file">
<div class="file-header">
<div class="file-actions">
<div class="BtnGroup">
<a id="raw-url" class="btn btn-sm BtnGroup-item" href="/wurstmeister/kafka-docker/raw/master/overrides/0.9.0.1.sh">Raw</a>
<a class="btn btn-sm js-update-url-with-hash BtnGroup-item" data-hotkey="b" href="/wurstmeister/kafka-docker/blame/master/overrides/0.9.0.1.sh">Blame</a>
<a rel="nofollow" class="btn btn-sm BtnGroup-item" href="/wurstmeister/kafka-docker/commits/master/overrides/0.9.0.1.sh">History</a>
</div>
<button type="button" class="btn-octicon disabled tooltipped tooltipped-nw"
aria-label="You must be signed in to make or propose changes">
<svg class="octicon octicon-pencil" viewBox="0 0 14 16" version="1.1" width="14" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M0 12v3h3l8-8-3-3-8 8zm3 2H1v-2h1v1h1v1zm10.3-9.3L12 6 9 3l1.3-1.3a.996.996 0 0 1 1.41 0l1.59 1.59c.39.39.39 1.02 0 1.41z"/></svg>
</button>
<button type="button" class="btn-octicon btn-octicon-danger disabled tooltipped tooltipped-nw"
aria-label="You must be signed in to make or propose changes">
<svg class="octicon octicon-trashcan" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M11 2H9c0-.55-.45-1-1-1H5c-.55 0-1 .45-1 1H2c-.55 0-1 .45-1 1v1c0 .55.45 1 1 1v9c0 .55.45 1 1 1h7c.55 0 1-.45 1-1V5c.55 0 1-.45 1-1V3c0-.55-.45-1-1-1zm-1 12H3V5h1v8h1V5h1v8h1V5h1v8h1V5h1v9zm1-10H2V3h9v1z"/></svg>
</button>
</div>
<div class="file-info">
<span class="file-mode" title="File mode">executable file</span>
<span class="file-info-divider"></span>
7 lines (5 sloc)
<span class="file-info-divider"></span>
308 Bytes
</div>
</div>
<div itemprop="text" class="blob-wrapper data type-shell">
<table class="highlight tab-size js-file-line-container" data-tab-size="8">
<tr>
<td id="L1" class="blob-num js-line-number" data-line-number="1"></td>
<td id="LC1" class="blob-code blob-code-inner js-file-line"><span class="pl-c"><span class="pl-c">#!</span>/bin/bash -e</span></td>
</tr>
<tr>
<td id="L2" class="blob-num js-line-number" data-line-number="2"></td>
<td id="LC2" class="blob-code blob-code-inner js-file-line">
</td>
</tr>
<tr>
<td id="L3" class="blob-num js-line-number" data-line-number="3"></td>
<td id="LC3" class="blob-code blob-code-inner js-file-line"><span class="pl-c"><span class="pl-c">#</span> Kafka 0.9.x.x has a 'listeners' config by default. We need to remove this</span></td>
</tr>
<tr>
<td id="L4" class="blob-num js-line-number" data-line-number="4"></td>
<td id="LC4" class="blob-code blob-code-inner js-file-line"><span class="pl-c"><span class="pl-c">#</span> as the user may be configuring via the host.name / advertised.host.name properties</span></td>
</tr>
<tr>
<td id="L5" class="blob-num js-line-number" data-line-number="5"></td>
<td id="LC5" class="blob-code blob-code-inner js-file-line"><span class="pl-c1">echo</span> <span class="pl-s"><span class="pl-pds">"</span>Removing 'listeners' from server.properties pre-bootstrap<span class="pl-pds">"</span></span></td>
</tr>
<tr>
<td id="L6" class="blob-num js-line-number" data-line-number="6"></td>
<td id="LC6" class="blob-code blob-code-inner js-file-line">sed -i -e <span class="pl-s"><span class="pl-pds">'</span>/^listeners=/d<span class="pl-pds">'</span></span> <span class="pl-s"><span class="pl-pds">"</span><span class="pl-smi">$KAFKA_HOME</span>/config/server.properties<span class="pl-pds">"</span></span></td>
</tr>
</table>
<details class="details-reset details-overlay BlobToolbar position-absolute js-file-line-actions dropdown d-none" aria-hidden="true">
<summary class="btn-octicon ml-0 px-2 p-0 bg-white border border-gray-dark rounded-1" aria-label="Inline file action toolbar">
<svg class="octicon octicon-kebab-horizontal" viewBox="0 0 13 16" version="1.1" width="13" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M1.5 9a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3zm5 0a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3zM13 7.5a1.5 1.5 0 1 1-3 0 1.5 1.5 0 0 1 3 0z"/></svg>
</summary>
<details-menu>
<ul class="BlobToolbar-dropdown dropdown-menu dropdown-menu-se mt-2">
<li><clipboard-copy role="menuitem" class="dropdown-item" id="js-copy-lines" style="cursor:pointer;" data-original-text="Copy lines">Copy lines</clipboard-copy></li>
<li><clipboard-copy role="menuitem" class="dropdown-item" id="js-copy-permalink" style="cursor:pointer;" data-original-text="Copy permalink">Copy permalink</clipboard-copy></li>
<li><a class="dropdown-item js-update-url-with-hash" id="js-view-git-blame" role="menuitem" href="/wurstmeister/kafka-docker/blame/2cb6b76b377ce9423b18654735101b5e58be512c/overrides/0.9.0.1.sh">View git blame</a></li>
<li><a class="dropdown-item" id="js-new-issue" role="menuitem" href="/wurstmeister/kafka-docker/issues/new">Open new issue</a></li>
</ul>
</details-menu>
</details>
</div>
</div>
<details class="details-reset details-overlay details-overlay-dark">
<summary data-hotkey="l" aria-label="Jump to line"></summary>
<details-dialog class="Box Box--overlay d-flex flex-column anim-fade-in fast linejump" aria-label="Jump to line">
<!-- '"` --><!-- </textarea></xmp> --></option></form><form class="js-jump-to-line-form Box-body d-flex" action="" accept-charset="UTF-8" method="get"><input name="utf8" type="hidden" value="✓" />
<input class="form-control flex-auto mr-3 linejump-input js-jump-to-line-field" type="text" placeholder="Jump to line…" aria-label="Jump to line" autofocus>
<button type="submit" class="btn" data-close-dialog>Go</button>
</form> </details-dialog>
</details>
</div>
<div class="modal-backdrop js-touch-events"></div>
</div>
</div>
</div>
</div>
<div class="footer container-lg px-3" role="contentinfo">
<div class="position-relative d-flex flex-justify-between pt-6 pb-2 mt-6 f6 text-gray border-top border-gray-light ">
<ul class="list-style-none d-flex flex-wrap ">
<li class="mr-3">© 2018 <span title="0.11883s from unicorn-6fc9d4bcf-2kgfk">GitHub</span>, Inc.</li>
<li class="mr-3"><a data-ga-click="Footer, go to terms, text:terms" href="https://github.com/site/terms">Terms</a></li>
<li class="mr-3"><a data-ga-click="Footer, go to privacy, text:privacy" href="https://github.com/site/privacy">Privacy</a></li>
<li class="mr-3"><a href="https://help.github.com/articles/github-security/" data-ga-click="Footer, go to security, text:security">Security</a></li>
<li class="mr-3"><a href="https://status.github.com/" data-ga-click="Footer, go to status, text:status">Status</a></li>
<li><a data-ga-click="Footer, go to help, text:help" href="https://help.github.com">Help</a></li>
</ul>
<a aria-label="Homepage" title="GitHub" class="footer-octicon" href="https://github.com">
<svg height="24" class="octicon octicon-mark-github" viewBox="0 0 16 16" version="1.1" width="24" aria-hidden="true"><path fill-rule="evenodd" d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0 0 16 8c0-4.42-3.58-8-8-8z"/></svg>
</a>
<ul class="list-style-none d-flex flex-wrap ">
<li class="mr-3"><a data-ga-click="Footer, go to contact, text:contact" href="https://github.com/contact">Contact GitHub</a></li>
<li class="mr-3"><a href="https://developer.github.com" data-ga-click="Footer, go to api, text:api">API</a></li>
<li class="mr-3"><a href="https://training.github.com" data-ga-click="Footer, go to training, text:training">Training</a></li>
<li class="mr-3"><a href="https://shop.github.com" data-ga-click="Footer, go to shop, text:shop">Shop</a></li>
<li class="mr-3"><a href="https://blog.github.com" data-ga-click="Footer, go to blog, text:blog">Blog</a></li>
<li><a data-ga-click="Footer, go to about, text:about" href="https://github.com/about">About</a></li>
</ul>
</div>
<div class="d-flex flex-justify-center pb-6">
<span class="f6 text-gray-light"></span>
</div>
</div>
<div id="ajax-error-message" class="ajax-error-message flash flash-error">
<svg class="octicon octicon-alert" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M8.893 1.5c-.183-.31-.52-.5-.887-.5s-.703.19-.886.5L.138 13.499a.98.98 0 0 0 0 1.001c.193.31.53.501.886.501h13.964c.367 0 .704-.19.877-.5a1.03 1.03 0 0 0 .01-1.002L8.893 1.5zm.133 11.497H6.987v-2.003h2.039v2.003zm0-3.004H6.987V5.987h2.039v4.006z"/></svg>
<button type="button" class="flash-close js-ajax-error-dismiss" aria-label="Dismiss error">
<svg class="octicon octicon-x" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M7.48 8l3.75 3.75-1.48 1.48L6 9.48l-3.75 3.75-1.48-1.48L4.52 8 .77 4.25l1.48-1.48L6 6.52l3.75-3.75 1.48 1.48L7.48 8z"/></svg>
</button>
You can’t perform that action at this time.
</div>
<script crossorigin="anonymous" integrity="sha512-wIuAKDhvxe9wCaNR1tzCk3rtl+wXEWC28rmRpzmx0h98VEeWC6Y3xCWV1xAW6NP6eQQX+x8ZGhW6Sdut+mLRuw==" type="application/javascript" src="https://assets-cdn.github.com/assets/compat-a48960bafc17c30572990bbab3664e9c.js"></script>
<script crossorigin="anonymous" integrity="sha512-+v/Ml9bhCZBNwI4OpE2KYUOOnV8y0TR5ZdiCuK2M48oQ6+5nqdJypwtiYw/FcAB19a+DtH2qkxZh3JK3U2EWng==" type="application/javascript" src="https://assets-cdn.github.com/assets/frameworks-20bd6212a5bea8a82fa9be8f0657b328.js"></script>
<script crossorigin="anonymous" async="async" integrity="sha512-4acL5OJDvDq8vS+GPIhHPYsS7T1M/a+ioG5juPcRdzukUvqiNOi7i84hgBsH5qM9zuivNF8/VfF5p5n1Qg+GKw==" type="application/javascript" src="https://assets-cdn.github.com/assets/github-d05299b8dc956f829d6818f116d102b9.js"></script>
<div class="js-stale-session-flash stale-session-flash flash flash-warn flash-banner d-none">
<svg class="octicon octicon-alert" viewBox="0 0 16 16" version="1.1" width="16" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M8.893 1.5c-.183-.31-.52-.5-.887-.5s-.703.19-.886.5L.138 13.499a.98.98 0 0 0 0 1.001c.193.31.53.501.886.501h13.964c.367 0 .704-.19.877-.5a1.03 1.03 0 0 0 .01-1.002L8.893 1.5zm.133 11.497H6.987v-2.003h2.039v2.003zm0-3.004H6.987V5.987h2.039v4.006z"/></svg>
<span class="signed-in-tab-flash">You signed in with another tab or window. <a href="">Reload</a> to refresh your session.</span>
<span class="signed-out-tab-flash">You signed out in another tab or window. <a href="">Reload</a> to refresh your session.</span>
</div>
<div class="facebox" id="facebox" style="display:none;">
<div class="facebox-popup">
<div class="facebox-content" role="dialog" aria-labelledby="facebox-header" aria-describedby="facebox-description">
</div>
<button type="button" class="facebox-close js-facebox-close" aria-label="Close modal">
<svg class="octicon octicon-x" viewBox="0 0 12 16" version="1.1" width="12" height="16" aria-hidden="true"><path fill-rule="evenodd" d="M7.48 8l3.75 3.75-1.48 1.48L6 9.48l-3.75 3.75-1.48-1.48L4.52 8 .77 4.25l1.48-1.48L6 6.52l3.75-3.75 1.48 1.48L7.48 8z"/></svg>
</button>
</div>
</div>
<div class="Popover js-hovercard-content position-absolute" style="display: none; outline: none;" tabindex="0">
<div class="Popover-message Popover-message--bottom-left Popover-message--large Box box-shadow-large" style="width:360px;">
</div>
</div>
<div id="hovercard-aria-description" class="sr-only">
Press h to open a hovercard with more details.
</div>
</body>
</html>
|
dprasanthv/DockerFiles
|
kafka/arm/overrides/0.9.0.1.sh
|
Shell
|
mit
| 65,095 |
#!/usr/bin/env sh
# Install the TongWen conversion tools globally via npm.
# Abort on the first failed install instead of silently continuing.
set -e

npm install tongwen-core -g
npm install tongwen-cli -g
|
foreachsam/package-node-tongwen
|
install.sh
|
Shell
|
mit
| 73 |
#!/usr/bin/env bash
# clean.sh -- remove generated artifacts (debug log, installed modules,
# coverage output) from the repository root.
set -e

# Resolve the repository root (parent of this script's directory) robustly,
# even when invoked from another working directory or from a path containing
# spaces.  Use '&&' so a failed 'cd' cannot leak the wrong 'pwd'.
ROOT_DIR=$(cd "$(dirname "$0")/.." && pwd)
cd "$ROOT_DIR"

rm -rf \
  npm-debug.log \
  node_modules \
  coverage
|
yvele/node-thunkify-object
|
script/clean.sh
|
Shell
|
mit
| 164 |
# Bulk rename the badly labelled CRIRES files.
# Iterate with a shell glob instead of parsing `ls` output, which breaks on
# unusual filenames and spawns a needless process.
for f in CRIRES*:*:*.fits; do
  # Skip the literal pattern when nothing matches (nullglob is not assumed).
  [ -e "$f" ] || continue
  python ~/bin/file_renamer.py "$f" -x : -v -
done
|
jason-neal/equanimous-octo-tribble
|
octotribble/extraction/CRIRES_renamer.sh
|
Shell
|
mit
| 132 |
#!/bin/bash
# Run the Django test suite quietly; on failure, dump the captured stderr log
# and exit non-zero so the caller (e.g. CI) marks the build as broken.
if ! python manage.py test --noinput > /dev/null 2> /var/log/test.log; then
  cat /var/log/test.log
  exit 1
fi
|
mujinyun2009/shakespeare-census
|
test.sh
|
Shell
|
mit
| 141 |
#!/bin/sh
#
# echo+ --extended echo command
#
# Echoes its arguments, optionally sleeping first (-d), writing a message to
# stderr (-e), sending itself a signal (-s) and exiting with a chosen
# status (-x).
#
# Remarks:
# This script is useful for mocking up behaviour in testing.
#
# Log helpers: callers pass a printf format plus arguments; output goes to
# stderr so stdout stays clean for the echoed message.
log_message() { printf "$@"; printf "\n"; } >&2
notice() { log_message "$@"; }
info() { if [ "$verbose" ]; then log_message "$@"; fi; }

version=
build=
delay=
signal=
status=0
verbose=
# Embed a real newline in the usage text: plain 'echo' does not expand '\n',
# so the previous "Usage:\necho+ ..." printed a literal backslash-n.
usage="Usage:
echo+ [-d delay] [-e stderr-message] [-s signal] [-x status] message"

while getopts "d:e:s:x:v?" c; do
    case $c in
    d)  delay=$OPTARG;;
    e)  printf '%s\n' "$OPTARG" >&2;;
    s)  signal=$OPTARG;;
    x)  status=$OPTARG;;
    v)  verbose=1;;
    \?) echo "echo+ version: $version.$build" >&2
        printf '%s\n' "$usage" >&2
        exit 2;;
    esac
done
shift $(($OPTIND - 1))

if [ "$delay" ]; then
    info 'sleeping for %ds' "$delay"
    sleep "$delay"
fi
echo "$@"
if [ "$signal" ]; then
    info 'sending %s signal to %s...' "$signal" "$$"
    kill -s "$signal" $$
fi
exit $status
|
tim-rose/devkit
|
bin/echo+.sh
|
Shell
|
mit
| 880 |
#!/bin/bash
# Bootstrap a local fiubar development instance: apply migrations, load the
# initial fixture, create an admin account, then start the dev server.
# 'set -e' aborts on any failed step; previously a failed 'createsuperuser'
# still launched the server.
set -e

python manage.py makemigrations
python manage.py migrate
python manage.py loaddata fiubar.json

echo
echo "Crear un usuario administrador"
python manage.py createsuperuser

# Bind on all interfaces so the server is reachable from outside a VM/container.
python manage.py runserver 0.0.0.0:8000 --settings fiubar.config.settings.local
|
maru/fiubar
|
local/start.sh
|
Shell
|
mit
| 286 |
#!/bin/sh
# CocoaPods-generated build-phase script: copies the pod frameworks for the
# Pods-ZDropdown_Tests target into the app bundle, stripping and re-signing
# them as needed.  The *_DIR / *_PATH variables are supplied by Xcode's
# build environment.
set -e
# Echo the command before running it, mirroring Xcode's build transcript style.
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the current platform; used below
# only for the pre-Xcode-7 Swift-runtime embedding path.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
#######################################
# Copy one built framework into the app's Frameworks folder, then strip
# invalid architectures and re-sign it.
# Globals:   BUILT_PRODUCTS_DIR, CONFIGURATION_BUILD_DIR,
#            FRAMEWORKS_FOLDER_PATH, SWIFT_STDLIB_PATH, XCODE_VERSION_MAJOR
# Arguments: $1 - framework path, relative to BUILT_PRODUCTS_DIR or absolute
#######################################
install_framework()
{
  # Locate the framework: full path under BUILT_PRODUCTS_DIR first, then by
  # basename, finally as a literal path.  NOTE(review): if none is readable,
  # ${source} stays unset and the rsync below would fail — presumably Xcode
  # always provides one of these.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Follow a symlinked product so rsync copies the real framework.
  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  # Resolve the embedded executable: framework layout first, bare dylib second.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # List @rpath Swift dylibs the binary links; propagate otool's exit code
    # through the pipeline via PIPESTATUS.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
#######################################
# Signs a framework with the provided identity.
# Globals:   EXPANDED_CODE_SIGN_IDENTITY, EXPANDED_CODE_SIGN_IDENTITY_NAME,
#            CODE_SIGNING_REQUIRED, CODE_SIGNING_ALLOWED (set by Xcode)
# Arguments: $1 - path of the framework/dylib to sign
#######################################
code_sign_if_enabled() {
  # Sign only when an identity is resolved and the build settings allow it.
  # '-a' inside '[ ]' is deprecated/ambiguous; use separate tests with '&&'.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" ] && [ "${CODE_SIGNING_REQUIRED}" != "NO" ] && [ "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code sign identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
    # Quote the identity so values containing spaces cannot word-split.
    /usr/bin/codesign --force --sign "${EXPANDED_CODE_SIGN_IDENTITY}" --preserve-metadata=identifier,entitlements "$1"
  fi
}
#######################################
# Strip invalid architectures from a fat binary, in place.
# Globals:   VALID_ARCHS - space-separated list of architectures to keep
#            (set by Xcode); binary/archs/stripped are written (not local).
# Arguments: $1 - path to the binary to slim down
# Outputs:   logs the stripped architectures, if any
#######################################
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file (text after the last ':' of lipo -info).
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    # Substring match against VALID_ARCHS decides whether to keep this slice.
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the pod framework for whichever build configuration is active.
# Debug and Release install the exact same framework, so dispatch once.
case "$CONFIGURATION" in
  Debug|Release)
    install_framework "Pods-ZDropdown_Tests/ZDropdown.framework"
    ;;
esac
|
CaryZheng/ZDropdownWidget
|
Example/Pods/Target Support Files/Pods-ZDropdown_Tests/Pods-ZDropdown_Tests-frameworks.sh
|
Shell
|
mit
| 3,552 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2016:0098
#
# Security announcement date: 2016-02-02 14:50:05 UTC
# Script generation date: 2017-01-01 21:16:58 UTC
#
# Operating System: Red Hat 7
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - java-1.8.0-ibm.i686:1.8.0.2.10-1jpp.1.el7
# - java-1.8.0-ibm.x86_64:1.8.0.2.10-1jpp.1.el7
# - java-1.8.0-ibm-demo.x86_64:1.8.0.2.10-1jpp.1.el7
# - java-1.8.0-ibm-devel.i686:1.8.0.2.10-1jpp.1.el7
# - java-1.8.0-ibm-devel.x86_64:1.8.0.2.10-1jpp.1.el7
# - java-1.8.0-ibm-jdbc.x86_64:1.8.0.2.10-1jpp.1.el7
# - java-1.8.0-ibm-plugin.x86_64:1.8.0.2.10-1jpp.1.el7
# - java-1.8.0-ibm-src.x86_64:1.8.0.2.10-1jpp.1.el7
#
# Last versions recommanded by security team:
# - java-1.8.0-ibm.i686:1.8.0.3.20-1jpp.1.el7_2
# - java-1.8.0-ibm.x86_64:1.8.0.3.20-1jpp.1.el7_2
# - java-1.8.0-ibm-demo.x86_64:1.8.0.3.20-1jpp.1.el7_2
# - java-1.8.0-ibm-devel.i686:1.8.0.3.20-1jpp.1.el7_2
# - java-1.8.0-ibm-devel.x86_64:1.8.0.3.20-1jpp.1.el7_2
# - java-1.8.0-ibm-jdbc.x86_64:1.8.0.3.20-1jpp.1.el7_2
# - java-1.8.0-ibm-plugin.x86_64:1.8.0.3.20-1jpp.1.el7_2
# - java-1.8.0-ibm-src.x86_64:1.8.0.3.20-1jpp.1.el7_2
#
# CVE List:
# - CVE-2015-5041
# - CVE-2015-7575
# - CVE-2015-8126
# - CVE-2015-8472
# - CVE-2016-0402
# - CVE-2016-0448
# - CVE-2016-0466
# - CVE-2016-0475
# - CVE-2016-0483
# - CVE-2016-0494
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Install each fixed java-1.8.0-ibm package at the patched version.
for package in \
    java-1.8.0-ibm.i686-1.8.0.3.20 \
    java-1.8.0-ibm.x86_64-1.8.0.3.20 \
    java-1.8.0-ibm-demo.x86_64-1.8.0.3.20 \
    java-1.8.0-ibm-devel.i686-1.8.0.3.20 \
    java-1.8.0-ibm-devel.x86_64-1.8.0.3.20 \
    java-1.8.0-ibm-jdbc.x86_64-1.8.0.3.20 \
    java-1.8.0-ibm-plugin.x86_64-1.8.0.3.20 \
    java-1.8.0-ibm-src.x86_64-1.8.0.3.20
do
    sudo yum install "$package" -y
done
|
Cyberwatch/cbw-security-fixes
|
Red_Hat_7/x86_64/2016/RHSA-2016:0098.sh
|
Shell
|
mit
| 2,005 |
#!/usr/bin/env bash
# Build and publish an "angular-complete" bower package for one AngularJS ref.
# Usage: release.sh <tag-or-commit>   (refs starting with "v" are treated as tags)
set -ue
ref=$1
# BSD find spells the depth limit "-depth N", GNU find "-maxdepth N";
# probe which form this system accepts.
depthopt=''
if find . -depth 1 >/dev/null 2>/dev/null; then
depthopt='-depth'
else
depthopt='-maxdepth'
fi
rm -rf repo/
if [[ ${ref:0:1} == "v" ]]; then
# Tag: a shallow clone of just that tag is enough.
name=$ref
git clone https://github.com/angular/angular.js.git repo/ --branch "$ref" --depth 1
cd repo/
else
# Arbitrary commit: full clone, then derive a human-readable name.
git clone https://github.com/angular/angular.js.git repo/
cd repo/
git checkout "$ref"
name=$(git describe --tags)
fi
# Run the version-specific build recipe shipped next to this script.
# (Quoted so an unexpected space in the path cannot split the argument.)
source "../build/$(git describe --tags --abbrev=0).sh"
cd build/
# Collect the top-level build artifacts (minified files excluded) as a
# comma-separated list of quoted names for bower.json's "main".
IFS=$'\n' LINES=($(find . $depthopt 1 | grep -v '\.min\.js$' | awk '{print "\"" $0 "\""}'))
MAIN=$(printf ", %s" "${LINES[@]}")
MAIN=${MAIN:1}
cat <<EOF > bower.json
{
"name": "angular-complete",
"version": "$name",
"description": "All AngularJS releases, including core directives and i18n files",
"keywords": [
"angular",
"angularjs",
"directive",
"directives",
"i18n",
"internationalization"
],
"main": [ $MAIN ],
"license": "MIT"
}
EOF
# Publish: initialise a fresh repo in build/, reuse the outer remote config,
# commit everything, tag it with the release name and force-push the tag.
git init
cp ../../.git/config .git/
git add -A
git commit -m "$name" --no-verify
git tag "$name"
git push origin --tags -f
|
datapad/bower-angular-all
|
release.sh
|
Shell
|
mit
| 1,102 |
#!/bin/bash
# IMPORTANT
# ---------
# This is an auto generated file with React CDK.
# Do not modify this file.
# Use `.scripts/user/prepublish.sh instead`.
# Transpile ./src into ES5 under ./dist, copy static assets across,
# then hand control to the user-customisable prepublish hook.
printf "=> Transpiling 'src' into ES5 ...\n\n"
rm -rf ./dist
NODE_ENV=production ./node_modules/.bin/babel --ignore tests,stories --plugins "transform-runtime" ./src --out-dir ./dist
cp -Rf ./src/static/ ./dist/static/
printf "\n=> Transpiling completed.\n"
. .scripts/user/prepublish.sh
|
CyberLight/react-typeahead2
|
.scripts/prepublish.sh
|
Shell
|
mit
| 452 |
#!/bin/sh
# Provision a freshly-imaged host over SSH. Each step is wrapped in `dotenv`,
# which supplies IP, ORIG_PASSWD and PASSWD from the environment file; the
# command strings are single-quoted on purpose so the variables are expanded
# by dotenv's shell, not here. Requires sshpass and the sibling set_*.sh /
# install_*.sh scripts fed to the remote `sh` over stdin.
set -xe
# Root phase (factory password): enable sudo, then set the new user password.
dotenv 'SSHPASS=$ORIG_PASSWD sshpass -e ssh root@$IP sh < set_sudo.sh'
dotenv 'SSHPASS=$ORIG_PASSWD sshpass -e ssh root@$IP PASSWD=$PASSWD sh < set_passwd.sh'
# User phase (new password): install the local public key for key-based login.
dotenv 'SSHPASS=$PASSWD sshpass -e ssh user@$IP "mkdir -pm 700 .ssh; cat >> ~/.ssh/authorized_keys" < ~/.ssh/id_rsa.pub'
dotenv 'SSHPASS=$PASSWD sshpass -e ssh user@$IP sh < install_mosh.sh'
# dotenv 'SSHPASS=$PASSWD sshpass -e ssh user@$IP sh < install_docker.sh'
# dotenv 'SSHPASS=$PASSWD sshpass -e ssh user@$IP sh < install_docker-compose.sh'
|
ermaker/infra_cac
|
provision.sh
|
Shell
|
mit
| 525 |
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build phase: copies each pod
# framework into the app bundle, thins fat binaries, re-signs, and (pre-Xcode 7)
# embeds the Swift runtime dylibs. Driven entirely by Xcode build variables.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copy one framework ($1, path relative to BUILT_PRODUCTS_DIR or absolute)
# into the app's Frameworks folder, then thin and re-sign it.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
# Remove every slice of a fat binary not listed in VALID_ARCHS (in place).
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the pod framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "Pods-MNKPDFEditor_Example/MNKPDFEditor.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-MNKPDFEditor_Example/MNKPDFEditor.framework"
fi
|
Moonko/MNKPDFEditor
|
Example/Pods/Target Support Files/Pods-MNKPDFEditor_Example/Pods-MNKPDFEditor_Example-frameworks.sh
|
Shell
|
mit
| 3,568 |
#!/bin/sh
# Launch the project's client entry point through Maven.
# $1 - argument string forwarded to client.Main. Quoted so that a value
#      containing spaces stays a single -Dexec.args=... property (the
#      original unquoted $1 was word-split by the shell).
mvn exec:java -Dexec.mainClass=client.Main -Dexec.args="$1"
|
suparngp/AOSProject2
|
run-client.sh
|
Shell
|
mit
| 67 |
#!/bin/bash
# Build and export an ad-hoc IPA of the EKESell workspace with xcodebuild.
# Under Jenkins, WORKSPACE is the job's workspace; this default is for local testing.
WORKSPACE=~/Desktop/qiakr
export LC_ALL=zh_CN.GB2312;export LANG=zh_CN.GB2312
# Project paths
WORKSPACE_NAME="eke2.0"
TARGET_NAME="EKESell"
WORKSPACE_PATH="${WORKSPACE}/RealCloud/ekesell2.0/${WORKSPACE_NAME}.xcworkspace"
# Project build/signing configuration
BUILD_CONFIG="Release"
PRODUCT_BUNDLE_IDENTIFIER="com.yiguo.qiakr.app"
CODE_SIGN_IDENTITY="iPhone Distribution: Hangzhou Ant Kingdom Technology Co, Ltd. (4C72AXZXH7)"
PROVISIONING_PROFILE="78bf3bbf-e40b-487b-8a2c-976a4d8c105e"
ENABLE_BITCODE="YES"
# Output locations: archive under RES_DIR, exported IPA under RES_IPA_DIR
EXPORT_OPTIONS_PLIST=~/Desktop/ad_hot_qa.plist
RES_DIR=~/Desktop/${TARGET_NAME}-pack
RES_ARCHIVE_PATH=${RES_DIR}/ach.xcarchive
RES_IPA_DIR=${RES_DIR}/ipa
RES_IPA_PATH=${RES_IPA_DIR}/${TARGET_NAME}.ipa
# clean
xcodebuild OTHER_CFLAGS="-fembed-bitcode" clean -workspace $WORKSPACE_PATH -scheme $TARGET_NAME
# archive
xcodebuild OTHER_CFLAGS="-fembed-bitcode" archive -archivePath $RES_ARCHIVE_PATH -workspace $WORKSPACE_PATH -scheme $TARGET_NAME -configuration $BUILD_CONFIG CODE_SIGN_IDENTITY="${CODE_SIGN_IDENTITY}" PROVISIONING_PROFILE=${PROVISIONING_PROFILE} PRODUCT_BUNDLE_IDENTIFIER=${PRODUCT_BUNDLE_IDENTIFIER} ENABLE_BITCODE=${ENABLE_BITCODE}
# export
xcodebuild OTHER_CFLAGS="-fembed-bitcode" -exportArchive -archivePath $RES_ARCHIVE_PATH -exportPath $RES_IPA_DIR -exportOptionsPlist $EXPORT_OPTIONS_PLIST
echo "打包成功:${RES_IPA_PATH}"
export LANG=en_US
export LC_ALL=en_US;
# Optional upload to pgyer (disabled); kept verbatim for reference.
#path=$RES_IPA_PATH
#pgyerUkey=1956d5a302e9d3980fd7b59849ff6d41
#pgyerApikey=7436075a04f3dcefb109032ab5b21f95
#MSG="QA 测试包-正式环境"
#
#a=$(curl -F "file=@$path" -F "uKey=$pgyerUkey" -F "_api_key=$pgyerApikey" -F "updateDescription=${MSG}" https://www.pgyer.com/apiv1/app/upload)
#
#echo "QRCode:$a" | sed "s/\\\\/""/g"
#echo "\n打包上传更新成功!"
# Post-build (Jenkins) step: append a QR-code download link to the build
# description using the regex/HTML snippet below.
# set build description
# "appQRCodeURL":"(.*)"
# <img src='\1' width=144 height=144> <a href='\1'>二维码</a>
JuYiWei/CZ_Demos
|
2018/自动化构建脚本/workspace.sh
|
Shell
|
mit
| 2,038 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# MPLAB X / NetBeans packaging step: stage the built image into a temporary
# tree and tar it into dist/<conf>/package. Regenerated by the IDE, so manual
# edits here will be overwritten.
# Macros
TOP=`pwd`
CND_CONF=default
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/20201_c_jgocadena.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=20201_c_jgocadena.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=20201cjgocadena.x/
# Functions
# Abort the script with the failing command's exit code.
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/20201cjgocadena.x/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/20201cjgocadena.x.tar
cd ${TMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/20201cjgocadena.x.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
|
tocache/picomones
|
UPC Microcontroladores 2020-1/Semana 10/20201_c_jgocadena.X/nbproject/Package-default.bash
|
Shell
|
cc0-1.0
| 1,421 |
#!/bin/bash
# Build the LEHD public-use data schema documentation: assemble an asciidoc
# file from the CSV definition files in the current directory, then render it
# to HTML, PDF and plain text. Requires asciidoc, a2x and html2text.
# Usage: write_schemadoc.sh [start|cornell|draft|official]
# set defaults
toclevels=3
# print out info
if [[ -z $1 ]]
then
echo "
$0 [start|version]
will build the format documentation from CSV files and a template.
Version = cornell|draft|official changes a note in the document
"
exit 1
fi
if [[ "$1" = "start" ]]
then
# parse version from directory
version=cornell
else
version=$1
fi
# The version selects the contact address shown in the document header.
case $version in
cornell)
[email protected]
;;
draft)
[email protected]
;;
official)
[email protected]
;;
esac
# The schema version number is the name of the working directory.
cwd=$(pwd)
numversion=${cwd##*/}
# convert the column definitions to CSV
sed 's/ /,/g;s/R N/R,N/; s/,,/,/g; s/,,/,/g; s/,,/,/g; s/, /,/g' column_definitions.txt | tail -n +2 > tmp.csv
# create ascii doc version
asciifile=lehd_public_use_schema.asciidoc
echo "= LEHD Public Use Data Schema $numversion" > $asciifile
echo "Lars Vilhuber <${author}>" >> $asciifile
echo "$(date +%d\ %B\ %Y)
// a2x: --dblatex-opts \"-P latex.output.revhistory=0 --param toc.section.depth=${toclevels}\"
( link:QWIPU_Data_Schema.pdf[Printable version] )
" >> $asciifile
# A note on the relevance/beta/draft status of this file.
case $version in
cornell)
echo "
[IMPORTANT]
.Important
==============================================
This document is not an official Census Bureau publication. It is compiled from publicly accessible information
by Lars Vilhuber (http://www.ilr.cornell.edu/ldi/[Labor Dynamics Institute, Cornell University]).
Feedback is welcome. Please write us at
link:mailto:[email protected]?subject=LEHD_Schema_v4[[email protected]].
==============================================
" >> $asciifile
;;
draft)
echo "
[IMPORTANT]
.Important
==============================================
This specification is draft. Feedback is welcome. Please write us at link:mailto:${author}?subject=LEHD_Schema_draft[${author}].
==============================================
" >> $asciifile
;;
official)
echo "
[IMPORTANT]
.Important
==============================================
Feedback is welcome. Please write us at link:mailto:[email protected]?subject=LEHD_Schema_4.0.1[[email protected]].
.
==============================================
" >> $asciifile
;;
esac
# Introductory prose and the generic file layout section.
echo "
The public-use Quarterly Workforce Indicators (QWI) data from the Longitudinal Employer-Household Dynamics Program
are available for download with the following data schema.
These data are available as Comma-Separated Value (CSV) files through the LEHD website’s Data page at
http://lehd.ces.census.gov/data/ .
This document describes the data schema for QWI files. For each variable,
a set of allowable values is defined. Definitions are provided as CSV files,
with header variable definitions. The naming conventions of the data files is documented in link:lehd_csv_naming.html[]. Changes relative to the original v4.0 version are listed <<changes,at the end>>.
Basic Schema
------------
Each file is structured as a CSV file. The first columns contain <<identifiers>>, subsequent columns contain <<indicators>>, followed by <<statusflags,status flags>>.
=== Generic structure
[width=\"30%\",format=\"csv\",cols=\"<2\",options=\"header\"]
|===================================================
Column name
[ Identifier1 ]
[ Identifier2 ]
[ Identifier3 ]
[ ... ]
[ Indicator 1 ]
[ Indicator 2 ]
[ Indicator 3 ]
[ ... ]
[ Status Flag 1 ]
[ Status Flag 2 ]
[ Status Flag 3 ]
[ ... ]
|===================================================
Note: A full list of indicators for each type of file are shown below in the <<indicators,Indicators>> section.
While all indicators are included in the CSV files, only the requested indicators
will be included in data outputs from the LED Extraction Tool.
<<<
=== [[identifiers]]Identifiers
Records, unless otherwise noted, are parts of time-series data. Unique record identifiers are noted below, by file type.
Identifiers without the year and quarter component can be considered a series identifier.
" >> $asciifile
############################## Identifiers
# One subsection per lehd_identifiers_*.csv file, embedding the CSV as a table.
for arg in $(ls lehd_identifiers_*csv)
do
name="$(echo ${arg%*.csv}| sed 's/lehd_//; s/_/ for /; s/ident/Ident/')"
echo "==== $name
( link:${arg}[] )
[width=\"100%\",format=\"csv\",cols=\"2*^1,<3\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
<<<
" >> $asciifile
done
################################# Variables
echo "
<<<
=== [[indicators]]Indicators
The following tables and associated mapping files
list the indicators available on each file. The ''Indicator Variable'' is the short name of the variable on the CSV files, suitable for machine processing in a wide variety of statistical applications. When given, the ''Alternate name'' may appear in related documentation and articles. The ''Status Flag'' is used to indicate publication or data quality status (see <<statusflags,Status Flags>>). The ''Indicator Name'' is a more verbose description of the indicator.
( link:variables_qwipu.csv[variables_qwipu.csv] )
[width=\"95%\",format=\"csv\",cols=\"3*^2,<5\",options=\"header\"]
|===================================================
include::variables_qwipu.csv[]
|===================================================
<<<
" >> $asciifile
################################ Formats
echo "
== [[catvars]]Categorical Variables
Categorical variable descriptions are displayed above each table, with the variable name shown in parentheses. Unless otherwise stated, every possible value/label combination for each categorical variable is listed. Please note that not all values will be available in every table.
" >> $asciifile
# we do industry and geo last
for arg in $(ls label_*csv| grep -vE "geo|ind_level|industry|agg_level|flags|fips")
do
name=$(echo ${arg%*.csv}| sed 's/label_//')
echo "=== $name
( link:${arg}[] )
[width=\"60%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
" >> $asciifile
done
################################ Industry formats
# now do industry
name=Industry
echo "<<<
=== $name ===
" >> $asciifile
for arg in $(ls label_ind_level*csv)
do
name="$(echo ${arg%*.csv}| sed 's/lehd_//; s/_/ for /')"
link="$(echo ${arg%*.csv}| sed 's/label_//')"
echo "[[$link]]
==== Industry levels
( link:${arg}[] )
[width=\"60%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
" >> $asciifile
arg=label_industry.csv
# construct the sample industry file
head -8 $arg > tmp2.csv
echo "...," >> tmp2.csv
grep -A 4 -B 4 "31-33" $arg | tail -8 >> tmp2.csv
echo "...," >> tmp2.csv
echo "
==== Industry
( link:${arg}[] )
Only a small subset of available values shown.
The 2012 NAICS (North American Industry Classification System) is used for all years.
QWI releases prior to R2015Q3 used the 2007 NAICS classification (see Schema v4.0.1).
For a full listing of all valid NAICS codes, see http://www.census.gov/eos/www/naics/.
[width=\"90%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::tmp2.csv[]
|===================================================
<<<
" >> $asciifile
done
################################ Geo formats
# now do geography
name=Geography
# construct the NS file
# (national/state-level values, harvested from the per-state label files)
nsfile=label_fipsnum.csv
echo "geography,label" > $nsfile
grep -h -E "^[0-9][0-9]," ??/label_geography.csv >> $nsfile
# construct the sample fips file
head -8 $nsfile > tmp.csv
echo "...," >> tmp.csv
head -50 $nsfile | tail -8 >> tmp.csv
# construct the composite file from separate files
head -1 ak/label_geography.csv > label_geography_all.csv
echo '00,"National (50 States + DC)"' >> label_geography_all.csv
for arg in $(ls ??/label_geography.csv)
do
tail -n +2 $arg >> tmp3.csv
done
cat tmp3.csv | sort -n -k 1 -t , >> label_geography_all.csv
rm tmp3.csv
echo "=== $name ===
" >> $asciifile
for arg in $(ls label_geo_level*csv)
do
name="$(echo ${arg%*.csv}| sed 's/label_//')"
echo "[[$name]]
==== Geographic levels
( link:${arg}[] )
[width=\"40%\",format=\"csv\",cols=\"^1,<3\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
" >> $asciifile
done
echo "
Geography labels are provided in separate files, in directories by state. Note that cross-state CBSA will have
state-specific parts, and thus will appear in multiple files.
A separate link:$nsfile[$nsfile] contains values and labels
for all entities of geo_level 'n' or 's', and is a summary of separately available files.
==== State-level values ====
( link:$nsfile[] )
[width=\"40%\",format=\"csv\",cols=\"^1,<3\",options=\"header\"]
|===================================================
include::tmp.csv[]
|===================================================
==== Detailed state and substate level values
For a full listing of all valid geography codes (except for WIA codes), see http://www.census.gov/geo/maps-data/data/tiger.html.
Note about geography codes: Four types of geography codes are represented with this field. Each geography
has its own code structure.
- State is the 2-digit http://quickfacts.census.gov/qfd/meta/long_fips.htm[FIPS] code.
- County is the 5-digit FIPS code.
- Metropolitan/Micropolitan codes are constructed from the 2-digit state FIPS code and the 5-digit http://www.census.gov/population/metro/[CBSA] code provided by the Census Bureau’s Geography Division.
** In the QWI, the metropolitan/micropolitan areas are the state parts of the full CBSA areas.
- The WIA code is constructed from the 2-digit state FIPS code and the 6-digit WIA identifier provided by LED State Partners.
The 2014 vintage of Census TIGER geography is used for all tabulations as of the R2014Q3 release.
For convenience, a composite file containing all geocodes is available as
link:label_geography_all.csv[].
[format=\"csv\",width=\"50%\",cols=\"^1,^3\",options=\"header\"]
|===================================================
State,Format file" >> $asciifile
for arg in $(ls ??/label_geography.csv)
do
state=$(dirname ${arg}|tr [a-z] [A-Z])
echo "$state,link:${arg}[]" >> $asciifile
done
echo "|===================================================" >> $asciifile
################################# Variables
# finish file
arg=label_flags.csv
echo "
<<<
== [[statusflags]]Status flags
( link:${arg}[] )
Each status flag in the tables above contains one of the following valid values.
The values and their interpretation are listed in the table below.
[width=\"80%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
<<<
" >> $asciifile
cat CHANGES.txt >> $asciifile
echo "
<<<
*******************
This revision: $(date)
*******************
" >> $asciifile
echo "$asciifile created"
# Render: asciidoc -> HTML, a2x -> PDF (renamed), html2text -> plain text.
asciidoc -a icons -a toc -a numbered -a linkcss -a toclevels=$toclevels $asciifile
[[ -f $(basename $asciifile .asciidoc).html ]] && echo "$(basename $asciifile .asciidoc).html created"
a2x -f pdf -a icons -a toc -a numbered $asciifile
[[ -f $(basename $asciifile .asciidoc).pdf ]] && echo "$(basename $asciifile .asciidoc).pdf created"
mv $(basename $asciifile .asciidoc).pdf "QWIPU_Data_Schema.pdf" && echo "$(basename $asciifile .asciidoc).pdf moved to QWIPU_Data_Schema.pdf"
html2text $(basename $asciifile .asciidoc).html > $(basename $asciifile .asciidoc).txt
[[ -f $(basename $asciifile .asciidoc).txt ]] && echo "$(basename $asciifile .asciidoc).txt created"
echo "Removing tmp files"
rm tmp*
|
labordynamicsinstitute/qwi_schemas
|
formats/V4.0.2/write_schemadoc.sh
|
Shell
|
cc0-1.0
| 11,836 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# MPLAB X / NetBeans packaging step: stage the built image into a temporary
# tree and tar it into dist/<conf>/package. Regenerated by the IDE, so manual
# edits here will be overwritten.
# Macros
TOP=`pwd`
CND_CONF=default
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/pas_notgate.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=pas_notgate.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=pasnotgate.x/
# Functions
# Abort the script with the failing command's exit code.
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/pasnotgate.x/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/pasnotgate.x.tar
cd ${TMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/pasnotgate.x.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
|
tocache/picomones
|
UPC Microcontroladores 2019-1/Semana 2/pas_notgate.X/nbproject/Package-default.bash
|
Shell
|
cc0-1.0
| 1,389 |
#!/bin/sh
######################################################################
#
# @file lapack.sh
#
# @brief Build information for lapack.
#
# @version $Rev$ $Date$
#
# Copyright © 2012-2017, Tech-X Corporation, Boulder, CO.
# See LICENSE file (EclipseLicense.txt) for conditions of use.
#
######################################################################
######################################################################
#
# Trigger variables set in lapack_aux.sh
#
######################################################################
# NOTE(review): bilderUnpack/bilderConfig/bilderBuild/bilderInstall and the
# CMAKE_* variables used below come from the bilder build framework, sourced
# elsewhere — confirm against the framework docs before changing arguments.
mydir=`dirname $BASH_SOURCE`
source $mydir/lapack_aux.sh
######################################################################
#
# Set variables that should trigger a rebuild, but which by value change
# here do not, so that build gets triggered by change of this file.
# E.g: mask
#
######################################################################
setLapackNonTriggerVars() {
LAPACK_UMASK=002
}
setLapackNonTriggerVars
######################################################################
#
# Launch lapack builds.
#
######################################################################
# Configure and build lapack for each enabled flavor (ser, sermd, sersh,
# pycsh, ben). Returns early if the tarball cannot be unpacked.
buildLapack() {
if ! bilderUnpack lapack; then
return
fi
local buildargs=
if [[ `uname` =~ CYGWIN ]]; then
buildargs="-m nmake"
fi
# A. Pletzer: building the testing code fails with a seg fault on
# Linux systems running gfortran 4.4.6. Turn off BUILD_TESTING
# for these cases
local LAPACK_ALL_ADDL_ARGS="-DBUILD_DEPRECATED:BOOL=TRUE"
if test `uname` = "Linux" -a "gfortran" = `basename $FC`; then
version=`$FC --version | tr '\n' ' ' | awk '{print $4}'`
if test $version = "4.4.6"; then
LAPACK_ALL_ADDL_ARGS="-DBUILD_TESTING:BOOL=OFF $LAPACK_ALL_ADDL_ARGS"
fi
fi
if bilderConfig lapack ser "$CMAKE_COMPILERS_SER $CMAKE_COMPFLAGS_SER $LAPACK_ALL_ADDL_ARGS $LAPACK_SER_OTHER_ARGS"; then
bilderBuild $buildargs lapack ser
fi
if bilderConfig lapack sermd "-DBUILD_WITH_SHARED_RUNTIME:BOOL=ON $CMAKE_COMPILERS_SER $CMAKE_COMPFLAGS_SER $LAPACK_ALL_ADDL_ARGS $LAPACK_SERMD_OTHER_ARGS"; then
bilderBuild $buildargs lapack sermd
fi
if bilderConfig lapack sersh "-DBUILD_SHARED_LIBS:BOOL=ON $CMAKE_COMPILERS_SER $CMAKE_COMPFLAGS_SER $LAPACK_ALL_ADDL_ARGS $LAPACK_SERSH_OTHER_ARGS"; then
bilderBuild $buildargs lapack sersh
fi
if bilderConfig lapack pycsh "-DBUILD_SHARED_LIBS:BOOL=ON $CMAKE_COMPILERS_PYC $CMAKE_COMPFLAGS_PYC $LAPACK_ALL_ADDL_ARGS $LAPACK_PYCSH_OTHER_ARGS"; then
bilderBuild $buildargs lapack pycsh
fi
if bilderConfig lapack ben "$CMAKE_COMPILERS_BEN $CMAKE_COMPFLAGS_BEN $LAPACK_ALL_ADDL_ARGS $LAPACK_BEN_OTHER_ARGS"; then
bilderBuild $buildargs lapack ben
fi
return 0
}
######################################################################
#
# Test lapack
#
######################################################################
testLapack() {
techo "Not testing lapack."
}
######################################################################
#
# Install lapack.
# Done manually, as make install does not work.
#
######################################################################
# Install each built flavor; on Cygwin, rename lib<name>.a to <name>.lib
# to match Windows library naming.
installLapack() {
LAPACK_INSTALLED=false
for bld in ser sermd sersh pycsh ben; do
if bilderInstall lapack $bld; then
LAPACK_INSTALLED=true
case `uname` in
CYGWIN*)
libdir=$CONTRIB_DIR/lapack-${LAPACK_BLDRVERSION}-$bld/lib
for lib in blas lapack tmglib; do
if test -f $libdir/lib${lib}.a; then
cmd="mv $libdir/lib${lib}.a $libdir/${lib}.lib"
techo "$cmd"
$cmd
fi
# Shared not built for Windows, so no need to look for DLLs
done
;;
esac
fi
done
return 0
}
|
Tech-XCorp/bilder
|
packages/lapack.sh
|
Shell
|
epl-1.0
| 3,763 |
#!/bin/bash
# Example for running
# Starts an interactive bash shell in the imiell/sd_x7proto image,
# forcing HOME=/root inside the container.
docker run -e HOME=/root -t -i imiell/sd_x7proto /bin/bash
|
ianmiell/shutit-distro
|
x7proto/bin/run.sh
|
Shell
|
gpl-2.0
| 93 |
#!/bin/bash
# Demonstrates three ways to iterate over a numeric range; each iteration
# prints the dotted string from field N through the last field.
sample="387.10.10.10.30.103"
# 1) Literal brace expansion.
for i in {1..6}; do
echo "$sample" | cut -d'.' -f"$i"-
done
last=6
# 2) C-style arithmetic for-loop (works with a variable upper bound).
for ((i = 2; i <= last; i++)); do
echo "$sample" | cut -d'.' -f"$i"-
done
# 3) Brace expansion with a variable bound via eval
# (plain {2..$last} would not expand the variable).
for i in $(eval echo {2..$last}); do
echo "$sample" | cut -d'.' -f"$i"-
done
|
mcm811/bin.osx
|
example/exam_for_loop.sh
|
Shell
|
gpl-2.0
| 333 |
# Emit a SWIG %exception block for one libselinux function ($1) so that a
# negative return value raises OSError from errno in the Python bindings.
# selinux_file_context_cmp is skipped: its return value is a comparison
# result, not an error code.
function except() {
case $1 in
selinux_file_context_cmp) # ignore
;;
*)
echo "
%exception $1 {
\$action
if (result < 0) {
PyErr_SetFromErrno(PyExc_OSError);
SWIG_fail;
}
}
"
;;
esac
}
# Dump declaration info for the header with -aux-info, preferring $CC.
if ! ${CC:-gcc} -x c -c -I../include - -aux-info temp.aux < ../include/selinux/selinux.h
then
# clang does not support -aux-info so fall back to gcc
gcc -x c -c -I../include - -aux-info temp.aux < ../include/selinux/selinux.h
fi
# Generate an exception wrapper for every extern int function declared in the
# header ($6 in the aux-info line is the function name).
for i in `awk '/<stdin>.*extern int/ { print $6 }' temp.aux`; do except $i ; done
rm -f -- temp.aux -.o
|
jpacg/su-binary
|
jni/selinux/libselinux/src/exception.sh
|
Shell
|
gpl-2.0
| 555 |
# Build the Dockerfile in the current directory and tag the image "razkroi-web".
docker build . --tag razkroi-web
|
sbnedkov/cutlist
|
docker/build.sh
|
Shell
|
gpl-2.0
| 33 |
#!/bin/sh
# Watch TARGETPATH for completed writes and e-mail a listing of the matching
# files to MAILADDR on each IN_CLOSE_WRITE event (via pyinotify's -c hook).
TARGETPATH="/tmp/"
FILENAME="*.txt"
MAILADDR="[email protected]"
# Fixes vs. the original:
#  - the -c command was single-quoted, so ${TARGETPATH}/${FILENAME}/${MAILADDR}
#    were never expanded here, and since they are not exported they could not
#    be resolved later either; double quotes expand them into the command now
#    (FILENAME stays a literal glob inside the string, expanded at event time);
#  - the mail subject misspelled TARGETPATH as "TARGETPAH".
/usr/bin/python -m pyinotify -e IN_CLOSE_WRITE "${TARGETPATH}" -f -c "/usr/bin/ls -lat ${TARGETPATH}${FILENAME} | /usr/bin/mail -s \"${TARGETPATH} - ${FILENAME} file modifications\" ${MAILADDR}"
|
rkferreira/tools
|
watchdog-fs/watchdog-fs.sh
|
Shell
|
gpl-2.0
| 259 |
#!/bin/bash
# Print the absolute path for $1 without requiring it to exist.
# An existing directory is resolved by cd-ing into it; anything else is
# resolved textually: "" and "." map to $PWD, absolute paths pass through,
# and relative paths are prefixed with $PWD.
abspath_cd() {
local path="$1"
if [ -d "$path" ]; then
( cd "$path"; pwd )
return
fi
case "$path" in
"" | ".")
echo "$PWD"
;;
/*)
echo "$path"
;;
*)
echo "$PWD/$path"
;;
esac
}
# Directory containing this script (used to locate insert_copyright.sh later).
THIS_DIR=$(abspath_cd $(dirname "$0"))
# Defaults, overridable via command-line options below.
AUTHOR_FILTER=
DEFAULT_LICENSE=GPL
DEFAULT_COPYRIGHT_HOLDER="Saarland University"
declare -i DONT_ASK=0
declare -i END_OF_OPTIONS=0
# Option parsing; "--" ends option processing, remaining args are files.
while [ $# -ne 0 ]; do
case "$1" in
--)
END_OF_OPTIONS=1
;;
--gpl|--GPL)
DEFAULT_LICENSE=GPL
;;
--lgpl|--LGPL)
DEFAULT_LICENSE=LGPL
;;
--author-filter=*)
# ${1:16} strips the 16-character literal prefix "--author-filter=".
AUTHOR_FILTER=${1:16}
if [ -z "$AUTHOR_FILTER" ]; then
echo >&2 "No author specified in option: $1 (e.g. --author-filter=Name)"
exit 1
fi
;;
--default-copyright-holder=*)
# ${1:27} strips the 27-character prefix "--default-copyright-holder=".
DEFAULT_COPYRIGHT_HOLDER=${1:27}
if [ -z "$DEFAULT_COPYRIGHT_HOLDER" ]; then
echo >&2 "No default copyright holder specified in option: $1 (e.g. --default-copyright-holder=Name)"
exit 1
fi
;;
--dont-ask)
DONT_ASK=1
;;
-*)
echo >&2 "unknown option $1"
exit 1
;;
esac
shift
[ $END_OF_OPTIONS -eq 1 ] && break
done
# NOTE(review): "Author filer" below is a typo for "Author filter" in the
# user-visible message (left unchanged here).
echo "Author filer: $AUTHOR_FILTER"
echo "Default license: $DEFAULT_LICENSE"
echo "Default copyright holder: $DEFAULT_COPYRIGHT_HOLDER"
echo "Don't ask anything: $DONT_ASK"
# ask prompt default
# Prompt the user ($1) with a default value ($2); the answer (or the default
# when the reply is empty) is returned in $REPLY.
ask() {
local prompt=$1
local default=$2
echo "$1"
[ -n "$default" ] && echo -n "[$default] "
read -e
[ -z "$REPLY" ] && REPLY=$default
}
# NOTE(review): THIS_DIR is recomputed here although it was already set above.
THIS_DIR=$(abspath_cd $(dirname "$0"))
# With no file arguments, scan the repository root for sources that lack a
# Copyright line (excluding known third-party/generated files); otherwise
# operate on the files given on the command line.
if [ -z "$1" ]; then
# We operate in root directory
cd "$THIS_DIR/.."
FILES=$(find . -name "*.hpp" -or -name "*.cpp" -or -name "*.py" | xargs grep -L Copyright | grep -v "tools/json" | grep -v "tools/mkcval.py" | grep -v "tools/maketest.py" | grep -v "tools/check.py" | grep -v "./tests/xml3d_node_defs.hpp")
else
echo "<$1>"
FILES="$@"
fi
# For each file: derive author/year candidates from git history, show a
# per-author line count from git blame, optionally confirm interactively,
# then delegate the actual header insertion to insert_copyright.sh.
for f in $FILES; do
# First author
INFO_STR=$(git log --pretty="format:%an|%ae|%ai|%s" "$f" | tail -n1)
if [ -z "$INFO_STR" ]; then
continue
fi
# Split the "name|email|date|subject" record on "|" into INFOS.
oldIFS=$IFS
IFS="|"
INFOS=($INFO_STR)
IFS=$oldIFS
AUTHOR=${INFOS[0]}
AUTHOR_EMAIL=${INFOS[1]}
# Keep only the year portion of the ISO date.
AUTHOR_YEAR=$(echo "${INFOS[2]}" | sed "s|\([^-]\+\)-.*|\1|g")
# All distinct commit years for the file, joined as "Y1, Y2, ...".
YEARS=($(git log --pretty="format:%ai" "$f" | sed "s|\([^-]\+\)-.*|\1|g" | sort -u))
YEARS_STR=""
for ((i=0; i<${#YEARS[*]}; i++)); do
if [ $i -ne $(( ${#YEARS[*]}-1 )) ]; then
YEARS_STR="${YEARS_STR}${YEARS[$i]}, "
else
YEARS_STR="${YEARS_STR}${YEARS[$i]}"
fi
done
if [ -n "$AUTHOR_FILTER" -a "$AUTHOR_FILTER" != "$AUTHOR" ]; then
echo "--- Skipping $f (\"$AUTHOR_FILTER\" != \"$AUTHOR\") ---"
continue
fi
echo "******* Processing $f *******"
head "$f"
echo "..."
echo
echo "First commit: ${INFOS[3]}"
echo "First commit author: $AUTHOR <$AUTHOR_EMAIL>"
echo "First commit year: $AUTHOR_YEAR"
echo "All other authors (with git blame):"
# Count blamed lines per author (with some name normalisation) and print
# authors sorted by descending line count.
git blame -f "$f" | perl -w -e '
my %authorToNumLines = ();
my @files = ();
while (<>) {
if (/(\S+)\s+\(([^[:digit:]]*[^[:digit:][:space:]]).*\)/) {
$file = $1;
$author = $2;
push @files, $file;
# Fix
if ($author eq "Kristian") {
$author = "Kristian Sons";
}
if ($author eq "Sergiy Byelozoyorov") {
$author = "Sergiy Byelozyorov";
}
if (defined $authorToNumLines{$author}) {
$authorToNumLines{$author} = $authorToNumLines{$author} + 1;
}
else {
$authorToNumLines{$author} = 1;
}
}
}
while (($key, $value) = each %authorToNumLines) {
$numLinesToAuthor{$value} = $key;
}
my @sorted_keys = sort { ($a <=> $b)*-1 } keys %numLinesToAuthor;
foreach my $key ( @sorted_keys ) {
$value = $numLinesToAuthor{$key};
print "$value : $key line(s) of code\n";
}'
echo
# Pick the copyright holder: configured default, else the first author.
if [ -z "$DEFAULT_COPYRIGHT_HOLDER" ]; then
DEFAULT_AUTHOR="$AUTHOR <$AUTHOR_EMAIL>"
else
DEFAULT_AUTHOR="$DEFAULT_COPYRIGHT_HOLDER"
fi
LIC_AUTHOR=$DEFAULT_AUTHOR
LIC_YEAR=$YEARS_STR
LIC_TYPE=$DEFAULT_LICENSE
# Interactive confirmation unless --dont-ask was given; "skip" moves on.
if [ "$DONT_ASK" -eq 0 ]; then
ask "File author (or type skip for next) ?" "$LIC_AUTHOR"
LIC_AUTHOR=$REPLY
if [ "$LIC_AUTHOR" = "skip" ]; then
continue
fi
ask "Year (or type skip for next) ?" "$YEARS_STR"
LIC_YEAR=$REPLY
if [ "$LIC_YEAR" = "skip" ]; then
continue
fi
ask "File license (GPL/LGPL) (or type skip for next)" "$LIC_TYPE"
LIC_TYPE=$REPLY
if [ "$LIC_TYPE" = "skip" ]; then
continue
fi
fi
# Map the license answer to insert_copyright.sh's option flags.
case "$LIC_TYPE" in
gpl|GPL) LIC_TYPE=--gpl;;
lgpl|LGPL) LIC_TYPE=--lgpl;;
esac
"$THIS_DIR/insert_copyright.sh" \
$LIC_TYPE \
--year="$LIC_YEAR" \
--author="$LIC_AUTHOR" "$f"
echo
echo
done
|
BALL-Contrib/contrib_rtfact_4f1d028
|
tools/add_copyright.sh
|
Shell
|
gpl-2.0
| 5,126 |
#!/bin/sh
# Binary (tarball) installation of the official stable Tomcat 8 release.
# Make sure SELinux is disabled before running this script.
### Tarball base name; adjust to match the package you actually ship
tomcat_name=apache-tomcat-8.0.33
### The settings below are customizable, but changing them is not recommended
# Tomcat is installed to /opt/tomcat8 and symlinked from /usr/local/tomcat8
tomcat_install_dir=/usr/local/tomcat8
tomcat_install_dir_real=/opt/tomcat8
# User the service runs as
run_user=tomcat
# Web site document root
tomcat_web_dir=/data/tomcatwebroot/default
# Tomcat runtime log directory
tomcat_log_dir=/data/tomcatlog
# Tomcat per-Host access-log directory
tomcat_weblog_dir=/data/tomcatweblog
# Create the run user
useradd -M -s /bin/bash $run_user
# Create the directories and hand them over to the run user
mkdir -p $tomcat_install_dir_real $tomcat_web_dir $tomcat_log_dir $tomcat_weblog_dir
chown -R $run_user.$run_user $tomcat_install_dir_real $tomcat_web_dir $tomcat_log_dir $tomcat_weblog_dir
# Symlink the install directory
ln -s $tomcat_install_dir_real $tomcat_install_dir
# Unpack the Tomcat tarball into the install path
tar xzf bin/$tomcat_name.tar.gz
cp -rf $tomcat_name/* $tomcat_install_dir
rm -rf $tomcat_name
### server.xml: the main Tomcat configuration file
# Back up server.xml (it records the Tomcat install path)
cp $tomcat_install_dir/conf/server.xml{,_bk} -n
### Use the template shipped in config/server.xml:
# - default encoding changed to UTF-8
# - default virtual host split out into its own file
cp -f config/server.xml $tomcat_install_dir/conf
# Configure the default virtual host and its access log
[ ! -d "$tomcat_install_dir/conf/vhost" ] && mkdir $tomcat_install_dir/conf/vhost
cat > $tomcat_install_dir/conf/vhost/localhost.xml << EOF
<Host name="localhost" appBase="webapps" unpackWARs="true" autoDeploy="true">
<Context path="" docBase="$tomcat_web_dir" debug="0" reloadable="false" crossContext="true"/>
<Valve className="org.apache.catalina.valves.AccessLogValve" directory="$tomcat_weblog_dir"
prefix="localhost_access_log." suffix=".txt" pattern="%h %l %u %t "%r" %s %b" />
</Host>
EOF
# Fix the vhost include path inside server.xml
sed -i "s@/usr/local/tomcat@$tomcat_install_dir@g" $tomcat_install_dir/conf/server.xml
# Relocate the access logs
sed -i "s@directory=\"logs\"@directory=\"$tomcat_weblog_dir\"@g" $tomcat_install_dir/conf/server.xml
# Relocate the runtime and management logs
cp $tomcat_install_dir/bin/catalina.sh{,_bk} -n
sed -i "s@\"\$CATALINA_BASE\"/logs/catalina.out@$tomcat_log_dir/catalina.out@" $tomcat_install_dir/bin/catalina.sh
cp $tomcat_install_dir/conf/logging.properties{,_bk} -n
sed -i "s@\${catalina.base}/logs@$tomcat_log_dir@" $tomcat_install_dir/conf/logging.properties
### The three run modes of the Tomcat Connector
# * BIO:
#   One thread handles one request.
#   Drawback: high concurrency needs many threads, wasting resources.
#   Default mode on Linux for Tomcat 7 and below.
# * NIO:
#   Java asynchronous I/O; a small number of threads serves many requests.
#   Default mode on Linux for Tomcat 8.
#   Tomcat 7 requires an explicit Connector configuration change to use it.
# * APR:
#   Apache Portable Runtime; solves I/O blocking at the OS level.
#   With apr and tomcat-native installed on Linux, Tomcat 7/8 start with
#   APR directly.
# Install tomcat-native's dependencies: apr apr-devel apr-util
# The yum-packaged versions are too old, so build them from source instead:
# yum -y install apr apr-devel apr-util openssl-devel
# Build apr from source
apr_version=1.5.2
tar xzf src/apr-$apr_version.tar.gz
cd apr-$apr_version
./configure
make && make install
cd ..
rm -rf apr-$apr_version
# Build apr-util from source
apr_util_version=1.5.4
tar xzf src/apr-util-$apr_util_version.tar.gz
cd apr-util-$apr_util_version
./configure \
--with-apr=/usr/local/apr/bin/apr-1-config
make && make install
cd ..
rm -rf apr-util-$apr_util_version
# Build openssl from source
# (installs to /usr/local/ssl by default)
openssl_version=1.0.2h
tar xzf src/openssl-$openssl_version.tar.gz
cd openssl-$openssl_version
export CFLAGS="-fPIC"
./config shared no-ssl2 no-ssl3 --openssldir=/usr/local/ssl
make depend
make all
make install
cd ..
rm -rf openssl-$openssl_version
# Build tomcat-native from source
# Install guide: http://tomcat.apache.org/native-doc/
# Note: the source-tree depth differs between Tomcat 7 and Tomcat 8
tar xzf $tomcat_install_dir/bin/tomcat-native.tar.gz
cd tomcat-native-*-src/native/
./configure \
--with-apr=/usr/local/apr/bin/apr-1-config \
--with-ssl=/usr/local/ssl
make && make install
cd ../..
rm -rf tomcat-native-*-src
### Tune Tomcat and enable APR mode
# All APR-related libraries built above live in /usr/local/apr/lib.
# Create the environment file; Tomcat's startup script sources setenv.sh
# automatically.
Mem=`free -m | awk '/Mem:/{print $2}'`
[ $Mem -le 768 ] && Xms_Mem=`expr $Mem / 3` || Xms_Mem=256
cat > $tomcat_install_dir/bin/setenv.sh << EOF
JAVA_OPTS='-Djava.security.egd=file:/dev/./urandom -server -Xms${Xms_Mem}m -Xmx`expr $Mem / 2`m'
CATALINA_OPTS="-Djava.library.path=/usr/local/apr/lib"
EOF
# Build commons-daemon from source to produce jsvc
# (lets the Tomcat service run as $run_user instead of root)
tar zxf $tomcat_install_dir/bin/commons-daemon-native.tar.gz
cd commons-daemon-*-native-src/unix/
./configure
make
cp jsvc $tomcat_install_dir/bin -f
cd ../..
rm -rf commons-daemon-*-native-src
# Register the init script for autostart
cp -f init.d/tomcat-init /etc/init.d/tomcat
sed -i "s@^CATALINA_HOME=.*@CATALINA_HOME=$tomcat_install_dir@" /etc/init.d/tomcat
sed -i "s@^TOMCAT_USER=.*@TOMCAT_USER=$run_user@" /etc/init.d/tomcat
chmod +x /etc/init.d/tomcat
chkconfig --add tomcat
chkconfig tomcat on
# Log rotation:
# Tomcat maintains its own logs, so nothing to do here.
# Re-chown everything so the run user can still read the configs edited above
chown -R $run_user.$run_user $tomcat_install_dir_real $tomcat_web_dir $tomcat_log_dir $tomcat_weblog_dir
# Note: Tomcat file permissions are strict; after editing configuration
# files, a non-root service user may otherwise fail to read them.
# Refresh the shared-library cache after installing libraries from source
ldconfig
service tomcat start
echo "Tomcat install successfully! "
$tomcat_install_dir/bin/version.sh
# Enable Tomcat proxying in Nginx (only if not already configured)
[ -z "`grep 'location ~ \\\.jsp' /etc/nginx/nginx.conf`" ] && sed -i "s@index index.html index.php;@index index.html index.php index.jsp;\n\n location ~ \\\.jsp$ {\n index index.jsp index.html;\n proxy_pass http://localhost:8080;\n }@" /etc/nginx/nginx.conf
service nginx restart
# Create a test page
echo "This is my JSP page." > $tomcat_web_dir/index.jsp
|
gchxcy/LinuxOperator
|
Centos6/shell/installtomcat8.sh
|
Shell
|
gpl-2.0
| 6,291 |
#!/bin/bash
# Build the SEMC boot image by delegating to the generic build script.
# Forward ALL command-line arguments; quoting via "$@" keeps arguments with
# spaces intact (the old unquoted $1 would word-split, and silently dropped
# any extra arguments).
./_build.sh SEMC boot "$@"
|
kbc-developers/android_kernel_semc_xperia2011
|
build-bootimg-semc.sh
|
Shell
|
gpl-2.0
| 38 |
#! /bin/sh
# Copyright (C) 2011-2022 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# Test remake rules when a C header "guarded" by AC_SUBST'd variables
# is not needed anymore, or when it's needed again.
# This test requires some user-level machinery, overlaps with other tests,
# and is not strictly necessary per se, but it exercises a real, important
# use case (from gnulib, see:
# <https://lists.gnu.org/archive/html/bug-gnulib/2011-04/msg00005.html>
# for more info).
# A working C compiler is needed: configure runs AC_PROG_CC below.
required=cc
. test-init.sh
cat >> configure.ac <<'END'
AC_CONFIG_HEADERS([config.h])
AC_PROG_CC
MY_MACROS
AC_OUTPUT
END
# REPLACE_STDIO_H decides whether a replacement stdio.h gets generated
# (copied from stdio.in.h) or removed.
cat > Makefile.am <<'END'
ACLOCAL_AMFLAGS = -I .
noinst_PROGRAMS = foo
foo_SOURCES = foo.c
BUILT_SOURCES = $(STDIO_H)
if REPLACE_STDIO_H
stdio.h: stdio.in.h $(top_builddir)/config.status
	cp $(srcdir)/stdio.in.h $@
else
stdio.h: $(top_builddir)/config.status
	rm -f $@
endif
MOSTLYCLEANFILES = stdio.h
END
# The override_stdio knob below is what the test flips with sed to simulate
# the header being needed / not needed.
cat > macros.m4 <<'END'
AC_DEFUN([MY_MACROS], [
override_stdio=:
if $override_stdio; then
  STDIO_H=stdio.h
  use_dummies=1
else
  STDIO_H=
  use_dummies=0
fi
AC_SUBST([STDIO_H])
AC_DEFINE_UNQUOTED([USE_DUMMIES], [$use_dummies],
  [Whether to use dummy types.])
AM_CONDITIONAL([REPLACE_STDIO_H], [test -n "$STDIO_H"])
])
END
cat > stdio.in.h <<'END'
typedef struct dummyfile { void *p; } DUMMYFILE;
END
cat > foo.c <<'END'
#include <config.h>
#include <stdio.h>
#if USE_DUMMIES
DUMMYFILE *f;
#else
FILE *f;
#endif
int main () { return 0; }
END
$ACLOCAL -I .
$AUTOHEADER
$AUTOMAKE
$AUTOCONF
# Run the whole scenario twice: once in-tree, once as a VPATH build.
for vpath in : false; do
  if $vpath; then
    mkdir build
    cd build
    srcdir=..
  else
    srcdir=.
  fi
  # Do not reject slow dependency extractors: we need dependency tracking.
  $srcdir/configure --enable-dependency-tracking
  if $FGREP 'depmode=none' Makefile; then
    skip_ "automatic dependency tracking couldn't be activated"
  fi
  $MAKE
  ls -l
  test -f stdio.h
  # Simulate that we don't need our custom stdio.h anymore.
  $sleep
  sed -e 's/^\( *override_stdio\)=.*$/\1=false/' $srcdir/macros.m4 > t
  diff $srcdir/macros.m4 t && fatal_ "failed to edit macros.m4"
  mv -f t $srcdir/macros.m4
  using_gmake || $MAKE Makefile
  $MAKE
  ls -l
  test ! -e stdio.h
  # And now simulate that we want our custom stdio.h back.
  $sleep
  sed -e 's/^\( *override_stdio\)=.*$/\1=:/' $srcdir/macros.m4 > t
  diff $srcdir/macros.m4 t && fatal_ "failed to edit macros.m4"
  mv -f t $srcdir/macros.m4
  using_gmake || $MAKE Makefile
  $MAKE
  ls -l
  test -f stdio.h
  $MAKE distclean
  cd $srcdir
done
:
|
autotools-mirror/automake
|
t/remake-gnulib-remove-header.sh
|
Shell
|
gpl-2.0
| 3,155 |
# Crop the two scanned page images for OCS-279 in place with ImageMagick.
# (Each image is replaced by its cropped version; +repage resets the canvas
# offset left behind by -crop.)
crop_in_place() {
  local geometry=$1
  local image=$2
  convert "$image" -crop "$geometry" +repage "$image"
}

crop_in_place 1523x4516+59+297 images/OCS-279-A.png
crop_in_place 1575x4362+28+443 images/OCS-279-B.png
|
jonnymwalker/Staroslavjanskij-Slovar
|
scripts/cropedges.OCS-279.sh
|
Shell
|
gpl-2.0
| 198 |
#!/bin/bash
# Hot-backup regression test for TokuMX driven by sysbench-mongodb:
# stop/clean any previous server, export the benchmark configuration, unpack
# the server tarball into $MONGO_DIR, load the collections, run the benchmark
# with periodic hot backups, and finally verify every backup taken.
# NOTE(review): depends on helper commands from the surrounding toolkit
# (bkill, mongo-clean, mkmon, run.load.bash, run.benchmark.bash,
# verify-backups.bash) and on MONGO_DIR / MACHINE_NAME coming from the
# environment -- confirm before running stand-alone.
# Stop any running server and wipe old data,
pkill -9 mongo
bkill
sleep 2
mongo-clean
export TOKUMX_BUFFERED_IO=N
export MONGO_COMPRESSION=zlib
export MONGO_BASEMENT=65536
# Load phase: 8 collections x 1M documents, 1000 per insert, 8 loader threads
export NUM_COLLECTIONS=8
export NUM_DOCUMENTS_PER_COLLECTION=1000000
#export NUM_DOCUMENTS_PER_COLLECTION=200000
export NUM_DOCUMENTS_PER_INSERT=1000
export NUM_LOADER_THREADS=8
export threadCountList="0064"
export RUN_TIME_SECONDS=300
export DB_NAME=sbtest
export BENCHMARK_NUMBER=999
export WRITE_CONCERN=SAFE
# HOT BACKUPS!
export RUN_HOT_BACKUPS=Y
export RUN_HOT_BACKUPS_MBPS=75
export RUN_HOT_BACKUPS_PAUSE_SECONDS=30
export USE_TRANSACTIONS=Y
# 4 seconds for lock timeouts
#export MONGO_LOCK_TIMEOUT=4000
export MONGO_LOCK_TIMEOUT=20000
export SCP_FILES=N
# 12G
#export TOKUMON_CACHE_SIZE=12G
# 2G
export TOKUMON_CACHE_SIZE=2G
# Refuse to run unless MONGO_DIR is set, exists, and is empty,
if [ -z "$MONGO_DIR" ]; then
  echo "Need to set MONGO_DIR"
  exit 1
fi
if [ ! -d "$MONGO_DIR" ]; then
  echo "Need to create directory MONGO_DIR"
  exit 1
fi
if [ "$(ls -A $MONGO_DIR)" ]; then
  echo "$MONGO_DIR contains files, cannot run script"
  exit 1
fi
export BENCH_ID=backup-test
export MONGO_REPLICATION=Y
# TOKUMX
export TARBALL=tokumx-e-2.0-SNAPSHOT-20140924b-linux-x86_64-main.tar.gz
export MONGO_TYPE=tokumx
# MONGODB 2.2
#export TARBALL=mongodb-linux-x86_64-2.2.5
#export MONGO_TYPE=mongo
# ************************************************************************
# set to Y for a multi-directory test, N for single directory
# ************************************************************************
export MULTI_DIR=N
# unpack mongo files
echo "Creating mongo from ${TARBALL} in ${MONGO_DIR}"
pushd $MONGO_DIR
mkmon $TARBALL
popd
if [ ${MULTI_DIR} == "Y" ]; then
  # NOTE(review): MONGO_DATA_DIR must already be set here; the log dir is
  # derived from the *original* value before it is re-pointed to .../d.
  export MONGO_LOG_DIR=${MONGO_DATA_DIR}/l
  export MONGO_DATA_DIR=${MONGO_DATA_DIR}/d
  # on lex5, use a completely different filesystem (uncomment the following line)
  #export MONGO_DATA_DIR=/data.ssd/tcallaghan/data/mongo-data/d
  mkdir ${MONGO_LOG_DIR}; mkdir ${MONGO_DATA_DIR}
fi
echo "Running loader"
./run.load.bash
echo "Running benchmark"
./run.benchmark.bash
export VERIFY_LOG_NAME=${MACHINE_NAME}-test-verification.log
echo "Validating Backups" | tee -a ${VERIFY_LOG_NAME}
./verify-backups.bash
#cat ${VERIFY_LOG_NAME}
echo ""
echo "-------------------------------------------------------------------------"
echo "Test results for ${TARBALL}"
# Any of these strings in the verification log marks the run as failed,
if grep -qi "build failed\|error\|horribly wrong" ${VERIFY_LOG_NAME}
then
  echo "*** FAIL ***"
  echo "*** FAIL ***"
  echo "*** FAIL ***"
  grep -i "build failed\|error\|horribly wrong" ${VERIFY_LOG_NAME}
  echo "*** FAIL ***"
  echo "*** FAIL ***"
  echo "*** FAIL ***"
else
  echo "*** PASS ***"
  echo "*** PASS ***"
  echo "*** PASS ***"
fi
echo "-------------------------------------------------------------------------"
#mongo-clean
|
Percona-QA/toku-qa
|
tokudb/software/mongodb/sysbench-mongodb-backup-test/doit-backup-test.bash
|
Shell
|
gpl-2.0
| 2,829 |
#!/bin/bash
# Remove generated train/test data files from the current directory.
# -f: succeed silently when no matching files exist (the old plain 'rm'
#     printed an error and exited non-zero on a clean tree);
# --: end option parsing, so glob-expanded names can never be taken as flags.
rm -f -- train_*.txt test_*.txt
|
intfloat/weibo-emotion-analyzer
|
scripts/cleandata.sh
|
Shell
|
gpl-2.0
| 39 |
#!/bin/bash
# Cloudy: An open LAMP benchmark suite
# Copyright (C) 2010 Adam Litke, IBM Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
################################################################
# import-images.sh - import random image data into a populated #
# MediaWiki instance. #
################################################################
# Pull benchmark settings (e.g. IMAGE_SIZE used below) from the shared config.
. /media/config
DIR=/tmp/image-import
# Every image filename already referenced by the wiki's imagelinks table
# (tail -n +2 drops the mysql column header).
IMAGES=`echo "select il_to from imagelinks" | mysql --user=root --password=linux99 wikidb | tail -n +2`
mkdir -p $DIR
# The 8-byte PNG signature (\x89 P N G \r \n \x1a \n); prepending it makes
# the random payload written below look like a PNG file.
printf "\x89\x50\x4E\x47\x0D\x0A\x1A\x0A" > $DIR/header
# NOTE(review): $IMAGES is iterated unquoted, so filenames with whitespace
# would split -- MediaWiki image names presumably never contain spaces here.
for i in $IMAGES; do
  echo "image: $i"
  # IMAGE_SIZE random bytes per image,
  dd if=/dev/urandom of="$DIR/data" bs=1 count=$IMAGE_SIZE
  cat $DIR/header $DIR/data > "$DIR/$i"
done
# Bulk-import the generated files into MediaWiki, overwriting existing ones,
pushd /var/www/wiki
php /usr/share/mediawiki/maintenance/importImages.php --overwrite /tmp/image-import png jpg gif bmp PNG JPG GIF BMP
popd
rm -rf $DIR
exit 0
|
aglitke/cloudy
|
setupimage/import-images.sh
|
Shell
|
gpl-2.0
| 1,520 |
#!/usr/bin/env bash
# -u: treat expansion of unset variables as an error (catches typos early),
set -o nounset
# Copyright (C) 2014 Patrik Martinsson <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Mode: vim, tabstop=2; softtabstop=2; shiftwidth=2;
#
# This is the tcp_monitor.sh, man tcp_monitor for more information.
# Source is maintaned at <https://github.com/patchon/tcp_monitor>
# * * * * * * * * * * * * * * * * * * * *
# Global variables, named g_variable-name
#
# Turn on the bash's extended globbing feature. Instead of turning it off and
# in the script, we just turn it on here and leave it like that.
# (Needed for the *( ) whitespace-trimming patterns used further down.)
shopt -s extglob
# The default ss-delimiter,
# (newer iproute2 prints "pid=X,fd=Y" instead of ",X,Y"; the delimiter is
# adjusted at runtime by set_default_ss_delimiter, defined elsewhere in
# this file)
g_ss_delim=","
# Just to "keep track" of how many updates we have left,
g_cnt=0
# A parameter determining the first run,
g_first_run=1
# Simple regex that is used on various places,
# (matches a positive integer with no leading zero)
g_re_digits="^[1-9][0-9]*$"
# This defines the minimum seconds you can specify without having the
# output "cut-off"
g_option_d_threshold=5
# These are the various options,
g_opt_c_set=""
g_option_c=""
g_option_d=""
g_option_d_default=1
g_option_f=""
g_option_n=""
g_option_n_default="not_set"
g_option_n_forever="0"
# * * * * * * * * * * * *
# Function declarations,
#
# A function that makes a external call to ss, parses the output, and stores
# it an array for later use.
#
# Arguments,
#  - Function doesn't take any argument.
#
# Returns,
#  - Function doesn't "return" in that sense.
#    (Side effects: fills the global g_print_arr and updates the global
#    column widths g_min_length_col1 / g_min_length_col2.)
#
function get_output_from_ss {
  local output
  # Do the actual call to ss and remove first line,
  output=$(IFS=$'\n' ss --tcp --processes state established 2>&1)
  # The return here will most likely be zero, otherwise we have a serious
  # problem. You can never be to careful though.
  if [[ ${?} -ne 0 ]];then
    err_msg="Call to ss failed. This should not happen
    ($(strip_error "${output}"))."
    error "$err_msg"
  fi
  # Remove first line, replace spaces with *one* ; *and* remove the first two
  # columns from ss. Save the output from sed into the variable output. The
  # return from sed will not determine we actually replaces what we wanted, it
  # will only return anything else than zero if something *very odd* happened.
  # Same as above though, you can never be to careful
  output=$(echo "${output}" | sed -e 's/\s\+/;/g' \
    -e 's/[0-9]\+;[0-9]\+;//g' \
    -e '1d' 2>&1)
  if [[ ${?} -ne 0 ]];then
    err_msg="Call to sed failed. This should not happen
    ($(strip_error "${output}"))."
    error "${err_msg}"
  fi
  # Define the array where we will store data that we later want to print.
  # Also the variables for the column-width. This is used globally.
  # (Each array entry carries six ;-separated fields consumed by print_data:
  # "local;remote;duration;pid;name;warn".)
  g_print_arr=()
  g_min_length_col1=20
  g_min_length_col2=22
  # Handle the case were we dont have *any* established connections,
  if [[ -z $output ]]; then
    g_print_arr+=("no connections ;;;;;")
    return
  fi
  # Now do the actual parsing of the data,
  # (after the sed pass each row is "local-addr;remote-addr[;users-info]")
  for row in ${output}; do
    # Reset variables to store ss-output in,
    local info_col1=""
    local info_col2=""
    local info_col3=""
    # If we don't got permission to read this info, this is what we will display
    # in the "gui", if we are running as root though, we should be able to read
    # everything,
    local error="n/a"
    local warn="(try root)"
    local app_name="$error"
    local app_pid="$error"
    local app_fd="$error"
    local time_diff="$error"
    local time_fd_modified="$error"
    # Now, based on the output from ss, and the sed we did we now that each
    # column is delimited by ";", so lets just read the columns into variables,
    IFS=';' read info_col1 info_col2 info_col3 <<< "$row"
    # Set default values if we can't parse/grab output from ss,
    info_col1=${info_col1:=$error}
    info_col2=${info_col2:=$error}
    info_col3=${info_col3:=$error}
    # Now since we want a structured layout, lets calculate the min width of
    # the two first columns,
    if [[ ${#info_col1} -gt ${g_min_length_col1} ]] ; then
      g_min_length_col1=${#info_col1}
    fi
    if [[ ${#info_col2} -gt ${g_min_length_col2} ]]; then
      g_min_length_col2=${#info_col2}
    fi
    # If we are root, set a warning-message that will be displayed if we end
    # up with empty data in column3. This happens with nfs for example.
    if is_root; then
      warn="(we have no data from ss to parse)"
    fi
    # As long as the third column (the one with the actual pid/fd info) is not
    # set to our default value, we know we that we have info in there that we
    # want to parse,
    if [[ ${info_col3} != "${error}" ]]; then
      # Ok, so the parsing below is done according to the page
      # http://www.cyberciti.biz/files/ss.html. The basic idea is to extract,
      # app-name, pid and fd of the socket. The "typical" format according to
      # the page is, 'users:((app-name,X,Y))' (where X is the pid and Y the fd).
      # But that isn't totally true. First off, the appname-part is enclosed
      # with "", secondly on versions above iproute-3.12 (looking at the source
      # of ss from the iproute-package), the actual output is
      # 'users:(("app-name",pid=X,fd=Y))'. The delimiter-determination is done
      # in the set_default_ss_delimiter-function.
      # Extract the app-name,
      # NOTE(review): 'var' is not declared local and leaks into the global
      # scope.
      var="${info_col3#*\"}"
      app_name="${var%%\"*}"
      app_name="${app_name:=${error}}"
      # Extract the actual pid & fd,
      var="${info_col3#*${g_ss_delim}}"
      app_pid="${var%%,*}"
      app_pid="${app_pid:=${error}}"
      var="${info_col3##*${g_ss_delim}}"
      app_fd="${var%%)*}"
      app_fd="${app_fd:=${error}}"
      # Now we have what we need to extract the actual "last-modified-time" of
      # the actual socket. This may not be the best way of finding out when a
      # socket was created, but I haven't found any other way so I went with
      # your hint on this one. See the description for more info.
      if [[ -S /proc/${app_pid}/fd/${app_fd} ]]; then
        # There are times when the actual socket is being removed right between
        # the check above and the stat below. If that happens just skip that
        # socket since it's been removed.
        time_fd_modified=$(stat -c %Y /proc/${app_pid}/fd/${app_fd} 2>&1)
        [[ ${?} -ne 0 ]] && continue
        # Get the time-difference,
        time_diff=$(calculate_time_diff ${time_fd_modified})
      else
        # Same goes here, we had info about a socket, but now it's removed,
        # just continue
        continue
      fi
      # Reset the warning, this is just as a hint to the user to try with root
      # if values are missing. If values are missing and even though we run as
      # root, there is an actual bug in the parsing-code, and that message is
      # taken care of at the beginning.
      warn=""
    fi
    # Add the values to our array, for later printing,
    g_print_arr+=("${info_col1};${info_col2};${time_diff};${app_pid};${app_name};${warn}")
  done
}
# A function that will print the actual data stored in a global array
# (filled up by the get_output_from_ss-function),
#
# Arguments,
#  - Functions doesn't take an argument.
#
# Returns,
#  - Function doesn't "return" in that sense,
#
function print_data {
  # Just add some padding to minlength,
  local padding=5
  local min_length_col3=10
  local min_length_col4=6
  local min_length_col5=3
  g_min_length_col1=$((g_min_length_col1+=${padding}))
  g_min_length_col2=$((g_min_length_col2+=${padding}))
  # Calculate the total-width of the output,
  local total_width=$((g_min_length_col1+
  g_min_length_col2+
  min_length_col3+
  min_length_col4+
  min_length_col5))
  # Variables,
  local info_col1=""
  local info_col2=""
  local info_col3=""
  local app_pid=""
  local app_name=""
  local warn=""
  local warn_wrap=""
  local date_now=""
  # Get the time and add it to the header,
  date_now=$(date)
  if [[ ${?} -ne 0 ]];then
    # NOTE(review): ${output} is not defined in this function (it is local to
    # get_output_from_ss); under 'set -o nounset' this error path would itself
    # fail -- looks like a copy-paste leftover.
    err_msg="Call to date failed. This should not happen
    ($(strip_error "${output}"))."
    error "${err_msg}"
  fi
  # Lets print a warning to the user if the estimated width isn't enough
  # (only when writing to the terminal, i.e. no -f output file given),
  if [[ ! ${g_option_f} ]]; then
    # Do the actual printing,
    tput clear
    if ! terminal_is_wide_enough ${total_width} && [[ ${g_first_run} -eq 1 ]]; then
      warn_wrap='Warning, your output will be wrapped over multiple lines.
      \nPlease increase your windowsize (if possible) if you want
      one-line-output.'
      show_warning "$warn_wrap"
    fi
    # Set variables,
    local term_lines=$(tput lines)
    local term_max_lines=$((term_lines-5))
    local term_notice=""
  fi
  # When redirecting to a file, tell the user once where the output goes,
  if [[ ${g_option_f} && ${g_first_run} == 1 ]]; then
    msg="Output from script is being redirected to ${g_option_f} (options
    -d=$g_option_d, -n=$g_option_n ) ..."
    echo $msg
  fi
  # printf format shared by the header row and every data row,
  local format="%-${g_min_length_col1}s %-${g_min_length_col2}s %-10s %-6s %-3s %-25s"
  # Loop the actual array containing our output from ss (space delimited),
  for row in ${!g_print_arr[*]}; do
    # If it's the first iteration, print header,
    if [[ ${row} -eq 0 ]]; then
      create_top_bottom_header ${total_width}
      printf_wrapper "${format}" \
        "LOCAL_ADDR:PORT" \
        "FOREIGN_ADDR:PORT" \
        "DURATION" \
        "PID" \
        "/" \
        "PROGRAM_NAME"
      msg="Refreshing forever"
      if [[ ${g_option_n_forever} -ne 1 ]]; then
        # NOTE(review): g_option_n_static is presumably set during option
        # parsing elsewhere in this file -- confirm.
        msg="Refreshing ${g_cnt} / ${g_option_n_static}"
      fi
      printf_wrapper "%s\n" \
        "${msg} with an interval of ${g_option_d} second(s) - ${date_now}"
      create_top_bottom_header ${total_width}
    fi
    # Ok, so bear with me here,
    # - If we are not printing to a file, *and*
    # - The total amount of lines to print is more than we have lines in the
    #   terminal, *and*
    # - The delay-parameter is set below the threshold (defualt 5sec), *and*
    # - We are about to print on the max-limit-line ($term_max_line), *then*
    # - Print a message to the user about the output been cut off, due to the
    #   way we handle current printing (and break).
    if [[ ! ${g_option_f} ]]; then
      if [[ ${row} -eq ${term_max_lines} &&
        ${#g_print_arr[@]} -gt ${term_max_lines} &&
        ${g_option_d} -lt ${g_option_d_threshold} ]];then
        term_notice='(Notice : Output is cut of due to the current
        output-handling, showing '${term_max_lines}' of
        '${#g_print_arr[@]}' lines. See '${0}' -h for explanation.'
        echo ${term_notice}
        break
      fi
    fi
    # Read in the data, into vars and print them in the table,
    # (fields are ;-separated in the order written by get_output_from_ss)
    IFS=';' read info_col1 info_col2 time_diff app_pid app_name warn <<< \
      "${g_print_arr[${row}]}"
    printf_wrapper "${format}" \
      "${info_col1}" \
      "${info_col2}" \
      "${time_diff}" \
      "${app_pid}" \
      "/" \
      "${app_name} ${warn}"
    printf_wrapper "\n"
  done
}
# A function that will parse the command-line-options
#
# Arguments,
#  - The script's argument list ("$@")
#
# Returns,
#  - Function doesn't "return" in that sense,
#  - "errors out" on unknown options, missing arguments or duplicates
#
function parse_command_line {
  # Local "already seen" markers used for duplicate-option detection.
  local opt_n_set=""
  local opt_d_set=""
  local opt_f_set=""
  # Oh, yeah. This small hack justs make sure that if a user has specified,
  # an help-flag, we will show it directly. Shifting in options below is great
  # but we dont have any control over how the args come in.
  # So if a user types, $0 -d 123 --help, we would just want to show the help,
  # no more no less. This "extra loop" sorts that for us.
  re_help="-[-]*h(elp)*"
  for arg in ${@}; do
    if [[ ${arg} =~ ${re_help} ]]; then
      show_usage
    fi
  done
  # Shift through the argument list pair-wise: option, then its value.
  # (-gt is the numeric comparison; the old '[[ "${#}" > 0 ]]' compared
  # lexicographically, which only worked by accident.)
  while [[ "${#}" -gt 0 ]]
  do
    # Option to look for,
    opt="${1}"
    # Every supported option requires a value,
    [[ ! "${2:-}" ]] && error "Option ${opt} must have an argument."
    # If we have a value, strip value from leading/trailing spaces,
    # (the *( ) patterns rely on extglob, enabled at the top of the script)
    value="${2##*( )}"
    value="${value%%*( )}"
    # Test again after we remove spaces,
    [[ ! "${value:-}" ]] && error "Option ${opt} must have an argument."
    case "${opt}" in
      # Handle -c/--config-file
      -c|--config-file)
        # Make sure that the parameter hasn't already been set, and that we
        # have a valute to parse,
        is_opt_set "${g_opt_c_set}"
        validate_c_option "${opt}" "${value}"
        g_opt_c_set="${opt}"
        shift
        ;;
      # Handle -d/--delay
      -d|--delay)
        # Make sure that the parameter hasn't already been set, and that we
        # have a value to parse,
        is_opt_set "${opt_d_set}"
        [[ ! ${value:-} ]] && error "Option ${opt} must have an argument."
        validate_d_option "${opt}" "${value}"
        # Bug fix: mark the *checked* local flag (opt_d_set) as seen --
        # previously only g_opt_d_set was written, so a duplicate -d was
        # never detected. The global is still set for external readers.
        opt_d_set="${opt}"
        g_opt_d_set="${opt}"
        shift
        ;;
      # Handle -f/--output-file
      -f|--output-file)
        # Make sure that the parameter hasn't already been set, and that we
        # have a value to parse,
        is_opt_set "${opt_f_set}"
        [[ ! ${value:-} ]] && error "Option ${opt} must have an argument."
        validate_f_option "${opt}" "${value}"
        # Bug fix: same duplicate-detection fix as for -d above.
        opt_f_set="${opt}"
        g_opt_f_set="${opt}"
        shift
        ;;
      # Handle -n/--number-of-refreshes
      -n|--number-of-refreshes)
        # Make sure that the parameter hasn't already been set, and that we
        # have a value to parse,
        is_opt_set "${opt_n_set}"
        [[ ! ${value:-} ]] && error "Option ${opt} must have an argument."
        validate_n_option "${opt}" "${value}"
        # Bug fix: same duplicate-detection fix as for -d above.
        opt_n_set="${opt}"
        g_opt_n_set="${opt}"
        shift
        ;;
      # Handle everything else,
      *)
        err_msg="Unsupported opt (${opt}).\nUse -h|--help for usage-instructions"
        error "${err_msg}"
        ;;
    esac
    # Shift the argument-list,
    shift
  done
}
# A function that used to parse the specified config-file.
#
# Arguments,
#  - A string containing the config-file to parse
#
# Returns,
#  - Function doesn't "return" in that sense,
#  - "errors-out" if value doesn't gets validated
#
function parse_config_file {
  local config_file=${1}
  local cnt=0
  # Define re's for options of interest, the actual validation of the
  # values are done in validate_*_option-functions.
  local re_comment="^#.*$"
  local re_empty_lines="^$"
  local re_delay="^DELAY[ ]*="
  local re_file="^FILE[ ]*="
  local re_refreshes="^NUMBER_OF_REFRESHES[ ]*="
  # Read in the configfile and loop every line,
  # NOTE(review): 'line', 'option', 'value' and 'err_msg' are not declared
  # local and leak into the global scope.
  while read -r line; do
    cnt=$((cnt+1))
    # Skip comments and empty lines,
    [[ ${line} =~ ${re_comment} ]] && continue
    [[ ${line} =~ ${re_empty_lines} ]] && continue
    # Set the delimiter and read values,
    IFS='=' read option value <<< "${line}"
    # Remove quotes, and leading/trailing spaces,
    # (the *( ) patterns rely on extglob, enabled at the top of the script)
    value=${value//\"}
    value=${value//\'}
    value="${value##*( )}"
    value="${value%%*( )}"
    # If a line matches, first check if value already been specified
    # (by cli-opt), if not, validate value (and set it if validated),
    # i.e. command-line options always win over config-file settings.
    if [[ ${line} =~ ${re_delay} ]]; then
      [[ ${g_option_d} ]] && continue
      validate_d_option "${option}" "${value}"
    elif [[ ${line} =~ ${re_file} ]]; then
      [[ ${g_option_f} ]] && continue
      # Seriously, you actually need to glob the path to the output-file ?
      # Nope, not going to happen.
      if [[ $value =~ \* ]];
      then
        err_msg="Error parsing option '${line}' in '${config_file}' on line ${cnt}
        (globbing is output-file-name is not allowed, specify full path)."
        error "${err_msg}"
      fi
      validate_f_option "${option}" "${value}"
    elif [[ ${line} =~ ${re_refreshes} ]]; then
      [[ ${g_option_n} ]] && continue
      validate_n_option "${option}" "${value}"
    else
      err_msg="Error parsing option '${line}' in '${config_file}' on line ${cnt}"
      error "${err_msg}"
    fi
  done < "${config_file}"
}
# Validates the argument given to -c/--config-file.
#
# Arguments,
#  - The option name (kept for symmetry with the other validators)
#  - The candidate configuration-file path
#
# Returns,
#  - Nothing on success (the global g_option_c is set instead),
#  - "errors-out" via error() when the file cannot be used
#
function validate_c_option {
  local opt=${1}
  local cfg_file="${2}"

  # Accept the path only when it points at an existing, readable file.
  if ! [[ -f "${cfg_file}" ]]; then
    err_msg="Configuration-file '${cfg_file}' doesn't seem to exist
    (or atleast is not readable by the user running the script)."
    error "${err_msg}"
  fi

  g_option_c="${cfg_file}"
}
# Validates the argument given to -d/--delay.
#
# Arguments,
#  - The option name (used in error messages)
#  - The candidate delay value, in seconds
#
# Returns,
#  - Nothing on success (the global g_option_d is set instead),
#  - "errors-out" via error() when the value is rejected
#
function validate_d_option {
  local opt=${1}
  local delay=${2}
  # Refreshing less often than once every two minutes makes no sense,
  local threshold_max_delay=120

  # The delay must be a strictly positive integer,
  if ! is_number_and_not_zero "${delay}"; then
    err_msg="Option '${opt}' should be followed by a positive number
    (not '${delay}')."
    error "${err_msg}"
  fi

  # ... and must stay within a sane upper bound,
  if (( delay > threshold_max_delay )); then
    err_msg="Option '${opt}' should be reasonable (shorter than 120 seconds). It
    just doesn't make sense to refresh the screen this rarely."
    error "${err_msg}"
  fi

  # Value is verified, set variable,
  g_option_d="${delay}"
}
# A function used to validate the value given to the f-option (output file).
#
# Arguments,
#  - A string containing the option-name
#  - A string containing the value to validate
#
# Returns,
#  - Function doesn't "return" in that sense,
#  - "errors-out" if value doesn't gets validated
#
# Side effects,
#  - Sets the global g_option_f on success; may create the parent directory
#    and the (empty) output file.
#
function validate_f_option {
  local opt="${1}"
  local val_to_validate="${2}"
  # Bug fix: 'dir', 'out' and 'dir_end_re' were previously undeclared and
  # leaked into the global scope; the unused locals 'error' (which shadowed
  # the error() function name) and 'x' are dropped.
  local dir=""
  local out=""
  local dir_end_re="/$"

  # First off, figure out what we are dealing with; extract the dir-name
  # with the dirname-command rather than shell substr/globbing, since
  # parsing a dirname by hand isn't always that easy.
  # (declared and assigned separately so $? below reflects dirname itself)
  dir=$(dirname "${val_to_validate}" 2>&1)

  # dirname should not realistically fail, but always check the return-code.
  if [[ $? -ne 0 ]]; then
    err_msg="Couldn't read dirname from '${val_to_validate}' (${dir})."
    error "${err_msg}"
  fi

  # A trailing slash means we were handed a directory, not a file name.
  if [[ "${val_to_validate}" =~ ${dir_end_re} ]]; then
    err_msg='-f must have a filename as parameter and not a directory
    ('${val_to_validate}').'
    error "${err_msg}"
  fi

  # If the given directory doesn't exist, try to create it for the user,
  # bail out if that fails.
  if [[ ! -d "${dir}" ]]; then
    out=$(mkdir -p "${dir}" 2>&1)
    if [[ $? -ne 0 ]]; then
      err_msg="Directory '${dir}' doesn't seem to exist, and couldn't be created
      ('${out}')."
      error "${err_msg}"
    fi
  fi

  # Test if we can create and write to the file. The redirection is done by
  # the shell before printf runs, so we redirect stderr for the whole { }
  # group, not just the printf-builtin. Using a builtin also avoids a
  # dependency on e.g. the 'touch-command'.
  out=$({ printf "" >> "${val_to_validate}"; } 2>&1)
  if [[ $? -ne 0 ]]; then
    err_msg="Couldn't create output-file '${val_to_validate}' ($(strip_error "${out}"))."
    error "${err_msg}"
  fi

  # Value is verified, set variables,
  g_option_f="${val_to_validate}"
}
# A function that used to validate the value given to the n-option
#
# Arguments,
#  - A string containing the option-name
#  - A string containing the value to validate
#
# Returns,
#  - Function doesn't "return" in that sense,
#  - "errors-out" if value doesn't gets validated
#
function validate_n_option {
local opt=${1}
local val_to_validate=${2}
local threshold_max_refreshes=7 # Note that this is the *length* of the
# string, not the actual number itself,
# Check so it's a positive number,
if ! is_number_and_not_zero ${val_to_validate}; then
err_msg="Option '${opt}' should be followed by a positive number
(not '${val_to_validate}')."
error "${err_msg}"
fi
# Exit if the value seems unreasonable high,
# ${#val} is the string length, so anything with 7+ digits is rejected,
if [[ ${#val_to_validate} -ge ${threshold_max_refreshes} ]]; then
err_msg="Option '${opt}' should be reasonable (shorter than
${threshold_max_refreshes}). If you want an value this high, skip
this parameter as the script then will refresh forever."
error "${err_msg}"
fi
# Value is verified, set variable,
g_option_n="${val_to_validate}"
}
# A function to check necessary binaries. Even though this script is packaged
# with rpm and dependencies should be met, I always tend to be extra careful
# when it comes to this. I've worked with ~150 users for a couple of years now
# and you never know what they do with their clients.
# Lets be sure that we can find at least the essentials,
#
# Arguments,
#  - Function doesn't take any argument.
#
# Returns,
#  - Function doesn't "return" in that sense,
#  - "errors-out" via error() if any binary is missing from PATH
#
function check_necessary_binaries {
  local binaries_needed=("cat"
                         "sed"
                         "date"
                         "dirname"
                         "mkdir"
                         "tput"
                         "ss");
  # 'binary' is scoped to the function so the loop doesn't leak a global,
  local binary
  # Just loop the list and test if we can find the binary; the expansion and
  # the lookup are quoted so list entries can never be word-split or globbed,
  for binary in "${binaries_needed[@]}"; do
    if ! hash "${binary}" 2>/dev/null; then
      error "Could not find ${binary} in PATH ($PATH)"
    fi
  done
}
# A "wrapper-function" that will just test if an variable is set and print
# out an appropriate error if it is.
#
# Arguments,
#  - A string containing the variable to test,
#
# Returns,
#  - Function doesn't "return" in that sense,
#  - "errors out" if errors are found in config-file
#
function is_opt_set {
# NOTE(review): 'opt_set' lacks 'local' and thus becomes a global; callers
# pass the *value* of the option, which is then reused in the message below.
opt_set=${1}
# Give a clear message to the user that the option already been set,
# A bare [[ var ]] is true whenever the string is non-empty,
if [[ ${opt_set} ]]; then
err_msg="Option '${opt_set}' is already set, you can only set the '${opt_set}'
parameter once."
error "${err_msg}"
fi
}
# A function that will print an error and exit, no more no less.
#
# Arguments,
#  - A message to be printed.
#
# Returns,
#  - Function doesn't "return" in that sense, exits the script with status 1.
#
function error {
local message=${1}
# Drop the space from IFS so the deliberately *unquoted* expansion below
# keeps single spaces but folds the multi-line err_msg strings nicely.
# NOTE(review): this changes IFS globally, which is fine only because we
# exit immediately afterwards.
IFS=$'\t\n'
echo -e "\nError :" ${message} "\n" >&2
exit 1
}
# A function that will calculate the difference between "now" and
# and the input-parameter.
#
# Arguments,
#  - A string containing the seconds (epoch) you want to compare
#
# Returns,
#  - A string that contains the difference between the input string and "now", in
#    seconds (written to stdout).
#
function calculate_time_diff {
  local modified_at=${1}
  local now
  now=$(date +%s)
  # Emit elapsed seconds on stdout for the caller to capture,
  echo $((now - modified_at))
}
# A function that determine if the user is root or not.
#
# Arguments,
#  - Function doesn't take any argument.
#
# Returns,
#  - 0 if you are root,
#  - 1 if you aren't root
#
function is_root {
  # Compare the effective user name reported by whoami,
  if [[ "$(whoami)" == "root" ]]; then
    return 0
  fi
  return 1
}
# A "wrapper_function" for printf. If the -f-option is specified we print to
# that file instead of stdout.
#
# Arguments,
#  - An array of strings that should be printed (passed straight to printf),
#
# Returns,
#  - Function doesn't "return" in that sense,
#
function printf_wrapper {
  # An empty/unset g_option_f means "no output-file requested",
  if [[ -z ${g_option_f} ]]; then
    printf "${@}"
  else
    printf "${@}" >> "${g_option_f}"
  fi
}
# A function that will determine if the width of the terminal is enough.
#
# Arguments,
#  - An string width a width
#
# Returns,
#  - 0 if terminal is determined to be wide enough,
#  - 1 if terminal is determined to be to small,
#
function terminal_is_wide_enough {
# The fixed padding accounts for the non-data columns of the output,
local padding="75"
local term_width=$(tput cols)
local outputwidth=$((${1}+${padding}))
[[ ${term_width} -lt ${outputwidth} ]] && return 1
return 0
}
# A function that will show a "warning/tip" to the user.
#
# Arguments,
#  - A message to show,
#
# Returns,
#  - Function doesn't "return" in that sense,
#
function show_warning {
local msg=${1}
local timeout=6
echo -e ${msg}
# Count down in-place: print, sleep, then move the cursor up one line
# (cuu1) and clear it (el) so the next iteration overwrites it,
for ((sec = ${timeout} - 1; sec >= 0; sec--));do
echo "Will show output in ${sec} seconds."
sleep 1
tput cuu1
tput el
done
}
# A function that used to "strip" an error message gotten from a
# "system/command-call $(command).
#
# Arguments,
#  - A string containing the error-message to strip
#
# Returns,
#  - A string containing the actual error (written to stdout)
#
function strip_error {
  local raw=${1}
  local skip
  local stripped
  # Drop the three leading colon-separated fields (command/file/context)
  # and keep everything after them,
  IFS=':' read skip skip skip stripped <<< "${raw}"
  # The one-shot substitution trims the leading blank after the last colon,
  echo ${stripped/ /}
}
# A function that used to show the 'help-screen'.
#
# Arguments,
#  - Function doesn't take any argument.
#
# Returns,
#  - Function doesn't "return" in that sense, just exits (status 0).
#
function show_usage {
# The <<- form strips leading tabs from the here-doc body; ${0/.\//}
# below removes a leading './' from the script name,
cat <<-End-of-message
USAGE: ${0} [OPTIONS]
---------------------------------
Available Options,          Default    Min     Max
-c | --config-file          -          -       -
-d | --delay                1          1       120
-n | --number-of-refreshes  Infinite   1       No limit
-f | --output-file          stdout     -       -
See 'man ${0/.\//}' for more information.
End-of-message
exit 0
}
# A function that will create the "=" around the top-header,
#
# Arguments,
#  - Functions doesn't take an argument.
#
# Returns,
#  - Function doesn't "return" in that sense,
#
function create_top_bottom_header {
  # Fall back to a fixed width when there is no real terminal; note that
  # header_width is deliberately left global (as before),
  header_width="50"
  if [[ $TERM != "dumb" ]]; then
    header_width=$(tput cols)
  fi
  local col=0
  # Emit one '=' per column, then terminate the line,
  while [[ ${col} -lt ${header_width} ]]; do
    printf_wrapper "="
    col=$((col + 1))
  done
  printf_wrapper "\n"
}
# A function that test if the string is a "row of numbers, starting
# with a non-zero-value"
#
# Arguments,
#  - A string to validate
#
# Returns,
#  - 0 if string is digits and not starting with a 0
#  - 1 if string is anything else than above
#
function is_number_and_not_zero {
  local candidate=${1}
  # The actual pattern lives in the global g_re_digits,
  if [[ ${candidate} =~ ${g_re_digits} ]]; then
    return 0
  fi
  return 1
}
# A function that determines the delimiter for ss,
#
# Arguments,
#  - Function doesn't take any argument.
#
# Returns,
#  - Function doesn't return in that sense.
#  - Sets the g_ss_delim
#
function set_default_ss_delimiter {
# Set the default delimiter,
# Note, if the release-file isn't there we are kinda screwed anyways with
# trying to determine version. If we want this to be more portable (that
# is outside rhel, we should definitely use a better way of determining
# the output from ss). Maybe look at the actual version of ss, even though
# it wasn't really that easy to parse
if [[ -f "/etc/os-release" ]]; then
local version
local dist
local x
dist=$(grep "Red Hat" /etc/os-release 2>&1)
# If it's not rhel, determine version of fedora.
if [[ ${?} -ne 0 ]]; then
version=$(grep "VERSION_ID=" /etc/os-release 2>&1)
# Split "VERSION_ID=NN" on '=' and keep the right-hand side,
IFS='=' read x version <<< "$version"
# If version is lower than 21, use the , as delimiter
# (i.e. only Fedora 21+ switches g_ss_delim to '='),
if [[ ${version} =~ ${g_re_digits} ]]; then
if [[ ${version} -gt 20 ]]; then
g_ss_delim="="
fi
fi
fi
fi
}
# A function that just restores the screen and exits,
#
# Arguments,
#  - Function doesn't take any argument.
#
# Returns,
#  - Function doesn't return in that sense, just exits our program.
#
function exit_and_restore_screen {
# Restore screen and exit,
# rmcup leaves the alternate screen entered by smcup at startup; when -f
# is in use we never switched screens, so there is nothing to restore,
if [[ ! ${g_option_f} ]]; then
tput rmcup
fi
exit 0
}
# * * * * * * * * * * * *
# Main start,
#
# Check for needed binaries,
check_necessary_binaries
# Parse the command-line,
parse_command_line "$@"
# If an config-file is given, parse the values in there,
if [[ ${g_opt_c_set} ]]; then
parse_config_file "${g_option_c}"
fi
# Set options to defaults if not specified,
g_option_d=${g_option_d:-${g_option_d_default}}
g_option_n=${g_option_n:-${g_option_n_default}}
# This variable is "static" in the sense that we do not alter it,
g_option_n_static=${g_option_n}
# If the n-option is not set, make sure we refresh the interface
# forever. That is taken care of by the g_option_n_forever-parameter.
if [[ ${g_option_n} == "not_set" ]];then
g_option_n=1
g_option_n_forever=1
fi
# Set the default ss_delimiter,
set_default_ss_delimiter
# Save screen,
# smcup switches to the terminal's alternate screen; skipped when writing
# to a file via -f,
if [[ ! ${g_option_f} ]]; then
tput smcup
fi
# Let's trap sigint,
trap exit_and_restore_screen SIGINT
# Just loop here until given conditions are true,
while [[ ${g_cnt} -le ${g_option_n} ]]; do
# Get the data to print,
get_output_from_ss
# Print the actual data,
print_data
# Refresh at rate, min 1 sec,
sleep ${g_option_d}
# Only run as many times as specified,
# (when running "forever" the counter is simply never advanced, so the
# loop condition stays true),
if [[ ${g_option_n_forever} -ne 1 ]]; then
g_cnt=$((g_cnt+1))
fi
# Set global "state"
g_first_run="0"
done
# Just restore the screen and exit,
exit_and_restore_screen
|
patchon/tcp_monitor
|
tcp_monitor.sh
|
Shell
|
gpl-2.0
| 31,224 |
# Crop fixed page margins off the two scanned halves of OCS-235 with
# ImageMagick; geometry is WxH+X+Y and +repage resets the canvas offset.
convert images/OCS-235-A.png -crop 1479x4532+65+279 +repage images/OCS-235-A.png
#
#
#/OCS-235.png
convert images/OCS-235-B.png -crop 1569x4528+0+283 +repage images/OCS-235-B.png
#
#
#/OCS-235.png
|
jonnymwalker/Staroslavjanskij-Slovar
|
scripts/cropedges.OCS-235.sh
|
Shell
|
gpl-2.0
| 197 |
#!/bin/sh
#-------------------------------------------------------------------------
# Copyright 2010, NETGEAR
# All rights reserved.
#-------------------------------------------------------------------------
# load environment
# NOTE(review): env.sh presumably defines TMP_PREFIX, OK and ERROR used by
# the functions below -- confirm against /opt/broken/env.sh.
. /opt/broken/env.sh
# get Url for hook server
URL=`readycloud_nvram get readycloud_fetch_url`
# auth data
NAS_NAME=`readycloud_nvram get readycloud_hostname`
NAS_PASS=`readycloud_nvram get readycloud_password`
# construct comm exec
# NOTE(review): -k disables TLS certificate verification, and the
# credentials appear on the curl command line (visible in ps).
COMM_EXEC="curl --basic -k --user ${NAS_NAME}:${NAS_PASS} --url ${URL}"
#
# Post a string to the hook server by writing it to a scratch file and
# delegating to comm_post_file.
# arg: <data> [<store.path>]
#
comm_post()
{
local post="${TMP_PREFIX}/readycloud_r.post"
echo "${1}" > "${post}"
comm_post_file "${post}" "${2}" || {
#        rm -f "${post}"
return $ERROR
}
#    rm -f "${post}"
return $OK
}
#
# POST the contents of a file to the hook server; the response is stored
# in COMM_RESULT (or written to <store.path> when given).
# args: <file.path> [<store.path>]
#
comm_post_file()
{
COMM_RESULT=""
[ -z "${1}" ] && return $ERROR
if [ -z "${2}" ];
then
FULL_EXEC="\`cat "${1}" | ${COMM_EXEC} -X POST --data-binary @- 2>/dev/null\`"
#        FULL_EXEC="\`cat "${1}" | ${COMM_EXEC} -X POST --data-binary @- \`"
else
#        FULL_EXEC="\`cat "${1}" | ${COMM_EXEC} -X POST --data-binary @- > "${2}"\`"
FULL_EXEC="\`cat "${1}" | ${COMM_EXEC} -X POST --data-binary @- 2>/dev/null -o '${2}'\`"
fi
#    echo "${FULL_EXEC}"
# The command line is built as a string and expanded via eval; COMM_RESULT
# ends up holding curl's stdout,
eval COMM_RESULT="${FULL_EXEC}" || return $ERROR
return $OK
}
#
# Register this NAS with the ReadyCLOUD service and, on success, enable
# the leafp2p/xagent machinery locally.
# args: <user name> <password>
#
do_register()
{
# construct request
USER_NAME=$1
USER_PASS=$2
temp_dir=$3
XAGENT_ID=$(readycloud_nvram get x_agent_id)
MODEL=$(remote_smb_conf -get_model_name)
USE_XCLOUD=$(readycloud_nvram get readycloud_use_xcloud)
FIRMWARE_VERSION=`version | sed -n 2p | awk -F "/" '{print $2}' | sed -r 's/^.{1}//'`
#get second line of "version" command output
#get second part of "U12H270T00/V1.0.3.49/20140403_xAgent" line (version)
#and removing first character "V" from it
#output - "1.0.3.49"
# Build the registration XML payload by string concatenation,
DATA="<?xml version=\"1.0\" encoding=\"utf-8\"?>"
DATA="${DATA}<request moniker=\"/root/devices\" method=\"register\">"
DATA="${DATA}<body type=\"registration\">"
DATA="${DATA}<username>${USER_NAME}</username>"
DATA="${DATA}<password>${USER_PASS}</password>"
DATA="${DATA}<model>${MODEL}</model>"
DATA="${DATA}<firmware_id>${FIRMWARE_VERSION}</firmware_id>"
if [ $USE_XCLOUD -eq 1 ]; then
DATA="${DATA}<x_agent_id>${XAGENT_ID}</x_agent_id>"
fi
DATA="${DATA}<license><LicenseKey>sdfsfgjsflkj</LicenseKey><hardwareSN>`burnsn 2>&1 | sed 's/[a-z -]//g'`</hardwareSN><StartTime>0</StartTime><ExpiredTime>999</ExpiredTime><valid>true</valid></license>"
DATA="${DATA}</body></request>"
# On a "SUCCESS" reply, persist state and kick the watchdog,
comm_post "${DATA}" && {
if [ "xSUCCESS" = "x${COMM_RESULT}" ]; then
readycloud_nvram set readycloud_registration_owner=${USER_NAME}
readycloud_nvram set leafp2p_run="1"
readycloud_nvram set x_force_connection="1"
readycloud_nvram commit
kill -SIGHUP `cat /tmp/xagent_watchdog.pid`
internet set connection readycloud 1
return $OK
fi
}
echo "Invalid User Name or Password"
return $ERROR
}
#
# Unregister this NAS from the ReadyCLOUD service and disable the local
# leafp2p machinery on success.
# arg: <user name> <password>
#
do_unregister()
{
# construct request
USER_NAME=$1
USER_PASS=$2
DATA="<?xml version=\"1.0\" encoding=\"utf-8\"?>"
DATA="${DATA}<request moniker=\"/root/devices\" method=\"unregister\">"
DATA="${DATA}<body type=\"registration\">"
DATA="${DATA}<username>${USER_NAME}</username>"
DATA="${DATA}<password>${USER_PASS}</password>"
DATA="${DATA}<license><LicenseKey>sdfsfgjsflkj</LicenseKey><hardwareSN>2496249</hardwareSN><StartTime>0</StartTime><ExpiredTime>999</ExpiredTime><valid>true</valid></license>"
DATA="${DATA}</body></request>"
comm_post "${DATA}" && {
if [ "xSUCCESS" = "x$COMM_RESULT" ]; then
readycloud_nvram set readycloud_registration_owner=""
readycloud_nvram set leafp2p_run="0"
readycloud_nvram set x_force_connection
readycloud_nvram commit
internet set connection readycloud 0
return $OK
fi
}
echo "Connect to Server fail, Please check inernet connection"
return $ERROR
}
#
# Push a new device alias to the hook server; on success, signal the
# running leafp2p daemon to pick it up.
# args: <alias>
#
do_updatealias()
{
# construct request
ALIAS=$1
DATA="<?xml version=\"1.0\" encoding=\"utf-8\"?>"
DATA="${DATA}<request moniker=\"/root/devices\" method=\"updatealias\">"
DATA="${DATA}<body type=\"alias\">"
DATA="${DATA}<alias>${ALIAS}</alias>"
DATA="${DATA}</body></request>"
comm_post "${DATA}" && {
if [ "xSUCCESS" = "x$COMM_RESULT" ]; then
# SIGUSR1 tells leafp2p to reload its configuration,
pidof leafp2p | xargs kill -USR1
#            $readycloud_nvram set leafp2p_device_alias="${ALIAS}"
#            $readycloud_nvram commit >/dev/null
#            echo "Updated Device Alias Successfully"
#            echo ok
return $OK
fi
}
echo "Update alias error: connect to Server fail, Please check inernet connection"
return $ERROR
}
|
hajuuk/R7000
|
src/router/arm-uclibc/target/opt/broken/comm.sh
|
Shell
|
gpl-2.0
| 4,742 |
#! /bin/sh
#
# Copyright 1999-2008 Sun Microsystems, Inc.  All Rights Reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
# CA 95054 USA or visit www.sun.com if you need additional information or
# have any questions.
#
# jtreg-style launcher: pick platform path/file separators, then run the
# ADatagramSocket test with the test classes prepended to the bootclasspath.
# set platform-dependent variables
OS=`uname -s`
case "$OS" in
SunOS )
PATHSEP=":"
FILESEP="/"
;;
Linux )
PATHSEP=":"
FILESEP="/"
;;
CYGWIN* )
PATHSEP=";"
FILESEP="/"
;;
Windows* )
PATHSEP=";"
FILESEP="\\"
;;
* )
echo "Unrecognized system!"
exit 1;
;;
esac
# TESTJAVA and TESTCLASSES are provided by the jtreg harness environment,
${TESTJAVA}${FILESEP}bin${FILESEP}java -Xbootclasspath/p:${TESTCLASSES} ADatagramSocket true
|
TheTypoMaster/Scaper
|
openjdk/jdk/test/java/net/DatagramSocket/SetDatagramSocketImplFactory/ADatagramSocket.sh
|
Shell
|
gpl-2.0
| 1,468 |
# Regenerate the Go protobuf bindings for the kit server into ./kit_ds,
# one protoc invocation per .proto file,
echo "enum.proto"
protoc --proto_path=../../../../kit/prototype/src/common/protobuf/ --proto_path=../../../../kit/prototype/src/server/protobuf/ --go_out=./kit_ds ../../../../kit/prototype/src/server/protobuf/enum.proto
echo "msg_entity.proto"
protoc --proto_path=../../../../kit/prototype/src/common/protobuf/ --proto_path=../../../../kit/prototype/src/server/protobuf/ --go_out=./kit_ds ../../../../kit/prototype/src/server/protobuf/msg_entity.proto
echo "kit_ds.proto"
protoc --proto_path=../../../../kit/prototype/src/common/protobuf/ --proto_path=../../../../kit/prototype/src/server/protobuf/ --go_out=./kit_ds ../../../../kit/prototype/src/server/protobuf/kit_ds.proto
|
nobugtodebug/stresstest
|
genproto.sh
|
Shell
|
gpl-2.0
| 676 |
#!/usr/bin/env bash
#
# iptables configuration script for a Workbench Tool
#
IP_LOCAL_NETWORK="192.168.3.0/24"
IPS_FOR_SSH="${IP_LOCAL_NETWORK}"
# Flush all current rules from iptables
#
iptables -F
#
# Allow SSH connections on tcp port 22
# This is essential when working on remote servers via SSH to prevent locking yourself out of the system
# (the SSH ACCEPT rules are added *before* the default INPUT policy is
# switched to DROP below),
#
for ip_for_ssh in $IPS_FOR_SSH
do
iptables -A INPUT -p tcp -s $ip_for_ssh --dport 22 -j ACCEPT
done
#
# Set default policies for INPUT, FORWARD and OUTPUT chains
#
iptables -P INPUT DROP
iptables -P FORWARD DROP
iptables -P OUTPUT ACCEPT
#
# Set access for localhost
#
iptables -A INPUT -i lo -j ACCEPT
#
# Accept packets belonging to established and related connections
#
iptables -A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT
#
# Save settings
# NOTE(review): 'service iptables save' is RHEL/CentOS-specific -- confirm
# the target distribution before reuse,
#
/sbin/service iptables save
#
# List rules
#
iptables -L -v
|
NERC-CEH/jules-jasmin
|
majic_web_service/configuration_scripts/firewall_tool.sh
|
Shell
|
gpl-2.0
| 885 |
# Rebuild the ctags index and the cscope database (-R recurse, -k kernel
# mode / skip /usr/include, -b build only, -q create inverted index),
ctags -R .
cscope -Rkbq
|
sundream/develop
|
.vim/.vim/tags/cur_ctags_cscope.sh
|
Shell
|
gpl-2.0
| 24 |
#!/bin/bash
# Interactive menu wrapping the everyday svn operations.
#
# NOTE(review): originally #!/bin/sh -- switched to bash because the script
# depends on 'echo -e', whose behaviour plain sh does not guarantee.
while true
do
	echo "Select"
	echo -e "\t1 to enter SVN"
	echo -e "\t2 to commit"
	echo -e "\t3 to update"
	echo -e "\t4 to check the difference between working copy and Head revision"
	echo -e "\t5 to add a directory or a file"
	echo -e "\t6 to create a new repositry"
	echo -e "\t7 to clear the Terminal"
	echo -e "\t8 to EXIT"
	# Stop cleanly on EOF instead of spinning forever on a failed read,
	read -r text || exit 0
	case "$text" in
		# Run the commands directly; the original wrapped ssh/svnadmin in
		# backticks, which executed their *output* as a command -- a bug.
		1) ssh [email protected] ;;
		2) echo "Enter the message to be shown when successfully committed"
			read -r msg
			svn commit -m "$msg" ;;
		3) echo -e "\tPress H to update to Head revision \n\tPress S to update to Specific revision"
			read -r update_choice
			case "$update_choice" in
				H) svn update ;;
				S) echo "Enter the revision number"
					read -r r
					svn update -r"$r" ;;
			esac ;;
		4) svn diff ;;
		5) echo "Enter the path of directory or file"
			read -r path
			svn add "$path" ;;
		6) echo "Enter repositry name"
			read -r repo_name
			svnadmin create "$repo_name" ;;
		7) clear ;;
		8) exit ;;
		*) echo -e "\n\t\t!!!!! Enter a valid input !!!!!\n" ;;
	esac
done
|
varun13169/AVR-C_projects-IIITD_ELD_assignments
|
svn.sh
|
Shell
|
gpl-2.0
| 1,096 |
#!/bin/bash
# Block until the JACK server is up, then load the global FX patch in Carla,
jack_wait --wait
carla daemons/global-fx.carxp
|
ViktorNova/stagecraft-daemons
|
daemons/global-fx-carla.sh
|
Shell
|
gpl-2.0
| 59 |
#!/bin/sh
# Zabbix-style external check: fetch systeminfo.xml from host $1 and
# validate it; prints 0 (OK) or 2 (problem) plus a reason on stderr.
TMP=/tmp/$$-ms.xml
# Print status $1 and reason $2, clean up and terminate,
die ()
{
echo $1
echo $2 >&2
rm -f $TMP
exit 0
}
# Credentials are embedded in the URL ('$' escaped for the shell),
wget --timeout=30 -q -O - "http://viewer:Pa\$\$w0rd!@$1/RCServer/systeminfo.xml" > $TMP
[ ! -f $TMP ] && die 2 "Error obtaining XML from service"
[ ! -s $TMP ] && die 2 "XML is empty"
# validate
xml val $TMP 2>&1 > /dev/null; err=$?
if test $err -eq 0; then
die 0 ""
else
die 2 "XML is not valid"
fi
|
Shmuma/z
|
misc/pairs/external/ms_xml.sh
|
Shell
|
gpl-2.0
| 407 |
#!/usr/bin/haserl
<%
# Gargoyle CGI handler (haserl): validate the session cookie, then point the
# firewall DMZ redirect at FORM_dmzip and update the n2n virtual IP.
eval $( gargoyle_session_validator -c "$COOKIE_hash" -e "$COOKIE_exp" -a "$HTTP_USER_AGENT" -i "$REMOTE_ADDR" -r "/login1.asp" -t $(uci get gargoyle.global.session_timeout) -b "$COOKIE_browser_time" )
echo "Content-Type: application/json"
echo ""
# Forward all WAN traffic to the chosen DMZ host,
uci set firewall.dmz=redirect
uci set firewall.dmz.src=wan
uci set firewall.dmz.dest_ip="$FORM_dmzip"
uci set firewall.dmz.target=DNAT
uci set firewall.dmz.proto=all
# Echo the applied address back to the caller as JSON,
echo "{"
echo "\"ipaddr\":\"$FORM_dmzip\""
echo "}"
uci commit firewall
/etc/init.d/firewall restart 2>&1 > /dev/null
uci set n2n_v2.edge.ipaddr=$FORM_virtualip
uci commit n2n_v2
/etc/init.d/n2n_v2 restart 2>&1 > /dev/null
uci set network.n2n0.ipaddr=$FORM_virtualip
uci commit network
# NOTE(review): this invokes the init script with no action argument --
# presumably 'restart' was intended; confirm before changing,
/etc/init.d/network 2>&1 > /dev/null
%>
|
link4all/20170920openwrt
|
own_files/mt7628/files_hangzhou_ddc/www/cgi-bin/dmz.sh
|
Shell
|
gpl-2.0
| 768 |
#!/bin/sh
#
## Copyright (C) 1996-2015 The Squid Software Foundation and contributors
##
## Squid software is distributed under GPLv2+ license and includes
## contributions from numerous individuals and organizations.
## Please see the COPYING and CONTRIBUTORS files for details.
##

# Interactively feed Negotiate authentication tokens typed on stdin to a
# proxy and show the raw HTTP exchange (requests are echoed to stderr).
#
# Usage: $0 URL [server [port]]

url=$1
proxy=${2:-localhost}
port=${3:-3128}

if [ $# -lt 1 ]; then
	echo "Usage: $0 URL [server port]"
	exit 1
fi

echo "blob # partial message"
echo "SLEEP=.. # Delay. Can be combined with the others by using ;"
echo "USER=... # Success"
echo "BAD.. # Helper failure"
echo "ERR.. # Login Failure"

while read -r auth; do
	echo "GET $url HTTP/1.0"
	if [ -n "$auth" ]; then
		echo "Proxy-Authorization: Negotiate $auth"
	fi
	echo "Proxy-Connection: keep-alive"
	echo
# Fix: honour the server/port arguments -- the original always connected
# to localhost:3128 regardless of $proxy/$port.
done | tee -a /dev/fd/2 | nc "$proxy" "$port"
|
krichter722/squid
|
test-suite/run_negotiate_test.sh
|
Shell
|
gpl-2.0
| 811 |
#!/usr/bin/env bash
# Start the thin test server on port 9876 with the development SSL
# key/certificate pair from $SM_CONF,
thin start --ssl --ssl-key-file $SM_CONF/development/certificates-localhost.ssl/localhost.ssl.key --ssl-cert-file $SM_CONF/development/certificates-localhost.ssl/localhost.ssl.crt -p 9876
|
salemove/media_stream_inspector
|
spec/test_server/start.sh
|
Shell
|
gpl-2.0
| 208 |
#!/bin/sh
# Copyright 2005 Eduardo Sztokbant <[email protected]>
#
# This file is part of stegotools.
#
# stegotools is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# stegotools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with stegotools; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

##########
# test.sh
# stegotools testing shell-script
######################################
# NOTE(review): the shebang originally sat *below* the license block, where
# the kernel ignores it; it has been moved to line 1.

# GNU md5sum is installed as gmd5sum on FreeBSD,
if [ "`uname`" = "FreeBSD" ]; then
	md5app=gmd5sum
else
	md5app=md5sum
fi

# Hide a known message in the sample bitmap, read it back and compare the
# md5 of input and output message.
#   $1 - number of last bits used for embedding
integrity_test()
{
	bits=$1
	echo -n " input message md5 = output message md5 (n_last_bits = ${bits}): "
	md5tux="`$md5app samples/tux.xpm | cut -d ' ' -f 1`"
	cat samples/tux.xpm | ./stegwrite samples/Maddog-Du.bmp ./tmp.bmp $bits 2>/dev/null
	./stegread ./tmp.bmp $bits 2>/dev/null > ./tmp.xpm
	md5tmpxpm="`$md5app ./tmp.xpm | cut -d ' ' -f 1`"
	if [ "$md5tmpxpm" = "$md5tux" ]; then
		echo "OK"
	else
		echo "FAILED"
		echo " md5in : $md5tux"
		echo " md5out: $md5tmpxpm"
	fi
}

# Embed a message larger than the carrier can hold and verify that the size
# stegwrite reports as truncated matches the size actually restored.
#   $1 - number of last bits used for embedding
#   $2 - path of the (oversized) input message
truncate_test()
{
	bits=$1
	input=$2
	echo -n " truncate test (n_last_bits = ${bits}): "
	truncsize="`cat $input | ./stegwrite samples/Maddog-Du.bmp tmp.bmp $bits 2>/dev/null | awk '{ print $6 }'`"
	./stegread tmp.bmp $bits 2>/dev/null > tmp.txt
	restoresize="`ls -la ./tmp.txt | awk '{ print $5 }'`"
	if [ "$truncsize" = "$restoresize" ]; then
		echo "OK"
	else
		echo "FAILED"
		echo " truncated size : $truncsize"
		echo " restored size : $restoresize"
	fi
}

# TESTS 1a-1c: message integrity for every supported bit-depth,
integrity_test 1
integrity_test 2
integrity_test 4

# TEST 2: bitmap size test -- hiding data must not change the carrier size,
echo -n " input bmp size = output bmp size: "
inputbmpsize="`ls -la samples/Maddog-Du.bmp | awk '{ print $5 }'`"
outputbmpsize="`ls -la ./tmp.bmp | awk '{ print $5 }'`"
if [ "$inputbmpsize" = "$outputbmpsize" ]; then
	echo "OK"
else
	echo "FAILED"
	echo " input bmp size : $inputbmpsize"
	echo " output bmp size: $outputbmpsize"
fi

# TEST 3: input bmp md5 != output bmp md5 -- the payload must actually have
# modified the carrier bits,
echo -n " input bmp md5 != output bmp md5: "
inputbmpmd5="`$md5app samples/Maddog-Du.bmp | cut -d ' ' -f 1`"
outputbmpmd5="`$md5app tmp.bmp | cut -d ' ' -f 1`"
if [ "$inputbmpmd5" = "$outputbmpmd5" ]; then
	echo "FAILED"
	echo " input bmp identical to output bmp"
	echo " md5: $inputbmpmd5"
else
	echo "OK"
fi

# TESTS 4a-4c: truncation handling. Test 4c feeds the carrier bitmap itself
# as the message, exactly as the original script did,
truncate_test 1 samples/Linux-ChangeLog-2.6.5
truncate_test 2 samples/Linux-ChangeLog-2.6.5
truncate_test 4 samples/Maddog-Du.bmp

# CLEANUP (-f so a partially failed run doesn't abort the cleanup),
rm -f ./tmp.bmp ./tmp.xpm ./tmp.txt
|
sztokbant/stegotools
|
test.sh
|
Shell
|
gpl-2.0
| 4,777 |
#!/bin/sh
# Hotplug handler for the USB 3G modem (imx25 boards).
#
# Arguments (from hotplug2):
#   $1 - tty device name (e.g. ttyACM0, ttyACM2)
#   $2 - action, "add" or "remove"
#   $4 - role; "br" starts the wan daemons on attach
#
# Fix: all positional parameters are now quoted and compared with POSIX '='
# ('==' is non-portable under /bin/sh, and unquoted $1/$2/$4 made the tests
# error out whenever an argument was missing).
TTYACM=ttyACM2
TTYGS=ttyGS0
REMOVE=remove
ADD=add

# First attach of the control tty: switch the Icera modem into the right
# mode unless a firmware download is in progress (/tmp/icedload marker),
if [ "$1" = "ttyACM0" ]; then
	if [ "$2" = "$ADD" ]; then
		if [ ! -e /tmp/icedload ]; then
			gcom -d /dev/ttyACM0 -s /etc/gcom/icera_setmode.gcom
		fi
	fi
fi

if [ "$1" = "$TTYACM" ]; then
	if [ "$2" = "$ADD" ]; then
		echo "device is $1 action is $2"
		# Enable auto suspend for 3G module
		# echo auto > /sys/bus/usb/devices/1-1/power/level
		if [ "$4" = "br" ]; then
			# ifup wan
			killall bandluxed
			bandluxed &
			sleep 3
			MonitorWan -b
		fi
	else
		echo "device is $1 action is $2"
		# Device went away: tear down every daemon tied to the modem,
		killall MonitorWan
		kill -9 `pidof MonitorWan`
		killall udhcpc
		killall bandluxed
		/bin/kill `/bin/pidof pppd`
	fi
else
	echo "other device"
fi
|
stevelord/PR30
|
target/linux/imx25/base-files/etc/br_hotplug2.sh
|
Shell
|
gpl-2.0
| 723 |
#!/bin/sh
#Author: Romain Goffe
#Date: 07/05/2011
#Description: Generate an sb file containing all the songs that are not
# already in previous volumes
GREP="$GREP_OPTIONS"
export GREP_OPTIONS=""
BOOKS_DIR="books"
#all songs
cd songs
ls -1 */*.sg > ../res1
cd ..
#get volume 1 list
# tail skips the sb header, head drops the closing "]"/"}" lines; the sed
# below strips the JSON-ish quoting so the lists are plain song paths,
# NOTE(review): volume 1 uses 'tail -n +13' while volumes 2/3 use '+14' --
# presumably their headers differ by one line; confirm before unifying.
tail -n +13 "$BOOKS_DIR/volume-1.sb" > tmp1
head -n -2 tmp1 > list1
sed -i -e "s/\",//g" -e "s/ \"//g" -e "s/\"//g" list1
#remove volume 1 songs
grep -vf list1 res1 > res2
#get volume 2 list
tail -n +14 "$BOOKS_DIR/volume-2.sb" > tmp2
head -n -2 tmp2 > list2
sed -i -e "s/\",//g" -e "s/ \"//g" -e "s/\"//g" list2
#remove volume 2 songs
grep -vf list2 res2 > res3
#get volume 3 list
tail -n +14 "$BOOKS_DIR/volume-3.sb" > tmp3
head -n -2 tmp3 > list3
sed -i -e "s/\",//g" -e "s/ \"//g" -e "s/\"//g" list3
#remove volume 3 songs
grep -vf list3 res3 > res4
#format song list
# re-quote the remaining songs and trim the trailing ",\n" from the last one,
sed -i -e "s/^/ \"/g" -e "s/$/\",/g" res4
head -c -2 res4 > res
#make volume 4 sb file
cat utils/header-last-volume > "$BOOKS_DIR/volume-4.sb"
cat res >> "$BOOKS_DIR/volume-4.sb"
echo "]" >> "$BOOKS_DIR/volume-4.sb"
echo "}" >> "$BOOKS_DIR/volume-4.sb"
#remove tmp files
rm -f res res1 res2 res3 res4 list1 list2 list3 tmp1 tmp2 tmp3
export GREP_OPTIONS="$GREP"
|
BeXa/songbook
|
utils/last-volume.sh
|
Shell
|
gpl-2.0
| 1,246 |
#!/bin/bash
# Boot a User-Mode-Linux kernel with a Fedora 14 root filesystem and 256M RAM,
cd /home/germaaan/Universidad/2o/SO
chmod u+x kernel32-3.0.4
./kernel32-3.0.4 ubda=./Fedora14-x86-root_fs mem=256M
|
germaaan/trabajos_universidad
|
2GII/SO/practica_01/codigo/sesion01_01.sh
|
Shell
|
gpl-2.0
| 127 |
#!/bin/sh
# Build the vWLC qcow2 image: mount the raw image, copy the nfvops scripts
# into it, unmount, then convert raw -> qcow2.
ROOT=$( pwd )
BUILD=${ROOT}/build
RAWIMAGE="${ROOT}/raw/f300-raw.img"
TARGET_IMG="vWLC_f300_$(date +%d_%h).qcow2"
#Mount the partitions
./updateimage.sh $RAWIMAGE attach
#copy the Scripts
mkdir -pv /mnt/part2/opt/nfvops
cp -dpRvf ${ROOT}/scripts/* /mnt/part2/opt/nfvops
#Detach the partitions
./updateimage.sh $RAWIMAGE detach
echo "Creating QCOW2 Image......"
sleep 3
#Convert to QCOW2
qemu-img convert -f raw -O qcow2 $RAWIMAGE ${BUILD}/${TARGET_IMG}
|
nfvguru/LFSCloudImage
|
tools/build_wlc_image.sh
|
Shell
|
gpl-2.0
| 469 |
#!/bin/bash
### Propmt String Compiler ##
# Version: 0.1
# Copyright: Michael Schönitzer
# License: Gnu General Public License Version 3 or higher (GPLv3)
#
# You can write youre wished prompt string in a better readable way and
# then generate the valid prompt string with this script.
# ANSI SGR escape sequences, pre-escaped so they survive the sed
# substitutions below and end up as \[\e[..m\] in the generated PS1,
black='\\[\\e[30m\\]'; red='\\[\\e[31m\\]'
green='\\[\\e[32m\\]'; yellow='\\[\\e[33m\\]'
blue='\\[\\e[34m\\]'; purple='\\[\\e[35m\\]'
cyan='\\[\\e[36m\\]'; white='\\[\\e[37m\\]'
blackb='\\[\\e[40m\\]'; redb='\\[\\e[41m\\]'
greenb='\\[\\e[42m\\]'; yellowb='\\[\\e[43m\\]'
blueb='\\[\\e[44m\\]'; purpleb='\\[\\e[45m\\]'
cyanb='\\[\\e[46m\\]'; whiteb='\\[\\e[47m\\]'
bold='\\[\\e[1m\\]'; boldoff='\\[\\e[21m\\]'
light='\\[\\e[2m\\]'; lightoff='\\[\\e[22m\\]'
it='\\[\\e[3m\\]'; itoff='\\[\\e[23m\\]'
ul='\\[\\e[4m\\]'; uloff='\\[\\e[24m\\]'
inv='\\[\\e[7m\\]'; invoff='\\[\\e[27m\\]'
reset='\\[\\e[0m\\]'
if [ $# -ne 1 -o ! -f "$1" ] ; then
echo -e '\e[31m\e[1m\tPropmt String Compiler \e[0m' 1>&2
echo -e 'Hint: press ctr+D to finish'
fi
# One long pipeline: each sed replaces one #token with its escape sequence.
# NOTE(review): the substitution order matters (e.g. '#bold' before
# '#-bold' is safe only because neither token is a substring of the other);
# be careful when adding new tokens.
generatedPS=$(cat $1 |
# Fontcolors
## Syntax: #<color>
sed "s/#black/$black/g" |
sed "s/#red/$red/g" |
sed "s/#green/$green/g" |
sed "s/#yellow/$yellow/g" |
sed "s/#blue/$blue/g" |
sed "s/#purple/$purple/g" |
sed "s/#cyan/$cyan/g" |
sed "s/#white/$white/g" |
# Backgroundcolors
## Syntax: #bg:<color>
sed "s/#bg:black/$blackb/g" |
sed "s/#bg:red/$redb/g" |
sed "s/#bg:green/$greenb/g" |
sed "s/#bg:yellow/$yellowb/g" |
sed "s/#bg:blue/$blueb/g" |
sed "s/#bg:purple/$purpleb/g" |
sed "s/#bg:cyan/$cyanb/g" |
sed "s/#bg:white/$whiteb/g" |
# Fontstyles
## Syntax: #<style>
sed "s/#bold/$bold/g" |
sed "s/#light/$light/g" |
sed "s/#it/$it/g" |
sed "s/#ul/$ul/g" |
sed "s/#inv/$inv/g" |
# Disable fontstyles
## Syntax: #-<style>
sed "s/#-bold/$boldoff/g" |
sed "s/#-light/$lightoff/g" |
sed "s/#-it/$itoff/g" |
sed "s/#-ul/$uloff/g" |
sed "s/#-inv/$invoff/g" |
# Disable all
## Syntax: #-- OR #reset
sed "s/#--/$reset/g" |
sed "s/#reset/$reset/g" |
sed 's/^ //g' | # Remove spaces at begining
sed 's/\([^\]\) \+/\1/g' | # Remove spaces in lines
sed 's/\\ / /g' | # Extract saved spaces
sed 's/\\g/$(__git_ps1 " %s")/g' | # Git PS
sed 's/\\g/$(__git_ps1 " %s")/g' | # alt
tr -d '\n' | # Remove newlines
sed 's/\\n/\n/g' # Extract saved lines
)
echo "$generatedPS"
#echo -e "$generatedPS" 1>&2
## To fix/add: ##
# - interactive mode
# - preview
# - commands for \.-stuf?
# - command for $? and similar?
# - tricks like moving, etc
# - colornames <-> systemscolors
# - …
|
Nudin/betterbash
|
psc.sh
|
Shell
|
gpl-3.0
| 2,637 |
#!/bin/sh
# $XFree86: xc/programs/xterm/vttests/8colors.sh,v 1.4 2002/09/30 00:39:08 dickey Exp $
#
# -- Thomas Dickey (1999/3/27)
# Show a simple 8-color test pattern
# NOTE: several quoted strings below contain literal ESC (0x1b) bytes.
ESC=""
CMD='echo'
OPT='-n'
SUF=''
TMP=/tmp/xterm$$
# Probe how this shell suppresses the trailing newline: first try
# `echo -n`; if that leaves output in $TMP, fall back to printf/print
# with a trailing "\c".
eval '$CMD $OPT >$TMP || echo fail >$TMP' 2>/dev/null
( test ! -f $TMP || test -s $TMP ) &&
for verb in printf print ; do
    rm -f $TMP
    eval '$verb "\c" >$TMP || echo fail >$TMP' 2>/dev/null
    if test -f $TMP ; then
        if test ! -s $TMP ; then
            # an empty file means "$verb \c" printed nothing extra — use it
            CMD="$verb"
            OPT=
            SUF='\c'
            break
        fi
    fi
done
rm -f $TMP
# Reset all attributes when the script exits or is interrupted, so the
# terminal is not left with odd colors.
trap '$CMD $OPT "[0m"; exit' 0 1 2 5 15
echo "[0m"
# Endless display loop: one block per attribute, one row per foreground
# color, one column per background color.
while true
do
    for AT in 0 1 4 7
    do
        case $AT in
        0) attr="normal  ";;
        1) attr="bold    ";;
        4) attr="under   ";;
        7) attr="reverse ";;
        esac
        for FG in 0 1 2 3 4 5 6 7
        do
            case $FG in
            0) fcolor="black   ";;
            1) fcolor="red     ";;
            2) fcolor="green   ";;
            3) fcolor="yellow  ";;
            4) fcolor="blue    ";;
            5) fcolor="magenta ";;
            6) fcolor="cyan    ";;
            7) fcolor="white   ";;
            esac
            $CMD $OPT "[0;${AT}m$attr"
            $CMD $OPT "[3${FG}m$fcolor"
            for BG in 1 2 3 4 5 6 7
            do
                case $BG in
                0) bcolor="black   ";;
                1) bcolor="red     ";;
                2) bcolor="green   ";;
                3) bcolor="yellow  ";;
                4) bcolor="blue    ";;
                5) bcolor="magenta ";;
                6) bcolor="cyan    ";;
                7) bcolor="white   ";;
                esac
                $CMD $OPT "[4${BG}m$bcolor"
            done
            echo "[0m"
        done
        sleep 1
    done
done
|
chriskmanx/qmole
|
QMOLEDEV/vnc-4_1_3-unixsrc/unix/xc/programs/xterm/vttests/8colors.sh
|
Shell
|
gpl-3.0
| 1,451 |
#!/bin/bash
# Locate files matching each given name/pattern (relative to the parent
# directory) and open them in the kate editor.

# open PATTERN... — find and open every match for each pattern.
function open
{
	cd ../ || exit 1
	for var in "$@"
	do
		# -exec ... {} + hands paths to kate without word-splitting,
		# so file names containing spaces survive (the original used
		# an unquoted backtick substitution).
		find . -name "${var}" -exec kate {} +
	done
}

# Print a short usage message.
function explain
{
	echo "You should use this script as following:"
	echo
	echo "$0 <[FILE] | [PATTERN]>"
}

[[ $# -eq 0 ]] && explain
# Accept one or more patterns (the loop always supported several, but
# the original guard only fired for exactly one argument).
[[ $# -ge 1 ]] && open "$@"
|
Orion-Community/andromeda
|
src/scripts/open.sh
|
Shell
|
gpl-3.0
| 256 |
#!/bin/sh
# Render a Gource visualisation of the repository history into an H.264
# video (BuildTools/Gource/SwiftGource.mp4).
# Can be 25, 30, or 60 (per Gource)
FRAMERATE=25
BITRATE=3000K
# Fetch contributor avatars first so Gource can display user images.
BuildTools/Gource/GetGravatars.py BuildTools/Gource/UserImages
# Stream raw PPM frames from Gource straight into ffmpeg; any extra
# command-line arguments ($@) are forwarded to Gource.
BuildTools/Gource/RunGource.sh --disable-progress --stop-at-end -640x360 $@ --output-framerate $FRAMERATE --output-ppm-stream - | ffmpeg -y -b $BITRATE -r $FRAMERATE -f image2pipe -vcodec ppm -i - -vcodec libx264 -vpre default BuildTools/Gource/SwiftGource.mp4
|
marosi/SocialDesktopClient
|
plugins/buddycloud/3rdparty/swift/BuildTools/Gource/CreateVideo.sh
|
Shell
|
gpl-3.0
| 399 |
#!/bin/bash
# linuxsampler
# Build and install LinuxSampler plus its libgig/liblscp dependencies
# from SVN into the Zynthian software tree.
set -ex

cd $ZYNTHIAN_SW_DIR

# Regenerate the autotools build system for the source tree in the
# current directory (the same five steps are needed by every package).
bootstrap_autotools() {
	libtoolize --force
	aclocal
	autoheader
	automake --force-missing --add-missing
	autoconf
}

# Check out one support library from SVN, then bootstrap, build and
# install it with the default configuration.
build_svn_lib() {
	local name=$1 url=$2
	svn co "$url" "$name"
	cd "$name"
	bootstrap_autotools
	./configure
	make -j 1
	make install
	make clean
	cd ..
}

# Download, Build & Install needed libraries: libgig & libscp
build_svn_lib libgig https://svn.linuxsampler.org/svn/libgig/trunk
build_svn_lib liblscp https://svn.linuxsampler.org/svn/liblscp/trunk

# Download, Build & Install LinuxSampler
rm -rf linuxsampler
svn --non-interactive --trust-server-cert co https://svn.linuxsampler.org/svn/linuxsampler/trunk linuxsampler
cd linuxsampler
bootstrap_autotools
#Configure with optimizations from Schpion
./configure --enable-max-voices=21 --enable-max-streams=64 --enable-stream-min-refill=4096 --enable-refill-streams=2 --enable-stream-max-refill=131072 --enable-stream-size=262144 --disable-asm --enable-subfragment-size=64 --enable-eg-min-release-time=0.001 --enable-eg-bottom=0.0025 --enable-max-pitch=2 --enable-preload-samples=65536
# Generate the script VM parser before building.
cd src/scriptvm
yacc -o parser parser.y
cd ../..
# Apply patch from Steveb
git clone https://github.com/steveb/rpi_linuxsampler_patch.git
patch -p1 < rpi_linuxsampler_patch/linuxsampler-arm.patch
# Build LinuxSampler
make -j 1
make install
make clean
cd ..
|
zynthian/zynthian-sys
|
scripts/recipes/install_linuxsampler.sh
|
Shell
|
gpl-3.0
| 1,450 |
Script started on Tue 08 Dec 2015 08:26:03 PM PST
[spica001@hammer rshell]$ make
mkdir bin
g++ -ansi -pedantic -Wall -Werror -std=c++0x -w -o bin/rshell src/rshell.cpp
[spica001@hammer rshell]$ bin/rshell
[email protected] $ ls && echo A
bin clean.sh LICENSE Makefile multi.sh README.md src tests test.sh
A
[email protected] $ ls && echo A && echo B
bin clean.sh LICENSE Makefile multi.sh README.md src tests test.sh
A
B
[email protected] $ ls && echo A || echo B
bin clean.sh LICENSE Makefile multi.sh README.md src tests test.sh
A
[email protected] $ echo|A||| echo B
A
[email protected] $ echooA; echo B; echo C
A
B
C
[email protected] $
[email protected] $ echo C || ls
C
[email protected] $ exit
[spica001@hammer rshell]$ exit
exit
Script done on Tue 08 Dec 2015 08:30:39 PM PST
|
spica001/rshell
|
tests/multi_command.sh
|
Shell
|
gpl-3.0
| 878 |
#!/usr/bin/env bash
# First, run profile to generate substrate
# Second, test against prepared megamuga data
set -e

# Work from the repository root (one level above this script) and put
# it on PATH so the helper scripts resolve.
script_dir=$(dirname "$0")
cd "${script_dir}"/../
export PATH="$(pwd)/:${PATH}"

# Reference genotype data — downloaded once, then cached locally.
if [ ! -e megamuga_2018_06_21.RData ]
then
    curl -O https://www.well.ox.ac.uk/~rwdavies/megamuga_2018_06_21.RData
fi

# Example mouse sequencing data — downloaded once, then cached locally.
mkdir -p test-data/mouse_data/
cd test-data/mouse_data/
if [ ! -e STITCH_example_2016_05_10.tgz ]
then
    curl -O https://www.well.ox.ac.uk/~rwdavies/ancillary/STITCH_example_2016_05_10.tgz
fi
tar -xzf STITCH_example_2016_05_10.tgz
cd ../../

# Run the profiling pipeline, then compare its VCF against the truth set.
mkdir -p test-results/profile-one-off/
./scripts/profile.R
./scripts/compare_vcf_to_truth.R --test-file=./test-results/profile-one-off/stitch.chr19.vcf.gz --chr=chr19 --compare-against=megamuga --mega-save-file=megamuga_2018_06_21.RData
|
rwdavies/STITCH
|
scripts/test-compare-vcf.sh
|
Shell
|
gpl-3.0
| 799 |
#!/bin/bash
# Build the Free Pascal card raytracer benchmark and give the binary a
# language-suffixed name.
set -e  # abort before the rename if the compile fails (the original
        # renamed a stale binary even when fpc errored out)
fpc -CX -O3 -XX -vewnhi -Fi. -Fu. -FU. card_raytracer.lpr
mv card_raytracer card-raytracer-fpc
|
Mark-Kovalyov/CardRaytracerBenchmark
|
fpc/make.sh
|
Shell
|
gpl-3.0
| 107 |
#!/usr/bin/env bash
# Run the input_output_mt module test and diff its output against the
# stored expected result. Helpers (test_server_batch,
# test_compare_result) come from test_common.sh.
. test_common.sh
id=input_output_mt
test_server_batch $id test
# Pull the numeric ids out of "Resource arrived (N)" log lines, then
# de-duplicate and sort numerically to produce the actual result file.
grep "Resource arrived " $id.log|sed -e 's|M_output\[[0-9]\+\]:.*Resource arrived (\([-0-9]*\))|\1|'|sort -u|sort -n >$id.log.result
test_compare_result $id
exit $?
|
qiq/hector_core
|
test/input_output_mt.sh
|
Shell
|
gpl-3.0
| 250 |
# Re-tag the "title" database using the conv2.tab conversion table.
../../../cgi-bin/retag title conv2.tab
|
SuporteCTRL/suitesaber
|
bases/title/data/retag.sh
|
Shell
|
gpl-3.0
| 39 |
# LizardFS system test: verify that endangered chunks (single remaining
# copy) are re-replicated with priority, and that labeled-goal placement
# ("ssd ssd") is eventually restored. Framework helpers (timeout_set,
# setup_local_empty_lizardfs, assert_*, find_chunkserver_chunks, ...)
# come from the test harness.
timeout_set "1 minute"

# Start an installation with 2 servers labeled 'hdd', 2 labeled 'ssd' and the default goal "ssd ssd"
USE_RAMDISK=YES \
	CHUNKSERVERS=4 \
	CHUNKSERVER_LABELS="0,1:ssd|2,3:hdd" \
	MASTER_CUSTOM_GOALS="1 default: ssd ssd" \
	MASTER_EXTRA_CONFIG="CHUNKS_LOOP_MIN_TIME = 1`
			`|CHUNKS_LOOP_MAX_CPU = 90`
			`|ACCEPTABLE_DIFFERENCE = 1.0`
			`|CHUNKS_WRITE_REP_LIMIT = 5`
			`|OPERATIONS_DELAY_INIT = 0`
			`|ENDANGERED_CHUNKS_PRIORITY = 0.7`
			`|OPERATIONS_DELAY_DISCONNECT = 0" \
	setup_local_empty_lizardfs info

# Leave only one "hdd" and one "ssd" server.
lizardfs_chunkserver_daemon 1 stop
lizardfs_chunkserver_daemon 2 stop
lizardfs_wait_for_ready_chunkservers 2

# Create 20 files. Expect that for each file there are 2 chunk copies.
FILE_SIZE=1K file-generate "${info[mount0]}"/file{1..20}
assert_equals 20 $(lizardfs checkfile "${info[mount0]}"/* | grep 'with 2 copies: *1' | wc -l)

# Stop the chunkserver labeled "ssd" and expect all files to have a chunk in only one copy.
assert_equals 20 $(find_chunkserver_chunks 0 | wc -l)
lizardfs_chunkserver_daemon 0 stop
lizardfs_wait_for_ready_chunkservers 1
assert_equals 20 $(lizardfs checkfile "${info[mount0]}"/* | grep 'with 1 copy: *1' | wc -l)

# Add one "hdd" chunkserver. Expect that second copy of each chunk will be created there.
lizardfs_chunkserver_daemon 2 start
lizardfs_wait_for_ready_chunkservers 2
assert_eventually_prints 20 'lizardfs checkfile "${info[mount0]}"/* | grep "with 2 copies: *1" | wc -l'

# Remove all chunks from the chunkserver "ssd" and bring it back to life.
find_chunkserver_chunks 0 | xargs -d'\n' rm -f
assert_equals 0 $(find_chunkserver_chunks 0 | wc -l)
lizardfs_chunkserver_daemon 0 start
lizardfs_wait_for_ready_chunkservers 3

# Expect one copy of each chunk to migrate to the "ssd" server.
assert_eventually_prints 20 'find_chunkserver_chunks 0 | wc -l'

# No chunks should be deleted until we have two "ssd" servers. So let's add one.
assert_eventually_prints 60 'find_all_chunks | wc -l'
lizardfs_chunkserver_daemon 1 start
lizardfs_wait_for_ready_chunkservers 4
assert_eventually_prints 20 'find_chunkserver_chunks 1 | wc -l'
assert_eventually_prints 20 'find_chunkserver_chunks 0 | wc -l'
assert_eventually_prints 40 'find_all_chunks | wc -l'
|
lizardfs/lizardfs
|
tests/test_suites/ShortSystemTests/test_replication_with_endangered_chunks_priority.sh
|
Shell
|
gpl-3.0
| 2,268 |
#!/bin/bash
# Remove build artifacts: every built 'lgck-runtime' binary anywhere in
# the tree, plus the generated resource source file.
find . -name 'lgck-runtime' -delete
rm -f shared/lgck_res.cpp
|
cfrankb/lgck-src
|
src/lgck-runtime/clean.sh
|
Shell
|
gpl-3.0
| 75 |
# Provisioning script: configure SSH, passwordless sudo and the vagrant
# user's authorized key on a fresh CentOS box (run as root by Packer).

# Disable UseDNS to speed up boot skipping DNS lookup
echo "UseDNS no" >> /etc/ssh/sshd_config

# Configure "sudo" group with NO PASSWORD ACCESS
sed -i -e '/Defaults\s\+env_reset/a Defaults\texempt_group=sudo' /etc/sudoers
sed -i -e 's/%sudo ALL=(ALL:ALL) ALL/%sudo ALL=NOPASSWD:ALL/g' /etc/sudoers

# Add vagrant to "sudo" group for NO PASSWORD privileges
usermod -a -G sudo vagrant

# Configure "vagrant" user with ssh key access
mkdir -pm 700 /home/vagrant/.ssh

# Add vagrant public key as authorized key
cat <<EOK >/home/vagrant/.ssh/authorized_keys
ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8Y\
Vr+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdO\
KLv6IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7Pt\
ixWKn5y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmC\
P3NOTd0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcW\
yLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key
EOK

chmod 0600 /home/vagrant/.ssh/authorized_keys
# Use the portable ':' owner:group separator (the historical '.' form is
# deprecated and ambiguous when a user name contains a dot).
chown -R vagrant:vagrant /home/vagrant/.ssh

echo ..
echo ..
sleep 30s
|
cloud-lab/git-repos
|
packer/centos/scripts/vagrant-sudo.sh
|
Shell
|
gpl-3.0
| 1,098 |
#!/bin/bash
# Buildroot post-build hook for the ftcommunity-TXT firmware.
# $1 is the target rootfs directory being assembled.
TARGET=$1

cp -af $BR2_EXTERNAL_FTCOMMUNITY_TXT_PATH/docs/favicon.ico $TARGET/var/www/

# disable writing log files. On SD card we actually have the space for this, so we can leave this on
# mv $TARGET/etc/init.d/S01logging $TARGET/etc/init.d/M01logging||echo "Logging already turned off!"

# Remove obsolete PM firmware load script
rm -f "$TARGET/etc/init.d/S93-am335x-pm-firmware-load"

# Try to generate a detailed firmware version number from git.
# Fall back to the generic version number from the board config
# if we are not building from a git repository, and abort the build
# if we are building from git but the tag part of the version
# number from git does not match the base version in
# board/fischertechnik/TXT/rootfs/etc/fw-ver.txt
GIT_VERSION=$(git -C $BR2_EXTERNAL_FTCOMMUNITY_TXT_PATH describe --tags --match='v*' 2>/dev/null)
if [ -n "$GIT_VERSION" ] ; then
    BASE_VERSION=$(cat $BR2_EXTERNAL_FTCOMMUNITY_TXT_PATH/board/fischertechnik/TXT/rootfs/etc/fw-ver.txt)
    if [[ "${GIT_VERSION}" == "v${BASE_VERSION}"* ]] ; then
        # git tag matches the base version: ship the precise git version
        echo "${GIT_VERSION#v}" > $TARGET/etc/fw-ver.txt
    elif [ "${BASE_VERSION#*-}" = "rc" ]; then
        # release candidate: combine base version and git description
        echo "${BASE_VERSION}+${GIT_VERSION}" > $TARGET/etc/fw-ver.txt
    else
        echo "Version number $GIT_VERSION from 'git describe' does not match the base version $BASE_VERSION"
        echo "Please fix the base version in board/fischertechnik/TXT/rootfs/etc/fw-ver.txt"
        exit 1
    fi
fi

echo "done"
|
ftCommunity/ftcommunity-TXT
|
board/fischertechnik/TXT/post-build.sh
|
Shell
|
gpl-3.0
| 1,447 |
#! /bin/bash
#
#Two Cents Crypto Frontend - Copyright (C) 2014 Giovanni Santostefano
#This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
#This is free software, and you are welcome to redistribute it
#under certain conditions; type `show c' for details.
#
# Developer: Giovanni Santostefano <[email protected]>
# Contributor: Lorenzo "EclipseSpark" Faletra <[email protected]>
#
# Mount a LUKS-encrypted volume stored in a file, wait for the user to
# finish, then unmount and detach everything.

LOOP_DEV=/dev/loop0
CRYPT_NAME=gscryptstorage
MPOINT=$HOME/tccf_cryptvol

clear
dialog --title "Mount Encrypted Storage" --clear \
        --msgbox "Mount encrypted storage contained in a file" 10 30

# Sanity checks: loop device and cryptsetup must be available.
if [ -e "$LOOP_DEV" ]
then
    echo "Loop device checked!"
else
    echo
    echo
    echo "ERROR!!!"
    echo "Please, edit the script with a valid loop device"
    exit 1
fi

# Look cryptsetup up on PATH instead of assuming /sbin — the script
# invokes the bare `cryptsetup` command below anyway.
if command -v cryptsetup > /dev/null 2>&1
then
    echo "cryptsetup installed!"
else
    echo
    echo
    echo "ERROR!!!"
    echo "Cryptsetup not found!"
    exit 2
fi
clear

#please give me root
if [ $(id -u) != 0 ]; then
    echo "This script requires root permissions"
    echo "So, if you don't want root on the system"
    echo "use sudo or some other shit"
    exit 9
fi

#test for mount point
if [ -d "$MPOINT" ]
then
    echo "mount point found!"
else
    echo "Creating mount point"
    mkdir $MPOINT
fi
clear

#enter the filename
fnamevol=`dialog --stdout --title "Choose the encrypted storage" --fselect $HOME/ 14 58`
case $? in
    0)
        echo "\"$fnamevol\" chosen";;
    1)
        echo "Cancel pressed."
        exit 0;;
    255)
        echo "Box closed."
        exit 0;;
esac

if [ -e "$fnamevol" ]
then
    echo "volume exists!"
else
    echo
    echo
    echo "ERROR!!!"
    echo "Filename not found!"
    exit 6
fi
clear

echo "Defining loop device"
# Quote the path — the file chooser may return names with spaces.
losetup $LOOP_DEV "$fnamevol"
echo
echo "Opening the device"
cryptsetup luksOpen $LOOP_DEV $CRYPT_NAME
echo
echo "mounting the device in the $MPOINT directory"
mount /dev/mapper/$CRYPT_NAME $MPOINT/
clear
dialog --title "IMPORTANT!" --clear \
        --msgbox "Volume mounted in $MPOINT \n When you finish using the mounted volume, hit ENTER to close this dialog and volume will be unmounted.\n\nDo not press OK or ENTER now but only when you want to unmount the volume ok?" 20 40

# Tear everything down in reverse order: unmount, close LUKS, free loop.
echo
echo "OK! Almost done! Let's close everything"
umount /dev/mapper/$CRYPT_NAME
cryptsetup luksClose /dev/mapper/$CRYPT_NAME
losetup -d $LOOP_DEV
echo
echo
dialog --title "GOOD BYE!" --clear \
        --msgbox "Device unmounted. Have a nice, secure day!" 10 40
|
LCyberspazio/TwoCentsCryptoFrontend
|
tccf_utils/cryptstorage_mount.sh
|
Shell
|
gpl-3.0
| 2,436 |
#!/bin/sh
# Container entrypoint: move to the build directory, print the image
# version, then hand control to the requested command.
set -e
cd /home/build/builds/
# Print the version baked into the image at build time.
# (cat replaces the redundant `echo "$(cat /VERSION)"`; /VERSION is
# assumed to be newline-terminated — TODO confirm.)
cat /VERSION
exec "$@"
|
piersfinlayson/otbiot-docker
|
idf-build/docker-entrypoint.sh
|
Shell
|
gpl-3.0
| 74 |
#!/usr/bin/env bash
# Notes for running the demo manually; the author prefers PyCharm, so
# every step is left commented out as documentation.
# poor shell, pycharm is better.
# Activate the virtualenv:
# source ~/VirEnv/sanic/bin/activate
# install requirements
# pip3 install -r requirements.txt
# Start the app:
# python3 index.py
|
lpe234/sanicDemo
|
run.sh
|
Shell
|
gpl-3.0
| 171 |
#!/usr/bin/env bash
# Snapshot a recursive listing of $HOME into a timestamped file,
# compress it, and prompt for a USB drive (the copy step is currently
# commented out).
FILE="home-listing-$(date +%s).txt"

#tree ~ > $FILE
# Quote "$FILE" everywhere so the script keeps working even if the
# naming scheme ever gains spaces.
ls -R ~ > "$FILE"

# gzip will name compressed output file to $FILE.gz
gzip "$FILE"

read -p 'Please instert your USB flash drive SIR! (and then press ENTER) > ' ENTER

#mount /dev/disk/by-label/backupdisk /media/my-backup-usb &&
#
#if { df -h | grep /media/my-backup-usb ; } ; then
#	cp $FILE.gz /media/my-backup-usb && umount /media/my-backup-usb
#	read -p 'Please REMOVE your USB flash drive Sir (and press Enter again) > ' ENTER
#else
#	echo "Disk not mounted, better fix that somehow..."
#fi
|
qjcg/shell-examples
|
_absg-solutions/02_homedir-listing.sh
|
Shell
|
gpl-3.0
| 565 |
#!/bin/sh
# Split stage2.bin into two chunks of at most 32767 bytes and convert
# each chunk into a C header containing a byte array.
dd if=stage2.bin of=stage2a.bin bs=1 count=32767
# Second chunk starts where the first ended (offset 32767).
dd if=stage2.bin of=stage2b.bin bs=1 skip=32767 count=32767
tools/bin2header/bin2header stage2a.bin stage2a.h stage2a
tools/bin2header/bin2header stage2b.bin stage2b.h stage2b
|
rvalles/asbestos-avr8susb
|
tools/make_stage2_array.sh
|
Shell
|
gpl-3.0
| 235 |
#!/bin/sh
# Encrypt a sample message with key 13, then brute-force it back.
./caesar_cipher.py 13 "this is my shit" encrypt
# hack_caesar reads the ciphertext written to tmp.txt by the step above
# — presumably caesar_cipher.py writes there; verify.
./hack_caesar.py "`cat tmp.txt`"
|
citypw/citypw-SCFE
|
security/crypto/hacking_secret_ciphers_with_python/test.sh
|
Shell
|
gpl-3.0
| 92 |
#!/bin/bash
# Emit CSV metrics (rev,metric,value) for the current git revision.
set -e

rev=$(git log -1 --pretty='format:%h' HEAD)

# Unused variables
# webpack prints its warnings on stderr, so merge stderr into stdout
# before grepping (the original `2<&1` redirected the wrong way and the
# warnings never reached grep).
unused_variables=$(node_modules/.bin/webpack --config config/webpack.config.js -p 2>&1 \
  | grep "Side effects in initialization of unused variable" \
  | wc -l )
echo $rev,"js_unused",$unused_variables

# build size
size=$(du -k build/main.js | awk '{print $1}')
echo $rev,"js_size_kb",$size

datetime=$(date +"%Y-%m-%dT%H:%M:%S")
echo $rev,"datetime",$datetime

# server startup time
# build/css/main.css size
|
whybug/whybug-server
|
bin/metrics_on_this_rev.sh
|
Shell
|
gpl-3.0
| 496 |
#!/bin/bash
#  install_ss_local.sh
#  ShadowsocksX-NG
#
#  Created by 邱宇舟 on 16/6/6.
#  Copyright © 2016年 qiuyuzhou. All rights reserved.
#
# Install the bundled ss-local binary and its dylibs into the app
# support directory and repoint the "latest" symlink at this version.

# Run from the directory containing this script (and the bundled
# binaries); quote the substitution and abort on failure so we never
# copy from the wrong place.
cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1

NGDir="$HOME/Library/Application Support/ShadowsocksX-NG"
TargetDir="$NGDir/ss-local-3.0.5"
LatestTargetDir="$NGDir/ss-local-latest"

echo ngdir: ${NGDir}

mkdir -p "$TargetDir"
cp -f ss-local "$TargetDir"

# Replace the "latest" symlink with one pointing at this version.
rm -f "$LatestTargetDir"
ln -s "$TargetDir" "$LatestTargetDir"

# Bundle the dynamic libraries ss-local links against.
cp -f libev.4.dylib "$TargetDir"
cp -f libmbedcrypto.2.4.2.dylib "$TargetDir"
ln -s "$TargetDir/libmbedcrypto.2.4.2.dylib" "$TargetDir/libmbedcrypto.0.dylib"
cp -f libpcre.1.dylib "$TargetDir"
cp -f libsodium.18.dylib "$TargetDir"
cp -f libudns.0.dylib "$TargetDir"

echo done
|
DesmondAssis/codebase
|
ShadowsocksX-NG.app/Contents/Resources/install_ss_local.sh
|
Shell
|
gpl-3.0
| 732 |
#!/bin/bash
# Build and install NCO 4.4.2 from a pre-downloaded tarball.
# ------------------------------------------------------------------------------
# arguments
package_root=$1
build_root=$2
install_root=$3
fortran_compiler=$4
cxx_compiler=$5
c_compiler=$6
# ------------------------------------------------------------------------------
# internal script library (check_package, report_error, temp_notice, ...)
source "$PACKMAN_SCRIPTS/bash_utils.sh"
# ------------------------------------------------------------------------------
# dependencies
source "$install_root/szip/bashrc"
source "$install_root/netcdf/bashrc"
source "$install_root/udunits/bashrc"
source "$install_root/antlr/bashrc"
# ------------------------------------------------------------------------------
# some pacakage parameters
nco_url="http://jaist.dl.sourceforge.net/project/nco/nco-4.4.2.tar.gz"
nco_shasum="6253e0d3b00359e1ef2c95f0c86e940697286a10"
nco_package="nco-4.4.2.tar.gz"
nco_src_root="$build_root/nco-4.4.2"
nco_install_root="$install_root/nco/4.4.2"
nco_bashrc="$install_root/nco/bashrc"
# ------------------------------------------------------------------------------
# untar package
check_package "$package_root/$nco_package" "$nco_shasum"
cd "$build_root"
# Remove a stale source tree from a previous run before extracting.
# (The original test was inverted — `! -d` only removed the tree when it
# did NOT exist, i.e. never.)
if [[ -d "$nco_src_root" ]]; then
    rm -rf "$nco_src_root"
fi
tar xf "$package_root/$nco_package"
# ------------------------------------------------------------------------------
# compile package
cd $nco_src_root
nco_stdout="$build_root/nco_stdout"
nco_stderr="$build_root/nco_stderr"
temp_notice "See $nco_stdout and $nco_stderr for output."
CC=$c_compiler CXX=$cxx_compiler \
NETCDF_INC="$NETCDF_ROOT/include" \
NETCDF_LIB="$NETCDF_ROOT/lib" \
NETCDF4_ROOT="$NETCDF_ROOT" \
UDUNITS2_PATH="$UDUNITS_ROOT" \
ANTLR_ROOT="$ANTLR_ROOT" \
$nco_src_root/configure --prefix="$(eval echo $nco_install_root)" \
    1> "$nco_stdout" 2> "$nco_stderr"
if [[ $? != 0 ]]; then
    report_error "Failed to configure NCO! See $nco_stderr."
fi
make -j 4 1> "$nco_stdout" 2> "$nco_stderr"
if [[ $? != 0 ]]; then
    report_error "Failed to make NCO! See $nco_stderr."
fi
# Load the gcc environment, then run the test suite.
# (The original joined these two lines with a trailing backslash, which
# passed "make check" as arguments to `source` — the tests never ran.)
source "$install_root/gcc/bashrc"
make check 1> "$nco_stdout" 2> "$nco_stderr"
if [[ $? != 0 ]]; then
    report_error "Failed to check NCO! See $nco_stderr."
fi
make install 1> "$nco_stdout" 2> "$nco_stderr"
if [[ $? != 0 ]]; then
    report_error "Failed to install NCO! See $nco_stderr."
fi
# ------------------------------------------------------------------------------
# clean up
cd - > /dev/null
rm $nco_stdout $nco_stderr
erase_temp_notice
# ------------------------------------------------------------------------------
# export BASH configuration
cat <<EOF > "$nco_bashrc"
export NCO_ROOT=$nco_install_root
export PATH=\$NCO_ROOT/bin:\$PATH
EOF
|
dongli/obsolete-packman
|
scripts/install_nco.sh
|
Shell
|
gpl-3.0
| 2,679 |
#!/bin/bash
# Generate Self-Signed Certificates for NEMS Linux
# By Robbie Ferguson
# nemslinux.com | baldnerd.com | category5.tv

# Platform id is used to gate platform-specific steps below.
platform=$(/usr/local/share/nems/nems-scripts/info.sh platform)

echo ""
echo "Generating unique SSL Certificates..."

# Install make-ssl-cert if it isn't already installed
if [[ ! -e /usr/sbin/make-ssl-cert ]]; then
    apt -y install ssl-cert
fi

# Using snakeoil for the time being since we had issues with nems-cert and Windows 10.
# Generating new Snakeoil cert
/usr/sbin/make-ssl-cert generate-default-snakeoil --force-overwrite

# Combine for Webmin and other interfaces
cat /etc/ssl/certs/ssl-cert-snakeoil.pem /etc/ssl/private/ssl-cert-snakeoil.key > /etc/ssl/certs/ssl-cert-snakeoil-combined.pem
# Maximum permission for monit to use the cert is 700 and since we don't need an x bit, we'll do 600
# Cert is owned by root:root
chmod 600 /etc/ssl/certs/ssl-cert-snakeoil-combined.pem

# Regenerate the SSH host keys so each install has unique host identity.
echo "Generating unique SSH Certificates..."
/bin/rm /etc/ssh/ssh_host_*
if [[ ! $platform == "21" ]]; then
    dpkg-reconfigure openssh-server
    systemctl restart ssh
fi
echo "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
@   WARNING: NEMS SERVER IDENTIFICATION HAS CHANGED!        @
@   Next time you connect, you'll need to re-import!        @
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
"
echo Done.

# Restart services
echo "Restarting NEMS services..."
# Platforms 0-9 are Raspberry Pi builds and run rpimonitor.
if (( $platform >= 0 )) && (( $platform <= 9 )); then
    systemctl restart rpimonitor
fi
systemctl restart nagios
systemctl restart apache2
echo "Done."
# NOTE(review): exits non-zero even though everything above succeeded —
# looks like it may be relied upon by a caller, but a successful run
# would conventionally `exit 0`; verify before changing.
exit 1
|
Cat5TV/nems-scripts
|
gen-cert.sh
|
Shell
|
gpl-3.0
| 1,633 |
#!/bin/bash
# Train and evaluate a shallow CNN (sequence model, square loss) on the
# IMDB (or Elec) sentiment dataset using the ConText tools.
#### Input: token file (one review per line; tokens are delimited by white space): *.txt.tok
####        label file (one label per line): *.cat
####   These input files were generated by prep_imdb.sh and included in the package.
####   To find the order of the data points, see prep_imdb.sh and the files at lst/.

#-----------------#
gpu=-1  # <= change this to, e.g., "gpu=0" to use a specific GPU.
mem=2   # pre-allocate 2GB device memory
source sh-common.sh
#-----------------#

nm=imdb
# nm=elec  # <= Uncomment this to train/test on Elec
z=sq # To avoid filename conflict with other scripts.
options="LowerCase UTF8"

#--- Step 1. Generate vocabulary
echo Generaing vocabulary from training data ...
max_num=30000  # maximum vocabulary size.
vocab_fn=${tmpdir}/${nm}${z}_trn-${max_num}.vocab
$prep_exe gen_vocab input_fn=data/${nm}-train.txt.tok vocab_fn=$vocab_fn max_vocab_size=$max_num \
    $options WriteCount
if [ $? != 0 ]; then echo $shnm: gen_vocab failed.; exit 1; fi

#--- Step 2. Generate region files (${tmpdir}/*.xsmatbcvar), target files (${tmpdir}/*.y),
#--- and word-mapping files (${tmpdir}/*.xtext).
echo; echo Generating region files ...
p=3 # region size
# padding=p-1 pads each side so every token gets a full region.
for set in train test; do
    $prep_exe gen_regions $options region_fn_stem=${tmpdir}/${nm}${z}_${set}-p${p} \
        input_fn=data/${nm}-${set} vocab_fn=$vocab_fn label_dic_fn=data/${nm}_cat.dic \
        patch_size=$p padding=$((p-1))
    if [ $? != 0 ]; then echo $shnm: gen_regions failed.; exit 1; fi
done

#--- Step 3. Training and test using GPU
mynm=shcnn-seq-${nm}
log_fn=${logdir}/${mynm}.log; csv_fn=${csvdir}/${mynm}.csv
echo; echo Training CNN and testing ... ; echo This takes a while.  See $log_fn and $csv_fn for progress.

$exe $gpu:$mem train data_dir=$tmpdir datatype=sparse \
    trnname=${nm}${z}_train-p${p} tstname=${nm}${z}_test-p${p} \
    save_fn=${outdir}/${mynm}-mod save_interval=100 \
    loss=Square num_epochs=100 \
    reg_L2=1e-4 momentum=0.9 mini_batch_size=100 random_seed=1 \
    step_size=0.05 ss_scheduler=Few ss_decay=0.1 ss_decay_at=80 \
    layers=1 0layer_type=Weight+ 0nodes=1000 0activ_type=Rect \
    0pooling_type=Max 0num_pooling=1 0resnorm_type=Text \
    top_dropout=0.5 test_interval=25 evaluation_fn=$csv_fn > ${log_fn}
if [ $? != 0 ]; then echo $shnm: training failed.; exit 1; fi

# Remove intermediate region/vocabulary files.
rm -f ${tmpdir}/${nm}${z}*
|
riejohnson/ConText
|
examples/shcnn-seq-imdb-elec.sh
|
Shell
|
gpl-3.0
| 2,509 |
#!/usr/bin/env bash

# expand to null string if not file matches glob
shopt -s nullglob

readonly VERSION=1.0.0
readonly PROGRAM=$(basename $0)

PODCASTS_DIR=$HOME/music/podcasts
IMAGE_DIR=$HOME/.podracer/icons
VERBOSE=0

usage() {
cat <<EOF

USAGE ${PROGRAM}

Fix title, album, artist and icon image of podcasts

Runtime settings are:

 - Reading podcasts from ${PODCASTS_DIR}
 - Reading images covers from ${IMAGE_DIR}

See also:

 - http://www.tldp.org/HOWTO/MP3-HOWTO-13.html
 - http://eyed3.nicfit.net/

OPTIONS

  -i     image directory for podcast icons
  -p     root podcast directory
  -h     this help
  -v     verbose

RETURN CODES

  0      completed successfully
  1      failed, see previous messages

VERSION

  ${VERSION}

EOF
}

# Tag every *.mp3 under $1 with the given artist/album and optional
# cover image, preserving each file's original mtime.
fix_podcast () {
    poddir=$1
    artist=$2
    album=$3
    cover=$4
    mkdir -p ${poddir}
    (( $VERBOSE )) && printf "Processing ${poddir} ...\n"
    pushd ${poddir} > /dev/null
    for podcast in *.mp3; do
        # save original timestamp for proper file rotation
        save_date="$(date -r ${podcast} +%F\ %T)"
        # id3v2 --TCON podcast -a "${artist}" -A "${album}" "${podcast}"
        eyeD3 --genre=Speech --artist="${artist}" --album="${album}" "${podcast}" &> /dev/null
        # use my podcast icon
        if [[ -f "${cover}" ]]; then
            eyeD3 --remove-image "${podcast}" &> /dev/null
            eyeD3 --add-image="${cover}:FRONT_COVER:${artist}" "${podcast}" &> /dev/null
        fi
        # print podcast information
        (( $VERBOSE )) && eyeD3 "${podcast}"
        # reset podcast to original timestamp
        touch -d "${save_date}" "${podcast}"
    done
    popd > /dev/null
}

#####################################################################
# MAIN
#####################################################################

# process options
# NOTE: do not shift inside a getopts loop — getopts tracks its position
# via OPTIND, and shifting the arguments underneath it mis-parses any
# invocation with more than one option. Shift once after the loop.
while getopts "vh?p:i:" opt
do
    case "$opt" in
        i) IMAGE_DIR=${OPTARG}
           ;;
        p) PODCASTS_DIR=${OPTARG}
           ;;
        v) VERBOSE=1
           ;;
        h|\?)
           usage
           exit 1;;
    esac
done
shift $((OPTIND - 1))

if [[ ! -x $(which eyeD3) ]]; then
    echo "ERROR: eyeD3 not found. Please install eyed3 package."
    exit 1
fi

if [[ ! -d ${IMAGE_DIR} ]]; then
    echo "ERROR: ${IMAGE_DIR} not a valid image directory"
    exit 1
fi

if [[ ! -d ${PODCASTS_DIR} ]]; then
    echo "ERROR: ${PODCASTS_DIR} not a valid podcast directory"
    exit 1
fi

#
# updated podcasts
#
fix_podcast ${PODCASTS_DIR}/barefoot "barefootinvestor.com" "A Beer with Barefoot" ${IMAGE_DIR}/barefoot.png
fix_podcast ${PODCASTS_DIR}/Haskell_Weekly_News "haskellweekly.news" "Haskell Weekly News" ${IMAGE_DIR}/haskell_weekly_news.png
#fix_podcast ${PODCASTS_DIR}/Linux_Voice "www.linuxvoice.com" "Linux Voice" ${IMAGE_DIR}/linux_voice.png
#fix_podcast ${PODCASTS_DIR}/Partially_Derivative "partiallyderivative.com" "Partially Derivative" ${IMAGE_DIR}/partially_derivative.png
#fix_podcast ${PODCASTS_DIR}/Science_Weekly "guardian.co.uk" "Science Weekly" ${IMAGE_DIR}/science_weekly.png
#fix_podcast ${PODCASTS_DIR}/Spark "www.cbc.ca" "Spark" ${IMAGE_DIR}/spark.png
#fix_podcast ${PODCASTS_DIR}/Talking_Machines "www.thetalkingmachines.com" "Talking Machines" ${IMAGE_DIR}/talking_machines.png
fix_podcast ${PODCASTS_DIR}/The_Science_Show "abc.net.au" "The Science Show"
#fix_podcast ${PODCASTS_DIR}/Waking_Up "samharris.org" "Waking Up" ${IMAGE_DIR}/waking_up.png

# clean-up old podcasts
find ${PODCASTS_DIR}/barefoot -type f -name '*.mp3' -mtime +41 -exec rm -v {} \;
find ${PODCASTS_DIR}/Haskell_Weekly_News -type f -name '*.mp3' -mtime +41 -exec rm -v {} \;
find ${PODCASTS_DIR}/The_Science_Show -type f -name '*.mp3' -mtime +41 -exec rm -v {} \;

exit 0
|
frankhjung/ansible-debian
|
roles/podracer/files/fixpodcasts.sh
|
Shell
|
gpl-3.0
| 3,739 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.