code
stringlengths 2
1.05M
| repo_name
stringlengths 5
110
| path
stringlengths 3
922
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 2
1.05M
|
---|---|---|---|---|---|
#!/usr/bin/env bash
# Vagrant provisioning entry point: run the base setup, then the Node.js setup.
# Fail fast: without -e a failure in base.sh would be silently ignored and
# nodejs.sh would still run against a half-provisioned box.
set -e
/vagrant/provisions/shell/base.sh
/vagrant/provisions/shell/nodejs.sh
|
JuanjoFR/realtime-web-client
|
provisions/shell/bootstrap.sh
|
Shell
|
mit
| 91 |
#!/bin/bash
# Container entry point: assert required environment variables are present,
# render configuration with confd from the environment, then exec the app.
set -xe
# is_defined NAME
# Exit with status 1 if the environment variable named NAME is unset or
# empty. Uses bash indirect expansion ${!1} to dereference the name in $1.
is_defined() {
if [ -z "${!1}" ]; then
echo "ERROR: $1 not defined"
exit 1
fi
}
# Add a bunch of environment variable assertions here. Using this function,
# the start script will fail if the environment variables you expected are
# not defined. For example:
#
# is_defined my_setting
# Render config templates once from environment values (no watch daemon).
/usr/local/bin/confd -onetime -backend=env
# Replace this shell with the app binary so it receives signals directly.
exec /opt/app/app "$@"
|
adamschmidt/golang-boilerplate
|
container/files/opt/app/start.sh
|
Shell
|
mit
| 387 |
# Locate the `z` directory-jumper script: prefer the Homebrew-installed copy
# when brew is present, otherwise use the copy bundled with these dotfiles.
z_path=~/.zsh-plugins/z.sh
if command -v brew > /dev/null; then
z_path="$(brew --prefix)/etc/profile.d/z.sh"
fi
if [[ -f $z_path ]]; then
. $z_path
fi
# Hook up the `thefuck` command-correction alias when the tool is installed.
if command -v thefuck > /dev/null; then
eval "$(thefuck --alias)"
fi
|
morinap/dotfiles
|
zsh/plugins.zsh
|
Shell
|
mit
| 219 |
#!/bin/bash
# Create a conda environment named "mne" on Python 2.7 with IPython/notebook,
# then install the MNE and PySurfer packages into it.
conda create --name mne python=2.7 ipython notebook
source activate mne
conda install mne pysurfer
|
pchrapka/brain-modelling
|
experiments/exp34-mne-python/create-env.sh
|
Shell
|
mit
| 112 |
#!/bin/bash
# Generate JPEG thumbnails (quality 30) for every .jpg in this directory,
# writing 128x128 and 180x90 variants into ../frogs (%s = source basename).
mkdir -p ../frogs
vipsthumbnail -c -o ../frogs/%s-128x128.jpg[Q=30] *.jpg
vipsthumbnail --size=180x90 -c -o ../frogs/%s-180x90.jpg[Q=30] *.jpg
|
cilogi/cilogi-guides
|
offline/botanics/media/images/mkthumbs.bash
|
Shell
|
mit
| 197 |
#cloud-config
# Bootstrap cloud-init configuration for a generic puppet-aws instance:
# mounts the ephemeral volume, installs Puppet from the Puppet Labs apt
# repo, stages Gemfile/Puppetfile/hiera config, then runs puppet apply.
mounts:
  - [ /dev/xvdb, /media/ephemeral0, auto, "defaults,nobootwait", "0", "2" ]
  - [ swap, null ]
apt_update: true
apt_sources:
  - source: "deb http://apt.puppetlabs.com trusty main"
    keyid: 4BD6EC30
    filename: puppetlabs.list
  - source: "deb http://apt.puppetlabs.com trusty dependencies"
    keyid: 4BD6EC30
    filename: puppetlabs.list
packages:
  - puppet
  - bundler
  - git
  - python-pip
  - augeas-tools
  - tree
  - ccze
write_files:
  - path: /root/.ssh/config
    permissions: '0600'
    content: |
      StrictHostKeyChecking no
    owner: root:root
  ################################################
  #                                              #
  # This will setup a private key in the root    #
  # user's home folder. If it's needed,          #
  # uncomment the lines below and then follow    #
  # the steps below.                             #
  #                                              #
  # 1) Create a key if you don't have one        #
  #    already.                                  #
  # 2) cat id_rsa | base64 -w0                   #
  # 3) copy the output of that command after     #
  #    the "content: " in the next block.        #
  #                                              #
  ################################################
  # - path: /root/.ssh/id_rsa
  #   permissions: '0600'
  #   encoding: b64
  #   content:
  #   owner: root:root
  - path: /etc/puppet/Gemfile
    content: |
      source 'https://rubygems.org'
      gem 'librarian-puppet'
      gem 'aws-sdk', '>=2.0.6.pre'
  - path: /etc/puppet/Puppetfile
    content: |
      forge 'https://forgeapi.puppetlabs.com'
      mod 'bootstrap-aws',
        :git => 'ssh://[email protected]:44322/common-libs/puppet-aws.git'
  - path: /etc/puppet/hiera.yaml
    content: |
      ---
      :backends: yaml
      :yaml:
        :datadir: /etc/puppet/hiera.d
      :hierarchy: bootstrap
      :logger: puppet
  - path: /etc/puppet/hiera.d/bootstrap.yaml
    content: |
      ################################################
      #                                              #
      # Be sure to set the instance name and FQDN    #
      # in the puppet variables below.               #
      #                                              #
      ################################################
      aws::bootstrap::instance_name: "generic"
      aws::bootstrap::instance_fqdn: "generic.demo.local"
      aws::bootstrap::eip_allocation_id: nil
      aws::bootstrap::static_volume_encryption: false
      aws::bootstrap::static_volume_size: 0
      aws::bootstrap::static_volume_tag: static-volume
      aws::bootstrap::is_nat: false
      aws::bootstrap::eni_interface: nil
      aws::bootstrap::nat_cidr_range: nil
      aws::bootstrap::eni_id: nil
ssh_authorized_keys:
  - ssh-dss AAAAB3NzaC1kc3MAAACBAKrOf+aK/mZRe/TWaovHZvl3JtH1gC7DZn2O7aSjNqIviAdreZuzIDqq+mKOuJck+/zllx8eeNu5UlesR3IFezNRd9RkQXcSQe5K5nRvO++mCexPzjJrlWmFGc23NcmenchWVNg1cDHxQIBrsE7dcRdfXa8hW6THV0IeYAwTkAtLAAAAFQC7ThNKzD0o4vlvArO2JI8dS2wMvwAAAIAeamD3flIglbZeYVre1BPD9GyW7oUJr7yR7Gt0OtFe6aPK5DNzdt1fxlq1Q7zNjb71vb09cQxqrTeSfzp0UacXGMwLx5Oi34nGiOL6zaNZf8gMU2HqqeCJL1OISZ0B8BV4YSNjHPw4pZgJ8kkmOfJNNQDjsnVvZU1cQr9Xv1pqmwAAAIBnOj6bRvPuWkDcTYTqXmrCUzpx3U3lpFS65Ui/VdozlsRyuTuwjpdWDi2Tw3MjK0tDbGBhDc5z98/bKHJijaiczVx1lj2nsz7eAXZhfl3VARGOS3h0ty3pshLPGNED3LptzBlDXRtL1YqCrX2V+BM1Gy8kWD6TN07ypW0MJ/wKCg== root@dosas
runcmd:
  - cd /etc/puppet
  - bundle install
  - export USER=root
  - export HOME=/root/
  # /root/.ssh is a directory: it needs the execute (search) bit, so use
  # 0700 — the previous 0600 stripped search permission from the directory.
  - chmod 700 /root/.ssh
  - git config --global http.sslverify false
  - echo n | librarian-puppet init
  - librarian-puppet install --path=/etc/puppet/bootstrap-modules
  - puppet apply --color=false --modulepath=/etc/puppet/bootstrap-modules --execute 'include aws::bootstrap'
|
bubblehouse/puppet-aws
|
example/generic.sh
|
Shell
|
mit
| 3,752 |
# Smoke test: add a test music directory to the library, verify the library
# database was created, then run a rename pass and verify its output file.
./audiofile1 -a ~/Music/vtest
if [ ! -f ~/.audiofile/lib.db ]; then
echo "Database not created."
exit 1
fi
# Rename using the given pattern.
# NOTE(review): placeholder meanings (%a artist, %b album, %d disc, %n track,
# %t title) are inferred from the expected filename below — confirm against
# audiofile1's documentation.
./audiofile1 -r '%a/%b/%d.%n-%t-%b-%a.mp3'
if [ ! -f ~/Music/vtest/Scorpions/Blackout/1.7-Arizona-Blackout-Scorpions.mp3 ]; then
echo "Renamed files not created."
exit 2
fi
|
mattvryan/audiofile
|
testandverify_1.sh
|
Shell
|
mit
| 287 |
# Cluster network settings for the uatv.me site (nameserver and gateway
# share the same address here).
C_NAMESERVER="10.0.103.1"
C_DOMAIN="uatv.me"
C_GATEWAY="10.0.103.1"
# Alternate site settings (uawifi.net.ua), kept commented for quick switching.
# C_NAMESERVER="10.0.7.1"
# C_DOMAIN="uawifi.net.ua"
# C_GATEWAY="10.0.7.1"
# Ukrainian NTP pool servers used for time sync.
NTP_SERVER1="0.ua.pool.ntp.org"
NTP_SERVER2="1.ua.pool.ntp.org"
NTP_SERVER3="2.ua.pool.ntp.org"
|
grengojbo/vmware-coreos
|
cluster01.sh
|
Shell
|
mit
| 242 |
#!/bin/bash
# Install cufflinks 2.2.1 into a user-local EasyBuild tree and register the
# matching module file.
# Use -p on both mkdir calls: the first call previously lacked it, so it
# failed when intermediate directories were missing or on a re-run when the
# directory already existed.
mkdir -p .local/easybuild/modules/all/cufflinks
mkdir -p .local/easybuild/software/cufflinks/2.2.1
# -c resumes a partial download if the tarball is already present.
wget -c -P .local/easybuild/software/cufflinks/2.2.1 http://cufflinks.cbcb.umd.edu/downloads/cufflinks-2.2.1.Linux_x86_64.tar.gz
tar xvzf .local/easybuild/software/cufflinks/2.2.1/cufflinks-2.2.1.Linux_x86_64.tar.gz -C .local/easybuild/software/cufflinks/2.2.1/
cp /omics_pipe/dist/modulefiles/cufflinks .local/easybuild/modules/all/cufflinks/2.2.1
|
adammaikai/OmicsPipe2.0
|
dist/AWS_customBuild/cufflinks_install.sh
|
Shell
|
mit
| 457 |
#!/bin/bash
# Build script: installs composer/npm dependencies, clears caches, runs the
# app setup command and (in dev mode) the PHPUnit test suite.
# Usage: ./build.sh [dev]
PHP=`which php`
COMPOSER=`which composer`
NPM=`which npm`
PHPUNIT=`which phpunit`
PWD=`pwd`
export SYMFONY_ENV=prod
# Terminal color codes used by the *Text helper functions below.
ERROR=`tput setab 1` # background red
GREEN=`tput setab 2` # background green
BACKGROUND=`tput setab 4` # background blue
INFO=`tput setaf 3` # yellow text
BLACKTEXT=`tput setaf 0`
COLOR=`tput setaf 7` # text white
NC=`tput sgr0` # reset
# Detect a --reset / -r flag anywhere in the argument list.
# NOTE(review): RESET is set here but never read later in this script.
if [[ `echo "$@" | grep '\-\-reset'` ]] || [[ `echo "$@" | grep '\-r'` ]]; then
RESET=1
else
RESET=0
fi
# Print a section label on a blue background.
function labelText {
echo -e "\n${BACKGROUND}${COLOR}-> ${1} ${NC}\n"
}
# Print an error message on a red background.
function errorText {
echo -e "\n${ERROR}${COLOR}=> ${1} <=${NC}\n"
}
# Print an informational message in yellow.
function infoText {
echo -e "\n${INFO}=> ${1} <=${NC}\n"
}
# Print a success message on a green background.
function successText {
echo -e "\n${GREEN}${BLACKTEXT}=> ${1} <=${NC}\n"
}
# Report $1 as an error if the immediately preceding command failed.
# NOTE(review): relies on $? still holding the caller's last status —
# anything run in between would clobber it.
function writeErrorMessage {
if [[ $? != 0 ]]; then
errorText "${1}"
fi
}
# "dev" as the first argument selects a development build with full deps
# and cache clears; anything else is a production build.
if [[ "dev" == "$1" ]]; then
labelText "Development run"
$COMPOSER install
$PHP $PWD/app/console cache:clear -e=dev
$PHP $PWD/app/console cache:clear -e=test
else
labelText "PRODUCTION optimize autoloader"
$COMPOSER install --no-dev -o -a
fi
labelText "Run setup:install"
$PHP $PWD/bin/console setup:install
labelText "Run npm install"
$NPM install
# Tests only run for development builds.
if [[ "dev" == "$1" ]]; then
$PHPUNIT
else
infoText "Not development, no tests run"
fi
successText "Setup finished"
exit 0
|
kisphp/newsletter
|
build.sh
|
Shell
|
mit
| 1,378 |
#!/bin/bash
# Print a terminal color chart: one row per color, showing the zero-padded
# color number and a bar drawn in that color's foreground and background.
# Terminals index 256 colors as 0-255, so iterate {0..255} — the original
# {0..256} produced one out-of-range row.
(x=`tput op` y=`printf %80s`;for i in {0..255};do o=00$i;echo -e ${o:${#o}-3:3} `tput setaf $i;tput setab $i`${y// /=}$x;done)
|
midwire/bash.env
|
bin/colortest.sh
|
Shell
|
mit
| 140 |
#! /bin/sh
# Build the TransparentGlass resource pack in three flavors: a Java Edition
# zip, a Bedrock (Windows 10) zip, and a Bedrock .mcpack. Each flavor stages
# files into ./tmp, fills the XXX...XXX placeholders in the pack metadata
# with sed, zips the staging directory, then removes it.
VER_MAJOR=1
VER_MINOR=7
UUID1=84efce45-6968-4945-92f5-864af6aee9e2
UUID2=32b5da4e-b95b-450c-a287-83f8f2db7930
RESOURCE_PACK_NAME=TransparentGlass
RESOURCE_PACK_FILE_JAVA=${RESOURCE_PACK_NAME}.zip
RESOURCE_PACK_FILE_BEDROCK_WIN10=${RESOURCE_PACK_NAME}BE.zip
RESOURCE_PACK_FILE_BEDROCK_PORTABLE=${RESOURCE_PACK_NAME}.mcpack
TMPDIR=./tmp
#
# for Java Edition (1.13 or above)
#
TEXTURES_SRCDIR=${RESOURCE_PACK_NAME}/assets/minecraft/textures/blocks
TEXTURES_DSTDIR=${TMPDIR}/assets/minecraft/textures/block
PACK_FORMAT=7
rm -f ${RESOURCE_PACK_FILE_JAVA}
rm -f -r ${TMPDIR}
mkdir -p ${TMPDIR} ${TMPDIR}/assets
mkdir -p ${TMPDIR}/assets/minecraft ${TMPDIR}/assets/minecraft/textures ${TMPDIR}/assets/minecraft/textures/block
cp ${TEXTURES_SRCDIR}/glass.png ${TEXTURES_DSTDIR}/glass.png
# Copy each stained-glass texture under its Java 1.13 name:
# glass_<color>.png -> <color>_stained_glass.png (and blocks/ -> block/).
for _color in black blue brown cyan gray green light_blue lime magenta orange pink purple red white yellow
do
cp ${TEXTURES_SRCDIR}/glass_${_color}.png ${TEXTURES_DSTDIR}/${_color}_stained_glass.png
done
# silver -> light_gray
cp ${TEXTURES_SRCDIR}/glass_silver.png ${TEXTURES_DSTDIR}/light_gray_stained_glass.png
cp -R ${RESOURCE_PACK_NAME}/pack.png ${TMPDIR}/
# Substitute pack format, version and UUID placeholders into pack.mcmeta.
cat ${RESOURCE_PACK_NAME}/pack.mcmeta | \
sed "s/XXXPACKFORMATXXX/${PACK_FORMAT}/g" | \
sed "s/XXXMAJORXXX/${VER_MAJOR}/g" | \
sed "s/XXXMINORXXX/${VER_MINOR}/g" | \
sed "s/XXXUUID1XXX/${UUID1}/g" | \
sed "s/XXXUUID2XXX/${UUID2}/g" | \
cat > ${TMPDIR}/pack.mcmeta
(cd ${TMPDIR}/ && zip -r ../${RESOURCE_PACK_FILE_JAVA} *)
rm -f -r ${TMPDIR}
#
# for Bedrock Edition (Windows10)
#
rm -f ${RESOURCE_PACK_FILE_BEDROCK_WIN10}
rm -f -r ${TMPDIR}
mkdir -p ${TMPDIR}
cp -R ${RESOURCE_PACK_NAME}/assets/minecraft/textures ${TMPDIR}/
cp -R ${RESOURCE_PACK_NAME}/pack.png ${TMPDIR}/pack_icon.png
cat ${RESOURCE_PACK_NAME}/pack_manifest.json | \
sed "s/XXXMAJORXXX/${VER_MAJOR}/g" | \
sed "s/XXXMINORXXX/${VER_MINOR}/g" | \
sed "s/XXXUUID1XXX/${UUID1}/g" | \
sed "s/XXXUUID2XXX/${UUID2}/g" | \
cat > ${TMPDIR}/pack_manifest.json
(cd ${TMPDIR}/ && zip -r ../${RESOURCE_PACK_FILE_BEDROCK_WIN10} *)
rm -f -r ${TMPDIR}
#
# for Bedrock Edition (iOS)
#
rm -f ${RESOURCE_PACK_FILE_BEDROCK_PORTABLE}
rm -f -r ${TMPDIR}
mkdir -p ${TMPDIR}
cp -R ${RESOURCE_PACK_NAME}/assets/minecraft/textures ${TMPDIR}/
cp -R ${RESOURCE_PACK_NAME}/manifest.json ${TMPDIR}/
cp -R ${RESOURCE_PACK_NAME}/pack.png ${TMPDIR}/pack_icon.png
cat ${RESOURCE_PACK_NAME}/manifest.json | \
sed "s/XXXMAJORXXX/${VER_MAJOR}/g" | \
sed "s/XXXMINORXXX/${VER_MINOR}/g" | \
sed "s/XXXUUID1XXX/${UUID1}/g" | \
sed "s/XXXUUID2XXX/${UUID2}/g" | \
cat > ${TMPDIR}/manifest.json
(cd ${TMPDIR}/ && zip -r ../${RESOURCE_PACK_FILE_BEDROCK_PORTABLE} *)
rm -f -r ${TMPDIR}
|
cvsync/TransparentGlass
|
build.sh
|
Shell
|
mit
| 2,715 |
#!/bin/bash
# Push the seges/tomcat image tagged with the version given as the sole
# argument. Exits 42 when the argument is missing.
if [ $# -ne 1 ]; then
echo "Provide version"
exit 42
fi
docker push "seges/tomcat:$1"
|
seges/docker-tomcat
|
docker-push.sh
|
Shell
|
mit
| 122 |
#!/bin/sh
# CocoaPods-generated "embed frameworks" build phase: copies each Pod
# framework into the app bundle, thinning and re-signing as needed.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs shipped with the active toolchain.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework <path>
# Copy one framework into the destination Frameworks folder: resolve the
# source (built-products dir, its basename there, or the literal path),
# rsync it across minus VCS/header cruft, strip architectures not in
# VALID_ARCHS, re-sign it, and on Xcode < 7 also embed and sign the Swift
# runtime dylibs the binary links against.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow symlinks so rsync copies the real framework directory.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
# The executable normally lives inside the .framework; fall back to a
# bare binary at the destination root.
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
# Skipped when no identity is expanded or when code signing is disabled by
# the build settings.
code_sign_if_enabled() {
# Separate [ ... ] && [ ... ] tests instead of the deprecated, ambiguous
# '-a' operator inside a single test.
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" ] && [ "${CODE_SIGNING_REQUIRED}" != "NO" ] && [ "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
# Quote the identity so it is passed as a single argument to codesign.
/usr/bin/codesign --force --sign "${EXPANDED_CODE_SIGN_IDENTITY}" --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
# strip_invalid_archs <binary>
# Remove every architecture slice not listed in VALID_ARCHS from the fat
# binary in place, then log which slices were stripped.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# The Debug and Release framework lists are identical, so install them from
# a single branch instead of two duplicated per-configuration blocks.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-SocketIOChatClient_Tests/Cartography.framework"
install_framework "Pods-SocketIOChatClient_Tests/SocketIOClientSwift.framework"
install_framework "Pods-SocketIOChatClient_Tests/SocketIOChatClient.framework"
install_framework "Pods-SocketIOChatClient_Tests/FBSnapshotTestCase.framework"
install_framework "Pods-SocketIOChatClient_Tests/Nimble.framework"
install_framework "Pods-SocketIOChatClient_Tests/Nimble_Snapshots.framework"
install_framework "Pods-SocketIOChatClient_Tests/Quick.framework"
fi
|
Chaatz/SocketIOChatClient
|
Example/Pods/Target Support Files/Pods-SocketIOChatClient_Tests/Pods-SocketIOChatClient_Tests-frameworks.sh
|
Shell
|
mit
| 4,494 |
#!/bin/bash
# Provision a CentOS EC2 host with Maven and Jenkins, stage Jenkins groovy
# init/config scripts, start Jenkins and wait for the web UI.
# Arguments: $1 = Jenkins admin password (##PWD##), $2 = GitHub token
# (##GHTOKEN##), substituted into the downloaded groovy scripts below.
export GIT_URL=https://raw.githubusercontent.com/eschweit-at-tibco/bw-devops/master
yum -y install unzip > /tmp/yum-unzip.log 2>&1
# install maven
cd /tmp
wget http://www.eu.apache.org/dist/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz
tar xzf apache-maven-3.3.9-bin.tar.gz
mkdir /usr/local/maven
mv apache-maven-3.3.9 /usr/local/maven/
alternatives --install /usr/bin/mvn mvn /usr/local/maven/apache-maven-3.3.9/bin/mvn 1
export M3_HOME=/usr/local/maven/apache-maven-3.3.9
echo "export M3_HOME=/usr/local/maven/apache-maven-3.3.9" >> /etc/profile.d/maven.sh
# Point Maven's local repository at /opt/tibco/maven by rewriting the
# commented-out <localRepository> sample in settings.xml.
sed "s:<localRepository>.*:--><localRepository>/opt/tibco/maven</localRepository><\!--:" ${M3_HOME}/conf/settings.xml > settings.xml
mv -f settings.xml ${M3_HOME}/conf/settings.xml
# download and install the jenkins package
wget -O /etc/yum.repos.d/jenkins.repo http://pkg.jenkins-ci.org/redhat/jenkins.repo
rpm --import http://pkg.jenkins-ci.org/redhat/jenkins-ci.org.key
yum -y install jenkins > /tmp/yum-jenkins.log 2>&1
# add centos to the jenkins group
usermod -a -G jenkins centos
# install jenkins at /jenkins and disable the setup wizard
sed 's/JENKINS_HOME=.*$/JENKINS_HOME=\"\/jenkins\"/;s/JENKINS_JAVA_OPTIONS=\"/&-Djenkins.install.runSetupWizard=false /' /etc/sysconfig/jenkins > /etc/sysconfig/jenkins.new
mv -f /etc/sysconfig/jenkins.new /etc/sysconfig/jenkins
# create jenkins dir
mkdir /jenkins
# create SSH key
ssh-keygen -t rsa -N "" -f key.pem
export SSH_KEY=$(cat key.pem.pub)
# download the groovy initialisation script for jenkins (setting admin)
wget --no-check-certificate --content-disposition -P /tmp ${GIT_URL}/ec2-scripts/init.groovy
sed "s:##PWD##:${1}:;s:##SSHKEY##:${SSH_KEY}:" /tmp/init.groovy > /jenkins/init.groovy
# download the groovy config script for jenkins (installing plugins)
wget --no-check-certificate --content-disposition -P /tmp ${GIT_URL}/ec2-scripts/configure.groovy
sed "s:##GHTOKEN##:${2}:" /tmp/configure.groovy > /jenkins/configure.groovy
# download the groovy pipeline setup script for jenkins (installing plugins)
wget --no-check-certificate --content-disposition -P /tmp ${GIT_URL}/ec2-scripts/pipeline.xml
wget --no-check-certificate --content-disposition -P /jenkins ${GIT_URL}/ec2-scripts/setup-pipeline.groovy
# download the groovy disable cli script for jenkins (installing plugins)
wget --no-check-certificate --content-disposition -P /jenkins ${GIT_URL}/ec2-scripts/disable-cli.groovy
chown -R jenkins:jenkins /jenkins > /tmp/chown1.log 2>&1
chown -R jenkins:jenkins /opt/tibco
# add jenkins user to sudoers and disable tty
echo "jenkins ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers
echo "Defaults:%jenkins !requiretty" >> /etc/sudoers
echo "Defaults:jenkins !requiretty" >> /etc/sudoers
# start Jenkins
service jenkins start
export JENKINS_URL=http://localhost:8080
# wait for Jenkins Web Server to be up
while [[ "$(curl -s -o /dev/null -m 5 -w ''%{http_code}'' ${JENKINS_URL})" != "200" ]]; do sleep 5; done
wget ${JENKINS_URL}/jnlpJars/jenkins-cli.jar
# Install the required plugins one per iteration. The plugin names must be
# unquoted: the original quoted string made the loop run exactly once with
# the whole list as a single word, which only worked because $value was
# then word-split again on the install-plugin command line.
for value in github build-pipeline-plugin dashboard-view workflow-aggregator plain-credentials
do
java -jar jenkins-cli.jar -remoting -s ${JENKINS_URL} -i key.pem install-plugin $value
done
# Restart Jenkins so the newly installed plugins are loaded.
java -jar jenkins-cli.jar -remoting -s ${JENKINS_URL} -i key.pem restart
# wait for Jenkins Web Server to be up
while [[ "$(curl -s -o /dev/null -m 5 -w ''%{http_code}'' ${JENKINS_URL})" != "200" ]]; do sleep 5; done
# Run the staged groovy scripts: configure plugins/credentials, create the
# pipeline job, then disable the remoting CLI.
java -jar jenkins-cli.jar -remoting -s ${JENKINS_URL} -i key.pem groovy /jenkins/configure.groovy
java -jar jenkins-cli.jar -remoting -s ${JENKINS_URL} -i key.pem groovy /jenkins/setup-pipeline.groovy
java -jar jenkins-cli.jar -remoting -s ${JENKINS_URL} -i key.pem groovy /jenkins/disable-cli.groovy
|
eschweit-at-tibco/bw-devops
|
ec2-scripts/jenkins.sh
|
Shell
|
mit
| 3,732 |
#!/bin/sh
# CocoaPods-generated "embed frameworks" build phase: copies each Pod
# framework into the app bundle, thinning and re-signing as needed.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs shipped with the active toolchain.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework <path>
# Copy one framework into the target build dir's Frameworks folder: resolve
# the source (built-products dir, its basename there, or the literal path),
# rsync it across minus VCS/header cruft, strip architectures not in
# VALID_ARCHS, re-sign it, and on Xcode < 7 also embed and sign the Swift
# runtime dylibs the binary links against.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow symlinks so rsync copies the real framework directory.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
# The executable normally lives inside the .framework; fall back to a
# bare binary at the destination root.
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
# Skipped when no identity is expanded or when code signing is disabled by
# the build settings.
code_sign_if_enabled() {
# Separate [ ... ] && [ ... ] tests instead of the deprecated, ambiguous
# '-a' operator inside a single test.
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" ] && [ "${CODE_SIGNING_REQUIRED}" != "NO" ] && [ "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
# Quote the identity so it stays one argument; OTHER_CODE_SIGN_FLAGS is
# deliberately left unquoted so multiple flags word-split into arguments.
/usr/bin/codesign --force --sign "${EXPANDED_CODE_SIGN_IDENTITY}" ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
# strip_invalid_archs <binary>
# Remove every architecture slice not listed in VALID_ARCHS from the fat
# binary in place, then log which slices were stripped.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# The Debug and Release framework lists are identical, so install them from
# a single branch instead of two duplicated per-configuration blocks.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/HISwiftExtensions/HISwiftExtensions.framework"
install_framework "$BUILT_PRODUCTS_DIR/Nimble/Nimble.framework"
install_framework "$BUILT_PRODUCTS_DIR/Quick/Quick.framework"
fi
|
hilenium/HISwiftExtensions
|
Example/Pods/Target Support Files/Pods-HISwiftExtensions_Tests/Pods-HISwiftExtensions_Tests-frameworks.sh
|
Shell
|
mit
| 3,905 |
# Reset the database: drop it, recreate it, and run all migrations.
rake db:drop db:create db:migrate
|
kkestell/travelogue-server
|
resetdb.sh
|
Shell
|
mit
| 34 |
#!/bin/bash
## Moves apache conf from repo location to sites-available
#
# 000-encoded-default.conf
# 111-indexer-primary.conf
# 222-indexer-vis.conf
# 666-encoded-app.conf
#
##
src_dir='/home/ubuntu/encoded/cloud-config/configs/apache'
dest_dir='/etc/apache2/sites-available'
site_arr=('000-encoded-default.conf' '111-indexer-primary.conf' '222-indexer-vis.conf' )
# Copy the static site configs verbatim, replacing any stale copies.
# Quote the array expansion so each element stays one word.
for filename in "${site_arr[@]}"; do
if [ -f "$dest_dir/$filename" ]; then
rm "$dest_dir/$filename"
fi
cp "$src_dir/$filename" "$dest_dir/$filename"
done
# The app config is templated: substitute the worker count from the
# ENCD_APP_WORKERS environment variable.
filename='666-encoded-app.conf'
if [ -f "$dest_dir/$filename" ]; then
rm "$dest_dir/$filename"
fi
# Truncate (>) rather than append (>>): like the copies above, the rendered
# file must replace any previous contents, not accumulate them.
sed "s/APP_WORKERS/$ENCD_APP_WORKERS/" < "$src_dir/$filename" > "$dest_dir/$filename"
|
ENCODE-DCC/encoded
|
cloud-config/configs/apache/build-conf.sh
|
Shell
|
mit
| 732 |
#!/usr/bin/env bash
# Build the links-manager server binary, then ensure its test container is
# running: create it on first use, or restart it if it exists but stopped.
echo "Install LM server..."
go install github.com/viktor-br/links-manager-server/app/links-manager-server
cd $GOPATH
# Empty output means the container does not exist; otherwise docker prints
# "true"/"false" for its running state.
RUNNING=$(docker inspect -f {{.State.Running}} lm-test-server 2> /dev/null)
if [ "$RUNNING" = "" ]; then
echo "Container doesn't exist. Run..."
docker run -it -d --name lm-test-server \
--link lm-test-main-storage \
-e LMS_MAIN_STORAGE_CONNECTION=postgres://postgres@lm-test-main-storage:5432/test?sslmode=disable \
-e LMS_MAIN_STORAGE_TYPE=postgres \
-e LMS_SECRET=123 \
-p 8080:8080 \
-v $(pwd)/bin:/go/bin golang /go/bin/links-manager-server
fi
if [ "$RUNNING" = "false" ]; then
echo "Container was stopped. Start..."
docker start lm-test-server
fi
|
viktor-br/links-manager-server
|
scripts/run-test-server.sh
|
Shell
|
mit
| 733 |
#!/bin/bash
# Demonstrates three while-loop styles. The first loop uses [[ ]] and (( )),
# which are bash features, so the shebang must be bash — the original
# #!/bin/sh breaks on strict POSIX shells such as dash.
i=10
while [[ $i -gt 5 ]]; do
echo $i
((i--))
done
# Same countdown with the POSIX test command and arithmetic expansion.
i=10
while [ $i -gt 5 ]; do
echo $i
: $((i-=1))
done
# Read a file line by line; -r keeps backslashes literal.
while read -r line; do
echo $line
done < /etc/hosts
|
yuweijun/learning-programming
|
bash/test_while.sh
|
Shell
|
mit
| 184 |
# GitHub release helpers for the gh-release tool. All API calls authenticate
# with the GITHUB_ACCESS_TOKEN environment variable.
# NOTE(review): upload-url, mimetype, checksum and release-id-from-tagname
# are helper commands defined elsewhere in this project — confirm there.
readonly ref_endpoint="${GITHUB_API_URL:-https://api.github.com}/repos/%s/git/refs/tags/%s"
readonly release_endpoint="${GITHUB_API_URL:-https://api.github.com}/repos/%s/releases"
readonly release_json='{"tag_name": "v%s", "name": "%s", "target_commitish": "%s"}'
# Create release v<version> on <reponame> from <branch> (default master) and
# upload every file in ./release as a release asset.
release-create() {
declare reponame="$1" version="${2#v}" branch="${3:-master}" name="$4"
local release="$(printf "$release_json" "$version" "$name" "$branch")"
local release_url="$(printf "$release_endpoint" "$reponame")"
echo "Creating release v$version from branch $branch ..."
upload_url="$(curl -s -H "Authorization: token $GITHUB_ACCESS_TOKEN" -d "$release" "$release_url" | upload-url)"
for asset in $(ls -A release); do
local name="$(basename $asset)"
echo "Uploading $name ..."
curl -X POST -H "Content-Type: $(mimetype $name)" -H "Authorization: token $GITHUB_ACCESS_TOKEN" --data-binary "@release/$asset" \
"$upload_url=$name" > /dev/null
done
}
# Delete the release whose tag matches <version>, then delete the tag itself.
release-destroy() {
declare reponame="$1" version="$2"
local release_url="$(printf "$release_endpoint" "$reponame")"
# Accept versions given with or without the leading 'v'.
[[ "$version" == [0-9]* ]] && version="v$version"
release_id="$(curl -s -H "Authorization: token $GITHUB_ACCESS_TOKEN" "$release_url" | release-id-from-tagname "$version")"
echo "Deleting release..."
curl -s -H "Authorization: token $GITHUB_ACCESS_TOKEN" -X DELETE "$release_url/$release_id"
echo "Deleting tag..."
tag_url="$(printf "$ref_endpoint" "$reponame" "$version")"
curl -s -H "Authorization: token $GITHUB_ACCESS_TOKEN" -X DELETE "$tag_url"
}
# Print CLI usage.
usage() {
echo "Usage: gh-release [-v] subcommand"
echo
echo "Subcommands:"
echo " create <reponame> <version> [branch] [name]"
echo " destroy <reponame> <version>"
echo " checksums <algorithm>"
echo
}
# Write an <algorithm> checksum file next to every asset in ./release.
release-checksums() {
declare alg="$1"
echo "Writing $alg checksum files..."
for asset in $(ls -A release); do
cat "release/$asset" | checksum "$alg" > "release/${asset}.$alg"
done
}
# Dispatch subcommands; setting TRACE enables xtrace for debugging.
main() {
set -eo pipefail; [[ "$TRACE" ]] && set -x
case "$1" in
create) shift; release-create "$@";;
destroy) shift; release-destroy "$@";;
checksums) shift; release-checksums "$@";;
-v) echo "$VERSION";;
*) usage;;
esac
}
|
progrium/gh-release
|
bash/gh-release.bash
|
Shell
|
mit
| 2,169 |
#!/bin/bash
# Renew the Let's Encrypt certificate via certbot's standalone HTTP-01
# challenge, then restart Home Assistant (hassctl) so it loads the new cert.
# This is for log purposes
echo "[$(date)] Renew SSL cert script starting"
echo "Current path $PWD"
/home/thor/certbot/certbot-auto renew --no-self-upgrade --standalone --preferred-challenges http-01 --post-hook "hassctl restart" #--pre-hook "hassctl stop"
|
Genestealer/Home-Assistant-Configuration
|
includes/shell_scripts/update_ssl.sh
|
Shell
|
mit
| 269 |
#!/bin/sh
# Home-server startup: launch the DLNA server, torrent client, alarm daemon
# and web app, all detached in the background on the local display.
export DISPLAY=:0.0
rygel &
transmission-gtk &
cd alarm
nohup python alarm.py &
cd ..
cd www/home_server/
nohup ruby app.rb &
cd ../../
#xrandr --output LVDS1 --off
#cat
#xrandr --output LVDS1 --on
|
tobykurien/HomeServer
|
scripts/start.sh
|
Shell
|
mit
| 212 |
#!/bin/sh
# Download new episodes from this Radio France podcast RSS feed via dlpod.
./pod/dlpod.py http://radiofrance-podcast.net/podcast09/rss_16173.xml
|
jeremiedecock/podcast-manager
|
dl_grand_bien_vous_fasse.sh
|
Shell
|
mit
| 81 |
# The GeoTIFFs come out of the grid step with the wrong coordinate system:
# the source data is UTM zone 33N (epsg:32633) per the documentation (liblas'
# lasinfo shows the embedded CRS is broken), so warp each one to WGS 84
# (epsg:4326). Zone info: http://spatialreference.org/ref/epsg/wgs-84-utm-zone-33n/
for scene in castle_forest_1 castle_forest_2 castle_forest_3 powerline_1 valley_1 valley_2; do
gdalwarp -s_srs epsg:32633 -t_srs epsg:4326 "${scene}.grid.std.tif" "${scene}_4326.tif"
done
|
DanielCaldwell/ConvertingLidarToGeoTiff
|
scripts/03_georeference_tif.sh
|
Shell
|
mit
| 929 |
set -euo pipefail
# Unpack the bootstrap tools tarball.
# NOTE(review): $mkdir, $bzip2, $cpio, $tarball and $out are injected by the
# Nix builder environment; none are defined in this script.
echo Unpacking the bootstrap tools...
$mkdir $out
$bzip2 -d < $tarball | (cd $out && $cpio -i)
# Restrict PATH to the freshly unpacked tools only.
export PATH=$out/bin
# Fix codesign wrapper paths
sed -i \
-e "1c\
#!$out/bin/bash" \
-e "s|[^( ]*\bsigtool\b|$out/bin/sigtool|g" \
$out/bin/codesign
# Rewrite a dylib's install name to its unpacked path and ad-hoc re-sign it,
# working on a copy so the original is replaced atomically via mv.
updateInstallName() {
local path="$1"
cp "$path" "$path.new"
install_name_tool -id "$path" "$path.new"
codesign -f -i "$(basename "$path")" -s - "$path.new"
mv -f "$path.new" "$path"
}
find $out
ln -s bash $out/bin/sh
ln -s bzip2 $out/bin/bunzip2
# Fix install names on every unpacked dylib (NUL-delimited to be safe).
find $out/lib -type f -name '*.dylib' -print0 | while IFS= read -r -d $'\0' lib; do
updateInstallName "$lib"
done
# Provide a gunzip script.
cat > $out/bin/gunzip <<EOF
#!$out/bin/sh
exec $out/bin/gzip -d "\$@"
EOF
chmod +x $out/bin/gunzip
# Provide fgrep/egrep.
echo "#! $out/bin/sh" > $out/bin/egrep
echo "exec $out/bin/grep -E \"\$@\"" >> $out/bin/egrep
echo "#! $out/bin/sh" > $out/bin/fgrep
echo "exec $out/bin/grep -F \"\$@\"" >> $out/bin/fgrep
# Stub dsymutil: an empty script that succeeds without doing anything.
cat >$out/bin/dsymutil << EOF
#!$out/bin/sh
EOF
chmod +x $out/bin/egrep $out/bin/fgrep $out/bin/dsymutil
|
NixOS/nixpkgs
|
pkgs/stdenv/darwin/unpack-bootstrap-tools-aarch64.sh
|
Shell
|
mit
| 1,134 |
#!/bin/sh
# Build the vendored Google Test framework, then the pool-controller app,
# both with out-of-source CMake builds.
# Fail fast: without -e a failed `cd trd/gtest/` would silently run the
# gtest build steps against the project root instead.
set -e
# Build Google Test framework
echo "# Building Google Test framework"
cd trd/gtest/
cmake -H. -Bbuild
cmake --build build -- -j4
cd ../../
# Build Pool Controller Application
echo "# Building Pool Controller"
cmake -H. -Bbuild
cmake --build build -- -j4
|
andlgr/pool-controller
|
build.sh
|
Shell
|
mit
| 267 |
#!/usr/bin/env bash
koopa::fedora_dnf() { # {{{1
# """
# Use either 'dnf' or 'yum' to manage packages.
# @note Updated 2021-06-15.
# Runs the first available package manager via sudo with '-y'
# (non-interactive), forwarding all arguments verbatim.
# Aborts via koopa::stop when neither tool is installed.
# """
local app
if koopa::is_installed 'dnf'
then
app='dnf'
elif koopa::is_installed 'yum'
then
app='yum'
else
koopa::stop "Failed to locate package manager (e.g. 'dnf' or 'yum')."
fi
sudo "$app" -y "$@"
return 0
}
koopa::fedora_dnf_delete_repo() { # {{{1
# """
# Delete an enabled dnf repo.
# @note Updated 2021-06-16.
# @param ... repo name(s) under /etc/yum.repos.d, without the '.repo'
# suffix. Asserts that each repo file exists before removing it.
# """
local file name
koopa::assert_has_args "$#"
for name in "$@"
do
file="/etc/yum.repos.d/${name}.repo"
koopa::assert_is_file "$file"
koopa::rm -S "$file"
done
return 0
}
# Thin wrapper: install the named packages via the shared koopa::fedora_dnf
# runner (dnf or yum, whichever is available).
koopa::fedora_dnf_install() { # {{{1
koopa::fedora_dnf install "$@"
}
# Thin wrapper: remove the named packages via the shared koopa::fedora_dnf
# runner (dnf or yum, whichever is available).
koopa::fedora_dnf_remove() { # {{{1
koopa::fedora_dnf remove "$@"
}
koopa::fedora_install_from_rpm() { # {{{1
# """
# Install directly from RPM file.
# @note Updated 2021-06-17.
# Allowing passthrough of '--prefix' here.
# @param ... RPM file path(s) and/or extra rpm options, forwarded to
# 'rpm --install'. GPG signature checks are skipped (--nogpgcheck) and
# an already-installed package is overwritten (--force).
# """
koopa::assert_has_args "$#"
koopa::assert_is_installed 'rpm'
sudo rpm -v \
--force \
--install \
--nogpgcheck \
"$@"
return 0
}
|
steinbaugh/seqcloud
|
lang/shell/bash/functions/os/linux/distro/fedora/dnf.sh
|
Shell
|
mit
| 1,271 |
# If the asdf-java plugin ships its JAVA_HOME helper, source it so this
# shell's JAVA_HOME tracks the asdf-selected JDK.
if [ -f "$ASDF_DATA_DIR/plugins/java/set-java-home.zsh" ]; then
. "$ASDF_DATA_DIR/plugins/java/set-java-home.zsh"
fi
|
ahaasler/dotfiles
|
java/asdf.zsh
|
Shell
|
mit
| 118 |
#!/bin/bash
# SLURM batch job: runs ./deepFunction with DAE/SDAE parameters on a single
# task of the 'mono' partition (4-day limit, 8 GB per CPU).
#SBATCH --partition=mono
#SBATCH --ntasks=1
#SBATCH --time=4-0:00
#SBATCH --mem-per-cpu=8000
#SBATCH -J Deep-DAE_SDAE_5_dec_real_RICA_sig
#SBATCH -e Deep-DAE_SDAE_5_dec_real_RICA_sig.err.txt
#SBATCH -o Deep-DAE_SDAE_5_dec_real_RICA_sig.out.txt
source /etc/profile.modules
module load gcc
module load matlab
# NOTE(review): positional argument semantics (layer sizes, epoch counts,
# learning rates, etc.) are inferred from the option-name strings below —
# confirm against the deepFunction script.
cd ~/deepLearn && srun ./deepFunction 5 'DAE' 'SDAE' '128 1500 1000 500 10' '0 0 0 0 0' '5_dec_real' 'RICA_sig' "'iteration.n_epochs', 'learning.lrate', 'use_tanh', 'noise.drop', 'noise.level', 'rica.cost', 'cae.cost'" '200 1e-3 0 0 0 0.1 0' "'iteration.n_epochs', 'use_tanh'" '200 0'
|
aciditeam/matlab-ts
|
jobs/deepJobs_DAE_SDAE_5_dec_real_RICA_sig.sh
|
Shell
|
mit
| 617 |
#!/bin/bash
#
# Run ALS Prolog on doctools.pro with goal 'file_doc' to generate docs.
# NOTE(review): flag semantics (-g goal, -p) assumed from ALS Prolog CLI
# conventions — verify against the alspro manual.
alspro doctools.pro -g file_doc -p
|
AppliedLogicSystems/ALSProlog
|
docs/src_help_md/lib_blt_xamps/1-x_libblt.sh
|
Shell
|
mit
| 51 |
#!/bin/bash
# Bash script created by Mattia Campagnano on Mon Aug 14 13:05:21 EDT 2017
# Returns the number of networks and hosts obtainable from a given IP address.
# Usage: interactive; prompts for a dotted-quad IPv4 address, then a CIDR
# prefix length, and prints the resulting network/host counts.
# NOTE(review): the CIDR input is not validated — a prefix shorter than the
# classful default yields a negative borrowed-bit count.
echo -n "Enter IP address: "
read ip
octet1=$(echo $ip | cut -d "." -f1)
#echo "${octet1}"
if [ $octet1 -gt 0 ] && [ $octet1 -le 126 ]
then
def_mask=8 #Class A default subnet mask length is 8
#def_mask is a variable storing the default classful subnet mask length for the specific IP class
#echo "${def_mask}" # Optional echo statement for debugging purposes
elif [ $octet1 -ge 128 ] && [ $octet1 -le 191 ]
then
def_mask=16 #Class B default subnet mask length is 16
elif [ $octet1 -ge 192 ] && [ $octet1 -le 223 ]
then
def_mask=24 #Class C default subnet mask length is 24
else
echo "Reserved IP address" #We don't use classes D and E, so we break here.
exit 1;
fi
echo -n "Enter CIDR: " #Enter classless subnet mask length (e.g.: 28)
read cidr
len=32 # An IPv4 address is 32-bit long
borrowed_bits=$((cidr-def_mask))
#The difference between the classless subnet mask length and the default classful subnet
#mask length (i.e. 8 for class A, 16 for class B, 24 for class C) returns the number of
#bits borrowed from the host portion
echo "The CIDR entered borrows ${borrowed_bits} bits"
hostexp=$((len-cidr)) #Exponent (indicated as y) needed to calculate the number of hosts
# The formula for calculating the number of hosts is (2^y)-2, where
# y is the number of zeros in the subnet mask (=32-CIDR, where CIDR indicates the number of ones).
correction=2
#The formula for calculating the hosts number subtracts 2 because the network ID and the broadcast are to
#be excluded, in that they can't be assigned to hosts (they're not routable).
# echo "The host exponent is ${hostexp}" # Optional echo statement for debugging purposes.
networks=$((2**borrowed_bits))
# The formula for calculating the number of neteorks is (2^x)-2, where
# "x" is the number of borrowed bits, calculated as shown above.
temphosts=$((2**hostexp))
hosts=$((temphosts-correction))
echo "This subnet mask allows to create ${networks} networks and ${hosts} hosts"
|
infosecstark/subnet_calculator
|
subnet_calculator.bash
|
Shell
|
mit
| 2,187 |
#!/bin/bash
# One-time issuance command, kept for reference:
# certbot certonly --rsa-key-size 4096 --webroot -w /var/vadweb -d vadweb.us -d www.vadweb.us
# Renew the existing certificate.  nginx is stopped before validation and
# restarted afterwards; the post-hook also refreshes the docker registry's
# copy of the certificate.
certbot renew --rsa-key-size 4096 --pre-hook "service nginx stop" --post-hook "service nginx start; /home/vadim/docker_registry/renew.sh"
|
Vadman97/Vadweb
|
gen_ssl.bash
|
Shell
|
mit
| 245 |
#!/usr/bin/env bash
# Run every *.*.cmd script in the current directory, printing each script's
# contents before sourcing it, and report total elapsed time at the end.
time (
  # Iterate with a shell glob instead of parsing `ls` output (word-splitting
  # of $(ls ...) breaks on unusual filenames; the old `-1a` flags were inert
  # anyway, since the shell expanded the pattern before ls ever saw it).
  for c in *.*.cmd; do
    # With an unmatched glob the literal pattern remains; skip it so the
    # empty-directory case stays a silent no-op, as before.
    [ -e "$c" ] || continue
    echo ----------------------------------------------------------
    echo "$c":
    echo ----------------------------------------------------------
    cat "$c"
    echo ----------------------------------------------------------
    . ./"$c"
  done
)
|
erikwilson/wiki-hop
|
import/runall.sh
|
Shell
|
mit
| 295 |
#!/bin/bash
# setup-server.sh
# Setup a server install of Argumenta.
# Exit if any command exits with non-zero status.
set -e
# This script's real path.
SCRIPT_FILE=$(readlink -f "$0")
# This script's source directory.
SOURCE_DIR=$(readlink -f `dirname "$SCRIPT_FILE"`/..)
# The target install directory.
INSTALL_DIR='/usr/local/argumenta'
# The deployment config directory.
CONFIG_DIR='/etc/argumenta'
# The upstart config file.
UPSTART_CONFIG_FILE='/etc/init/argumenta.conf'
# The Nginx upstart file.
NGINX_UPSTART_FILE='/etc/init/argumenta-nginx.conf'
# The daily backup cron.d file.
BACKUP_CRON_FILE='/etc/cron.d/argumenta-backup'
#
# Upstart config for the `argumenta` service.
# (Quoted "END" delimiter: the here-doc body is taken literally, so $$ and
# $NODE below are expanded by Upstart at runtime, not by this script.)
#
UPSTART_CONFIG=$(cat <<-"END"
description "Argumenta (Node.js)"
author "Argumenta.io"
start on (local-filesystems and net-device-up IFACE!=lo)
stop on runlevel [016]
respawn
respawn limit 10 5 # Default respawns per second.
script
if [ -f /usr/bin/node ]; then
NODE=/usr/bin/node
else
NODE=`which node`
fi
export HOME="/home/argumenta"
echo $$ > /var/run/argumenta.pid
exec sudo -u argumenta \
NODE_ENV='production' CONFIG_DIR='/etc/argumenta' \
"$NODE" /usr/local/argumenta/app \
1>> /var/log/argumenta.log \
2>> /var/log/argumenta.err
end script
END
)
#
# Upstart config for `argumenta-nginx` reverse proxy.
#
NGINX_UPSTART=$(cat <<-"END"
description "Nginx (Argumenta)"
author "Argumenta.io"
start on starting argumenta
stop on stopping argumenta
respawn
respawn limit 10 5 # Default respawns per second.
script
NGINX="/usr/sbin/nginx"
CONFIG="/etc/argumenta/nginx.conf"
echo $$ > /var/run/argumenta-nginx.pid
exec "$NGINX" -c "$CONFIG" -g "daemon off;" \
1>> /var/log/argumenta-nginx.log \
2>> /var/log/argumenta-nginx.err
end script
END
)
#
# Cron.d file for daily backups.
#
BACKUP_CRON=$(cat <<-"END"
# /etc/cron.d/argumenta-backup
# Cron.d format:
# m h d M D user command
SHELL=/bin/bash
# Creates rotated database backups.
ARGUMENTA_BACKUP=/usr/bin/argumenta-backup
# Log file.
BACKUP_LOG=/var/log/argumenta-backup.log
# Daily at 12:01 am, backup the production database.
01 00 * * * root $ARGUMENTA_BACKUP >> $BACKUP_LOG 2>&1
END
)
#
# Adds an `argumenta` user account.
# System account (--system), no password login, owns the running service.
#
addUser() {
  echo "Adding 'argumenta' user account."
  adduser \
    --quiet \
    --system \
    --shell /bin/bash \
    --gecos 'Argumenta web app' \
    --group \
    --disabled-password \
    --home /home/argumenta \
    argumenta
}
#
# Adds an `argumenta-backup` user for database backups.
# Same shape as addUser, but dedicated to the backup cron job.
#
addBackupUser() {
  echo "Adding 'argumenta-backup' user account."
  adduser \
    --quiet \
    --system \
    --shell /bin/bash \
    --gecos 'Argumenta backups' \
    --group \
    --disabled-password \
    --home /home/argumenta-backup \
    argumenta-backup
}
#
# Installs the Argumenta app.
# Copies the source tree into INSTALL_DIR (-L dereferences symlinks, -T
# treats the destination as the tree itself), then locks down ownership
# and permissions: group-readable files, executable bin/, and
# world-readable public/ so Nginx can serve static assets directly.
#
installApp() {
  echo "Installing app to '$INSTALL_DIR'."
  cp -a -L -T "$SOURCE_DIR" "$INSTALL_DIR"
  chown -R root:argumenta "$INSTALL_DIR"
  chmod -R 0640 "$INSTALL_DIR"
  chmod -R 0750 "${INSTALL_DIR}/bin"
  find "$INSTALL_DIR" -type d -print0 | xargs -0 chmod 0750
  # Allow Nginx to serve static files.
  chmod 0755 "$INSTALL_DIR"
  find "$INSTALL_DIR"/public -type f -print0 | xargs -0 chmod 0644
  find "$INSTALL_DIR"/public -type d -print0 | xargs -0 chmod 0755
}
#
# Generates app config files via the installed helper script.
#
genAppConfig() {
  echo "Generating app config."
  "$INSTALL_DIR"/bin/setup-config.sh > /dev/null
}
#
# Creates deployment config files.
# --no-clobber preserves any existing admin-edited config in CONFIG_DIR.
#
createDeployConfig() {
  echo "Creating deploy config in '$CONFIG_DIR'."
  cp -a --no-clobber -T "${INSTALL_DIR}/config/deploy" "$CONFIG_DIR"
  chown -R root:argumenta "$CONFIG_DIR"
  chmod -R 0740 "$CONFIG_DIR"
  find "$CONFIG_DIR" -type d -print0 | xargs -0 chmod -R 0750
}
#
# Creates an Upstart config file from the UPSTART_CONFIG template above.
#
createUpstartConfig() {
  echo "Creating Upstart config '$UPSTART_CONFIG_FILE'."
  echo "$UPSTART_CONFIG" > $UPSTART_CONFIG_FILE
  chmod 0644 $UPSTART_CONFIG_FILE
}
#
# Creates SSL config directory.
# Only symlinks are created here; the actual cert/key must already exist
# (or be installed later) at the standard /etc/ssl locations.
#
createSSLConfig() {
  if [ ! -d "$CONFIG_DIR"/ssl ]; then
    echo "Creating SSL config directory '$CONFIG_DIR/ssl'."
    mkdir -p "$CONFIG_DIR"/ssl
    ln -s '/etc/ssl/certs/argumenta.crt' "$CONFIG_DIR"/ssl/argumenta.crt
    ln -s '/etc/ssl/private/argumenta.key' "$CONFIG_DIR"/ssl/argumenta.key
  fi
}
#
# Creates Nginx configuration files via the installed helper script.
#
createNginxConfig() {
  echo "Creating Nginx config '/etc/argumenta/nginx.conf'."
  "$INSTALL_DIR"/bin/setup-nginx.sh > /dev/null
}
#
# Creates an Nginx Upstart service from the NGINX_UPSTART template above.
#
createNginxUpstart() {
  echo "Creating Nginx Upstart '$NGINX_UPSTART_FILE'"
  echo "$NGINX_UPSTART" > "$NGINX_UPSTART_FILE"
  sudo chmod 0644 "$NGINX_UPSTART_FILE"
}
#
# Creates a daily backup cron.d file from the BACKUP_CRON template above.
#
createBackupCron() {
  echo "Creating Backup cron.d file: '$BACKUP_CRON_FILE'."
  echo "$BACKUP_CRON" > "$BACKUP_CRON_FILE"
  sudo chmod 0644 "$BACKUP_CRON_FILE"
}
#
# Prints usage information.
# The '<<-End' here-doc strips leading *tabs* from the body, allowing the
# indented layout below.
#
usage() {
  cat <<-End
	Usage: $0 [options]
	Options:
	  -d, --debug : Show additional debug info.
	  -h, --help  : Show this usage info.
	End
}
#
# Parses leading command-line options.
# Recognizes -d/--debug (sets DEBUG=1 and enables xtrace) and -h/--help
# (prints usage and exits).  Unknown options are silently consumed.
#
getOpts() {
  while [[ "${1:-}" == -* ]]; do
    case "$1" in
      -d | --debug )
        DEBUG=1
        shift
        ;;
      -h | --help )
        usage
        exit 0
        ;;
      * )
        shift
        ;;
    esac
  done
  if [[ "${DEBUG:-0}" -eq 1 ]]; then
    echo "Debug mode enabled."
    set -x
  fi
}
#
# Main script: provisions the full Argumenta server, one step at a time.
# Order matters: users first, then the install, then configs and services.
#
main() {
  getOpts "$@"
  local step
  for step in \
    addUser addBackupUser installApp genAppConfig \
    createDeployConfig createUpstartConfig createSSLConfig \
    createNginxConfig createNginxUpstart createBackupCron
  do
    "$step"
  done
  echo "Done!"
}
# Let's do this!
main "$@"
|
argumenta/argumenta
|
bin/setup-server.sh
|
Shell
|
mit
| 5,709 |
#
# When run, this creates a file called $PWD/wrt-buildroot-manager.source that you can source from bashrc
#
# Resolve this script's directory to an absolute path.  Using
# $(cd ... && pwd) replaces the legacy backtick two-step; the && also
# stops a failed cd from silently yielding the *current* directory, which
# the old `cd "$DIR"; pwd` form would have done.
DIR=$(cd "$(dirname "$0")" && pwd)
# Emit the environment file.  \$PATH is escaped so PATH is prepended at
# source time, not frozen at generation time.
cat > wrt-buildroot-manager.source <<EOF
export WRT_BUILDROOT_DIR="$DIR"
export WRT_BUILDROOT_TEMPLATE="$DIR/template"
export PATH="$DIR:\$PATH"
EOF
|
pastcompute/wrt-buildroot-manager
|
configure.sh
|
Shell
|
gpl-2.0
| 300 |
# restore files with TSM
# have to do this for each filespace
#-----<--------->-------
# Position of the next spinner glyph (1..4); exported so subshells
# (the restore pipeline below) inherit the starting value.
export starposition=1
# Print one frame of a rotating /-\| spinner, staying on a single line.
# 'set --' loads the four glyphs as this function's positional parameters,
# and the indirect expansion ${!starposition} selects the current one; the
# range check wraps the position back to 1 after the fourth frame.
star ()
{
    set -- '/' '-' '\' '|';
    test $starposition -gt 4 -o $starposition -lt 1 && starposition=1;
    echo -n "${!starposition}";
    echo -en "\r";
    let starposition++
    #sleep 0.1
}
#-----<--------->-------
# Restore each selected filespace with the TSM client (dsmc) into
# /mnt/local, showing a spinner while dsmc reports progress.
# NOTE(review): TSM_RESTORE_FILESPACE_NUMS / TSM_FILESPACES / LogPrint /
# Log are provided by the surrounding rear framework — confirm.
for num in $TSM_RESTORE_FILESPACE_NUMS ; do
    filespace="${TSM_FILESPACES[$num]}"
    # make sure FileSpace has a trailing / (for dsmc)
    test "${filespace:0-1}" == "/" || filespace="$filespace/"
    LogPrint "Restoring ${filespace}"
    TsmProcessed=""
    Log "Running 'dsmc restore ${filespace}* /mnt/local/$filespace -verbose -subdir=yes -replace=all -tapeprompt=no ${TSM_DSMC_RESTORE_OPTIONS[@]}'"
    dsmc restore \""${filespace}"\" \""/mnt/local/${filespace}/"\" \
        -verbose -subdir=yes -replace=all \
        -tapeprompt=no "${TSM_DSMC_RESTORE_OPTIONS[@]}" | \
    # This while-loop runs in a pipeline subshell, so its variable updates
    # do not escape the loop — fine here, TsmProcessed is only read inside.
    while read Line ; do
        # ANS1898I is dsmc's periodic "processed N files" message; squeeze
        # runs of '*' and remember it as a prefix for the spinner line.
        if test "${Line:0:8}" == "ANS1898I" ; then
            TsmProcessed="$(echo "${Line:9}" | tr -s '*') "
            Line="Restoring" # trigger star
        fi
        if test "${Line:0:9}" == "Restoring" ; then
            echo -n "$TsmProcessed"
            star
        else
            echo "$Line"
        fi
    done
done
|
thumm/rear_deb
|
usr/share/rear/restore/TSM/default/40_restore_with_tsm.sh
|
Shell
|
gpl-2.0
| 1,276 |
#!/bin/bash -ue
#-*-sh-*-
#
# $Id: run_fake.sh $
#
# Author: Markus Stenberg <[email protected]>
#
# Copyright (c) 2013 cisco Systems, Inc.
#
# Created: Thu Feb 7 13:57:59 2013 mstenber
# Last modified: Thu Aug 1 11:31:56 2013 mstenber
# Edit time: 14 min
#
# Start the fake DHCPv6 server in the background, logging to $LOGDIR.
. /usr/bin/luaenv.sh
mkdir -p "$LOGDIR"
# Redirect stdout to the log first, THEN duplicate stderr onto it so both
# streams land in fake.log.  (The previous '2>&1 > file' order pointed
# stderr at the terminal, so errors never reached the log.)
ENABLE_MST_DEBUG=1 lua $CORE/fakedhcpv6d.lua \
    --join=eth0 \
    --dns=2000::3 \
    --search=v6.lab.example.com \
    --pref=3000 \
    --valid=4000 \
    2000:cafe:bee0::/56 \
    > "$LOGDIR/fake.log" 2>&1 &
# Implicitly provide for routes also
# (fakedhcpv6d does this automatically, hooray)
#ip -6 route add 2000:dead:bee0::/55 dev eth0 via fe80::ec6e:75ff:fe23:4e28
|
fingon/hnet-ttin
|
topology/home10-3isp/isp2/usr/bin/run_fake.sh
|
Shell
|
gpl-2.0
| 672 |
#!/bin/bash
#Icinga Plugin Script to display the connection status of the registered services to broker
#Broker IP & Port
ip="localhost"
port="39001"
# Broker server status messages
aliveMsg="Server is alive"
deadMsg="Server is not alive"
# On ARM targets the ODE runtime lives in a fixed build tree, and HOME
# must point somewhere writable for the broker tools.
if [[ "`uname -m`" == "arm"* ]]; then
export ODEDIR=/home/nextra/build/Nextra/src/../install/linux/tcp
export HOME=/tmp
fi
isBrokerAlive=$($ODEDIR/bin/broklist -ping $ip $port)
icingaData=""
# Commented the below code due to a known issue while invoking broker within the script
#if [ "$isBrokerAlive" == "$deadMsg" ];
#then
# broker="broker -e /tmp/tmp_broker.env -bg"
# #broker="broker -bg"
# $broker
# #/bin/bash /usr/lib/nagios/plugins/brokerStart.sh
#fi
if [ -f /tmp/_out ];
then
rm /tmp/_out
fi
# Re-check broker liveness (the auto-start attempt above is disabled).
isBrokerAlive=$($ODEDIR/bin/broklist -ping $ip $port)
if [ "$isBrokerAlive" == "$aliveMsg" ];
then
#Script to register ORCA and MPAP services
#source /usr/lib/nagios/plugins/register_services.sh
serviceMsg=$($ODEDIR/bin/broklist $ip $port)
icingaData="Broker running at $ip $port\nRegistered Services\n"
# NOTE(review): serviceMsg is a plain string, not an array, so
# ${!serviceMsg[@]} yields only index 0 and the [$index+N] subscripts below
# expand empty for N>0; '${serviceMessage[...]}' also looks like a typo of
# serviceMsg.  As written this loop only emits the header — confirm intent.
for index in ${!serviceMsg[@]};
do
if [ $(($index % 5)) -eq 0 ];
then
icingaData="$icingaData ${serviceMsg[$index]} ${serviceMsg[$index+1]} ${serviceMsg[$index+2]} ${serviceMsg[$index+3]} ${serviceMessage[$index+4]} ${serviceMsg[$index+5]}\n"
fi
done
icingaData="$icingaData |"
# Every third broklist token is a service name/ip/port triple (columns 4-5
# are stripped by awk); ping each registered service in parallel.
broklistServices=($($ODEDIR/bin/broklist $ip $port | awk '{$4=$5=""; print $0}'))
if [[ ${!broklistServices[@]} == "" ]];
then
icingaData="$icingaData \n No services registered to broker"
echo -e " $icingaData"
else
for index in ${!broklistServices[@]};
do
ctr=$index
if [ $(($index % 3)) -eq 0 ];
then
serviceName="${broklistServices[$index]}"
serviceIp="${broklistServices[$index+1]}"
servicePort="${broklistServices[$index+2]}"
commands[$ctr]="/usr/lib/nagios/plugins/icingaBroklistPing.sh $serviceName $serviceIp $servicePort"
fi
done
# Run up to 20 pings concurrently via GNU parallel.
op=$(SHELL=/bin/bash parallel --gnu -j 20 ::: "${commands[@]}")
icingaData="$icingaData $op"
echo -e $icingaData
fi
else
# Exit code 2 = CRITICAL in Nagios/Icinga plugin convention.
icingaData="Broker not running"
echo -e $icingaData
exit 2
fi
exit 0
|
inspire-international/namingServer-plugin_icinga
|
plugins/query_broker.sh
|
Shell
|
gpl-2.0
| 2,355 |
#! /bin/sh
#BEGIN DEPEND------------------------------------------------------------------
INPUT_MODULES='src/lib/perl5/COD/CIF/Data/CODNumbers.pm'
#END DEPEND--------------------------------------------------------------------
# Run the embedded Perl unit test.  The here-doc delimiter is quoted
# ('END_SCRIPT'), so the shell passes the Perl source through verbatim
# without expanding any $variables.
# NOTE(review): the Perl header mentions timestamps_are_the_same() while the
# code calls have_equiv_timestamps() — confirm which name is current.
perl <<'END_SCRIPT'
#------------------------------------------------------------------------------
#$Author$
#$Date$
#$Revision$
#$URL$
#------------------------------------------------------------------------------
#*
#* Unit test for the COD::CIF::Data::CODNumbers::timestamps_are_the_same()
#* subroutine. Tests the way the subroutine handles situations when both of
#* the values are datetime timestamps and point to different moments in time.
#**
use strict;
use warnings;
use COD::CIF::Data::CODNumbers;
my $value_1 = '2017-01-01T23:00:00.00-03:00';
my $value_2 = '2017-01-01T23:00:00.00-02:00';
my $data_name = '_raman_measurement.datetime_initiated';
my $entry_1 = {
    'timestamp' => {
        $data_name => $value_1,
    }
};
my $entry_2 = {
    'timestamp' => {
        $data_name => $value_2,
    }
};
if ( COD::CIF::Data::CODNumbers::have_equiv_timestamps($entry_1, $entry_2, $data_name) ) {
    print "Values are treated as being the same.\n";
} else {
    print "Values are treated as being different.\n";
}
if ( COD::CIF::Data::CODNumbers::have_equiv_timestamps($entry_2, $entry_1, $data_name) ) {
    print "Values are treated as being the same.\n";
} else {
    print "Values are treated as being different.\n";
}
END_SCRIPT
|
sauliusg/cod-tools
|
tests/shtests/have_equiv_timestamps_009.sh
|
Shell
|
gpl-2.0
| 1,484 |
# Schedule the hammer upgrade teuthology suite from the wip-rgw-new-multisite
# branch on Ubuntu VPS machines; results are mailed to the address below.
./virtualenv/bin/teuthology-suite --suite upgrade:hammer --suite-branch wip-rgw-new-multisite --email [email protected] --ceph wip-rgw-new-multisite --machine-type vps --distro ubuntu
#--dry-run
|
oritwas/scripts
|
run_upgrade_suite.sh
|
Shell
|
gpl-2.0
| 198 |
#!/bin/sh
# MoCA (Multimedia over Coax) control script: loads/unloads the clink
# driver and daemon and applies channel/security/VLAN configuration.
# NOTE(review): interface names (WAN_IF, LAN_IF, BR_IF, BR_VLAN) are
# presumably defined in /etc/moca.conf — confirm.
. $IPKG_INSTROOT/etc/moca.conf
CONFIG="/bin/config"
IFCONFIG="/sbin/ifconfig"
# Cached VLAN tagging settings from the config store.
vlan_tag_mark=$($CONFIG get vlan_tag_enable)
vlan_tag_id_1=$($CONFIG get vlan_tag_id_1)
vlan_tag_id_2=$($CONFIG get vlan_tag_id_2)
# Stop the clink daemons (politely first, then SIGKILL) and unload the
# MoCA driver module.
stop()
{
	killall clinkd
	killall kclinkd
	sleep 1
	killall -9 clinkd
	killall -9 kclinkd
	rmmod CandDdvr
}
# Load the clink driver, bounce eth0 so the MAC re-initialises, then start
# the clinkd daemon bound to eth0's MAC address.
start()
{
	#load Driver
	insmod /etc/moca/CandDdvr.ko
	mknod /dev/jaws31 c 254 31
	#Down/Up Interface to force driver to re-init ethernet MAC
	ifconfig eth0 down
	ifconfig eth0 up
	#mm 0xb8050028 0xae000000
	#start deamon service
	eth0_mac=$(ifconfig eth0 | grep eth0 | sed 's/.*HWaddr//')
	/etc/moca/clinkd -Dvtf/tmp/moca/examples --mac-addr $eth0_mac -i /dev/jaws31 &
	#/etc/moca/clinkd -Dvtf/etc/moca/examples --mac-addr 00:03:7F:11:24:D6 -i /dev/jaws31 &
}
restart()
{
	stop
	start
}
# Print the status of the MoCA nodes connected over coax.
show_netinfo()
{
	echo "MoCA monitoring the status of the connected MoCA modes:"
	/etc/moca/clnkstat -n
}
# Dump the SoC's current channel/security configuration.
show_conf()
{
	echo "MoCA SoC Current Configuration/Info:"
	#read configurations
	/etc/moca/clnkcfg --lof
	/etc/moca/clnkcfg --networksearch
	/etc/moca/clnkcfg --channelmask
	/etc/moca/clnkcfg --securitymode
	/etc/moca/clnkcfg --mocapassword
	/etc/moca/clnkstat -d
}
# Apply the security mode and MoCA password from the config store.
change_password()
{
	/etc/moca/clnkcfg -s --securitymode=$($CONFIG get coax_encry_abled)
	/etc/moca/clnkcfg -s --mocapassword=$($CONFIG get coax_encry_key)
}
# Map the configured channel ID (coax_channel) to an RF centre frequency
# (MHz, the --lof "last operating frequency") and a single-channel bitmask,
# persist the mask, and program the SoC.  ID 0 (or any unknown ID) enables
# network search across all channels instead.
change_channel_by_id()
{
	moca_chan=$($CONFIG get coax_channel)
	case $moca_chan in
	15)
		moca_chan_freq=1150
		moca_chan_mask=0x80000000
		;;
	17)
		moca_chan_freq=1200
		moca_chan_mask=0x40000000
		;;
	19)
		moca_chan_freq=1250
		moca_chan_mask=0x20000000
		;;
	21)
		moca_chan_freq=1300
		moca_chan_mask=0x10000000
		;;
	23)
		moca_chan_freq=1350
		moca_chan_mask=0x8000000
		;;
	25)
		moca_chan_freq=1400
		moca_chan_mask=0x4000000
		;;
	27)
		moca_chan_freq=1450
		moca_chan_mask=0x2000000
		;;
	29)
		moca_chan_freq=1500
		moca_chan_mask=0x1000000
		;;
	31)
		moca_chan_freq=1550
		moca_chan_mask=0x800000
		;;
	33)
		moca_chan_freq=1600
		moca_chan_mask=0x400000
		;;
	0|*) #SCAN
		moca_chan_freq=0
		moca_chan_mask=0xffc00000
		;;
	esac
	$CONFIG set coax_channel_mask=$moca_chan_mask
	$CONFIG commit
	if [ "$moca_chan_mask" = "0xffc00000" ]; then
		/etc/moca/clnkcfg -s --networksearch=1 --channelmask=$moca_chan_mask
	else
		/etc/moca/clnkcfg -s --lof=$moca_chan_freq --networksearch=0 --channelmask=$moca_chan_mask
	fi
}
# Inverse mapping of change_channel_by_id: derive the channel ID from the
# stored bitmask (coax_channel_mask), persist it, reuse
# change_channel_by_id to program the frequency, then reset the SoC.
change_channel_by_mask()
{
	moca_chanmask=$($CONFIG get coax_channel_mask)
	case $moca_chanmask in
	0x80000000) #1150
		$CONFIG set coax_channel=15
		;;
	0x40000000) #1200
		$CONFIG set coax_channel=17
		;;
	0x20000000) #1250
		$CONFIG set coax_channel=19
		;;
	0x10000000) #1300
		$CONFIG set coax_channel=21
		;;
	0x8000000) #1350
		$CONFIG set coax_channel=23
		;;
	0x4000000) #1400
		$CONFIG set coax_channel=25
		;;
	0x2000000) #1450
		$CONFIG set coax_channel=27
		;;
	0x1000000) #1500
		$CONFIG set coax_channel=29
		;;
	0x800000) #1550
		$CONFIG set coax_channel=31
		;;
	0x400000) #1600
		$CONFIG set coax_channel=33
		;;
	0xffc00000|*) #SCAN
		$CONFIG set coax_channel=0
		;;
	esac
	#set frequency and commit new config value
	change_channel_by_id
	#reset SoC
	/etc/moca/clnkrst -i jaws31 &
}
#coax_encry_abled - 0:Enable, 1:Disable
#coax_encry_key - default: 99999999988888888
# Push channel and security settings to the SoC, then reset it so the new
# settings take effect.
apply_change()
{
	#set frequency
	change_channel_by_id
	#set security
	change_password
	#save configuration files after all change is applied.
	#This function is now called by MoCA LSDK
	#save_config_file
	#reset SoC
	/etc/moca/clnkrst -i jaws31 &
}
#/tmp/moca/examples/clink.conf
# Regenerate clink.conf from the config store.  DISABLED (returns
# immediately): per the comments below it only worked with SDK V1.00.
update_conf_file()
{
	#Disable this function since it cannot work well for new SDK V1.01.
	#This function can work for old SDK V1.00.
	return
	echo -n > /tmp/moca/examples/clink.conf
	while read LINE
	do
		find_ret1=`echo $LINE | grep -c "mocapassword"`
		find_ret2=`echo $LINE | grep -c "securitymode"`
		find_ret3=`echo $LINE | grep -c "channelmask"`
		if [ "x$find_ret1" == "x1" ]; then
			pwd_len=$(/bin/echo $($CONFIG get coax_encry_key) | wc -L)
			echo "mocapassword $pwd_len $($CONFIG get coax_encry_key)" >> /tmp/moca/examples/clink.conf
		elif [ "x$find_ret2" == "x1" ]; then
			echo "securitymode 1 $($CONFIG get coax_encry_abled)" >> /tmp/moca/examples/clink.conf
		elif [ "x$find_ret3" == "x1" ]; then
			echo "channelmask 10 $($CONFIG get coax_channel_mask)" >> /tmp/moca/examples/clink.conf
		else
			echo $LINE >> /tmp/moca/examples/clink.conf
		fi
	done < /etc/moca/examples/clink.conf
	#below lines will pend "^@" at end of line
	#sed -i "/mocapassword/c mocapassword $pwd_len $($CONFIG get coax_encry_key)" /tmp/moca/examples/clink.conf
	#sed -i "/securitymode/c securitymode 1 $($CONFIG get coax_encry_abled)" /tmp/moca/examples/clink.conf
	#sed -i "/channelmask/c channelmask 10 $($CONFIG get coax_channel_mask)" /tmp/moca/examples/clink.conf
}
# Bounce the LAN interface and kick the switch PHY so clients re-negotiate.
lan_restart() {
	ifconfig $LAN_IF down
	echo -n 5 > /proc/switch_phy
	sleep 8
	ifconfig $LAN_IF up
}
#Prepare MoCA VLAN interface
# Detach raw eth0 from br0 and attach tagged sub-interfaces instead:
# eth0.<id1> joins br0, and (when the IDs differ) eth0.<id2> joins br2.
# NOTE(review): '2>/dev/zero' below discards stderr like /dev/null would,
# but looks like a typo for /dev/null — confirm.
moca_vlan_if_up() {
	#Prepare for WAN_IF: eth0
	#remove eth0 from br0.
	brctl delif $BR_IF $WAN_IF
	$IFCONFIG $WAN_IF up
	#Attach eth0.wireless (vlan_tag_id_1) to BR_IF: "br0"
	WAN_VLAN1_EXIST=`$IFCONFIG $WAN_IF.$vlan_tag_id_1 2>/dev/zero | grep -c "eth"`
	if [ "x$WAN_VLAN1_EXIST" == "x0" ]; then
		vconfig add $WAN_IF $vlan_tag_id_1
	fi
	brctl addif $BR_IF $WAN_IF.$vlan_tag_id_1
	$IFCONFIG $WAN_IF.$vlan_tag_id_1 up
	#Attach eth0.ethernet (vlan_tag_id_2) to BR_VLAN: br2
	if [ "x$vlan_tag_id_1" != "x$vlan_tag_id_2" ]; then
		WAN_VLAN2_EXIST=`$IFCONFIG $WAN_IF.$vlan_tag_id_2 2>/dev/zero | grep -c "eth"`
		if [ "x$WAN_VLAN2_EXIST" == "x0" ]; then
			vconfig add $WAN_IF $vlan_tag_id_2
		fi
		brctl addif $BR_VLAN $WAN_IF.$vlan_tag_id_2
		$IFCONFIG $WAN_IF.$vlan_tag_id_2 up
	fi
}
# Reverse of moca_vlan_if_up: remove the tagged sub-interfaces from their
# bridges and re-attach raw eth0 to br0.
moca_vlan_if_down() {
	$IFCONFIG $WAN_IF down
	#Restore BR_IF: br0
	$IFCONFIG $WAN_IF.$vlan_tag_id_1 down
	brctl delif $BR_IF $WAN_IF.$vlan_tag_id_1
	#Restore BR_VLAN: br2
	if [ "x$vlan_tag_id_1" != "x$vlan_tag_id_2" ]; then
		$IFCONFIG $WAN_IF.$vlan_tag_id_2 down
		brctl delif $BR_VLAN $WAN_IF.$vlan_tag_id_2
	fi
	#Restore WAN_IF: eth0
	brctl addif $BR_IF $WAN_IF
	$IFCONFIG $WAN_IF up
}
# Undo VLAN tagging: move the LAN port back to br0, tear down the tagged
# interfaces and br2 — but leave the LAN link itself untouched (the caller
# decides when to bounce it).
vlan_tag_disable_without_lan_restart() {
	#Move LAN interface from "br2" back to "br0"
	if [ "x$vlan_tag_id_1" != "x$vlan_tag_id_2" ]; then
		brctl delif $BR_VLAN $LAN_IF
		brctl addif $BR_IF $LAN_IF
	fi
	#Disable MoCA VLAN interface, then enable MoCA interface
	moca_vlan_if_down
	#Disable BR_VLAN (br2) interface
	ifconfig $BR_VLAN down
}
# Same as above, then bounce the LAN link so clients re-negotiate.
vlan_tag_disable() {
	vlan_tag_disable_without_lan_restart
	#restart LAN interface
	lan_restart
}
#Before running this function, all interfaces should be attached to "br0".
#After this function, "eth1" and "eth0.4094" attaches to "br2".
# Enable VLAN tagging: (re)create bridge br2 if the wireless driver removed
# it, copy br0's MAC onto it, attach the tagged MoCA sub-interfaces, and —
# when the two VLAN IDs differ — move the LAN port onto br2.
vlan_tag_enable() {
	#Prepare br2 (BR_VLAN) interface
	#Sometimes, br2 is remvoed by wireles driver, So create it again if necessary.
	BR_VLAN_EXIST=$($IFCONFIG $BR_VLAN 2>/dev/null | grep -c "br")
	if [ "x$BR_VLAN_EXIST" == "x0" ]; then
		/etc/rc.d/S16bridge-vlan start
		# Fixed: 'ask' was a typo of 'awk', so br_default_mac was always
		# empty and br2 never inherited br0's MAC.  Field 5 of the
		# ifconfig HWaddr line is the MAC address.
		br_default_mac=$($IFCONFIG $BR_IF | grep "HWaddr" | awk '{print $5}')
		$IFCONFIG $BR_VLAN hw ether $br_default_mac
	fi
	$IFCONFIG $BR_VLAN up
	#Disable MoCA interface, then enable MoCA VLAN interface
	moca_vlan_if_up
	#Prepare WLAN interfaces: ath0~ath15
	#Nothing to do because all WLAN interfaces are already attached to "br0".
	#Prepare LAN interface: eth1
	if [ "x$vlan_tag_id_1" != "x$vlan_tag_id_2" ]; then
		brctl delif $BR_IF $LAN_IF
		brctl addif $BR_VLAN $LAN_IF
	fi
	#restart LAN interface
	lan_restart
}
# Debug helper: kill every auxiliary daemon so tests run without
# interference from background services.
test1() {
	killall -9 ntpclient;killall -9 miniupnpd;killall -9 snmpd;killall -9 net-scan;
	killall -9 udhcpd;killall -9 udhcpc;killall -9 dniautoip;
	killall -9 telnetenable;killall -9 utelnetd;
	killall -9 acld;killall -9 crond;killall -9 boxlogin;
	killall -9 syslogd;killall -9 klogd;killall -9 hotplug2;
	#hostapd will auto shut down by 'wlan down'
	/sbin/wlan down;
	killall -9 inetd;killall -9 uhttpd
	#killall -9 button_detect;killall -9 datalib;killall -9 potval;
	#killall -9 detcable;
}
# Debug helper: raise the kernel console log level to 4.
test2() {
	echo 4 > /proc/sys/kernel/printk
}
test3() {
	#restore device from debug mode into normal mode
	$CONFIG set eth_background_control=0
	$CONFIG commit
}
# Archive the live MoCA config (*.conf under /tmp/moca/examples) into a
# gzipped tarball and persist it to flash via the dni-mtd helper.
# Fixed: every '1>/dev/mull' redirection was a typo of '/dev/null' — it
# silently created a regular file named /dev/mull instead of discarding
# stdout.
save_config_file() {
	rm -r /tmp/moca/config-to-save.tar.gz config-to-save.tar /tmp/moca/config-to-save/ 1>/dev/null 2>/dev/null
	[ ! -d /tmp/moca/config-to-save ] && mkdir -p /tmp/moca/config-to-save
	cp /tmp/moca/examples/*.conf /tmp/moca/config-to-save/
	cd /tmp/moca/
	tar cvf config-to-save.tar config-to-save 1>/dev/null 2>/dev/null
	gzip config-to-save.tar 1>/dev/null 2>/dev/null #config-to-save.tar.gz
	#get_and_save len into /dev/mtd-moca-config
	#dd if=/tmp/moca/config-to-save.tar.gz of=/dev/mtd-moca-config bs=1 seek=16 count=$(len)
	[ -f config-to-save.tar.gz ] && /etc/moca/dni-mtd save_config_file 1>/dev/null 2>/dev/null
}
# Restore the persisted config archive from flash (via dni-mtd), unpack it,
# and lay the defaults plus saved overrides into /tmp/moca/examples.
# Fixed: every '1>/dev/mull' redirection was a typo of '/dev/null' — it
# silently created a regular file named /dev/mull instead of discarding
# stdout.
output_config_file() {
	rm -r /tmp/moca/config-to-save.tar.gz config-to-save.tar /tmp/moca/config-to-save/ 1>/dev/null 2>/dev/null
	#get len from /dev/mtd-moca-config
	#dd if=/dev/mtd-moca-config of=/tmp/moca/config-to-save.tar.gz bs=1 skip=16 count=$(len)
	[ -d /tmp/moca/ ] && /etc/moca/dni-mtd output_config_file 1>/dev/null 2>/dev/null
	cd /tmp/moca/
	gzip -d config-to-save.tar.gz 1>/dev/null 2>/dev/null
	tar xvf config-to-save.tar 1>/dev/null 2>/dev/null
	cp /etc/moca/examples/* /tmp/moca/examples/
	cp /tmp/moca/config-to-save/* /tmp/moca/examples/
}
#restore clink.conf to clink.backup
# Factory-reset the MoCA configuration: erase the saved-config flash
# partition, restore clnkcfg defaults, then reset the SoC.
restore()
{
	#erase moca-config partition
	mtd erase /dev/mtd-moca-config
	#reset SoC
	/etc/moca/clnkcfg --restore
	sleep 5
	/etc/moca/clnkrst -i jaws31 &
}
#echo "----- $0 $1 -----"
# Command dispatcher: $1 selects the action; unknown/missing commands fall
# through silently.
case "$1" in
	start)
		start
		;;
	stop)
		stop
		;;
	restart)
		restart
		;;
	show_netinfo)
		show_netinfo
		;;
	show_conf)
		show_conf
		;;
	update_conf_file)
		update_conf_file
		;;
	change_channel_by_mask)
		change_channel_by_mask
		;;
	apply_change)
		apply_change
		;;
	vlan_tag_disable_without_lan_restart)
		vlan_tag_disable_without_lan_restart
		;;
	vlan_tag_disable)
		vlan_tag_disable
		;;
	vlan_tag_enable)
		vlan_tag_enable
		;;
	save_config_file)
		save_config_file
		;;
	output_config_file)
		output_config_file
		;;
	test1)
		test1
		;;
	test2)
		test2
		;;
	test3)
		test3
		;;
	restore)
		restore
		;;
esac
|
jameshilliard/WM2500RP-V1.0.0.34_gpl_src
|
package/moca-util/src/dni-scripts/moca-control.sh
|
Shell
|
gpl-2.0
| 10,263 |
#!/bin/sh
#
# TinyGL-Demos Auto-Building Script
# Created by Keripo
# For Project ZeroSlackr
# Last updated: Aug 22, 2008
#
echo ""
echo "==========================================="
echo ""
echo "TinyGL-Demos Auto-Building Script"
echo ""
# Cleanup
if [ -d build ]; then
	echo "> Removing old build directory..."
	rm -rf build
fi
# Make new compiling directory
echo "> Setting up build directory..."
mkdir build
cd build
BUILDDIR=$(pwd)
# Symlink the libraries, building any that are missing first.
echo "> Symlinking libraries..."
DIR=$(pwd)
LIBSDIR=../../../../libs
LIBS="hotdog TinyGL ttk launch"
for lib in $LIBS
do
	if [ ! -d $LIBSDIR/$lib ]; then
		cd $LIBSDIR
		echo "  - Building "$lib"..."
		./src/$lib.sh
		echo ""
		cd $DIR
	fi
	ln -s $LIBSDIR/$lib ./
done
cd $DIR
# Copy over compiled file
echo "> Copying over compiled files..."
mkdir compiled
cp -rf $LIBSDIR/TinyGL/examples/gears compiled/Gears
cp -rf $LIBSDIR/TinyGL/examples/iv compiled/ImageViewer
cp -rf $LIBSDIR/TinyGL/examples/mech compiled/Mech
cp -rf $LIBSDIR/TinyGL/examples/spin compiled/Spin
cp -rf $LIBSDIR/TinyGL/examples/texobj compiled/Texobj
# Water demo doesn't seem to work ; /
#cp -rf $LIBSDIR/TinyGL/examples/water compiled/Water
# Launch module
echo "> Building ZeroLauncher launch module..."
cp -rf ../src/launcher ./
cd launcher
# Put the ARM uClinux cross-toolchains at the front of PATH for the build.
export PATH=/usr/local/arm-uclinux-tools2/bin:/usr/local/arm-uclinux-elf-tools/bin:/usr/local/arm-uclinux-tools/bin:$PATH
make -f ../launch/launch.mk
cd ..
# Creating release
echo "> Creating 'release' folder..."
cp -rf ../src/release ./
cd release
# Files
PACK=ZeroSlackr/opt/Media/TinyGL-Demos
cp -rf ../compiled/* $PACK/
cp -rf ../launcher/* $PACK/Launch/
# Documents
cp -rf "../../ReadMe from Keripo.txt" $PACK/
cp -rf ../../License.txt $PACK/
# Delete .svn folders - directory change done in case of previous failure
cd $BUILDDIR
cd release
sh -c "find -name '.svn' -exec rm -rf {} \;" >> /dev/null 2>&1
# Done
echo ""
echo "Fin!"
echo ""
echo "Auto-Building script by Keripo"
echo ""
echo "==========================================="
|
ProjectZeroSlackr/ProjectZeroSlackr-SVN
|
packs/Media/TinyGL-Demos/build.sh
|
Shell
|
gpl-2.0
| 2,020 |
#! /bin/bash -
# ============================================================================
# TEST-SEREN.SH
# D. A. Hubber - 10/2/2009
#
# Automated script to run a complete suite of tests (or just individual tests)
# of the Seren SPH code.
#
# Usage:
# test-seren.sh [-comp] : Fortran compiler
# [-dp] : double precision
# [-openmp] : parallelise with OpenMP
# [-mpi] : parallelise with MPI
# [-n NPROC] : Set no. of MPI processes
# [-fast] : fast compiler flags
# [-debugX] : set DEBUG mode to X
# [-clean] : Clean-up files once sim. has ended
# [-all] : perform all tests
# [-listX] : perform test suite X
# [-test test1 test2 ..] : perform specified tests 1, 2 ..
#
# Perform all tests in suite : test-seren.sh -all
# Perform pre-selected list of tests, X : test-seren.sh -listX
# Perform tests 'test1', 'test2' etc.. : test-seren.sh -test test1 test2 ..
#
# List of current tests
# ADSOD-3D-AB : Adiabatic Sod test with standard AV.
# ADSOD-3D-AB-COND : As ADSOD-3D-AB but with art. conductivity.
# ADSOD-3D-GRADH-AB-COND : As ADSOD-3D-AB-COND but with 'grad-h' SPH.
# ADSOD-3D-GRADH-MON-COND : As ADSOD-3D-GRADH-AB-COND, but with MON97 AV.
# BURRAU1 : Burrau 3-body test
# COL-3D-AB : Colliding flows in 3D with AB art. visc.
# COL-3D-AB-TD : As COL-3D-AB but with time-dependent visc.
# COL-3D-AB-BAL : As COL-3D-AB but with the Balsara switch.
# COL-3D-MON : Colliding flows in 3D with AB art. visc.
# EIGEN1-MONO-KSGRAV : BH Eigen MAC test with KS gravity (4000).
# EIGEN1-QUAD-KSGRAV : As above, but with quadrupole moment terms.
# EIGEN1-OCT-KSGRAV : As above, but with octupole moment terms.
# EIGEN1-MONO-NBODY : BH Eigen MAC test with N-body gravity.
# EIGEN1-QUAD-NBODY : As above, but with quadrupole moment terms.
# EIGEN1-OCT-NBODY : As above, but with octupole moment terms.
# FIGURE8 : Figure-8 3-body test
# FREEFALL1-GRADH-BH : Freefall collapse test with grad-h gravity
# FREEFALL1-NBODY-BH : As above, but with no kernel-softening.
# GEO1-MONO-KSGRAV : BH gemoetric MAC test with KS gravity (4000).
# GEO1-QUAD-KSGRAV : As above, but with quadrupole moment terms.
# GEO1-OCT-KSGRAV : As above, but with octupole moment terms.
# GEO1-MONO-NBODY : BH geometric MAC test with N-body gravity.
# GEO1-QUAD-NBODY : As above, but with quadrupole moment terms.
# GEO1-OCT-NBODY : As above, but with octupole moment terms.
# ISOFREEFALL1-GRADH-BH : As FREEFALL1-GRADH-BH, but with isothermal gas
# NTSI1-2D-MON-COND-CONSTH : Non-linear thin shell instability
# POLYRAD1-AB : Polytropic-cooling collapse test with AB AV.
# POLYRAD1-AB-FLD : As above, but with flux-limited diffusion
# SEDOV1-3D-GRADH : Sedov blast wave test with global timesteps.
# SEDOV2-3D-GRADH : As above, but with individual timesteps.
# SEDOV3-3D-GRADH : As above, but with neighbour-checking.
# SHEAR-2D-GRADH-AB : 2D shear-flow
# SIT1-AB-BH-SINK : Boss-Bodenheimer test (standard AV, sinks).
# SIT1-GRADH-AB-BH-SINK : As above, but with 'grad-h' SPH.
# SIT1-AB-BH-SMOOTH_SINK : ..
# STATPOLY1-AB-CONSTH : Relax polytrope to hydrostatic balance.
# STATPOLY1-AB-GRADH : As above but with grad-h SPH (1000 particles)
# STATPOLY2-AB-GRADH : As above but with 100,000 particles
# STATPOLY3-AB-GRADH : As above but with 114 particles
# ============================================================================
# Set-up lists of tests
LISTADSOD="ADSOD-3D-AB ADSOD-3D-AB-COND ADSOD-3D-GRADH-AB-COND ADSOD-3D-GRADH-MON-COND"
LISTCOL="COL-3D-AB COL-3D-AB-BAL COL-3D-AB-TD COL-3D-MON"
LISTEIGEN1SPH="EIGEN1-QUAD-KSGRAV EIGEN1-OCT-KSGRAV"
LISTEIGEN1NBODY="EIGEN1-QUAD-NBODY EIGEN1-OCT-NBODY"
LISTFREEFALL="FREEFALL1-GRADH-BH FREEFALL-NBODY-BH"
LISTGEO1SPH="GEO1-MONO-KSGRAV GEO1-QUAD-KSGRAV GEO1-OCT-KSGRAV"
LISTGEO1NBODY="GEO1-MONO-NBODY GEO1-QUAD-NBODY GEO1-OCT-NBODY"
LISTGRAV="$LISTEIGEN1SPH $LISTEIGEN1NBODY $LISTGEO1SPH $LISTGEO1NBODY"
LISTKH="KH-2D-GRADH-COND"
LISTNTSI="NTSI1-2D-MON-COND-CONSTH"
LISTPOLYRAD1="POLYRAD1-AB"
LISTSEDOV="SEDOV1-3D-GRADH SEDOV2-3D-GRADH SEDOV3-3D-GRADH"
LISTSHEAR2D="SHEAR-2D-GRADH-AB SHEAR-2D-GRADH-AB-TD"
LISTSTATPOLY="STATPOLY1-AB-CONSTH STATPOLY1-AB-GRADH STATPOLY2-AB-GRADH STATPOLY3-AB-GRADH"
# FIX: was "$LISTSEODV" (typo), so '-all' silently dropped every Sedov test.
# NOTE(review): LISTKH, LISTNTSI and LISTFREEFALL are still not included in
# ALL_TESTS; confirm whether that is intentional.
ALL_TESTS="$LISTADSOD $LISTCOL $LISTGRAV $LISTPOLYRAD1 $LISTSEDOV $LISTSHEAR2D $LISTSTATPOLY"
LIST1="STATPOLY1-AB-GRADH ADSOD-3D-GRADH-MON-COND COL-3D-AB-TD SEDOV3-3D-GRADH NTSI1-2D-MON-COND-CONSTH KH-2D-GRADH-COND BURRAU1 FIGURE8 SIT1-GRADH-AB-BH-SINK POLYRAD1-AB"
# FIX: 'BURRAU""LISTGEO1SPH' concatenated into the bogus single test name
# "BURRAULISTGEO1SPH"; use the BURRAU1 test and expand the geometric-MAC list.
LISTPAPER="ADSOD-3D-GRADH-MON-COND COL-3D-AB SEDOV1-3D-GRADH SEDOV2-3D-GRADH SEDOV3-3D-GRADH FREEFALL1-GRADH-BH STATPOLY1-AB-GRADH STATPOLY2-AB-GRADH STATPOLY3-AB-GRADH FIGURE8 BURRAU1 $LISTGEO1SPH"
#ALL_TESTS="COL-3D-AB COL-3D-AB-BAL COL-3D-AB-TD COL-3D-MON97 ADSOD-3D-AB ADSOD-3D-AB-COND ADSOD-3D-GRADH-AB-COND SIT1-AB-SINK POLYRAD1-AB GEO1-MONO-KSGRAV GEO1-QUAD-KSGRAV GEO1-OCT-KSGRAV GEO1-MONO-NBODY GEO1-QUAD-NBODY GEO1-OCT-NBODY NTSI1-2D-MON-COND-CONSTH STATPOLY1-AB-CONST"
echo '-----------------'
echo 'Seren test script'
echo '-----------------'
echo 'No. of arguments : '$#

# Unset key variables for safety
unset F90 PRECISION OPENMP DEBUG TESTLIST CLEAN

# Process arguments for script
# ----------------------------------------------------------------------------
if test $# -gt 0
then
    while [ $# -gt 0 ]
    do
        case $1 in
            # Compiler selection
            -gfortran)
                F90=gfortran
                ;;
            -f90)
                F90=f90
                ;;
            -ifort)
                F90=ifort
                ;;
            -g95)
                F90=g95
                ;;
            -f95)
                F90=f95
                ;;
            -dp)
                PRECISION=DOUBLE
                ;;
            -openmp)
                OPENMP=1
                ;;
            -mpi)
                MPI=1
                MPIF90=mpif90
                GHOST_PARTICLES=1
                ;;
            # Number of MPI processes
            -n1)
                NPROC=1
                ;;
            -n2)
                NPROC=2
                ;;
            -n4)
                NPROC=4
                ;;
            -n8)
                NPROC=8
                ;;
            -fast)
                COMPILER_MODE=FAST
                ;;
            -debug)
                COMPILER_MODE=DEBUG
                ;;
            -debug0)
                DEBUG=0
                ;;
            -debug1)
                DEBUG=1
                ;;
            -debug2)
                DEBUG=2
                ;;
            -debug3)
                DEBUG=3
                ;;
            -clean)
                CLEAN=1
                ;;
            # Test-list selection: each of these sets TESTLIST and stops
            # option processing.
            -all)
                TESTLIST=$ALL_TESTS
                echo "Running all known tests: "$TESTLIST
                break
                ;;
            -list1)
                TESTLIST=$LIST1
                echo "Running test list 1: "$TESTLIST
                break
                ;;
            -listPAPER)
                TESTLIST=$LISTPAPER
                # FIX: message used to say "test list 1" (copy-paste slip).
                echo "Running paper test list: "$TESTLIST
                break
                ;;
            -listADSOD)
                TESTLIST=$LISTADSOD
                echo "Running adiabatic Sod test list: "$TESTLIST
                break
                ;;
            -listCOL)
                TESTLIST=$LISTCOL
                echo "Running colliding flows test list: "$TESTLIST
                break
                ;;
            -listRAD)
                # FIX: previously used the undefined variable $LISTRAD, so
                # this option silently ran nothing; the polytropic cooling
                # tests live in $LISTPOLYRAD1.
                TESTLIST=$LISTPOLYRAD1
                echo "Running Polytropic cooling test list: "$TESTLIST
                break
                ;;
            -listEIGEN1NBODY)
                TESTLIST=$LISTEIGEN1NBODY
                echo "Running tree grav Eigen MAC N-body test list: "$TESTLIST
                break
                ;;
            -listEIGEN1SPH)
                TESTLIST=$LISTEIGEN1SPH
                echo "Running tree grav Eigen MAC KS test list: "$TESTLIST
                break
                ;;
            -listFREEFALL)
                TESTLIST=$LISTFREEFALL
                echo "Running Freefall collapse test list: "$TESTLIST
                break
                ;;
            -listGEO1SPH)
                TESTLIST=$LISTGEO1SPH
                echo "Running tree grav geometric MAC KS test list: "$TESTLIST
                break
                ;;
            -listGEO1NBODY)
                TESTLIST=$LISTGEO1NBODY
                echo "Running tree grav geometric MAC N-body test list: "$TESTLIST
                break
                ;;
            -listGRAV)
                TESTLIST=$LISTGRAV
                echo "Running tree gravity test list: "$TESTLIST
                break
                ;;
            -listSEDOV)
                TESTLIST=$LISTSEDOV
                echo "Running Sedov blast wave test list: "$TESTLIST
                break
                ;;
            -listSHEAR2D)
                TESTLIST=$LISTSHEAR2D
                echo "Running Shear-flow test list: "$TESTLIST
                break
                ;;
            # Explicit test names: everything after -test is the list.
            -test)
                shift
                if test $# -gt 0
                then
                    TESTLIST="$@"
                    echo "Running selected test via argument list: "$TESTLIST
                else
                    exit 1
                fi
                break
                ;;
            -*)
                # FIX: previously called the undefined helper 'error' and
                # then exited 0 (success) on a bad option; report on stderr
                # and fail instead.
                echo "Unrecognised option: $1" >&2
                exit 1
                ;;
            *)
                echo "Unrecognised argument: $1" >&2
                exit 1
                ;;
        esac
        shift
    done
else
    echo 'No arguments - Exiting script'
    exit 1
fi
# Initialise variables for test.
# ----------------------------------------------------------------------------
# NOTE(review): assumes the script is started from the scripts/ directory —
# 'cd ..' is expected to land in the Seren top-level directory; confirm.
cd ..
SEREN_DIR=$(pwd)
SCRIPT_DIR=$SEREN_DIR/scripts
#IC_DIR=$SCRIPT_DIR/IC
# Initial conditions are read from ./icfiles, relative to SCRIPT_DIR (the
# script cd's back there below before using it).
IC_DIR=./icfiles
TESTFILE_DIR=$SCRIPT_DIR/test-files
PLOTFILE_DIR=$SCRIPT_DIR/plot-files
cd $SCRIPT_DIR
# Results (.ps figures) for all tests are collected under scripts/results.
if [ ! -e results ]
then
mkdir results
fi
echo 'SCRIPT_DIR : '$SCRIPT_DIR
echo 'SEREN_DIR : '$SEREN_DIR
# Now loop over all tests
# ============================================================================
for i in $TESTLIST
do
# Read in Makefile and parameter options for current test.
# --------------------------------------------------------------------------
cd $SCRIPT_DIR
testname=$i
TEST_DIR=$SCRIPT_DIR/$testname
echo 'Searching for '$testname
# Each test is defined by a sourced .test file that sets Makefile options,
# parameter values, the IC file name ($in_file), $PROG_NAME/$PROG_EX and
# plotting variables ($PLOT_FILE, $PLOT_PROG, $ANALYTIC).
if test -e $TESTFILE_DIR/$testname.test
then
rm -rf $TEST_DIR
mkdir $TEST_DIR
echo 'Reading '$testname.test
source $TESTFILE_DIR/$testname.test
# Construct Makefile for current test. Set default values for variables
# that don't yet exist (i.e. for options not required by current test).
# Finally, add 'tail' containing the rest of the Makefile.
cd $SCRIPT_DIR
rm -f Makefile
source ./create_makefile_header.sh
echo '' >> Makefile
echo 'DFLAGS += '${DFLAGS:="-DDEBUG_DIAGNOSTICS -DDEBUG_PLOT_DATA -DDEBUG_TRACK_ENERGY"} >> Makefile
# echo 'DFLAGS += '${DFLAGS:="-DDEBUG_DIAGNOSTICS -DDEBUG_PLOT_DATA -DTRACK_ENERGY -Wall -Wconversion -ffpe-trap=invalid,zero,overflow,underflow,denormal"} >> Makefile
echo 'CFLAGS += $(DFLAGS)' >> Makefile
echo '' >> Makefile
cat $SEREN_DIR/'makefiletail.mk' >> Makefile
# Next, construct 'params.dat' file for current test.
cd $SCRIPT_DIR
rm -f params.dat
source ./create_parameters_file.sh
# Now place the Makefile, parameters file, initial conditions file and
# any other necessary files inside the test directory.
mv Makefile $TEST_DIR/.
mv params.dat $TEST_DIR/.
cp $IC_DIR/$in_file.gz $TEST_DIR/.
gunzip $TEST_DIR/$in_file.gz
# Now make seren (or other program) in test directory and clean up any
# '.o' and '.mod' files.
echo 'Compiling SEREN'
cd $TEST_DIR
make -s -j 12 $PROG_NAME
rm -f *.o *.mod
# Run seren in test directory.
# NOTE(review): the MPI launch path is commented out, so MPI-configured
# runs still execute the serial binary here — confirm if intended.
#if "$MPI" -e "1"
# then
#mpirun -n $NPROC ./seren-mpi
#else
time ./$PROG_EX
#fi
# Prepare figures for current test.
# --------------------------------------------------------------------------
cd $PLOTFILE_DIR
if test -e "$PLOT_FILE"
then
cp $PLOT_FILE $TEST_DIR/.
if test -e "$ANALYTIC"
then
cp $PLOTFILE_DIR/$ANALYTIC $TEST_DIR/.
fi
cd $TEST_DIR
if test "$PLOT_PROG" == "gnuplot"
then
gnuplot < $PLOT_FILE
fi
mv $testname.ps $SCRIPT_DIR/results/.
fi
cd $TEST_DIR
# Clean-up files if required
# NOTE(review): this removes the current working directory; the next
# iteration recovers via 'cd $SCRIPT_DIR' — confirm this is acceptable.
if [ "$CLEAN" == "1" ]
then
rm -rf $TEST_DIR
fi
else
echo $testname 'does not exist'
fi
# Now unset (almost) all variables in preparation for next test.
# --------------------------------------------------------------------------
echo 'Unsetting all bash variables for next test'
unset -v SRCDIR EXEDIR OPTIMISE COMPILER_MODE OUTPUT_LEVEL NDIM
unset -v INFILE_FORMAT OUTFILE_FORMAT PERIODIC X_BOUNDARY Y_BOUNDARY
unset -v Z_BOUNDARY SPHERICAL_WALL CYLINDRICAL_WALL
unset -v SPH_SIMULATION NBODY_SPH_SIMULATION NBODY_SIMULATION SPH
unset -v SPH_INTEGRATION KERNEL HFIND MINIMUM_H HYDRO ENERGY_EQN ENTROPY_EQN
unset -v ARTIFICIAL_VISCOSITY BALSARA VISC_TD PATTERN_REC
unset -v ARTIFICIAL_CONDUCTIVITY EXTERNAL_PRESSURE
unset -v RAD_WS SINK_POTENTIAL_WS AMBIENT_HEATING_WS SINK_HEATING_WS
unset -v FLUX_LIMITED_DIFFUSION COOLING_HEATING IONIZING_RADIATION
unset -v STELLAR_WIND PARTICLE_INJECTION_WINDS STELLAR_LUMINOSITY
unset -v EXTERNAL_FORCE SELF_GRAVITY MEAN_H_GRAVITY EWALD
unset -v SINKS SINK_RADIUS SINK_REMOVE_ANGMOM SINK_GRAVITY_ONLY
unset -v NBODY_INTEGRATION BINARY_STATS BINARY_COM_MOTION FORCE_SPLITTING
unset -v TREE MULTIPOLE MAC REORDER CELL_WALK SORT TIMESTEP
unset -v CHECK_NEIB_TIMESTEP SIGNAL_VELOCITY_DT NEIGHBOURLISTS
unset -v KERNEL_TABLES REMOVE_OUTLIERS TIMING_CODE DIMENSIONLESS TEST
unset -v run_id run_dir in_file_form out_file_form
unset -v restart com_frame rseed ptrack
unset -v sph_endtime nbody_sph_endtime nbody_endtime firstsnap snaptime
unset -v noutputstep ntempstep ndiagstep nsinkstep nsnapstep
unset -v courant_mult accel_mult sink_mult nbody_timemult nlevels dt_fixed
unset -v runit munit tunit vunit aunit rhounit sigmaunit Punit funit Eunit
unset -v momunit angmomunit angvelunit dmdtunit Lunit kappaunit Bunit Qunit
unset -v Junit uunit tempunit dudtunit rscale mscale
unset -v periodic_min_x periodic_max_x periodic_min_y periodic_max_y
unset -v periodic_min_z periodic_max_z rspheremax psphere
unset -v pp_gather hmin h_fac boundaryeos icmeos gaseos
unset -v isotemp rhobary gamma mu_bar Kpoly Pext cooling_law
unset -v alpha beta alpha_min
unset -v abserror thetamaxsqd nbuildstep
unset -v rhosink sinkrad nsearchstep rho_search potmin_search
unset -v hill_sphere_search energy_search div_v_search div_a_search
unset -v timescale_search energy_accrete alpha_ss smooth_accrete_frac
unset -v smooth_accrete_dt f_accretion feedback_tdelay feedback_minmass
unset -v star_radius alpha_EA dmdt_regular z_factor
unset -v rho_remove energy_remove rad_remove rholost rad_lost
unset -v npec nbody_frac gammapertmax
unset -v eos_opa_file ptemp0 temp_inf ptemp_r0 ptemp_q fcolumn
unset -v nionallstep f1 f2 f3 f4 Tneut Tion Xfrac a_star N_LyC
unset -v rstatic1 rstatic2 rstatic3 lmax_hp M_loss v_wind
unset -v ANALYTIC PLOT_PROG PLOT_FILE PROG_EX
done
# ============================================================================
echo 'Finished all tests'
# FIX: exit with success status; exiting 1 after a clean run breaks any
# caller (CI, wrapper script) that checks the exit code.
exit 0
|
dhubber/seren
|
scripts/test-seren.sh
|
Shell
|
gpl-2.0
| 15,630 |
#!/bin/bash
# Install video-editing tools (unattended).
#
# Installed packages:
#   handbrake - video converter (e.g. DVD -> mp4)
#   shotcut   - video editor (alternative to openshot)

# Run apt non-interactively so automated provisioning never blocks on prompts.
export DEBIAN_FRONTEND=noninteractive

# Converter and editor in a single transaction.
apt-get install -y handbrake shotcut

# Alternative Shotcut install via PPA (kept for reference, disabled):
# apt-get -y install libsdl2-dev
# add-apt-repository -y ppa:haraldhv/shotcut
# apt-get -y update
# apt-get -y install shotcut
|
edvapp/autoinstall
|
laus/scriptsForClasses/AGI/325-installVideoEditors.sh
|
Shell
|
gpl-2.0
| 406 |
#!/bin/bash
# Pre-build every mmtests benchmark package for the current architecture and
# cache each built tree as prebuilds/$ARCH/<package>.tar.gz so later test
# runs can skip the build step.
# FIX: legacy backticks replaced with $(...) and expansions quoted so the
# script also works from paths containing spaces.
DIRNAME=$(dirname "$0")
export SCRIPTDIR=$(cd "$DIRNAME" && pwd)
ARCH=$(uname -m)
. "$SCRIPTDIR/shellpacks/common.sh"
. "$SCRIPTDIR/shellpacks/common-config.sh"

cd "$SHELLPACK_TOPLEVEL"
for DIRNAME in $SHELLPACK_SOURCES $SHELLPACK_LOG $SHELLPACK_TEMP; do
	if [ ! -e "$DIRNAME" ]; then
		mkdir -p "$DIRNAME"
	fi
done
mkdir -p "$SHELLPACK_TOPLEVEL/prebuilds/$ARCH/"

for PACKAGE in ffsb fsmark hackbench lmbench memcached memcachetest netperf pft pipetest postgresbuild postmark starve sysbench; do
	# Check if we already built it
	if [ -e "$SHELLPACK_TOPLEVEL/prebuilds/$ARCH/$PACKAGE.tar.gz" ]; then
		echo Already built $PACKAGE for arch $ARCH
		continue
	fi

	# These two packages create users/install services during their build.
	if [ "$PACKAGE" = "postgresbuild" -o "$PACKAGE" = "sysbench" ]; then
		if [ "$(whoami)" != "root" ]; then
			echo root required to build package $PACKAGE
			continue
		fi
	fi

	# Clean out the sources directory and build this package.
	# ${VAR:?} aborts instead of expanding to "/" if the variable is empty.
	rm -rf "${SHELLPACK_SOURCES:?}"/*
	echo Prebuilding $PACKAGE for arch $ARCH
	"$SHELLPACK_TOPLEVEL/prebuild-mmtest.sh" $PACKAGE > "$SHELLPACK_TEMP/build.log" 2>&1
	if [ $? -ne 0 ]; then
		cat "$SHELLPACK_TEMP/build.log"
		echo BUILD FAILED FOR PACKAGE $PACKAGE ARCH $ARCH
		echo Build log: "$SHELLPACK_TEMP/build.log"
		exit $SHELLPACK_ERROR
	fi
	echo Creating "$SHELLPACK_TOPLEVEL/prebuilds/$ARCH/$PACKAGE.tar.gz"
	tar -czf "$SHELLPACK_TOPLEVEL/prebuilds/$ARCH/$PACKAGE.tar.gz" work/testdisk/sources/*-installed 2> /dev/null
done

# Build different versions of dbench
for VERSION in 3.04 4.0; do
	# Check if we already built it
	if [ -e "$SHELLPACK_TOPLEVEL/prebuilds/$ARCH/dbench$VERSION.tar.gz" ]; then
		echo Already built dbench$VERSION for arch $ARCH
		continue
	fi

	# Clean out the sources directory and build this package
	rm -rf "${SHELLPACK_SOURCES:?}"/*
	echo Prebuilding dbench$VERSION for arch $ARCH
	"$SHELLPACK_TOPLEVEL/prebuild-mmtest.sh" dbench -v $VERSION > "$SHELLPACK_TEMP/build.log" 2>&1
	if [ $? -ne 0 ]; then
		cat "$SHELLPACK_TEMP/build.log"
		echo BUILD FAILED FOR PACKAGE dbench$VERSION ARCH $ARCH
		echo Build log: "$SHELLPACK_TEMP/build.log"
		exit $SHELLPACK_ERROR
	fi
	echo Creating "$SHELLPACK_TOPLEVEL/prebuilds/$ARCH/dbench$VERSION.tar.gz"
	tar -czf "$SHELLPACK_TOPLEVEL/prebuilds/$ARCH/dbench$VERSION.tar.gz" work/testdisk/sources/*-installed 2> /dev/null
done

rm -rf "${SHELLPACK_TEMP:?}"
exit $SHELLPACK_SUCCESS
|
wjn740/mmtests
|
prebuild-all.sh
|
Shell
|
gpl-2.0
| 2,302 |
#!/bin/sh
# Fetch, build and smoke-test lesk-wsd-dsm (word-sense disambiguation with
# distributional semantic models). Requires git, ant, wget and a JDK 7.
JAVA_HOME=/user/vbasile/home/.local/lib/jdk1.7.0_79
# get the code
cd ext/
git clone https://github.com/pippokill/lesk-wsd-dsm.git
cd lesk-wsd-dsm
# compile
ant jar -Dplatforms.JDK_1.7.home=$JAVA_HOME
# Download babelNet 2.5.1 indexes and API
wget http://babelnet.org/data/2.5/babelnet-2.5-index-bundle.tar.bz2
tar -xjvf babelnet-2.5-index-bundle.tar.bz2
wget http://babelnet.org/data/2.5/BabelNet-API-2.5.tar.bz2
tar -xjvf BabelNet-API-2.5.tar.bz2
cp -r BabelNet-API-2.5/resources/* resources/
cp BabelNet-API-2.5/babelnet-api-2.5.jar lib/
# Park the older API jar so only the 2.5 jar is on the classpath.
mv lib/babelnet-api-1.1.1.jar lib/babelnet-api-1.1.1.jar.bak
# fix BabelNet configuration files
# Replace the last line of the properties file with the local BabelNet dir.
cat config/babelnet.var.properties | head -n -1 > config/babelnet.var.properties.new
echo "babelnet.dir="`pwd`/BabelNet-2.5 >> config/babelnet.var.properties.new
mv config/babelnet.var.properties.new config/babelnet.var.properties
cp BabelNet-API-2.5/config/babelnet.properties config/
# download the Word Space Model
# NOTE(review): public Dropbox links of this form are no longer served;
# this URL probably needs replacing with a current mirror.
wget https://dl.dropboxusercontent.com/u/66551436/termvectors_en.bin
mkdir -p resources/dsm
mv termvectors_en.bin resources/dsm/
# download WordNet 3.1
mkdir WordNet3.1
cd WordNet3.1
wget http://wordnetcode.princeton.edu/wn3.1.dict.tar.gz
tar -xzvf wn3.1.dict.tar.gz
cd ..
# fix Wordnet configuration files
# Escape slashes in the cwd so it can be embedded in the sed replacement.
wndir=`pwd | sed 's/\//\\\\\//g'`
cat config/jlt.var.properties | sed -e "s/^jlt.wordnetPrefix.*/jlt.wordnetPrefix=$wndir\/WordNet3.1/" > config/jlt.var.properties.new
mv config/jlt.var.properties.new config/jlt.var.properties
# test
# Smoke test: disambiguate the first 8 lines of the sample corpus.
head -n 8 text/multilingual-all-words.en.plain > test.plain
./run.sh -i test.plain \
-o out.plain \
-cm doc \
-f plain \
-dsm ./resources/dsm/termvectors_en.bin \
-lang en \
-sc ./resources/sense/sense.freq \
-sf bn \
-c max \
-of plain \
-depth 1
# NOTE(review): this single-line call repeats the invocation above —
# presumably a leftover; confirm whether one of the two can be dropped.
./run.sh -i test.plain -o out.plain -cm doc -f plain -dsm ./resources/dsm/termvectors_en.bin -lang en -sc ./resources/sense/sense.freq -sf bn -c max -of plain -depth 1
|
Remper/learningbyreading
|
ext/install_lesk-wsd-dsm.sh
|
Shell
|
gpl-2.0
| 1,938 |
#!/bin/sh
# Run the test suite under python2 and, if it passes, under python3 as well.
echo "testing with $(python2 -V 2>&1)"
python2 "$(which nosetests)" --rednose -v --with-coverage --cover-erase tests/
# FIX: '[ $? == 0 ]' is a bashism; POSIX test only knows '='/'-eq', so under
# dash the comparison failed ("unexpected operator") and the python3 run
# never happened.
if [ $? -eq 0 ]; then
    echo
    echo "testing with $(python3 -V 2>&1)"
    python3 "$(which nosetests-3)" --rednose -v --with-coverage --cover-erase tests/
fi
|
TechRunner2/i3-gaps-rice
|
.config/i3/bar/runtests.sh
|
Shell
|
gpl-2.0
| 279 |
#!/bin/bash
# Shared shell helpers for the evttohus dictionary/corpus pipeline.
# Assumes Giellatekno env vars (GTHOME, GTFREE, GTBOUND) are set by callers.
FAD_ONLY=${FAD_ONLY-true}
# by default, only generate suggestions from words that had src=fad in nobsme-dicts
# if env has false, generate suggestions from all words
if [[ -n $LOOKUP && $LOOKUP != "lookup -q -flags mbTT" ]]; then
echo "Warning: overriding strange value of LOOKUP: $LOOKUP"
fi
export LOOKUP="lookup -q -flags mbTT"
# "perl sed": apply a perl -pe program to stdin with UTF-8-safe I/O.
psed () {
perl -CSAD -wnpe "$@"
}
# Pretty-print tab-separated input as aligned columns.
tabalign () {
column -ts$'\t'
}
rev () {
# /usr/bin/rev on OS X doesn't handle unicode. Just wonderful.
perl -CSAD -wlnpe '$_=reverse($_)'
}
# Frequency list of word-final character 1- to 4-grams of the input lines.
suffix_chargrams () {
# hitparade of most popular suffixes up to fourgrams:
psed 's/.*(.(.(.(.))))$/$1\n$2\n$3\n$4/' | sort | uniq -c | sort -n
}
# Run $LOOKUP on an FST, keeping only lines with a real analysis
# (drops "+?" unknowns and empty lines).
lookup_good () {
fst=$1
$LOOKUP "${fst}" | grep -v '+?$' |grep .
}
# Tokenise text with the language's preprocess script and abbreviation list.
preproc () {
lang=$1
shift
$GTHOME/gt/script/preprocess --abbr=$GTHOME/langs/${lang}/tools/preprocess/abbr.txt "$@"
}
# Morphologically analyse already-tokenised input for ${lang}.
ana_no_prep () {
lang=$1
$LOOKUP $GTHOME/langs/${lang}/src/analyser-gt-desc.xfst
}
# Tokenise, then analyse, raw text for ${lang}.
ana () {
lang=$1
shift
preproc ${lang} "$@" | ana_no_prep ${lang}
}
lemma_per_line () {
# Used by make-freq.sh
# Note: if a form is ambiguous, it gets an unnaturally high lemma
# "corpus count", but there's not really any simple way around
# this, and we're not really after absolute corpus counts either
# (this is more for comparing progress and whether something
# exists at all)
awk -F'\t' '/^$/{for(i in a)if(i)print i; for(i in a)delete a[i]} {sub(/\+.*/,"",$2);a[$2]++}'
}
to_freqlist () {
    # Turn a stream of lines into a "count<TAB>item" list, most frequent
    # first. Only the separator space after the count is converted to a tab,
    # so items containing spaces survive intact.
    sort \
        | uniq -c \
        | sort -rn \
        | awk '{sub(/^ +/, ""); sub(/ /, "\t"); print}'
}
# Replace punctuation, digits and odd quote characters with spaces,
# normalising the remaining quote variants to a plain apostrophe.
clean_punct () {
sed "s/[…““’]/'/g" | tr $' ;/\\0123456789{}[]«»"_.?:-:,)(””!¶\t'"'" ' '
}
# Join two "freq<TAB>item" lists on the item (second) column.
join_freq () {
# Join two tsv lists by their second column
freq1=$1
freq2=$2
(
LC_ALL=C
join -t$'\t' -j2 \
<(sort -k2,2 -t$'\t' "${freq1}" ) \
<(sort -k2,2 -t$'\t' "${freq2}")
)
}
freq_annotate () {
# Look up a column ($1) of candidates from stdin in freqfile ($2)
# and append the freq to each line of the candidates.
# sum is the sum of freqs, norm is the sum of a freqfile.
# A candidate cell may hold several words joined by "/"; their freqs are
# averaged, rescaled by norm/sum and rounded up before being appended.
awk -v column="$1" -v freqs="$2" -v sum="$3" -v norm="$4" '
BEGIN{
OFS=FS="\t"
while(getline<freqs)freq[$2]=$1
}
function ceil(xs) {
x = sprintf("%d", xs)
return (xs == int(x)) ? x : int(x)+1
}
{
f=0
n=split($column, words, "/")
for(i=1;i<=n;i++) {
f += freq[words[i]]
}
fn=f/(n+0.00000001)
print $0,ceil(fn*norm/sum)
}'
}
clean_cmp_ana () {
# Grep for compound analyses of a certain part of speech, and turn
# output of lookup into tab-separated form followed by compound
# lemmas
lang=$1
pos=$2
if [[ ${lang} = nob ]]; then
# nob for some reason has completely different analysis format :(
grep "#+Cmp.*+${pos}[^#]*$" \
| sed 's/+X+N/+N/g;s/+Nynorsk+N/+N/g' \
| sed 's/+[^#+]*#+CmpS*+/ /g;
s/+[^#+]*#+CmpS*-/- /g;
s/+[^#]*$//' \
| sed 's/ */ /g'
# samarbeidsspørsmål samarbeid +N#+CmpS+spørs+X+N#+Cmp+mål+N+Neu+Pl+Indef
# forsøksråd for +N#+Cmp+søk+N#+CmpS+råd+N+Neu+Sg+Indef
# primærprodukt primær+A#+Cmp+produkt+N+Neu+Sg+Indef
# kjerneområde kjerne +N#+Cmp+område+N+Neu+Sg+Indef
# kystfiskerlag kystfisker +N#+Cmp+lag+N+Neu+Pl+Indef
else
sed 's/\([^ +]*\)\([^# ]*\)+Cmp-/\1-\2+Cmp/g' \
| grep "+Cmp-*#.*+${pos}[^#]*$" \
| sed 's/+[^#]*#*/ /g' \
| sed 's/ */ /g' \
| sed 's/ $//'
fi
}
# Turn full analyses into "form<TAB>lemma-part..." lines; handles both the
# nob analyser format and the standard GT format in one pass.
ana_to_lemmas () {
psed '# nob analyser:
s/\+(X|Nynorsk)\+N/+N/g;
s/\+[^#+\n]*#\+CmpS*\+/\t/g;
s/\+[^#+\n]*#\+CmpS*-/-\t/g;
s/\+[^#\n]*$//;
# all other analysers:
s/\+[^#\n]*#*/\t/g;
s/\t+/\t/g;
s/\t$//'
}
convert_all () {
    # Convert the bound (if available) and free corpora for $lang to XML,
    # running both conversions in parallel.
    lang=$1
    P1=
    if [[ -n $GTBOUND ]]; then
        convert2xml $GTBOUND/orig/$lang & P1=$!
    else
        # Status messages go to stderr so they never mix into captured output.
        echo "GTBOUND not set, only converting GTFREE" >&2
    fi
    convert2xml $GTFREE/orig/$lang & P2=$!
    wait $P1 $P2
}
ccat_all () {
    # Emit the plain-text corpus for $lang on stdout (bound + free when
    # GTBOUND is set, free only otherwise).
    lang=$1
    if [[ -n $GTBOUND ]]; then
        cat <(ccat -a -l $lang $GTBOUND/converted/$lang) \
            <(ccat -a -l $lang $GTFREE/converted/$lang)
    else
        # FIX: this message used to go to stdout, where it ended up inside
        # the corpus text consumed by callers; send it to stderr instead.
        echo "GTBOUND not set, only ccat-ing GTFREE" >&2
        ccat -a -l $lang $GTFREE/converted/$lang
    fi
    # Might as well include the few example sentences we have:
    if [[ $lang = sma ]]; then
        sort -u <(xmlstarlet sel -t -m '//x' -c 'text()' -n $GTHOME/words/dicts/smanob/src/*.xml) \
            <(xmlstarlet sel -t -m '//xt' -c 'text()' -n $GTHOME/words/dicts/nobsma/src/*.xml)
    fi
}
# Extract "lemma<TAB>translation..." pairs from dictionary XML files.
# $1 = pair dir (e.g. smenob), $2 = extra XPath predicate on //e, rest = files.
dict_xml2tsv () {
dir=$1
lang1=${dir%???}
lang2=${dir#???}
restriction=$2
shift
shift
# forall e where $restriction
# print lg/l/text
# forall mg//tg/t:
# print text()
# print "\n"
xmlstarlet sel -t \
-m "//e${restriction}" -c './lg/l/text()' \
-m "./mg//tg[@xml:lang='${lang2}' or not(@xml:lang)]/t" -o $'\t' -c './text()' \
-b -n \
"$@" | gawk -F'\t' '$1 && $2'
}
# Normalise the PoS prefix of dictionary file names (capitalise first letter,
# then map legacy tags, e.g. Pp_/Prep_ -> Pr_), and strip 6-letter pair codes.
normalisePoS () {
psed 's/(\w+)_/\u\L$1_/;
s/Cc_/CC_/; s/Cs_/CS_/; s/(Pp|Prep)_/Pr_/; s/P_/Po_/; s/I_/Ij_/;
s/_[a-z]{6,6}[.]/./;'
}
apertiumpos () {
    # Map a Giellatekno part-of-speech tag to its Apertium equivalent.
    # Unknown tags fall back to a regex fragment matching any tag content.
    local gtpos=$1
    case "$gtpos" in
        V)    printf '%s\n' vblex ;;
        N)    printf '%s\n' n ;;
        A)    printf '%s\n' a ;;      # might become adj later!
        Adv)  printf '%s\n' adv ;;
        Pron) printf '%s\n' prn ;;
        Po)   printf '%s\n' po ;;
        Pr)   printf '%s\n' pr ;;
        *)    printf '%s\n' '[^<]*' ;;
    esac
}
# Extract one dictionary pair directory ($2) into per-PoS tsv files under
# cwd, restricted by XPath predicate $1; handles the source-format quirks of
# the various pair dirs (csv, apertium dix, kintel, nob ':::' lists).
dir2tsv () {
# Will output into "$dir" under cwd
restriction=$1
dir=$2
test -d ${dir} || mkdir ${dir}
rm -f ${dir}/[VNA].tsv # The main files we want
# Some source directories are missing a lot of files, ensure at
# least these exists:
touch "${dir}"/{V,N,A,nonVNA}.tsv
if [[ ${dir} = smesmj ]]; then
for csv in $GTHOME/words/dicts/${dir}/src/*.csv; do
tsv=${dir}/$(basename "$csv")
tsv=${tsv%%.csv}.tsv
tsv=$(echo "$tsv" | normalisePoS)
cut -f1-2 <"${csv}" > "${tsv}"
done
elif [[ ${dir} = smesma ]]; then
# Do not try with sme-nob, takes an hour
adir=sme-sma
lt-expand ../apertium-${adir}.${adir}.dix > apertium-${adir}.${adir}.exp
for pos in V N A Adv Pron Po Pr; do
apos=$(apertiumpos ${pos})
grep -i "<${apos}>.*:.*<${apos}>" apertium-${adir}.${adir}.exp \
| grep -v '<prop>' | sed 's/<[^>]*>//g' \
| sed 's/:[<>]:/:/' \
| tr ':' '\t' >"${dir}/${pos}_apertium.tsv"
done
elif [[ ${dir} != smjnob ]]; then
for xml in $GTHOME/words/dicts/${dir}/src/*_${dir}.xml; do
tsv=${dir}/$(basename "${xml}")
tsv=${tsv%%.xml}.tsv
tsv=$(echo "$tsv" | normalisePoS)
# Why does this sometimes return non-zero even though good output?
dict_xml2tsv ${dir} "${restriction}" "${xml}" > "${tsv}" || true
done
fi
if [[ ${dir} = nobsmj || ${dir} = smjnob ]]; then
kintel2tsv ${dir}
# Kintel files will contain _kintel in the name, so we can
# separate them out in canonicalise.sh; but we only look at
# plain N.tsv etc. when generating candidates; so N_kintel.tsv
# is appended to plain N.tsv below.
fi
if [[ ${dir} = smanob ]]; then
for xml in $GTHOME/words/dicts/smaswe/src/*_smaswe.xml; do
tsv=${dir}/$(basename "${xml}")
tsv=${tsv%%.xml}.tsv
tsv=$(echo "$tsv" | normalisePoS)
# Why does this sometimes return non-zero even though good output?
dict_xml2tsv ${dir} "${restriction}" "${xml}" >> "${tsv}" || true
done
fi
# "bad" pairs (marked with ' ::: ' in the source files) are kept separately.
if [[ ${dir} = nob* ]]; then
for pos in V N A; do
<"$GTHOME/words/dicts/${dir}/src/${pos}_${dir}.xml" \
awk -F' ::: ' '$2{print $1"\t"$2}' >"${dir}/bad_${pos}.tsv"
done
touch "${dir}/bad_nonVNA.tsv" # TODO: some dirs don't even have nonVNA xml's
elif [[ ${dir} = sme* ]]; then
lang1=${dir%???}
lang2=${dir#???}
for pos in V N A; do
apos=$(apertiumpos "${pos}")
<"../apertium-${lang1}-${lang2}.${lang1}-${lang2}.dix" \
gawk -v pos="${apos}" -F' ::: ' '$2 && $3==pos{print $1"\t"$2}' \
>"${dir}/bad_${pos}.tsv"
done
touch "${dir}/bad_nonVNA.tsv" # TODO: some dirs don't even have nonVNA xml's
fi
# We only use files named $dir/$pos_$dir.tsv, e.g.
# smenob/V_smenob.tsv; append some entries from the more funnily
# named files to the ordinary-named files.
for f in ${dir}/[VNA]_{Pl,G3,mwe,NomAg,kintel}*.tsv ; do
[[ -f $f ]] || continue
b=$(basename "$f")
pos=${b%%_*}
dir=$(dirname "$f")
cat "$f" >> "${dir}/${pos}.tsv"
done
}
# As dir2tsv, but restricted to entries marked src="fad" (on the entry
# itself or any descendant).
dir2tsv_fad () {
dir2tsv '[contains(@src,"fad") or .//*[contains(@src,"fad")]]' "$@"
}
# All words of PoS $2 for language $1, merged from both dictionary
# directions (source column of lang->X pairs, target columns of X->lang).
mono_from_bi () {
lang=$1
pos=$2
if [[ ${pos} = nonVNA ]]; then
pos=[^VNA]
fi
cat <(cut -f1 ${lang}???/${pos}*.tsv) \
<(cut -f2- ???${lang}/${pos}*.tsv | tr '\t' '\n') \
| sort -u
}
kintel2tsv () {
# Extract the Kintel smj<->nob dictionary into per-PoS tsv files; finished
# translations go to ${pos}_kintel.tsv, unfinished ones (entries without a
# .//t element) to ${pos}_kintel.tsv.unchecked.
dir=$1
lang1=${dir%???}
lang2=${dir#???}
dir2=${lang1}2${lang2}
test -d ${dir} || mkdir ${dir}
for pos in V N A nonVNA; do
if [[ $pos = nonVNA ]]; then
restriction="[.//l[not(@pos='V' or @pos='N' or @pos='A' or @obt='V' or @obt='N' or @obt='A')]]"
else
restriction="[.//l[(@pos='${pos}' or @obt='${pos}')]]"
fi
xml=$GTHOME/words/dicts/smjnob-kintel/src/${dir2}/*.xml
tsv=${dir}/${pos}_kintel.tsv
# Extract the finished translations:
dict_xml2tsv ${dir} "${restriction}" ${xml} > "${tsv}" || true
# but also include the unfinished ones (no .//t):
xmlstarlet sel -t \
-m "//e${restriction}" -c './lg/l/text()' \
-m './mg[count(.//t)=0]/trans_in' -o $'\t' -c './/span[not(contains(@STYLE,"font-style:italic"))]/text()' \
-b -n \
${xml} \
| psed 's/ el[.] /\t/g' \
| psed 's/\([^)]*\)/\t/g' \
| psed "s/( [bDdfGgjlmnŋprsVvbd][bDdfGgjlmnŋprsVvbdthkRVSJN']*| -\p{L}+-)*(\$|[ ,.;])/\t/g" \
| psed 's/\t[0-9]+/\t/g' \
| psed 's/\t\t/\t/g;s/^\t//' > "${tsv}".unchecked
done
}
lexc2lms () {
# ugly hack to grep some lemmas out of lexc's
# NOTE(review): '[@+-:<]' contains the range '+'..':' (which also covers
# digits and ,-./) — confirm that is intended before touching it.
sed 's/!.*//' \
| grep -v '^[; ]*[@+-:<]' \
| grep ':.* .*;' \
| sed 's/[:+].*//' \
| tr -d '#%' \
| sed 's/^ *//'
}
posgrep () {
    # Filter analyser output on stdin by the major PoS in the trailing tag
    # sequence; "nonVNA" keeps everything that is NOT verb/noun/adjective.
    # FIX: the old version assigned the global variable 'pos', which could
    # clobber a caller's 'pos' (e.g. the array read in loans()) whenever
    # posgrep is not isolated in a pipeline subshell; match "$1" directly.
    case "$1" in
        nonVNA) grep -v "+[VNA]+[^#]*$" ;;
        *)      grep "+$1+[^#]*$" ;;
    esac
}
ana_to_forms_pos () {
# given ana input, output tab-separated
#FORM MAINPOS
gawk -F'\t|[+]' '
$1 {
sub(/[^\t]*#/,"")
pos="nonVNA"
for(i=NF;i>=0;i--) if($i~/^[VNA]$/){
pos=$i
break
}
print $1"\t"pos
}
'
}
# From ana output: "form<TAB>lemma" pairs for one PoS (compound lemma parts
# are concatenated), unique-sorted.
ana_to_forms_lms_of_pos () {
# Used in anymalign
pos=$1
posgrep "${pos}" \
| ana_to_lemmas \
| awk 'BEGIN{OFS=FS="\t"} {lm=$2;for(i=3;i<=NF;i++)lm=lm $i;print $1,lm}' \
| sort -u
# We just concatenate compound lemmas here
}
# All known lemmas of ${pos} for ${lang}: lexc lemmas plus wordlists plus
# corpus forms, re-analysed and filtered by PoS.
all_lms_of_pos () {
lang=$1
pos=$2
lexc2lms < $GTHOME/langs/${lang}/src/morphology/lexicon.lexc \
| cat - words/all.${lang} <(cut -f2 freq/forms.${lang}) \
| sort -u \
| ana ${lang} \
| posgrep "${pos}" \
| cut -f1 \
| LC_ALL=C sort -u
}
# Flip "a<TAB>b1<TAB>b2..." rows into "b1<TAB>a", "b2<TAB>a", ...
rev_tsv () {
gawk 'BEGIN{OFS=FS="\t"} {for(i=2;i<=NF;i++)print $i,$1}' "$@"
}
# Flatten "a<TAB>b1<TAB>b2..." rows into "a<TAB>b1", "a<TAB>b2", ...
cat_tsv () {
gawk 'BEGIN{OFS=FS="\t"} {for(i=2;i<=NF;i++)print $1,$i}' "$@"
}
# Merge both directions of the l1/l2 dictionaries into l1<TAB>l2 pairs.
cat_dict () {
l1=$1
l2=$2
pos=$3
[[ -f words/${l1}${l2}/${pos}_apertium.tsv ]] && cat_tsv words/${l1}${l2}/${pos}_apertium.tsv
[[ -f words/${l2}${l1}/${pos}_apertium.tsv ]] && rev_tsv words/${l2}${l1}/${pos}_apertium.tsv
# NOTE(review): the && below skips cat_tsv if rev_tsv fails; confirm
# whether these were meant to be two independent statements.
rev_tsv words/${l2}${l1}/${pos}.tsv &&
cat_tsv words/${l1}${l2}/${pos}.tsv
}
synonyms () {
lang=$1
pos=$2
# Follows synonyms in dictionaries, but only for "one level".
# First we create a big "src→trg" dictionary where the first
# column is the src word, and the others are trg translations of
# that word. One trg may appear in several lines, e.g.
# x b c
# y b d
# (where x and y might come from different languages, or just from
# different directions of the same pair). We skip the first (src)
# column, and create the cross product of all words that appear in
# the same lines, finally giving
# b c
# b d
# c b
# c d
# d b
# d c
:|gawk \
-v trgsrc=<(cat words/${lang}???/${pos}.tsv) \
-v srctrg=<(cat words/???${lang}/${pos}.tsv) '
BEGIN{
OFS=FS="\t"
while(getline<srctrg) for(i=2;i<=NF;i++)d[$1][$i]++
while(getline<trgsrc) for(i=2;i<=NF;i++)d[$i][$1]++
for(f in d){o=f;for(t in d[f])o=o"\t"t;print o}
}' \
| gawk '
BEGIN{
OFS=FS="\t"
}
{
for(i=2;i<=NF;i++) for(j=2;j<=NF;j++) d[$i][$j]++
}
END{
for(a in d) for(b in d[a]) if(a!=b) print a,b
}'
}
loans () {
# Generate loan-word candidates by swapping known suffix correspondences
# (arrays src[k] -> trg[k] with pos[k], set up by the caller) and keeping
# only results the target-language analyser accepts with the same PoS.
local -r srclang=$1
local -r trglang=$2
local -r dopos=$3
local words=
if ${FAD_ONLY}; then
words=fadwords
else
words=words
fi
for k in "${!src[@]}"; do
if [[ ${pos[k]} != ${dopos} ]]; then continue; fi
grep "..${src[k]}$" "${words}"/${pos[k]}."${srclang}" \
| sed "s/${src[k]}$/${trg[k]}/" \
| ana "${trglang}" \
| grep -v +Cmp | posgrep ${pos[k]} \
| cut -f1 \
| grep "${trg[k]}$" \
| awk -v src=${src[k]} -F"${trg[k]}$" '{print $1 src "\t" $0}'
done | sort -u
}
# Keep only lines of tsv $2 whose first column is a known nob word of PoS $1.
gaerjiste-vaalteme () {
if ${FAD_ONLY}; then
words=fadwords
else
words=words
fi
gawk -v w="${words}" -v pos="$1" 'BEGIN{OFS=FS="\t"; while(getline<(w"/"pos".nob"))nob[$0]} $1 in nob{print}' "$2"
}
|
unhammer/evttohus
|
functions.sh
|
Shell
|
gpl-2.0
| 14,292 |
#!/bin/bash
# Regression test for CreateRemdDirs: build an M-REMD run using absolute
# paths and compare the generated files against saved references.
# CleanFiles/RunTest/DoTest/EndTest come from ../MasterTest.sh.
. ../MasterTest.sh
CleanFiles run.000 mremd.opts Hamiltonians.dat absolute.groupfile.save
# Absolute path of the parent test directory, embedded in the inputs below.
TESTDIR=`pwd`
TESTDIR=`dirname $TESTDIR`
if [ -z "$TESTDIR" ] ; then
echo "Error: Could not get absolute path."
exit 1
fi
cat > mremd.opts <<EOF
DIMENSION ../Temperatures.dat
DIMENSION Hamiltonians.dat
DIMENSION ../AmdDihedral.dat
NSTLIM 500
DT 0.002
NUMEXCHG 100
TEMPERATURE 300.0
TOPOLOGY $TESTDIR/full.parm7
MDIN_FILE $TESTDIR/pme.remd.gamma1.opts
REF_FILE $TESTDIR/CRD
# Only fully archive lowest Hamiltonian
FULLARCHIVE 0
EOF
cat > Hamiltonians.dat <<EOF
#Hamiltonian
$TESTDIR/AltDFC.01.PagF.TIP3P.ff14SB.parm7
$TESTDIR/AltDFC.02.PagF.TIP3P.ff14SB.parm7
EOF
OPTLINE="-i mremd.opts -b 0 -e 0 -c $TESTDIR/CRD"
RunTest "Absolute path test"
DoTest ../mremd.dim.save run.000/remd.dim
# The reference groupfile embeds TESTDIR, so instantiate it from a template.
sed "s:TESTDIR:$TESTDIR:g" absolute.groupfile.template > absolute.groupfile.save
DoTest absolute.groupfile.save run.000/groupfile
DoTest ../in.001.save run.000/INPUT/in.001
EndTest
|
drroe/CreateRemdDirs
|
test/Test_MREMD_Absolute/RunTest.sh
|
Shell
|
gpl-2.0
| 1,007 |
# Demonstrates: Implicit invocation of quick and full GC when out of memory
# NOTE(review): flag meanings (-m 910, -n 10, -g DBI_L_MS, -t) are not shown
# here — confirm against the agcs usage text before changing values.
~/Work/src/agcs -i main.xml -c desc.xml -m 910 -n 10 -g DBI_L_MS -t
|
jskelin/AGCS
|
misc/tests/plain/ex16/example2.sh
|
Shell
|
gpl-2.0
| 145 |
#!/usr/bin/env bash
# Test darcs' interactive revert: a simple revert, a revert whose hunks
# commute nontrivially, and a revert whose kept changes are depended upon.
. ./lib
rm -rf temp1
mkdir temp1
cd temp1
darcs init
echo hello world > foo
darcs add foo
darcs record -a -m add -A x
echo goodbye world >> foo
# Answer "y","y" (tr turns the / into a newline) to revert the appended line,
# then verify the working file matches the recorded contents.
echo y/y | tr / \\012 | darcs revert
darcs show contents foo | cmp foo -
# Now let's test a trickier revert where changes commute nontrivially.
cat > foo <<EOF
a
b
c
d
e
EOF
darcs record -a -A me -m cleanup
mv foo foo.tmp
cat foo.tmp | grep -v b | grep -v d > foo
# Answers n,y,y: keep the removal of one line, revert the other.
echo "nyy" | darcs revert
DARCS_DONT_COLOR=1 darcs wh > whatsnew
cat > correct <<EOF
hunk ./foo 2
-b
EOF
diff -c correct whatsnew
# Try a situation where earlier (kept) changes are depended upon by the
# changes we want to revert:
darcs record -a -A me -m cleanup
echo hello world > bar
echo hello world > foo
darcs add bar
darcs replace hello goodbye bar foo
echo "cnnnyy/y" | tr / \\012 | darcs revert
DARCS_DONT_COLOR=1 darcs wh > whatsnew
cat > correct <<EOF
addfile ./bar
hunk ./bar 1
+goodbye world
hunk ./foo 1
-a
-c
-d
-e
+hello world
EOF
diff -c correct whatsnew
cd ..
rm -rf temp1
|
DavidAlphaFox/darcs
|
tests/revert_interactive.sh
|
Shell
|
gpl-2.0
| 1,033 |
#!/bin/sh
# Setup Simple OpenVPN server for Amazon Linux, Centos, Ubuntu and Debian
# Copyright (C) 2012-2013 Viljo Viitanen <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# 2012-12-11: initial version, tested only on amazon linux
# 2012-12-12: added centos 6.3 compability
# 2013-03-30: amazon linux 2013.03 - service iptables is missing, use rc.local
# whatismyip automation has stopped working, use ipchicken.com
# change from embedded tar gz to repo zip download
# 2013-10-02: add debian squeeze&wheezy and ubuntu 12.04 compatibility
# workaround for amazon ec2 rhel 6.4 image bug https://bugzilla.redhat.com/show_bug.cgi?id=956531
# use http://ipecho.net/plain - http://ipecho.net/developers.html
# 2013-10-06: add port, protocol and server name as optional parameters
#do not use any funny characters here, just lower case a-z.
OPENVPN='/etc/openvpn'

if [ "x$1" = "x-h" -o "x$1" = "x--help" ]
then
  echo "Usage: $0 [port] [protocol] [servername]"
  echo "Default: port 1194, UDP, servername OpenVPN-<protocol>-<port #>."
  echo "The server name is just for your convinience, it does not"
  echo "have to be related to the dns name of the server."
  exit 0
fi

# Port: default 1194, must be a plain number in 1..65535.
if [ "x$1" = "x" ]
then
  PORT=1194
else
  PORT=$1
fi

EXIT=0
# FIX: quote the tr set — unquoted [0-9] is a shell glob and could expand to
# a matching filename in the current directory.
TEST=$(echo "$1" | tr -d '[0-9]')
if [ "x$TEST" != "x" ]
then
  echo "Port must be a number."
  EXIT=1
fi

#make absolutely sure it's a simple number, not something silly like 007
# FIX: suppress expr's error output and only range-check when normalisation
# produced a number; previously a non-numeric port caused a '[' syntax error.
PORT=$(expr 0 + $PORT 2>/dev/null)
if [ "x$PORT" != "x" ]
then
  if [ "$PORT" -lt 1 -o "$PORT" -gt 65535 ]
  then
    echo "Port must be between 1 and 65535".
    EXIT=1
  fi
fi

# Protocol: default udp, must be udp or tcp.
if [ "x$2" = "x" ]
then
  PROTO="udp"
else
  PROTO=$2
fi
if [ "$PROTO" != "udp" -a "$PROTO" != "tcp" ]
then
  echo "Unknown protocol, must be udp or tcp".
  EXIT=1
fi

# Server name: default openvpn-<proto>-<port>; letters only, < 64 chars.
if [ "x$3" = "x" ]
then
  ME="openvpn-$PROTO-$PORT"
else
  ME=$3
fi

# FIX: quote the tr set here as well (same glob hazard as above).
TEST=$(echo "$3" | tr -d '[a-zA-Z]')
if [ "x$TEST" != "x" ]
then
  echo "Server name must only contain letters a-z."
  EXIT=1
fi

TEST=$(expr length "$3")
if [ $TEST -ge 64 ]
then
  echo "Server name must be less than 64 characters."
  #it's used in the certificate and config file names
  EXIT=1
fi

if [ $EXIT = "1" ]
then
  exit 1
fi

if [ $PORT -eq 22 -a "$PROTO" = "tcp" ]
then
  echo "NOTE: you are using the SSH port and protocol."
  echo "Sleeping for 10 seconds, press control-C to abort."
  sleep 10
fi
# Refuse to overwrite an existing OpenVPN installation.
if [ -d $OPENVPN ]
then
echo "$OPENVPN exists, aborting!"
exit 1
fi
if [ ! -f template-client-config ]
then
echo "Necessary files missing. Run script from same directory where you unzipped the zip file?"
exit 1
fi
if [ `id -u` -ne 0 ]
then
echo "Need root, try with sudo"
# NOTE(review): this reports success (exit 0) even though nothing was done;
# confirm whether it should be 'exit 1'.
exit 0
fi
#install openvpn, zip and dependencies
if which apt-get 2>/dev/null
then
apt-get -y install openvpn zip || {
echo "============================================================"
echo "Could not install openvpn and zip with apt-get. Huh?"
echo "============================================================"
exit 1
}
elif which yum 2>/dev/null
then
yum -y install openvpn zip || {
echo "============================================================"
echo "Could not install openvpn and zip with yum."
echo "Enable EPEL repository?"
echo "See http://fedoraproject.org/wiki/EPEL"
echo "============================================================"
exit 1
}
else
echo "============================================================"
echo "Cannot find apt-get or yum. Can't continue."
echo "============================================================"
exit 1
fi
mkdir -p $OPENVPN || { echo "Cannot mkdir $OPENVPN, aborting!"; exit 1; }
#openvpn config files and easy-rsa tool
cp -r easy-rsa $OPENVPN/
cp template-server-config $OPENVPN/openvpn.conf
sed -i -e "s/VPN_PROTO/$PROTO/" -e "s/VPN_PORT/$PORT/" $OPENVPN/openvpn.conf
if grep -q "cat <<EOL >> /etc/ssh/sshd_config" /etc/rc.d/rc.local
then
echo "Note: working around a bug in Amazon EC2 RHEL 6.4 image"
sed -i.bak 19,21d /etc/rc.d/rc.local
fi
#ubuntu has exit 0 at the end of the file.
sed -i '/^exit 0/d' /etc/rc.local
#set up nat for the vpn
# The heredoc below is unquoted on purpose: $PROTO and $PORT are expanded
# when writing the rules into /etc/rc.local.
cat >> /etc/rc.local << END
echo 1 > /proc/sys/net/ipv4/ip_forward
iptables -I INPUT -p $PROTO --dport $PORT -j ACCEPT
iptables -t nat -A POSTROUTING -s 192.168.2.0/24 -d 0.0.0.0/0 -o eth0 -j MASQUERADE
#default firewall in centos forbids these
iptables -I FORWARD -i eth0 -o tun0 -j ACCEPT
iptables -I FORWARD -i tun0 -o eth0 -j ACCEPT
#not sure if these are really necessary, they probably are the default.
iptables -t nat -P POSTROUTING ACCEPT
iptables -t nat -P PREROUTING ACCEPT
iptables -t nat -P OUTPUT ACCEPT
END
sh /etc/rc.local
#setup keys
( cd $OPENVPN/easy-rsa || { echo "Cannot cd into $OPENVPN/easy-rsa, aborting!"; exit 1; }
[ -d keys ] && { echo "easy-rsa/keys directory already exists, aborting!"; exit 1; }
cp vars myvars
sed -i -e 's/Fort-Funston/$ME/' -e 's/SanFrancisco/Simple OpenVPN server/' myvars
. ./myvars
./clean-all
./build-dh
./pkitool --initca
./pkitool --server myserver
./pkitool client1-$ME
)
#for more client certificates:
# cd easy-rsa
# . ./myvars
# ./pkitool [unique-client-name]
#by default this server allows multiple connections per client certificate
#generate the client config file
#first find out external ip
#cache the result so this can be tested safely without hitting any limits
if [ `find "$HOME/.my.ip" -mmin -5 2>/dev/null` ]
then
IP=`cat "$HOME/.my.ip" | tr -cd [0-9].`
echo "Using cached external ip address"
else
echo "Detecting external IP address"
IP=`curl icanhazip.com`
echo "$IP" > "$HOME/.my.ip"
fi
if [ "x$IP" = "x" ]
then
IP="UNKNOWN-ADDRESS"
echo "============================================================"
echo " !!! COULD NOT DETECT SERVER EXTERNAL IP ADDRESS !!!"
echo "============================================================"
echo "Make sure you edit the $ME.ovpn file before trying to use it"
echo "Search 'UNKNOWN-ADDRESS' and replace it with the correct IP address"
else
echo "============================================================"
echo "Detected your server's external IP address: $IP"
echo "============================================================"
echo "Make sure it is correct before using the client configuration files!"
fi
sleep 2
TMPDIR=`mktemp -d --tmpdir=. openvpn.XXX` || { echo "Cannot make temporary directory, aborting!"; exit 1; }
cp template-client-config $TMPDIR/$ME.ovpn
cp template-client-config-linux $TMPDIR/linux-$ME.ovpn
cd $TMPDIR || { echo "Cannot cd into a temporary directory, aborting!"; exit 1; }
cp $OPENVPN/easy-rsa/keys/ca.crt "ca-$ME.crt"
cp $OPENVPN/easy-rsa/keys/client1-$ME.key $OPENVPN/easy-rsa/keys/client1-$ME.crt .
sed -i -e "s/VPN_SERVER_ADDRESS/$IP/" -e "s/client1/client1-$ME/" -e "s/^ca ca.crt/ca ca-$ME.crt/" $ME.ovpn
sed -i -e "s/VPN_PROTO/$PROTO/" -e "s/VPN_PORT/$PORT/" $ME.ovpn
sed -i -e "s/VPN_SERVER_ADDRESS/$IP/" -e "s/client1/client1-$ME/" -e "s/^ca ca.crt/ca ca-$ME.crt/" linux-$ME.ovpn
sed -i -e "s/VPN_PROTO/$PROTO/" -e "s/VPN_PORT/$PORT/" linux-$ME.ovpn
zip $ME-$IP.zip $ME.ovpn linux-$ME.ovpn ca-$ME.crt client1-$ME.key client1-$ME.crt
chmod -R a+rX .
echo "----"
echo "Generated configuration files are in $TMPDIR/ !"
echo "----"
echo "The server '$ME' uses port $PORT protocol $PROTO."
echo "Make sure they are open in an external firewall if there is one."
#enable openvpn at boot and start server!
if which yum 2>/dev/null
then
chkconfig openvpn on
fi
service openvpn start
exit 0
|
viljoviitanen/setup-simple-openvpn
|
normal-setup.sh
|
Shell
|
gpl-2.0
| 8,171 |
#!/bin/bash
# Run ${CMD} inside the ${CONTAINER_NAME} image interactively, forwarding the
# AWS credential environment variables and bind-mounting the current working
# directory at /container/.
# Globals (all expected to be set by the caller/environment):
#   RUN_DOCKER     - docker invocation, e.g. "docker run"; left unquoted so it
#                    word-splits into command + options — presumably
#                    intentional, TODO confirm
#   CONTAINER_NAME - image name to run
#   CMD            - single command string handed to the container
run () {
  ${RUN_DOCKER} -it \
    -e AWS_ACCESS_KEY_ID \
    -e AWS_SECRET_ACCESS_KEY \
    -e AWS_REGION \
    -v "$(pwd)":/container/ \
    "${CONTAINER_NAME}" "${CMD}"
}
|
silarsis/personal_dev
|
docker/awscli/run.sh
|
Shell
|
gpl-2.0
| 187 |
#!/bin/sh
# This test file relies on the programs 'oscdump' and 'oscsend' which
# are available as part of the LibLo distribution. Currently they are
# present in the LibLo svn repository, but not yet part of a stable
# release.
# This script assumes Dimple is already running.
# Disable path mangling in MSYS2
# (otherwise MSYS2 rewrites OSC paths such as /world into Windows paths)
export MSYS2_ARG_CONV_EXCL="/world"
# Listen on port 7778. We'll assume this is the only oscdump instance
# running, and we don't want to run it if it's already running in
# another terminal.
# The three ps variants cover Linux (-A), MSYS/Cygwin (-W) and BSD-style
# (aux); oscdump is started in a subshell so it keeps running in background.
if ! ((ps -A 2>/dev/null || ps -W 2>/dev/null || ps aux 2>/dev/null) | grep oscdump >/dev/null 2>&1 ); then (oscdump 7778 &); fi
# Reset the world, then create a sphere "s" at the origin and configure it.
oscsend localhost 7774 /world/clear
oscsend localhost 7774 /world/sphere/create sfff s 0 0 0
oscsend localhost 7774 /world/s/radius f 0.02
oscsend localhost 7774 /world/s/mass f 1
# Grab it.
oscsend localhost 7774 /world/s/grab
|
radarsat1/dimple
|
test/grab.sh
|
Shell
|
gpl-2.0
| 868 |
#!/bin/bash
# Feature test-suite driver for alsabat (ALSA Basic Audio Tester).
# default devices
dev_playback="default"
dev_capture="default"
bin="alsabat"
# base command line; rebuilt inside feature_list_test once devices are known
commands="$bin -P $dev_playback -C $dev_capture"
# wav files generated by the first two tests and replayed by later ones
file_sin_mono="default_mono.wav"
file_sin_dual="default_dual.wav"
logdir="tmp"
# frequency range of signal
maxfreq=16547
minfreq=17
# features passes vs. features all
feature_pass=0
feature_cnt=0
# Reset the pass/total counters before a test run.
# Fix: the original reset 'feature_all', a variable that is never read
# anywhere in this script; the running total actually lives in
# 'feature_cnt', which was never reset.
init_counter () {
	feature_pass=0
	feature_cnt=0
}
# Record the outcome of one feature test.
# $1 - exit status of the test; 0 counts as a pass.
# Side effects: bumps the global feature_cnt (always) and feature_pass
# (on success), and prints "pass" or "fail" on stdout.
evaluate_result () {
	feature_cnt=$((feature_cnt + 1))
	if [ "$1" -ne 0 ]; then
		echo "fail"
		return
	fi
	feature_pass=$((feature_pass + 1))
	echo "pass"
}
# Print the final summary line, e.g. "[8/10] features passes."
print_result () {
	printf '[%d/%d] features passes.\n' "$feature_pass" "$feature_cnt"
}
# Run one alsabat invocation and record pass/fail.
# $1 - extra alsabat arguments (word-split on purpose)
# $2 - human-readable test description
# Globals used: commands, logdir, feature_cnt (via evaluate_result).
feature_test () {
	echo "============================================"
	echo "$feature_cnt: ALSA $2"
	echo "-------------------------------------------"
	echo "$commands $1 --log=$logdir/$feature_cnt.log"
	$commands $1 --log=$logdir/$feature_cnt.log
	evaluate_result $?
	# evaluate_result has already incremented feature_cnt, so
	# feature_cnt-1 is this test's log file; append the command line used.
	echo "$commands $1" >> $logdir/$((feature_cnt-1)).log
}
# test items
# Run the whole feature list against the configured devices and print a
# summary. The first two tests generate the wav files replayed later on.
feature_list_test () {
	init_counter
	# single-ended tests first: generation only needs the binary itself
	commands="$bin"
	feature_test "-c1 --saveplay $file_sin_mono" \
		"generate mono wav file with default params"
	feature_test "-c2 --saveplay $file_sin_dual" \
		"generate dual wav file with default params"
	sleep 5
	feature_test "-P $dev_playback" "single line mode, playback"
	feature_test "-C $dev_capture --standalone" "single line mode, capture"
	# remaining tests run full playback+capture loopback
	commands="$bin -P $dev_playback -C $dev_capture"
	feature_test "--file $file_sin_mono" "play mono wav file and detect"
	feature_test "--file $file_sin_dual" "play dual wav file and detect"
	feature_test "-c1" "configurable channel number: 1"
	feature_test "-c2 -F $minfreq:$maxfreq" "configurable channel number: 2"
	feature_test "-r44100" "configurable sample rate: 44100"
	feature_test "-r48000" "configurable sample rate: 48000"
	feature_test "-n10000" "configurable duration: in samples"
	feature_test "-n2.5s" "configurable duration: in seconds"
	feature_test "-f U8" "configurable data format: U8"
	feature_test "-f S16_LE" "configurable data format: S16_LE"
	feature_test "-f S24_3LE" "configurable data format: S24_3LE"
	feature_test "-f S32_LE" "configurable data format: S32_LE"
	feature_test "-f cd" "configurable data format: cd"
	feature_test "-f dat" "configurable data format: dat"
	feature_test "-F $maxfreq --standalone" \
		"standalone mode: play and capture"
	# analyze the capture file left behind by the standalone run above
	latestfile=`ls -t1 /tmp/bat.wav.* | head -n 1`
	feature_test "--local -F $maxfreq --file $latestfile" \
		"local mode: analyze local file"
	feature_test "--roundtriplatency" \
		"round trip latency test"
	feature_test "--snr-db 26" \
		"noise detect threshold in SNR(dB)"
	feature_test "--snr-pc 5" \
		"noise detect threshold in noise percentage(%)"
	print_result
}
echo "*******************************************"
echo "                 BAT Test                  "
echo "-------------------------------------------"
# get device
# Accept either a single device (used for both directions) or an explicit
# playback/capture pair; with no arguments the "default" devices are kept.
echo "usage:"
echo "  $0 <sound card>"
echo "  $0 <device-playback> <device-capture>"
if [ $# -eq 2 ]; then
	dev_playback=$1
	dev_capture=$2
elif [ $# -eq 1 ]; then
	dev_playback=$1
	dev_capture=$1
fi
echo "current setting:"
echo "  $0 $dev_playback $dev_capture"
# run
mkdir -p $logdir
feature_list_test
echo "*******************************************"
|
01org/bat
|
bat/alsabat-test.sh
|
Shell
|
gpl-2.0
| 3,233 |
#!/usr/bin/env bash
## Copyright (C) 2011 Ganesh Sittampalam <[email protected]>
##
## Test that darcs send uses the UTF-8 encoding for emails
## when non-ASCII characters are in the message
##
## Permission is hereby granted, free of charge, to any person
## obtaining a copy of this software and associated documentation
## files (the "Software"), to deal in the Software without
## restriction, including without limitation the rights to use, copy,
## modify, merge, publish, distribute, sublicense, and/or sell copies
## of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be
## included in all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
## BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
## ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
## CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
## SOFTWARE.
. lib # Load some portability helpers.
switch_to_utf8_locale
# Two repos: "send" holds the patch, "empty" is the context to send against.
darcs init --repo empty
darcs init --repo send
cd send
echo 'file1' > file1
darcs record -lam 'file1'
# Append non-ASCII characters to the bundle message via a fake editor, then
# use a fake sendmail command to assert the generated mail declares UTF-8.
LANG=en_GB.UTF-8 \
DARCS_EDITOR='echo Non-ASCII chars: é è ề Ψ ޡ ߐ ह ჴ Ᏻ ‱ ⁂ ∰ ✈ ⢅ .. >' \
darcs send -a ../empty --to=invalid@invalid --edit \
--sendmail-command='grep "Content-Type: text/plain; charset=\"utf-8\"" %<'
|
DavidAlphaFox/darcs
|
tests/send-encoding.sh
|
Shell
|
gpl-2.0
| 1,717 |
#!/bin/bash
# Collect relaxation energies from Quantum ESPRESSO runs.
# Walks rho_{4,8}/wfc_{20..250} directories, reads the total energy (Ry)
# from pw.relax.out, converts it to meV/atom relative to -36.89004179 Ry
# (factor 13.60569253 eV/Ry * 1000 / 2 atoms) and appends "wfc deltaE"
# rows to results_rho_<rho>.dat.
# NOTE(review): the unguarded "cd rho_$rho"/"cd wfc_$wfc" assume every
# directory exists — confirm before reuse.
for rho in $(seq 4 4 8)
do
cd rho_$rho
for wfc in $(seq 20 10 250)
do
cd wfc_$wfc
# the "!" line in pw.x output carries the final total energy in field 5
energy=`grep ! pw.relax.out | awk '{print $5}'`
deltaE=`echo "scale=8; ($energy+36.89004179)*13.60569253*1000/2" | bc`
echo $wfc $deltaE >> ../../results_rho_$rho.dat
cd ..
done
cd ..
done
|
leseixas/quantum_espresso-benchmark
|
Graphene/basis/PAW/get_energies.sh
|
Shell
|
gpl-2.0
| 318 |
# Launch the Xenogeddon game with the jME2 native libraries on the JNI path,
# forwarding any command-line arguments to the Java main class.
# NOTE(review): $* re-splits quoted arguments; "$@" would preserve them —
# confirm whether that matters for this launcher.
java -Xms64m -Xmx256m -Djava.library.path="libs/jme/lib/natives" -classpath bin:./libs/jme/jME_2.0.jar xenogeddon.games.Xenogeddon $*
|
tectronics/xenogeddon
|
start.sh
|
Shell
|
gpl-2.0
| 137 |
#!/bin/sh
# Script to download vervet genome, chlSab1
# Fetches the Ensembl soft-masked toplevel FASTA, drops everything from the
# first scaffold record onward (keeping only assembled chromosomes) and
# prefixes each sequence name with "chr".
mkdir genomes/chlSab1
cd genomes/chlSab1
GENOME_FA=chlSab1.fa
wget \
'ftp://ftp.ensembl.org/pub/release-93/fasta/chlorocebus_sabaeus/dna/Chlorocebus_sabaeus.ChlSab1.1.dna_sm.toplevel.fa.gz' \
-O ${GENOME_FA}.gz
gunzip ${GENOME_FA}.gz
# line number of the first scaffold header; chromosomes come before it
FIRST_SCAFFOLD_LINE=`grep -n "^>.*scaffold" $GENOME_FA | head -n1 | cut -d":" -f1`
LAST_LINE=$(($FIRST_SCAFFOLD_LINE - 1 ))
# rename ">NAME description" headers to ">chrNAME"
# NOTE(review): ${VAR/pat/rep} below is a bashism under #!/bin/sh — works
# where sh is bash; confirm on target platforms.
head -n $LAST_LINE $GENOME_FA | \
sed -e "s/^>\([^ ]*\) .*/>chr\\1/" > ${GENOME_FA/.fa/.chr.fa}
mv ${GENOME_FA/.fa/.chr.fa} $GENOME_FA
cd ../..
exit
|
bergeycm/NGS-map
|
genomes/download_chlSab1.sh
|
Shell
|
gpl-2.0
| 582 |
#! /bin/sh
# Copyright (C) 2011-2022 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# Test if lib_LIBRARIES requests AM_PROG_AR.
. test-init.sh
cat >> configure.ac << 'END'
AC_PROG_CC
AC_PROG_RANLIB
END
cat > Makefile.am << 'END'
lib_LIBRARIES = libfoo.a
libfoo_a_SOURCES = foo.c
END
$ACLOCAL
# Without AM_PROG_AR in configure.ac, automake must fail and the error
# message must name the missing macro.
AUTOMAKE_fails
grep 'requires.*AM_PROG_AR' stderr
cat >> configure.ac << 'END'
AM_PROG_AR
END
# Re-run from a clean cache: with AM_PROG_AR present, automake must succeed.
rm -rf autom4te*.cache
$ACLOCAL
$AUTOMAKE --add-missing
:
|
autotools-mirror/automake
|
t/ar-lib3.sh
|
Shell
|
gpl-2.0
| 1,060 |
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file is executed by build/envsetup.sh, and can use anything
# defined in envsetup.sh.
#
# In particular, you can add lunch options with the add_lunch_combo
# function: add_lunch_combo generic-eng
# Register the Lenovo A2109 userdebug build as a lunch menu entry.
add_lunch_combo aosp_a2109-userdebug
|
PJBrs/android_device_lenovo_kai
|
vendorsetup.sh
|
Shell
|
gpl-2.0
| 846 |
#!/bin/bash
# vim: set autoindent smartindent tabstop=2 shiftwidth=2 expandtab filetype=sh:
# Run a command under valgrind/memcheck.
# $1 - command line to run (word-split on purpose so "prog arg" works)
# $2 - tag used to name the log files
# Valgrind's own report goes to valgrind-NAME.log; the traced program's
# stdout+stderr go through tee into valgrind-NAME-output.log.
# Fixes vs. original: "2>&1" now precedes the pipe so the program's stderr
# is actually captured (before, it redirected tee's stderr, capturing
# nothing), and the second usage message gained its missing colon.
function vgrun
{
  local COMMAND="$1"
  local NAME="$2"
  [[ -n "$COMMAND" ]] || { echo "Syntax: vgrun <command> <name>"; return; }
  [[ -n "$NAME" ]] || { echo "Syntax: vgrun <command> <name>"; return; }
  valgrind \
    --leak-check=full --error-limit=no --track-origins=yes \
    --undef-value-errors=yes --log-file=valgrind-${NAME}.log \
    --read-var-info=yes \
    $COMMAND 2>&1 | tee valgrind-${NAME}-output.log
}
# Like vgrun, but also follows child processes (--trace-children=yes).
# $1 - command line to run, $2 - tag for the log file names.
# Fixes vs. original: $COMMAND is now expanded unquoted (consistent with
# vgrun — quoting it made any multi-word command fail as "no such file"),
# "2>&1" precedes the pipe so the program's stderr reaches tee, and the
# usage messages name vgtrace with a colon.
function vgtrace
{
  local COMMAND="$1"
  local NAME="$2"
  [[ -n "$COMMAND" ]] || { echo "Syntax: vgtrace <command> <name>"; return; }
  [[ -n "$NAME" ]] || { echo "Syntax: vgtrace <command> <name>"; return; }
  valgrind \
    --leak-check=full --error-limit=no --track-origins=yes \
    --undef-value-errors=yes --log-file=valgrind-${NAME}.log \
    --read-var-info=yes --trace-children=yes \
    $COMMAND 2>&1 | tee valgrind-${NAME}-output.log
}
# Run a command under valgrind with --db-attach=yes so a debugger can be
# attached at each reported error. All arguments are forwarded verbatim.
# Fix vs. original: the usage message said "vgrun" instead of "vgdbg".
function vgdbg
{
  [[ -n "$*" ]] || { echo "Syntax: vgdbg <command>"; return; }
  valgrind \
    --leak-check=full --error-limit=no --track-origins=yes \
    --undef-value-errors=yes --read-var-info=yes --db-attach=yes \
    "$@"
}
# Dispatch: lets this file be executed directly, e.g.
#   ./valgrind.sh vgrun "prog args" tag
# Fix vs. original: $2/$3 are now quoted, so a multi-word command survives
# as a single argument (the functions word-split it themselves).
[[ $1 == "vgrun" ]] && vgrun "$2" "$3"
[[ $1 == "vgtrace" ]] && vgtrace "$2" "$3"
[[ $1 == "vgdbg" ]] && vgdbg "$2" "$3"
|
aurojr/steiner_tree
|
code/valgrind.sh
|
Shell
|
gpl-2.0
| 1,352 |
# Repository root, three levels up — assumes this script is sourced/run from
# application/nbapi/py-tornado (TODO confirm).
topdir=`pwd`/../../../
# Expose the SWIG NB-API bindings plus the mul, topo_routing, libuuid and
# fabric shared-library build directories to the Tornado Python API.
export PYTHONPATH=./:${topdir}/application/nbapi/c-swig/:${topdir}/application/nbapi/c-swig/.libs/:${topdir}/mul/.libs/:${topdir}/services/loadable/topo_routing/.libs/:${topdir}/common-libs/util-linux/libuuid/.libs/:${topdir}/application/fabric/.libs
|
openmul/openmul
|
application/nbapi/py-tornado/pythonpath.sh
|
Shell
|
gpl-2.0
| 274 |
#!/bin/sh
#example type = "shellscript";
# Example swupdate hook script: the updater invokes it with "preinst" before
# installing an image and "postinst" afterwards.
do_preinst()
{
	# pre-installation hook; exits the whole script with success
	echo "do_preinst"
	exit 0
}
do_postinst()
{
	# post-installation hook; exits the whole script with success
	echo "do_postinst"
	exit 0
}
# Trace the invocation on /dev/ttyO0 — presumably the board's serial
# console; confirm for the target hardware.
echo $0 $1 > /dev/ttyO0
case "$1" in
preinst)
	echo "call do_preinst"
	do_preinst
	;;
postinst)
	echo "call do_postinst"
	do_postinst
	;;
*)
	# unknown or missing argument: report failure to the caller
	echo "default"
	exit 1
	;;
esac
|
jyelloz/swupdate
|
examples/scripts/shellscript.sh
|
Shell
|
gpl-2.0
| 348 |
#!/bin/bash
# Aggregate throughput from "Perf" log lines into fixed-length intervals.
#
# Usage: parsePeriodThroughput.sh <logfile>
# Keeps only lines containing "Perf", sums their MB field per $avg-second
# window, and writes "H:M:S<TAB>MB" rows to <basename>_throughput.txt.
# Fix vs. original: the shebang was "#/bin/bash" (missing "!"), i.e. a
# plain comment — the script ran under whatever shell invoked it instead
# of being guaranteed bash (it relies on bash $(( )) and $"..."). Logic is
# otherwise unchanged.
file=$1
outputfile=$(echo $file | cut -d '.' -f1)"_throughput.txt"
# aggregation interval length, in seconds
avg=10
if [ -f temp.txt ]; then
 rm temp.txt
fi
if [ -f $outputfile ]; then
 echo Warning: deleting existing outputfile $outputfile
 rm $outputfile
fi
cat $file | grep "Perf" > temp.txt
# end-of-current-interval wall clock (H/M/S and seconds-since-midnight)
# plus the MB accumulated inside the current interval
endSec=0
endMin=0
endHour=0
endTime=0
intervalMB=0
while read line
do
 #parse all relevant information from this line
 lineHour=$(echo $line | cut -d ' ' -f2- | cut -d ':' -f1 | sed 's/^0*//')
 lineMin=$(echo $line | cut -d ' ' -f2- | cut -d ':' -f2 | sed 's/^0*//')
 lineSec=$(echo $line | cut -d ' ' -f2- | cut -d ':' -f3 | cut -d ',' -f1 | sed 's/^0*//')
 lineTime=$((lineHour * 60 * 60 + lineMin * 60 + lineSec))
 lineMB=$(echo $line | cut -d ':' -f4- | cut -d ' ' -f3)
 #do we have a new interval?
 if [ $lineTime -ge $endTime ]
 then
  #write statistics of previous interval to output file
  echo -e $endHour:$endMin:$endSec$"\t"$intervalMB >> $outputfile
  echo $endHour:$endMin:$endSec $intervalMB
  # next interval ends $avg seconds after this line, normalized to H/M/S
  endSec=$((lineSec + avg))
  endMin=$((lineMin + (endSec / 60)))
  endSec=$((endSec % 60))
  endHour=$((lineHour + (endMin / 60)))
  endMin=$((endMin % 60))
  endTime=$((endHour * 60 * 60 + endMin * 60 + endSec))
  intervalMB=0
  echo new interval $lineHour:$lineMin:$lineSec to $endHour:$endMin:$endSec
 else
  # same interval: accumulate (bc handles fractional MB values)
  intervalMB=$(echo $intervalMB + $lineMB | bc)
 fi
done < temp.txt
|
cmusatyalab/GigaSight
|
tool/experiment_scripts/parsePeriodThroughput.sh
|
Shell
|
gpl-2.0
| 1,318 |
# Run variant discovery/genotyping over ADAM transform outputs for three
# input sizes (read counts), ten replicates each.
for num in 2000000 10000000 20000000
do
for i in {1..10}
do
# per-replicate HDFS paths: transformed alignments in, discovered variants out
fq='/xubo/project/alignment/CloudBWA/g38/time/cloudBWAnewg38L50c'$num'Nhs20Paired12time10num16k1.transformI'$i'.adam'
out='/xubo/project/alignment/CloudBWA/g38/time/cloudBWAnewg38L50c'$num'Nhs20Paired12time10num16k1.transform.DiscoverVariantI'$i'.adam'
# remove any stale output so the job can re-create it
hadoop fs -rm -R -f $out
sh testDiscoverAndGenotype.sh $fq $out
done
done
|
xubo245/GCDSS
|
sh/discoverAndGenotypeTransform.sh
|
Shell
|
gpl-2.0
| 388 |
#!/bin/bash
# Resolve $1 to an absolute path, print it, and (re)create the
# $BASHUTILS_DIR/custom-scripts symlink pointing at it.
# $1 - path of the script to register
# Globals read: BASHUTILS_DIR - bash-utils installation root (must be set).
# Fixes vs. original: expansions are quoted so paths with spaces work, the
# resolved path is a local instead of a leaked global, and a realpath
# failure aborts instead of linking to an empty target.
function bu-custom-script-link () {
    local script_real_path
    script_real_path=$(realpath "$1") || return
    echo "${script_real_path}"
    ln -nfs "${script_real_path}" "${BASHUTILS_DIR}/custom-scripts"
}
|
opeshm/bash-utils
|
functions/bash-utils.sh
|
Shell
|
gpl-2.0
| 179 |
#!/bin/sh
# Xilinx ISE implementation flow for the ben_mem block-RAM example design:
# XST synthesis -> ngdbuild -> map -> par -> trce -> bitgen -> netgen.
# Run from the coregen implement/ directory; all outputs land in ./results.
# Clean up the results directory
rm -rf results
mkdir results
#Synthesize the Wrapper Files
echo 'Synthesizing example design with XST';
xst -ifn xst.scr
cp ben_mem_exdes.ngc ./results/
# Copy the netlist generated by Coregen
echo 'Copying files from the netlist directory to the results directory'
cp ../../ben_mem.ngc results/
# Copy the constraints files generated by Coregen
echo 'Copying files from constraints directory to results directory'
cp ../example_design/ben_mem_exdes.ucf results/
cd results
echo 'Running ngdbuild'
ngdbuild -p xc6slx16-csg324-3 ben_mem_exdes
echo 'Running map'
map ben_mem_exdes -o mapped.ncd -pr i
echo 'Running par'
par mapped.ncd routed.ncd
echo 'Running trce'
# static timing analysis: report up to 10 paths per constraint
trce -e 10 routed.ncd mapped.pcf -o routed
echo 'Running design through bitgen'
bitgen -w routed
echo 'Running netgen to create gate level Verilog model'
netgen -ofmt verilog -sim -tm ben_mem_exdes -pcf mapped.pcf -w -sdf_anno false routed.ncd routed.v
|
Vadman97/ImageAES
|
vga/ipcore_dir/ben_mem/implement/implement.sh
|
Shell
|
gpl-3.0
| 1,033 |
# Enter the ChemBio Hub checkout and run the 64-bit Linux installer with
# project name "chembiohub" on an Ubuntu host (packer provisioning step).
cd /srv/chembiohub/
bash scripts/install_linux64.sh chembiohub Ubuntu
|
thesgc/chembiohub_ws
|
deployment/packer/python_dependencies.sh
|
Shell
|
gpl-3.0
| 71 |
#!/bin/sh
# Fetch HomeMatic Wired device firmware for Homegear.
#
# Downloads the eQ-3 CCU2 2.17.16 firmware tarball, extracts its UBIFS root
# filesystem with Jason Pruitt's UBI Reader, then installs the *.hex images
# into this script's directory under Homegear's <family>.<device>.fw naming
# scheme, each with a matching .version file, owned read-only by homegear.
# Refactored vs. original: the ten copies of
# "mv ...; [ $? -ne 0 ] && exit 1; echo VER > ...; [ $? -ne 0 ] && exit 1"
# are collapsed into the install_fw helper; behavior is unchanged.
SCRIPTDIR="$( cd "$(dirname $0)" && pwd )"
FIRMWAREDIR=/tmp/HomegearTemp/rootfs/rootfs.ubi/174024608/root/firmware
# Download and unpack only when a previous run's extraction is absent.
if test ! -d $FIRMWAREDIR; then
	rm -Rf /tmp/HomegearTemp || exit 1
	mkdir /tmp/HomegearTemp || exit 1
	wget -P /tmp/HomegearTemp/ http://www.eq-3.de/Downloads/Software/HM-CCU2-Firmware_Updates/HM-CCU2%202.17.16/HM-CCU-2.17.16.tar.gz || exit 1
	tar -zxf /tmp/HomegearTemp/HM-CCU-2.17.16.tar.gz -C /tmp/HomegearTemp || exit 1
	rm -f /tmp/HomegearTemp/HM-CCU-2.17.16.tar.gz
	echo "Downloading UBI Reader..."
	echo "(C) 2013 Jason Pruitt (Jason Pruitt), see https://github.com/jrspruitt/ubi_reader"
	wget -P /tmp/HomegearTemp/ https://github.com/jrspruitt/ubi_reader/archive/v2_ui.tar.gz || exit 1
	tar -zxf /tmp/HomegearTemp/v2_ui.tar.gz -C /tmp/HomegearTemp || exit 1
	/tmp/HomegearTemp/ubi_reader-2_ui/extract_files.py -o /tmp/HomegearTemp/rootfs /tmp/HomegearTemp/rootfs.ubi || exit 1
fi
# install_fw <hex-file> <id> <version>
# Move one firmware image out of the unpacked tree into SCRIPTDIR as
# <id>.fw and record <version> in <id>.version; abort on any failure.
install_fw()
{
	mv "$FIRMWAREDIR/$1" "$SCRIPTDIR/$2.fw" || exit 1
	echo "$3" > "$SCRIPTDIR/$2.version" || exit 1
}
rm -f $SCRIPTDIR/0001.*
install_fw hmw_io_4_fm_hw0.hex 0001.00001000 0306
install_fw hmw_lc_sw2_dr_hw0.hex 0001.00001100 0306
install_fw hmw_io12_sw7_dr_hw0.hex 0001.00001200 0306
install_fw hmw_lc_dim1l_dr_hw0.hex 0001.00001400 0303
install_fw hmw_lc_bl1_dr_hw0.hex 0001.00001500 0306
install_fw hmw_io_sr_fm_hw0_unstable.hex 0001.00001600 0001
install_fw hmw_sen_sc_12_dr_hw0.hex 0001.00001900 0301
install_fw hmw_sen_sc_12_fm_hw0.hex 0001.00001A00 0301
install_fw hmw_io_12_fm_hw0.hex 0001.00001B00 0300
install_fw hmw_io12_sw14_dr_hw0.hex 0001.00001C00 0100
rm -Rf /tmp/HomegearTemp
# Hand the installed files to the homegear user, read-only.
chown homegear:homegear $SCRIPTDIR/*.fw
chown homegear:homegear $SCRIPTDIR/*.version
chmod 444 $SCRIPTDIR/*.fw
chmod 444 $SCRIPTDIR/*.version
|
Homegear/Homegear-HomeMaticWired
|
misc/Data Directory/firmware/GetFirmwareUpdatesHomeMaticWired.sh
|
Shell
|
gpl-3.0
| 2,829 |
## vim:ts=4:sw=4:tw=200:nu:ai:nowrap:
##
## bashinator config for lvm-snaptool
##
## Created by Wolfram Schlich <[email protected]>
## Licensed under the GNU GPLv3
## Web: http://www.bashinator.org/projects/lvm-snaptool/
## Code: https://github.com/wschlich/lvm-snaptool/
##
##
## bashinator settings
##
## -- bashinator basic settings --
## log stdout and/or stderr of subcommands to a file.
## the output of all subcommands need to be manually redirected to the logfile
## contained in the variable _L which is automatically defined by bashinator.
##
## examples:
##
## - redirect stdout + stderr to the logfile:
## mkdir /foo &> "${_L}"
##
## - redirect only stderr to the logfile, so stdout can be processed as usual:
## grep localhost /etc/hosts 2> "${_L}"
##
# 1 = enabled (library default is 0); the application must itself redirect
# subcommand output to "${_L}" as described in the comment block above.
export __ScriptSubCommandLog=1 # default: 0
## directory to create logfile in
#export __ScriptSubCommandLogDir="/var/log" # default: /var/log
## check for a lockfile on startup and error out if it exists, create it otherwise
# 1 = abort at startup if the lockfile already exists (library default is 0)
export __ScriptLock=1 # default: 0
## directory to create lockfile in
#export __ScriptLockDir="/var/lock" # default: /var/lock
## use a safe PATH environment variable instead
## of the one supplied by the calling environment:
## - when running as non-root user: /bin:/usr/bin
## - when running as super user: /sbin:/usr/sbin:/bin:/usr/bin
#export __ScriptUseSafePathEnv=1 # default: 1
## set the umask
#export __ScriptUmask=077 # default: 077
## generate a stack trace when the __die() function is called (fatal errors)
## affects printing, mailing and logging!
#export __ScriptGenerateStackTrace=1 # default: 1
## -- bashinator message handling settings --
## enable quiet operation: nothing is printed on stdout/stderr,
## messages are only logged and/or mailed (if enabled).
## overrides __Print* variables!
## it should be possible to enable this by passing -q
## as an argument to your own application script.
#export __MsgQuiet=0 # default: 0
## timestamp format for the message functions,
## will be passed to date(1).
## default: "%Y-%m-%d %H:%M:%S %:z"
# date(1) format for message timestamps; overrides the library default only
# by wrapping the timestamp in brackets.
export __MsgTimestampFormat="[%Y-%m-%d %H:%M:%S %:z]" # with brackets
#export __MsgTimestampFormat="[%Y-%m-%d %H:%M:%S.%N %:z]" # with brackets and nanoseconds
## -- bashinator message printing settings --
## enable/disable printing of messages by severity
#export __PrintDebug=0 # default: 0
#export __PrintInfo=0 # default: 1
#export __PrintNotice=0 # default: 1
#export __PrintWarning=0 # default: 1
#export __PrintErr=0 # default: 1
#export __PrintCrit=0 # default: 1
#export __PrintAlert=0 # default: 1
#export __PrintEmerg=0 # default: 1
## enable/disable prefixing the messages to be printed with...
##
## ...their timestamp
#export __PrintPrefixTimestamp=1 # default: 1
##
## ...their severity
#export __PrintPrefixSeverity=1 # default: 1
##
## ...their source (file name, line number and function name)
#export __PrintPrefixSource=1 # default: 1
## print severity prefixes
#export __PrintPrefixSeverity7=">>> [____DEBUG]" # LOG_DEBUG
#export __PrintPrefixSeverity6=">>> [_____INFO]" # LOG_INFO
#export __PrintPrefixSeverity5=">>> [___NOTICE]" # LOG_NOTICE
#export __PrintPrefixSeverity4="!!! [__WARNING]" # LOG_WARNING
#export __PrintPrefixSeverity3="!!! [____ERROR]" # LOG_ERR
#export __PrintPrefixSeverity2="!!! [_CRITICAL]" # LOG_CRIT
#export __PrintPrefixSeverity1="!!! [____ALERT]" # LOG_ALERT
#export __PrintPrefixSeverity0="!!! [EMERGENCY]" # LOG_EMERG
## print severity colors (for the entire message, not just the prefix)
#export __PrintColorSeverity7="1;34" # LOG_DEBUG: blue on default
#export __PrintColorSeverity6="1;36" # LOG_INFO: cyan on default
#export __PrintColorSeverity5="1;32" # LOG_NOTICE: green on default
#export __PrintColorSeverity4="1;33" # LOG_WARNING: yellow on default
#export __PrintColorSeverity3="1;31" # LOG_ERR: red on default
#export __PrintColorSeverity2="1;37;41" # LOG_CRIT: white on red
#export __PrintColorSeverity1="1;33;41" # LOG_ALERT: yellow on red
#export __PrintColorSeverity0="1;37;45" # LOG_EMERG: white on magenta
## -- bashinator message logging settings --
## enable/disable logging of messages by severity
#export __LogDebug=0 # default: 0
#export __LogInfo=0 # default: 1
#export __LogNotice=0 # default: 1
#export __LogWarning=0 # default: 1
#export __LogErr=0 # default: 1
#export __LogCrit=0 # default: 1
#export __LogAlert=0 # default: 1
#export __LogEmerg=0 # default: 1
## enable/disable prefixing the messages to be logged with...
##
## ...their timestamp (ignored for syslog log target)
#export __LogPrefixTimestamp=1 # default: 1
##
## ...their severity (ignored for syslog log target)
#export __LogPrefixSeverity=1 # default: 1
##
## ...their source (file name, line number and function name)
#export __LogPrefixSource=1 # default: 1
## log severity prefixes
#export __LogPrefixSeverity7=">>> [____DEBUG]" # LOG_DEBUG
#export __LogPrefixSeverity6=">>> [_____INFO]" # LOG_INFO
#export __LogPrefixSeverity5=">>> [___NOTICE]" # LOG_NOTICE
#export __LogPrefixSeverity4="!!! [__WARNING]" # LOG_WARNING
#export __LogPrefixSeverity3="!!! [____ERROR]" # LOG_ERR
#export __LogPrefixSeverity2="!!! [_CRITICAL]" # LOG_CRIT
#export __LogPrefixSeverity1="!!! [____ALERT]" # LOG_ALERT
#export __LogPrefixSeverity0="!!! [EMERGENCY]" # LOG_EMERG
## log target configuration
## supported targets (any comma separated combination of):
## - "syslog:FACILITY"
## - "file:TARGET-FILE"
## - "file:TARGET-FILE:WRITE-MODE" (default WRITE-MODE: overwrite)
## default: "syslog:user"
#export __LogTarget="syslog:user"
#export __LogTarget="file:/var/log/${__ScriptName}.log"
#export __LogTarget="file:/var/log/${__ScriptName}.log:append"
#export __LogTarget="file:/var/log/${__ScriptName}.log:overwrite"
#export __LogTarget="file:/var/log/${__ScriptName}.log:append,syslog:user"
#export __LogTarget="file:/var/log/${__ScriptName}.log:overwrite,syslog:user"
#export __LogTarget="file:/var/log/${__ScriptName}.log:append,file:/var/log/${__ScriptName}-current.log:overwrite"
#export __LogTarget="file:/var/log/${__ScriptName}.$(date +"%Y%m%d-%H%M%S").log"
## -- bashinator message mailing settings --
## enable/disable mailing of messages by severity
#export __MailDebug=0 # default: 0
# Suppress mailing of routine info/notice messages; warnings and above keep
# their library defaults (1) and are still mailed.
export __MailInfo=0 # default: 1
export __MailNotice=0 # default: 1
#export __MailWarning=0 # default: 1
#export __MailErr=0 # default: 1
#export __MailCrit=0 # default: 1
#export __MailAlert=0 # default: 1
#export __MailEmerg=0 # default: 1
## enable/disable prefixing the messages to be mailed with...
##
## ...their timestamp
#export __MailPrefixTimestamp=1 # default: 1
##
## ...their severity
#export __MailPrefixSeverity=1 # default: 1
##
## ...their source (file name, line number and function name)
#export __MailPrefixSource=1 # default: 1
## mail severity prefixes
#export __MailPrefixSeverity7="[____DEBUG]" # LOG_DEBUG
#export __MailPrefixSeverity6="[_____INFO]" # LOG_INFO
#export __MailPrefixSeverity5="[___NOTICE]" # LOG_NOTICE
#export __MailPrefixSeverity4="[__WARNING]" # LOG_WARNING
#export __MailPrefixSeverity3="[____ERROR]" # LOG_ERR
#export __MailPrefixSeverity2="[_CRITICAL]" # LOG_CRIT
#export __MailPrefixSeverity1="[____ALERT]" # LOG_ALERT
#export __MailPrefixSeverity0="[EMERGENCY]" # LOG_EMERG
## enable/disable appending the script subcommand log to the mail (if enabled)
#export __MailAppendScriptSubCommandLog=1 # default: 1
## mail data configuration
## default __MailFrom: "${USER} <${USER}@${__ScriptHost}>"
## default __MailEnvelopeFrom: "${USER}@${__ScriptHost}"
## default __MailRecipient: "${USER}@${__ScriptHost}"
## default __MailSubject: "Messages from ${__ScriptFile} running on ${__ScriptHost}"
#export __MailFrom="${USER} <${USER}@${__ScriptHost}>"
#export __MailEnvelopeFrom="${USER}@${__ScriptHost}"
#export __MailRecipient="${USER}@${__ScriptHost}"
#export __MailSubject="Messages from ${__ScriptFile} running on ${__ScriptHost}"
|
wschlich/lvm-snaptool
|
bashinator.cfg.sh
|
Shell
|
gpl-3.0
| 7,989 |
#!/usr/bin/env bash
#
# Copyright (C) 2013 Norbert Thiebaud
# License: GPLv3
#
# Print usage and exit successfully.
# BUGFIX: the original help text advertised "-m|--module" and "-r|--tarfile",
# but the option parser actually accepts -o|--module and -t|--tarfile
# (-m selects the mode); the text below now matches the parser.
do_help()
{
    cat <<EOF
bin_library_info.sh is a tool that creates a unique filename for a binary tar file that
contains the build of the given source tarfile. The uniqueness is based on the source tarfile, which contains
a md5 already, and the calculated sha1 of config_host.mk and of the tree object associated with the top_level_module
in git.

syntax: bin_library_info.sh -o|--module <top_level_module> -l|--location <TARFILE_LOCATION> -s|--srcdir <SRCDIR> -b <BUILDDIR> -t|--tarfile <LIBRARY_TARFILE> -p|--platform <PLATFORM> [ -m|--mode verify|name ] [ -q ]

the default mode is 'name' which just prints the associated binary tarfile name.
in 'verify' mode the program prints the name if the associated binary tarfile exists
and prints nothing and returns an error code if the file does not exist

Note: --location --builddir and --srcdir are optional if they are already in the env in the form of TARFILE_LOCATION and BUILDDIR SRCDIR respectively
EOF
    exit 0;
}
# Print an error message to stdout (unless quiet mode) and abort.
# BUGFIX: the original used '[ "$V" ]', which is also true when V=0, so the
# -q flag never actually silenced the message; and 'exit -1' is not a valid
# POSIX exit status (bash maps it to 255) — exit 1 instead.
die()
{
    [ "${V:-0}" != 0 ] && echo "Error:" "$@"
    exit 1
}
# Print the git blob hash of the generated config_host.mk; used as one
# component of the binary-tarball cache key (a config change invalidates it).
# Relies on SRCDIR and BUILDDIR being set — the ${VAR?} expansion aborts the
# script with an error if they are not.
get_config_sha()
{
    pushd "${SRCDIR?}" > /dev/null
    # hash-object hashes the file *content* only (no need for it to be tracked)
    git hash-object "${BUILDDIR?}"/config_host.mk
    popd > /dev/null
}
# Print the sha1 of the git tree object associated with the module's source
# directory; the second component of the binary-tarball cache key (any change
# to the module's sources alters this sha).
get_library_gbuild_sha()
{
    local module="$1"
    pushd "${SRCDIR?}" > /dev/null
    if [ -d "${SRCDIR}/external/${module?}" ] ; then
        git ls-tree -d HEAD "external/${module?}" | cut -f 1 | cut -d " " -f 3
    else
        # BUGFIX: the original read 'git ls-tree -d HEAD | "{module?}" | ...',
        # piping into the literal string "{module?}" as if it were a command,
        # which always failed. Filtering the top-level tree listing for the
        # module name was almost certainly intended — TODO confirm against a
        # checkout where this branch is exercised.
        git ls-tree -d HEAD | grep "${module?}" | cut -f 1 | cut -d " " -f 3
    fi
    popd > /dev/null
}
# Compose the cache filename "<config-sha>_<gbuild-sha>_<tarball>.<platform>.tar.gz".
# Prints an empty string when either sha could not be determined.
# $1: top-level module name; $2: source tarball name. Requires PLATFORM.
determine_binary_package_name()
{
    local top_module="$1"
    local src_tarball="$2"
    local cfg_sha gbuild_sha result
    cfg_sha=$(get_config_sha)
    gbuild_sha=$(get_library_gbuild_sha "${top_module?}")
    result=""
    if [ -n "${cfg_sha?}" ] && [ -n "${gbuild_sha}" ] ; then
        result="${cfg_sha?}_${gbuild_sha?}_${src_tarball?}.${PLATFORM?}.tar.gz"
    fi
    echo "${result}"
}
## --- command-line parsing and main logic ------------------------------------
MODULE=""
SOURCE_TARFILE=""
MODE="name"   # "name": just print the binary tarfile name; "verify": also require it to exist
V=1           # verbosity flag consumed by die(); -q sets it to 0
# Accept both "--opt value" and "--opt=value": parm is the option itself,
# arg the attached value (if "=" was used), has_arg records which form it was.
while [ "${1}" != "" ]; do
    parm=${1%%=*}
    arg=${1#*=}
    has_arg=
    if [ "${1}" != "${parm?}" ] ; then
        has_arg=1
    else
        arg=""
    fi
    case "${parm}" in
        -h|--help)  # display help
            do_help
            exit
            ;;
        -b|--builddir)
            if [ -z "${has_arg}" ] ; then
                shift;
                arg="$1"
            fi
            BUILDDIR="${arg}"
            ;;
        -o|--module)
            if [ -z "${has_arg}" ] ; then
                shift;
                arg="$1"
            fi
            MODULE="${arg}"
            ;;
        -l|--location)
            if [ -z "${has_arg}" ] ; then
                shift;
                arg="$1"
            fi
            TARFILE_LOCATION="${arg}"
            ;;
        -m|--mode)
            # test if the binary package exist
            if [ -z "${has_arg}" ] ; then
                shift;
                arg="$1"
            fi
            MODE="$arg"
            ;;
        -p|--platform)
            # platform suffix used in the generated tarball name
            if [ -z "${has_arg}" ] ; then
                shift;
                arg="$1"
            fi
            PLATFORM="$arg"
            ;;
        -q)
            V=0
            ;;
        -s|--srcdir) # do not override the local autogen.lastrun if present
            if [ -z "${has_arg}" ] ; then
                shift;
                arg="$1"
            fi
            SRCDIR="${arg}"
            ;;
        -t|--tarfile)
            if [ -z "${has_arg}" ] ; then
                shift;
                arg="$1"
            fi
            SOURCE_TARFILE="${arg}"
            ;;
        -*)
            die "Invalid option $1"
            ;;
        *)
            die "Invalid argument $1"
            ;;
    esac
    shift
done

# Required arguments; TARFILE_LOCATION/BUILDDIR/SRCDIR may come from the env.
if [ -z "${MODULE?}" ] ; then
    die "Missing --module"
fi
if [ -z "${TARFILE_LOCATION}" ] ; then
    die "Missing --location"
fi
if [ -z "${SOURCE_TARFILE}" ] ; then
    die "Missing --tarfile"
fi
if [ -z "${SRCDIR}" ] ; then
    die "Missing --srcdir"
fi

BINARY_TARFILE="$(determine_binary_package_name ${MODULE?} ${SOURCE_TARFILE?})"
# Exit 2: could not compute a name at all (missing shas).
if [ -z "${BINARY_TARFILE}" ] ; then
    exit 2
fi
if [ "${MODE?}" = "verify" ] ; then
    # Exit 1 (silently) when the cached binary tarball does not exist.
    if [ -f "${TARFILE_LOCATION?}/${BINARY_TARFILE?}" ] ; then
        echo "${BINARY_TARFILE?}"
    else
        exit 1
    fi
else
    echo "${BINARY_TARFILE?}"
fi

exit 0
|
beppec56/core
|
solenv/bin/bin_library_info.sh
|
Shell
|
gpl-3.0
| 4,361 |
#!/bin/bash
# Select the prebuilt x86_64 object files for the TBS6680 driver by copying
# each <name>.o.x86_64 over the generic <name>.o in the v4l tree.
for obj in tbsctrl tbs6680fe_driver; do
    cp "./v4l/${obj}.o.x86_64" "./v4l/${obj}.o"
done
echo "TBS drivers configured for x86_64 platform."
|
dahlSTROM/tbs-v4l
|
v4l-6680/tbs-x86_64.sh
|
Shell
|
gpl-3.0
| 168 |
#!/bin/sh
# One-shot setup for Ubuntu 14.04: install core development packages, then
# pull shared git/vim settings from the linuxTweaks repository.
ESSENTIALPKGS="git vim vim-gtk build-essential zlib1g-dev libsndfile1-dev libsdl1.2-dev"
sudo apt-get install $ESSENTIALPKGS
## retrieve git and vim settings
# NOTE(review): piping a remote script straight into sh executes unreviewed
# code; consider downloading and inspecting it before running.
curl https://raw.githubusercontent.com/ryanpcmcquen/linuxTweaks/master/gitVimNORMALorROOT.sh | sh
|
Jheengut/linuxTweaks
|
ubuntu/ubuntu1404Setup.sh
|
Shell
|
gpl-3.0
| 269 |
#!/usr/bin/env bash
# Remove all git-ignored build artifacts under this directory tree.
set -e

# init
# Run from the directory containing this script so relative paths resolve.
cd "$( dirname "${BASH_SOURCE[0]}" )"
source script/init.sh

# delete stuff
# -d: recurse into directories; X: delete only files ignored by git.
git clean --force -dX .
|
johnmcfarlane/crag
|
nacl/nuke.sh
|
Shell
|
gpl-3.0
| 135 |
#!/bin/bash
# Run IGC-expansion simulation #39 for the paralog pair YJL177W/YKL180W under
# the MG94 non-clock model; stdout is captured to the PrintScreen log file.
python AnalyzeSimulation.py --paralog1 YJL177W --paralog2 YKL180W --simnum 39 > YJL177W_YKL180W_MG94_nonclock_Sim39_PrintScreen.txt
|
xjw1001001/IGCexpansion
|
Simulation/ShFiles/MG94_YJL177W_YKL180W_sim39.sh
|
Shell
|
gpl-3.0
| 145 |
#!/bin/bash
#
# Usage: dump-to-tree.sh <directory>
#
# Materializes the filesystem dump on stdin (in the format produced by
# tree-to-dump.sh) into the given filesystem directory. The directory will be
# created if it does not exist.
#
set -euo pipefail

mkdir -p "$1"
cd "$1"

# Global input-line counter, maintained by read_and_inc for error messages.
LINE_NUMBER=0
# Advance the global LINE_NUMBER counter, then delegate to the read builtin
# with the caller's arguments; returns read's own status (non-zero at EOF).
read_and_inc() {
    (( LINE_NUMBER += 1 ))
    read "$@"
}
# Like read_and_inc, but abort the whole script when the input is exhausted.
read_and_inc_or_fail() {
    if ! read_and_inc "$@"; then
        fail "unexpected end of input"
    fi
}
# Report a parse error (annotated with the current input line number) on
# stderr and terminate with status 1.
fail() {
    printf 'error: %s on input line %s\n' "$*" "${LINE_NUMBER}" >&2
    exit 1
}
# Main parse loop. Each dump entry begins with a header "<type> <mode> <path>",
# followed by type-specific payload, terminated by a separator line of dashes.
while read_and_inc FILE_TYPE FILE_MODE FILE_PATH; do
    if [ -z "${FILE_TYPE}" -a -z "${FILE_MODE}" -a -z "${FILE_PATH}" ]; then
        continue # ignore empty lines
    fi
    if [ -z "${FILE_TYPE}" -o -z "${FILE_MODE}" -o -z "${FILE_PATH}" ]; then
        fail "unexpected end of entry header"
    fi
    # Refuse absolute paths so a dump cannot write outside the target dir.
    if [[ "${FILE_PATH}" = /* ]]; then
        fail "absolute path not allowed in entry header"
    fi
    case "${FILE_TYPE}" in
        file)
            # install -D creates parent dirs and an empty file with the mode.
            install -D -m "${FILE_MODE}" /dev/null "${FILE_PATH}"
            # header is followed by file content, terminated by a separator line like "---------------"
            while IFS='' read_and_inc_or_fail LINE; do
                if [[ "${LINE}" =~ ^-+$ ]]; then
                    break
                fi
                echo "${LINE}" >> "${FILE_PATH}"
            done || true
            ;;
        symlink)
            # next line contains symlink target, followed by a separator line
            read_and_inc_or_fail TARGET
            [ -z "${TARGET}" ] && fail "missing symlink target"
            read_and_inc_or_fail SEPARATOR
            [[ "${SEPARATOR}" =~ ^-+$ ]] || fail "unexpected content line, expected separator"
            mkdir -p "$(dirname "${FILE_PATH}")"
            ln -sf "${TARGET}" "${FILE_PATH}"
            ;;
        directory)
            install -d -m "${FILE_MODE}" "${FILE_PATH}"
            read_and_inc_or_fail SEPARATOR
            [[ "${SEPARATOR}" =~ ^-+$ ]] || fail "unexpected content line, expected separator"
            ;;
        *)
            fail "unknown file type in entry header"
            ;;
    esac
# '|| true': read_and_inc returning non-zero at EOF must not trip 'set -e'.
done || true
|
holocm/holo
|
util/dump-to-tree.sh
|
Shell
|
gpl-3.0
| 1,960 |
#!/bin/bash
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# Collect the autopkgtest PPA results of one day inside a LXD container.
# Arguments:
#   day: The day of the results, with format yyyymmdd
set -ev

if [ "$#" -ne 1 ] ; then
    echo "Usage: $0 <day>"
    exit 1
fi

day="$1"
lxc="/snap/bin/lxc"

script_path="$(dirname "$0")"
project_path="$(readlink -f "$script_path/../..")"

# Bring up LXD and a fresh container, then ship the tools tree into it.
"$script_path/setup_lxd.sh"
"$script_path/run_lxd_container.sh" autopkgtest-results
"$lxc" file push --recursive "$project_path/tools" autopkgtest-results/root/
"$lxc" exec autopkgtest-results -- sh -c "apt update && apt install --yes squashfuse"
# Ignore the core install error as a workaround for
# - Setup snap "core" (2462) security profiles (cannot reload udev rules: exit status 2
"$lxc" exec autopkgtest-results -- sh -c "snap install core" || echo "ignored error"
"$lxc" exec autopkgtest-results -- sh -c "/root/tools/collect_ppa_autopkgtests_results.py $day"
"$lxc" stop autopkgtest-results
|
elopio/snapcraft
|
tools/travis/collect_ppa_autopkgtests_results.sh
|
Shell
|
gpl-3.0
| 1,529 |
#!/bin/bash
# Use this script to automatically produce several graph pdfs: for each
# experiment, collect the data folders matching every testpower/change-size
# combination and hand them to generate_graph_multi.sh.
BASE_NAME=ceal_batch

declare -a EXP_NAMES=( \
    exptree_eval \
    list_quicksort \
    list_mergesort \
);
declare -a EXP_SHORT_NAMES=( \
    "exptree" \
    "qs" \
    "ms" \
);

TESTPOWERS="cp verf"
CHANGE_SIZES="1 4 8 64 128 512 1024 4096"
RUNS=5

for ((exp_index=0; exp_index<${#EXP_NAMES[@]}; exp_index++))
do
    exp_name=${EXP_NAMES[$exp_index]}
    exp_short_name=${EXP_SHORT_NAMES[$exp_index]}
    folders=""
    legends=""
    # BUGFIX: the original iterated over the literal words "TESTPOWERS" and
    # "CHANGE_SIZES" (missing '$'), so no data folder was ever collected.
    for testpower in $TESTPOWERS
    do
        for change_size in $CHANGE_SIZES
        do
            folder="${BASE_NAME}_${exp_name}_${testpower}_cs${change_size}_${RUNS}runs"
            if [ -d "../data/${folder}" ]
            then
                folders="${folders} ${folder}"
                legends="${legends} ${exp_short_name}_${testpower}_cs${change_size}"
            fi
        done
    done
    ./generate_graph_multi.sh "${folders}" "${legends}"
    # -p: the directory already exists from the second iteration onwards
    mkdir -p ../data/output
    cp ../data/out.pdf "../data/output/${exp_name}.pdf"
    rm ../data/*.eps
    rm ../data/*.pdf
    chmod -R 775 ../data/output
done
|
matthewhammer/ceal
|
testpower/scripts/generate_graphs.sh
|
Shell
|
gpl-3.0
| 1,132 |
#!/bin/bash
### set password for the mysql user lbd

### get a new password for the mysql user 'lbd'
# Three cases: 'random' -> generate silently; unset/empty -> prompt the admin
# (pre-filling a random suggestion); any other value -> use it as given.
if [ "$mysql_passwd_lbd" = 'random' ]
then
    mysql_passwd_lbd=$(mcookie | head -c 16)
elif [ -z "${mysql_passwd_lbd+xxx}" -o "$mysql_passwd_lbd" = '' ]
then
    echo
    echo "===> Please enter new password for the MySQL 'lbd' account. "
    echo
    mysql_passwd_lbd=$(mcookie | head -c 16)
    # Disable terminal echo while the password is typed.
    stty -echo
    read -p "Enter password [$mysql_passwd_lbd]: " passwd
    stty echo
    echo
    # Keep the random suggestion when the admin just presses ENTER.
    mysql_passwd_lbd=${passwd:-$mysql_passwd_lbd}
fi

### set password
source $(dirname $0)/set_mysql_passwd.sh
set_mysql_passwd lbd $mysql_passwd_lbd

### modify the configuration file of Drupal (settings.php)
# Rewrite the 'password' entry inside the $databases array (up to 10 lines
# after the assignment) in every lbd* site's settings.php.
for file in $(ls /var/www/lbd*/sites/default/settings.php)
do
    sed -i $file \
        -e "/^\\\$databases = array/,+10 s/'password' => .*/'password' => '$mysql_passwd_lbd',/"
done
|
dashohoxha/dbox
|
install/config/mysql_labdoo.sh
|
Shell
|
gpl-3.0
| 879 |
# Check-module metadata: display name and root requirement (dmidecode needs
# root via sudo). Sourced by the basiccheck driver — TODO confirm the driver
# contract for RUNNAME/RUNASROOT/RUNOUT.
RUNNAME="RAM"
RUNASROOT=1

# Collect installed-RAM information from dmidecode and build a human-readable
# summary in the global RUNOUT variable ("Total RAM: N MiB" plus one line per
# memory bank). Always returns 0.
# Both parse loops below are small state machines over dmidecode output:
#   x=0: before/outside a section of interest
#   x=1: inside a section whose relevance is not yet confirmed (values are
#        buffered in m/n/t until the confirming line is seen)
#   x=2: inside a confirmed section (values go straight into the arrays)
runfile_exec()
{
    local l x i m n t BANK_FORM BANK_SIZE BANK_SPEED BANK_TYPE MEM_MAX MEM_NUM
    ((x=0))
    ((i=0))
    # Pass 1: dmidecode -t 16 (Physical Memory Array). An array is only
    # recorded once its "Use: System Memory" line confirms it holds RAM.
    while read l
    do
        if (( x == 2 ))
        then
            case "$l" in
                'Maximum Capacity:'*)
                    MEM_MAX[i]="${l#*: }"
                    ;;
                'Number Of Devices:'*)
                    MEM_NUM[i]="${l#*: }"
                    ;;
                'Physical Memory Array')
                    ((i++))
                    ((x=1))
                    m=
                    n=
                    ;;
            esac
        elif (( x == 1 ))
        then
            case "$l" in
                'Maximum Capacity:'*)
                    m="${l#*: }"
                    ;;
                'Number Of Devices:'*)
                    n="${l#*: }"
                    ;;
                'Use: System Memory')
                    # Confirmed: flush the buffered values into the arrays.
                    ((x=2))
                    if [[ $m ]]
                    then
                        MEM_MAX[i]="$m"
                        m=
                    fi
                    if [[ $n ]]
                    then
                        MEM_NUM[i]="$n"
                        n=
                    fi
                    ;;
            esac
        elif [[ "$l" == 'Physical Memory Array' ]]
        then
            ((x=1))
        fi
    # egrep -o strips control characters (dmidecode indents with tabs).
    done <<< "$(sudo /usr/sbin/dmidecode -t 16 2>/dev/null|egrep -o '[^[:cntrl:]]+')"
    ((x=0))
    ((i=0))
    # Pass 2: dmidecode -t 17 (Memory Device). A device is only recorded once
    # its "Form Factor: ...DIMM" line confirms it is a DIMM-style bank.
    while read l
    do
        if (( x == 2 ))
        then
            case "$l" in
                'Size:'*)
                    m="${l#*Size: }"
                    # "No Module Installed" and friends -> empty bank ('0').
                    if [[ "$m" == No* ]]
                    then
                        BANK_SIZE[i]='0'
                    else
                        BANK_SIZE[i]="$m"
                    fi
                    ;;
                'Speed:'*)
                    n="${l#*Speed: }"
                    # "Unknown" -> blank speed.
                    if [[ "$n" == Un* ]]
                    then
                        BANK_SPEED[i]=''
                    else
                        BANK_SPEED[i]="$n"
                    fi
                    ;;
                'Type:'*)
                    t="${l#*Type: }"
                    if [[ "$t" == Un* ]]
                    then
                        BANK_TYPE[i]=''
                    else
                        BANK_TYPE[i]="$t"
                    fi
                    ;;
                'Memory Device')
                    ((i++))
                    ((x=1))
                    m=
                    n=
                    t=
                    ;;
            esac
        elif (( x == 1 ))
        then
            case "$l" in
                'Size:'*)
                    m="${l#*Size: }"
                    if [[ "$m" == No* ]]
                    then
                        m='0'
                    fi
                    ;;
                'Speed:'*)
                    n="${l#*Speed: }"
                    if [[ "$n" == Un* ]]
                    then
                        n=''
                    fi
                    ;;
                'Type:'*)
                    t="${l#*Type: }"
                    ;;
                'Form Factor: '*'DIMM')
                    # Confirmed DIMM: flush buffered size/speed/type values.
                    ((x=2))
                    BANK_FORM[i]="${l#*Form Factor: }"
                    if [[ $t ]]
                    then
                        BANK_TYPE[i]="$t"
                        t=
                    else
                        BANK_TYPE[i]=''
                    fi
                    if [[ $m ]]
                    then
                        BANK_SIZE[i]="$m"
                        m=
                    fi
                    if [[ $n ]]
                    then
                        BANK_SPEED[i]="$n"
                        n=
                    else
                        BANK_SPEED[i]=''
                    fi
                    ;;
            esac
        elif [[ "$l" == 'Memory Device' ]]
        then
            ((x=1))
        fi
    done <<< "$(sudo /usr/sbin/dmidecode -t 17 2>/dev/null|egrep -o '[^[:cntrl:]]+')"
    # Sum the bank sizes in MiB (GB entries are scaled by 1024; anything that
    # is neither GB nor MB counts as 0). MEM_TOTAL is intentionally global.
    ((MEM_TOTAL=0))
    for ((i=0; i<${#BANK_FORM[@]}; i++))
    do
        m="$(egrep -o '[0-9]+' <<< "${BANK_SIZE[i]}")"
        if [[ ${BANK_SIZE[i]} == *GB ]]
        then
            (( m *= 1024 ))
        elif [[ ${BANK_SIZE[i]} != *MB ]]
        then
            (( m = 0 ))
        fi
        (( MEM_TOTAL += m ))
    done
    # Assemble the report consumed by the caller via the global RUNOUT.
    RUNOUT="Total RAM: $MEM_TOTAL MiB"
    for ((i = 0; i < ${#BANK_FORM[@]}; i++))
    do
        RUNOUT="$RUNOUT"$'\n'"Bank $i: ${BANK_TYPE[i]} "
        if [[ "${BANK_SIZE[i]}" == '0' ]]
        then
            RUNOUT="$RUNOUT<empty>"
        else
            RUNOUT="$RUNOUT${BANK_SPEED[i]} ${BANK_SIZE[i]}"
        fi
    done
    return 0
}
|
KoneetKiertoon/scripts
|
src/basiccheck.d/70-memory.sh
|
Shell
|
gpl-3.0
| 3,279 |
#---------------------------------------------------------------------
# Function: AskQuestions Debian 8
# Ask for all needed user input
#---------------------------------------------------------------------
# Interactive whiptail wizard collecting every install-time choice into CFG_*
# globals. Relies on externally defined variables (IDENTATION_LVL_*, color
# codes, WT_BACKTITLE, CFG_HOSTNAME_FQDN) — presumably set by the main
# installer before this runs; confirm against the caller.
# Fixes in this revision (copy-paste bugs):
#   - the bind9 and WebStats summary lines echoed $CFG_JKIT (and the WebStats
#     one reused the bind9 label);
#   - the ISPConfig DB password autogeneration tested the *admin* password
#     variable instead of the DB password variable.
AskQuestions() {
    START_TIME=$SECONDS
    CFG_SETUP_WEB=true #Needed for Multiserver setup compatibility
    CFG_SETUP_MAIL=true #Needed for Multiserver setup compatibility
    CFG_SETUP_NS=true #Needed for Multiserver setup compatibility
    echo -n -e "$IDENTATION_LVL_0 ${BWhite}Gathering informations about softwares and versions:${NC} "
    echo
    while [ "x$CFG_SQLSERVER" == "x" ]; do
        CFG_SQLSERVER=$(whiptail --title "Install SQL Server" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "Select SQL Server type" 10 60 3 \
            "MySQL" "MySQL (default)" ON \
            "MariaDB" "MariaDB" OFF \
            "None" "(already installed)" OFF 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}SQL Server${NC}: ${green}$CFG_SQLSERVER${NC} "
    echo
    if [ $CFG_SQLSERVER == "MySQL" ]; then
        while [ "x$CFG_MYSQL_VERSION" == "x" ]; do
            CFG_MYSQL_VERSION=$(whiptail --title "MySQL Version" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
                "Select MySQL Version" 10 60 4 \
                "default" "OS Current Version" ON \
                "5.6" "MySQL-5.6" OFF \
                "5.7" "MySQL-5.7" OFF \
                "8.0" "MySQL-8.0" OFF 3>&1 1>&2 2>&3)
        done
        echo -n -e "$IDENTATION_LVL_2 ${BBlack}Version${NC}: ${green}$CFG_MYSQL_VERSION${NC} "
        echo
    fi
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}Retrieve MySQL Root PASSWORD${NC}: "
    CFG_MYSQL_ROOT_PWD=$(whiptail --title "MySQL Root Password" --backtitle "$WT_BACKTITLE" --inputbox \
        "Please specify the MySQL Root Password (leave empty for autogenerate)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $CFG_MYSQL_ROOT_PWD ]]; then
        CFG_MYSQL_ROOT_PWD_AUTO=true
        #We generate a random 32 Chars Length
        CFG_MYSQL_ROOT_PWD=$(tr </dev/urandom -dc 'A-Z-a-z-0-9~!@#^*_=-' | head -c${1:-16})
    else
        CFG_MYSQL_ROOT_PWD_AUTO=false
    fi
    echo -e " [ ${green}DONE${NC} ] "
    while [ "x$CFG_WEBSERVER" == "x" ]; do
        CFG_WEBSERVER=$(whiptail --title "Install Web Server" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "Select which web server you want to install" 15 60 3 \
            "apache" "Apache" OFF \
            "nginx" "Nginx (default)" ON \
            "none" "No Install" OFF 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Web Server${NC}: ${green}$CFG_WEBSERVER${NC} "
    echo
    if [ $CFG_WEBSERVER == "nginx" ]; then
        while [ "x$CFG_NGINX_VERSION" == "x" ]; do
            CFG_NGINX_VERSION=$(whiptail --title "Nginx Web Server" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
                "Select which Nginx Version you want to install" 15 65 5 \
                "n-os-default" "OS Default 1.14" OFF \
                "n-nginx" "NGINX Official - nginx.org" ON \
                "n-buster" "Debian Buster Backports" OFF \
                "n-custom" "Built from sources" OFF 3>&1 1>&2 2>&3)
        done
        echo -n -e "$IDENTATION_LVL_2 ${BBlack}Nginx Version${NC}: ${green}" $CFG_NGINX_VERSION "${NC} "
        echo
    else
        if [ $CFG_WEBSERVER == "none" ]; then
            CFG_SETUP_WEB=false
        fi
        CFG_NGINX_VERSION='none'
    fi
    while [ "x$CFG_PHP_VERSION" == "x" ]; do
        CFG_PHP_VERSION=$(whiptail --title "Choose PHP Version(s)" --backtitle "$WT_BACKTITLE" --nocancel --separate-output --checklist \
            "Choose PHP Version do you want to install" 20 75 7 \
            "php7.0" "Latest Available from 7.0" ON \
            "php7.1" "Latest Available from 7.1" ON \
            "php7.2" "Latest Available from 7.2" ON \
            "php7.3" "Latest Available from 7.3" ON \
            "php7.4" "Latest Available from 7.4" ON \
            "none" "No install" OFF 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}PHP Version(s)${NC}: ${green}" $CFG_PHP_VERSION "${NC} "
    echo
    while [ "x$CFG_PHP_CLI_VERSION" == "x" ]; do
        CFG_PHP_CLI_VERSION=$(whiptail --title "Choose PHP Cli Default Version(s)" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "Choose PHP CLI Version do you want to use" 20 75 5 \
            "7.0" "7.0" OFF \
            "7.1" "7.1" OFF \
            "7.2" "7.2" OFF \
            "7.3" "7.3" OFF \
            "7.4" "7.4" OFF \
            "latest" "Latest Installed" ON 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}PHP Version(s)${NC}: ${green}" $CFG_PHP_CLI_VERSION "${NC} "
    echo
    while [ "x$CFG_CERTBOT_VERSION" == "x" ]; do
        CFG_CERTBOT_VERSION=$(whiptail --title "Install LetsEncrypt CertBot" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "Select CertBot Version" 10 60 3 \
            "none" "No installation" OFF \
            "default" "OS default version" OFF \
            "buster" "Yes, from Buster backports" ON 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}LetsEncrypt CertBot Version${NC}: ${green}$CFG_CERTBOT_VERSION${NC} "
    echo
    while [ "x$CFG_HHVM" == "x" ]; do
        CFG_HHVM=$(whiptail --title "HHVM" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "Do you want to install HHVM?" 10 60 2 \
            "no" "(default)" ON \
            "yes" "" OFF 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Install HHVM${NC}: ${green}$CFG_HHVM${NC} "
    echo
    while [ "x$CFG_PHPMYADMIN" == "x" ]; do
        CFG_PHPMYADMIN=$(whiptail --title "Install phpMyAdmin" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "You want to install phpMyAdmin during install?" 10 60 2 \
            "yes" "(default)" ON \
            "no" "" OFF 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Install PhpMyAdmin${NC}: ${green}$CFG_PHPMYADMIN${NC} "
    echo
    if [ $CFG_PHPMYADMIN == "yes" ]; then
        while [ "x$CFG_PHPMYADMIN_VERSION" == "x" ]; do
            CFG_PHPMYADMIN_VERSION=$(whiptail --title "phpMyAdmin Version" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
                "What version of phpMyAdmin do you want to install?" 15 75 4 \
                "default" "Current OS Version" OFF \
                "buster" "From buster backports - newer" OFF \
                "latest-stable" "from phpMyAdmin.net" ON 3>&1 1>&2 2>&3)
        done
        echo -n -e "$IDENTATION_LVL_2 ${BBlack}Version${NC}: ${green}$CFG_PHPMYADMIN_VERSION${NC} "
        echo
    else
        CFG_PHPMYADMIN_VERSION='none'
    fi
    while [ "x$CFG_FTP" == "x" ]; do
        CFG_FTP=$(whiptail --title "FTP Server" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "Install and configure FTP SERVER ?" 10 60 4 \
            "onlyFTP" "Yes, only with FTP" OFF \
            "onlyTLS" "Yes, only with TLS" ON \
            "FTPandTLS" "Yes, with FTP and TLS" OFF \
            "none" "No, don't install it" OFF 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Install and Configure FTP Server${NC}: ${green}$CFG_FTP${NC} "
    echo
    while [ "x$CFG_MTA" == "x" ]; do
        CFG_MTA=$(whiptail --title "Mail Server" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "Select mailserver type" 10 60 3 \
            "none" "" OFF \
            "dovecot" "(default)" ON \
            "courier" "" OFF 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Mail Server${NC}: ${green}$CFG_MTA${NC} "
    echo
    if [ $CFG_MTA == "none" ]; then
        CFG_WEBMAIL="none"
        CFG_SETUP_MAIL=false
    else
        CFG_SETUP_MAIL=true
        while [ "x$CFG_WEBMAIL" == "x" ]; do
            # NOTE(review): the "roundcube-lates" tag looks like a typo for
            # "roundcube-latest", but it is a value consumed elsewhere, so it
            # is kept as-is; fixing it requires touching the consumers too.
            CFG_WEBMAIL=$(whiptail --title "Webmail client" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
                "Select which Web Mail client you want" 10 60 4 \
                "roundcube" "(default)" OFF \
                "roundcube-lates" "latest available" ON \
                "squirrelmail" "" OFF \
                "none" "No Web Mail Client" OFF 3>&1 1>&2 2>&3)
        done
    fi
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}WebMail client${NC}: ${green}$CFG_WEBMAIL${NC} "
    echo
    if (whiptail --title "Update Freshclam DB" --backtitle "$WT_BACKTITLE" --yesno "You want to update Antivirus Database during install?" 10 60); then
        CFG_AVUPDATE=true
    else
        CFG_AVUPDATE=false
    fi
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Update Antivirus Database${NC}: ${green}$CFG_AVUPDATE${NC} "
    echo
    if (whiptail --title "Quota" --backtitle "$WT_BACKTITLE" --yesno "Setup user quota?" 10 60); then
        CFG_QUOTA=true
    else
        CFG_QUOTA=false
    fi
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Setup Quota${NC}: ${green}$CFG_QUOTA${NC} "
    echo
    if (whiptail --title "Jailkit" --backtitle "$WT_BACKTITLE" --yesno "Would you like to install Jailkit?" 10 60); then
        CFG_JKIT=true
    else
        CFG_JKIT=false
    fi
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Install Jailkit${NC}: ${green}$CFG_JKIT${NC} "
    echo
    if (whiptail --title "DNS (bind9)" --backtitle "$WT_BACKTITLE" --yesno "Would you like to install DNS server (bind9)?" --defaultno 10 60); then
        CFG_BIND=true
    else
        CFG_BIND=false
        CFG_SETUP_NS=false
    fi
    # BUGFIX: this summary echoed $CFG_JKIT instead of $CFG_BIND.
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Install DNS server (bind9)${NC}: ${green}$CFG_BIND${NC} "
    echo
    if (whiptail --title "WebStats" --backtitle "$WT_BACKTITLE" --yesno "Would you like to install WebStats?" 10 60); then
        CFG_WEBSTATS=true
    else
        CFG_WEBSTATS=false
    fi
    # BUGFIX: this summary reused the bind9 label and echoed $CFG_JKIT.
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Install WebStats${NC}: ${green}$CFG_WEBSTATS${NC} "
    echo
    while [ "x$CFG_INSTALL_ADITIONAL_SOFTWARE" == "x" ]; do
        CFG_INSTALL_ADITIONAL_SOFTWARE=$(whiptail --title "Install Aditional Software" --backtitle "$WT_BACKTITLE" --nocancel --separate-output --checklist \
            "Choose what programs do you want to install" 25 105 10 \
            "htop" "HTOP - interactive process viewer" ON \
            "nano" "NANO - text editor" ON \
            "haveged" "HAVEGED - A simple entropy daemon" ON \
            "ssh" "SSH - Secure Shell" ON \
            "openssl-stable" "OpenSSL - toolkit with full-strength cryptography" OFF \
            "openssl-buster" "OpenSSL - version from buster branch - usually newer" ON \
            "openssh-server" "OpenSSH Server - collection of tools for control and transfer of data" OFF \
            "openssh-server-buster" "OpenSSH Server - version from buster branch - usually newer" ON \
            "none" "Not install any thing from the above list" OFF \
            3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}Install Aditional Software(s)${NC}: ${green}"$CFG_INSTALL_ADITIONAL_SOFTWARE"${NC} "
    echo
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}ISPConfig Configuration: ${NC}"
    echo
    while [ "x$CFG_ISPC" == "x" ]; do
        CFG_ISPC=$(whiptail --title "ISPConfig Setup" --backtitle "$WT_BACKTITLE" --nocancel --radiolist \
            "Would you like full unattended setup of expert mode for ISPConfig?" 10 60 2 \
            "standard" "Yes (default)" ON \
            "expert" "No, i want to configure" OFF 3>&1 1>&2 2>&3)
    done
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}Install Mode${NC}: ${green}" $CFG_ISPC "${NC} "
    echo
    CFG_ISPONCFIG_PORT=$(whiptail --title "ISPConfig" --backtitle "$WT_BACKTITLE" --inputbox \
        "Please specify a ISPConfig Port (leave empty for use 8080 port)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $CFG_ISPONCFIG_PORT ]]; then
        CFG_ISPONCFIG_PORT=8080
    fi
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}ISPConfig Port${NC}: ${green}" $CFG_ISPONCFIG_PORT "${NC} "
    echo
    CFG_ISPONCFIG_APPS_PORT=$(whiptail --title "ISPConfig" --backtitle "$WT_BACKTITLE" --inputbox \
        "Please specify a ISPConfig Apps Port (leave empty for use 8081 port)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $CFG_ISPONCFIG_APPS_PORT ]]; then
        CFG_ISPONCFIG_APPS_PORT=8081
    fi
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}ISPConfig Apps Port${NC}: ${green}" $CFG_ISPONCFIG_APPS_PORT "${NC} "
    echo
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}Retrieve ISPConfig Admin password${NC}: "
    CFG_ISPONCFIG_ADMIN_PASS=$(whiptail --title "ISPConfig" --backtitle "$WT_BACKTITLE" --inputbox \
        "Please specify a ISPConfig Admin Password (leave empty for autogenerate)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $CFG_ISPONCFIG_ADMIN_PASS ]]; then
        CFG_ISPONCFIG_ADMIN_PASS=$(tr </dev/urandom -dc 'A-Z-a-z-0-9' | head -c${1:-12})
    fi
    echo -e " [ ${green}DONE${NC} ] "
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}Retrieve ISPConfig DB password${NC}: "
    CFG_ISPCONFIG_DB_PASS=$(whiptail --title "ISPConfig DB Password" --backtitle "$WT_BACKTITLE" --inputbox \
        "Please specify a ISPConfig DB Password (leave empty for autogenerate)" --nocancel 10 60 3>&1 1>&2 2>&3)
    # BUGFIX: the original tested CFG_ISPONCFIG_ADMIN_PASS here, so the DB
    # password autogeneration depended on the *admin* password being empty.
    if [[ -z $CFG_ISPCONFIG_DB_PASS ]]; then
        CFG_ISPCONFIG_DB_PASS_AUTO=true
        CFG_ISPCONFIG_DB_PASS=$(tr </dev/urandom -dc 'A-Z-a-z-0-9~!@#^*_=-' | head -c${1:-16})
    else
        CFG_ISPCONFIG_DB_PASS_AUTO=false
    fi
    echo -e " [ ${green}DONE${NC} ] "
    echo -n -e "$IDENTATION_LVL_1 ${BBlack}SSL Configuration:${NC} "
    echo
    SSL_COUNTRY=$(whiptail --title "SSL Country Code" --backtitle "$WT_BACKTITLE" \
        --inputbox "SSL Configuration - Country Code (2 letter code - ex. RO)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $SSL_COUNTRY ]]; then
        SSL_COUNTRY="RO"
    fi
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}Country${NC}: ${green}" $SSL_COUNTRY "${NC} "
    echo
    SSL_STATE=$(whiptail --title "SSL State or Province Name" --backtitle "$WT_BACKTITLE" \
        --inputbox "SSL Configuration - STATE or Province Name (full name - ex. Romania)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $SSL_STATE ]]; then
        SSL_STATE="Romania"
    fi
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}State${NC}: ${green}" $SSL_STATE "${NC} "
    echo
    SSL_LOCALITY=$(whiptail --title "SSL Locality" --backtitle "$WT_BACKTITLE" \
        --inputbox "SSL Configuration - Locality (ex. Craiova)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $SSL_LOCALITY ]]; then
        SSL_LOCALITY="Craiova"
    fi
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}Locality${NC}: ${green}" $SSL_LOCALITY "${NC} "
    echo
    SSL_ORGANIZATION=$(whiptail --title "SSL Organization" --backtitle "$WT_BACKTITLE" \
        --inputbox "SSL Configuration - Organization (ex. Company L.t.d.)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $SSL_ORGANIZATION ]]; then
        SSL_ORGANIZATION="$CFG_HOSTNAME_FQDN"
    fi
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}Organization${NC}: ${green}" $SSL_ORGANIZATION "${NC} "
    echo
    SSL_ORGUNIT=$(whiptail --title "SSL Organization Unit" --backtitle "$WT_BACKTITLE" \
        --inputbox "SSL Configuration - Organization Unit (ex. IT)" --nocancel 10 60 3>&1 1>&2 2>&3)
    if [[ -z $SSL_ORGUNIT ]]; then
        SSL_ORGUNIT="IT"
    fi
    echo -n -e "$IDENTATION_LVL_2 ${BBlack}Unit${NC}: ${green}" $SSL_ORGUNIT "${NC} "
    echo
    MeasureTimeDuration $START_TIME
}
|
a1ur3l/ispconfig_setup
|
distros/debian10/02_askquestions.sh
|
Shell
|
gpl-3.0
| 14,371 |
#!/bin/sh
# Stop the running "nginx-test" Docker container.
docker stop nginx-test
|
jianyingdeshitou/docker-debian8
|
nginx-d8/test/stop.sh
|
Shell
|
gpl-3.0
| 33 |
#!/bin/sh -e
# Unlock a LUKS-encrypted device (default /dev/sdb, overridable via $1),
# activate its "cryptvg" LVM volume group, and mount the home/grab volumes.
DEV=/dev/sdb
DEVALIAS=mypen
if [ -n "$1" ]; then
    DEV=$1
fi
cryptsetup luksOpen $DEV $DEVALIAS
pvscan
vgchange -a y cryptvg
mount /dev/cryptvg/home /home
mount /dev/cryptvg/grab /var/grab
|
codders/odds-and-ends
|
bin/homecrypt.sh
|
Shell
|
gpl-3.0
| 207 |
#!/bin/bash
# Run the packaged Selenium test suite; stdout and stderr go to logfile.log.
java -jar 'JAR files/Runnable Selenium Tests.jar' > logfile.log 2>&1
|
waaghals/Tainted-Aberrant-Lion
|
tests/Selenium/executeSeleniumTests.sh
|
Shell
|
gpl-3.0
| 81 |
#! /usr/bin/env bash
# Launch the thinkgear2ft acquisition driver against the local FieldTrip
# buffer, picking the prebuilt binary matching the current platform.
cd `dirname "${BASH_SOURCE[0]}"`
execname='thinkgear2ft'
if [ `uname -s` == 'Linux' ]; then
   # BUGFIX: the original compared the full `uname -a` string against
   # 'armv6l', which can never match; the machine type comes from `uname -m`.
   if [ "`uname -m`" == 'armv6l' ]; then
	 arch='raspberrypi'
   else
	 arch='glnx86';
   fi
else # Mac
	 arch='maci'
fi
# Prefer a binary next to this script, then the arch-specific buffer dirs.
buffexe="buffer/bin/${execname}";
if [ -r ${execname} ]; then
	 buffexe="${execname}";
fi
if [ -r buffer/bin/${arch}/${execname} ]; then
	 buffexe="buffer/bin/${arch}/${execname}";
fi
if [ -r buffer/${arch}/${execname} ]; then
	 buffexe="buffer/${arch}/${execname}";
fi
$buffexe /dev/ttyUSB0 mindwave.cfg localhost 1972
|
jadref/buffer_bci
|
dataAcq/startMindwave.sh
|
Shell
|
gpl-3.0
| 561 |
#!/bin/bash
#
# Script to list the groups stored in the LDAP directory.
#
# Reynaldo Martinez P - Gotic-ccun
# March 2011
#
PATH=$PATH:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
#
# Verify that the invoking user is root; otherwise abort.
# NOTE(review): the refusal path exits with status 0, so callers cannot
# distinguish "refused" from "succeeded" — confirm whether that is intended.
#
amiroot=`whoami`
case $amiroot in
root)
	echo ""
	;;
*)
	echo ""
	echo "ALERTA: Solo el usuario root puede ejecutar este script"
	echo "abortando"
	echo ""
	exit 0
	;;
esac
#
# Load the common helper library shared by all provisioning scripts
# (provides getgidnumber and the variables listed below).
#
. /usr/local/ldapprovision/libs/functions.sh
#
# Basic variables used by this script, defined in functions.sh:
#
# configdir="/usr/local/ldapprovision/etc"
# libsdir="/usr/local/ldapprovision/libs"
# tmpsdir="/usr/local/ldapprovision/tmp"
# binduser=`/bin/cat $configdir/readonlybindusr.txt`
# bindpass=`/bin/cat $configdir/readonlybindusrpass.txt`
# searchbase=`/bin/cat $configdir/searchbase.txt`
# baseuidnumber=`/bin/cat $configdir/baseuid.txt`
# basegidnumber=`/bin/cat $configdir/basegid.txt`
# ldapserver=`/bin/cat $configdir/ldap-server.txt`
# reserveduidlist="$configdir/reserved-accounts.txt"
# reservedgidlist="$configdir/reserved-groups.txt"
# usrtemplate="$libsdir/ldap-template-user.txt"
# usrtemplatemod="$libsdir/ldap-template-user-modify.txt"
# grptemplate="$libsdir/ldap-template-group.txt"
# grptemplatemod="$libsdir/ldap-template-group-modify.txt"
# sudotemplates="$configdir/sudoprofiles"
echo ""
echo "Lista de grupos creados en el directorio. Base: ou=groups,$searchbase"
echo ""
# Simple bind with the read-only account; extract the cn attribute values.
mylist=`ldapsearch -x -b ou=groups,$searchbase -D $binduser -w $bindpass cn|grep "cn:"|cut -d: -f2`
for i in $mylist
do
	# getgidnumber comes from functions.sh — presumably maps a group cn to
	# its gidNumber; confirm against the library.
	mygid=`getgidnumber $i`
	echo -e "Grp: $i\t (gid: $mygid)"
done
echo ""
|
cgomeznt/OpenLdap
|
LDAP/bin/grouplist-ldap.sh
|
Shell
|
gpl-3.0
| 1,778 |
# Start a Neo4j container with the browser (7474) and bolt (7687) ports
# published, persisting data under ~/neo4j/data.
docker run \
    --name neo4j -dit \
    --publish=7474:7474 --publish=7687:7687 \
    --volume=$HOME/neo4j/data:/data \
    neo4j
# BUGFIX: the lines below used to be bare text that the shell tried to
# execute as commands (failing after the container started); they are notes.
#   browser: http://ccnet1.tmit.bme.hu:7474/
#   user: neo4j
#   pass: kutya4
# NOTE(review): credentials are hardcoded in this file — consider moving
# them to an env file or a secret store.
|
vuchetichbalint/useful_python_scripts
|
etc/docker/neo4j.sh
|
Shell
|
gpl-3.0
| 190 |
#!/bin/bash
# Sync the shared CEB repository (~fmmb/CEB.repository) into a user's home
# directory: show a dry-run preview first, then sync and chown on confirmation.
user=$1
if [ -z "$user" ]; then
    echo "Please specify a user"
    exit 1
fi
mkdir -p /home/$user/CEB
chown $user /home/$user/CEB
# Primary group of the user: second field of "user : g1 g2 ..." after
# normalizing the separator.
group=$(groups $user | sed -E 's/ ?: ?/ /' | cut -d' ' -f2)
echo "##### UPDATING /home/$user/CEB ... GROUP: $group"
# Dry run so the operator can review the pending changes before committing.
rsync --dry-run --exclude={".*","tmp"} --update --delete -hratzv --links ~fmmb/CEB.repository/ /home/$user/CEB
echo -n "##### <ENTER> to proceed..."
read ok
# Plain ENTER (empty input) confirms; typing anything skips this user.
if [ -z "$ok" ]; then
    rsync --exclude={".*","tmp"} --update --delete -hratzv --links ~fmmb/CEB.repository/ /home/$user/CEB
    find /home/$user/CEB -exec chown ${user}:${group} {} \;
else
    echo "##### Skipping /home/$user/CEB ..."
fi
|
fmmb/admin-tigre.iul.lab
|
update_CEB_info.sh
|
Shell
|
gpl-3.0
| 655 |
#!/bin/bash
# Common CI bootstrap: set up per-stage logging and define the helper
# functions used by the server-data build scripts.
mkdir logs
export dir=$(pwd)
# Per-stage error log; LOGFILE is expected to be set by the caller/CI job.
export ERRFILE=${dir}/logs/${LOGFILE}
cat /etc/os-release
# Remove any stale log left over from a previous run.
rm ${ERRFILE}
# Abort the script with the given status when it is non-zero, logging it
# first; a zero status is a no-op.
function check_error {
    local status="$1"
    if [ "$status" != 0 ]; then
        echo "Error $status"
        exit $status
    fi
}
# On CI only: switch apt sources to the LAN Debian mirror when running on a
# local runner, detected via the site-specific nameserver in /etc/resolv.conf.
# NOTE(review): an identical copy of this function is defined again further
# down in this file; bash keeps the later definition.
function update_repos {
    if [ "$CI_SERVER" == "" ];
    then
        return
    fi
    export DATA=$(cat /etc/resolv.conf|grep "nameserver 1.10.100.101")
    if [ "$DATA" != "" ];
    then
        echo "Detected local runner"
        sed -i 's!http://httpredir.debian.org/debian!http://1.10.100.103/debian!' /etc/apt/sources.list
    else
        echo "Detected non local runner"
    fi
}
# Clone from the mirror URL ($2) into $3, echoing the command first; on
# failure fall back to a single attempt from the primary URL ($1). Returns
# the status of the last clone attempt.
function gitclone1 {
    echo git clone $2 $3
    git clone $2 $3 && return 0
    echo git clone $1 $3
    git clone $1 $3
}
# Clone repository $2 into directory $3, trying a mirror URL derived from the
# CI checkout origin (CI_BUILD_REPO) with fallback to the $1 base URL, and
# retrying the whole pair with growing delays (1s/3s/5s) to ride out
# transient network failures. Aborts via check_error on final failure.
function gitclone {
    export name1=$1/$2
    # Strip any credentials prefix from CI_BUILD_REPO and swap in repo $2.
    export name2=${CI_BUILD_REPO##*@}
    export name2=https://${name2%/*}/$2
    gitclone1 "$name1" "$name2" $3
    if [ "$?" != 0 ]; then
        sleep 1s
        gitclone1 "$name1" "$name2" $3
        if [ "$?" != 0 ]; then
            sleep 3s
            gitclone1 "$name1" "$name2" $3
            if [ "$?" != 0 ]; then
                sleep 5s
                gitclone1 "$name1" "$name2" $3
            fi
        fi
    fi
    check_error $?
}
# NOTE(review): exact duplicate of the update_repos function defined earlier
# in this file; since bash keeps the last definition, this copy is the one in
# effect. Kept byte-identical — deduplicating is a separate cleanup.
function update_repos {
    if [ "$CI_SERVER" == "" ];
    then
        return
    fi
    export DATA=$(cat /etc/resolv.conf|grep "nameserver 1.10.100.101")
    if [ "$DATA" != "" ];
    then
        echo "Detected local runner"
        sed -i 's!http://httpredir.debian.org/debian!http://1.10.100.103/debian!' /etc/apt/sources.list
    else
        echo "Detected non local runner"
    fi
}
# Run "apt-get update" with up to three attempts (1s pause between tries),
# refreshing the mirror configuration first; aborts via check_error when the
# final attempt still fails.
function aptget_update {
    update_repos
    echo apt-get update
    local attempt rc=0
    for attempt in 1 2 3; do
        apt-get update
        rc=$?
        if [ "$rc" = 0 ]; then
            break
        fi
        if [ "$attempt" != 3 ]; then
            sleep 1s
        fi
    done
    check_error $rc
}
# Install the given packages with apt-get, retrying twice (1s then 2s pause)
# before giving up via check_error.
function aptget_install {
    local delay rc
    echo apt-get -y install $*
    apt-get -y install $*
    rc=$?
    for delay in 1s 2s; do
        if [ "$rc" = 0 ]; then
            break
        fi
        sleep $delay
        apt-get -y install $*
        rc=$?
    done
    check_error $rc
}
# Build the Hercules server and the evol plugin.
#   $1: configure flags for the main Hercules tree
#   $2: configure flags for the evol plugin tree
# Expects the cwd layout produced by do_init (server-code with src/evol
# inside, sibling server-data). Aborts via check_error on any step failure.
function make_server {
    ls -la ../server-data
    ls -la ../server-data/plugins
    echo source src/evol/tools/vars.sh
    source ./src/evol/tools/vars.sh
    check_error $?
    # Explicit opt-in Hercules requires when it will be run as root (CI).
    export CPPFLAGS="$CPPFLAGS -DI_AM_AWARE_OF_THE_RISK_AND_STILL_WANT_TO_RUN_HERCULES_AS_ROOT"
    echo "autoreconf -i"
    autoreconf -i
    check_error $?
    echo ./configure $1 CPPFLAGS=\"${CPPFLAGS}${HERCCPPFLAGS}\"
    ./configure $1 CPPFLAGS="$CPPFLAGS$HERCCPPFLAGS"
    export err="$?"
    if [ "$err" != 0 ]; then
        echo "Error $err"
        # Dump the autoconf log so CI failures are diagnosable from the log.
        echo cat config.log
        cat config.log
        exit $err
    fi
    echo "make -j2"
    make -j2
    check_error $?
    echo "make -j2 plugin.script_mapquit"
    make -j2 plugin.script_mapquit
    check_error $?
    make install
    check_error $?
    # Second stage: build the evol plugin in its own build directory.
    cd src/evol
    echo "autoreconf -i"
    mkdir m4
    autoreconf -i
    check_error $?
    mkdir build
    cd build
    echo ../configure $2 CPPFLAGS=\"${VARS}\"
    ../configure $2 CPPFLAGS="${VARS}"
    check_error $?
    echo "make -j2 V=0"
    make -j2 V=0
    check_error $?
    cd ../../../..
    ls -la server-data/plugins
}
# Recreate ../server-data from the checked-out repository.
# Uses CI_PROJECT_NAME as the source directory, defaulting to "serverdata"
# (":=" also assigns the default back to the variable).
function do_init_data {
    mkdir shared
    cd ..
    rm -rf server-data
    cp -r "${CI_PROJECT_NAME:=serverdata}" server-data
    # Capture cp's status now: the original passed $? of the ls listing
    # below to check_error, so a cp failure was only detected indirectly.
    local rv=$?
    ls -la server-data
    check_error $rv
}
# Re-create the ../tools checkout from the evol-tools repository.
do_init_tools() {
    cd ..
    rm -rf -- tools
    gitclone https://gitlab.com/evol evol-tools.git tools
}
# Prepare the full build tree: server-data, the Hercules server sources,
# and the evol plugin sources nested under server-code/src/evol.
do_init() {
    do_init_data
    # Fresh clone of the Hercules server code.
    rm -rf -- server-code
    gitclone https://gitlab.com/evol hercules.git server-code
    check_error $?
    # Nest the evol plugin inside the server tree.
    cd server-code/src
    check_error $?
    gitclone https://gitlab.com/evol evol-hercules.git evol
    check_error $?
    cd ../..
    check_error $?
    # Plugin output directory expected by make_server.
    mkdir -p server-data/plugins
}
# CI-only build preparation: fake a non-root uid, load the plugin's build
# variables, and sanity-check the compiler. No-op outside the CI server.
build_init() {
    if [ "$CI_SERVER" == "" ];
    then
        return
    fi
    # Shadow the real id(1) with a stub reporting uid 1000 so tooling
    # that refuses to run as root is satisfied.
    mkdir -p /local/bin
    printf '#!/bin/bash\necho 1000\n' > /local/bin/id
    export PATH="/local/bin:$PATH"
    chmod +x /local/bin/id
    echo "fake id check"
    id
    # Load the plugin build variables (VARS etc.).
    cd server-code/src/evol
    source tools/vars.sh
    check_error $?
    cd ../../..
    check_error $?
    # Verify the configured compiler actually runs.
    echo $CC --version
    $CC --version
    check_error $?
}
# Install local-server configs, then overlay the profile-specific import
# configuration and motd scripts.
#   $1 - profile name under server-data/.tools/conf/
function init_configs {
    cd tools/localserver
    ./installconfigs.sh
    cd ../..
    # "$1" quoted so a profile name with unusual characters cannot be
    # word-split; the trailing glob still expands.
    cp server-data/.tools/conf/"$1"/* server-data/conf/import/
    cp server-data/.tools/npc/motd-* server-data/npc/commands/
    # Show the effective import configuration in the CI log.
    ls -la server-data/conf/import
    cat server-data/conf/import/inter_conf.txt
}
|
themanaworld/server-data
|
.tools/scripts/init.sh
|
Shell
|
gpl-3.0
| 4,729 |
#!/bin/bash
# Dump each S3 bucket's ACL to <bucket>.json, then flag any ACL granting
# access to AllUsers (i.e. publicly readable/writable buckets).
# jq -r emits raw (unquoted) bucket names, replacing the fragile
# ${b:1:-1} quote-stripping of the original.
aws s3api list-buckets | jq -r '.Buckets[].Name' | while IFS= read -r b; do
    aws s3api get-bucket-acl --bucket "$b" > "$b.json"
done
grep -r "AllUsers" *
|
sharethis-github/awsRI
|
s3permcheck.sh
|
Shell
|
gpl-3.0
| 166 |
#!/bin/bash
# Build a release version for use with existing tests.
#
# Usage:
#   ./build.sh
#
# CC and FLAGS may be exported by the caller and are consumed by make;
# the original's self-assignments (CC="$CC", FLAGS="$FLAGS") were no-ops
# and have been removed. The advertised <ARCH> argument was never read,
# so it is dropped from the usage text.
set -ve
# Compile the debug target first as a sanity check, then clean and build
# the release binary.
make distclean
make debug
make distclean
make
|
nil0x42/duplicut
|
test/build.sh
|
Shell
|
gpl-3.0
| 232 |
#!/bin/sh
# PHP taint checking with graudit - PoC script
# Written by Wireghoul - http://www.justanotherhacker.com
# Released under the GPL licence
#
# Pipeline overview:
#   1. graudit -z -d php: scan the target with the PHP signature db
#      (-z suppresses the banner).
#   2. perl: from each hit, extract variables assigned directly from user
#      input ($_GET/$_POST/$_REQUEST/$_COOKIE/$_FILES) and print each as
#      an escaped literal (\$name).
#   3. sort | uniq: deduplicate the tainted variable names.
#   4. graudit -d /dev/stdin: re-scan the target using those names as an
#      ad-hoc signature database, flagging every later use of them.
VERSION=0.1
# Require the path to scan as the only argument.
if [ -z "$1" ]; then
echo "Usage: $0 /path/to/check"
exit 2
fi
graudit -z -d php "$1" | \
perl -ne 'if ($_ =~ m/\$(\S+?)\s*=\s*\$_(GET|POST|REQUEST|COOKIE|FILES)\[.*?\]/) { print "\\\$$1\n"; }' | \
sort | uniq | \
graudit -d /dev/stdin "$1"
|
wireghoul/graudit
|
misc/phptaint.sh
|
Shell
|
gpl-3.0
| 409 |
# Install Qt build dependencies on RHEL 7.2:
#   gstreamer 1 for QtMultimedia
#   gtk3 style for QtGui/QStyle
#   libusb1 for tqtc-boot2qt/qdb
#   speech-dispatcher-devel for QtSpeech, otherwise it has no backend on Linux
sudo yum install -y \
gstreamer1-devel gstreamer1-plugins-base-devel \
gtk3-devel \
libusb1-devel \
speech-dispatcher-devel
|
geminy/aidear
|
oss/qt/qt-everywhere-opensource-src-5.9.0/coin/provisioning/qtci-linux-RHEL-7.2-x86_64/rhel_packages.sh
|
Shell
|
gpl-3.0
| 311 |
#!/bin/bash
# Regression test: a piernik run restarted mid-way (possibly on a
# different processor count) must reproduce the uninterrupted result.
# Both runs are compared at t=2.0 with gdf_distance.
rm -rf runs/advection_test 2> /dev/null
./setup advection_test -p problem.par.restart_test_v2
(
    cd runs/advection_test
    # ts1: single-process run, restarted on the same process count.
    echo "run_id = ts1"
    echo "Start: t = 0.0, nproc = 1"
    mpiexec -n 1 ./piernik > ts1.out
    echo "Restart: t = 1.0, nproc = 1"
    mpiexec -n 1 ./piernik -n '$END_CONTROL tend = 2.0 /' >> ts1.out
    echo "Finish: t = 2.0"
    echo
    # ts2: started on 5 processes, restarted on 3.
    # (fixed "run_is" typo in the label below)
    echo "run_id = ts2"
    echo "Start: t = 0.0, nproc = 5"
    mpiexec -n 5 ./piernik -n '$OUTPUT_CONTROL run_id = "ts2" /' > ts2.out
    echo "Restart: t = 1.0, nproc = 3"
    mpiexec -n 3 ./piernik -n '$END_CONTROL tend = 2.0 /' -n '$OUTPUT_CONTROL run_id = "ts2" /' >> ts2.out
    echo "Finish: t = 2.0"
)
# Source the yt environment when provided by the CI job.
# ([ -n "$YT" ] replaces the original unquoted [ ! -z $YT ], which only
# behaved correctly by accident when YT was unset.)
[ -n "$YT" ] && source "$YT"
./bin/gdf_distance runs/advection_test/moving_pulse_ts{1,2}_0002.h5 | tee compare.log
|
Xarthisius/piernik
|
problems/advection_test/restart_test_v2_jenkins.sh
|
Shell
|
gpl-3.0
| 813 |
#!/bin/sh
# Launch the list-monitoring ruby workers inside a detached tmux session:
# one window per worker (group members, group preparation loop, emails).
SESSION=artemis
BASE=/home/administrator/myschool-ruby-scripts/list-monitoring
tmux new-session -s "$SESSION" -d
tmux send-keys -t "$SESSION" "cd $BASE" C-m
tmux send-keys -t "$SESSION" 'ruby processGroupMembers.rb' C-m
tmux new-window -t "$SESSION"
tmux send-keys -t "$SESSION" "cd $BASE" C-m
tmux send-keys -t "$SESSION" 'while true; do ruby prepareGroups.rb ; ruby waitRandom.rb; done' C-m
tmux new-window -t "$SESSION"
tmux send-keys -t "$SESSION" "cd $BASE/efimeries" C-m
tmux send-keys -t "$SESSION" 'ruby processEmails.rb' C-m
|
haritak/myschool-ruby-scripts
|
list-monitoring/startListMonitoring.sh
|
Shell
|
gpl-3.0
| 594 |
#!/bin/bash
###################################################################################################################
# Author : Louis DAUBIGNARD
# Date : 19/01/2015
#
# Description : Script to:
#               - run an Oracle query and log the outcome
#
# Syntax : bddOracle.sh
#
###################################################################################################################
# Root path (directory the script is launched from).
PATHROOT="$PWD"
# Load shared helper functions (quoted: survives paths with spaces,
# unlike the original unquoted expansions).
. "$PATHROOT/../lib/functions.sh"
# Load configuration properties.
. "$PATHROOT/../config/config.sh"
##################################################################################################################
echo "--------------------------------------------------------------------------------------------------"
echo " TEST REQUETE ORACLE "
echo "--------------------------------------------------------------------------------------------------"
# Run the test query and log OK/KO depending on its exit status.
execReqOracle "SELECT * FROM TABLE;" "$PATHDESTLOG"
retval=$?
if [ "$retval" -eq 0 ]
then
    printMessageTo "$(date +%d/%m/%Y-%H:%M:%S) - TRAITEMENT OK" "2" "2" "$PATHDEST_FICLOG"
else
    printMessageTo "$(date +%d/%m/%Y-%H:%M:%S) - TRAITEMENT KO" "2" "2" "$PATHDEST_FICLOG"
fi
|
Acisia/SHELL-UNIX
|
BDD-ORACLE/bddOracle.sh
|
Shell
|
gpl-3.0
| 1,288 |
#!/bin/sh
# Print the unique directories holding Apache access logs:
#   - find CustomLog directives (case-insensitive) in httpd.conf,
#   - drop commented-out lines,
#   - take the log path (2nd field) and strip the trailing "access.log",
#   - sort the resulting directory list.
# NOTE(review): assumes every CustomLog path ends in "access.log"; paths
# with other filenames pass through unmodified — confirm against configs.
grep -i customlog /etc/httpd/conf/httpd.conf | grep -v "#" | awk '{print $2}' | sed 's/access.log//' | sort
|
gohdan/scripts
|
bash/get_apache_log_pathes.sh
|
Shell
|
gpl-3.0
| 120 |
#!/bin/bash
# (bash, not sh: pushd/popd below are bash builtins)
# Build and publish an Ubuntu 16.04 OpenStack image:
#   1. download the upstream cloud image and customize it via guestmount,
#   2. upload it to Glance as a temporary image,
#   3. run the packer build on top of it, then smoke-test the result.
BASENAME="ubuntu-16.04"
BUILDMARK="$(date +%Y-%m-%d-%H%M)"
IMG_NAME="$BASENAME-$BUILDMARK"
TMP_IMG_NAME="$BASENAME-tmp-$BUILDMARK"
IMG=ubuntu-16.04-server-cloudimg-amd64-disk1.img
IMG_URL=http://cloud-images.ubuntu.com/releases/16.04/release/$IMG
TMP_DIR=guest
# Always start from a freshly downloaded upstream image.
if [ -f "$IMG" ]; then
    rm $IMG
fi
wget -q $IMG_URL
if [ ! -d "$TMP_DIR" ]; then
    mkdir $TMP_DIR
fi
# Mount the image and patch cloud-init / ssh / grub settings in place.
guestmount -a $IMG -i $TMP_DIR
cp $TMP_DIR/etc/cloud/templates/hosts.debian.tmpl $TMP_DIR/etc/cloud/templates/hosts.tmpl
sed -i "/preserve_hostname/a manage_etc_hosts: true" $TMP_DIR/etc/cloud/cloud.cfg
sed -i "s/name: ubuntu/name: cloud/" $TMP_DIR/etc/cloud/cloud.cfg
sed -i "s/gecos: Ubuntu/gecos: Cloud user/" $TMP_DIR/etc/cloud/cloud.cfg
sed -i "/ed25519/d" $TMP_DIR/etc/ssh/sshd_config
# Use a stable device path instead of the cloudimg-rootfs label.
sed -i "s#LABEL=cloudimg-rootfs#/dev/vda1#" \
    $TMP_DIR/etc/fstab \
    $TMP_DIR/boot/grub/menu.lst \
    $TMP_DIR/boot/grub/grub.cfg
echo "sleep 5" >> $TMP_DIR/etc/init/plymouth-upstart-bridge.conf
sed -i "s/#GRUB_DISABLE_LINUX_UUID/GRUB_DISABLE_LINUX_UUID/" $TMP_DIR/etc/default/grub
guestunmount $TMP_DIR
# Upload the customized image to Glance under a temporary name.
glance image-create \
    --file $IMG \
    --disk-format qcow2 \
    --container-format bare \
    --name "$TMP_IMG_NAME"
TMP_IMG_ID="$(openstack image list --private | grep $TMP_IMG_NAME | tr "|" " " | tr -s " " | cut -d " " -f2)"
echo "TMP_IMG_ID for image '$TMP_IMG_NAME': $TMP_IMG_ID"
# Generate the build variables for packer from the template.
sed "s/TMP_IMAGE_ID/$TMP_IMG_ID/" $(dirname $0)/build-vars.template.yml > $(dirname $0)/build-vars.yml
sed -i "s/B_TARGET_NAME/$IMG_NAME/" $(dirname $0)/build-vars.yml
mkdir -p $(dirname $0)/output
cd ..
./build.sh ubuntu-xenial-xerus
BUILD_SUCCESS="$?"
echo "======= Deleting temporary image..."
#glance image-delete $TMP_IMG_ID
# BUGFIX: $? is always captured as a non-empty string ("0" or an error
# code), so the original test `[ ! "$BUILD_SUCCESS" ]` could never fire
# and build failures were silently ignored. Compare against "0" instead.
if [ "$BUILD_SUCCESS" != "0" ]; then
    echo "Build failed! Check packer log for details."
    echo "Error code: $BUILD_SUCCESS"
    exit 1
fi
IMG_ID="$(openstack image list --private | grep $IMG_NAME | tr "|" " " | tr -s " " | cut -d " " -f2)"
echo "IMG_ID for image '$IMG_NAME': $IMG_ID"
pwd
# Run the nose-based validation suite against the freshly built image.
export NOSE_IMAGE_ID=$IMG_ID
export NOSE_FLAVOR=21
export NOSE_NET_ID=$FACTORY_NETWORK_ID
export NOSE_SG_ID=$FACTORY_SECURITY_GROUP_ID
pushd ../test-tools/pytesting_os/
nosetests --nologcapture
popd
# FIXME: Actually delete images
# echo "======= Deleting deprecated images"
echo "======= Listing deprecated images"
openstack image list | grep -E "$BASENAME-[0-9]{4}-[0-9]{2}-[0-9]{2}-[0-9]{4}" | tr "|" " " | tr -s " " | cut -d " " -f 3 | sort -r | awk 'NR>5' # | xargs -r openstack image delete
glance image-show $IMG_ID
#if [ "$?" = "0" ]; then
#    echo "======= Validation testing..."
#    echo "URCHIN_IMG_ID=$IMG_ID $WORKSPACE/test-tools/urchin $WORKSPACE/test-tools/ubuntu-tests"
#    URCHIN_IMG_ID=$IMG_ID "$WORKSPACE/test-tools/urchin" "$WORKSPACE/test-tools/ubuntu-tests"
#fi
|
juliend88/os_image_factory
|
os/ubuntu-xenial-xerus/build.sh
|
Shell
|
gpl-3.0
| 2,813 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.