code
stringlengths
2
1.05M
repo_name
stringlengths
5
110
path
stringlengths
3
922
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
2
1.05M
#!/usr/bin/env bash # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Run as root to start secure datanodes in a security-enabled cluster. ## @description usage info ## @audience private ## @stability evolving ## @replaceable no function hadoop_usage() { echo "Usage: start-secure-dns.sh" } this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) # let's locate libexec... if [[ -n "${HADOOP_HOME}" ]]; then HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec" fi HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}" # shellcheck disable=SC2034 HADOOP_NEW_CONFIG=true if [[ -f "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh" ]]; then . "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh" else echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hdfs-config.sh." 2>&1 exit 1 fi echo "Starting datanodes" hadoop_uservar_su hdfs datanode "${HADOOP_HDFS_HOME}/bin/hdfs" \ --workers \ --config "${HADOOP_CONF_DIR}" \ --daemon start \ datanode
lukmajercak/hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh
Shell
apache-2.0
1,793
#!/usr/bin/env bash # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. targetdir=../../../target mkdir -p ${targetdir}/surefire-reports ${targetdir}/tap batsexe=$(which bats) 2>/dev/null if [[ -z ${batsexe} ]]; then echo "not ok - no bats executable found" > "${targetdir}/tap/shelltest.tap" echo "" echo "" echo "ERROR: bats not installed. Skipping bash tests." echo "ERROR: Please install bats as soon as possible." echo "" echo "" exit 0 fi for j in *.bats; do echo Running bats -t "${j}" bats -t "${j}" 2>&1 | tee "${targetdir}/tap/${j}.tap" result=${PIPESTATUS[0]} ((exitcode=exitcode+result)) done if [[ ${exitcode} -gt 0 ]]; then exit 1 fi exit 0
lukmajercak/hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/run-bats.sh
Shell
apache-2.0
1,416
#!/bin/bash ./scripts/build.sh watchify -t hbsfy -x opening_hours -x moment -x moment/locale/sv src/index.js -o site.js & http-server
perliedman/cykelbanor
scripts/run.sh
Shell
isc
135
#!/bin/bash set -e git push origin $(git tag -l v\* | sort -V | tail -1) ssh -t deploy@$1 " cd /home/deploy/Observatory git checkout master git pull origin git fetch --tags git checkout $(git tag -l v\* | sort -V | tail -1) sudo /home/deploy/Observatory/production/run_puppet.sh "
rcos/Observatory
production/update.sh
Shell
isc
283
#!/bin/bash # python2.6 to python2.7.x # github : https://github.com/apzkr/python26_to_python27x # If you change PYTHON_VERSION, please use 2.7.x version. PYTHON_VERSION=2.7.10 yum update -y yum install -y epel-release yum groupinstall -y "Development Tools" yum install -y zlib-devel openssl-devel sqlite-devel bzip2-devel cd /usr/local/src curl -O https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tgz tar xzf Python-$PYTHON_VERSION.tgz cd Python-$PYTHON_VERSION ./configure make make altinstall mv /usr/bin/python /usr/bin/python_old cp /usr/local/bin/python2.7 /usr/bin/python cp /usr/bin/yum /usr/bin/yum_old sed -i 's/\/usr\/bin\/python$/\/usr\/bin\/python2.6/g' /usr/bin/yum curl -O https://pypi.python.org/packages/source/s/setuptools/setuptools-15.2.tar.gz tar zxf setuptools-15.2.tar.gz python setuptools-15.2/ez_setup.py easy_install pip pip install virtualenv rm -rf /usr/local/src/Python-$PYTHON_VERSION rm -rf /usr/local/src/Python-$PYTHON_VERSION.tgz
apzkr/python26_to_python27x
python_update.sh
Shell
mit
1,002
#!/bin/sh # CYBERWATCH SAS - 2017 # # Security fix for DLA-330-1 # # Security announcement date: 2015-10-22 00:00:00 UTC # Script generation date: 2017-01-01 21:09:03 UTC # # Operating System: Debian 6 (Squeeze) # Architecture: x86_64 # # Vulnerable packages fix on version: # - unzip:6.0-4+deb6u3 # # Last versions recommanded by security team: # - unzip:6.0-4+deb6u3 # # CVE List: # - CVE-2015-7696 # - CVE-2015-7697 # # More details: # - https://www.cyberwatch.fr/vulnerabilites # # Licence: Released under The MIT License (MIT), See LICENSE FILE sudo apt-get install --only-upgrade unzip=6.0-4+deb6u3 -y
Cyberwatch/cbw-security-fixes
Debian_6_(Squeeze)/x86_64/2015/DLA-330-1.sh
Shell
mit
622
#!/bin/bash ### 1 - BUILD WEB FRONT-END ./build-web.sh ### 2 - BUILD API ./build-api.sh ### 3 - START BABBAGE export JAVA_OPTS="-Xdebug -Xmx256m -Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=n" # Restolino configuration export RESTOLINO_STATIC="src/main/web" export RESTOLINO_CLASSES="target/classes" export PACKAGE_PREFIX=com.github.onsdigital export DP_COLOURED_LOGGING=true export DP_LOGGING_FORMAT=pretty_json export DEV_ENVIRONMENT="Y" export IS_PUBLISHING="Y" export RELOAD_TEMPLATES="Y" export TEMPLATES_DIR=src/main/web/templates/handlebars export ENABLE_COVID19_FEATURE=false # Development: reloadable java $JAVA_OPTS \ -Drestolino.realm=$REALM \ -Drestolino.files=$RESTOLINO_STATIC \ -Drestolino.classes=$RESTOLINO_CLASSES \ -Drestolino.packageprefix=$PACKAGE_PREFIX \ -cp "target/classes/:target/dependency/*" \ com.github.davidcarboni.restolino.Main
ONSdigital/babbage
run-publishing.sh
Shell
mit
891
#!/bin/sh echo "$CODE" | base64 -d > /run/code.java javac /run/code.java cd /run java Solution
rlods/CodingChallenge
launchers/launcher-java.sh
Shell
mit
95
#!/bin/bash # # Please see the LICENSE file for details. # set -e if [[ "$(composekit-env)" == "Linux" ]]; then sudo service docker stop else dinghy off fi
jaequery/composekit
scripts/off.sh
Shell
mit
163
echo 'before running specify the fiji executable and macro (located in this dir):' echo 'export FIJI=path/fiji' echo 'export MACRO=ThisDir/resizeNRRDtoJFRC2.ijm' echo 'run in the directory above the volume.nrrd files' echo '-f forces recreation' echo '-h runs in headless mode using xvfb-run' for file in $(pwd)/*/*.nrrd do echo $file if [ -f $file ] then if [ $(head $file | grep "sizes: 1025 513 218" | wc -l) -gt 0 ] then echo processing $(pwd)${file/.\//\/}... # resize nrrd if [[ $1 == *"h"* ]] then timeout 15m xvfb-run -w 10 $FIJI -macro $MACRO $file pkill Xvfb else timeout 15m $FIJI -macro $MACRO $file fi sleep 5s fi else echo Broken file ${file}! Skipping... fi done
Robbie1977/NRRDtools
resizeNRRDtoJFRC2.sh
Shell
mit
771
#!/bin/bash #$ -S /bin/bash #$ -q abaqus.q #$ -l qname=abaqus.q #$ -cwd #$ -V #$ -l mf=192G #$ -j y #$ -o /home/hpc2862/repos/pRs/sub_log.txt #!/bin/bash cd /home/hpc2862/repos/pRs/inst/analysis/optimal_comorbid make cmb.txt
Chris1221/pRs
inst/analysis/optimal_comorbid/sub.sh
Shell
mit
228
# Installs gems in parallel export BUNDLE_JOBS=4
benhanzl/dotfiles
ruby/bundler.bash
Shell
mit
49
#!/bin/bash target_host=`bash ~/.tmux/remote_hostname.sh` # echo "tty: $pane_tty , host: $target_host" ping -c 1 $target_host | tail -1 | cut -d "/" -f5 # (ping -c 1 #(ps -af | grep "`tmux display -p \"#{pane_tty}\" | cut -d \"/\" -f3,4` " | grep -v grep | grep -oP "ssh [a-zA-Z0-9.@\-]+" | cut -d " " -f2 | grep -oP "(?=@*)[\w\d.\-]*" | tail -1) | tail -1 | cut -d "/" -f5)
finaldie/final_dev_env
tmux/nw_ttl.sh
Shell
mit
379
#!/bin/bash #adam-BL# . BonnLogger.sh #adam-BL# . log_start # script to estimate absolute photometric # calibration on a night basis. It substitutes # the old create_photoinfo.sh script. # $1 main dir # $2 standard dir # $3 science dir # $4 image extension # $5 filter # $6 filter name in standardstar catalog # $7 color index (e.g. VmI) # $8 Extinction for fits with constant extinction # $9 color for fit with constant color term # $10 name of image that you want to apply calibration to (adds HEADERS) !! IMPORTANT !! # $11 lower limit for magnitudes to be considered # (standard star catalog) (OPTIONAL argument; # set to 0.0 by default) # 03.11.2004 Filter measured magnitudes for nonsense # # 25.11.2004: # we adapted the script to the new output of fitsort # that no longer gives the file including the whole # but only the filename. # # 07.12.2004: # I corrected a bug in the writing of ZP and COEFF # keyowrds (FITS standard issues) # # 10.12.2004: # Now passes the new label argument to phot_abs.py # # 26.02.2005: # I introduced a new argument to account for a different # 'filter' name and the name for that filter in the standard # star catalog. # # 01.03.2005: # I introduced temporary output files for files created # by photo_abs.py. photo_abs.py uses pgplot that cannot # handle too long file names. # # 11.03.2005: # We now write all photometric solutions to the header. # In case the night was estimated non-photometric the # ZPCHOICE keyword 0 # # 13.03.2005: # In the header updates the next free dummy keyword is # now searched automatically. This part is written more # modular now. # # 17.03.2005: # If the night is marked as non-photometric, ZPCHOICE is # set to 0 now. # # 19.03.2005: # I corrected a syntax error in an if statement (that probably # was introduced in one of the last commits) # # 14.08.2005: # The call of the UNIX 'sort' program is now done # via a variable 'P_SORT'. # # 05.12.2005 # Chips whose NOTUSE or NOTPROCESS flag is set are not # considered in the determination of absolute photometric # zeropoints. # # 23.01.2006: # I introduced a new command line argument giving the minimum # magnitude for standard stars to be considered. This argument # is optional. This change should help to better reject bright # objects with saturated features. string="${7}" firstcolor=${string%m*} secondcolor=${string#*m} declare -a COLORS COLORS=( B V R I z ) echo $firstcolor echo $secondcolor index=0 for color in ${COLORS[*]} do echo $color if [ ${color} == ${firstcolor} ]; then plusindex=$index fi if [ ${color} == ${secondcolor} ]; then minusindex=$index fi let index=index+1 done echo $plusindex echo $minusindex . ${INSTRUMENT:?}.ini . bash_functions.include if [ $# -eq 11 ]; then MINMAG=${11} else MINMAG=0.0 fi if [ ! -d "/$1/$2/calib" ]; then mkdir /$1/$2/calib fi # find the chip catalogs that should be used for photometry, # i.e. reject chips that have the NOTUSE flag set. 
CATS="" i=1 while [ ${i} -le ${NCHIPS} ] #while [ ${i} -le 8 ] do if [ ${NOTUSE[${i}]:=0} -eq 0 ] && [ ${NOTPROCESS[${i}]:=0} -eq 0 ]; then CATS="${CATS} /$1/$2/cat/chip_${i}_merg.cat" fi i=$(( $i + 1 )) done ${P_LDACPASTE} -i ${CATS} -t PSSC\ -o ${TEMPDIR}/tmp_chips_$$.cat ${P_LDACFILTER} -i ${TEMPDIR}/tmp_chips_$$.cat -t PSSC\ -c "(((${6}mag < 99) AND ($7 < 99)) AND (Mag < 0))AND(${6}mag > ${MINMAG});"\ -o /$1/$2/cat/allchips_merg.cat ${P_LDACFILTER} -i /$1/$2/cat/allchips_merg.cat -t PSSC\ -c "($7 > -10) AND ($7 < 10);"\ -o /$1/$2/cat/allchips_tmp.cat LABEL=`echo $7 | sed 's/m/-/g'` # Create a FIFO #mkfifo ${TEMPDIR}/nights_$$.asc # Get a list of all nights and write it to the FIFO ${P_LDACTOASC} -i /$1/$2/cat/allchips_tmp.cat -t PSSC\ -b -k GABODSID | ${P_SORT} | uniq > ${TEMPDIR}/nights_$$.asc & echo ${TEMPDIR}/nights_$$.asc # fd=0: FIFO, fd=3: TTY i=0 vo='' #cat ${TEMPDIR}/nights_$$.asc | cp ${TEMPDIR}/nights_$$.asc ${TEMPDIR}/nightscopy_$$.asc declare -a nightslist while read ni do nightslist[$i]="$ni" let i=i+1 done < ${TEMPDIR}/nightscopy_$$.asc echo ${nightslist[*]} #exec 4<&0 < ${TEMPDIR}/nights_$$.asc SIGMAOK=1 SIGMAREJECT=3 #exec 3<&0 < ${TEMPDIR}/nights_$$.asc echo ${nightslist[*]} #while read NIGHT for NIGHT in ${nightslist[*]} do echo $NIGHT echo ${nightslist[*]} SIGMAOK=1 while [ ${SIGMAOK} -eq 1 ] do echo ${NIGHT} echo " ---==== Calibrating night ${NIGHT} ====---" echo ${P_LDACFILTER} -i /$1/$2/cat/allchips_tmp.cat -t PSSC\ -o /$1/$2/cat/night_${NIGHT}.cat -c "(GABODSID=${NIGHT});" ${P_LDACTOASC} -i /$1/$2/cat/night_${NIGHT}.cat -t PSSC\ -b -k Mag ${6}mag ${7} AIRMASS MagErr ${6}err | sort | uniq > ${TEMPDIR}/night_$5_${NIGHT}_$$.asc # added a new filter to get rid of duplicates, the sort | uniq filter #-b -k Mag ${6}mag ${7} AIRMASS OBS_NAME IMAGEID Ra Dec > ${TEMPDIR}/night_$5_${NIGHT}_$$.asc echo "./photo_abs.py --input=${TEMPDIR}/night_$5_${NIGHT}_$$.asc \ --output=${TEMPDIR}/photo_res --extinction="$8" \ --color="$9" --night=${NIGHT} --label=${LABEL} --sigmareject=${SIGMAREJECT}" ./photo_abs.py --input=${TEMPDIR}/night_$5_${NIGHT}_$$.asc \ --output=${TEMPDIR}/photo_res --extinction="$8" \ --color="$9" --night=${NIGHT} --label=${LABEL} --sigmareject=${SIGMAREJECT} --cluster=${CLUSTER} mv ${TEMPDIR}/photo_res.asc /$1/$2/calib/night_${NIGHT}_$5_result.asc mv ${TEMPDIR}/photo_res.ps /$1/$2/calib/night_${NIGHT}_$5_result.ps echo echo "Displaying solutions ..." echo ${TEMPDIR} echo gv /$1/$2/calib/night_${NIGHT}_$5_result.ps & GVPID=$! i=1 while read -a SOL do ZP[$i]=${SOL[0]} COEFF[$i]=${SOL[1]} COL[$i]=${SOL[2]} i=$(($i + 1)) done < /$1/$2/calib/night_${NIGHT}_$5_result.asc ZP[$i]=-1.0 COEFF[$i]=-1.0 echo echo echo echo '##########################################' echo "The GABODSID value for this night is ${NIGHT}" echo -n "These are the nights included in this run " echo ${nightslist[*]} echo "Current Sigma Rejection Value is ${SIGMAREJECT}" echo "(-1) Enter a new Simga Rejection value " echo "(0) No acceptable solution (not photometric!)" echo "(1) 3 Parameter fit" echo "(2) 2 Parameter fit" echo "(3) 1 Parameter fit" echo -n "Choose input and press [ENTER]: " # Now read from old stdin/TTY read CHOICE #<&3 SIGMAOK=0 if [ ${CHOICE} -eq -1 ]; then echo "Enter new Sigma Rejection value " read SIGMAREJECT #<&3 SIGMAOK=1 fi done kill ${GVPID} 2>/dev/null ZPCHOICE=${CHOICE} # # the 'solution' -1.0 for the zeropoint, # i.e. the night was marked nonphotometric # is stored in the last array element. 
Hence, # this case needs special treatment as in # the interactive setup this is choice '0'. if [ ${CHOICE} -eq 0 ]; then CHOICE=$i ZPCHOICE=0 fi echo echo "Updating 3 second image of cluster header ..." echo COLORCAL=${7} IMGTOCAL=${10} ${P_DFITS} /$1/$2/${IMGTOCAL}*$4.fits | ${P_FITSORT} -d GABODSID | \ ${P_GAWK} '{if ($2 == '${NIGHT}') print $1}' > ${TEMPDIR}/night_${NIGHT}_img_list_$$.asc while read IMG do echo "${IMG}" # Write the choice to the header value ${plusindex} writekey /$1/$2/${IMG} COLORPLU "${VALUE}" REPLACE value ${minusindex} writekey /$1/$2/${IMG} COLORMIN "${VALUE}" REPLACE value ${ZPCHOICE} writekey /$1/$2/${IMG} ZPCHOICE "${VALUE}" REPLACE value ${ZP[$CHOICE]} writekey /$1/$2/${IMG} ZP "${VALUE}" REPLACE value ${COEFF[$CHOICE]} writekey /$1/$2/${IMG} COEFF "${VALUE}" REPLACE i=1 while [ "${i}" -le "3" ] do value ${ZP[$i]} writekey /$1/$2/${IMG} ZP${i} "${VALUE}" REPLACE value ${COEFF[$i]} writekey /$1/$2/${IMG} COEFF${i} "${VALUE}" REPLACE value ${COL[$i]} writekey /$1/$2/${IMG} COL${i} "${VALUE}" REPLACE i=$(( $i + 1 )) done done < ${TEMPDIR}/night_${NIGHT}_img_list_$$.asc done # Restore old stdin, close fd=3 #exec <&3 3<&- #rm -f ${TEMPDIR}/nights_$$.asc ${TEMPDIR}/night_$5_*_$$.asc \ # ${TEMPDIR}/night_*_img_list_$$.asc ${TEMPDIR}/tmp_chips_$$.cat #adam-BL# log_status $?
deapplegate/wtgpipeline
create_abs_photo_3SEC.sh
Shell
mit
9,266
#!/bin/bash # https://gist.github.com/domenic/ec8b0fc8ab45f39403dd set -e # exit with nonzero exit code if anything fails # go to the out directory and create a *new* Git repo cd dist git init # add CNAME record echo "phimhd.pw" > CNAME # hack GitHub Pages so browserhistory would work cp index.html 404.html # inside this git repo we'll pretend to be a new user git config user.name "Travis CI" git config user.email "[email protected]" # The first and only commit to this new Git repo contains all the # files present with the commit message "Deploy to GitHub Pages". git add . git commit -m "Deploy to GitHub Pages" # Force push from the current repo's master branch to the remote # repo's gh-pages branch. (All previous history on the gh-pages branch # will be lost, since we are overwriting it.) We redirect any output to # /dev/null to hide any sensitive credential data that might otherwise be exposed. git push --force --quiet "https://${GH_TOKEN}@${GH_REF}" master:gh-pages > /dev/null 2>&1
longseespace/quickflix
deploy.sh
Shell
mit
1,006
#! /bin/sh ### BEGIN INIT INFO # Provides: power_switch # Required-Start: $remote_fs $syslog # Required-Stop: $remote_fs $syslog # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: Set the blinking led to show raspi activity # Description: This file should be used to construct scripts to be # placed in /etc/init.d. ### END INIT INFO # Author: Foo Bar <[email protected]> # # Please remove the "Author" lines above and replace them # with your own name if you copy and modify this script. # Do NOT "set -e" # PATH should only include /usr/* if it runs after the mountnfs.sh script PATH=/sbin:/usr/sbin:/bin:/usr/bin DESC="Launch the blinking_led.py script" NAME=blinking_led DAEMON=/home/pi/raspi_NAS_deploy/code/power_switch.py DAEMON_ARGS="--options args" PIDFILE=/var/run/$NAME.pid SCRIPTNAME=/etc/init.d/$NAME # Exit if the package is not installed [ -x "$DAEMON" ] || exit 0 # Read configuration variable file if it is present [ -r /etc/default/$NAME ] && . /etc/default/$NAME # Load the VERBOSE setting and other rcS variables . /lib/init/vars.sh # Define LSB log_* functions. # Depend on lsb-base (>= 3.2-14) to ensure that this file is present # and status_of_proc is working. . /lib/lsb/init-functions # # Function that starts the daemon/service # do_start() { # Return # 0 if daemon has been started # 1 if daemon was already running # 2 if daemon could not be started start-stop-daemon --start --background --quiet --make-pidfile --pidfile $PIDFILE --exec $DAEMON --test > /dev/null \ || return 1 start-stop-daemon --start --background --quiet --make-pidfile --pidfile $PIDFILE --exec $DAEMON -- \ $DAEMON_ARGS \ || return 2 # Add code here, if necessary, that waits for the process to be ready # to handle requests from services started subsequently which depend # on this one. As a last resort, sleep for some time. } # # Function that stops the daemon/service # do_stop() { # Return # 0 if daemon has been stopped # 1 if daemon was already stopped # 2 if daemon could not be stopped # other if a failure occurred #start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --pidfile $PIDFILE --name $NAME #RETVAL="$?" #[ "$RETVAL" = 2 ] && return 2 # Wait for children to finish too if this is a daemon that forks # and if the daemon is only ever run from this initscript. # If the above conditions are not satisfied then add some other code # that waits for the process to drop all resources that could be # needed by services started subsequently. A last resort is to # sleep for some time. #start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --exec $DAEMON #[ "$?" = 2 ] && return 2 # Many daemons don't delete their pidfiles when they exit. [ -f "$PIDFILE" ] && kill $(cat $PIDFILE) rm -f $PIDFILE return 0 } # # Function that sends a SIGHUP to the daemon/service # do_reload() { # # If the daemon can reload its configuration without # restarting (for example, when it is sent a SIGHUP), # then implement that here. # start-stop-daemon --stop --signal 1 --quiet --pidfile $PIDFILE --name $NAME return 0 } case "$1" in start) [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME" do_start case "$?" in 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; esac ;; stop) [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME" do_stop case "$?" in 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; esac ;; status) status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $? 
;; #reload|force-reload) # # If do_reload() is not implemented then leave this commented out # and leave 'force-reload' as an alias for 'restart'. # #log_daemon_msg "Reloading $DESC" "$NAME" #do_reload #log_end_msg $? #;; restart|force-reload) # # If the "reload" option is implemented then remove the # 'force-reload' alias # log_daemon_msg "Restarting $DESC" "$NAME" do_stop case "$?" in 0|1) do_start case "$?" in 0) log_end_msg 0 ;; 1) log_end_msg 1 ;; # Old process is still running *) log_end_msg 1 ;; # Failed to start esac ;; *) # Failed to stop log_end_msg 1 ;; esac ;; *) #echo "Usage: $SCRIPTNAME {start|stop|restart|reload|force-reload}" >&2 echo "Usage: $SCRIPTNAME {start|stop|status|restart|force-reload}" >&2 exit 3 ;; esac :
ice3/raspi_NAS_deploy
backup_config/services/power_switch.sh
Shell
mit
4,451
# Replace this with hostname of remote unless testing locally HOST=localhost PORT=8089 # This will connect to the socket listener at which point you can issue commands telnet $HOST $PORT
m2ware/ControlSocket
scripts/connect.sh
Shell
mit
188
# reset all the clients pkill fbs_traffic.sh echo "" > /root/rolling_code ssh [email protected] pkill fbs_traffic.sh ssh [email protected] 'echo "" > /root/rolling_code' ssh [email protected] pkill fbs_traffic.sh ssh [email protected] 'echo "" > /root/rolling_code' ssh [email protected] pkill fbs_traffic.sh ssh [email protected] 'echo "" > /root/rolling_code' ssh [email protected] pkill fbs_traffic.sh ssh [email protected] 'echo "" > /root/rolling_code' # reset the server ssh [email protected] pkill python3
mhammerly/fbs
end.sh
Shell
mit
479
#!/bin/sh # CYBERWATCH SAS - 2017 # # Security fix for USN-2357-1 # # Security announcement date: 2014-09-23 00:00:00 UTC # Script generation date: 2017-01-01 21:03:59 UTC # # Operating System: Ubuntu 12.04 LTS # Architecture: x86_64 # # Vulnerable packages fix on version: # - linux-image-3.2.0-1453-omap4:3.2.0-1453.73 # # Last versions recommanded by security team: # - linux-image-3.2.0-1453-omap4:3.2.0-1453.73 # # CVE List: # - CVE-2014-3601 # - CVE-2014-5471 # - CVE-2014-5472 # # More details: # - https://www.cyberwatch.fr/vulnerabilites # # Licence: Released under The MIT License (MIT), See LICENSE FILE sudo apt-get install --only-upgrade linux-image-3.2.0-1453-omap4=3.2.0-1453.73 -y
Cyberwatch/cbw-security-fixes
Ubuntu_12.04_LTS/x86_64/2014/USN-2357-1.sh
Shell
mit
713
qr () { qrencode -l L -v 1 -o output.png -r "$1" && open output.png }
patbeagan1/libbeagan
scripts/util/qrencoder.sh
Shell
mit
74
#!/bin/bash # # Determines exact buildpack details for CloudFoundry app # # Supporting blog: https://fabianlee.org/2020/10/20/cloudfoundry-determining-buildpack-used-by-application/ # app="$1" if [ -z "$app" ]; then echo "ERROR You must provide a valid cf app name" exit 1 fi cf apps | awk {'print $1'} | grep $app >/dev/null if [ $? -eq 1 ]; then echo "ERROR the source app name $app is not valid" exit 1 fi guid=$(cf app $app --guid) echo "$app guid = $guid" bpackguid=$(cf curl /v2/apps/$guid/summary | jq .detected_buildpack_guid | tr -d '"') echo "buildpack guid = $bpackguid" # list all buildpacks # cf curl /v2/buildpacks bpackname=$(cf curl /v2/buildpacks/$bpackguid | jq .entity.name | tr -d '"') bpackfile=$(cf curl /v2/buildpacks/$bpackguid | jq .entity.filename | tr -d '"') echo "buildpack used by $app name/file = $bpackname/$bpackfile"
fabianlee/blogcode
CF/which-cf-buildpack.sh
Shell
mit
864
sudo pkg install -y autoconf automake bison gdb git libtool m4 pkgconf valgrind sudo pkg install -y openjdk8 gnutls libdbi libesmtp libgcrypt glib hiredis openldap-sasl-client libltdl libmemcached libmodbus mosquitto mysql56-client libnotify openipmi liboping libpcap postgresql94-client protobuf protobuf-c rabbitmq-c-devel rrdtool net-snmp libstatgrab tokyocabinet tokyotyrant nut varnish4 libvirt libxml2 yajl python34 libsigrok librouteros sed '/ForceCommand.*droplet/d' /etc/ssh/sshd_config > ~/sshd_config sudo cp ~/sshd_config /etc/ssh/sshd_config sudo cp -a ~/.ssh /root/ sudo chown -R root:wheel /root/.ssh/ # required by openjdk cat << EOF | sudo tee -a /etc/fstab fdesc /dev/fd fdescfs rw 0 0 proc /proc procfs rw 0 0 EOF sudo mkdir -p /opt/jenkins sudo ln -s /usr/local/bin/java /opt/jenkins/
rubenk/collectd-ci
packer/scripts/freebsd10.sh
Shell
mit
872
# Sets reasonable OS X defaults. # # Or, in other words, set shit how I like in OS X. # # The original idea (and a couple settings) were grabbed from: # https://github.com/mathiasbynens/dotfiles/blob/master/.osx # # Run ./set-defaults.sh and you'll be good to go. # Use AirDrop over every interface. srsly this should be a default. defaults write com.apple.NetworkBrowser BrowseAllInterfaces 1 # Show the ~/Library folder. chflags nohidden ~/Library ############################################################################### # Finder # ############################################################################### # Use list view in all Finder windows by default # Four-letter codes for the view modes: `Nlsv` `icnv`, `clmv`, `glyv` defaults write com.apple.Finder FXPreferredViewStyle "clmv" # When performing a search, search the current folder by default defaults write com.apple.finder FXDefaultSearchScope -string "SCcf" # Set the Finder prefs for showing a few different volumes on the Desktop. defaults write com.apple.finder ShowExternalHardDrivesOnDesktop -bool true defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool true # Disable the warning when changing a file extension defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false # Avoid creating .DS_Store files on network volumes defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true defaults write com.apple.desktopservices DSDontWriteUSBStores -bool true # Show all filename extensions defaults write NSGlobalDomain AppleShowAllExtensions -bool true # Save to disk (not to iCloud) by default defaults write NSGlobalDomain NSDocumentSaveNewDocumentsToCloud -bool false # Disable Resume system-wide defaults write com.apple.systempreferences NSQuitAlwaysKeepsWindows -bool false ############################################################################### # Trackpad, mouse, keyboard, Bluetooth accessories, and input # ############################################################################### # Trackpad: enable tap to click for this user and for the login screen defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -bool true defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1 defaults write NSGlobalDomain com.apple.mouse.tapBehavior -int 1 # Enable full keyboard access for all controls # (e.g. enable Tab in modal dialogs) defaults write NSGlobalDomain AppleKeyboardUIMode -int 3 # Disable press-and-hold for keys in favor of key repeat. defaults write -g ApplePressAndHoldEnabled -bool false # Set a really fast key repeat. 
defaults write NSGlobalDomain KeyRepeat -int 2 defaults write NSGlobalDomain InitialKeyRepeat -int 15 # Disable auto-correct defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false # Disable smart quotes as they’re annoying when typing code defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false ############################################################################### # Screen # ############################################################################### # Save screenshots to the desktop defaults write com.apple.screencapture location -string "${HOME}/Desktop" # Save screenshots in PNG format (other options: BMP, GIF, JPG, PDF, TIFF) defaults write com.apple.screencapture type -string "png" # Enable subpixel font rendering on non-Apple LCDs # Reference: https://github.com/kevinSuttle/macOS-Defaults/issues/17#issuecomment-266633501 defaults write NSGlobalDomain AppleFontSmoothing -int 1 ############################################################################### # Dock, Dashboard, and hot corners # ############################################################################### # Automatic hide and show the dock defaults write com.apple.dock autohide -bool true ############################################################################### # Safari & WebKit # ############################################################################### # Hide Safari’s bookmarks bar by default defaults write com.apple.Safari ShowFavoritesBar -bool false # Hide Safari’s sidebar in Top Sites defaults write com.apple.Safari ShowSidebarInTopSites -bool false # Show full website address in Safari. defaults write com.apple.Safari ShowFullURLInSmartSearchField -bool true # Remove useless icons from Safari’s bookmarks bar defaults write com.apple.Safari ProxiesInBookmarksBar "()" # Set Safari’s home page to `about:blank` for faster loading defaults write com.apple.Safari HomePage -string "about:blank" # Set up Safari for development. 
defaults write com.apple.Safari IncludeInternalDebugMenu -bool true defaults write com.apple.Safari IncludeDevelopMenu -bool true defaults write com.apple.Safari WebKitDeveloperExtrasEnabledPreferenceKey -bool true defaults write com.apple.Safari "com.apple.Safari.ContentPageGroupIdentifier.WebKit2DeveloperExtrasEnabled" -bool true defaults write NSGlobalDomain WebKitDeveloperExtras -bool true # Enable continuous spellchecking defaults write com.apple.Safari WebContinuousSpellCheckingEnabled -bool true # Disable auto-correct defaults write com.apple.Safari WebAutomaticSpellingCorrectionEnabled -bool false # Disable plug-ins defaults write com.apple.Safari WebKitPluginsEnabled -bool false defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2PluginsEnabled -bool false # Disable Java defaults write com.apple.Safari WebKitJavaEnabled -bool false defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2JavaEnabled -bool false defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2JavaEnabledForLocalFiles -bool false # Block pop-up windows defaults write com.apple.Safari WebKitJavaScriptCanOpenWindowsAutomatically -bool false defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2JavaScriptCanOpenWindowsAutomatically -bool false # Disable auto-playing video #defaults write com.apple.Safari WebKitMediaPlaybackAllowsInline -bool false #defaults write com.apple.SafariTechnologyPreview WebKitMediaPlaybackAllowsInline -bool false #defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2AllowsInlineMediaPlayback -bool false #defaults write com.apple.SafariTechnologyPreview com.apple.Safari.ContentPageGroupIdentifier.WebKit2AllowsInlineMediaPlayback -bool false # Enable “Do Not Track” defaults write com.apple.Safari SendDoNotTrackHTTPHeader -bool true # Update extensions automatically defaults write com.apple.Safari InstallExtensionUpdatesAutomatically -bool true ############################################################################### # Mail # ############################################################################### # Copy email addresses as `[email protected]` instead of `Foo Bar <[email protected]>` in Mail.app defaults write com.apple.mail AddressesIncludeNameOnPasteboard -bool false # Add the keyboard shortcut ⌘ + Enter to send an email in Mail.app defaults write com.apple.mail NSUserKeyEquivalents -dict-add "Send" "@\U21a9" # Disable inline attachments (just show the icons) defaults write com.apple.mail DisableInlineAttachmentViewing -bool true ############################################################################### # Terminal & iTerm 2 # ############################################################################### # Enable Secure Keyboard Entry in Terminal.app # See: https://security.stackexchange.com/a/47786/8918 defaults write com.apple.terminal SecureKeyboardEntry -bool true ############################################################################### # Messages # ############################################################################### # Disable smart quotes as it’s annoying for messages that contain code defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "automaticQuoteSubstitutionEnabled" -bool false # Disable continuous spell checking defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "continuousSpellCheckingEnabled" -bool false 
############################################################################### # Tweetbot.app # ############################################################################### # Bypass the annoyingly slow t.co URL shortener defaults write com.tapbots.TweetbotMac OpenURLsDirectly -bool true ############################################################################### # Time Machine # ############################################################################### # Prevent Time Machine from prompting to use new hard drives as backup volume defaults write com.apple.TimeMachine DoNotOfferNewDisksForBackup -bool true # Disable local Time Machine backups # hash tmutil &> /dev/null && sudo tmutil disablelocal
joseramonc/dotfiles
osx/set-defaults.sh
Shell
mit
9,509
#!/bin/bash # copy data to _docker directory # read the config settings and define functions source ${0%/*}/configtemplate.sh source ${0%/*}/doconfig.sh source ${0%/*}/functions.sh dest="$DOCKER_DIR" ensureDir "$dest" for file in configtemplate.sh doconfig.sh functions.sh localize.sh provision.sh do cp "$SCRIPT_SRC_DIR/docker/$file" "$dest" done if [[ ! -e "$DOCKER_DIR/config.sh" ]]; then echo " Do not forget to: cd $DOCKER_DIR mv configtemplate.sh config.sh vim config.sh and adapt the contents to your situation. " fi
ETCBC/shebanq
scripts/docker/localize.sh
Shell
mit
540
#!/bin/bash awk '/^S/{print ">"$2"\n"$3}' $1 | fold
yunfeiguo/bioinfo_toolbox
utilities/seq_related/gfa2fa.sh
Shell
mit
52
#!/bin/bash APPNAME=landing SRCDIR=/var/deploy/landing SRCFILE=landing.tar.gz OUTDIR=/var/www/landing CWD=$(pwd) INFRASTRUCTURE=/var/www/infrastructure LOGFILE=/var/log/apicatus/deploy.landing.log echo "building: "$APPNAME # test if fd 1 (STDOUT) is NOT associated with a terminal if [ ! -t 1 ] ; then # redirect STDOUT and STDERR to a file (note the single > -- this will truncate log) exec > $LOGFILE 2>&1 fi # Stop server $INFRASTRUCTURE/apicatus.landing.sh stop # Remove previous snapshot rm -fr $OUTDIR.$(date +"%m-%d-%y") # Move original echo "MOVE" mv $OUTDIR $OUTDIR.$(date +"%m-%d-%y") # Make destination directory echo "MKDIR" mkdir $OUTDIR # copy target files echo "COPY" cp $SRCDIR/$SRCFILE $OUTDIR/ # Change working directory cd $OUTDIR pwd # unzip echo "UNZIP" gunzip landing.tar.gz # untar echo "UNTAR" tar -xvf landing.tar # install dependencies echo "INSTALL" npm install # get out of deploy dir cd $CWD # start server $INFRASTRUCTURE/apicatus.landing.sh start
apicatus/infrastructure
deploy.landing.sh
Shell
mit
992
#!/bin/bash # TODO: the script should give an error if less than two args is given. CODEBOOKNAME=$1 # the file from which the codebook is read. See makecodebook.sh. DATASET=$2 # a file that contains a list of filenames, one per # line. Each filename contains the sequence of feature # vectors for an audio piece. OUTPUTDIR=$3 # the directory where the models must be saved. # make vqmm # if [ "$?" -eq 0 ] # then vqmm -quiet y -output-dir $OUTPUTDIR -list-of-files $DATASET -epsilon 0.00001 -codebook $CODEBOOKNAME -make-class-models # fi # Example: # ./trainclassmodels.sh cbk/ISMIR2004.ALL.YMFCC2.r50.s25.cbk /tmp/ISMIR2004.r50.fold1.csv ./Models/ISMIR2004-r50.s25/
ybayle/ReproducibleResearchIEEE2017
src/vqmm/scripts/trainclassmodels.sh
Shell
mit
704
#!/bin/bash set -eo pipefail -o nounset if [[ -z $(conda info --envs | grep "*" | grep -o "\/.*") ]]; then export CONDA_ROOT=$(conda info --root) env_dir=$CONDA_ROOT export RECIPE_DIR=$CONDA_ROOT/share/ggd/Homo_sapiens/hg19/hg19-ncbi-refseq-genes-ucsc-v1/1 elif [[ $(conda info --envs | grep "*" | grep -o "\/.*") == "base" ]]; then export CONDA_ROOT=$(conda info --root) env_dir=$CONDA_ROOT export RECIPE_DIR=$CONDA_ROOT/share/ggd/Homo_sapiens/hg19/hg19-ncbi-refseq-genes-ucsc-v1/1 else env_dir=$(conda info --envs | grep "*" | grep -o "\/.*") export CONDA_ROOT=$env_dir export RECIPE_DIR=$env_dir/share/ggd/Homo_sapiens/hg19/hg19-ncbi-refseq-genes-ucsc-v1/1 fi PKG_DIR=`find "$CONDA_SOURCE_PREFIX/pkgs/" -name "$PKG_NAME-$PKG_VERSION*" | grep -v ".tar.bz2" | grep "$PKG_VERSION.*$PKG_BUILDNUM$"` if [ -d $RECIPE_DIR ]; then rm -r $RECIPE_DIR fi mkdir -p $RECIPE_DIR (cd $RECIPE_DIR && bash $PKG_DIR/info/recipe/recipe.sh) cd $RECIPE_DIR ## Iterate over new files and replace file name with data package name and data version for f in *; do ext="${f#*.}" filename="{f%%.*}" if [[ ! -f "hg19-ncbi-refseq-genes-ucsc-v1.$ext" ]] then (mv $f "hg19-ncbi-refseq-genes-ucsc-v1.$ext") fi done ## Add environment variables #### File if [[ `find $RECIPE_DIR -type f -maxdepth 1 | wc -l | sed 's/ //g'` == 1 ]] ## If only one file then recipe_env_file_name="ggd_hg19-ncbi-refseq-genes-ucsc-v1_file" recipe_env_file_name="$(echo "$recipe_env_file_name" | sed 's/-/_/g' | sed 's/\./_/g')" file_path="$(find $RECIPE_DIR -type f -maxdepth 1)" elif [[ `find $RECIPE_DIR -type f -maxdepth 1 | wc -l | sed 's/ //g'` == 2 ]] ## If two files then indexed_file=`find $RECIPE_DIR -type f \( -name "*.tbi" -or -name "*.fai" -or -name "*.bai" -or -name "*.crai" -or -name "*.gzi" \) -maxdepth 1` if [[ ! -z "$indexed_file" ]] ## If index file exists then recipe_env_file_name="ggd_hg19-ncbi-refseq-genes-ucsc-v1_file" recipe_env_file_name="$(echo "$recipe_env_file_name" | sed 's/-/_/g' | sed 's/\./_/g')" file_path="$(echo $indexed_file | sed 's/\.[^.]*$//')" ## remove index extension fi fi #### Dir recipe_env_dir_name="ggd_hg19-ncbi-refseq-genes-ucsc-v1_dir" recipe_env_dir_name="$(echo "$recipe_env_dir_name" | sed 's/-/_/g' | sed 's/\./_/g')" activate_dir="$env_dir/etc/conda/activate.d" deactivate_dir="$env_dir/etc/conda/deactivate.d" mkdir -p $activate_dir mkdir -p $deactivate_dir echo "export $recipe_env_dir_name=$RECIPE_DIR" >> $activate_dir/env_vars.sh echo "unset $recipe_env_dir_name">> $deactivate_dir/env_vars.sh #### File ## If the file env variable exists, set the env file var if [[ ! -z "${recipe_env_file_name:-}" ]] then echo "export $recipe_env_file_name=$file_path" >> $activate_dir/env_vars.sh echo "unset $recipe_env_file_name">> $deactivate_dir/env_vars.sh fi echo 'Recipe successfully built!'
gogetdata/ggd-recipes
recipes/genomics/Homo_sapiens/hg19/hg19-ncbi-refseq-genes-ucsc-v1/post-link.sh
Shell
mit
2,964
#!/bin/bash if [ $# -eq 0 ] then echo "Usage: dumpCoverage.sh <outdir> <dynodroid_dir> <apk>" exit fi #for i in `seq 1 12`; i=0 while true do i=$((i+1)) sleep 300 #sleep for 5 minutes echo $1/$i adb shell am broadcast -a edu.gatech.m3.emma.COLLECT_COVERAGE adb pull /mnt/sdcard/coverage.ec $1/coverage$i.ec echo "copying monkeyrunner output file" cp $2/workingDir/$3_WBT_RandomBiasBased_1000000/TestStrategy/WBT.log $1/WBT.$i.log done
open-nata/xdroid
myscripts/dumpCoverageAndDynodroidLogs.sh
Shell
mit
462
#!/bin/bash # T. Carman, Spring 2016 # Simple example script for generating a bunch of static plots # using the calibration-viewer.py program. Intended to be modified # as needed. # # # Command line argument processing # function usage () { echo "usage: " echo " $ ./bulk-plot [--numpfts N] [--sparse] [--parallel] [--format F] [-h | --help] --outdir PATH --tag TAG" echo "" echo " --sparse Prints only one suite, for faster runs and testing." echo " --parallel Runs the plotting script as a background process so" echo " many plots are made in parallel." echo " --numpfts Change the number of pfts plotted. '3' will plot pfts 0,1,2". echo " --outdir The path to a directory in which to store the generated plots." echo " --tag A pre-fix for the folder continaing the generated plots." echo " The folder will be created within the folder specified at the" echo " path given for '--outdir'. The current git tag is good to use," echo " but the value you provide for "--tag" can be anything else you like." echo " --format The file format to use for saving plots. Default=pdf" echo "" echo "NOTE: The bulk plot capability has been added directly to the " echo " calibration-viewer.py script. The implementation there is much " echo " more efficient because the json files/archives only need to be " echo " opened once. It is probably preferable to use " echo " calibration-viewer.py over this script! See the " echo " calibration-viewer.py --help flag for more info." echo " " if [[ "$#" -gt 0 ]] then echo "Error: $1" fi echo "" } # Function that basically just passes arguments thru to the # underlying calibration-viewer. This facilitates running the # calibration viewer in the background by calling "parallel_plotter <ARGS> &" function parallel_plotter () { ./calibration/calibration-viewer.py "$@" } NUM_PFTS=10 SUITES=("Fire" "Soil" "Vegetation" "VegSoil" "Environment" "NCycle") TAG= TARGET_CMT=5 OUTDIR= FORMAT="pdf" PFLAG= # Set to '&' to run plotting processes in background. while [ "$1" != "" ]; do case $1 in -n | --numpfts ) shift NUM_PFTS="$1" ;; # useful for debugging so you don't have to # wait for everything to plot --sparse ) SUITES=("VegSoil") ;; --parallel ) PFLAG="true" ;; --tag ) shift TAG="$1" ;; --format ) shift FORMAT="$1" ;; --targetcmt ) shift TARGET_CMT="$1" ;; --outdir ) shift OUTDIR="$1" ;; -h | --help ) usage "no error" exit ;; * ) usage "Problem with command line arguments!" exit 1 esac shift done if [[ $TAG == "" ]] then usage "You must supply a tag!" exit 1 fi if [[ $OUTDIR == "" ]] then usage "You must supply a directory for output!" exit 1 fi if [[ ! -x "calibration/calibration-viewer.py" ]] then echo "Cannot find the plotter from here!" echo "Try executing this script ($(basename $0)) from the main dvmdostem directory." exit 1 fi echo "Plotting for pfts 0 to $NUM_PFTS" echo "Will plot these suites:" for SUITE in ${SUITES[@]} do echo " $SUITE" done echo "Using TAG: $TAG" # # Finally, start working # SAVE_LOC="$OUTDIR/$TAG" echo "Making directory: $SAVE_LOC" mkdir -p "$SAVE_LOC" # Collect metadata cp "config/config.js" "$SAVE_LOC/" cp "config/calibration_directives.txt" "$SAVE_LOC" # build metadata? cmd line args? # Loop over suites and pfts creating and saving a bunch of plots. 
for SUITE in ${SUITES[@]}; do if [[ "$SUITE" == "Fire" || "$SUITE" == "Environment" ]] then args="--save-format $FORMAT --suite $SUITE --tar-cmtnum $TARGET_CMT --no-show --save-name $SAVE_LOC/$TAG-$SUITE" if $PFLAG then parallel_plotter $args & else parallel_plotter $args fi else for (( I=0; I<$NUM_PFTS; ++I )) do args="--save-format $FORMAT --suite $SUITE --tar-cmtnum $TARGET_CMT --no-show --save-name $SAVE_LOC/$TAG-$SUITE-pft$I --pft $I" if $PFLAG then parallel_plotter $args & else parallel_plotter $args fi done fi done if $PFLAG then echo "waiting for all sub processes to finish..." wait fi echo "Done plotting."
tobeycarman/dvm-dos-tem
scripts/bulk-plot.sh
Shell
mit
4,670
#!/bin/bash set -e if [ `id -u` = 0 ] ; then echo 'Starting setup...' else echo 'Root privilegies are required, exit.' exit 1 fi apt-get update apt-get install \ apt-transport-https \ ca-certificates \ curl \ gnupg2 \ software-properties-common curl -fsSL https://download.docker.com/linux/$(. /etc/os-release; echo "$ID")/gpg | apt-key add - apt-key fingerprint 0EBFCD88 add-apt-repository \ "deb [arch=amd64] https://download.docker.com/linux/$(. /etc/os-release; echo "$ID") \ $(lsb_release -cs) \ stable" apt-get update apt-get install docker-ce docker run hello-world echo 'Done.'
ColossusCoinXT/ColossusCoinXT
contrib/docker/setup_docker_debian.sh
Shell
mit
648
### # check if the dotfiles must be recreated ### # Check both ~/.dotfiles and ~/.bash_profile as we # don't want to run if the user has “old style” dotfiles. if [ -d ~/.dotfiles ] || [ -f ~/.bash_profile ]; then return fi ### # pre-clone ### # Prepare GnuPG homedir export GNUPGHOME=$(mktemp -d --tmpdir gpg.XXXXXX) trap "rm -rf -- '${GNUPGHOME}'; unset GNUPGHOME" EXIT cat > "${GNUPGHOME}/gpg.conf" <<EOF # Never, ever, ever do this in your personal gpg.conf # However, this is sane when you know you use an empty GNUPGHOME keyring /var/lib/hashbang/admins.gpg trust-model always EOF ### # cloning ### if ! git clone --recursive https://github.com/hashbang/dotfiles ~/.dotfiles; then cat >&2 <<EOF CRITICAL: Failed to clone your dotfiles from https://github.com/hashbang/dotfiles EOF rm -rf ~/.dotfiles return fi if ! git -C ~/.dotfiles verify-commit HEAD; then echo "CRITICAL: Failed to verify signature on dotfiles" >&2 rm -rf ~/.dotfiles return fi rm -rf -- "${GNUPGHOME}" unset GNUPGHOME trap - EXIT ### # stowing ### cd ~/.dotfiles stow bash git gnupg hashbang ssh tmux weechat zsh cd ### # Make sure a proper maildir is in place ### mkdir -p ~/Mail/cur ~/Mail/new ~/Mail/tmp ### # Edit the welcome message ### sed -i "s/{date}/$(date '+%a, %-d %b %Y %T %Z')/g" Mail/new/msg.welcome sed -i "s/{username}/$(whoami)/g" Mail/new/msg.welcome
hashbang/shell-etc
profile.d/dotfiles.sh
Shell
mit
1,418
#!/usr/bin/env bash echo "This is on the standard OUTPUT" echo "This is on the standard ERROR" >&2 echo "This is on the standard OUTPUT too" echo "This is on the standard ERROR too" >&2
lbriais/easy_app_helper
test/process/test.sh
Shell
mit
188
#! /bin/bash cwd=`pwd` if [ "$EUID" -ne 0 ] then echo "Please run as root" exit fi chmod +x *.sh read -p "Would you like to add always-on monitoring (y/N)? " monitor_axela case ${monitor_axela} in [yY] ) echo "monitoring WILL be installed." ;; * ) echo "monitoring will NOT be installed." ;; esac apt-get update apt-get install wget git -y cd /opt echo "--copying pocketsphinx--" git clone https://github.com/cmusphinx/pocketsphinx.git cd $cwd wget --output-document vlc.py "http://git.videolan.org/?p=vlc/bindings/python.git;a=blob_plain;f=generated/vlc.py;hb=HEAD" apt-get install python-dev swig libasound2-dev memcached python-pip python-alsaaudio vlc libpulse-dev -y pip install -r requirements.txt touch /var/log/axela.log case ${monitor_axela} in [yY] ) cp initd_axela_monitored.sh /etc/init.d/Axela ;; * ) cp initd_axela.sh /etc/init.d/Axela ;; esac update-rc.d Axela defaults echo "--Creating creds.py--" echo "Enter your Device Type ID:" read productid echo ProductID = \"$productid\" > creds.py echo "Enter your Security Profile Description:" read spd echo Security_Profile_Description = \"$spd\" >> creds.py echo "Enter your Security Profile ID:" read spid echo Security_Profile_ID = \"$spid\" >> creds.py echo "Enter your Client ID:" read cid echo Client_ID = \"$cid\" >> creds.py echo "Enter your Client Secret:" read secret echo Client_Secret = \"$secret\" >> creds.py python ./auth_web.py
artiya4u/Axela
setup.sh
Shell
mit
1,505
#!/bin/bash -e pip freeze | grep -q Sphinx || pip install Sphinx pushd docs make clean make html popd rm -rf docs/linux mv docs/_build/html docs/linux echo 'Done!'
Fewbytes/rubber-docker
mkdocs.sh
Shell
mit
168
#!/bin/bash for f in *.dot do dot -Tpng $f -o ../images/${f/dot/png} done
drBenway/siteResearch
dot files/dotfiles.sh
Shell
mit
79
#!/usr/bin/env bash print_stderr "Running installation for sudo." apt-get -y install sudo readonly INSTALLATION_STATUS=$? program_exists "sudo" if [[ $? -eq 0 ]]; then print_stderr "Adding [${SCRIPT_INVOKER_USERNAME}] to the sudoer's group." usermod -a -G sudo "${SCRIPT_INVOKER_USERNAME}" else print_stderr "Unable to add [${SCRIPT_INVOKER_USERNAME}] to the sudoer's group." print_log "Unable to add [${SCRIPT_INVOKER_USERNAME}] to the sudoer's group due to a possible installation failure." fi exit $INSTALLATION_STATUS
nexocentric/dotfiles
installers/sudo.sh
Shell
mit
528
#!/bin/bash set -e DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" export PATH=$PATH:$DIR/java-$(uname -i)/bin export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$DIR/java-$(uname -i)/lib $DIR/java-$(uname -i)/bin/java -Xms128m -Xmx128m -jar $DIR/devicehive-server/devicehive-boot.jar \ -Dspring.datasource.url=jdbc:postgresql://127.0.0.1:5432/devicehive \ -Dspring.datasource.username="postgres" \ -Dspring.datasource.password="12345" \ -Dmetadata.broker.list=127.0.0.1:9092 \ -Dzookeeper.connect=127.0.0.1:2181 \ -Dserver.context-path=dh/rest \ -Dserver.port=8080
Nikolay-Kha/devicehive-portable-server
scripts/devicehive.sh
Shell
mit
575
#!/bin/bash # Import all users listed in the IAM UsersGroup and assign each sudo privileges. UsersGroup="" aws iam get-group --group-name "${UsersGroup}" --query "Users[].[UserName]" --output text | while read User; do UserName="$User" # Replace any illegal characters UserName=${UserName//"+"/".plus."} UserName=${UserName//"="/".equal."} UserName=${UserName//","/".comma."} UserName=${UserName//"@"/".at."} # Add user if not already present if ! grep "^$UserName:" /etc/passwd > /dev/null; then /usr/sbin/useradd --create-home --shell /bin/bash "$UserName" fi # Add user entry in /etc/sudoers.d UserFileName=$(echo "$UserName" | tr "." " ") UserSudoFilePath="/etc/sudoers.d/$UserFileName" echo "$UserName ALL=(ALL) NOPASSWD:ALL" > "$UserSudoFilePath" done
ehalpern/aws-ec2-ssh
import_users.sh
Shell
mit
793
#! /usr/bin/bash # 2018年の麻布中学校の入試問題(算数 問4)の解答例です。 # 問題は以下のサイトにあります。 # https://www.inter-edu.com/nyushi/azabu/ ruby -e "r=->(a){['+','*','=='].repeated_permutation(a.size-1).map{|x|a.zip(x).join}.select{|s|s.count('=')==2&&eval(s)}};p r[[1,4,5,6,7,8]];p r[[2,3,5,7,11,13,17]]"
zettsu-t/examQuestions
2018math.sh
Shell
mit
355
#!/usr/bin/env bash echo "Attempting to process data..." echo "\n(1/7) Removing unnecessary columns from accidents data..." python3 1_remove_accidents_columns.py echo "\n(2/7) Renaming streets, dropping uncertain road-addresses..." python3 2_modify_road_usages_names.py echo "\n(3/7) Dropping somewhat duplicate road usages data from streets with multiple measurement points..." python3 3_drop_additional_road_usages_measurements.py echo "\n(4/7) By car count, merging rows sharing (year, hour) which are in 15-minute range instead of hourly range. Summing also over directions..." python3 4_merge_road_usages_hours.py echo "\n(5/7) Formatting accident-data address-names to match road-usage-data..." python3 5_change_accidents_katuosoite.py echo "\n(6/7) Filtering accidents by addresses, which are included in usage-data..." python3 6_select_accidents_in_road_usages.py echo "\n(7/7) Combining datas by inner join... (run code with different method parameter for different result)" python3 7_combine_data.py echo "Processing done!"
IntroDS2017/TrafficAccidents
process_data.sh
Shell
mit
1,044
#!/bin/bash # Set up to Git er goin! # Git er goin if ! [[ -f $DOT_ROOT/git/gitconfig.symlink ]]; then question 'First, what is your github user name?' read -e git_user_name question 'Thanks! Now, what is your github user email?' read -e git_user_email sed -e "s/USER_NAME/$git_user_name/g" -e "s/USER_EMAIL/$git_user_email/g" $DOT_ROOT/git/gitconfig.symlink.template > $DOT_ROOT/git/gitconfig.symlink else working 'Edit gitconfig.symlink if config changes desired' fi success 'Looks like your gitconfig is all set...' # Create SSH key for github because fancy PUB_KEY=$HOME/.ssh/id_rsa.pub message 'Checking for SSH key, generating one if it does not exist...' if ! [[ -f $PUB_KEY ]]; then [[ -f $PUB_KEY ]] || ssh-keygen -t rsa # Copy and open github SSH page because extra fancy message 'Copying public key to clipboard. Paste it into your Github account...' [[ -f $PUB_KEY ]] && cat $PUB_KEY | pbcopy open 'https://github.com/account/ssh' fi
dsgrant/dotfiles
git/init.sh
Shell
mit
972
#!/bin/sh # CYBERWATCH SAS - 2017 # # Security fix for RHSA-2012:0144 # # Security announcement date: 2012-02-17 11:51:47 UTC # Script generation date: 2017-01-11 21:23:59 UTC # # Operating System: Red Hat 6 # Architecture: x86_64 # # Vulnerable packages fix on version: # - flash-plugin.i686:10.3.183.15-1.el6 # # Last versions recommanded by security team: # - flash-plugin.i686:24.0.0.194-1.el6_8 # # CVE List: # - CVE-2012-0752 # - CVE-2012-0753 # - CVE-2012-0754 # - CVE-2012-0755 # - CVE-2012-0756 # - CVE-2012-0767 # # More details: # - https://www.cyberwatch.fr/vulnerabilites # # Licence: Released under The MIT License (MIT), See LICENSE FILE sudo yum install flash-plugin.i686-24.0.0.194 -y
Cyberwatch/cbw-security-fixes
Red_Hat_6/x86_64/2012/RHSA-2012:0144.sh
Shell
mit
725
#!/bin/bash ~/Android/Sdk/emulator/emulator -avd PhoneAPI23 > /dev/null 2>&1htop adb reverse tcp:7007 tcp:7007 npm run storybook xdg-open http://localhost:7007/
timLoewel/sites
runStorybook.sh
Shell
mit
162
#!/bin/sh # # Note runlevel 2345, 86 is the Start order and 85 is the Stop order # # chkconfig: 2345 86 85 # description: phpsock library server daemon export PHPSOCK_HOME=/home/phpsock export PHPSOCK_LOGS=/var/log/phpsock-broker export PHPSOCK_USER=nginx export PHPSOCK_GROUP=nginx export PATH=$PATH:/usr/local/bin export NODE_PATH=$NODE_PATH:/usr/local/lib/node_modules case "$1" in start) cd $PHPSOCK_HOME sudo HOME=$PHPSOCK_HOME -u $PHPSOCK_USER -g $PHPSOCK_GROUP forever stop broker.js > /dev/null 2>&1 sudo HOME=$PHPSOCK_HOME -u $PHPSOCK_USER -g $PHPSOCK_GROUP forever start -a -p . --killSignal=SIGABRT --minUptime 1000 --spinSleepTime 100 -e $PHPSOCK_LOGS/error.log -l $PHPSOCK_LOGS/forever.log --pidFile ./forever.pid broker.js ;; stop) cd $PHPSOCK_HOME sudo HOME=$PHPSOCK_HOME -u $PHPSOCK_USER -g $PHPSOCK_GROUP forever stop broker.js ;; restart) cd $PHPSOCK_HOME sudo HOME=$PHPSOCK_HOME -u $PHPSOCK_USER -g $PHPSOCK_GROUP forever restart broker.js ;; status) cd $PHPSOCK_HOME sudo HOME=$PHPSOCK_HOME -u $PHPSOCK_USER -g $PHPSOCK_GROUP forever list | grep broker.js ;; log) cd $PHPSOCK_HOME sudo HOME=$PHPSOCK_HOME -u $PHPSOCK_USER -g $PHPSOCK_GROUP ./bin/log.sh ;; *) echo "Usage: /etc/init.d/phpsock-broker {start|stop|restart|status|log}" exit 1 ;; esac exit 0
dvorakjan/phpsock-broker
bin/initScript.sh
Shell
mit
1,323
#!/bin/bash WGET=$(which wget) CURL=$(which curl) SUDO=$(which sudo) CHMOD=$(which chmod) TEE=$(which tee) CP=$(which cp) SERVICE=$(which service) CERTS_URL="https://raw.githubusercontent.com/kennedyj/nagios-check-apache-certs/master/check_apache_certs.sh" BALANCER_URL="https://raw.githubusercontent.com/kennedyj/nagios-check-apache-certs/master/check_apache_balancer.py" NAGIOS_HOME="/usr/local/nagios" NRPE_CONFIG="$NAGIOS_HOME/etc/nrpe.cfg" CHECK_PATH="$NAGIOS_HOME/libexec" function install_check { CHECK_URL=$1 CHECK_FILE=$2 if [ ! -e "$CHECK_PATH" ]; then echo "The plugin path doesn't exist" exit 1 fi if [ ! -e "$NRPE_CONFIG" ]; then echo "The NRPE configuration doesn't exist" exit 1 fi if [ -z "$CHECK_FILE" ]; then echo "No check specified" exit 1 fi if [ -z "$CHECK_URL" ]; then echo "No URL specified" exit 1 fi if [ -e "$WGET" ]; then $SUDO $WGET $CHECK_URL -O $CHECK_PATH/$CHECK_FILE elif [ -e "$CURL" ]; then $SUDO $CURL -s $CHECK_URL -o $CHECK_PATH/$CHECK_FILE else echo "you need to have either curl or wget installed" exit 1 fi $SUDO $CHMOD 755 $CHECK_PATH/$CHECK_FILE # add check grep -q "^command\[$CHECK_FILE\]" $NRPE_CONFIG if [ $? -ne 0 ]; then echo "command[$CHECK_FILE]=$CHECK_PATH/$CHECK_FILE" | $SUDO $TEE -a $NRPE_CONFIG > /dev/null fi } $SUDO $CP $NRPE_CONFIG /tmp/nrpe-backup.cfg install_check $CERTS_URL check_apache_certs install_check $BALANCER_URL check_apache_balancer if [ -e "$SERVICE" ]; then $SUDO $SERVICE nrpe restart fi
kennedyj/nagios-apache-checks
bin/install.sh
Shell
mit
1,584
#!/usr/bin/env bash function _exists () { local thefile=${1} if [ -z ${thefile} ]; then echo "Expected a path as first parameter, got none" >&2 return 1 fi if [ ! -f ${thefile} ] && [ ! -d ${thefile} ]; then echo "Invalid path: ${thefile}" >&2 return 2 fi if [ ! -r ${thefile} ]; then echo "Can't read path: ${thefile}" >&2 return 3 fi return 0 } function _existsordie () { local thefile=${1} local msg=$( _exists ${thefile} ) local ret=${?} if [ 0 -ne ${ret} ]; then echo "${msg}" exit ${ret} fi } wd=$( dirname $( realpath ${0} ) ) self=$( basename ${0} ) action=${1:-st} host=$(hostname) invdir=${wd}/inventory fmasters=${invdir}/vms-masters fworkers=${invdir}/vms-workers vmdir=/vms _existsordie ${vmdir} _exists ${fmasters} 2>>/dev/null [ 0 -eq ${?} ] && masters=$( cat ${fmasters} ) || masters="" _exists ${fworkers} 2>>/dev/null [ 0 -eq ${?} ] && workers=$( cat ${fworkers} ) || workers="" qemubin=$( which qemu-system-x86_64 ) if [ ! -x ${qemubin} ]; then echo "Can't execute ${qemubin}" >&2 exit 3 fi ext=qcow2 vncid=2 vncopt='' basemac=52:54:00:12:34:60 machead=${basemac%:*} mactail=${basemac##*:} function _vmstart () { local thisvm=${1} if [ -z ${thisvm} ]; then echo "Expected VM name as first parameter, got none" >&2 return 1 fi local cores=${2:-8} local mem=${3:-16G} local vmpath=${vmdir}/${thisvm}.${ext} if [ -f ${vmpath} ]; then echo "[start] ${thisvm} on ${host}:${vncid} w/mac ${machead}:${mactail}" ${qemubin} -enable-kvm -daemonize -cpu host -net bridge,name=vlan0,br=br0 -hda ${vmpath} -smp ${cores} -m ${mem} -net nic,macaddr=${machead}:${mactail} -vnc ${vncopt}:${vncid} 2>>/dev/null & (( mactail++ )) (( vncid++ )) #sleep 1s else echo "[error] VM not found: ${vmpath}" >&2 fi } function _vmstop () { local thisvm=${1} local stopcmd="poweroff" if [ -z ${thisvm} ]; then echo "Expected VM name as first parameter, got none" >&2 return 1 fi echo "[stop] ${thisvm}, sent ${stopcmd} command" ssh ${thisvm} "${stopcmd}" 2>>/dev/null & } function up () { # check that NO VMs are running [ -n "$( st 2>>/dev/null )" ] && exit 4 echo -e "\nStarting all VMs:\n" # bring up master VMs for vm in ${masters[@]}; do _vmstart ${vm} 2 4G done # bring up on worker VMs for vm in ${workers[@]}; do _vmstart ${vm} done; echo } function dn () { # check that some VMs are running [ -z "$( st 2>>/dev/null )" ] && exit 5 echo -e "\nStopping all VMs:\n" # bring down master and worker VMs local allvms=( ${workers[@]} ${masters[@]} ) for vm in "${allvms[@]}"; do _vmstop ${vm} done; echo } function st () { local vmprocs=$( pgrep -u root -a qemu-system ) if [ -n "${vmprocs}" ]; then echo -e "\nCurrent running VMs:\n" vmprocs=$( echo 'PID HD P M MAC VNC'; echo "${vmprocs}" ) echo -e "${vmprocs}\n" | sed -r -e 's/-[a-Z0-9-]+ //g' -e "s#${qemubin}.*br0 ##g" -e 's/nic.*=//g' | column -et else echo -e "\nNo VMs currently up\n" >&2 fi } function h () { cat << EOF Usage: ${self} <action> Where <action> can be: h - show this usage information up - start all vms dn - stop all vms st - show vm info EOF } # die if action starts with underscore '_' if [ '_' == ${action:0:1} ]; then echo "Error: actions can't start with an underscore '_', with action ${action}" >&2 exit 2 fi # check action maps to a function or die if [ -z $( declare -F ${action} ) ]; then echo "Invalid action '${action}'!" >&2 h exit 3 fi # execute action ${action}
aphran/vscripts
vm-manage.sh
Shell
mit
3,819
#!/bin/bash
#
# Creates CoreOS cluster on GCE.
#
PROJECT_NAME=henrik-jonsson  # GCE project, with billing enabled
ZONE=europe-west1-b          # GCE zone
MACHINE_TYPE=f1-micro        # GCE machine type
#MACHINE_TYPE=n1-standard-1

# All valid images can be shown with:
# gcloud compute images list --project coreos-cloud
gcloud compute --project ${PROJECT_NAME} instances create core1 core2 \
  --image https://www.googleapis.com/compute/v1/projects/coreos-cloud/global/images/coreos-stable-522-4-0-v20150108 \
  --zone ${ZONE} --machine-type ${MACHINE_TYPE} --metadata-from-file user-data=cloud-config.yaml
hkjn/junk
coreos/gce/create_cluster.sh
Shell
mit
605
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2782-1
#
# Security announcement date: 2015-10-27 00:00:00 UTC
# Script generation date:     2017-01-01 21:04:51 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - apport:2.0.1-0ubuntu17.13
#
# Last versions recommended by security team:
# - apport:2.0.1-0ubuntu17.15
#
# CVE List:
# - CVE-2015-1341
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade apport=2.0.1-0ubuntu17.15 -y
Cyberwatch/cbw-security-fixes
Ubuntu_12.04_LTS/x86_64/2015/USN-2782-1.sh
Shell
mit
622
#!/bin/bash

# when we get tags, make it possible to get a cleaner output, like the
# current release (tag)

printf "%q\n" "$( git log \
    --pretty="format:%ci, author: \"%aN\" <%aE>, commit: %h" -1 "${1}" || \
    echo no git \
)"
fofoni/openwar
get-git-version.sh
Shell
mit
234
#!/usr/bin/env bash
python3 anagrams.py rats | diff ../test/rats_heap_expected - && \
python3 anagrams_itertools.py rats && \
python3 animals.py && \
python3 basic_iterables.py && \
python3 custom_iterable.py && \
python3 custom_type.py && \
python3 early_declaration_error.py && \
python3 -m py_compile genexp.py && \
python3 inheritance.py && \
python3 kwargs_examples.py && \
python3 log_decorator_example.py && \
python3 memoized_fibonacci.py && \
python3 mi.py && \
python3 object_pool.py && \
python3 powers_of_two.py && \
python3 powers_of_two_infinite.py && \
python3 proxy.py && \
python3 scope.py && \
python3 simple_keyword_arguments.py && \
python3 simple_types.py && \
python3 sum_of_even_squares.py && \
python3 triple_comprehensions.py && \
python3 triple.py | diff ../test/triple_expected - && \
python3 types_are_types.py && \
python3 varargs.py && \
python3 vectors.py && \
python3 wordcount.py < ../test/wordcount_ascii_input | diff ../test/wordcount_ascii_expected -
if [ $? -ne 0 ]; then
    echo
    echo "*** PYTHON TESTS FAILED ***"
    exit 1
else
    echo
    echo "PYTHON TESTS PASSED"
fi
rtoal/polyglot
python/test.sh
Shell
mit
1,117
#!/bin/sh
#
#
# originally contributed by @rbuffat to Toblerity/Fiona
set -e

GDALOPTS=" --with-ogr \
           --with-geos \
           --with-expat \
           --with-pg \
           --without-libtool \
           --with-libtiff=internal \
           --with-geotiff=internal \
           --without-gif \
           --without-grass \
           --without-libgrass \
           --without-cfitsio \
           --without-pcraster \
           --without-netcdf \
           --with-png=internal \
           --with-jpeg=internal \
           --without-gif \
           --without-ogdi \
           --without-fme \
           --without-hdf4 \
           --without-hdf5 \
           --without-jasper \
           --without-ecw \
           --without-kakadu \
           --without-mrsid \
           --without-jp2mrsid \
           --without-bsb \
           --without-grib \
           --without-mysql \
           --without-ingres \
           --without-xerces \
           --without-odbc \
           --with-curl \
           --with-sqlite3 \
           --without-dwgdirect \
           --without-idb \
           --without-sde \
           --without-perl \
           --without-php \
           --without-ruby \
           --without-python"

# Create build dir if not exists
if [ ! -d "$GDALBUILD" ]; then
  mkdir $GDALBUILD;
fi
if [ ! -d "$GDALINST" ]; then
  mkdir $GDALINST;
fi

ls -l $GDALINST

if [ "$GDALVERSION" = "trunk" ]; then
    # always rebuild trunk
    svn checkout https://svn.osgeo.org/gdal/trunk/gdal $GDALBUILD/trunk
    cd $GDALBUILD/trunk
    ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS
    make -s -j 2
    make install
elif [ ! -d "$GDALINST/gdal-$GDALVERSION" ]; then
    # only build if not already installed
    cd $GDALBUILD
    wget http://download.osgeo.org/gdal/$GDALVERSION/gdal-$GDALVERSION.tar.gz
    tar -xzf gdal-$GDALVERSION.tar.gz
    cd gdal-$GDALVERSION
    ./configure --prefix=$GDALINST/gdal-$GDALVERSION $GDALOPTS
    make -s -j 2
    make install
fi

# change back to travis build dir
cd $TRAVIS_BUILD_DIR
smnorris/pgdb
scripts/travis_gdal_install.sh
Shell
mit
2,074
#!/bin/bash
hash 'pico2wave' 2>/dev/null || {
    dialog_yesno "SVOX Pico doesn't seem to be installed.\nDo you want to install it?" true >/dev/null && {
        if [[ "$platform" == "linux" ]]; then
            echo -e "Updating..."
            sudo apt-get -qq update || exit 1
            echo -e "Upgrading..."
            sudo apt-get -qq upgrade -y || exit 1
            echo -e "Downloading & Installing..."
            sudo apt-get install -y libttspico-utils >/dev/null || exit 1
            dialog_msg "Installation Completed"
        elif [[ "$platform" == "osx" ]]; then
            dialog_msg "SVOX Pico is not available on your platform"
        else
            dialog_msg "Unknown platform"
            exit 1
        fi
    }
}

svox_pico_TTS () { # TTS () {} Speaks text $1
    wavfile="$jv_cache_folder/tts.wav"
    /usr/bin/pico2wave -l ${language//_/-} -w "$wavfile" "$1"
    jv_play "$wavfile"
}
alexylem/jarvis
tts_engines/svox_pico/main.sh
Shell
mit
918
#!/bin/sh
node-debug code1.js
hansrwindhoff/NodeJsDebugging
2 and for node/runnodeinspector.sh
Shell
mit
30
#!/usr/bin/env bash

DOMAIN=$1
WWW=$2

DOCKER_MACHINE_VERSION=`docker-machine --version`
LETSENCRYPT_MACHINE="letsencrypt.hw.dnt.no"
CERTS_DIR=`pwd`/ssl-certs
CERT_FILE=${CERTS_DIR}/${DOMAIN}.pem

USAGE="""Usage: $0 <domain name> [www]\n\nInclude the [www] argument to add www.<domain name> to the certificate"""

if [ -z "${DOMAIN}" ]; then
  echo -e ${USAGE}
  exit 1
fi

if [ -z "${WWW}" ]; then
  LETSENCRYPT_DOMAIN_ARGS="-d ${DOMAIN}"
elif [ "${WWW}" == "www" ]; then
  LETSENCRYPT_DOMAIN_ARGS="-d ${DOMAIN} -d www.${DOMAIN}"
else
  echo -e ${USAGE}
  exit 1
fi

if [[ ! "${DOCKER_MACHINE_VERSION}" =~ ^docker-machine\ version\ 0.6. ]]; then
  echo "Sorry, this script is compatible with docker-machine version 0.6; you're running: ${DOCKER_MACHINE_VERSION}"
  exit 1
fi

eval "$(docker-machine env ${LETSENCRYPT_MACHINE})"
ACTIVE_MACHINE=`docker-machine active`

if [ "${ACTIVE_MACHINE}" != "${LETSENCRYPT_MACHINE}" ]; then
  echo "Please add ${LETSENCRYPT_MACHINE} to your Docker machines."
  exit 1
fi

if [ ! -e $CERTS_DIR ]; then
  echo "Certificate directory ${CERTS_DIR} does not exist."
  exit 1
fi

if [ -e ${CERT_FILE} ]; then
  echo "Certificate ${CERT_FILE} already exists."
  read -p "Renew? [y/N] " yn
  case $yn in
    [Yy]*) ;;
    *) exit 0;;
  esac
fi

echo "Domain name args for letsencrypt: ${LETSENCRYPT_DOMAIN_ARGS}"
read -p "Continue? [y/N] " yn
case $yn in
  [Yy]*) ;;
  *) exit 0;;
esac

echo "Requesting certificate with ${LETSENCRYPT_MACHINE}, please wait..."

docker run \
  -it \
  --rm \
  -v /certs:/etc/letsencrypt/archive \
  --name letsencrypt \
  -p 0.0.0.0:80:80 \
  quay.io/letsencrypt/letsencrypt \
  certonly \
  --standalone \
  --standalone-supported-challenges http-01 \
  --agree-tos \
  --register-unsafely-without-email \
  ${LETSENCRYPT_DOMAIN_ARGS}

DOCKER_RUN_STATUS=$?
if [ $DOCKER_RUN_STATUS -ne 0 ]; then
  echo "Certificate issuance exited with code ${DOCKER_RUN_STATUS}, aborting..."
  exit $DOCKER_RUN_STATUS
fi

echo "Done, moving certificate to your local machine..."

docker-machine scp ${LETSENCRYPT_MACHINE}:/certs/${DOMAIN}/privkey1.pem ${CERT_FILE}-key
docker-machine scp ${LETSENCRYPT_MACHINE}:/certs/${DOMAIN}/fullchain1.pem ${CERT_FILE}-chain
cat ${CERT_FILE}-key ${CERT_FILE}-chain > ${CERT_FILE}
rm ${CERT_FILE}-key ${CERT_FILE}-chain

docker-machine ssh ${LETSENCRYPT_MACHINE} "sudo rm -rf /certs/${DOMAIN}/"

echo "Ok, your certificate is stored in: ${CERT_FILE}"
Turistforeningen/sherpa-prod
haproxy/issue-certificate.sh
Shell
mit
2,444
echo "${YELLOW}Checking air updates...${TXTRESET}" cd $HOME git clone https://github.com/digitoimistodude/air cd $STARTER_THEME_PATH_TEMP git stash git pull echo "${YELLOW}Copying starter theme to project folder ${PROJECTS_HOME}/${PROJECT_NAME}/content/themes/${THEME_NAME}${TXTRESET}" cp -R ${STARTER_THEME_PATH_TEMP} ${PROJECT_THEME_PATH} echo "${YELLOW}Creating acf-json...${TXTRESET}" mkdir -p ${PROJECT_THEME_PATH}/acf-json
digitoimistodude/air
bin/tasks/get-theme.sh
Shell
mit
429
#!/bin/bash
while :
do
    echo "Press [CTRL+C] to stop.."
    sleep 10
done
zmarkan/breaking_bash
waldo.sh
Shell
mit
71
#!/bin/bash

export JAVA_OPTS="${JAVA_OPTS} -javaagent:${AGENT_PATH}"

/entrypoint.py
mritd/dockerfile
jira/hijack.sh
Shell
mit
86
#!/bin/bash echo "YeAPF 0.8.53 shipping js parts"; echo "Copyright (C) 2004-2017 Esteban Daniel Dortta - [email protected]"; tmp=`mktemp` echo "Generating 7 years license..." license=`php tools/ylicenseExpiration.php +2555 | grep x58e1` cat skel/webApp/yeapf.db.ini | sed "s/x58e1d9ca63ef85abef352d3306a6fac3=.*/$license/g" > $tmp mv $tmp skel/webApp/yeapf.db.ini echo "Compiling..." temp=`php tools/compile-yloader.php` new=`echo "$temp" | cut -d'.' --complement -f2-` new="$new.min.js" echo "Minifying... ($temp)" echo " yloader.js" java -jar tools/compressors/compiler.jar --language_in=ECMASCRIPT5 --js $temp --js_output_file $new for a in 'ysandboxifc' 'ystorage' 'yifc' 'yapp' 'ycomm-worker' do echo " $a.js" b="app-src/js/min/$a" a="app-src/js/$a" java -jar tools/compressors/compiler.jar --language_in=ECMASCRIPT5 --js "$a.js" --js_output_file "$b.min.js" done echo "Spreading..." php tools/spread-js.php $temp $new echo "Compiling config2.php..." php tools/compile-configure.php echo "Removing $temp" rm -f $temp echo "Removing $new" rm -f $new echo "All done"
EDortta/YeAPF
0.8.53/tools/ship-js.sh
Shell
mit
1,093
#!/usr/bin/env sh
# generated from catkin/cmake/template/setup.sh.in

# Sets various environment variables and sources additional environment hooks.
# It tries its best to undo changes from a previously sourced setup file before.
# Supported command line options:
# --extend: skips the undoing of changes from a previously sourced setup file

# since this file is sourced either use the provided _CATKIN_SETUP_DIR
# or fall back to the destination set at configure time
: ${_CATKIN_SETUP_DIR:=/home/weiyu/Dev/ROS/sddr_ws/devel}
_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py"
unset _CATKIN_SETUP_DIR

if [ ! -f "$_SETUP_UTIL" ]; then
  echo "Missing Python script: $_SETUP_UTIL"
  return 22
fi

# detect if running on Darwin platform
_UNAME=`uname -s`
_IS_DARWIN=0
if [ "$_UNAME" = "Darwin" ]; then
  _IS_DARWIN=1
fi
unset _UNAME

# make sure to export all environment variables
export CMAKE_PREFIX_PATH
if [ $_IS_DARWIN -eq 0 ]; then
  export LD_LIBRARY_PATH
else
  export DYLD_LIBRARY_PATH
fi
unset _IS_DARWIN
export PATH
export PKG_CONFIG_PATH
export PYTHONPATH

# remember type of shell if not already set
if [ -z "$CATKIN_SHELL" ]; then
  CATKIN_SHELL=sh
fi

# invoke Python script to generate necessary exports of environment variables
# use TMPDIR if it exists, otherwise fall back to /tmp
if [ -d "${TMPDIR}" ]; then
  _TMPDIR="${TMPDIR}"
else
  _TMPDIR=/tmp
fi
_SETUP_TMP=`mktemp "${_TMPDIR}/setup.sh.XXXXXXXXXX"`
unset _TMPDIR
if [ $? -ne 0 -o ! -f "$_SETUP_TMP" ]; then
  echo "Could not create temporary file: $_SETUP_TMP"
  return 1
fi
CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ >> "$_SETUP_TMP"
_RC=$?
if [ $_RC -ne 0 ]; then
  if [ $_RC -eq 2 ]; then
    echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': maybe the disk is full?"
  else
    echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC"
  fi
  unset _RC
  unset _SETUP_UTIL
  rm -f "$_SETUP_TMP"
  unset _SETUP_TMP
  return 1
fi
unset _RC
unset _SETUP_UTIL
. "$_SETUP_TMP"
rm -f "$_SETUP_TMP"
unset _SETUP_TMP

# source all environment hooks
_i=0
while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do
  eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i
  unset _CATKIN_ENVIRONMENT_HOOKS_$_i
  eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  # set workspace for environment hook
  CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace
  . "$_envfile"
  unset CATKIN_ENV_HOOK_WORKSPACE
  _i=$((_i + 1))
done
unset _i

unset _CATKIN_ENVIRONMENT_HOOKS_COUNT
wxue/sddr_ws
devel/setup.sh
Shell
mit
2,533
#!/bin/sh

prog=lowpass2ndOrderCascade_socp_test.m

depends="lowpass2ndOrderCascade_socp_test.m \
test_common.m stability2ndOrderCascade.m print_polynomial.m print_pole_zero.m \
lowpass2ndOrderCascade_socp.m x2tf.m casc2tf.m tf2casc.m qroots.m qzsolve.oct"

tmp=/tmp/$$
here=`pwd`
if [ $? -ne 0 ]; then echo "Failed pwd"; exit 1; fi

fail()
{
        echo FAILED ${0#$here"/"} $prog 1>&2
        cd $here
        rm -rf $tmp
        exit 1
}

pass()
{
        echo PASSED ${0#$here"/"} $prog
        cd $here
        rm -rf $tmp
        exit 0
}

trap "fail" 1 2 3 15

mkdir $tmp
if [ $? -ne 0 ]; then echo "Failed mkdir"; exit 1; fi
for file in $depends;do \
  cp -R src/$file $tmp; \
  if [ $? -ne 0 ]; then echo "Failed cp "$file; fail; fi \
done
cd $tmp
if [ $? -ne 0 ]; then echo "Failed cd"; fail; fi

#
# the output should look like this
#
cat > test.a.ok << 'EOF'
a = [ -0.0021092647,   0.0004069327,   0.0076883393,   0.0051931734, ...
      -0.0115231074,  -0.0210543663,   0.0019486078,   0.0417050316, ...
       0.0578424349,   0.0390044906,   0.0123776724 ]';
EOF
if [ $? -ne 0 ]; then echo "Failed x.a output cat"; fail; fi

cat > test.d.ok << 'EOF'
d = [  1.0000000000,  -2.4277939753,   3.0446749910,  -2.3351332890, ...
       1.1383562682,  -0.3371599421,   0.0468622435 ]';
EOF
if [ $? -ne 0 ]; then echo "Failed x.d output cat"; fail; fi

#
# run and see if the results match
#
echo "Running $prog"
octave --no-gui -q $prog >test.out 2>&1
if [ $? -ne 0 ]; then echo "Failed running $prog"; fail; fi

diff -Bb test.a.ok lowpass2ndOrderCascade_socp_test_a_coef.m
if [ $? -ne 0 ]; then echo "Failed diff -Bb test.a.ok"; fail; fi

diff -Bb test.d.ok lowpass2ndOrderCascade_socp_test_d_coef.m
if [ $? -ne 0 ]; then echo "Failed diff -Bb test.d.ok"; fail; fi

#
# this much worked
#
pass
robertgj/DesignOfIIRFilters
test/00/t0030a.sh
Shell
mit
1,811
#!/bin/bash
#SBATCH --partition=mono
#SBATCH --ntasks=1
#SBATCH --time=4-0:00
#SBATCH --mem-per-cpu=8000
#SBATCH -J Deep-DAE_SDAE_4_inc_bin_CAE_tanh
#SBATCH -e Deep-DAE_SDAE_4_inc_bin_CAE_tanh.err.txt
#SBATCH -o Deep-DAE_SDAE_4_inc_bin_CAE_tanh.out.txt
source /etc/profile.modules
module load gcc
module load matlab
cd ~/deepLearn && srun ./deepFunction 4 'DAE' 'SDAE' '128 1000 1500 10' '0 1 1 1' '4_inc_bin' 'CAE_tanh' "'iteration.n_epochs', 'learning.lrate', 'use_tanh', 'noise.drop', 'noise.level', 'rica.cost', 'cae.cost'" '200 1e-3 1 0 0 0.01 0' "'iteration.n_epochs', 'use_tanh'" '200 1'
aciditeam/matlab-ts
jobs/deepJobs_DAE_SDAE_4_inc_bin_CAE_tanh.sh
Shell
mit
605
python select_top_decoded.py $1 $2

#python select_top_decoded.py /mnt/vol/gfsai-east/ai-group/users/jiwei/Generation/conversation/data/decode/dialogue_BS_four_iter1_Bsize1.txt top_response_four_index_iter1.txt
#python NumToString.py movie_25000 top_response_four_index_iter1.txt top_response_four_word_iter1.txt

#python select_top_decoded.py /data/users/jiwel/fbsource/fbcode/experimental/deeplearning/jiwei/babi/rlbabi2/Atten/decode/dialogue_decode/dialogue_BS_iter1_Bsize1.txt top_response_four_index_iter0.txt
#python NumToString.py movie_25000 top_response_four_index_iter0.txt top_response_four_word_iter0.txt

#python select_top_decoded.py /mnt/vol/gfsai-east/ai-group/users/jiwei/Generation/conversation/data/decode/dialogue_BS_four_iter2_Bsize1.txt top_response_four_index_iter2.txt
#python NumToString.py movie_25000 top_response_four_index_iter2.txt top_response_four_word_iter2.txt

#python select_top_decoded.py /mnt/vol/gfsai-east/ai-group/users/jiwei/Generation/conversation/data/dialogue_decode/dialogue_BS_iter3_Bsize1.txt top_response_four_index_iter3.txt
#python NumToString.py movie_25000 top_response_four_index_iter3.txt top_response_four_word_iter3.txt
jiweil/Neural-Dialogue-Generation
Distill/extract_top/select_top_decoded.sh
Shell
mit
1,178
virt-clone -o w7-base -n w7-off2007 -f /home/vmiklos/virt/w7-off2007/w7-off2007.img --connect=qemu:///system
vmiklos/vmexam
virt/clone-w7-off2007.sh
Shell
mit
109
#!/bin/bash

SCRIPT_NAME=$(basename $BASH_SOURCE)
SCRIPT_LOGFILE="./logs/"$(basename -s .sh $BASH_SOURCE)".log"
SCRIPT_ENVFILE="./logs/"$(basename -s .sh $BASH_SOURCE)".env"
mkdir -p ./logs && chmod 755 ./logs

echo "running "$SCRIPT_NAME

# tested: works on ubuntu 14.04.3 gnome

# checking nodejs
if hash apm 2>/dev/null; then
    echo "  checking apm => ok"
else
    echo "  checking apm => apm not installed, please install atom and retry"
    exit;
fi

apm --no-confirm install nuclide-installer
dchapkine/ubuntu-desktop-post-install
install_nuclide.sh
Shell
mit
495
docker start damianmacgithubio_web_1
DamianMac/damianmac.github.io
start.sh
Shell
mit
36
#!/bin/bash
# A small script to automate installation of dependencies

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$DIR/../.."

catkin_make || true

sudo apt-get update
source ./devel/setup.sh
rosdep update
rosdep -y install igvc
sudo apt-get -y install qt5-default wget expect

wget https://files.gitter.im/RoboJackets/igvc-software/BGRW/flycapture2-2.8.3.1-amd64-pkg.tgz
tar -xvf flycapture2-2.8.3.1-amd64-pkg.tgz
cp ./src/igvc-software/util/flycapture-setup.exp flycapture2-2.8.3.1-amd64/
cd flycapture2-2.8.3.1-amd64

# Install dependencies for flycapture
sudo apt-get -y install libatkmm-1.6-1 libcairomm-1.0-1 libglade2-0 libglademm-2.4-1c2a libglibmm-2.4-1c2a \
    libgtkmm-2.4-1c2a libpangomm-1.4-1 libsigc++-2.0-0c2a libatkmm-1.6-dev libcairomm-1.0-dev libglibmm-2.4-dev \
    libgtkglext1-dev libgtkglextmm-x11-1.2-0 libgtkglextmm-x11-1.2-dev libgtkmm-2.4-dev libpangomm-1.4-dev \
    libpangox-1.0-dev libsigc++-2.0-dev libxmu-dev libxmu-headers

sudo ./flycapture-setup.exp

# Cross our fingers...
nareddyt/igvc-software
util/indigo-setup.sh
Shell
mit
1,021
#!/bin/bash
set -e

# Upgrade the Package Manager Sources
sudo apt-get -y upgrade

# Update the Package Manager Sources
sudo apt-get -y update

# Install Common Packages
sudo apt-get -y install software-properties-common zip unzip libssl-dev openssl wget build-essential cmake

# Removing any pre-installed ffmpeg and x264
sudo apt-get -y remove ffmpeg x264 libx264-dev

# Install dependencies
sudo apt-get -y install libopencv-dev pkg-config yasm libjpeg-dev libjasper-dev libavcodec-dev libavformat-dev libswscale-dev libdc1394-22-dev libxine-dev libgstreamer0.10-dev libgstreamer-plugins-base0.10-dev libv4l-dev python-dev python-numpy libtbb-dev libqt4-dev libgtk2.0-dev libfaac-dev libmp3lame-dev libopencore-amrnb-dev libopencore-amrwb-dev libtheora-dev libvorbis-dev libxvidcore-dev x264 v4l-utils ffmpeg qt5-default

# OpenCV version
CV_VERSION="$(wget -q -O - http://sourceforge.net/projects/opencvlibrary/files/opencv-unix | egrep -m1 -o '\"[0-9](\.[0-9]+)+' | cut -c2-)"

# OpenCV Package URL
CV_URL="http://sourceforge.net/projects/opencvlibrary/files/opencv-unix/$CV_VERSION/opencv-"$CV_VERSION".zip/download"

# OpenCV Directory Name
CV_DIR="opencv-$CV_VERSION"

# OpenCV Install Directory
CV_INSTALL_LOCATION="/home/$USER/.libs"

# Create the directory
mkdir -p "$CV_INSTALL_LOCATION"

# Download OpenCV
wget -O "/tmp/$CV_DIR.zip" "$CV_URL"

# Extract it
unzip "/tmp/$CV_DIR.zip" -d "$CV_INSTALL_LOCATION"

# Change to the directory
cd "$CV_INSTALL_LOCATION/$CV_DIR"

# Make a build directory
mkdir build
cd build

# Create makefile
cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D WITH_TBB=ON -D BUILD_NEW_PYTHON_SUPPORT=ON -D WITH_V4L=ON -D INSTALL_C_EXAMPLES=ON -D INSTALL_PYTHON_EXAMPLES=ON -D BUILD_EXAMPLES=ON -D WITH_QT=ON -D WITH_OPENGL=ON ..

# Make
make -j2
sudo sh -c 'echo "/usr/local/lib" > /etc/ld.so.conf.d/opencv.conf'
sudo ldconfig

# Clean up
rm -rf "/tmp/$CV_DIR.zip"
chaosmail/sh-install
dist/opencv.sh
Shell
mit
1,924
# npm install --save-dev electron-rebuild

# Every time you run "npm install", run this:
./node_modules/.bin/electron-rebuild
ShahNewazKhan/ai8
npm-prerun.sh
Shell
cc0-1.0
125
#!/bin/bash base="graphviz-web" git_repo=${base}-static OUT=output_$git_repo >$OUT cd /home/gviz rm ${git_repo}.tgz >> $OUT 2>&1 rm doc.tgz >> $OUT 2>&1 #Set timestamps, create compressed file bin/git_archive_static.sh $git_repo >> $OUT 2>&1 if [ ! -s $OUT ] then rm $OUT fi
ellson/graphviz-build
hg/bin/git_get_static_tgz.sh
Shell
epl-1.0
280
#!/bin/sh

menu() {
    echo '
    ============================================================
    |                       Utility Menu                       |
    |   1. kodi (xbmc media center)                            |
    |   2. links (browser with gui)                            |
    |   3. w3m (browser w/o gui)                               |
    |----------------------------------------------------------|
    |   w. WIFI list (iwlist scan)                             |
    |   x. WIFI connect (need sudo)                            |
    |   y. bluetooth config (bluetoothctl)                     |
    |   z. Raspberry PI config (sudo raspi-config)             |
    |   0. exit                                                |
    ============================================================
    '
    read choice
    case $choice in
        0)
            exit 0
            ;;
        1)
            echo "starting kodi..."
            kodi
            ;;
        2)
            links
            ;;
        3)
            echo "Enter the URL for w3m:"
            read w3m_url
            w3m $w3m_url
            ;;
        w)
            iwlist scan
            ;;
        x)
            wifi_connect
            ;;
        y)
            echo '
            bluetoothctl basic commands:
                help - show commands
                list - list controllers
                power on - power on bluetooth device
                power off -
                scan on - scan devices
                agent on - to register agent (need to be on before pair)
                pair <DEVICE ID> - pair a device
                trust <DEVICE ID> - trust a device
                connect <DEVICE ID> - connect to a device
                quit - quit

            Common steps to connect a device:
                power on -> scan on -> agent on -> pair -> (optional) trust -> connect -> quit
            '
            bluetoothctl
            ;;
        z)
            sudo raspi-config
            ;;
        *)
            echo "Incorrect choice. Please select again."
    esac
    menu
}

wifi_connect() {
    echo "Enter WIFI SSID (name):"
    read wifi_ssid
    echo "Enter WIFI password:"
    read wifi_pwd
    echo '
network={
    ssid="'${wifi_ssid}'"
    psk="'${wifi_pwd}'"
}' |sudo tee -a /etc/wpa_supplicant/wpa_supplicant.conf >/dev/null
    echo "WIFI $wifi_ssid added."
}

menu
samuelchen/code-snippets
raspbian/menu.sh
Shell
gpl-2.0
1,989
# Prints the bitcoin price in dollars
# Requires curl cli installed (you can change curl to wget with similar results).
# Originally based on the wan_ip checker, so it only checks once a minute.
# Uses the Coinbase.com api. There are plenty of other API's available.
# By: Steve Cook <[email protected]>
# Github: RevBooyah

run_segment() {
	local tmp_file="${TMUX_POWERLINE_DIR_TEMPORARY}/bitcoin.txt"
	local bitcoin

	if [ -f "$tmp_file" ]; then
		if shell_is_osx || shell_is_bsd; then
			stat >/dev/null 2>&1 && is_gnu_stat=false || is_gnu_stat=true
			if [ "$is_gnu_stat" == "true" ];then
				last_update=$(stat -c "%Y" ${tmp_file})
			else
				last_update=$(stat -f "%m" ${tmp_file})
			fi
		elif shell_is_linux || [ -z "$is_gnu_stat" ]; then
			last_update=$(stat -c "%Y" ${tmp_file})
		fi
		time_now=$(date +%s)

		update_period=60
		up_to_date=$(echo "(${time_now}-${last_update}) < ${update_period}" | bc)
		if [ "$up_to_date" -eq 1 ]; then
			bitcoin=$(cat ${tmp_file})
		fi
	fi

	if [ -z "$bitcoin" ]; then
		#bitcoin=`curl -s https://coinbase.com/api/v1/prices/spot_rate | sed -e 's/^.*"amount":"\([^"]*\)".*$/\1/'`
		bitcoin=`curl -s https://api.coinbase.com/v2/prices/spot?currency=USD | sed -e 's/^.*"amount":"\([^"]*\)".*$/\1/'`
		if [ "$?" -eq "0" ]; then
			echo "${bitcoin}" > $tmp_file
		elif [ -f "${tmp_file}" ]; then
			bitcoin=$(cat "$tmp_file")
		fi
	fi

	if [ -n "$bitcoin" ]; then
		echo "฿ ${bitcoin}"
	fi

	return 0
}
RevBooyah/tmux-powerline-bitcoin
bitcoin.sh
Shell
gpl-2.0
1,931
#!/bin/sh # # To add new binary into initrd, you need to update the command in # make_initrd() function # usage="usage: update_img.sh [option] -h print this help message -f update floppy disk -d update hard disk -a update both floppy and hard disk" tool_path="tools/make_initrd" image_dir="images" mnt_point="images/mnt" loop_dev="" # Exit with an error message die() { echo $@ exit -1 } check_env() { # Check whether the tool exists and executable if [ ! -x "$tool_path" ]; then die "$tool_path not exist or not executable" fi # Check whether image directory exists if [ ! -d "$image_dir" ]; then die "$image_dir not found" fi # Check whether mount point exists if [ ! -d "$mnt_point" ]; then die "$mnt_point not found" fi } make_initrd() { echo "Preparing init ramdisk" $tool_path bin/init init bin/crond crond bin/echo echo bin/unit_test unit_test \ bin/ls ls bin/cat cat bin/clear clear bin/shutdown shutdown bin/mkdir mkdir \ bin/date date bin/mount mount bin/umount umount bin/mknod mknod bin/dd dd \ bin/lsmod lsmod bin/initrd } update_kernel_and_initrd() { echo "Updating kernel and initrd" sudo rm -f "$mnt_point/matrix" sudo rm -f "$mnt_point/initrd" sudo cp bin/matrix "$mnt_point" sudo cp bin/initrd "$mnt_point" sync } find_loopdev() { res=$(sudo losetup -l | grep "$1"); if [ -z "$res" ]; then loop_dev=""; else loop_dev=$(echo $res | awk '{print $1}') echo "Found loop device: $loop_dev" fi } setup_loopdev() { loop_dev=$(sudo losetup --show --find "$1") echo "Set up loop device: $loop_dev" } update_image() { img="$image_dir/$1" if [ ! -e "$img" ]; then die "$img not found" fi make_initrd # First try whether we can find the loop device find_loopdev "$img" if [ -z "$loop_dev" ]; then setup_loopdev "$img" fi # Mount the root partition onto $mnt_point echo "Mounting $loop_dev to $mnt_point" sudo mount $loop_dev "$mnt_point" update_kernel_and_initrd # Unmount the root partition echo "Umounting $loop_dev" sudo umount "$loop_dev" # Delete loopback device echo "Deleting $loop_dev" sudo losetup -d "$loop_dev" echo "Done." } case "$1" in "-h") echo "$usage" ;; "-f") check_env update_image "matrix-flpy.img" ;; "-d") check_env update_image "matrix-hd.img" ;; "-a") check_env update_image "matrix-flpy.img" update_image "matrix-hd.img" ;; *) # If no option provided, just update the hard disk image update_image "matrix-hd.img" ;; esac
Ted-Chang/matrix
update_img.sh
Shell
gpl-2.0
2,638
# (c) 2014-2015 Sam Nazarko
# [email protected]

#!/bin/bash

. ../common.sh
# Build in native environment

if [ $1 == "rbp1" ]; then pull_source "https://github.com/bavison/arm-mem/archive/cd2c8f9202137c79f7afb77ecb87e713a0800d3c.zip" "$(pwd)/src"; fi
if [ $1 == "rbp2" ]; then pull_source "https://github.com/bavison/arm-mem/archive/master.zip" "$(pwd)/src"; fi
build_in_env "${1}" $(pwd) "rbp-armmem-osmc"
build_return=$?
if [ $build_return == 99 ]
then
	echo -e "Building package rbp-armmem"
	out=$(pwd)/files
	sed '/Package/d' -i files/DEBIAN/control
	echo "Package: ${1}-armmem-osmc" >> files/DEBIAN/control
	make clean
	pushd src/arm-mem-*
	make
	if [ $? != 0 ]; then echo "Error occured during build" && exit 1; fi
	strip_libs
	mkdir -p $out/usr/lib
	cp -ar libarmmem.so $out/usr/lib
	cp -ar libarmmem.a $out/usr/lib
	popd
	fix_arch_ctl "files/DEBIAN/control"
	dpkg -b files/ rbp-armmem-osmc.deb
	build_return=$?
fi
teardown_env "${1}"
exit $build_return
ojengwa/osmc
package/rbp-armmem-osmc/build.sh
Shell
gpl-2.0
970
#!/bin/sh

function repo_clean {
    REPO_DIR=/etc/zypp/repos.d/
    TMP_DIR=${REPO_DIR}/old/

    mkdir -p ${TMP_DIR}

    for REPO_FILE in `ls -1 ${REPO_DIR}/*.repo`; do
        rpm -qf ${REPO_FILE} &> /dev/null
        if [ $? -eq 1 ]; then
            echo "Moving ${REPO_FILE} to ${TMP_DIR}."
            mv ${REPO_FILE} ${TMP_DIR}
        fi
    done
}

if [ $# -ne 1 -o "$1" != "clean" ]; then
    echo "Usage: $0 clean"
    exit 1
fi

repo_clean
giucam/ssu
tools/ssu-repos.sh
Shell
gpl-2.0
417
#!/system/bin/sh
#
# Copyright (c) 2015 Javier Sayago <[email protected]>
# Contact: [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
PATH=/sbin:/system/sbin:/system/bin:/system/xbin
export PATH

# Inicio
mount -o remount,rw -t auto /system/lib/modules/
mount -o remount,rw -t auto /system/etc/wifi/
mount -o remount,rw -t auto /system/etc/firmware/wlan/qca_cld/
mount -t rootfs -o remount,rw rootfs
cp -f -R /res/modules/*.ko /system/lib/modules/ > /dev/null 2>&1
sync
cp -f -R /res/etc/firmware/wlan/qca_cld/WCNSS_cfg.dat /system/etc/firmware/wlan/qca_cld/ > /dev/null 2>&1
cp -f -R /res/etc/firmware/wlan/qca_cld/*.ini /system/etc/firmware/wlan/qca_cld/ > /dev/null 2>&1
cp -f -R /res/etc/firmware/wlan/qca_cld/*.bin /system/etc/firmware/wlan/qca_cld/ > /dev/null 2>&1
sync
cp -f -R /res/etc/wifi/*.conf /system/etc/wifi/ > /dev/null 2>&1
cp -f -R /res/etc/wifi/*.ini /system/etc/wifi/ > /dev/null 2>&1
cp -f -R /res/etc/wifi/*.bin /system/etc/wifi/ > /dev/null 2>&1
sync
sed -i s/560/480/ /system/build.prop > /dev/null 2>&1
mount -t rootfs -o remount,ro rootfs
mount -o remount,rw -t auto /system/etc/firmware/wlan/qca_cld/
mount -o remount,rw -t auto /system/etc/wifi/
mount -o remount,ro -t auto /system/lib/modules/
javilonas/Lonas_KL-SM-G901F
ramdisk/init.post-modules.sh
Shell
gpl-2.0
1,765
#!/bin/bash
BINPATH=`dirname $0`
python "$BINPATH/../ginn/main.py" $@
pacoqueen/ginn
bin/ginn.sh
Shell
gpl-2.0
71
#!/bin/bash # funcoeszz # vim: noet noai tw=78 # # INFORMAÇÕES: http://aurelio.net/zz # NASCIMENTO : 22 fevereiro 2000 # AUTORES : Aurélio Marinho Jargas <verde (a) aurelio net> # Thobias Salazar Trevisan <thobias (a) lcp coppe ufrj br> # DESCRIÇÃO : Funções de uso geral para bash[12], que buscam informações em # arquivos locais e dicionários/tradutores/fontes na internet # # REGISTRO DE MUDANÇAS: # 20000222 ** 1ª versão # 20000424 ++ cores, beep, limpalixo, rpmverdono # 20000504 ++ calcula, jpgi, gifi, trocapalavra, ajuda, echozz, forzz # 20000515 ++ dominiopais, trocaextensao, kill, <> $* > "$@" # -- jpgi, gifi: o identify já faz # 20000517 <> trocapalavra -> basename no $T # 20000601 -- dicbabel: agora com session &:( # 20000612 ++ celulartim, trocaarquivo # 20000823 <> dicjarg: + palavras com espaços, -- celulartim # 20000914 ++ dicabl, dicdict, dicbabel, <> nome oficial: .bashzz # -- dicdic: página fora do ar # 20000915 ++ mini-manual no cabeçalho, bugzilla, getczz, ZZPOST # !! ** 1º anúncio externo # 20000920 ++ freshmeat, ZZWWWHTML, <> ZZDUMP -> ZZWWWDUMP (idem ZZPOST) # <> ZZWWW*: -crawl -width -aacookies, <> bugzilla: saída +limpa # <> kill: mostra núm do processo # 20001108 <> dic: babi->babylon, jarg->jargon, babel->babelfish # ++ dicmichaelis, ++ cep # 20001230 <> cep: TipoPesquisa=, <> cabeçalho == /bin/bash, ++ cinclude # <> dolar: mostra data e trata acima de R$ 2 (triste realidade) # 20010214 ++ detran # 20010314 <> dominiopais: URL nova, pesquisa local, procura código ou nome # <> freshmeat: atualizada # 20010320 <> bugzilla: ++UNCONFIRMED, product: Conectiva Linux # 20010322 <> bugzilla: status entre () # 20010713 <> babelfish: re-re-arrumado, jargon: quebra 72 colunas # 20010717 <> trocaextensao: /usr/bin/rename, /tmp/zz<arquivo>.$$ # ++ arrumanome, ++ diffpalavra # 20010724 ++ ramones, <> dicdict: atualizado # 20010801 <> calcula: entrada/saída com vírgulas # 20010808 ++ dicasl, -- ramal (palha) # 20010809 ++ irpf (valeu stulzer), <> detran -> detranpr # <> dicjargon: agora local e www (tá BLOAT Kra!!!) # 20010820 <> dicdict: saída em 72 colunas, <> detranpr: mais dados # <> cep: URL nova # 20010823 ++ ZZTMP (andreas chato &:) ) # !! ** funcoeszz agora é um pacote do Conectiva Linux # 20010828 ++ maiores, <> dicmichaelis: simplificado # 20011001 ++ chavepgp (valeu missiaggia) # 20011002 <> limpalixo: aceita $1 também, ++ /usr/bin/funcoeszz (valeu gwm) # <> dolar: URL nova, formato novo (valeu bruder) # 20011015 <> arrumanome: s/^-/_/, mv -v -- # 20011018 <> "$@" na chamada do executável (++aspas) # 20011108 <> dolar: formato melhorado # 20011113 ++ cores # 20011211 <> freshmeat: mudança no formato (©), ++ detransp (valeu elton) # ++ $ZZER{DATA,HORA} # 20011217 ++ noticiaslinux, whoisbr (valeu marçal) # 20020107 ++ zzzz, $ZZPATH, --version # 20020218 ++ função temporária casadosartistas &:) # !! 
** criada a página na Internet da funções # 20020219 ++ tv # 20020222 <> cep: número do CEP, ++ sigla (valeu thobias) # 20020226 <> s/registrobr/whoisbr/ na ajuda (valeu graebin) # 20020228 ++ rpmfind (valeu thobias), s/==/=/ pro bash1 (valeu kallás) # 20020306 <> dolar: pequena mudança na saída # 20020313 <> zz: ++listagem das funções, ++--atualiza, ++--bashrc # <> chamando pelo executável, pode omitir o zz do nome # ++ TODAS as funções agora possuem --help (ou -h) # 20020315 ++ nextel, <> noticiaslinux: ++tchelinux, zz: (bashrc) # 20020419 ++ pronuncia (valeu thobias) # 20020605 <> trocaextensao: -- /usr/bin/rename (valeu leslie) # <> casadosartistas: atualizada para casa3 (valeu ataliba) # <> zzzz: pr tirado fora (bug na versão nova) # 20020611 <> casadosartistas: ++ index.php3 na URL (valeu thobias) # <> nextel: URL atualizada (valeu samsoniuk) # <> noticiaslinux: Z) URL/filtro atualizados (valeu thobias) # 20020622 <> dicasl: URL/filtro atualizados (valeu thobias) # ++ uniq, <> limpalixo: s/stdin/${1:--}/, reconhece vim # <> ramones: agora grava arquivo para consulta local (+rápido!) # 20020827 <> tv: checa 2 horas adiante se na atual falhar (valeu copag) # ++ howto (valeu thobias), <> tv: URL atualizada (valeu copag) # <> arrumanome: nome mais limpo # 20021030 <> noticiaslinux: Z) URL atualizada (valeu thobias) # <> noticiaslinux: B) filtro atualizado # <> dicbabelfish: filtro atualizado # -- casadosartistas: fim do programa # 20021107 <> pronuncia: filtro arrumado (valeu thobias) # <> pronuncia: reconhece na hora arquivos já baixado (+rápido!) # ++ senha (valeu thobias), ++ linuxnews (valeu thobias) # 20021206 <> maiores: adicionada opção -r # <> uniq: malabarismos de cat/sort/uniq trocados por um SED # <> dicbabelfish: filtro atualizado, <> dolar: filtro atualizado # ++ ascii, ++ seqzz # 20030124 <> ascii: adicionado --help # <> dicjargon: URL atualizada (valeu jic) # <> noticiaslinux: I) URL atualizada (valeu thobias) # ++ letrademusica (valeu thobias) # 20030207 ++ data, <> noticiaslinux B) filtro atualizado (valeu thobias) # <> linuxnews: D) filtro atualizado (valeu thobias) # <> pronuncia: filtro arrumado (valeu thobias) # 20030211 <> data: arrumado bug "value too great for base" (valeu sergio) # -- michaelis: a UOL fechou o acesso livre :( # <> zzzz: retirado código de pacote RPM no --atualiza # <> linuxnews: F) filtro atualizado # <> noticiaslinux: B) filtro atualizado # 20030226 ++ dicportugues: direto de Portugal (valeu marciorosa) # 20030317 <> zzzz: adicionado --tcshrc (valeu spengler) # <> echozz: não estava imprimindo texto sem cores # <> linuxnews: O) arrumado, filtro atualizado (valeu thobias) # 20030331 <> dicportugues: URL nova, filtro novo (valeu thobias) # 20030403 ++ google, dolar: agora inclui a hora da última atualização # !! ** o Thobias foi empossado como co-autor (06maio) # 20030507 <> trocapalavra: só regrava arquivos modificados # <> irpf: recebe o ano como parâmetro (2001 ou 2002) # <> noticiaslinux: Z) URL e filtros atualizados (valeu brain) # <> trocaextensao: adicionada checagem se é a mesma extensão # <> uniq: arrumada, não estava funcionado direito # <> dicbabelfish, tv: filtro atualizado # <> arrumado bug que mostrava "esta função não existe!" 
# 20030612 ++ ss, ++ maiusculas, ++ minusculas # <> irpf: restituições de 2003 incluídas # <> arrumanome: mais caracteres estranhos cadastrados # <> noticiaslinux: apagado lixo: "tee pbr.html" (valeu bernardo) # <> trocapalavra: só regrava arquivos modificados (agora sim) # <> trocapalavra: trata arquivos com espaço no nome (valeu rbp) # <> cep: URL mudou (valeu fernando braga) # <> echozz: mensagens coloridas em azul # 20030713 ++ noticiassec # <> howto: URL nova, procura mini-HOWTOs também # <> dicjargon: URL nova, cache local, mais esperto # <> linuxnews: atualizada URL para Linux Weekly News # <> arrumanome: não apaga arquivo já existente (valeu paulo henrique) # <> noticiaslinux: adicionado site Notícias Linux (valeu bernardo) # <> dicbabelfish, dolar: arrumado filtro # -- bugzilla, rpmdono, rpmdisco: retiradas do pacote # 20031002 ++ converte, contapalavra # <> howto: de volta para a URL antiga # <> noticiassec: --help arrumado # <> noticiaslinux: Z) filtro atualizado # <> tv: sbt arrumado (valeu vinicius) # <> zzzz: bashrc: checagem +esperta, quebra a linha (valeu luciano) # 20031124 <> arrumado problema de várias funções em arquivos com espaços # <> echozz: arrumado problema de expansão do asterisco # <> cep: URL e filtro atualizados (agora só por endereço) # <> dicportugues: URL e filtro atualizados (valeu geraldo) # <> pronuncia: URL atualizada (valeu moyses) # <> linuxnews: N) filtro atualizado # <> dicjargon: URL atualizada # <> ramones: mostra mensagem quando atualiza dados # 20040128 ++ hora, -- somahora (valeu victorelli) # ++ ZZCOR,ZZPATH,ZZTMPDIR: cfg via variáveis de ambiente (valeu rbp) # <> arrumanome: adicionadas opções -d e -r # <> arrumanome: arrumado bug DIR/ARQ de mesmo nome (valeu helio) # <> ss: arrumado bug com --rapido e --fundo (valeu ulysses) # <> ss: a frase não precisa mais das aspas # <> irpf: arrumada mensagem de erro (valeu rbp) # 20040219 ++ tempo # <> beep: com parâmetros, agora serve de alarme # <> howto: saída melhorada, mais limpa # <> dicabl: URL atualizada (valeu leonardo) # <> ajuda: agora paginando com o $PAGER, se existir (valeu rbp) # <> echozz: arrumado bug de imprimir duplicado (valeu rbp) # <> configurações: arrumado bug do $ZZPATH (valeu nexsbr) # <> zzzz: --bashrc detecta comando 'source' ou '.' # <> zzzz: --bashrc adicionado "export ZZPATH" # ############################################################################## # # Configuração # ------------ # # ### Configuração via variáveis de ambiente # # Algumas variáveis de ambiente podem ser usadas para alterar o comportamento # padrão das funções. Basta defini-las em seu .bashrc ou na própria linha de # comando antes de chamar as funções. São elas: # # $ZZCOR - Liga/Desliga as mensagens coloridas (1 e 0) # $ZZPATH - Caminho completo para o arquivo das funções # $ZZTMPDIR - Diretório para armazenar arquivos temporários # # ### Configuração fixa neste arquivo (hardcoded) # # A configuração também pode ser feita diretamente neste arquivo, se você # puder fazer alterações nele. # ZZCOR_DFT=1 # colorir mensagens? 1 liga, 0 desliga ZZPATH_DFT=/usr/bin/funcoeszz # rota absoluta deste arquivo ZZTMPDIR_DFT=${TMPDIR:-/tmp} # diretório temporário # # ############################################################################## # # Inicialização # ------------- # # # Variáveis e funções auxiliares usadas pelas funções ZZ. # Não altere nada aqui. 
# # ZZWWWDUMP='lynx -dump -nolist -crawl -width=300 -accept_all_cookies' ZZWWWLIST='lynx -dump -width=300 -accept_all_cookies' ZZWWWPOST='lynx -post-data -nolist -crawl -width=300 -accept_all_cookies' ZZWWWHTML='lynx -source' ZZERDATA='[0-9][0-9]\/[0-9][0-9]\/[0-9]\{4\}'; # dd/mm/aaa ou mm/dd/aaaa ZZERHORA='[012][0-9]:[0-9][0-9]' ZZSEDURL='s| |+|g;s|&|%26|g' getczz(){ stty raw; eval $1="`dd bs=1 count=1 2>&-`"; stty cooked; } echozz(){ if [ "$ZZCOR" != '1' ]; then echo -e "$*" ; else echo -e "\033[36;1m$*"; echo -ne "\033[m" ; fi } seqzz(){ local o=+ a=1 z=${1:-1}; [ "$2" ] && { a=$1; z=$2; } ; [ $a -gt $z ] && o=- while [ $a -ne $z ]; do echo $a ; eval "a=\$((a$o 1))"; done; echo $a } # # ### Truques para descobrir a localização deste arquivo no sistema # # Se a chamada foi pelo executável, o arquivo é o $0. # Senão, tenta usar a variável de ambiente ZZPATH, definida pelo usuário. # Caso não exista, usa o local padrão ZZPATH_DFT. # Finalmente, força que ZZPATH seja uma rota absoluta. # [ "${0##*/}" = 'bash' -o "${0#-}" != "$0" ] || ZZPATH="$0" [ "$ZZPATH" ] || ZZPATH=$ZZPATH_DFT [ "$ZZPATH" ] || echozz 'AVISO: $ZZPATH vazia. zzajuda e zzzz não funcionarão' [ "${ZZPATH#/}" = "$ZZPATH" ] && ZZPATH="$PWD/${ZZPATH#./}" # ### Últimos ajustes # ZZCOR="${ZZCOR:-$ZZCOR_DFT}" ZZTMP="${ZZTMPDIR:-$ZZTMPDIR_DFT}/zz" unset ZZCOR_DFT ZZPATH_DFT ZZTMPDIR_DFT # # ############################################################################## # ---------------------------------------------------------------------------- # Mostra uma tela de ajuda com explicação e sintaxe de todas as funções # Obs.: tátátá, é xunxo. Sou preguiçoso sim, e daí &:) # Uso: zzajuda # ---------------------------------------------------------------------------- zzajuda(){ zzzz -z $1 zzajuda && return local pinte=: ; [ $ZZCOR = '1' -a "$PAGER" != 'less' ] && pinte='s \<zz[a-z2]\+\> & ' sed '1s/.*/*** ajuda das funções ZZ (tecla Q sai)/p;2g;2p;/^# --*$/,/^# --*$/{ s/^# //p;};d' $ZZPATH | uniq | sed "$pinte" | ${PAGER:-less -r} } # ---------------------------------------------------------------------------- # Mostra informações (como versão e localidade) sobre as funções # Com a opção --atualiza, baixa a versão mais nova das funções # Com a opção --bashrc, "instala" as funções no ~/.bashrc # Com a opção --tcshrc, "instala" as funções no ~/.tcshrc # Uso: zzzz [--atualiza|--bashrc|--tcshrc] # ---------------------------------------------------------------------------- zzzz(){ if [ "$1" = '-z' -o "$1" = '-h' -o "$1" = '--help' ]; then # -h)zzzz -z)resto [ "$1" = '-z' -a "$2" != '--help' -a "$2" != '-h' ] && return 1 #alarmefalso local pat="Uso: [^ ]*${3:-zzzz}"; zzajuda | grep -C9 "^$pat" | sed ":a ;$ bz;N;ba;:z;s/.*\n---*\(\n\)\(.*$pat\)/\1\2/;s/\(\n\)---.*/\1/"; return 0 fi local rc vl vr URL='http://aurelio.net/zz' cfg="source $ZZPATH" cfgf=~/.bashrc local cor='não'; [ "$ZZCOR" = '1' ] && cor='sim'; [ -f "$ZZPATH" ] || return vl=`sed '/^$/{g;q;};/^# 200./!d;s/^# ...\(.\)\(....\).*/\1.\2/;h;d' $ZZPATH` if [ "$1" = '--atualiza' ]; then # obtém versão nova, se !=, download echo "Procurando a versão nova, aguarde." vr=`$ZZWWWDUMP $URL | sed -n 's/.*versão atual \([0-9.]\+\).*/\1/p'` echo "versão local : $vl"; echo "versão remota: $vr"; echo if [ "$vl" = "$vr" ]; then echo 'Você já está com a última versão.' else local urlexe="$URL/funcoeszz" exe="funcoeszz-$vr" echo -n 'Baixando a versão nova... '; $ZZWWWHTML $urlexe > $exe echo 'PRONTO!'; echo "Arquivo '$exe' baixado, instale-o manualmente." 
fi elif [ "$1" = '--bashrc' ]; then # instala funções no ~/.bashrc if ! grep -q "^ *\(source\|\.\) .*funcoeszz" $cfgf; then (echo; echo "$cfg"; echo "export ZZPATH=$ZZPATH") >> $cfgf echo 'feito!' else echo "as funções já estão no $cfgf!"; fi elif [ "$1" = '--tcshrc' ]; then # cria aliases para as funções no /.tcshrc cfgf=~/.zzcshrc cfg="source $cfgf"; echo > $cfgf if ! grep -q "^ *$cfg" ~/.tcshrc; then echo "$cfg" >> ~/.tcshrc ; fi for func in `ZZCOR=0 zzzz | sed '1,/^(( fu/d;s/,//g'`; do echo "alias zz$func 'funcoeszz zz$func'" >> $cfgf; done; echo 'feito!' else # mostra informações sobre as funçÕes rc='não instalado' ; grep -qs "^ *$cfg" $cfgf && rc="$cfgf" echozz "( local)\c"; echo " $ZZPATH"; echozz "(versão)\c"; echo " $vl" echozz "( cores)\c"; echo " $cor"; echozz "( tmp)\c"; echo " $ZZTMP" echozz "(bashrc)\c"; echo " $rc"; echo echozz "( lista)\c"; echo ' [email protected]' echozz "(página)\c"; echo " $URL" [ "$ZZPATH" -a -f "$ZZPATH" ] && { echo; echozz '(( funções disponíveis ))' sed '/^zz[a-z0-9]\+(/!d;s/^zz//;s/(.*//' $ZZPATH | sort | sed ':a;$!N;s/\n/, /;ta;s/\(.\{60\}[^ ]*\) /\1\ /g' } fi } # ---------------------------------------------------------------------------- # #### D I V E R S O S # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Aguarda N minutos e dispara uma sirene usando o 'speaker' # Útil para lembrar de eventos próximos no mesmo dia # Se não receber nenhum argumento, serve para restaurar o 'beep' da máquina # para o seu tom e duração originais # Obs.: a sirene tem 4 toques, sendo 2 tons no modo texto e apenas 1 no xterm # Uso: zzbeep # zzbeep 0 # zzbeep 1 5 15 # espere 1 minuto, depois mais 5, e depois 15 # ---------------------------------------------------------------------------- zzbeep(){ zzzz -z $1 zzbeep && return [ "$1" ] || { echo -ne '\033[10;750]\033[11;100]\a'; return; } for i in $*; do echo -n "Vou bipar em $i minutos... 
"; sleep $((i*60)) echo -ne '\033[11;900]' # beep longo for freq in 500 400 500 400; do echo -ne "\033[10;$freq]\a"; sleep 1; done echo -ne '\033[10;750]\033[11;100]'; echo OK; shift; done } # ---------------------------------------------------------------------------- # Retira linhas em branco e comentários # Para ver rapidamente quais opções estão ativas num arquivo de configuração # Além do tradicional #, reconhece comentários de arquivos .vim # Obs.: aceita dados vindos da ENTRADA PADRÃO (STDIN) # Uso: zzlimpalixo [arquivo] # Ex.: zzlimpalixo ~/.vimrc # cat /etc/inittab | zzlimpalixo # ---------------------------------------------------------------------------- zzlimpalixo(){ zzzz -z $1 zzlimpalixo && return local z='#'; case "$1" in *.vim|*.vimrc*)z='"';; esac cat "${1:--}" | tr '\t' ' ' | sed "\,^ *\($z\|$\),d" | uniq } # ---------------------------------------------------------------------------- # Converte as letras do texto para minúsculas/MAIÚSCULAS, inclusive acentuadas # Uso: zzmaiusculas [arquivo] # zzminusculas [arquivo] # Ex.: zzmaiusculas /etc/passwd # echo NÃO ESTOU GRITANDO | zzminusculas # ---------------------------------------------------------------------------- zzminusculas(){ zzzz -z $1 zzminusculas && return sed 'y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ y/ÀÁÂÃÄÅÈÉÊËÌÍÎÏÒÓÔÕÖÙÚÛÜÇÑ/àáâãäåèéêëìíîïòóôõöùúûüçñ/' "$@"; } zzmaiusculas(){ zzzz -z $1 zzmaiusculas && return sed 'y/abcdefghijklmnopqrstuvwxyz/ABCDEFGHIJKLMNOPQRSTUVWXYZ/ y/àáâãäåèéêëìíîïòóôõöùúûüçñ/ÀÁÂÃÄÅÈÉÊËÌÍÎÏÒÓÔÕÖÙÚÛÜÇÑ/' "$@"; } # ---------------------------------------------------------------------------- # Retira as linhas repetidas (consecutivas ou não) # Útil quando não se pode alterar a ordem original das linhas, # Então o tradicional sort|uniq falha. # Uso: zzuniq [arquivo] # Ex.: zzuniq /etc/inittab # cat /etc/inittab | zzuniq # ---------------------------------------------------------------------------- zzuniq(){ zzzz -z $1 zzuniq && return ## versão UNIX, rápida, mas precisa de cat, sort, uniq e cut cat -n "${1:--}" | sort -k2 | uniq -f1 | sort -n | cut -f2- ## versão SED, mais lenta para arquivos grandes, mas só precisa do SED ##sed "G;/^\([^\n]*\)\n\([^\n]*\n\)*\1\n/d;h;s/\n.*//" $1 } # ---------------------------------------------------------------------------- # Mata os processos que tenham o(s) padrão(ões) especificado(s) no nome do # comando executado que lhe deu origem # Obs.: se quiser assassinar mesmo o processo, coloque a opção -9 no kill # Uso: zzkill padrão [padrão2 ...] 
# Ex.: zzkill netscape # zzkill netsc soffice startx # ---------------------------------------------------------------------------- zzkill(){ zzzz -z $1 zzkill && return; local C P; for C in "$@"; do for P in `ps x --format pid,comm | sed -n "s/^ *\([0-9]\+\) [^ ]*$C.*/\1/p"` do kill $P && echo -n "$P "; done; echo; done } # ---------------------------------------------------------------------------- # Mostra todas as combinações de cores possíveis no console, juntamente com # os respectivos códigos ANSI para obtê-las # Uso: zzcores # ---------------------------------------------------------------------------- zzcores(){ zzzz -z $1 zzcores && return local frente fundo bold c for frente in 0 1 2 3 4 5 6 7; do for bold in '' ';1'; do for fundo in 0 1 2 3 4 5 6 7; do c="4$fundo;3$frente"; echo -ne "\033[$c${bold}m $c${bold:- } \033[m" done; echo done; done } # ---------------------------------------------------------------------------- # Gera uma senha aleatória de N caracteres formada por letras e números # Obs.: a senha gerada não possui caracteres repetidos # Uso: zzsenha [n] (padrão n=6) # Ex.: zzsenha # zzsenha 8 # ---------------------------------------------------------------------------- zzsenha(){ zzzz -z $1 zzsenha && return local n alpha="abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" n=6 ; [ "$1" ] && n=`echo "$1" | sed 's/[^0-9]//g'` [ $n -gt 62 ] && { echo "zzsenha: O tamanho máximo é 62" ; return ; } while [ $n -ne 0 ]; do n=$((n-1)) ; pos=$((RANDOM%${#alpha}+1)) echo -n "$alpha" | sed "s/\(.\)\{$pos\}.*/\1/" # igual a cut -c$pos alpha=`echo $alpha | sed "s/.//$pos"` ; done | tr -d '\012' ; echo } # ---------------------------------------------------------------------------- # Mostra a tabela ASCII com todos os caracteres imprimíveis (32-126,161-255) # no formato: <decimal> <octal> <ascii> # Obs.: o número de colunas e a largura da tabela são configuráveis # Uso: zzascii [colunas] [largura] # Ex.: zzascii # zzascii 7 # zzascii 9 100 # ---------------------------------------------------------------------------- zzascii(){ zzzz -z $1 zzascii && return local ncols=${1:-6} largura=${2:-78} chars=`seqzz 32 126 ; seqzz 161 255` local largcol=$((largura/ncols)) nchars=`echo "$chars" | sed -n $=` local nlinhas=$((nchars/ncols+1)) cols=`seqzz 0 $((ncols-1))` local ref num octal char linha=0 echo $nchars caracteres, $ncols colunas, $nlinhas linhas, $largura de largura while [ ${linha} -lt $nlinhas ]; do ref=''; linha=$((linha+1)) for col in $cols; do ref="$ref $((nlinhas*col+linha))p;"; done for num in `echo "$chars" | sed -n "$ref"`; do octal=$((num/8*10+num%8)) ; octal=$((octal/80*100+octal%80)) [ $octal -lt 100 ] && octal="0$octal" ; char=`echo -e "\\\\$octal"` printf "% ${largcol}s" "$num $octal $char" done ; echo done } # ---------------------------------------------------------------------------- # Screen Saver para console, com cores e temas # Temas: mosaico, espaco, olho, aviao, jacare, alien, rosa, peixe, siri # Obs.: aperte Ctrl+C para sair # Uso: zzss [--rapido|--fundo] [--tema <tema>] [texto] # Ex.: zzss # zzss fui ao banheiro # zzss --rapido / # zzss --fundo --tema peixe # ---------------------------------------------------------------------------- zzss(){ zzzz -z $1 zzss && return local a i j x y z zn lc c fundo tema temas hl pausa=1 c1='40;3' lin=25 col=80 temas="{mosaico} ;{espaco}.; {olho}00;{aviao}--o-0-o--;{jacare}==*-,,--,,--; {alien}/-=-\\ ;{rosa}--/--\\-<@;{peixe}>-)))-D; {siri}(_).-=''=-.(_);" lc=`stty size 2>&-`; [ "$lc" ] && { lin=${lc% *}; 
col=${lc#* }; } # scr size tema=mosaico ; while [ $# -ge 1 ]; do case "$1" in # cmdline --fundo)fundo=1;; --rapido)unset pausa;; --tema)tema=${2:- }; shift;; *)unset tema;z="$*"; break;; esac; shift; done [ "$tema" ] && if echo $temas | grep -qs "{$tema}" # theme then z=`echo $temas|sed "s/.*{$tema}//;s/;.*//"` # get str else echo "tema desconhecido '$tema'"; return; fi # error [ "$tema" = mosaico ] && { fundo=1 ; unset pausa; z=' ';} # special trap "clear;return" SIGINT; [ "$fundo" ] && c1='30;4'; zn=${#z} # init clear ; i=0 ; while :; do # loop i=$((i+1)) ; j=$((i+1)) ; RANDOM=$j # set vars x=$((((RANDOM+c*j)%lin)+1)) ; y=$((((RANDOM*c+j)%(col-zn+1))+1)) # set X,Y c=$(((x+y+j+RANDOM)%7 +1)) ; echo -ne "\033[$x;${y}H" # goto X,Y unset hl; [ ! "$fundo" -a $((y%2)) -eq 1 ] && hl='1;' # bold? [ "$ZZCOR" != 1 ] && a="$z" || a="\033[${hl}$c1${c}m$z\033[m" # color? echo -ne $a ; ${pausa:+sleep 1} ; done # show } # ---------------------------------------------------------------------------- # #### A R Q U I V O S # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Conversão de arquivos texto entre DOS e linux # Obs.: o arquivo original é gravado como arquivo.{dos,linux} # Uso: zzdos2linux arquivo(s) # zzlinux2dos arquivo(s) # ---------------------------------------------------------------------------- zzdos2linux(){ zzzz -z $1 zzdos2linux && return local A; for A in "$@"; do cp "$A" "$A.dos" && chmod -x $A && sed 's/ $//' "$A.dos" > "$A" && echo "convertido $A"; done; } zzlinux2dos(){ zzzz -z $1 zzdos2linux && return local A; for A in "$@"; do cp "$A" "$A.linux" && sed 's/$/ /' "$A.linux" > "$A" && echo "convertido $A"; done; } # ---------------------------------------------------------------------------- # Troca a extensão de um (ou vários) arquivo especificado # Uso: zztrocaextensao antiga nova arquivo(s) # Ex.: zztrocaextensao .doc .txt * # ---------------------------------------------------------------------------- zztrocaextensao(){ zzzz -z $1 zztrocaextensao && return [ "$3" ] || { echo 'uso: zztrocaextensao antiga nova arquivo(s)'; return; } local A p1="$1" p2="$2"; shift 2; [ "$p1" = "$p2" ] && return for A in "$@"; do [ "$A" != "${A%$p1}" ] && mv -v "$A" "${A%$p1}$p2"; done } # ---------------------------------------------------------------------------- # Troca o conteúdo de dois arquivos, mantendo suas permissões originais # Uso: zztrocaarquivos arquivo1 arquivo2 # Ex.: zztrocaarquivos /etc/fstab.bak /etc/fstab # ---------------------------------------------------------------------------- zztrocaarquivos(){ zzzz -z $1 zztrocaarquivos && return [ "$2" ] || { echo 'uso: zztrocaarquivos arquivo1 arquivo2'; return; } local at="$ZZTMP.$$"; cat "$2" > $at; cat "$1" > "$2"; cat "$at" > "$1" rm $at; echo "feito: $1 <-> $2" } # ---------------------------------------------------------------------------- # Troca uma palavra por outra em um (ou vários) arquivo especificado # Obs.: se quiser que seja insensível a maiúsculas/minúsculas, apenas # coloque o modificador 'i' logo após o modificador 'g' no comando sed # desligado por padrão # Uso: zztrocapalavra antiga nova arquivo(s) # Ex.: zztrocapalavra excessão exceção *.txt # ---------------------------------------------------------------------------- zztrocapalavra(){ zzzz -z $1 zztrocapalavra && return [ "$3" ] || { echo 'uso: zztrocapalavra antiga nova arquivo(s)'; return; } local A T p1="$1" p2="$2"; shift 2; for A in "$@"; do grep -qs 
"$p1" "$A" || continue ; T=$ZZTMP${A##*/}.$$ ; cp "$A" "$T" && sed "s§$p1§$p2§g" "$T" > "$A" && rm -f "$T" && echo "feito $A"; done } # ---------------------------------------------------------------------------- # Renomeia arquivos do diretório atual, arrumando nomes estranhos. # Obs.: ele deixa tudo em minúsculas, retira acentuação e troca espaços em # branco, símbolos e pontuação pelo sublinhado _ # Use o -r para ser recursivo e o -d para renomear diretórios também # Uso: zzarrumanome [-d] [-r] arquivo(s) # Ex.: zzarrumanome * # zzarrumanome -d -r . # zzarrumanome "DOCUMENTO MALÃO!.DOC" # fica documento_malao.doc # zzarrumanome "RAMONES - I Don't Care" # fica ramones-i_don_t_care # ---------------------------------------------------------------------------- zzarrumanome(){ zzzz -z $1 zzarrumanome && return local A A1 A2 D i f_R=0 f_D=0; [ "$1" = '-d' ] && { f_D=1; shift; } [ "$1" = '-r' ] && { f_R=1; shift; }; [ "$1" = '-d' ] && { f_D=1; shift; } [ "$1" ] || { echo 'uso: zzarrumanome [-d] [-r] arquivo(s)'; return; } for A in "$@"; do [ "$A" != / ] && A=${A%/} [ -f "$A" -o -d "$A" ] || continue; [ -d "$A" ] && { [ "$f_R" -eq 1 ] && zzarrumanome -r ${f_D:+-d} "$A"/* [ "$f_D" -eq 0 ] && continue; } A1="${A##*/}"; D='.'; [ "${A%/*}" != "$A" ] && D="${A%/*}"; A2=`echo $A1 | sed "s/[\"']//g"' y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/;s/^-/_/ y/ÀàÁáÂâÃãÄÅäåÈèÉéÊêËëÌìÍíÎîÏïÇçÑñ/aaaaaaaaaaaaeeeeeeeeiiiiiiiiccnn/ y/ÒòÓóÔôÕõÖöÙùÚúÛûÜüß¢Ð£Øø§µÝý¥¹²³/oooooooooouuuuuuuubcdloosuyyy123/ s/[^a-z0-9._-]/_/g;s/__*/_/g;s/_\([.-]\)/\1/g;s/\([.-]\)_/\1/g'` [ "$A1" = "$A2" ] && continue ; [ -f "$D/$A2" -o -d "$D/$A2" ] && { i=1 ; while [ -f "$D/$A2.$i" -o -d "$D/$A2.$i" ]; do i=$((i+1)); done A2="$A2.$i"; }; mv -v -- "$A" "$D/$A2"; done } # ---------------------------------------------------------------------------- # Mostra a diferença entre dois textos, mas no contexto de palavras. # Útil para conferir revisões ortográficas ou mudanças pequenas em frases. # Obs.: se tiver muitas _linhas_ diferentes o diff normal é aconselhado. # Uso: zzdiffpalavra arquivo1 arquivo2 # Ex.: zzdiffpalavra texto-orig.txt texto-novo.txt # zzdiffpalavra txt1 txt2 | vi - # saída com sintaxe colorida # ---------------------------------------------------------------------------- zzdiffpalavra(){ zzzz -z $1 zzdiffpalavra && return [ "$2" ] || { echo 'uso: zzdiffpalavra arquivo1 arquivo2'; return; } local split='s/ $//;s/^/§§§\n/;s/ /\n/g' at1="$ZZTMP${1##*/}.$$" local at2="$ZZTMP${2##*/}.$$"; sed "$split" $1 >$at1; sed "$split" $2 >$at2 diff -u100 $at1 $at2 | cat - -E | sed '4,${s/^+/¤/;s/^-/¯/;};s/$$/¶/' | tr -d '\012' | sed 's/\(¶¯[^¶]*\)\+/\n&\n/g;s/\(¶¤[^¶]*\)\+/&\n/g; s/\(¶ [^¶]*\)\(\(¶¤[^¶]*\)\+\)/\1\n\2/g;s/¶/\n/3;s/¶/\n/2;s/¶/\n/1;s/¶//g; s/\n¤/\n+/g;s/\n¯/\n-/g;s/[¤¯]/ /g;s/\n\? 
\?§§§\n\?/\n/g'; rm $at1 $at2 } # ---------------------------------------------------------------------------- # Acha as funções de uma biblioteca da linguagem C (arquivos .h) # Obs.: o diretório padrão de procura é o /usr/include # Uso: zzcinclude # Ex.: zzcinclude stdio # zzcinclude /minha/rota/alternativa/stdio.h # ---------------------------------------------------------------------------- zzcinclude(){ zzzz -z $1 zzcinclude && return [ "$1" ] || { echo "uso: zzcinclude nome-biblioteca"; return; } local i="$1"; [ "${i#/}" = "$i" ] && i="/usr/include/$i.h" [ -f $i ] || { echo "$i não encontrado" ; return; } ; cpp -E $i | sed '/^ *$/d;/^\(#\|typedef\) /d;/^[^a-z]/d;s/ *(.*//;s/.* \*\?//' | sort } # ---------------------------------------------------------------------------- # Acha os 15 maiores arquivos/diretórios do diretório atual (ou especificados) # Usando-se a opção -r é feita uma busca recursiva nos subdiretórios # Uso: zzmaiores [-r] [dir1 dir2 ...] # Ex.: zzmaiores # zzmaiores /etc /tmp # zzmaiores -r ~ # ---------------------------------------------------------------------------- zzmaiores(){ zzzz -z $1 zzmaiores && return local d rr=0 ; [ "$1" == '-r' ] && { rr=1 ; shift; } if [ "$2" ]; then d=`echo $* | sed 's/^/{/;s/$/}/;s/ \+/,/'` elif [ "$1" ]; then d="$1"; else d=.; fi if [ $rr -eq 1 ]; then find $d -type f -printf "%11s %p\n" | sort -nr | sed ' :p1; s/^\( *[0-9]\+\)\([0-9]\{3\}\)/\1.\2/ ; /^ *[0-9]\{4\}/b p1; :p2; s/^/ / ; /^[ .0-9]\{1,13\}[0-9] /b p2 ; 15q' else du -s `eval echo $d/{*,.[^.]*}` 2>/dev/null | sort -nr | sed 15q fi } # ---------------------------------------------------------------------------- # Conta o número de vezes que uma palavra aparece num arquivo # Obs.: -i Ignora a diferença de maiúsculas/minúsculas # -p Parcial, conta trechos de palavras # Uso: zzcontapalavra [-i|-p] palavra arquivo # Ex.: zzcontapalavra root /etc/passwd # zzcontapalavra -i -p a /etc/passwd # ---------------------------------------------------------------------------- zzcontapalavra(){ zzzz -z $1 zzcontapalavra && return local ic word='-w' mask='@@_@_@@' [ "$1" = '-p' ] && { word= ; shift; } ; [ "$1" = '-i' ] && { ic=$1; shift; } [ "$1" = '-p' ] && { word= ; shift; } ; p=$1 ; arq=$2 [ $# -ne 2 ] && { echo 'uso: zzcontapalavra [-i|-p] palavra arquivo'; return;} [ "$ic" ] && p=`echo "$p" | zzminusculas` ; [ "$word" ] && p="\b$p\b" grep $ic $word "$p" $arq | ([ "$ic" ] && zzminusculas || cat -) | sed "s§$p§\\ $mask§g" | grep -c "^$mask" } # ---------------------------------------------------------------------------- # #### C Á L C U L O # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # Calculadora: + - / * ^ % # mais operadores, ver `man bc` # Obs.: números fracionados podem vir com vírgulas ou pontos: 1,5 ou 1.5 # Uso: zzcalcula número operação número # Ex.: zzcalcula 2,1 / 3,5 # zzcalcula '2^2*(4-1)' # 2 ao quadrado vezes 4 menos 1 # ---------------------------------------------------------------------------- zzcalcula(){ zzzz -z $1 zzcalcula && return [ "$1" ] && echo "scale=2;$*" | sed y/,/./ | bc | sed y/./,/ ; } # ---------------------------------------------------------------------------- # Faz cálculos com datas e/ou converte data->num e num->data # Que dia vai ser daqui 45 dias? Quantos dias há entre duas datas? zzdata! # Quando chamada com apenas um parâmetro funciona como conversor de data # para número inteiro (N dias passados desde Epoch) e vice-versa. 
# Obs.: Leva em conta os anos bissextos (Epoch = 01/01/1970, editável) # Uso: zzdata data|num [+|- data|num] # Ex.: zzdata 22/12/1999 + 69 # zzdata hoje - 5 # zzdata 01/03/2000 - 11/11/1999 # zzdata hoje - dd/mm/aaaa <---- use sua data de nascimento # ---------------------------------------------------------------------------- zzdata(){ zzzz -z $1 zzdata && return [ $# -eq 3 -o $# -eq 1 ] || { echo 'zzdata data|num [+|- data|num]'; return; } local yyyy mm dd n1 n2 days d i n isd1=1 d1=$1 oper=$2 d2=$3 epoch=1970 local NUM n1=$d1 n2=$d2 months='31 28 31 30 31 30 31 31 30 31 30 31' for d in $d1 $d2; do NUM=0 ; [ "$d" = 'hoje' -o "$d" = 'today' ] && { d=`date +%d/%m/%Y` ; [ "$isd1" ] && d1=$d || d2=$d ; } # get 'today' if [ "$d" != "${d#*/}" ]; then n=1 ; y=$epoch # --date2num-- yyyy=${d##*/};dd=${d%%/*};mm=${d#*/};mm=${mm%/*};mm=${mm#0};dd=${dd#0} op=+; [ $yyyy -lt $epoch ] && op=-; while :; do days=365 # year2num [ $((y%4)) -eq 0 ]&&[ $((y%100)) -ne 0 ]||[ $((y%400)) -eq 0 ] && days=366 [ $y -eq $yyyy ] && break; NUM=$((NUM $op days)); y=$((y $op 1)); done for i in $months; do [ $n -eq $mm ] && break; n=$((n+1)) # month2num [ $days -eq 366 -a $i -eq 28 ] && i=29 ; NUM=$((NUM+$i)); done NUM=$((NUM+dd-1)); [ "$isd1" ] && n1=$NUM || n2=$NUM # day2num (-1) fi ; isd1= ; done ; NUM=$(($n1 $oper $n2)) # calculate N [ "${d1##??/*}" = "${d2##??/*}" ] && { echo $NUM; return; } # show num? y=$epoch; mm=1 ; dd=$((NUM+1)); while :; do days=365 # num2year [ $((y%4)) -eq 0 ]&&[ $((y%100)) -ne 0 ]||[ $((y%400)) -eq 0 ] && days=366 [ $dd -le $days ] && break; dd=$((dd-days)); y=$((y+1)); done; yyyy=$y for i in $months; do [ $days -eq 366 -a $i -eq 28 ] && i=29 # num2month [ $dd -le $i ] && break; dd=$((dd-i)); mm=$((mm+1)); done # then pad&show [ $dd -le 9 ] && dd=0$dd ; [ $mm -le 9 ] && mm=0$mm ; echo $dd/$mm/$yyyy } # ---------------------------------------------------------------------------- # Faz cálculos com horários # A opção -r torna o cálculo relativo à primeira data, por exemplo: # 02:00 - 03:30 = -01:30 (sem -r) e 22:30 (com -r) # Uso: zzhora [-r] hh:mm [+|- hh:mm] # Ex.: zzhora 8:30 + 17:25 # preciso somar duas horas! # zzhora 12:00 - agora # quando falta para o almoço? # zzhora -12:00 + -5:00 # horas negativas!!! # zzhora 1000 # quanto é 1000 minutos? # zzhora -r 5:30 - 8:00 # que horas ir dormir pra acordar às 5:30? # zzhora -r agora + 57:00 # e daqui 57 horas, será quando? 
# ---------------------------------------------------------------------------- zzhora(){ zzzz -z $1 zzhora && return local rel=0; [ "$1" = '-r' ] && rel=1 && shift [ "$1" ] || { echo "uso: zzhora [-r] hh:mm [+|- hh:mm]"; return; } local hh1 mm1 hh2 mm2 M1 M2 RES H M HD neg Hp Mp HDp local D=0 hhmm1="$1" oper="${2:-+}" hhmm2="${3:-00}" [ "${oper#[+-]}" ] && echo "Operação Inválida: $oper" && return [ "$hhmm1" = 'agora' -o "$hhmm1" = 'now' ] && hhmm1=`date +%H:%M` [ "$hhmm2" = 'agora' -o "$hhmm2" = 'now' ] && hhmm2=`date +%H:%M` [ "${hhmm1#*:}" != "$hhmm1" ] || hhmm1=00:$hhmm1 [ "${hhmm2#*:}" != "$hhmm2" ] || hhmm2=00:$hhmm2 hh1=${hhmm1%:*}; mm1=${hhmm1#*:}; hh2=${hhmm2%:*}; mm2=${hhmm2#*:} # extrai hh1=${hh1#0} ; mm1=${mm1#0} ; hh2=${hh2#0} ; mm2=${mm2#0} # s/^0// M1=$((hh1*60+mm1)); M2=$((hh2*60+mm2)); RES=$(($M1 $oper $M2)) # calcula [ $RES -lt 0 ] && neg=- RES=${RES#-} # del - H=$((RES/60)); M=$((RES%60)); D=$((H/24)); HD=$((H%24)); Hp=$H Mp=$M HDp=$HD [ $H -le 9 ] && Hp=0$H; [ $M -le 9 ] && Mp=0$M; [ $HD -le 9 ] && HDp=0$HD if [ $rel -eq 1 ]; then [ "$neg" ] && { Mp=$(((60-M)%60)); D=$((H/24+(Mp>0))); HDp=$(((24-HD-(Mp>0))%24)) [ $HDp -le 9 ] && HDp=0$HDp; [ $Mp -le 9 ] && Mp=0$Mp ; } # padding [ $D -eq 1 ] && { extra=amanhã; [ "$neg" ] && extra=ontem; } [ $D -eq 0 ] && extra=hoje; [ "$extra" ] || extra="$neg${D} dias" echo "$HDp:$Mp ($extra)" else echo "$neg$Hp:$Mp (${D}d ${HD}h ${M}m)" fi } # ---------------------------------------------------------------------------- # Faz várias conversões como: caracteres, temperatura e distância # cf = (C)elsius para (F)ahrenheit # fc = (F)ahrenheit para (C)elsius # km = (K)Quilômetros para (M)ilhas # mk = (M)ilhas para (K)Quilômetros # db = (D)ecimal para (B)inário # bd = (B)inário para (D)ecimal # cd = (C)aractere para (D)ecimal # dc = (D)ecimal para (C)aractere # Uso: zzconverte <cf|fc|mk|km|db|bd|cd> número # Ex.: zzconverte cf 5 # zzconverte dc 65 # zzconverte db 32 # ---------------------------------------------------------------------------- zzconverte(){ zzzz -z $1 zzconverte && return [ "$1" = "cf" ] && echo "$2 C = $(echo "scale=2;($2*9/5)+32" | bc) F" [ "$1" = "fc" ] && echo "$2 F = $(echo "scale=2;($2-32)*5/9" | bc) C" [ "$1" = "km" ] && echo "$2 km = $(echo "scale=2;$2*0.6214" | bc) milhas" [ "$1" = "mk" ] && echo "$2 milhas = $(echo "scale=2;$2*1.609" | bc) km" [ "$1" = "db" ] && echo "obase=2;$2" | bc -l [ "$1" = "bd" ] && echo "$((2#$2))" [ "$1" = "cd" ] && echo -n "$2" | od -d | sed -n '1s/^.* \+//p' [ "$1" = "dc" ] && awk "BEGIN {printf(\"%c\n\",$2)}" } #-----------8<------------daqui pra baixo: FUNÇÕES QUE FAZEM BUSCA NA INTERNET #-------------------------podem parar de funcionar se as páginas mudarem # ---------------------------------------------------------------------------- # #### C O N S U L T A S (internet) # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # http://br.invertia.com # Busca a cotação do dia do dólar (comercial, paralelo e turismo) # Obs.: as cotações são atualizadas de 10 em 10 minutos # Uso: zzdolar # ---------------------------------------------------------------------------- zzdolar(){ zzzz -z $1 zzdolar && return $ZZWWWDUMP 'http://br.invertia.com/mercados/divisas/tiposdolar.asp' | sed 's/^ *//;/Data:/,/Turismo/!d;/percent/d;s/ */ /g s/.*Data: \(.*\)/\1 compra venda hora/;s|^[1-9]/|0&|; s,^\([0-9][0-9]\)/\([0-9]/\),\1/0\2, s/^D.lar \|- Corretora//g;s/ [-+]\?[0-9.]\+ %$// s/al /& /;s/lo /& /;s/mo 
/& /;s/ \([0-9]\) / \1.000 / s/\.[0-9]\>/&0/g;s/\.[0-9][0-9]\>/&0/g;/^[^0-9]/s/[0-9] /& /g' } # ---------------------------------------------------------------------------- # http://www.receita.fazenda.gov.br # Consulta os lotes de restituição do imposto de renda # Obs.: funciona para os anos de 2001, 2002 e 2003 # Uso: zzirpf ano número-cpf # Ex.: zzirpf 2003 123.456.789-69 # ---------------------------------------------------------------------------- zzirpf(){ zzzz -z $1 zzirpf && return [ "$2" ] || { echo 'uso: zzirpf ano número-cpf'; return; } local ano=$1 URL='http://www.receita.fazenda.gov.br/Scripts/srf/irpf' z=${ano#200} ; [ "$z" != 1 -a "$z" != 2 -a "$z" != 3 ] && { echo "Ano inválido '$ano'. Deve ser 2001, 2002 ou 2003."; return; } $ZZWWWDUMP "$URL/$ano/irpf$ano.dll?VerificaDeclaracao&CPF=$2" | sed '1,8d;s/^ */ /;/^ \[BUTTON\]$/d' } # ---------------------------------------------------------------------------- # http://www.terra.com.br/cep # Busca o CEP de qualquer rua de qualquer cidade do país ou vice-versa # Uso: zzcep estado cidade nome-da-rua # Ex.: zzcep PR curitiba rio gran # zzcep RJ 'Rio de Janeiro' Vinte de # ---------------------------------------------------------------------------- zzcep(){ zzzz -z $1 zzcep && return [ "$3" ] || { echo 'uso: zzcep estado cidade rua'; return; } local URL='http://www.correios.com.br/servicos/cep/Resultado_Log.cfm' local r c e="$1"; c=`echo "$2"| sed "$ZZSEDURL"` shift ; shift ; r=`echo "$*"| sed "$ZZSEDURL"` echo "UF=$e&Localidade=$c&Tipo=&Logradouro=$r" | $ZZWWWPOST "$URL" | sed -n '/^ *UF:/,/^$/{ /Página Anter/d; s/.*óxima Pág.*/...CONTINUA/; p;}' } # ---------------------------------------------------------------------------- # http://www.pr.gov.br/detran # Consulta débitos do veículo, como licenciamento, IPVA e multas (detran-PR) # Uso: zzdetranpr número-renavam # Ex.: zzdetranpr 123456789 # ---------------------------------------------------------------------------- zzdetranpr(){ zzzz -z $1 zzdetranpr && return [ "$1" ] || { echo 'uso: zzdetranpr número-renavam'; return; } local URL='http://celepar7.pr.gov.br/detran/consultas/veiculos/deb_novo.asp'; $ZZWWWDUMP "$URL?renavam=$1" | sed 's/^ *//;/^\(___*\)\?$/d; /^\[/d; 1,/^\(Renavam\|Data\):/{//!d;}; /^Resumo das Multas\|^Voltar$/,$d; /^AUTUAÇ/,${/^Infração:/!d;s///;}; /^\(Discrimi\|Informa\)/s/.*//; /^Placa/s/^[^:]*: \([A-Z0-9-]\+\).*:/\1 ano/; /^\(Marca\|Munic\)/s/[^:]*: //; s|^\(.*\) \([0-9]\+,[0-9]\{2\}\|\*\*\* QUITADO \*\*\*\)|\2 \1|;' } # ---------------------------------------------------------------------------- # http://www.detran.sp.gov.br # Consulta débitos do veículo, como licenciamento, IPVA e multas (detran-SP) # Uso: zzdetransp número-renavam # Ex.: zzdetransp 123456789 # ---------------------------------------------------------------------------- zzdetransp(){ zzzz -z $1 zzdetransp && return [ "$1" ] || { echo 'uso: zzdetransp número-renavam'; return; } local URL='http://sampa5.prodam.sp.gov.br/multas/c_multas.asp'; echo echo "text1=$1" | $ZZWWWPOST "$URL" | sed 's/^ *//;/^Resultado/,/^Última/!d; /^___\+$/s/.*/_____/; /^Resultado/s/.* o Ren/Ren/; /^Seq /,/^Total/{/^Seq/d;/^Total/!s/^/+++/;}; /Última/{G;s/\n//;s/\n_____\(\n\)$/\1/;s/^[^:]\+/Data /;p;};H;d' | sed '/^+++/{H;g;s/^\(\n\)+++[0-9]\+ \(...\)\(....\) \([^ ]\+ \)\{2\}\(.*\) \('$ZZERDATA' '$ZZERHORA'\) \(.*\) \('$ZZERDATA' .*\)/Placa: \2-\3\nData : \6\nLocal: \7\nInfr.: \5\nMulta: \8\n/;}' } # ---------------------------------------------------------------------------- # #### P R O G R A M A S (internet) # 
---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # http://freshmeat.net # Procura por programas na base do freshmeat # Uso: zzfreshmeat programa # Ex.: zzfreshmeat tetris # ---------------------------------------------------------------------------- zzfreshmeat(){ zzzz -z $1 zzfreshmeat && return [ "$1" ] || { echo 'uso: zzfreshmeat programa'; return; } $ZZWWWLIST "http://freshmeat.net/search/?q=$1" | sed -n '/^ *© Copyright/,${s,^.* ,,;\|meat\.net/projects/|s,/$,,gp;}' | uniq } # ---------------------------------------------------------------------------- # http://rpmfind.net/linux # Procura por pacotes RPM em várias distribuições # Obs.: a arquitetura padrão de procura é a i386 # Uso: zzrpmfind pacote [distro] [arquitetura] # Ex.: zzrpmfind sed # zzrpmfind lilo mandr i586 # ---------------------------------------------------------------------------- zzrpmfind(){ zzzz -z $1 zzrpmfind && return [ "$1" ] || { echo 'uso: zzrpmfind pacote [distro] [arquitetura]'; return; } local URL='http://rpmfind.net/linux/rpm2html/search.php' echozz 'ftp://rpmfind.net/linux/' $ZZWWWLIST "$URL?query=$1&submit=Search+...&system=$2&arch=${3:-i386}" | sed -n '\,ftp://rpmfind,s,^[^A-Z]*/linux/, ,p' | sort } # ---------------------------------------------------------------------------- # #### D I V E R S O S (internet) # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # http://www.iana.org/cctld/cctld-whois.htm # Busca a descrição de um código de país da internet (.br, .ca etc) # Obs.: o sed deve suportar o I de ignorecase na pesquisa # Uso: zzdominiopais [.]código|texto # Ex.: zzdominiopais .br # zzdominiopais br # zzdominiopais republic # ---------------------------------------------------------------------------- zzdominiopais(){ zzzz -z $1 zzdominiopais && return [ "$1" ] || { echo 'uso: zzdominiopais [.]código|texto'; return; } local i1 i2 a='/usr/share/zoneinfo/iso3166.tab' p=${1#.} [ $1 != $p ] && { i1='^'; i2='^\.'; } [ -f $a ] && { echozz 'local:'; sed "/^#/d;/$i1$p/I!d" $a; } local URL=http://www.iana.org/cctld/cctld-whois.htm ; echozz 'www :' $ZZWWWDUMP "$URL" | sed -n "s/^ *//;1,/^z/d;/^__/,$ d;/$i2$p/Ip" } # ---------------------------------------------------------------------------- # http://pgp.dtype.org:11371 # Busca a identificação da chave PGP, fornecido o nome ou email da pessoa. # Obs.: de brinde, instruções de como adicionar a chave a sua lista. 
# Uso: zzchavepgp nome|email # Ex.: zzchavepgp Carlos Oliveira da Silva # zzchavepgp [email protected] # ---------------------------------------------------------------------------- zzchavepgp(){ zzzz -z $1 zzchavepgp && return [ "$1" ] || { echo 'uso: zzchavepgp nome|email'; return; } local id TXT=`echo "$*"| sed "$ZZSEDURL"` URL='http://pgp.dtype.org:11371' $ZZWWWDUMP "$URL/pks/lookup?search=$TXT&op=index" | sed '/^Type /,$!d;$G' | tee /dev/stderr | sed -n 's,^[^/]*/\([0-9A-F]\+\) .*,\1,p' | while read id; do [ "$id" ] && echo "adicionar: gpg --recv-key $id && gpg --list-keys $id"; done } # ---------------------------------------------------------------------------- # http://www.dicas-l.unicamp.br # Procura por dicas sobre determinado assunto na lista Dicas-L # Obs.: as opções do grep podem ser usadas (-i já é padrão) # Uso: zzdicasl [opção-grep] palavra(s) # Ex.: zzdicasl ssh # zzdicasl -w vi # zzdicasl -vEw 'windows|unix|emacs' # ---------------------------------------------------------------------------- zzdicasl(){ zzzz -z $1 zzdicasl && return [ "$1" ] || { echo 'uso: zzdicasl [opção-grep] palavra(s)'; return; } local o URL='http://www.dicas-l.unicamp.br'; [ "${1##-*}" ] || { o=$1; shift; } echozz "$URL/dicas-l/<DATA>.shtml"; $ZZWWWHTML "$URL/dicas-l" | sed '/^<LI><A HREF=/!d;s///;s/\.shtml>//;s,</A>,,' | grep -i $o "$*" } # ---------------------------------------------------------------------------- # http://registro.br # Whois da fapesp para domínios brasileiros # Uso: zzwhoisbr domínio # Ex.: zzwhoisbr abc.com.br # zzwhoisbr www.abc.com.br # ---------------------------------------------------------------------------- zzwhoisbr(){ zzzz -z $1 zzwhoisbr && return [ "$1" ] || { echo 'uso: zzwhoisbr domínio'; return; } local dom="${1#www.}" URL='http://registro.br/cgi-bin/nicbr/whois' $ZZWWWDUMP "$URL?qr=$dom" | sed '1,/^%/d;/^remarks/,$d;/^%/d; /^alterado\|atualizado\|status\|servidor \|último /d' } # ---------------------------------------------------------------------------- # http://www.ibiblio.org # Procura de documentos HOWTO # Uso: zzhowto palavra | --atualiza # Ex.: zzhowto apache # zzhowto --atualiza # ---------------------------------------------------------------------------- zzhowto(){ zzzz -z $1 zzhowto && return [ "$1" ] || { echo 'uso: zzhowto [--atualiza] palavra'; return; } local URL z=$1 arq=$ZZTMP.howto URL='http://www.ibiblio.org/pub/Linux/docs/HOWTO/other-formats/html_single/' [ "$z" = '--atualiza' ] && { rm -f $arq ; z='' ; } [ -s "$arq" ] || { echo -n 'AGUARDE. Atualizando listagem...' $ZZWWWHTML "$URL" | sed -n '/ALT="\[TXT\]"/{ s/^.*HREF="\([^"]*\).*/\1/;p;}' > $arq ; echo ' feito!' ; } [ "$z" ] && { echozz $URL; grep -i "$z" $arq; } } # ---------------------------------------------------------------------------- # http://... - vários # Busca as últimas notícias sobre linux em páginas nacionais. 
# Obs.: cada página tem uma letra identificadora que pode ser passada como # parâmetro, identificando quais páginas você quer pesquisar: # # R)evista do linux I)nfoexame # O)linux linux in braZ)il # ponto B)r T)chelinux # C)ipsga N)otícias linux # # Uso: zznoticiaslinux [sites] # Ex.: zznoticiaslinux # zznoticiaslinux rci # ---------------------------------------------------------------------------- zznoticiaslinux(){ zzzz -z $1 zznoticiaslinux && return local URL limite n=5 s='brotcizn'; limite="sed ${n}q"; [ "$1" ] && s="$1" [ "$s" != "${s#*r}" ] && { URL='http://www.RevistaDoLinux.com.br' echo ; echozz "* RdL ($URL):"; $ZZWWWHTML $URL | sed '/^<.*class=noticias><b>/!d;s///;s,</b>.*,,' | $limite; } [ "$s" != "${s#*o}" ] && { URL='http://olinux.uol.com.br/home.html' echo ; echozz "* OLinux ($URL):"; $ZZWWWDUMP $URL | sed 's/^ *//;/^\[.*ÚLTIMAS/,/^\[.*CHAT /!d;/^\[/d;/^$/d' | $limite; } [ "$s" != "${s#*b}" ] && { URL='http://pontobr.org' echo ; echozz "* .BR ($URL):"; $ZZWWWHTML $URL | sed '/class="\(boldtext\|type4bigger\)"/!d;s/<[^>]*>//g;s/^[[:blank:]]*//'| $limite; } [ "$s" != "${s#*c}" ] && { URL='http://www.cipsga.org.br' echo ; echozz "* CIPSGA ($URL):"; $ZZWWWHTML $URL | sed '/^.*<tr><td bgcolor="88ccff"><b>/!d;s///;s,</b>.*,,' | $limite; } [ "$s" != "${s#*z}" ] && { URL='http://brlinux.linuxsecurity.com.br/noticias/' echo ; echozz "* Linux in Brazil ($URL):"; $ZZWWWDUMP $URL | sed -n 's/^ *//;/.org - Publicado por/{x;p;};h' | $limite; } [ "$s" != "${s#*i}" ] && { URL='http://info.abril.com.br' echo ; echozz "* InfoExame ($URL):"; $ZZWWWDUMP $URL | sed 's/^ *//;/^últimas/,/^download/s/^\[[^]]*] //p;d' | $limite; } [ "$s" != "${s#*t}" ] && { URL='http://www.tchelinux.com.br' echo ; echozz "* TcheLinux ($URL):"; $ZZWWWDUMP "$URL/backend.php" | sed '/<title>/!d;s/ *<[^>]*>//g;/^Tchelinux$/d' | $limite; } [ "$s" != "${s#*n}" ] && { URL='http://www.noticiaslinux.com.br' echo ; echozz "* Notícias Linux ($URL):"; $ZZWWWDUMP "$URL" | sed '/^[0-9][0-9]h[0-9][0-9]min/!d;s///;s/...//' | $limite; } } # ---------------------------------------------------------------------------- # http://... - vários # Busca as últimas notícias sobre linux em páginas em inglês. 
# Obs.: cada página tem uma letra identificadora que pode ser passada como # parâmetro, identificando quais páginas você quer pesquisar: # # F)reshMeat Linux D)aily News # S)lashDot Linux W)eekly News # N)ewsForge O)S News # # Uso: zzlinuxnews [sites] # Ex.: zzlinuxnews # zzlinuxnews fsn # ---------------------------------------------------------------------------- zzlinuxnews(){ zzzz -z $1 zzlinuxnews && return local URL limite n=5 s='fsndwo'; limite="sed ${n}q"; [ "$1" ] && s="$1" [ "$s" != "${s#*f}" ] && { URL='http://freshmeat.net' echo ; echozz "* FreshMeat ($URL):"; $ZZWWWHTML $URL | sed '/href="\/releases/!d;s/<[^>]*>//g;s/&nbsp;//g;s/^ *- //' | $limite ; } [ "$s" != "${s#*s}" ] && { URL='http://slashdot.org' echo ; echozz "* SlashDot ($URL):"; $ZZWWWHTML $URL | sed '/^[[:blank:]]*FACE="arial,helv/!d;s/^[^>]*>//;s/<[^>]*>//g s/&quot;/"/g'| $limite ;} [ "$s" != "${s#*n}" ] && { URL='http://newsforge.net' echo ; echozz "* NewsForge - ($URL):"; $ZZWWWHTML $URL | sed '/^<b>/!d;s///;s|</b>.*||;s/<[^>]*>//g' | $limite ; } [ "$s" != "${s#*d}" ] && { URL='http://www.linuxdailynews.com' echo ; echozz "* Linux Daily News ($URL):"; $ZZWWWHTML $URL | sed '/color="#101073">/!d;s,</b>.*,,;s/^.*<b> *//;s,</\?i>,,g' | $limite ; } [ "$s" != "${s#*w}" ] && { URL='http://lwn.net/Articles' echo ; echozz "* Linux Weekly News - ($URL):"; $ZZWWWHTML $URL | sed '/class="Headline"/!d;s/^ *//;s/<[^>]*>//g' | $limite ; } [ "$s" != "${s#*o}" ] && { URL='http://osnews.com' echo ; echozz "* OS News - ($URL):"; $ZZWWWDUMP $URL | sed -n 's/^ *Read similar articles *//p' | $limite ; } } # ---------------------------------------------------------------------------- # http://... - vários # Busca as últimas notícias em sites especializados em segurança. # Obs.: cada página tem uma letra identificadora que pode ser passada como # parâmetro, identificando quais páginas você quer pesquisar: # # Linux Security B)rasil Linux T)oday - Security # Linux S)ecurity Security F)ocus # C)ERT/CC # # Uso: zznoticiassec [sites] # Ex.: zznoticiassec # zznoticiassec bcf # ---------------------------------------------------------------------------- zznoticiassec(){ zzzz -z $1 zznoticiassec && return local URL limite n=5 s='bsctf'; limite="sed ${n}q"; [ "$1" ] && s="$1" [ "$s" != "${s#*b}" ] && { URL='http://www.linuxsecurity.com.br/share.php' echo ; echozz "* LinuxSecurity Brasil ($URL):"; $ZZWWWDUMP $URL | sed -n '/item/,$s,.*<title>\(.*\)</title>,\1,p' | $limite ; } [ "$s" != "${s#*s}" ] && { URL='http://www.linuxsecurity.com/linuxsecurity_advisories.rdf' echo ; echozz "* Linux Security ($URL):"; $ZZWWWDUMP $URL | sed -n '/item/,$s,.*<title>\(.*\)</title>,\1,p' | $limite ; } [ "$s" != "${s#*c}" ] && { URL='http://www.cert.org/channels/certcc.rdf' echo ; echozz "* CERT/CC ($URL):"; $ZZWWWDUMP $URL | sed -n '/item/,$s,.*<title>\(.*\)</title>,\1,p' | $limite ; } [ "$s" != "${s#*t}" ] && { URL='http://linuxtoday.com/security/index.html' echo ; echozz "* Linux Today - Security ($URL):"; $ZZWWWHTML $URL | sed -n '/class="nav"><B>/s/<[^>]*>//gp' | $limite ; } [ "$s" != "${s#*f}" ] && { URL='http://www.securityfocus.com/bid' echo ; echozz "* SecurityFocus Vulns Archive ($URL):"; $ZZWWWDUMP $URL | sed -n 's/^ *\([0-9]\{4\}-[0-9][0-9]-[0-9][0-9]\)/\1/p' | $limite ; } } # ---------------------------------------------------------------------------- # http://google.com # Retorna apenas os títulos e links do resultado da pesquisa no Google # Uso: zzgoogle [-n <número>] palavra(s) # Ex.: zzgoogle receita de bolo de abacaxi # zzgoogle -n 5 ramones 
papel higiênico cachorro # ---------------------------------------------------------------------------- zzgoogle(){ zzzz -z $1 zzgoogle && return [ "$1" ] || { echo 'uso: zzgoogle [-n <número>] palavra(s)'; return; } local TXT n=10 URL='http://www.google.com.br/search' [ "$1" = '-n' ] && { n=$2; shift; shift; } TXT=`echo "$*"| sed "$ZZSEDURL"` ; [ "$TXT" ] || return 0 $ZZWWWHTML "$URL?q=$TXT&num=$n&ie=ISO-8859-1&hl=pt-BR" | sed '/<p class=g>/!d;s|.*<p class=g>||;s|</a><br>.*||;h s|^<[^>]*>||;s|</\?b>||g;s|</a>.*||;x;s|^<a href=| |;s|>.*||;H;g;s|.*||;H;g' ## label # url # blank } # ---------------------------------------------------------------------------- # http://letssingit.com # Busca letras de músicas, procurando pelo nome da música # Obs.: se encontrar mais de uma, mostra a lista de possibilidades # Uso: zzletrademusica texto # Ex.: zzletrademusica punkrock # zzletrademusica kkk took my baby # ---------------------------------------------------------------------------- zzletrademusica(){ zzzz -z $1 zzletrademusica && return [ "$1" ] || { echo 'uso: zzletrademusica texto'; return; } local txt=`echo "$*"|sed "$ZZSEDURL"` URL=http://letssingit.com/cgi-exe/am.cgi $ZZWWWDUMP "$URL?a=search&p=1&s=$txt&l=song" | sed -n ' s/^ *//;/^artist /,/Page :/p;/^Artist *:/,${/IFRAME\|^\[params/d;p;}' } # ---------------------------------------------------------------------------- # http://tudoparana.globo.com/gazetadopovo/cadernog/tv.html # Consulta a programação do dia dos canais abertos da TV # Pode-se passar os canais e o horário que se quer consultar # Identificadores: B)and, C)nt, E)ducativa, G)lobo, R)ecord, S)bt, cU)ltura # Uso: zztv canal [horário] # Ex.: zztv bsu 19 # band, sbt e cultura, depois das 19:00 # zztv . 00 # todos os canais, depois da meia-noite # zztv . # todos os canais, o dia todo # ---------------------------------------------------------------------------- zztv(){ zzzz -z $1 zztv && return [ "$1" ] || { echo 'uso: zztv canal [horário] (ex. zztv bs 22)'; return; } local c h URL=http://tudoparana.globo.com/gazetadopovo/cadernog/sess-21.html h=`echo $2|sed 's/^\(..\).*/\1/;s/[^0-9]//g'` ; h="($h|$((h+1))|$((h+2)))" h=`echo $h|sed 's/24/00/;s/25/01/;s/26/02/;s/\<[0-9]\>/0&/g;s,[(|)],\\\\&,g'` c=`echo $1|sed 's/b/2,/;s/s/4,/;s/c/6,/;s/r/7,/;s/u/9,/;s/g/12,/;s/e/59,/ s/,$//;s@,@\\\\|@g'`; c=$(echo $c | sed 's/^\.$/..\\?/'); $ZZWWWDUMP $URL | sed -e 's/^ *//;s/[Cc][Aa][Nn][Aa][Ll]/CANAL/;/^[012C]/!d;/^C[^A]/d;/^C/i \'\ -e . 
| sed "/^CANAL \($c\) *$/,/^.$/!d;/^C/,/^$h/{/^C\|^$h/!d;};s/^\.//" } # ---------------------------------------------------------------------------- # http://www.acronymfinder.com # Dicionário de siglas, sobre qualquer assunto (como DVD, IMHO, OTAN, WYSIWYG) # Obs.: há um limite diário de consultas (10 acho) # Uso: zzsigla sigla # Ex.: zzsigla RTFM # ---------------------------------------------------------------------------- zzsigla(){ zzzz -z $1 zzsigla && return [ "$1" ] || { echo 'uso: zzsigla sigla'; return; } local URL=http://www.acronymfinder.com/af-query.asp $ZZWWWDUMP "$URL?String=exact&Acronym=$1&Find=Find" | sed -n 's/^ *//;s/ *\[go\.gif] *$//p' } # ---------------------------------------------------------------------------- # http://cheetah.eb.com # Toca um .wav que contém a pronúncia correta de uma palavra em inglês # Uso: zzpronuncia palavra # Ex.: zzpronuncia apple # ---------------------------------------------------------------------------- zzpronuncia(){ zzzz -z $1 zzpronuncia && return [ "$1" ] || { echo 'uso: zzpronuncia palavra'; return; } local URL URL2 arq dir tmpwav="$ZZTMP.$1.wav" URL='http://www.m-w.com/cgi-bin/dictionary' URL2='http://www.m-w.com/sound' [ -f "$tmpwav" ] || { arq=`$ZZWWWHTML "$URL?va=$1" | sed "/wav=$1/!d;s/wav=$1'.*/wav/;s/.*?//"` [ "$arq" ] || { echo "$1: palavra não encontrada"; return; } dir=`echo $arq | sed 's/^\(.\).*/\1/'` WAVURL="$URL2/$dir/$arq" ; echo "URL: $WAVURL" $ZZWWWHTML "$WAVURL" > $tmpwav ; echo "Gravado o arquivo '$tmpwav'" ; } play $tmpwav } # ---------------------------------------------------------------------------- # http://weather.noaa.gov/ # Mostra as condições do tempo em um determinado local # Se nenhum parâmetro for passado, são listados os países disponíveis. # Se só o país for especificado, são listados os lugares deste país. # Você também pode utilizar as siglas apresentadas para diferenciá-los. # Ex: SBPA = Porto Alegre. # Uso: zztempo <país> <local> # Ex.: zztempo 'United Kingdom' 'London City Airport' # zztempo brazil 'Curitiba Aeroporto' # zztempo brazil SBPA # ---------------------------------------------------------------------------- zztempo(){ zzzz -z $1 zztempo && return local arq_c P arq_p=$ZZTMP.tempo_p URL='http://weather.noaa.gov' [ -s "$arq_p" ] || { $ZZWWWHTML "$URL" | sed -n '/="country"/,/\/select/{ s/.*="\([a-zA-Z]*\)">\(.*\) <.*/\1 \2/p;}' > $arq_p; } [ "$1" ] || { sed 's/^[^ ]* \+//' $arq_p; return; } P=$(sed -n "s/^[^ ]* \+//;/^$1$/Ip" $arq_p) [ "$P" ] || { echozz "País [$1] não existe"; return; } LOCALE_P=$(sed -n "s/ \+$1//Ip" $arq_p); arq_c=$ZZTMP.tempo.$LOCALE_P [ -s "$arq_c" ] || { $ZZWWWHTML "$URL/weather/${LOCALE_P}_cc.html" | sed -n '/="cccc"/,/\/select/{//d;s/.*="\([a-zA-Z]*\)">/\1 /p;}' > $arq_c; } [ "$2" ] || { cat $arq_c; return; }; L=$(sed -n "/${2}/Ip" $arq_c) [ "$L" ] || { echozz "Local [$2] não existe"; return; } [ $(echo "$L" | wc -l) -eq 1 ] && { $ZZWWWDUMP "$URL/weather/current/${L%% *}.html" | sed -n '/Current Weather/,/24 Hour/{//d;/_\{5,\}/d;p;}' || echo "$L"; } } # ---------------------------------------------------------------------------- # http://www.nextel.com.br # Envia uma mensagem para um telefone NEXTEL (via rádio) # Obs.: o número especificado é o número próprio do telefone (não o ID!) 
# Uso: zznextel de para mensagem # Ex.: zznextel aurélio 554178787878 minha mensagem mala # ---------------------------------------------------------------------------- zznextel(){ zzzz -z $1 zznextel && return [ "$3" ] || { echo 'uso: zznextel de para mensagem'; return; } local from="$1" to="$2" URL=http://page.nextel.com.br/cgi-bin/sendPage_v3.cgi shift; shift; local subj=zznextel msg=`echo "$*"| sed "$ZZSEDURL"` echo "to=$to&from=$from&subject=$subj&message=$msg&count=0&Enviar=Enviar" | $ZZWWWPOST "$URL" | sed '1,/^ *CENTRAL/d;s/.*Individual/ /;N;q' } # ---------------------------------------------------------------------------- # #### T R A D U T O R E S e D I C I O N Á R I O S (internet) # ---------------------------------------------------------------------------- # ---------------------------------------------------------------------------- # http://babelfish.altavista.digital.com # Faz traduções de palavras/frases/textos em português e inglês # Uso: zzdicbabelfish [i] texto # Ex.: zzdicbabelfish my dog is green # zzdicbabelfish i falcão detona! # ---------------------------------------------------------------------------- zzdicbabelfish(){ zzzz -z $1 zzdicbabelfish && return [ "$1" ] || { echo 'uso: zzdicbabelfish [i] palavra(s)'; return; } local URL='http://babelfish.altavista.com/babelfish/tr' L=en_pt FIM='^<\/div>' local INI='^.*<Div style=padding[^>]*>'; [ "$1" = 'i' ] && { shift; L=pt_en; } local TXT=`echo "$*"| sed "$ZZSEDURL"` $ZZWWWHTML "$URL?doit=done&tt=urltext&intl=1&urltext=$TXT&lp=$L" | sed -n "/$INI/,/$FIM/{/$FIM\|^$/d;/$INI/{s/<[^>]*>//g;p;};}" } # ---------------------------------------------------------------------------- # http://www.babylon.com # Tradução de palavras em inglês para um monte de idiomas: # francês, alemão, japonês, italiano, hebreu, espanhol, holandês e # português. O padrão é o português, é claro. 
# Uso: zzdicbabylon [idioma] palavra # Ex.: zzdicbabylon hardcore # zzdicbabylon jap tree # ---------------------------------------------------------------------------- zzdicbabylon(){ zzzz -z $1 zzdicbabylon && return [ "$1" ] || { echo -e "zzdicbabylon [idioma] palavra idioma = fre ger jap ita heb spa dut ptg" && return; } local L=ptg ; [ "$2" ] && L=$1 && shift $ZZWWWDUMP "http://www.babylon.com/trans/bwt.cgi?$L$1" | sed '1,4d;s/^ *//;/^\($\|_\+\)/d;s/^/ /' } # ---------------------------------------------------------------------------- # http://www.dictionary.com # Definições de palavras em inglês, com pesquisa em *vários* bancos de dados # Uso: zzdicdict palavra # Ex.: zzdicdict hardcore # ---------------------------------------------------------------------------- zzdicdict(){ zzzz -z $1 zzdicdict && return [ "$1" ] || { echo "zzdicdict palavra" && return; } local INI='^ *Found [0-9]\+ entr\(y\|ies\)' FIM='^ *Try your search' $ZZWWWDUMP -width=72 "http://www.dictionary.com/cgi-bin/dict.pl?db=*&term=$*"| sed -n "/$INI/,/$FIM/{/$INI\|$FIM/d;p;}" } # ---------------------------------------------------------------------------- # http://www.academia.org.br/vocabula.htm # Dicionário da ABL - Academia Brasileira de Letras # Uso: zzdicabl palavra # Ex.: zzdicabl cabeça-de- # ---------------------------------------------------------------------------- zzdicabl(){ zzzz -z $1 zzdicabl && return [ "$1" ] || { echo 'uso: zzdicabl palavra'; return; } local URL='http://www.academia.org.br/scripts/volta_abl_org.asp' echo "palavra=$*" | $ZZWWWPOST $URL | sed '1,5d;/^ *\./,$d;s/^ */ /' } # ---------------------------------------------------------------------------- # http://www.portoeditora.pt/dol # Dicionário de português (de Portugal) # Uso: zzdicportugues palavra # Ex.: zzdicportugues bolacha # ---------------------------------------------------------------------------- zzdicportugues(){ zzzz -z $1 zzdicportugues && return [ "$1" ] || { echo 'uso: zzdicportugues palavra'; return; } local URL='http://www.priberam.pt/dlpo/definir_resultados.aspx' local INI='^\(Não \)\?[Ff]o\(i\|ram\) encontrad' FIM='^Imprimir *$' $ZZWWWDUMP "$URL?pal=$1" | sed -n "s/^ *//;/^$/d; s/\[transparent.gif]//;/$INI/,/$FIM/{/$INI\|$FIM/d;p;}" } # ---------------------------------------------------------------------------- # http://catb.org/jargon/ # Dicionário de jargões de informática, em inglês # Uso: zzdicjargon palavra(s) # Ex.: zzdicjargon vi # zzdicjargon all your base are belong to us # ---------------------------------------------------------------------------- zzdicjargon(){ zzzz -z $1 zzdicjargon && return [ "$1" ] || { echo 'uso: zzdicjargon palavra'; return; } local arq=$ZZTMP.jargonfile URL='http://catb.org/jargon/html' local achei achei2 num mais TXT=`echo "$*" | sed 's/ /-/g'` [ -s "$arq" ] || { echo -n 'AGUARDE. Atualizando listagem...' $ZZWWWLIST "$URL/go01.html" | sed '/^ *[0-9]\+\. /!d;s,.*/html/,,;/^[A-Z0]\//!d' > $arq ; } achei=`grep -i "$TXT" $arq` ; num=`echo "$achei" | sed -n '$='` [ "$achei" ] || return ; [ $num -gt 1 ] && { mais=$achei achei2=`echo "$achei" | grep -w "$TXT" | sed q` [ "$achei2" ] && achei="$achei2" && num=1 ; } if [ $num -eq 1 ]; then $ZZWWWDUMP -width=72 "$URL/$achei" | sed '1,/_\{9\}/d;/_\{9\}/,$d' ; [ "$mais" ] && echozz '\nTermos parecidos:' else echozz 'Achei mais de um! 
Escolha qual vai querer:' ; fi [ "$mais" ] && echo "$mais" | sed 's/..//;s/\.html$//' } # ---------------------------------------------------------------------------- # Usa todas as funções de dicionário e tradução de uma vez # Uso: zzdictodos palavra # Ex.: zzdictodos Linux # ---------------------------------------------------------------------------- zzdictodos(){ zzzz -z $1 zzdictodos && return [ "$1" ] || { echo 'uso: zzdictodos palavra'; return; } local D ; for D in babelfish babylon jargon abl portugues dict do echozz "zzdic$D:"; zzdic$D $1; done } # ---------------------------------------------------------------------------- # http://aurelio.net/doc/ramones.txt # Procura frases de letras de músicas do ramones # Uso: zzramones [palavra] # Ex.: zzramones punk # zzramones # ---------------------------------------------------------------------------- zzramones(){ zzzz -z $1 zzramones && return local txt n url='http://aurelio.net/doc/ramones.txt' arq=$ZZTMP.ramones [ -s "$arq" ] || { echo -n 'AGUARDE. Atualizando listagem...' $ZZWWWDUMP "$url" > $arq ; echo ' feito!'; }; txt=`grep -iw "${1:-.}" $arq` n=`echo "$txt" | sed -n $=`; n=$((RANDOM%n)); echo "$txt" | sed -n ${n}p } # ---------------------------------------------------------------------------- ## lidando com a chamada pelo executável if [ "$1" ]; then if [ "$1" = '--help' -o "$1" = '-h' ]; then $0 elif [ "$1" = '--version' -o "$1" = '-v' ]; then echo -n 'funções ZZ v'; zzzz | sed '/versã/!d;s/.* //' else func="zz${1#zz}" ; type $func >&- 2>&- || { # a função existe? echo "ERRO: a função '$func' não existe! (tente --help)"; exit 1; } shift ; $func "$@" # vai! fi ## chamando do executável sem argumentos (também para --help) elif [ "${0##*/}" != 'bash' -a "${0#-}" = "$0" ]; then echo " uso: funcoeszz <função> [<parâmetros>] funcoeszz <função> --help dica: inclua as funções ZZ no seu login shell, e depois chame-as diretamente pelo nome: prompt$ funcoeszz zzzz --bashrc prompt$ source ~/.bashrc prompt$ zz<TAB><TAB> Obs.: funcoeszz zzzz --tcshrc também funciona lista das funções: " zzzz | sed '1,/(( fu/d' exit 0 fi
edersg/website
download/funcoeszz-4.0219.sh
Shell
gpl-2.0
67,720
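A short usage sketch for the date/time helpers documented in the script above, assuming the file has been sourced into an interactive bash session. Every invocation is copied from the script's own "Ex.:" comments; no output is shown.

source funcoeszz-4.0219.sh          # load the zz* functions into the current shell
zzdata hoje + 45                    # date 45 days from today (dd/mm/aaaa)
zzdata 01/03/2000 - 11/11/1999      # number of days between two dates
zzhora -r agora + 57:00             # wall-clock time 57 hours from now
zzcalcula '2^2*(4-1)'               # bc-backed arithmetic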
#!/bin/bash -e
#
# Script to upload tarball of assembly build to static.druid.io for serving
#

s3cmd put services/target/druid-services-*-bin.tar.gz s3://static.druid.io/artifacts/releases/
guod08/druid
upload.sh
Shell
gpl-2.0
191
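The upload itself is the single s3cmd put above; one way to confirm the artifact landed, assuming s3cmd is already configured with credentials for that bucket, is to list the target prefix afterwards:

s3cmd ls s3://static.druid.io/artifacts/releases/    # sanity-check that the tarball arrived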
#!/bin/sh
#
# Copyright (C) 2010-2012 Internet Systems Consortium, Inc. ("ISC")
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.

# $Id: tests.sh,v 1.2.76.2 2011/04/19 23:47:31 tbox Exp $

SYSTEMTESTTOP=..
. $SYSTEMTESTTOP/conf.sh

status=0
n=0

rm -f dig.out.*

DIGOPTS="+tcp +noadd +nosea +nostat +nocmd +dnssec -p 5300"

# Check the example.com. domain

echo "I:checking DNAME at apex works ($n)"
ret=0
$DIG $DIGOPTS +norec foo.example.com. \
	@10.53.0.1 a > dig.out.ns1.test$n || ret=1
grep "status: NOERROR" dig.out.ns1.test$n > /dev/null || ret=1
grep "example.com..*DNAME.*example.net." dig.out.ns1.test$n > /dev/null || ret=1
grep "foo.example.com..*CNAME.*foo.example.net." dig.out.ns1.test$n > /dev/null || ret=1
grep "flags:[^;]* aa[ ;]" dig.out.ns1.test$n > /dev/null || ret=1
n=`expr $n + 1`
if [ $ret != 0 ]; then echo "I:failed"; fi
status=`expr $status + $ret`

echo "I:exit status: $status"
exit $status
phra/802_21
myODTONE/app/dhcp_usr/libs/bind/bind-9.8.4-P1/bin/tests/system/dlz/tests.sh
Shell
gpl-2.0
1,580
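The test follows the usual BIND system-test pattern: run dig against the test server, grep the captured output, set ret on any mismatch, and fold ret into status. Below is a sketch of one more check written in the same style; bar.example.com and its expected synthesized CNAME are placeholders, not records from the original dlz test zone.

n=`expr $n + 1`
echo "I:checking DNAME substitution for another owner name ($n)"
ret=0
$DIG $DIGOPTS +norec bar.example.com. \
	@10.53.0.1 a > dig.out.ns1.test$n || ret=1
grep "status: NOERROR" dig.out.ns1.test$n > /dev/null || ret=1
grep "bar.example.com..*CNAME.*bar.example.net." dig.out.ns1.test$n > /dev/null || ret=1
if [ $ret != 0 ]; then echo "I:failed"; fi
status=`expr $status + $ret`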
#!/bin/bash
# this file is to install SAT-Assembler in the current folder.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $DIR
chmod 755 hmmer3_pipeline.sh
chmod 755 parse_hmm_files.py
chmod 755 metadomain.py
chmod 755 check_python_packages.py
chmod 755 analyze_hmmscore_file.py
chmod 755 HMMSCORE/hmmscore
zhangy72/SALT
chmod_scripts.sh
Shell
gpl-2.0
321
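The installer only marks a fixed list of files executable; the same effect can be expressed as a loop over that list (a sketch, equivalent in behavior to the chmod lines above, using only file names taken from the script):

for f in hmmer3_pipeline.sh parse_hmm_files.py metadomain.py \
         check_python_packages.py analyze_hmmscore_file.py HMMSCORE/hmmscore; do
    chmod 755 "$f"
done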
#! /bin/sh
# Copyright (C) 2011-2017 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# TAP support:
#  - interactions between "TAP plan with SKIP" and logging of earlier or
#    later TAP or non-TAP text

. test-init.sh

. tap-setup.sh

echo TEST_LOG_DRIVER_FLAGS = --comments >> Makefile

cat > foo.test <<END
1..0
a non-TAP line
# a comment
END

cat > foo2.test <<END
a non-TAP line 2
# a comment 2
1..0
END

cat > bar.test <<END
# an early comment
an early non-TAP line
 $tab
1..0 # SKIP
# a later comment
a later non-TAP line
END

run_make -O TESTS='foo.test foo2.test bar.test' check

count_test_results total=3 pass=0 fail=0 xpass=0 xfail=0 skip=3 error=0

grep '^# foo\.test: a comment$' stdout
grep '^# foo2\.test: a comment 2$' stdout
grep '^# bar\.test: an early comment$' stdout
grep '^# bar\.test: a later comment$' stdout

cat foo.log
cat foo2.log
cat bar.log

grep '^a non-TAP line$' foo.log
grep '^# a comment$' foo.log
grep '^a non-TAP line 2$' foo2.log
grep '^# a comment 2$' foo2.log
grep '^# an early comment' bar.log
grep '^an early non-TAP line$' bar.log
grep '^# a later comment' bar.log
grep '^a later non-TAP line$' bar.log
grep "^ $tab$" bar.log

:
Starlink/automake
t/tap-planskip-and-logging.sh
Shell
gpl-2.0
1,778
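For contrast with the 1..0 and "1..0 # SKIP" plans exercised above, a minimal TAP script whose results actually count as passes would look like the sketch below; this illustrates the TAP format only and is not a file from the automake test suite.

cat > baz.test <<END
1..2
# a diagnostic comment
ok 1 - first check
ok 2 - second check
END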
#!/bin/bash
#
# Copyright (C) 2020 Google LLC
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#
# test hw checksum offload edge cases of a device under test ("tdev")
# see individual cases below for details

NS_TDEV=tdev
NS_PEER=peer

ADDR_V4_TDEV=192.168.1.1
ADDR_V4_PEER=192.168.1.2

ADDR_V6_TDEV=fdaa::1
ADDR_V6_PEER=fdaa::2

set -eu

do_test() {
	local -r ipver=$1
	local -r dir=$2

	shift
	shift

	if [[ "${dir}" == "to_tdev" ]]; then
		src_ns="${NS_PEER}"
		dst_ns="${NS_TDEV}"
		if [[ "${ipver}" == "-4" ]]; then
			saddr="${ADDR_V4_PEER}"
			daddr="${ADDR_V4_TDEV}"
		else
			saddr="${ADDR_V6_PEER}"
			daddr="${ADDR_V6_TDEV}"
		fi
	else
		src_ns="${NS_TDEV}"
		dst_ns="${NS_PEER}"
		if [[ "${ipver}" == "-4" ]]; then
			saddr="${ADDR_V4_TDEV}"
			daddr="${ADDR_V4_PEER}"
		else
			saddr="${ADDR_V6_TDEV}"
			daddr="${ADDR_V6_PEER}"
		fi
	fi

	# verify udp checksum 0 is sent as 0xFFFF
	# argument '-Z' selects a source port to cause this checksum
	ip netns exec "${dst_ns}" ./csum "${ipver}" -u -S "${saddr}" -D "${daddr}" -R "$@" &
	sleep 0.2
	ip netns exec "${src_ns}" ./csum "${ipver}" -u -S "${saddr}" -D "${daddr}" -T "$@"

	wait
}

do_check_preconditions

# test receive h/w checksumming:
#
# - udp packets with csum that adds up to zero are sent with csum 0xFFFF,
#   to distinguish these packets from checksum disabled.
#   arg -Z selects a source port to cause the condition.
#   arg -U sends from a udp socket to use h/w checksum offload
do_test -4 to_peer -U -Z
do_test -6 to_peer -U -Z

# test receive h/w checksumming:
# - packets with correct csum are delivered
# - packets with bad csum are dropped

# test rx hw csum: accept packets with correct csum
do_test -4 to_tdev
do_test -6 to_tdev

do_test -4 to_tdev -E
do_test -6 to_tdev -E
wdebruij/kerneltools
tests/csum.sh
Shell
gpl-2.0
2,416
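The script assumes two network namespaces (tdev and peer) already wired back-to-back plus a ./csum helper binary; neither that plumbing nor do_check_preconditions appears in this file. Below is a hypothetical sketch of the veth/namespace setup such a test needs, reusing only the addresses defined above — it is not the project's actual setup code.

# Hypothetical setup, not taken from kerneltools: two namespaces joined by a veth pair.
ip netns add tdev
ip netns add peer
ip link add veth0 type veth peer name veth1
ip link set veth0 netns tdev
ip link set veth1 netns peer
ip netns exec tdev ip addr add 192.168.1.1/24 dev veth0
ip netns exec peer ip addr add 192.168.1.2/24 dev veth1
ip netns exec tdev ip addr add fdaa::1/64 dev veth0
ip netns exec peer ip addr add fdaa::2/64 dev veth1
ip netns exec tdev ip link set veth0 up
ip netns exec peer ip link set veth1 up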
#!/bin/bash
#
#  permfix.sh
#
#  Copyright 2015 Antônio Sérgio Garcia Ferreira <serginho@serginho-desktop>
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
#  MA 02110-1301, USA.
#
#  2015/09/04 13:31:32
#
# ---
#
# Changes the permissions of the non-hidden files and directories under $HOME.
# System permissions usually come standardized as 755 or 644 depending on the
# file type. This script removes the third permission digit (others) from the
# XXX octal, setting it to zero, so that only the owner and group members can
# access the files.
#
# Some directories are specific to my own system. If you want to use this
# script, adjust it to better suit your needs.
#

function print_usage {
    echo "Uso: permfix [OPÇÃO] [DIRETÓRIO]"
    echo 'Altera a permissão dos arquivos e diretórios não ocultos de DIRETÓRIO.'
    echo "Sem argumentos o programa entende DIRETÓRIO como \$HOME"
    echo ""
    echo " -h, --help     mostra esta mensagem "
    echo " -v, --verbose  imprime na tela as modificações"
    echo ""
    echo "Exemplos:"
    echo "  permfix -v"
    echo "  permfix /home/serginho"
    echo "  permfix --help"
    exit 0
}

prefix="chmod"

if [ $# -gt 0 ]; then
    while [ "${1+defined}" ]; do
        case "$1" in
            -h | --help)
                print_usage
                ;;
            -v | --verbose)
                prefix="chmod -v"
                shift
                ;;
            *)
                dir="$1"
                if [ ! -d "$dir" ]; then
                    print_usage
                fi
                shift
        esac
    done
fi

if [ -z "$dir" ]; then
    dir="$HOME"
fi

# The -print0 option prints each file name followed by a null character.
# read then consumes the input using the null character as the delimiter.
find "$dir" -print0 | while read -d $'\0' file
do
    case "$file" in
        # do nothing if the path matches the following patterns
        $HOME/.*|$HOME/bin*|*.git*)
            ;;
        # if it is a directory, also set the execute bit
        *)
            mod=640
            if [ -d "$file" ]; then
                mod=750
            fi
            command $prefix $mod "$file"
            ;;
    esac
done

exit 0
antoniosergius/sh
permfix.sh
Shell
gpl-2.0
2,790
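The find | while loop above decides 640 versus 750 per path; ignoring the $HOME/., $HOME/bin and .git exclusions handled by the case statement, the same policy can be sketched with two find -type passes:

find "$dir" -type d -exec chmod 750 {} +    # directories keep the execute bit for owner/group
find "$dir" -type f -exec chmod 640 {} +    # regular files become owner/group readable only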
#! /bin/sh # # autogen.sh - GNU build system preparation for Lightning. # Copyright (C) 2003 - 2004 Tuomo Venäläinen # # See the file COPYING for information about using this software. # # This module uses the following GNU tools: Automake, Autoconf, gettext, # intltool, and libtool. # # <WARNING> # - do NOT use the -C and -D flags unless you have read the function # autogen_clean. # </WARNING> # # <TODO> # - update the list below. # </TODO> # # The following is a list of files installed by GNU build tools that should be # distributed with software packages: # Makefile.am # Makefile.in # aclocal.m4 # config.guess # config.sub # configure # configure.ac # depcomp # install-sh # intltool-extract.in # intltool-merge.in # intltool-update.in # libtool # ltmain.sh # missing # mkinstalldirs # # <TODO> # - support ACLOCAL_AMFLAGS="-I m4dir" in Makefile.am. # - clean up the function interfaces. # </TODO> # # GNU build tool command-line options. aclocal_opts= autoconf_opts= autoheader_opts= #case $CC in #xlc) automake_opts="--include-deps" ;; #esac automake_opts="--copy --add-missing" autopoint_opts= configure_opts="$configure_default_opts" gettextize_opts="--copy --no-changelog --intl" intltoolize_opts="--copy --automake" libtoolize_opts="--copy --automake" autogen_version="0.0.0" autogen_topdir=`pwd` autogen_potfile_exts="c h" autogen_potfile_excludedirs="intl po" # autogen command-line options. autogen_opt_autopoint= autogen_opt_clean= autogen_opt_configure= autogen_opt_debug= autogen_opt_dist_clean= autogen_opt_dry_run= autogen_opt_force= autogen_opt_gettextize= autogen_opt_intltoolize= autogen_opt_make= autogen_opt_silent= autogen_opt_verbose= HAVE_ACLOCAL=0 HAVE_AUTOCONF=0 HAVE_AUTOHEADER=0 HAVE_AUTOMAKE=0 HAVE_AUTOPOINT=0 HAVE_GETTEXT=0 HAVE_GETTEXTIZE=0 HAVE_INTLTOOLIZE=0 HAVE_LIBTOOL=0 HAVE_LIBTOOLIZE=0 USE_GNU_GETTEXT=0 autogen_main() { configure_ac_files= dir= autogen_get_opts $@ \ && autogen_print_start_message \ && autogen_check_tools \ && { configure_ac_files=`find $autogen_topdir -name configure.ac` if test -n "$configure_ac_files" ; then for file in $configure_ac_files do dir=`dirname $file` if test "$autogen_opt_silent" != "yes" ; then lsh_message "entering $dir" lsh_message "'cd $dir'" fi cd $dir \ && autogen_check_src \ && autogen_clean \ && autogen_create_version_files \ && autogen_exec_tools \ && autogen_create_potfiles_in done cd $autogen_topdir fi } return 0; } autogen_get_opts() { if test "$autogen_running" != "yes" ; then autogen_opt_silent="yes" fi for arg do case $arg in -A | --autopoint) if test -z "$autogen_opt_autopoint" ; then autogen_opt_autopoint="yes" fi ;; -C | --clean) if test -z "$autogen_opt_clean" ; then autogen_opt_clean="yes" fi ;; -c | --configure) if test -z "$autogen_opt_configure" ; then autogen_opt_configure="yes" fi ;; -d | --debug) if test -z "$autogen_opt_debug" ; then lsh_message "enabling shell trace mode" set -x autogen_opt_debug="yes" fi ;; -D | --dist-clean) if test -z "$autogen_opt_dist_clean" ; then autogen_opt_dist_clean="yes" autogen_opt_clean="yes" fi ;; -n | --dry-run) if test -z "$autogen_opt_dry_run" ; then autogen_opt_dry_run="yes" fi ;; -f | --force) if test -z "$autogen_opt_force" ; then autoconf_opts="--force $autoconf_opts" autoheader_opts="--force $autoheader_opts" automake_opts="--force-missing $automake_opts" gettextize_opts="--force $gettextize_opts" intltoolize_opts="--force $intltoolize_opts" libtoolize_opts="--force $libtoolize_opts" autogen_opt_force="yes" fi ;; -G | --gettextize) if test -z "$autogen_opt_gettextize" ; then 
autogen_opt_gettextize="yes" fi ;; -h | --help) cat <<EOF Usage: $lsh_scriptname [option] ... Prepare $autogen_distribution for 'configure' or compile. -A, --autopoint run 'autopoint'. Not run with 'gettextize'. -c, --configure run 'configure'. -C, --clean clean extra files and exit. -d, --debug enable shell trace mode for debugging. -D, --dist-clean clean non-distribution files and exit. -n, --dry-run print commands without running them. -f, --force force reconfiguration. -G, --gettextize run 'gettextize'. -h, --help print this help message. -I, --intltoolize run 'intltoolize'. -m, --make run 'make'. -q, -s, --quiet, --silent work silently. Passed to GNU tools. -v, --verbose be verbose. VERY noisy. Passed to GNU tools. -V, --version print version information. Passed to GNU tools. Other options are passed to the 'configure' script. Default 'configure' options: $configure_default_opts EOF return 1 ;; -I | --intltoolize) if test -z "$autogen_opt_intltoolize" ; then autogen_opt_intltoolize="yes" fi ;; -m | --make) if test -z "$autogen_opt_make" ; then autogen_opt_make="yes" fi ;; -q | -s | --quiet | --silent) if test -z "$autogen_opt_silent" ; then configure_opts="$configure_opts --silent" autogen_opt_silent="yes" fi ;; -v | --verbose) # # <FIXME>: # - does --verbose apply to configure as well? # </FIXME> # if test -z "$autogen_opt_verbose" ; then aclocal_opts="--verbose $aclocal_opts" autoconf_opts="--verbose $autoconf_opts" autoheader_opts="--verbose $autoheader_opts" automake_opts="--verbose $automake_opts" autogen_opt_verbose="yes" fi ;; -V | --version) # echo "autogen.sh $autogen_version" autogen_print_versions return 1 ;; *) configure_opts="$configure_opts $arg" ;; esac done return 0; } autogen_print_versions() { aclocal --version | grep GNU autoconf --version | grep GNU autoheader --version | grep GNU automake --version | grep GNU autopoint --version | grep GNU gettext --version | grep GNU gettextize --version | grep GNU intltoolize --version | grep GNU libtool --version | grep GNU libtoolize --version | grep GNU echo "$autogen_distribution autogen.sh $autogen_version" } autogen_print_start_message() { if test "$autogen_opt_silent" != "yes" ; then if test "$autogen_opt_dist_clean" = "yes" ; then lsh_message "cleaning $autogen_distribution non-distribution files..." elif test "$autogen_opt_clean" = "yes" ; then lsh_message "cleaning $autogen_distribution extra files..." elif test "$autogen_opt_configure" = "yes" ; then if test "$autogen_opt_make" = "yes" ; then lsh_message "building $autogen_distribution..." else lsh_message "configuring $autogen_distribution for compile..." fi else lsh_message "preparing $autogen_distribution for 'configure'..." 
fi lsh_message "packages: $autogen_packages" fi return 0; } autogen_check_tools() { (aclocal --version) < /dev/null > /dev/null 2>&1 && HAVE_ACLOCAL=1 (autoconf --version) < /dev/null > /dev/null 2>&1 && HAVE_AUTOCONF=1 (autoheader --version) < /dev/null > /dev/null 2>&1 && HAVE_AUTOHEADER=1 (automake --version) < /dev/null > /dev/null 2>&1 && HAVE_AUTOMAKE=1 (autopoint --version) < /dev/null > /dev/null 2>&1 && HAVE_AUTOPOINT=1 (gettext --version) < /dev/null > /dev/null 2>&1 && HAVE_GETTEXT=1 (gettextize --version) < /dev/null > /dev/null 2>&1 && HAVE_GETTEXTIZE=1 (intltoolize --version) < /dev/null > /dev/null 2>&1 && HAVE_INTLTOOLIZE=1 (libtool --version) < /dev/null > /dev/null 2>&1 && HAVE_LIBTOOL=1 (libtoolize --version) < /dev/null > /dev/null 2>&1 && HAVE_LIBTOOLIZE=1 return 0; } autogen_check_src() { dir=`pwd` uniquefile= if test -f "ignore.ag" ; then lsh_message "$dir/ignore.ag exists, skipping $dir" return 1 fi if test ! -f "configure.ac" ; then lsh_message "$dir/configure.ac doesn't exist" lsh_message "please run autogen.sh from the top directory of your source tree" return 1 fi uniquefile=`$lsh_sed -n 's,^AC_INIT(\(.*\)),\1,p' < configure.ac` if test -n "$uniquefile" ; then if test ! -f "$uniquefile" ; then lsh_message "$dir/$uniquefile doesn't exist" lsh_message "please check the call to AC_INIT in $autogen_srcdir/configure.ac" return 1 fi else lsh_message "$dir/configure.ac doesn't call AC_INIT" lsh_message "please fix $dir/configure.ac" return 1 fi return 0; } autogen_clean() { configure_ac_files= dir= currentdir= aclocal_m4_files= if test "$autogen_opt_clean" = "yes" || test "$autogen_opt_dist_clean" = "yes" ; then configure_ac_files=`find $autogen_topdir -name configure.ac` if test -n "$configure_ac_files" ; then for file in $configure_ac_files do dir=`dirname $file` currentdir=`pwd` if test "$dir" = "$currentdir" ; then : else if test "$autogen_opt_silent" != "yes" ; then lsh_message "entering $dir" lsh_message "'cd $dir'" fi cd $dir fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$autogen_opt_dist_clean" != "yes" ; then autogen_clean_dirs fi autogen_clean_files if test -f "./clean.sh" ; then ./clean.sh fi fi done if test "$autogen_opt_dist_clean" != "yes" ; then aclocal_m4_files=`find $autogen_topdir -name aclocal.m4` if test -n "$aclocal_m4_files" ; then if test "$autogen_opt_silent" != "yes" ; then lsh_message "cleaning 'aclocal.m4' files" fi for file in $aclocal_m4_files do dir=`dirname $file` currentdir=`pwd` if test "$dir" = "$currentdir" ; then : else if test "$autogen_opt_silent" != "yes" ; then lsh_message "entering $dir" lsh_message "'cd $dir'" fi cd $dir fi if test "$autogen_opt_dry_run" != "yes" ; then rm -f aclocal.m4 fi done fi fi cd $autogen_topdir fi return 1 fi return 0; } autogen_clean_dirs() { configdirs="`$lsh_sed -n 's,^AC_CONFIG_AUX_DIR(\(.*\)),\1,gp' < configure.ac`" if test -f "aclocal.m4" ; then configdirs="$configdirs `$lsh_sed -n 's,^AC_CONFIG_AUX_DIR(\(.*\)),\1,gp' < aclocal.m4`" fi if test -n "$configdirs" ; then for configdir in $configdirs do if test "$autogen_opt_dry_run" != "yes" ; then rm -rf $configdir fi done fi return 0; } autogen_clean_files() { if test "$autogen_opt_dry_run" != "yes" ; then if test -f Makefile ; then make clean fi if test -x "clean.sh" ; then ./clean.sh fi if test "$autogen_opt_dist_clean" != "yes" ; then rm -f `find . -name Makefile.in` rm -f `find . -name POTFILES.in` # rm -f `find . -name aclocal.m4` rm -f `find . -name autoscan.log` rm -f `find . -name configure` rm -f `find . 
-name configure.scan` rm -f `find . -name core` rm -f `find . -name version.h` rm -f `find . -name version.m4` fi rm -f `find . -name Makefile` rm -f `find . -name autoconf.h\*` rm -f `find . -name config.cache` rm -f `find . -name config.h\*` rm -f `find . -name config.log` rm -f `find . -name config.status` rm -f `find . -name configure.sh` rm -f `find . -name .dirstamp` rm -f `find . -name libtool` rm -f `find . -name stamp-h\*` rm -f `find . -name \*~` rm -f `find . -name \*.o` rm -f `find . -name \*.lo` rm -f `find . -name \*.la` rm -f `find . -name \*.a` rm -rf `find . -name .deps` rm -rf `find . -name .libs` rm -rf `find . -name autom4te.cache` fi return 0; } # # <FIXME> # - this may destroy version.m4 files unnecessarily... # </FIXME> # autogen_create_version_files() { loop=0 line= header= major= minor= patchlevel= package_uppercase= major_macro= minor_macro= patchlevel_macro= set_version_macro= version_macro= version_info_macro= header_basename= header_macro= if test ! -f "versions.ag" ; then return 0 fi cat > m4/version.m4 <<EOF # # NOTE: this file was generated by $lsh_scriptname. DO NOT EDIT! # # version.m4 - version information for $autogen_distribution. # # See the file COPYING for information about using this software. EOF for package in $autogen_packages do line=`grep "^$package" versions.ag` if test -n "$line" ; then header=`echo $line | $lsh_sed -n 's,^\(['$lsh_letters']*\):\(['$lsh_letters'/_.]*\):\(['$lsh_digits']*\)\.\(['$lsh_digits']*\)\.\(['$lsh_digits']*\),\2,p'` major=`echo $line | $lsh_sed -n 's,^\(['$lsh_letters']*\):\(['$lsh_letters'/_.]*\):\(['$lsh_digits']*\)\.\(['$lsh_digits']*\)\.\(['$lsh_digits']*\),\3,p'` minor=`echo $line | $lsh_sed -n 's,^\(['$lsh_letters']*\):\(['$lsh_letters'/_.]*\):\(['$lsh_digits']*\)\.\(['$lsh_digits']*\)\.\(['$lsh_digits']*\),\3,p'` patchlevel=`echo $line | $lsh_sed -n 's,^\(['$lsh_letters']*\):\(['$lsh_letters'/_.]*\):\(['$lsh_digits']*\)\.\(['$lsh_digits']*\)\.\(['$lsh_digits']*\),\3,p'` package_uppercase=`echo $package | $lsh_sed 'y,['$lsh_lowercase_letters'],['$lsh_uppercase_letters'],'` major_macro=__$package_uppercase'_MAJOR__' minor_macro=__$package_uppercase'_MINOR__' patchlevel_macro=__$package_uppercase'_PATCHLEVEL__' set_version_macro=$package_uppercase'_SET_VERSION' version_macro=$package_uppercase'_VERSION' version_info_macro=$package_uppercase'_VERSION_INFO' if test -n "$header" ; then lsh_message "creating $header" header_basename=`$lsh_basename $header` header_macro=`echo $package_uppercase'_'$header_basename | $lsh_sed 'y,['$lsh_lowercase_letters'/.],['$lsh_uppercase_letters'__],'` cat > $header <<EOF /* * NOTE: this file was generated by $lsh_scriptname. DO NOT EDIT! * * $header_basename - version information for $package. * * See the file COPYING for information about using this software. */ #ifndef $header_macro #define $header_macro #define $major_macro $major #define $minor_macro $minor #define $patchlevel_macro $patchlevel #endif /* $header_macro */ EOF fi if test "$loop" -eq 0 ; then lsh_message "creating $autogen_topdir/m4/version.m4" loop=1 else lsh_message "appending to $autogen_topdir/m4/version.m4" fi cat >> m4/version.m4 <<EOF # Set $package version information. 
AC_DEFUN([$set_version_macro], [ $version_macro="$major.$minor.$patchlevel" AC_SUBST($version_macro) $version_info_macro="$major:$minor:$patchlevel" AC_SUBST($version_info_macro) ]) EOF fi done return 0; } autogen_exec_tools() { autogen_exec_aclocal \ && autogen_create_config_dirs \ && autogen_check_gnu_gettext \ && autogen_exec_gettextize \ && autogen_exec_intltoolize \ && autogen_exec_autopoint \ && autogen_exec_libtoolize \ && autogen_exec_autoheader \ && autogen_exec_automake \ && autogen_exec_autoconf \ && autogen_exec_configure \ && autogen_exec_make return 0; } autogen_exec_aclocal() { aclocal_cmd= aclocal_includes= includedirs=`$lsh_sed -n 's,^AM_ACLOCAL_INCLUDE(\(.*\)),\1,gp' < configure.ac` if test -z "$aclocal_opts" ; then aclocal_cmd="aclocal" else aclocal_cmd="aclocal $aclocal_opts" fi if test -n "$aclocal_default_includes" ; then aclocal_includes="$aclocal_default_includes" fi if test -n "$includedirs" ; then for includedir in $includedirs do aclocal_includes="$aclocal_includes -I $includedir" done fi if test -n "$aclocal_includes" ; then aclocal_cmd="$aclocal_cmd $aclocal_includes" fi if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$aclocal_cmd'" fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$HAVE_ACLOCAL" -eq 1 ; then $aclocal_cmd else autogen_missing_aclocal return 1 fi fi includedirs=`$lsh_sed -n 's,^AM_ACLOCAL_INCLUDE(\(.*\)),\1,gp' < aclocal.m4` if test -n "$includedirs" ; then for includedir in $includedirs do aclocal_includes="$aclocal_includes -I $includedir" done aclocal_cmd="$aclocal_cmd $aclocal_includes" if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$aclocal_cmd'" fi if test "$autogen_opt_dry_run" != "yes" ; then $aclocal_cmd fi fi return 0; } autogen_create_config_dirs() { configdirs=`$lsh_sed -n 's,^AC_CONFIG_AUX_DIR(\(.*\)),\1,gp' < configure.ac` configdirs="$configdirs `$lsh_sed -n 's,^AC_CONFIG_AUX_DIR(\(.*\)),\1,gp' < aclocal.m4`" if test -n "$configdirs" ; then for configdir in $configdirs do if test "$silent" != "yes" ; then lsh_message "'mkdir $configdir'" fi if test "$autogen_opt_dry_run" != "yes" ; then mkdir $configdir fi done fi return 0; } autogen_check_gnu_gettext() { if grep "^AM_GNU_GETTEXT" configure.ac > /dev/null \ || (test -r aclocal.m4 \ && grep "^AM_GNU_GETTEXT" aclocal.m4 > /dev/null) \ || grep "^AM_GNOME_GETTEXT" configure.ac > /dev/null \ || (test -r aclocal.m4 \ && grep "^AM_GNOME_GETTEXT" aclocal.m4 > /dev/null) ; then USE_GNU_GETTEXT=1 fi return 0; } autogen_exec_gettextize() { gettextize_cmd= if test "$USE_GNU_GETTEXT" -ne 1 ; then return 0 fi if test -z "$gettextize_opts" ; then gettextize_cmd="gettextize" else gettextize_cmd="gettextize $gettextize_opts" fi if grep "sed.*POTFILES" configure.ac > /dev/null ; then : # do nothing - we still have an unmodified configure.ac elif test "$autogen_opt_gettextize" = "yes" ; then autogen_opt_autopoint="no" if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$gettextize_cmd > gettextize.log'" fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$HAVE_GETTEXTIZE" -eq 1 ; then $gettextize_cmd > gettextize.log else autogen_missing_gettextize return 1 fi fi else lsh_message "NOTE: you seem to be using GNU gettext." lsh_message "NOTE: you might want to run 'gettextize'." lsh_message "NOTE: try '$0 --help' for more information." 
fi return 0; } autogen_exec_intltoolize() { intltoolize_cmd= if test "$USE_GNU_GETTEXT" -ne 1 ; then return 0 fi if test -z "$intltoolize_opts" ; then intltoolize_cmd="intltoolize" else intltoolize_cmd="intltoolize $intltoolize_opts" fi if grep "^AC_PROG_INTLTOOL" configure.ac > /dev/null \ || (test -r aclocal.m4 \ && grep "^AC_PROG_INTLTOOL" aclocal.m4 > /dev/null) ; then if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$intltoolize_cmd > intltoolize.log'" fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$HAVE_INTLTOOLIZE" -eq 1 ; then $intltoolize_cmd > intltoolize.log else autogen_missing_intltoolize return 1 fi fi fi return 0; } autogen_exec_autopoint() { autopoint_cmd= if test "$USE_GNU_GETTEXT" -ne 1 ; then return 0 fi if test -z "$autopoint_opts" ; then autopoint_cmd="autopoint" else autopoint_cmd="autopoint $autopoint_opts" fi if grep "sed.*POTFILES" configure.ac > /dev/null ; then : # do nothing - we still have an unmodified configure.ac elif test "$autogen_opt_autopoint" = "yes" ; then if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$autopoint_cmd > autopoint.log'" fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$HAVE_AUTOPOINT" -eq 1 ; then $autopoint_cmd > autopoint.log else autogen_missing_autopoint return 1 fi fi else lsh_message "NOTE: you seem to be using GNU gettext." lsh_message "NOTE: you might want to run 'autopoint'." lsh_message "NOTE: try '$0 --help' for more information." fi return 0; } autogen_exec_libtoolize() { libtoolize_cmd= if test -z "$libtoolize_opts" ; then libtoolize_cmd="libtoolize" else libtoolize_cmd="libtoolize $libtoolize_opts" fi if grep "^AC_PROG_LIBTOOL" configure.ac > /dev/null \ || (test -r aclocal.m4 \ && grep "^AC_PROG_LIBTOOL" aclocal.m4 > /dev/null) ; then if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$libtoolize_cmd'" fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$HAVE_LIBTOOLIZE" -eq 1 ; then $libtoolize_cmd else autogen_missing_libtoolize return 1 fi fi fi return 0; } autogen_exec_autoheader() { autoheader_cmd= if test -z "$autoheader_opts" ; then autoheader_cmd="autoheader" else autoheader_cmd="autoheader $autoheader_opts" fi if grep "^AC_CONFIG_HEADERS" configure.ac > /dev/null \ || (test -r aclocal.m4 \ && grep "^AC_CONFIG_HEADERS" aclocal.m4 > /dev/null) ; then if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$autoheader_cmd'" fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$HAVE_AUTOHEADER" -eq 1 ; then $autoheader_cmd else autogen_missing_autoheader return 1 fi fi fi return 0; } autogen_exec_automake() { automake_cmd= if test -z "$automake_opts" ; then automake_cmd="automake" else automake_cmd="automake $automake_opts" fi if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$automake_cmd'" fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$HAVE_AUTOMAKE" -eq 1 ; then $automake_cmd else autogen_missing_automake return 1 fi fi return 0; } autogen_exec_autoconf() { autoconf_cmd= if test -z "$autoconf_opts" ; then autoconf_cmd="autoconf" else autoconf_cmd="autoconf $autoconf_opts" fi if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$autoconf_cmd'" fi if test "$autogen_opt_dry_run" != "yes" ; then if test "$HAVE_AUTOCONF" -eq 1 ; then $autoconf_cmd else autogen_missing_autoconf return 1 fi fi return 0; } autogen_exec_configure() { configure_cmd= if test -z "$configure_opts" ; then configure_cmd="./configure" else configure_cmd="./configure $configure_opts" fi autogen_create_configure_lsh if test "$autogen_opt_configure" 
= "yes" ; then if test "$autogen_opt_silent" != "yes" ; then lsh_message "'$configure_cmd'" fi if test "$autogen_opt_dry_run" != "yes" ; then $configure_cmd || return 1 fi else if test "$autogen_opt_silent" != "yes" ; then if test "$autogen_opt_dry_run" != "yes" ; then lsh_message "NOTE: run './configure [option] ...' to configure $autogen_distribution." lsh_message "NOTE: run './configure --help' for more information." fi fi return 1 fi return 0; } autogen_exec_make() { if test "$autogen_opt_make" = "yes" ; then if test "$autogen_opt_silent" != "yes" ; then lsh_message "'make'" fi if test "$autogen_opt_dry_run" != "yes" ; then make || return 1 fi fi return 0; } autogen_create_configure_lsh() { if test "$autogen_opt_silent" != "yes" ; then lsh_message "creating configure.sh" fi if test "$autogen_opt_dry_run" != "yes" ; then cat > configure.sh <<EOF #! /bin/lsh $configure_cmd EOF chmod u+x configure.sh fi return 0; } autogen_create_potfiles_in() { sortednames= excluded= potfiles= if test "$USE_GNU_GETTEXT" -ne 1 ; then return 0 fi for ext in $autogen_potfile_exts do if test -z "$sortednames" ; then sortednames=`find . -name \*.$ext | sort` else sortednames="$sortednames `find . -name \*.$ext | sort`" fi done for name in $sortednames do excluded="false" if test -d "$name" ; then : else for excludedir in $autogen_potfile_excludedirs do if echo "$name" | grep "^\./$excludedir[/]" > /dev/null 2>&1 ; then excluded="true" break else : fi done if test "$excluded" = "true" ; then : elif test -z "$potfiles" ; then potfiles="$name" else potfiles="$potfiles $name" fi fi done if test -n "$potfiles" ; then touch po/POTFILES.in for potfile in $potfiles do potfile=`echo "$potfile" | sed 's,^\(\./\),,'` echo $potfile >> po/POTFILES.in done fi } autogen_missing_autoconf() { echo lsh_message "You must have 'autoconf' installed to compile $autogen_distribution." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnu.org/pub/gnu/" return 1; } autogen_missing_automake() { echo lsh_message "You must have 'automake' installed to compile $autogen_distribution." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnu.org/pub/gnu/" return 1; } autogen_missing_aclocal() { if test "$HAVE_AUTOMAKE" -eq 1 ; then echo lsh_message "Missing 'aclocal'. The version of 'automake' installed" lsh_message "doesn't appear recent enough." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnu.org/pub/gnu/" else autogen_missing_automake fi return 1; } autogen_missing_autoheader() { if test "$HAVE_AUTOMAKE" -eq 1 ; then echo lsh_message "Missing 'autoheader'. The version of 'automake' installed" lsh_message "doesn't appear recent enough." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnu.org/pub/gnu/" else autogen_missing_automake fi return 1; } autogen_missing_gettext() { echo lsh_message "You must have 'gettext' installed to compile $autogen_distribution." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://alpha.gnu.org/gnu/" return 1; } autogen_missing_autopoint() { if test "$HAVE_GETTEXT" -eq 1 ; then echo lsh_message "Missing 'autopoint'. The version of 'gettext' installed" lsh_message "doesn't appear recent enough." 
lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnu.org/pub/gnu/" else autogen_missing_gettext fi return 1; } autogen_missing_gettextize() { if test "$HAVE_GETTEXT" -eq 1 ; then echo lsh_message "Missing 'gettextize'. The version of 'gettext' installed" lsh_message "doesn't appear recent enough." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnu.org/pub/gnu/" else autogen_missing_gettext fi return 1; } autogen_missing_intltoolize() { echo lsh_message "You must have 'intltoolize' installed to compile $autogen_distribution." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnome.org/pub/GNOME/" return 1; } autogen_missing_libtool() { echo lsh_message "You must have 'libtool' installed to compile $autogen_distribution." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnu.org/pub/gnu/" return 1; } autogen_missing_libtoolize() { if test "$HAVE_LIBTOOL" -eq 1 ; then echo lsh_message "Missing 'libtoolize'. The version of 'libtool' installed" lsh_message "doesn't appear recent enough." lsh_message "Download the appropriate package for your distribution," lsh_message "or get the latest source from ftp://ftp.gnu.org/pub/gnu/" else autogen_missing_libtool fi return 1; }
vendu/Resurrection
sh/autogen.sh
Shell
gpl-2.0
32,471
#!/bin/bash
# $Id: sfdisk_lvm_vg.sh 633 2013-01-19 19:50:41Z tbr $
#
# (c) Thorsten Bruhns ([email protected])
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# I GIVE NO GUARANTEE THAT THIS SCRIPT WORKS AS INTENDED
# INCORRECT USE CAN CAUSE SEVERE DATA LOSS!

# The script creates a 1st primary partition of type LVM on a device,
# spanning the whole device.
# Disk alignment is taken into account.
# The sfdisk command must be available!
#
# Afterwards a volume group is created. If the group already
# exists, it is extended accordingly.
#
# WARNING!!!
# A disk without a partition table will be overwritten. Disks with
# existing partitions are not changed!
#
# Parameter 1: device for sfdisk
# Parameter 2: name of the volume group

# todo: check with blkid to make sure that no device in use is touched!

set_env() {
    # we need 2 parameters!
    if [ ${#} -ne 2 ]
    then
        echo " "
        echo `basename $0`" <physical Device> <Volume-Group>"
        echo " "
        echo "Example: "`basename $0`" /dev/xvds testvg"
        echo " "
        echo "This script creates a 1st primary partition for LVM on the given disk when no partition table exists"
        echo "The partition will then be labeled for LVM."
        echo "Finally we create a Volume-Group with the new partition or extend an existing Volume-Group"
        exit 99
    fi

    sdpartition=${1}
    volumegroup=${2}
    pvpartition=${sdpartition}1
}

do_create_pv() {
    # check for an existing partition
    # sfdisk prints the device only once when no partition table exists
    countpart=`sfdisk -lR ${sdpartition} | grep ${sdpartition} | wc -l`

    if [ ${countpart:-0} -eq 1 ]
    then
        # todo!!
        blkid ${sdpartition} > /dev/null
        retcode=${?}
        if [ ${retcode} -eq 0 ]
        then
            # found a valid filesystem or something else
            # => we can't create a partition table here
            echo "blkid (blkid ${sdpartition}) found something on "${sdpartition}
            echo "Aborting script!"
            exit 20
        fi

        pvdisplay ${sdpartition} > /dev/null 2>&1
        retcode=${?}
        if [ ${retcode} -eq 0 ]
        then
            # We found a valid physical volume on the disc!
            echo "Valid Label for LVM found on "${sdpartition}
            echo "Aborting script!"
            exit 30
        else
            # we have no physical volume on this disc!
            # => We can create a partition!
            echo "2048,,8e"|sfdisk -uS -q --force ${sdpartition}
            if [ ${?} -eq 0 ]
            then
                blkid -g
                # sleep 2 seconds, because SLES11 SP2 doesn't find the partition for pvcreate
                sleep 2
                # we only create a physical volume when sfdisk was able to create the partition!
                # we can create the physical volume on the new partition
                echo "Creating a Label for LVM on "${pvpartition}
                pvcreate ${pvpartition}
            fi
        fi
    fi
}

do_make_vg() {
    # is the device for LVM a block-device?
    if [ ! -b ${pvpartition} ]
    then
        echo "Cannot work on Volume-Group "${volumegroup}" because partition "${pvpartition}" is not a block device!"
        echo "Script aborted!"
        exit 10
    fi

    # extend existing Volume Group or create a new one
    echo "Check for an existing Volume-Group"
    vgdisplay ${volumegroup} > /dev/null 2>&1
    retcode=${?}
    if [ ${retcode} -eq 0 ]
    then
        # Volume Group exists!
        vgextend ${volumegroup} ${pvpartition}
    else
        # creating a new Volume Group
        vgcreate ${volumegroup} ${pvpartition}

        # change max number of physical volumes for the new volume group
        echo "Change the maximum number of physical disks for the new Volume-Group"
        vgchange ${volumegroup} -p 0
    fi
}

set_env ${*}
do_create_pv
do_make_vg
Rendanic/oracleToolbox
Linux/fdisk/sfdisk_lvm_vg.sh
Shell
gpl-2.0
4,207
#!/bin/sh
###############################################################################
# HARDENED RHEL DVD CREATOR
#
# This script was written by Frank Caviggia, Red Hat Consulting
# Last update was 23 July 2015
# This script is NOT SUPPORTED by Red Hat Global Support Services.
# Please contact Josh Waldman for more information.
#
# Author: Frank Caviggia ([email protected])
# Copyright: Red Hat, (c) 2014
# Version: 1.2
# License: GPLv2
# Description: Kickstart Installation of RHEL 7 with DISA STIG
###############################################################################

# GLOBAL VARIABLES
DIR=`pwd`

# USAGE STATEMENT
function usage() {
cat << EOF
usage: $0 rhel-server-7.X-x86_64-dvd.iso

SCAP Security Guide RHEL Kickstart RHEL 7.1+

Customizes a RHEL 7.1+ x86_64 Server or Workstation DVD to install
with the following hardening:

- SCAP Security Guide (SSG) for Red Hat Enterprise Linux
- Classification Banner (Graphical Desktop)

EOF
}

while getopts ":vhq" OPTION; do
    case $OPTION in
        h)
            usage
            exit 0
            ;;
        ?)
            echo "ERROR: Invalid Option Provided!"
            echo
            usage
            exit 1
            ;;
    esac
done

# Check for root user
if [[ $EUID -ne 0 ]]; then
    if [ -z "$QUIET" ]; then
        echo
        tput setaf 1;echo -e "\033[1mPlease re-run this script as root!\033[0m";tput sgr0
    fi
    exit 1
fi

# Check for required packages
rpm -q genisoimage &> /dev/null
if [ $? -ne 0 ]; then
    yum install -y genisoimage
fi

rpm -q syslinux &> /dev/null
if [ $? -ne 0 ]; then
    yum install -y syslinux
fi

rpm -q isomd5sum &> /dev/null
if [ $? -ne 0 ]; then
    yum install -y isomd5sum
fi

# Determine if DVD is Bootable
`file $1 | grep 9660 | grep -q bootable`
if [[ $? -eq 0 ]]; then
    echo "Mounting RHEL DVD Image..."
    mkdir -p /rhel
    mkdir $DIR/rhel-dvd
    mount -o loop $1 /rhel
    echo "Done."

    # Tests DVD for RHEL 7.1+
    if [ -e /rhel/.discinfo ]; then
        RHEL_VERSION=$(grep "Red Hat" /rhel/.discinfo | awk '{ print $5 }')
        MAJOR=$(echo $RHEL_VERSION | awk -F '.' '{ print $1 }')
        MINOR=$(echo $RHEL_VERSION | awk -F '.' '{ print $2 }')
        if [[ $MAJOR -ne 7 ]]; then
            echo "ERROR: Image is not RHEL 7.1+"
            umount /rhel
            rm -rf /rhel
            exit 1
        fi
        if [[ $MINOR -lt 1 ]]; then
            echo "ERROR: Image is not RHEL 7.1+"
            umount /rhel
            rm -rf /rhel
            exit 1
        fi
    else
        echo "ERROR: Image is not RHEL"
        exit 1
    fi

    echo -n "Copying RHEL DVD Image..."
    cp -a /rhel/* $DIR/rhel-dvd/
    cp -a /rhel/.discinfo $DIR/rhel-dvd/
    echo " Done."
    umount /rhel
    rm -rf /rhel
else
    echo "ERROR: ISO image is not bootable."
    exit 1
fi

echo -n "Modifying RHEL DVD Image..."

# Set RHEL Version in ISO Linux
sed -i "s/7.X/$RHEL_VERSION/g" $DIR/config/isolinux/isolinux.cfg
sed -i "s/7.X/$RHEL_VERSION/g" $DIR/config/EFI/BOOT/grub.cfg

cp -a $DIR/config/* $DIR/rhel-dvd/

if [[ $MINOR -ge 2 ]]; then
    rm -f $DIR/rhel-dvd/hardening/openscap*rpm
fi

sed -i "s/$RHEL_VERSION/7.X/g" $DIR/config/isolinux/isolinux.cfg
sed -i "s/$RHEL_VERSION/7.X/g" $DIR/config/EFI/BOOT/grub.cfg
echo " Done."

echo "Remastering RHEL DVD Image..."
cd $DIR/rhel-dvd
chmod u+w isolinux/isolinux.bin
find . -name TRANS.TBL -exec rm '{}' \;
/usr/bin/mkisofs -J -T -V "RHEL-$RHEL_VERSION Server.x86_64" -o $DIR/ssg-rhel-$RHEL_VERSION.iso -b isolinux/isolinux.bin -c isolinux/boot.cat -no-emul-boot -boot-load-size 4 -boot-info-table -eltorito-alt-boot -e images/efiboot.img -no-emul-boot -R -m TRANS.TBL .
cd $DIR
rm -rf $DIR/rhel-dvd
echo "Done."

echo "Signing RHEL DVD Image..."
/usr/bin/isohybrid --uefi $DIR/ssg-rhel-$RHEL_VERSION.iso &> /dev/null
/usr/bin/implantisomd5 $DIR/ssg-rhel-$RHEL_VERSION.iso
echo "Done."

echo "DVD Created. [ssg-rhel-$RHEL_VERSION.iso]"

exit 0
stephenwb/ssg-el7-kickstart
createiso.sh
Shell
gpl-2.0
3,658
##!=======================>> FUNCTiONS <<=======================!## base_install() { # install dependencies COMMON="apache2-utils autoconf automake binutils bzip2 ca-certificates cpp curl fail2ban file gamin gcc git-core gzip htop iptables libexpat1 libtool libxml2 m4 make openssl patch perl pkg-config python python-gamin python-openssl python-setuptools screen subversion sudo unrar unzip zip" DYNAMIC="libcurl3 libcurl3-gnutls libcurl4-openssl-dev libncurses5 libncurses5-dev libsigc++-2.0-dev" DEBIAN="$COMMON $DYNAMIC aptitude autotools-dev build-essential cfv comerr-dev dtach g++ libcppunit-dev libperl-dev libssl-dev libterm-readline-gnu-perl libtorrent-rasterbar-dev ncurses-base ncurses-bin ncurses-term perl-modules ssl-cert" SUSE="$COMMON libcppunit-devel libcurl-devel libopenssl-devel libtorrent-rasterbar-devel gcc-c++ ncurses-devel libncurses6 libsigc++2-devel" ARCHLINUX="base-devel yaourt" # TODO PHP_COMMON="php5-curl php5-gd php5-mcrypt php5-mysql php5-suhosin php5-xmlrpc" PHP_DEBIAN="$PHP_COMMON php5-cgi php5-cli php5-common php5-dev php5-mhash" PHP_SUSE="$PHP_COMMON php5-devel" PHP_ARCHLINUX="php php-cgi" # TODO echo -en "\n${bldred} iNSTALLiNG BASE PACKAGES, this may take a while...${rst}" case "$DISTRO" in Ubuntu|[Dd]ebian|*Mint) packages install $DEBIAN ;; ARCH*|[Aa]rch* ) packages install $ARCHLINUX ;; SUSE*|[Ss]use* ) packages install $SUSE ;; esac if_error "Required system packages failed to install" log "Base Installation | Completed" echo -e "${bldylw} done${rst}" } checkout() { # increase verbosity if [[ $DEBUG = 1 ]]; then svn co $@ ; E_=$? else svn co -q $@ ; E_=$? fi } checkroot() { # check if user is root [[ $UID = 0 ]] && echo -e ">>> RooT USeR ChecK...[${bldylw} done ${rst}]" || error "PLEASE RUN WITH SUDO" } cleanup() { # remove tmp folder and restore permissions cd $BASE && rm --recursive --force tmp chown -R $USER $BASE log "Removed tmp/ folder" } clear_logfile() { # clear the logfile [[ -f $LOG ]] && rm --force $LOG } compile() { # compile with num of threads as cpu cores and time it compile_time=$SECONDS make -j$CORES $@ ; E_=$? let compile_time=$SECONDS-$compile_time } ctrl_c() { # interrupt trap log "CTRL-C : abnormal exit detected..." echo -en "\n Cleaning up and exiting..." cleanup echo -e " done \n" exit 0 } debug_wait() { # prints a message and wait for user before continuing if [[ $DEBUG = '1' ]]; then echo -e "${bldpur} DEBUG: $1" echo -en "${bldpur} Press Enter...${rst}" read ENTER fi } download() { # show progress bars if debug is on if [[ $DEBUG = 1 ]]; then wget --no-verbose $1 ; E_=$? else wget --quiet $1 ; E_=$? 
fi } error() { # call this when you know there will be an error echo -e " Error:${bldred} $1 ${rst} \n" ;exit 1 } extract() { # find type of compression and extract accordingly case "$1" in *.tar.bz2) tar xjf $1 ;; *.tbz2 ) tar xjf $1 ;; *.tar.gz ) tar xzf $1 ;; *.tgz ) tar xzf $1 ;; *.tar ) tar xf $1 ;; *.gz ) gunzip -q $1 ;; *.bz2 ) bunzip2 -q $1 ;; *.rar ) unrar x $1 ;; *.zip ) unzip $1 ;; *.Z ) uncompress $1 ;; *.7z ) 7z x $1 ;; esac } if_error() { # call this to catch a bad return code and log the error if [[ $E_ != 0 ]]; then echo -e " Error:${bldred} $1 ${rst} ($E_)" log "Error: $1 ($E_)" cleanup ;exit 1 fi } log() { # send to the logfile echo -e "$1" >> $LOG } mkpass() { # generate a random password of user defined length newPass=$(tr -cd '[:alnum:]' < /dev/urandom | head -c ${1:-${passwdlength}}) notice "$newPass" ;exit 0 } mksslcert() { # use 2048 bit certs, use sha256, and regenerate if [[ $1 = 'generate-default-snakeoil' ]]; then # called once after questionaire exits sed -i 's:default_bits .*:default_bits = 2048:' /etc/ssl/openssl.cnf sed -i 's:default_md .*:default_md = sha256:' /etc/ssl/openssl.cnf if which make-ssl-cert >/dev/null; then echo -en "${bldred} Generating SSL Certificate...${rst}" sed -i 's:default_bits .*:default_bits = 2048:' $SSLCERT make-ssl-cert $1 --force-overwrite echo -e "${bldylw} done${rst}" fi else [[ $# = 1 ]] && openssl req -new -x509 -days 3650 -nodes -out $1 -keyout $1 -subj '/C=AN/ST=ON/L=YM/O=OU/CN=S/[email protected]' # generate single key file [[ $# = 2 ]] && openssl req -new -x509 -days 3650 -nodes -out $1 -keyout $2 -subj '/C=AN/ST=ON/L=YM/O=OU/CN=S/[email protected]' # 2nd arg creates separate .pem and .key files chmod 400 $@ # Read write permission for owner only fi } notice() { # echo status or general info to stdout echo -en "\n${bldred} $1... ${rst}\n" } packages() { # use appropriate package manager depending on distro if [[ $DISTRO = @(Ubuntu|[dD]ebian|*Mint) ]]; then [[ $DEBUG != 1 ]] && quiet='-qq' case "$1" in addkey ) apt-key adv --keyserver keyserver.ubuntu.com --recv-keys $2 ;; clean ) apt-get -qq autoclean alias_autoclean="apt-get autoremove && apt-get autoclean" ;; install) shift # forget $1 apt-get install --yes $quiet $@ 2>> $LOG; E_=$? alias_install="apt-get install" ;; remove ) shift apt-get autoremove --yes $quiet $@ 2>> $LOG; E_=$? alias_remove="apt-get autoremove" ;; update ) apt-get update $quiet alias_update="apt-get update" ;; upgrade) apt-get upgrade --yes $quiet alias_upgrade="apt-get upgrade" ;; version) aptitude show $2 | grep Version: ;; setvars) REPO_PATH=/etc/apt/sources.list.d ;; esac elif [[ $DISTRO = @(ARCH|[Aa]rch)* ]]; then [[ $DEBUG != 1 ]] && quiet='--noconfirm' case "$1" in clean ) pacman --sync --clean -c $quiet alias_autoclean="pacman -Scc" ;; install) shift pacman --sync $quiet $@ 2>> $LOG; E_=$? alias_install="pacman -S" ;; remove ) shift pacman --remove $@ 2>> $LOG; E_=$? alias_remove="pacman -R" ;; update ) pacman --sync --refresh $quiet alias_update="pacman -Sy" ;; upgrade) pacman --sync --refresh --sysupgrade $quiet alias_upgrade="pacman -Syu" ;; version) pacman -Qi $2 | grep Version: ;; setvars) REPO_PATH=/etc/pacman.conf WEB=/srv/http WEBUSER='http' WEBGROUP='http' ;; esac elif [[ $DISTRO = @(SUSE|[Ss]use)* ]]; then [[ $DEBUG != 1 ]] && quiet='--quiet' case "$1" in addrepo) shift zypper --no-gpg-checks --gpg-auto-import-keys addrepo --refresh $@ 2>> $LOG ;; clean ) zypper $quiet clean alias_autoclean="zypper clean" ;; install) shift zypper $quiet --non-interactive install $@ 2>> $LOG; E_=$? 
alias_install="zypper install" ;; remove ) shift zypper $quiet remove $@ 2>> $LOG; E_=$? alias_remove="zypper remove" ;; update ) zypper $quiet refresh alias_update="zypper refresh" ;; upgrade) zypper $quiet --non-interactive update --auto-agree-with-licenses alias_upgrade="zypper update" ;; version) zypper info $2 | grep Version: ;; setvars) REPO_PATH=/etc/zypp/repos.d WEB=/srv/www/htdocs WEBUSER='wwwrun' WEBGROUP='www' ;; esac elif [[ $DISTRO = "Fedora" ]]; then [[ $DEBUG != 1 ]] && quiet='' case "$1" in clean ) yum clean all -y alias_autoclean="yum clean all" ;; install) shift yum install -y $@ 2>> $LOG; E_=$? alias_install="yum install" ;; remove ) shift yum remove -y $@ 2>> $LOG; E_=$? alias_remove="yum remove" ;; update ) yum check-update -y alias_update="yum check-update" ;; upgrade) yum upgrade -y alias_upgrade="yum upgrade" ;; version) yum info $2 | grep Version: ;; setvars) REPO_PATH=/etc/yum/repos.d/ ;; esac elif [[ $DISTRO = "Gentoo" ]]; then [[ $DEBUG != 1 ]] && quiet='--quiet' case "$1" in clean ) emerge --clean # --depclean alias_autoclean="emerge --clean" ;; install) shift emerge $quiet --jobs=$CORES $@ 2>> $LOG; E_=$? alias_install="emerge" ;; remove ) shift emerge --unmerge $quiet $@ 2>> $LOG; E=$? alias_remove="emerge -C" ;; update ) emerge --sync alias_update="emerge --sync" ;; upgrade) emerge --update world $quiet # --deep alias_upgrade="emerge -u world" ;; version) emerge -S or emerge -pv ;; setvars) ;; # TODO esac fi } spanner() { SP_COUNT=0 while [[ -d /proc/$1 ]]; do while [[ "$SP_COUNT" -lt 10 ]]; do echo -en "${bldpur}\b+ " ;sleep 0.1 ((SP_COUNT++)) done until [[ "$SP_COUNT" -eq 0 ]]; do echo -en "\b\b $rst" ;sleep 0.1 ((SP_COUNT -= 1)) done done } spinner() { SP_WIDTH=0.1 SP_STRING=".o0Oo" while [[ -d /proc/$1 ]]; do printf "${bldpur}\e7 %${SP_WIDTH}s \e8${rst}" "$SP_STRING" sleep 0.2 SP_STRING=${SP_STRING#"${SP_STRING%?}"}${SP_STRING%?} done } usage() { # help screen echo -e "\n${bldpur} Usage:${bldred} $0 ${bldpur}[${bldred}option${bldpur}]" echo -e " Options:" echo -e " ${bldred} -p, --pass ${bldpur}[${bldred}length${bldpur}] ${bldylw} Generate a strong password" echo -e " ${bldred} -v, --version ${bldylw} Show version number\n ${rst}" exit 1 } yes() { # user input for yes or no while read line; do case "$line" in y|Y|Yes|YES|yes) return 0 ;; n|N|No|NO|no) return 1 ;; *) echo -en " Please enter ${undrln}y${rst} or ${undrln}y${rst}: " ;; esac;done } init() { clear ; echo -n ">>> iNiTiALiZiNG......" OS=$(uname -s) ##[ Determine OS ]## if [[ $OS = "Linux" ]] ; then [[ -f /etc/fedora-release ]] && error "TODO - Fedora" [[ -f /etc/gentoo-release ]] && error "TODO - Gentoo" if [[ -f /etc/SuSE-release ]]; then if ! which lsb_release >/dev/null; then # install lsb_release (distros dont like to keep things simple) packages install lsb_release ;fi else if ! 
which lsb-release >/dev/null; then # install lsb-release (debian stable doesnt package it) packages install lsb-release lsb_release ;fi fi # Distributor -i > Ubuntu > Debian > Debian > LinuxMint > Arch > SUSE LINUX (DISTRO) # Release -r > 10.04 > 5.0.6 > testing > 1|10 > n/a > 11.3 (RELASE) # Codename -c > lucid > lenny > squeeze > debian|julia > n/a > n/a (NAME) readonly DISTRO=$(lsb_release -is) RELEASE=$(lsb_release -rs) NAME=$(lsb_release -cs) ARCH=$(uname -m) KERNEL=$(uname -r) ##[ Create folders if not already created ]## mkdir --parents tmp/ mkdir --parents logs/ iP=$(wget --quiet --timeout=30 www.whatismyip.com/automation/n09230945.asp -O - 2) [[ $iP != *.*.* ]] && error "Unable to find ip from outside" packages setvars # just sets REPO_PATH= at the moment readonly iP USER CORES BASE WEB HOME=/home/$USER LOG=$BASE/$LOG # make sure these variables aren't overwritten else error "Unsupported OS" fi echo -e "[${bldylw} done ${rst}]" ;sleep 1 } ##[ VARiABLE iNiT ]## CORES=$(grep -c ^processor /proc/cpuinfo) SSLCERT=/usr/share/ssl-cert/ssleay.cnf LOG=logs/installer.log iFACE=eth0 WEBUSER='www-data' WEBGROUP='www-data' WEB=/var/www #!=====================>> COLOR CONTROL <<=====================!# ##[ echo -e "${txtblu}test ${rst}" ]## txtblk='\e[0;30m' # Black ---Regular txtred='\e[0;31m' # Red txtgrn='\e[0;32m' # Green txtylw='\e[0;33m' # Yellow txtblu='\e[0;34m' # Blue txtpur='\e[0;35m' # Purple txtcyn='\e[0;36m' # Cyan txtwht='\e[0;37m' # White bldblk='\e[1;30m' # Black ---Bold bldred='\e[1;31m' # Red bldgrn='\e[1;32m' # Green bldylw='\e[1;33m' # Yellow bldblu='\e[1;34m' # Blue bldpur='\e[1;35m' # Purple bldcyn='\e[1;36m' # Cyan bldwht='\e[1;37m' # White unkblk='\e[4;30m' # Black ---Underline undred='\e[4;31m' # Red undgrn='\e[4;32m' # Green undylw='\e[4;33m' # Yellow undblu='\e[4;34m' # Blue undpur='\e[4;35m' # Purple undcyn='\e[4;36m' # Cyan undwht='\e[4;37m' # White bakblk='\e[40m' # Black ---Background bakred='\e[41m' # Red badgrn='\e[42m' # Green bakylw='\e[43m' # Yellow bakblu='\e[44m' # Blue bakpur='\e[45m' # Purple bakcyn='\e[46m' # Cyan bakwht='\e[47m' # White undrln='\e[4m' # Underline rst='\e[0m' # --------Reset
mcipovic/Fangspitzen
includes/functions.sh
Shell
gpl-2.0
12,207
#!/bin/sh

[ -f CImg.h ] && rm CImg.h
wget -c https://raw.githubusercontent.com/dtschump/CImg/master/CImg.h -O - | gunzip - > CImg.h
BoboTiG/docolav
src/maj-cimg.sh
Shell
gpl-2.0
132
source lab.conf # download all needed config files wget http://tfindelkind.com/wp-content/uploads/2015/09/sndk-ifos-1.1.0.03.tar.gz #change file permission which git does not preserver chmod 0400 $STUDENT.pem # change hostname sudo /bin/su -c "echo ceph-admin > /etc/hostname" sudo hostname ceph-admin # enable password ssh access sudo cp ./config-files/sshd_config /etc/ssh/sshd_config sudo reload ssh echo ubuntu:ceph | sudo /usr/sbin/chpasswd # config for NAT sudo cp ./config-files/sysctl.conf /etc/sysctl.conf sudo apt-get update echo iptables-persistent iptables-persistent/autosave_v4 boolean true | sudo debconf-set-selections echo iptables-persistent iptables-persistent/autosave_v6 boolean true | sudo debconf-set-selections sudo apt-get install -y iptables-persistent sudo iptables -t nat -A POSTROUTING -s 10.$LAB_SUBNET.0.0/16 -j MASQUERADE sudo /bin/su -c "iptables-save > /etc/iptables/rules.v4" # install BIND server and use lab config sudo apt-get install -y bind9 # prepare bind files sed -i.bak s/LS./$LAB_SUBNET./g ./config-files/named.conf.local sed -i.bak s/LU./$LAB_SUBNET_USER./g ./config-files/named.conf.local sed -i.bak s/LS./$LAB_SUBNET./g ./config-files/10.100.rev sed -i.bak s/LU./$LAB_SUBNET_USER./g ./config-files/10.100.rev sed -i.bak s/LS./$LAB_SUBNET./g ./config-files/lab.hosts sed -i.bak s/LU./$LAB_SUBNET_USER./g ./config-files/lab.hosts sudo cp ./config-files/named.conf.local /etc/bind/named.conf.local sudo cp ./config-files/named.conf.options /etc/bind/named.conf.options sudo cp ./config-files/10.100.rev /var/lib/bind/10.$LAB_SUBNET.rev sudo cp ./config-files/lab.hosts /var/lib/bind/lab.hosts sudo service bind9 restart # install webmin for easy changes sudo apt-get install -y libnet-ssleay-perl libauthen-pam-perl libio-pty-perl apt-show-versions wget http://prdownloads.sourceforge.net/webadmin/webmin_1.660_all.deb sudo dpkg --install webmin_1.660_all.deb sudo /usr/share/webmin/changepass.pl /etc/webmin root ceph #config DHCP for local DNS sed -i.bak s/LS./$LAB_SUBNET./g ./config-files/dhclient.conf sed -i.bak s/LU./$LAB_SUBNET_USER./g ./config-files/dhclient.conf sudo cp ./config-files/dhclient.conf /etc/dhcp/ sudo dhclient -r; sudo dhclient #prepare IFOS tar -xvzf sndk-ifos-1.1.0.03.tar.gz cp ./config-files/install.conf sndk-ifos-1.1.0.03 #prepare other hosts ssh-keyscan -H ceph-admin >> ~/.ssh/known_hosts ssh-keyscan -H 10.$LAB_SUBNET.$LAB_SUBNET_USER.4 >> ~/.ssh/known_hosts ssh-keyscan -H devstack >> ~/.ssh/known_hosts ssh-keyscan -H 10.$LAB_SUBNET.$LAB_SUBNET_USER.36 >> ~/.ssh/known_hosts ssh-keyscan -H radosgw >> ~/.ssh/known_hosts ssh-keyscan -H 10.$LAB_SUBNET.$LAB_SUBNET_USER.68 >> ~/.ssh/known_hosts ssh-keyscan -H mon1 >> ~/.ssh/known_hosts ssh-keyscan -H 10.$LAB_SUBNET.$LAB_SUBNET_USER.100 >> ~/.ssh/known_hosts ssh-keyscan -H osd-node1 >> ~/.ssh/known_hosts ssh-keyscan -H 10.$LAB_SUBNET.$LAB_SUBNET_USER.132 >> ~/.ssh/known_hosts ssh-keyscan -H osd-node2 >> ~/.ssh/known_hosts ssh-keyscan -H 10.$LAB_SUBNET.$LAB_SUBNET_USER.148 >> ~/.ssh/known_hosts ssh-keyscan -H osd-node3 >> ~/.ssh/known_hosts ssh-keyscan -H 10.$LAB_SUBNET.$LAB_SUBNET_USER.133 >> ~/.ssh/known_hosts ./scripts/prepare_node.sh devstack ./scripts/prepare_node.sh radosgw ./scripts/prepare_node.sh mon1 ./scripts/prepare_node.sh osd-node1 ./scripts/prepare_node.sh osd-node2 ./scripts/prepare_node.sh osd-node3 ./scripts/prepare_node.sh ceph-admin
Tfindelkind/ceph-on-AWS
setup_ceph-admin.sh
Shell
gpl-2.0
3,411
#!/bin/bash # # @ guy maurel # 28. 11. 2016 # # It is not enought to test how uncrustify is running with lot of examples. # It is necessary to test if uncrustify can run properly. # The last changes of code (November 2016) show some more problems. # So it is necessary to test some more. # It might be usefull to complete the list below. # #set -x SCRIPTS="./scripts" RESULTS="./results" # # control the CMAKE_BUILD_TYPE CMAKE_BUILD_TYPE=`grep -i CMAKE_BUILD_TYPE:STRING=release ./build/CMakeCache.txt` how_different=${?} if [ ${how_different} == "0" ] ; then echo "CMAKE_BUILD_TYPE is correct" else echo "CMAKE_BUILD_TYPE must be 'Release' to test" exit 1 fi # rm -rf ${RESULTS} mkdir ${RESULTS} INPUT="scripts/Input" OUTPUT="scripts/Output" CONFIG="scripts/Config" # # Test help # -h -? --help --usage file="help.txt" ./build/uncrustify > "${RESULTS}/${file}" cmp -s "${RESULTS}/${file}" "${SCRIPTS}/More_Options_to_Test/${file}" how_different=${?} if [ ${how_different} != "0" ] ; then echo echo "Problem with "${file} echo "use: diff ${RESULTS}/${file} ${SCRIPTS}/More_Options_to_Test/${file} to find why" diff "${RESULTS}/${file}" "${SCRIPTS}/More_Options_to_Test/${file}" else rm "results/${file}" fi # # Test --show-config # file="show_config.txt" ./build/uncrustify --show-config > "${RESULTS}/${file}" sed 's/# Uncrustify.*//g' "${RESULTS}/${file}" > "${RESULTS}/${file}.sed" cmp -s "${RESULTS}/${file}.sed" "${SCRIPTS}/More_Options_to_Test/${file}" how_different=${?} if [ ${how_different} != "0" ] ; then echo echo "Problem with ${RESULTS}/${file}.sed" echo "use: diff ${RESULTS}/${file}.sed ${SCRIPTS}/More_Options_to_Test/${file} to find why" diff "${RESULTS}/${file}.sed" "${SCRIPTS}/More_Options_to_Test/${file}" else rm "results/${file}" rm "results/${file}.sed" fi # # Test --update-config # ConfigFileNames="mini_d mini_nd" for ConfigFileName in ${ConfigFileNames} do ResultsFile="${RESULTS}/${ConfigFileName}_uc.txt" OutputFile="${OUTPUT}/${ConfigFileName}_uc.txt" ConfigFile="${CONFIG}/${ConfigFileName}.cfg" ./build/uncrustify -c "${ConfigFile}" --update-config &> "${ResultsFile}" sed 's/# Uncrustify.*//g' "${ResultsFile}" > "${ResultsFile}.sed" cmp -s "${ResultsFile}.sed" "${OutputFile}" how_different=${?} if [ ${how_different} != "0" ] ; then echo echo "Problem with ${ResultsFile}.sed" echo "use: diff ${ResultsFile}.sed ${OutputFile} to find why" diff "${ResultsFile}.sed" "${OutputFile}" else rm "${ResultsFile}" rm "${ResultsFile}.sed" fi done # # Test --update-config-with-doc # ConfigFileNames="mini_d mini_nd" for ConfigFileName in ${ConfigFileNames} do ResultsFile="${RESULTS}/${ConfigFileName}_ucwd.txt" OutputFile="${OUTPUT}/${ConfigFileName}_ucwd.txt" ConfigFile="${CONFIG}/${ConfigFileName}.cfg" ./build/uncrustify -c "${ConfigFile}" --update-config-with-doc &> "${ResultsFile}" sed 's/# Uncrustify.*//g' "${ResultsFile}" > "${ResultsFile}.sed" cmp -s "${ResultsFile}.sed" "${OutputFile}" how_different=${?} if [ ${how_different} != "0" ] ; then echo "Problem with ${ResultsFile}.sed" echo "use: diff ${ResultsFile}.sed ${OutputFile} to find why" diff "${ResultsFile}.sed" "${OutputFile}" else rm "${ResultsFile}" rm "${ResultsFile}.sed" fi done # # Test -p # ResultsFile="${RESULTS}/p.txt" InputFile="${INPUT}/28.cpp" OutputFile="${OUTPUT}/p.txt" ConfigFile="${CONFIG}/mini_nd.cfg" ./build/uncrustify -c "${ConfigFile}" -f "${InputFile}" -p "${ResultsFile}" &> /dev/null sed 's/# Uncrustify.*//g' "${ResultsFile}" > "${ResultsFile}.sed" cmp -s "${ResultsFile}.sed" "${OutputFile}" how_different=${?} if [ 
${how_different} != "0" ] ; then echo "Problem with ${ResultsFile}.sed" echo "use: diff ${ResultsFile}.sed ${OutputFile} to find why" diff "${ResultsFile}.sed" "${OutputFile}" else rm "${ResultsFile}" rm "${ResultsFile}.sed" fi # Debug Options: # -L # look at src/log_levels.h Liste_of_Ls_A="9 21 25 28 31 36 66 92" for L_Value in ${Liste_of_Ls_A} do InputFile="${INPUT}/${L_Value}.cpp" OutputFile="${OUTPUT}/${L_Value}.txt" LFile="${RESULTS}/${L_Value}.txt" ./build/uncrustify -c /dev/null -f "${InputFile}" -o /dev/null -L "${L_Value}" 2> "${LFile}" sed 's/[0-9]//g' "${LFile}" > "${LFile}.sed" cmp -s "${LFile}.sed" "${OutputFile}" how_different=${?} #echo "the status of is "${how_different} if [ ${how_different} != "0" ] ; then echo echo "Problem with "${InputFile} echo "use: diff ${LFile}.sed ${OutputFile} to find why" diff "${LFile}.sed" "${OutputFile}" diff "${LFile}" "${OutputFile}" break else rm "${LFile}" rm "${LFile}.sed" fi done Liste_of_Error_Tests="I-842" for Error_T in ${Liste_of_Error_Tests} do ConfigFile="${CONFIG}/${Error_T}.cfg" InputFile="${INPUT}/${Error_T}.cpp" OutputFile="${OUTPUT}/${Error_T}.txt" ErrFile="${RESULTS}/${Error_T}.txt" ./build/uncrustify -q -c "${ConfigFile}" -f "${InputFile}" -o /dev/null 2> "${ErrFile}" cmp -s "${ErrFile}" "${OutputFile}" how_different=${?} if [ ${how_different} != "0" ] ; then echo echo "Problem with "${Error_T} echo "use: diff ${ErrFile} ${OutputFile} to find why" diff "${ErrFile}" "${OutputFile}" break else rm "${ErrFile}" fi done rmdir --ignore-fail-on-non-empty results if [[ -d results ]] then echo echo "some problem(s) are still present" exit 1 else echo "all tests are OK" exit 0 fi
MeteorAdminz/uncrustify
scripts/Test_more_Options.sh
Shell
gpl-2.0
5,467
rm -f arch/trimedia/bsp/intfs/Itmosal rm -f arch/trimedia/bsp/intfs/ItmbslPhy rm -f arch/trimedia/bsp/tmBootInfo/tmBootInfo.c rm -f arch/trimedia/bsp/tmBootInfo/tmBootInfoCommon.h rm -f arch/trimedia/bsp/tmBootInfo/tmMemUncached.c rm -f arch/trimedia/bsp/tmBootInfo/tmBootInfoUtil.c rm -f arch/trimedia/bsp/tmBootInfo/inc rm -f arch/trimedia/bsp/tmbslCore/tmbslCore_vsnprintf.c rm -f arch/trimedia/bsp/tmbslCore/tmbslCore_vsnprintf.h rm -f arch/trimedia/bsp/tmbslCore/tmbslMgr.c rm -f arch/trimedia/bsp/tmbslCore/tmbslOsSpecific.c rm -f arch/trimedia/bsp/tmbslCore/inc rm -f arch/trimedia/bsp/tmbslPnx1500/inc rm -f arch/trimedia/bsp/tmbslTmOnly/dummy_tmtdosal.c rm -f arch/trimedia/bsp/tmbslTmOnly/tmbslTmOnly.c rm -f arch/trimedia/bsp/tmbslTmOnly/tmbslTmOnly_Exception.s rm -f arch/trimedia/bsp/tmbslTmOnly/tmbslTmOnly_OsSpecific.c rm -f arch/trimedia/bsp/tmbslTmOnly/tmTmOnlyMmio.h rm -f arch/trimedia/bsp/tmbslTmOnly/tmbslTmOnly.h rm -f arch/trimedia/bsp/tmhwPci/tmhwPci.c rm -f arch/trimedia/bsp/tmhwPci/tmhwPciInternal.h rm -f arch/trimedia/bsp/tmhwPci/tmhwPciIo.c rm -f arch/trimedia/bsp/tmhwPci/tmhwPciMemSpace.c rm -f arch/trimedia/bsp/tmhwPci/inc rm -f arch/trimedia/bsp/tmhwGpio/pnx1500/tmhwGpio.c rm -f arch/trimedia/bsp/tmhwGpio/inc rm -f arch/trimedia/bsp/tmhwEth/tmhwEth.c rm -f arch/trimedia/bsp/tmhwEth/inc rm -f arch/trimedia/bsp/tmhwXio/tmhwXio.c rm -f arch/trimedia/bsp/tmhwXio/tmhwXioInternal.h rm -f arch/trimedia/bsp/tmhwXio/tmhwXio_CfgLocal.h rm -f arch/trimedia/bsp/tmhwXio/tmhwXio_DvpCfg.c rm -f arch/trimedia/bsp/tmhwXio/tmhwXio_DvpNoBslCfg.c rm -f arch/trimedia/bsp/tmhwXio/tmvhPciXio0113_Reg.h rm -f arch/trimedia/bsp/tmhwXio/tmvhPciXioA051_Reg.h rm -f arch/trimedia/bsp/tmhwXio/inc rm -f arch/trimedia/bsp/tmStdTypes/inc rm -f arch/trimedia/bsp/tmbslPhyDp83847/tmbslPhyDp83847.c rm -f arch/trimedia/bsp/tmbslPhyDp83847/tmbslPhyDp83847RegBe.h rm -f arch/trimedia/bsp/tmbslPhyDp83847/tmbslPhyDp83847RegLe.h rm -f arch/trimedia/bsp/tmbslPhyDp83847/inc rm -f arch/trimedia/board-mbe/gmdsbslLcp_IntDrv.h rm -f arch/trimedia/board-mbe/gmdsbslLcp_CoreUtilities.h rm -f arch/trimedia/board-mbe/mdsbslLcp_OsSpecific.c rm -f arch/trimedia/board-mbe/mdsbslLcp_IntDrv.c rm -f arch/trimedia/board-mbe/mdsbslLcp_Vid.h rm -f arch/trimedia/board-mbe/mdsbslLcp_Vid.c rm -f arch/trimedia/board-mbe/mdsbslLcp.h rm -f arch/trimedia/board-mbe/mdsbslLcp.c rm -f arch/trimedia/board-mbe/mdsbslLcp_Aud.h rm -f arch/trimedia/board-mbe/mdsbslLcp_Aud.c rm -f arch/trimedia/board-sim/tmbslNull.c rm -f arch/trimedia/board-sim/tmbslNull.h rm -f arch/trimedia/board-sim/tmbslNull_IntDrv.c rm -f arch/trimedia/board-sim/tmbslNull_IntDrv.h rm -f arch/trimedia/board-sim/tmbslNull_Mmio.h rm -f arch/trimedia/board-sim/tmbslNull_OsSpecific.c
camelguo/linux-2.6-trimedia
scripts/cleanup_ndk.sh
Shell
gpl-2.0
2,777
#!/bin/sh
echo "Creating Filesystem..."
./scripts/make_ev3_os_EDU.sh
echo "Creating Image..."
./scripts/make_image.pl
EAVR/EV3.14
ev3sources/lms2012/open_first/make_image_EDU.sh
Shell
gpl-2.0
119
#!/bin/bash ########################################################################################### ## Copyright 2003, 2015 IBM Corp ## ## ## ## Redistribution and use in source and binary forms, with or without modification, ## ## are permitted provided that the following conditions are met: ## ## 1.Redistributions of source code must retain the above copyright notice, ## ## this list of conditions and the following disclaimer. ## ## 2.Redistributions in binary form must reproduce the above copyright notice, this ## ## list of conditions and the following disclaimer in the documentation and/or ## ## other materials provided with the distribution. ## ## ## ## THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS AND ANY EXPRESS ## ## OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF ## ## MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ## ## THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ## ## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF ## ## SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ## ## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, ## ## OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ## ## SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ## ############################################################################################ ### File : libcroco.sh ## ## ### Description: This testcase tests the libcroco package ## ## ### Author: Gopal Kalita <[email protected]> ## ########################################################################################### #cd $(dirname $0) #LTPBIN=${LTPBIN%/shared}/libcroco source $LTPBIN/tc_utils.source LIBCROCO_TESTS_DIR="${LTPBIN%/shared}/libcroco/tests" function tc_local_setup() { ##Installation check ## [ -f /usr/lib*/libcroco-0.6.so.3.0.1 ] tc_break_if_bad $? "libcroco not installed properly" } function run_test() { pushd $LIBCROCO_TESTS_DIR >$stdout 2>$stderr tc_register "Read file byte by byte" ./test0 test0.1.css >$stdout 2>$stderr tc_pass_or_fail $? "Reading byte by byte failed" tc_register "Read file character by character" ./test1 test1.css >$stdout 2>$stderr tc_pass_or_fail $? "Reading file by character failed" # Some of the below testcases test with more than 1 different files. # both the files are .css files but of different # css styles and contents tc_register "Test the cr_parser_parse method: file1" ./test2 test2.1.css >$stdout 2>$stderr tc_pass_or_fail $? "cr_parser_parse for file1 failed" tc_register "Test the cr_parser_parse method: file2" ./test2 test2.2.css >$stdout 2>$stderr tc_pass_or_fail $? "cr_parser_parse for file2 failed" tc_register "Test CROMParser class: file1" ./test3 test3.css >$stdout 2>$stderr tc_pass_or_fail $? "CROMparser for file1 failed" tc_register "Test CROMParser class: file2" ./test3 test3.1.css >$stdout 2>$stderr tc_pass_or_fail $? "CROMparser for file2 failed" tc_register "Test CROMParser class: file3" ./test3 test3.2.css >$stdout 2>$stderr tc_pass_or_fail $? "CROMparser for file3 failed" # The 2 tests below has many sub-routines # cr_parser, cr_statement, cr_term_parse, cr_declaration # and many others tc_register "Test some sub-routines: file1" ./test4 test4.1.css >$stdout 2>$stderr tc_pass_or_fail $? 
"Test for file1 failed" tc_register "Test some sub-routines: file2" ./test4 test4.2.css >$stdout 2>$stderr tc_pass_or_fail $? "Test for file2 failed" tc_register "Test the selection Engine" ./test5 test5.1.css >$stdout 2>$stderr tc_pass_or_fail $? "Test for selection engine failed" tc_register "Test the cr_input_read_byte method" ./test6 >$stdout 2>$stderr tc_pass_or_fail $? "Test cr_input_read_byte failed" popd >$stdout 2>$stderr } # #main # TST_TOTAL=11 tc_setup && run_test
PoornimaNayak/autotest-client-tests
linux-tools/libcroco/libcroco.sh
Shell
gpl-2.0
4,509
#!/bin/bash

if [ ! -f /.run-rabbitmq-server-firstrun ]; then
    # TBD
    #PASS=`pwgen -s 12 1`
    PASS="guest"

    cat >/etc/rabbitmq/rabbitmq.config <<EOF
[
  {rabbit, [{default_user, <<"admin">>}, {default_pass, <<"$PASS">>}]}
].
EOF

    echo "set default user = admin and default password = $PASS"

    # add the vhost
    (sleep 10 && rabbitmqctl add_vhost $DEVEL_VHOST_NAME && rabbitmqctl set_permissions -p $DEVEL_VHOST_NAME admin ".*" ".*" ".*") &

    touch /.run-rabbitmq-server-firstrun
fi

exec /usr/sbin/rabbitmq-server
RHEMS-Japan/LogSystem
run-rabbitmq-server.sh
Shell
gpl-2.0
512
#!/bin/bash

echo ">> Dropping caches"
sudo sh -c 'echo 3 >/proc/sys/vm/drop_caches'

echo ">> Running program"
SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )" # allow running from other dirs
$SCRIPTPATH/rpcf
davidag/systems-performance
run_rpcf.sh
Shell
gpl-2.0
210
#!/bin/sh

####################################################################
# QJ1520265 project - source code and support files for OGC WMS and
# WPS implementation http://rain.fsv.cvut.cz
#
# Purpose: install software requirements
# Author: Martin Landa <martin.landa fsv.cvut.cz>
# Licence: see LICENCE file for details
####################################################################

apt-get install --yes flex bison libproj-dev libtiff-dev \
    mesa-common-dev libglu1-mesa-dev libfftw3-dev libblas-dev \
    liblapack-dev libcairo-dev proj-bin libgdal1-dev libwxbase3.0-dev \
    gettext subversion emacs24-nox g++ python-numpy cgi-mapserver \
    mapserver-bin apache2 python-lxml gdal-bin make htop python-wxgtk3.0

a2enmod cgi
service apache2 restart

# Proj.4 (5514)
EPSG=/usr/share/proj/epsg
if [ `grep 5514 $EPSG | wc -l` = 1 ] ; then
    cat proj4/epsg >> $EPSG
fi

if [ ! -d /opt/grass ] ; then
    svn checkout https://svn.osgeo.org/grass/grass/branches/releasebranch_7_2 /opt/grass
    ./compile-svn-grass.sh

    git clone https://github.com/geopython/PyWPS.git /opt/pywps
    cp wps/pywps.cgi /usr/lib/cgi-bin
    chgrp www-data /opt/pywps/pywps/Templates/ -R
    chmod g+w /opt/pywps/pywps/Templates/ -R
fi

./configure.sh

exit 0
ctu-osgeorel/subdayprecip-design
install.sh
Shell
gpl-2.0
1,245
#!/bin/bash set -e cat << EndOfMessage _ __ __ __ __ | | / /____ _____ / /__ / /_ ____ ____/ / | | / // __ \ / ___// //_// __ \ / __ \ / __ / | |/ // /_/ /(__ )/ ,< / / / // /_/ // /_/ / |___/ \____//____//_/|_|/_/ /_/ \____/ \__,_/ ¤ De novo assembly / Trinity wrapper Version 20170721 Voskhod Pipeline version V1.2 Part of the Voskhod project https://github.com/egeeamu/voskhod GPL-3.0 Arnaud Ungaro [email protected] If you have an unstranded library : > Choose Option 1 if: You have a single-end unstranded library with only one fastq file This step will launch Trinity with relevant parameters for single input file and an unstranded library. > Choose Option 2 if: You have a paired-end unstranded library with R1 & R2 fastq files This step will launch Trinity with relevant parameters for input files and an unstranded library. If you have a stranded library : > Choose Option 3 if: You have a single-end stranded library with only one fastq file This step will launch Trinity with relevant parameters for a single input file and a stranded library. > Choose Option 4 if: You have a paired-end stranded library with R1 & R2 fastq files This step will launch Trinity with relevant parameters for input files and a stranded library. There are four stranded library types: > Paired: ¤ RF: first read (/1) of fragment pair is sequenced as anti-sense (reverse(R)), and second read (/2) is in the sense strand (forward(F)); typical of the dUTP/UDG sequencing method. ¤ FR: first read (/1) of fragment pair is sequenced as sense (forward), and second read (/2) is in the antisense strand (reverse) > single reads: ¤ F: the single read is in the sense (forward) orientation ¤ R: the single read is in the antisense (reverse) orientation This script assumes you use the dUTP/UDG sequencing method, and if you select stranded options (3 or 4), RF (paired) & R (single) parameter is given to Trinity. If you use another sequencing method, modify this option in the script, or if unknown, use unstranded options (1 or 2). The input file(s) must be in ./cleaned_input/assembly and in fastq format. The assembly files will be in ./assembly/raw/denovo EndOfMessage # --SS_lib_type RF PS3='Please enter your choice: ' options=("Option 1" "Option 2" "Option 3" "Option 4" "Quit") select opt in "${options[@]}" do case $opt in "Option 1") argu=1 break ;; "Option 2") argu=2 break ;; "Option 3") argu=3 break ;; "Option 4") argu=4 break ;; "Quit") exit ;; *) echo invalid option;; esac done if [ "$argu" = "1" ]; then echo "" echo "" echo "Type the name of the assembled species (without space ex: drer_trinity), followed by [ENTER]:" read name echo "" echo "" echo "Type the number of cores to use, followed by [ENTER]:" read cores echo "" echo "" echo "Type the max memory tu use (in GB), followed by [ENTER]:" read maxram echo "" echo "" prompt="Please select the input file:" options=( $(find ./cleaned_input/assembly -maxdepth 1 -type f -iregex '.*\.\(fastq\|fq\)$' -print0 | xargs -0) ) PS3="$prompt " select R1 in "${options[@]}" "Quit" ; do if (( REPLY == 1 + ${#options[@]} )) ; then exit elif (( REPLY > 0 && REPLY <= ${#options[@]} )) ; then echo "You picked $opt which is file $REPLY" break else echo "Invalid option. Try another one." 
fi done echo "" echo "" mkdir -p ./logs rm -rfv trinity_part_"$name" mkdir -p trinity_part_"$name" mkdir -p assembly/raw/denovo cd ./trinity_part_"$name" ../bin/trinityrnaseq/Trinity --seqType fq --single ."$R1" --normalize_reads --CPU "$cores" --max_memory "$maxram"G | tee ../logs/logs_trinity_R1_R2.txt #../bin/trinityrnaseq/Trinity --seqType fq --single ."$R1" --normalize_reads --CPU "$cores" --max_memory "$maxram"G | tee ../logs/logs_trinity_R1_R2.txt #--normalize_reads #nice -n 19 ../bin/trinityrnaseq/Trinity --seqType fq --left ../cleaned_input/assembly/R1_cleaned_sync.fastq --right ../cleaned_input/assembly/R2_cleaned_sync.fastq --normalize_reads --CPU 8 --max_memory 12G | tee ../logs/logs_trinity_R1_R2.txt cd trinity_out_dir cp ../../bin/convert_fasta_to_fastq.py ./ mv Trinity.fasta "$name"_Trinity.fasta python convert_fasta_to_fastq.py ./"$name"_Trinity.fasta "$name"_Trinity.fastq rm -rfv ../../assembly/raw/denovo/"$name"_Trinity.fasta rm -rfv ../../assembly/raw/denovo/"$name"_Trinity.fastq mv -v "$name"_Trinity.* ../../assembly/raw/denovo/ fi if [ "$argu" = "2" ]; then echo "" echo "" echo "Type the name of the assembled species (without space ex: drer_trinity), followed by [ENTER]:" read name echo "" echo "" echo "Type the number of cores to use, followed by [ENTER]:" read cores echo "" echo "" echo "Type the max memory tu use (in GB), followed by [ENTER]:" read maxram echo "" echo "" prompt="Please select R1 file:" options=( $(find ./cleaned_input/assembly -maxdepth 1 -type f -iregex '.*\.\(fastq\|fq\)$' -print0 | xargs -0) ) PS3="$prompt " select R1 in "${options[@]}" "Quit" ; do if (( REPLY == 1 + ${#options[@]} )) ; then exit elif (( REPLY > 0 && REPLY <= ${#options[@]} )) ; then echo "" break else echo "Invalid option. Try another one." fi done echo "" echo "" prompt="Please select R2 file:" options=( $(find ./cleaned_input/assembly -maxdepth 1 -type f -iregex '.*\.\(fastq\|fq\)$' -print0 | xargs -0) ) PS3="$prompt " select R2 in "${options[@]}" "Quit" ; do if (( REPLY == 1 + ${#options[@]} )) ; then exit elif (( REPLY > 0 && REPLY <= ${#options[@]} )) ; then echo "" break else echo "Invalid option. Try another one." 
fi done echo "" echo "" mkdir -p ./logs rm -rfv trinity_part_"$name" mkdir -p trinity_part_"$name" mkdir -p assembly/raw/denovo cd ./trinity_part_"$name" ../bin/trinityrnaseq/Trinity --seqType fq --left ."$R1" --right ."$R2" --normalize_reads --CPU "$cores" --max_memory "$maxram"G | tee ../logs/logs_trinity_R1_R2.txt #nice -n "$nicevalue" ../bin/trinityrnaseq/Trinity --seqType fq --left ."$R1" --right ."$R2" --normalize_reads --CPU "$cores" --max_memory "$maxram"G | tee ../logs/logs_trinity_R1_R2.txt #--normalize_reads #nice -n 19 ../bin/trinityrnaseq/Trinity --seqType fq --left ../cleaned_input/assembly/R1_cleaned_sync.fastq --right ../cleaned_input/assembly/R2_cleaned_sync.fastq --normalize_reads --CPU 8 --max_memory 12G | tee ../logs/logs_trinity_R1_R2.txt cd trinity_out_dir cp ../../bin/convert_fasta_to_fastq.py ./ mv Trinity.fasta "$name"_Trinity.fasta python convert_fasta_to_fastq.py ./"$name"_Trinity.fasta "$name"_Trinity.fastq rm -rfv ../../assembly/raw/denovo/"$name"_Trinity.fasta rm -rfv ../../assembly/raw/denovo/"$name"_Trinity.fastq mv -v "$name"_Trinity.* ../../assembly/raw/denovo/ fi if [ "$argu" = "3" ]; then echo "" echo "" echo "Type the name of the assembled species (without space ex: drer_trinity), followed by [ENTER]:" read name echo "" echo "" echo "Type the number of cores to use, followed by [ENTER]:" read cores echo "" echo "" echo "Type the max memory tu use (in GB), followed by [ENTER]:" read maxram echo "" echo "" prompt="Please select the input file:" options=( $(find ./cleaned_input/assembly -maxdepth 1 -type f -iregex '.*\.\(fastq\|fq\)$' -print0 | xargs -0) ) PS3="$prompt " select R1 in "${options[@]}" "Quit" ; do if (( REPLY == 1 + ${#options[@]} )) ; then exit elif (( REPLY > 0 && REPLY <= ${#options[@]} )) ; then echo "You picked $opt which is file $REPLY" break else echo "Invalid option. Try another one." 
fi done echo "" echo "" mkdir -p ./logs rm -rfv trinity_part_"$name" mkdir -p trinity_part_"$name" mkdir -p assembly/raw/denovo cd ./trinity_part_"$name" ../bin/trinityrnaseq/Trinity --seqType fq --single ."$R1" --SS_lib_type R --normalize_reads --CPU "$cores" --max_memory "$maxram"G | tee ../logs/logs_trinity_R1_R2.txt #../bin/trinityrnaseq/Trinity --seqType fq --single ."$R1" --normalize_reads --CPU "$cores" --max_memory "$maxram"G | tee ../logs/logs_trinity_R1_R2.txt #--normalize_reads #nice -n 19 ../bin/trinityrnaseq/Trinity --seqType fq --left ../cleaned_input/assembly/R1_cleaned_sync.fastq --right ../cleaned_input/assembly/R2_cleaned_sync.fastq --normalize_reads --CPU 8 --max_memory 12G | tee ../logs/logs_trinity_R1_R2.txt cd trinity_out_dir cp ../../bin/convert_fasta_to_fastq.py ./ mv Trinity.fasta "$name"_Trinity.fasta python convert_fasta_to_fastq.py ./"$name"_Trinity.fasta "$name"_Trinity.fastq rm -rfv ../../assembly/raw/denovo/"$name"_Trinity.fasta rm -rfv ../../assembly/raw/denovo/"$name"_Trinity.fastq mv -v "$name"_Trinity.* ../../assembly/raw/denovo/ fi if [ "$argu" = "4" ]; then echo "" echo "" echo "Type the name of the assembled species (without space ex: drer_trinity), followed by [ENTER]:" read name echo "" echo "" echo "Type the number of cores to use, followed by [ENTER]:" read cores echo "" echo "" echo "Type the max memory tu use (in GB), followed by [ENTER]:" read maxram echo "" echo "" prompt="Please select R1 file:" options=( $(find ./cleaned_input/assembly -maxdepth 1 -type f -iregex '.*\.\(fastq\|fq\)$' -print0 | xargs -0) ) PS3="$prompt " select R1 in "${options[@]}" "Quit" ; do if (( REPLY == 1 + ${#options[@]} )) ; then exit elif (( REPLY > 0 && REPLY <= ${#options[@]} )) ; then echo "You picked $opt which is file $REPLY" break else echo "Invalid option. Try another one." fi done echo "" echo "" prompt="Please select R2 file:" options=( $(find ./cleaned_input/assembly -maxdepth 1 -type f -iregex '.*\.\(fastq\|fq\)$' -print0 | xargs -0) ) PS3="$prompt " select R2 in "${options[@]}" "Quit" ; do if (( REPLY == 1 + ${#options[@]} )) ; then exit elif (( REPLY > 0 && REPLY <= ${#options[@]} )) ; then echo "You picked $opt which is file $REPLY" break else echo "Invalid option. Try another one." fi done echo "" echo "" mkdir -p ./logs rm -rfv trinity_part_"$name" mkdir -p trinity_part_"$name" mkdir -p assembly/raw/denovo cd ./trinity_part_"$name" ../bin/trinityrnaseq/Trinity --seqType fq --left ."$R1" --right ."$R2" --SS_lib_type RF --normalize_reads --CPU "$cores" --max_memory "$maxram"G | tee ../logs/logs_trinity_R1_R2.txt #nice -n "$nicevalue" ../bin/trinityrnaseq/Trinity --seqType fq --left ."$R1" --right ."$R2" --normalize_reads --CPU "$cores" --max_memory "$maxram"G | tee ../logs/logs_trinity_R1_R2.txt #--normalize_reads #nice -n 19 ../bin/trinityrnaseq/Trinity --seqType fq --left ../cleaned_input/assembly/R1_cleaned_sync.fastq --right ../cleaned_input/assembly/R2_cleaned_sync.fastq --normalize_reads --CPU 8 --max_memory 12G | tee ../logs/logs_trinity_R1_R2.txt cd trinity_out_dir cp ../../bin/convert_fasta_to_fastq.py ./ mv Trinity.fasta "$name"_Trinity.fasta python convert_fasta_to_fastq.py ./"$name"_Trinity.fasta "$name"_Trinity.fastq rm -rfv ../../assembly/raw/denovo/"$name"_Trinity.fasta rm -rfv ../../assembly/raw/denovo/"$name"_Trinity.fastq mv -v "$name"_Trinity.* ../../assembly/raw/denovo/ fi
egeeamu/voskhod
03_denovo_assembly.sh
Shell
gpl-3.0
11,386
#!/bin/sh
# Show that mv doesn't preserve links to files the user has declined to move.

# Copyright (C) 2002-2018 Free Software Foundation, Inc.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

. "${srcdir=.}/tests/init.sh"; path_prepend_ ./src
print_ver_ mv

mkdir a b || framework_failure_
echo foo > a/foo || framework_failure_
ln a/foo a/bar || framework_failure_

echo FUBAR > b/FUBAR || framework_failure_
ln b/FUBAR b/bar || framework_failure_
chmod a-w b/bar || framework_failure_
echo n > no || framework_failure_

mv a/bar a/foo b < no > out 2> err || fail=1

touch exp
touch exp_err

compare exp out || fail=1
compare exp_err err || fail=1

case "$(cat b/foo)" in
  foo) ;;
  *) fail=1;;
esac

Exit $fail
pexip/os-coreutils
tests/mv/i-link-no.sh
Shell
gpl-3.0
1,302
#!/usr/bin/env bash

cd ../bin

./TravellerProblem_Evolutive_Computing ./arguments/distancias_ce_10_2016.txt ./arguments/aleatorios_ce_10_2016.txt > ./ultimaTraza.txt

cat ./ultimaTraza.txt

echo "---------FIN DE LA SALIDA DEL PROGRAMA---------"
echo "Ejecutando diff entre la traza generada y la del campus virtual: "

diff ultimaTraza.txt ./arguments/traza_ce_10_2016.txt > ultimoDiff.txt

head -n100 ultimoDiff.txt
Jazzzy/TravellerProblem
EC/evolutiveComputing/scripts/run.sh
Shell
gpl-3.0
413