code
stringlengths 2
1.05M
| repo_name
stringlengths 5
110
| path
stringlengths 3
922
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 2
1.05M
|
---|---|---|---|---|---|
#!/bin/sh
#
# Copyright (c) 2002-2012 The Xfce development team. All rights reserved.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Written for Xfce by Benedikt Meurer <[email protected]>.
#
# Abort early if the Xfce development tools (which provide xdt-autogen)
# are not available in PATH.
(type xdt-autogen) >/dev/null 2>&1 || {
cat >&2 <<EOF
autogen.sh: You don't seem to have the Xfce development tools installed on
your system, which are required to build this software.
Please install the xfce4-dev-tools package first, it is available
from http://www.xfce.org/.
EOF
exit 1
}
# Replace this shell with xdt-autogen, forwarding all arguments.
# Quoting "$@" preserves arguments containing whitespace; the original
# unquoted $@ would word-split and glob-expand them.
XDT_AUTOGEN_REQUIRED_VERSION="4.9.1" \
exec xdt-autogen "$@"
|
cedl38/xfce4-windowck-plugin
|
autogen.sh
|
Shell
|
gpl-3.0
| 1,250 |
#!/bin/bash
#-------------------------------------------------------------
#
# (C) Copyright IBM Corp. 2010, 2015
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------
# Exit immediately if any command fails.
set -e
# Select the execution backend from $3:
#   SPARK -> run via sparkDML.sh; DASH="-" so $DASH-flag becomes "--flag"
#   MR    -> run via hadoop jar SystemML.jar; DASH empty so flags are "-flag"
#   else  -> dry run: just echo the command line
if [ "$3" == "SPARK" ]; then CMD="./sparkDML.sh "; DASH="-"; elif [ "$3" == "MR" ]; then CMD="hadoop jar SystemML.jar " ; else CMD="echo " ; fi
# $2 is the base output directory; $1 is the input dataset path.
BASE=$2
# Give the hadoop client JVM a fixed 2 GiB heap.
export HADOOP_CLIENT_OPTS="-Xmx2048m -Xms2048m -Xmn256m"
# Wall-clock start (seconds since the shell started).
tstart=$SECONDS
# $CMD is deliberately left unquoted so multi-word commands word-split.
${CMD} -f ../algorithms/PCA.dml $DASH-explain $DASH-stats $DASH-nvargs INPUT=$1 SCALE=1 PROJDATA=1 OUTPUT=${BASE}/output
# Elapsed runtime; the "- 3" presumably compensates for fixed JVM/job
# startup overhead -- TODO confirm against the other perftest scripts.
ttrain=$(($SECONDS - $tstart - 3))
# Append the timing record for this run.
echo "PCA on "$1": "$ttrain >> times.txt
|
aloknsingh/systemml
|
system-ml/scripts/perftest/runPCA.sh
|
Shell
|
apache-2.0
| 1,160 |
#!/bin/bash
# basic test to ensure that package-install files remain sorted
# alphabetically.
# Resolve the repository root relative to this script's location.
TOP=$(cd "$(dirname "$0")/.." && pwd)
# Provides passed/failed/report_results helpers.
source "$TOP/tests/unittest.sh"
# All package-list files that must stay alphabetically sorted.
# (Paths are assumed to contain no whitespace, per repo convention.)
PKG_FILES=$(find "$TOP/files/debs" "$TOP/files/rpms" "$TOP/files/rpms-suse" -type f)
TMPDIR=$(mktemp -d)
# Clean up the scratch directory on any exit path, not just the happy one.
trap 'rm -rf "$TMPDIR"' EXIT
SORTED=${TMPDIR}/sorted
UNSORTED=${TMPDIR}/unsorted
for p in $PKG_FILES; do
    # Strip comment lines before comparing; sort the remainder.
    grep -v '^#' "$p" > "${UNSORTED}"
    sort "${UNSORTED}" > "${SORTED}"
    # Use diff's exit status directly instead of capturing its output.
    if ! diff -c "${UNSORTED}" "${SORTED}" >/dev/null; then
        failed "$p is unsorted"
        # output this, it's helpful to see what exactly is unsorted
        diff -c "${UNSORTED}" "${SORTED}"
    else
        passed "$p is sorted"
    fi
done
report_results
|
mssumanth/devstack
|
tests/test_package_ordering.sh
|
Shell
|
apache-2.0
| 688 |
#!/bin/bash
# Pre-flight helper: verifies pip and virtualenv are installed and that a
# virtualenv is active, then installs the project's Python requirements.
# NOTE: this is a cookiecutter template; the {% ... %} lines are rendered
# by Jinja2 before the generated script is ever executed.
WORK_DIR="$(dirname "$0")"
PROJECT_DIR="$(dirname "$WORK_DIR")"
# Ensure pip is available; otherwise print install instructions and abort.
pip --version >/dev/null 2>&1 || {
echo >&2 -e "\npip is required but it's not installed."
echo >&2 -e "You can install it by running the following command:\n"
{% if cookiecutter.use_python2 == 'n' -%}
echo >&2 "wget https://bootstrap.pypa.io/get-pip.py --output-document=get-pip.py; chmod +x get-pip.py; sudo -H python3 get-pip.py"
{% else %}
echo >&2 "wget https://bootstrap.pypa.io/get-pip.py --output-document=get-pip.py; chmod +x get-pip.py; sudo -H python2 get-pip.py"
{%- endif %}
echo >&2 -e "\n"
echo >&2 -e "\nFor more information, see pip documentation: https://pip.pypa.io/en/latest/"
exit 1;
}
# Ensure virtualenv is available; otherwise print install instructions and abort.
virtualenv --version >/dev/null 2>&1 || {
echo >&2 -e "\nvirtualenv is required but it's not installed."
echo >&2 -e "You can install it by running the following command:\n"
{% if cookiecutter.use_python2 == 'n' -%}
echo >&2 "sudo -H pip3 install virtualenv"
{% else %}
echo >&2 "sudo -H pip2 install virtualenv"
{%- endif %}
echo >&2 -e "\n"
echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/"
exit 1;
}
# Require an active virtualenv before installing anything; otherwise pip
# would install the requirements into the system Python.
if [ -z "$VIRTUAL_ENV" ]; then
echo >&2 -e "\nYou need activate a virtualenv first"
echo >&2 -e 'If you do not have a virtualenv created, run the following command to create and automatically activate a new virtualenv named "venv" on current folder:\n'
{% if cookiecutter.use_python2 == 'n' -%}
echo >&2 -e "virtualenv venv --python=\`which python3\`"
{% else %}
echo >&2 -e "virtualenv venv --python=\`which python2\`"
{%- endif %}
echo >&2 -e "\nTo leave/disable the currently active virtualenv, run the following command:\n"
echo >&2 "deactivate"
echo >&2 -e "\nTo activate the virtualenv again, run the following command:\n"
echo >&2 "source venv/bin/activate"
echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/"
echo >&2 -e "\n"
exit 1;
else
# Install dev and test requirements into the active virtualenv.
pip install -r $PROJECT_DIR/requirements/local.txt
pip install -r $PROJECT_DIR/requirements/test.txt
{% if cookiecutter.use_heroku == "y" -%}
pip install -r $PROJECT_DIR/requirements.txt
{%- endif %}
fi
|
andresgz/cookiecutter-django
|
{{cookiecutter.project_slug}}/utility/install_python_dependencies.sh
|
Shell
|
bsd-3-clause
| 2,274 |
#!/bin/bash
#
# BLIS
# An object-based framework for developing high-performance BLAS-like
# libraries.
#
# Copyright (C) 2014, The University of Texas at Austin
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# - Neither the name of The University of Texas at Austin nor the names
# of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#
#
# gen-make-frag.sh
#
# Field G. Van Zee
#
# Print detailed usage information for this script (reads the global
# $script_name, set in main) to stdout, then exit with status 1.
# The exact text below is the user-facing contract; keep it verbatim.
print_usage()
{
#local script_name
# Get the script name
#script_name=${0##*/}
# Echo usage info
echo " "
echo " "$script_name
echo " "
echo " Field G. Van Zee"
echo " "
echo " Automatically generates makefile fragments for a specified directory"
echo " tree. "
echo " "
echo " Usage:"
echo " ${script_name} [options] root_dir templ.mk suff_list ign_list spec_list"
echo " "
echo " Arguments (mandatory):"
echo " "
echo " root_dir The root directory in which makefile fragments will be"
echo " generated."
echo " "
echo " templ.mk The template makefile fragment used to generate the actual"
echo " fragments."
echo " "
echo " suff_list File containing a newline-separated list of file suffixes"
echo " of source files to that the top-level makefile expects to"
echo " access."
echo " "
echo " ign_list File containing a newline-separated list of directory names"
echo " to ignore when descending recursively into "
echo " "
echo " spec_list File containing a newline-separated list of directories"
echo " considered to be special in some way; source files found"
echo " in these directories will be accumulated into a different"
echo " makefile sub-variables based on the name of the special"
echo " directory names."
echo " "
echo " The following options are accepted:"
echo " "
echo " -d dry-run"
echo " Go through all the motions, but don't actually generate any"
echo " makefile fragments."
echo " -r recursive"
echo " Also generate makefile fragments for subdirectories of"
echo " root_dir."
echo " -h hide"
echo " Hide the makefile fragments by prepending filenames with '.'."
echo " -p PREFIX prefix name"
echo " Use PREFIX instead of uppercased root_dir in the makefile"
echo " variable name. If the root_dir were 'stuff' and -p was not"
echo " used, then source would be accumulated into a makefile"
echo " variable named 'MK_STUFF', but if -p JUNK were given, then"
echo " the variable name would instead be MK_JUNK."
echo " -o SCRIPT output script name"
echo " Use SCRIPT when outputting messages instead of the script's"
echo " actual name."
echo " -v [0|1|2] verboseness level"
echo " level 0: silent (no output)"
echo " level 1: default (one line per directory)"
echo " level 2: verbose (several lines per directory)."
echo " "
# Exit with non-zero exit status
exit 1
}
#
# gen_mkfile()
#
# Creates a single makefile fragment in a user-specified directory and adds
# any local source files found to a top-level Makefile variable.
#
# Arguments:
#   $1 - name of the makefile variable the fragment accumulates sources into
#   $2 - directory in which to generate the fragment
#
# Reads globals: mkfile_frag_tmpl_path, hide_flag, dry_run_flag,
#   verbose_flag, src_file_suffixes, ignore_dirs, and the four
#   mkfile_fragment_*_anchor placeholder strings (all set in main).
#
gen_mkfile()
{
# Local variable declarations
local mkfile_frag_var_name
local this_dir
local mkfile_frag_tmpl_name
local mkfile_name
local mkfile_frag_path
local cur_frag_dir
local cur_frag_path
local local_src_files
local sub_items
local item_path
local item_suffix
local cur_frag_sub_dirs
# Extract our arguments to local variables
mkfile_frag_var_name=$1
this_dir=$2
# Strip the leading path from the template makefile path to get its
# simple filename. Hide the output makefile fragment filename, if
# requested.
mkfile_frag_tmpl_name=${mkfile_frag_tmpl_path##*/}
if [ -n "$hide_flag" ]; then
mkfile_frag_path=$this_dir/.$mkfile_frag_tmpl_name
else
mkfile_frag_path=$this_dir/$mkfile_frag_tmpl_name
fi
# Determine the directory in which the fragment will reside.
cur_frag_path=$this_dir
cur_frag_dir=${this_dir##*/}
# Initialize the local source list to empty
local_src_files=""
# Get a listing of the items in $this_dir
sub_items=$(ls $this_dir)
# Generate a list of the source files we've chosen
for item in $sub_items; do
# Prepend the directory to the item to get a relative path
item_path=$this_dir/$item
# Acquire the item's suffix, if it has one
item_suffix=${item_path##*.}
# If the suffix matches, then add it to our list
if is_in_list $item_suffix "$src_file_suffixes"
then
local_src_files="$local_src_files $item"
fi
done
# Delete the leading " " space character in the local source files list.
local_src_files=${local_src_files##" "}
# Initialize the fragment subdirectory list to empty
cur_frag_sub_dirs=""
# Capture the relative path listing of items in $this_dir.
sub_items=$(ls $this_dir)
# Determine the fragment's subdirectory names, if any exist
for item in $sub_items; do
# Prepend the directory to the item to get a relative path
item_path=$this_dir/$item
# If item is a directory, and it's not in the ignore list, descend into it.
#if [ -d $item_path ] && ! should_ignore $item; then
if [ -d $item_path ] && ! is_in_list $item "$ignore_dirs" ; then
cur_frag_sub_dirs=$cur_frag_sub_dirs" "$item
fi
done
# Delete the leading " " space character in fragment's subdirectory list.
cur_frag_sub_dirs=${cur_frag_sub_dirs##" "}
# Be verbose, if level 2 was requested.
if [ "$verbose_flag" = "2" ]; then
echo "mkf frag tmpl path: $mkfile_frag_tmpl_path"
echo "mkf frag path: $mkfile_frag_path"
echo "cur frag path: $cur_frag_path"
echo "cur frag dir: $cur_frag_dir"
echo "cur frag sub dirs: $cur_frag_sub_dirs"
echo "local src files: $local_src_files"
echo "src file suffixes: $src_file_suffixes"
echo "mkf frag var name: $mkfile_frag_var_name"
echo "--------------------------------------------------"
fi
# Copy the template makefile to the directory given, using the new
# makefile name we just created above. Each sed pass substitutes one
# template anchor with its computed value.
if [ -z "$dry_run_flag" ]; then
cat $mkfile_frag_tmpl_path | sed -e s/"$mkfile_fragment_cur_dir_name_anchor"/"$cur_frag_dir"/g \
| sed -e s/"$mkfile_fragment_sub_dir_names_anchor"/"$cur_frag_sub_dirs"/g \
| sed -e s/"$mkfile_fragment_local_src_files_anchor"/"$local_src_files"/g \
| sed -e s/"$mkfile_fragment_src_var_name_anchor"/"$mkfile_frag_var_name"/g \
> $mkfile_frag_path
fi
# Return peacefully.
return 0
}
#
# gen_mkfiles
#
# Recursively generates makefile fragments for a directory and all
# subdirectories. All of the actual work happens in gen_mkfile().
#
# Arguments:
#   $1 - directory to process (fragments are also generated in each
#        non-ignored subdirectory, recursively)
#
gen_mkfiles()
{
# Local variable declarations
local item sub_items cur_dir this_dir
# Extract our argument
cur_dir=$1
# Append a relevant suffix to the makefile variable name, if necesary
# (only happens when $cur_dir is listed in $special_dirs).
all_add_src_var_name "$cur_dir"
# Be verbose if level 2 was requested
if [ "$verbose_flag" = "2" ]; then
echo ">>>" $script_name ${src_var_name}_$SRC $cur_dir
elif [ "$verbose_flag" = "1" ]; then
echo "$script_name: creating makefile fragment in $cur_dir"
fi
# Call our function to generate a makefile in the directory given.
gen_mkfile "${src_var_name}_$SRC" $cur_dir
# Get a listing of the directories in $directory
sub_items=$(ls $cur_dir)
# Descend into the contents of root_dir to generate the subdirectories'
# makefile fragments.
for item in $sub_items; do
# If item is a directory, and it's not in the ignore list, descend into it.
#if [ -d "$cur_dir/$item" ] && ! should_ignore $item; then
if [ -d "$cur_dir/$item" ] && ! is_in_list $item "$ignore_dirs" ; then
this_dir=$cur_dir/$item
gen_mkfiles $this_dir
fi
done
# Remove a relevant suffix from the makefile variable name, if necesary
# (undoes the matching all_add_src_var_name above as recursion unwinds).
all_del_src_var_name "$cur_dir"
# Return peacefully
return 0
}
# Append ("+") or strip (anything else) an uppercased directory suffix
# to/from the global src_var_name when the given directory is one of the
# configured special directories.
#
# Arguments:
#   $1 - action: "+" to append the suffix, otherwise remove it
#   $2 - directory path to test (trailing slash and leading path stripped)
#
# Reads global:   special_dirs (space-separated list)
# Writes global:  src_var_name
#
# Bug fix: the loop formerly iterated over "${special_dirs}" (quoted),
# which expanded the whole list as a single word, so an individual
# special directory name could never match. The expansion must be left
# unquoted so the list word-splits into its entries.
update_src_var_name_special()
{
local dir act specdir var_suffix
# Extract arguments.
act="$1"
dir="$2"
# Strip / from end of directory path, if there is one, and then strip
# path from directory name.
dir=${dir%/}
dir=${dir##*/}
# Run through our list (unquoted on purpose: word-splitting wanted).
for specdir in ${special_dirs}; do
# If the current item matches, modify the variable name accordingly.
if [ "$dir" = "$specdir" ]; then
# Convert the directory name to uppercase.
var_suffix=$(echo "$dir" | tr '[:lower:]' '[:upper:]')
# Either add or remove the suffix.
if [ "$act" = "+" ]; then
src_var_name=${src_var_name}_$var_suffix
else
src_var_name=${src_var_name%_$var_suffix}
fi
# No need to continue iterating.
break
fi
done
}
#init_src_var_name()
#{
# local dir="$1"
#
# # Strip off the leading / if there is one
# dir=${dir%%/}
#
# # Convert the / directory separators into spaces to make a list of
# # directories.
# list=${dir//\// }
#
# # Inspect each item in $list
# for item in $list; do
#
# # Try to initialize the source variable name
# all_add_src_var_name $item
# done
#}
# Append the special-directory suffix for $1 to the global src_var_name,
# if $1 is a special directory (no-op otherwise).
all_add_src_var_name()
{
	update_src_var_name_special "+" "$1"
}
# Remove the special-directory suffix for $1 from the global src_var_name,
# if $1 is a special directory (no-op otherwise).
all_del_src_var_name()
{
	update_src_var_name_special "-" "$1"
}
# Populate the global configuration lists used during fragment generation:
#   src_file_suffixes - file suffixes of source files to accumulate
#   ignore_dirs       - directory names to skip while descending
#   special_dirs      - directories that get their own variable suffix
# Reads globals: suffix_file, ignore_file, special_file (paths set in main).
read_mkfile_config()
{
# Read the file describing file suffixes.
src_file_suffixes=$(cat "${suffix_file}")
# Read the file listing the directories to ignore.
ignore_dirs=$(cat "${ignore_file}")
# Read the file listing the special directories.
special_dirs=$(cat "${special_file}")
# Collapse the newline-separated lists onto one line so they print
# nicely. The unquoted expansion inside $(echo ...) performs the
# joining via word-splitting. (The original additionally piped through
# sed "s/\n/ /g", which never matches anything because sed operates
# per-line; that no-op, and the unused local/declare lines, were removed.)
src_file_suffixes=$(echo ${src_file_suffixes})
ignore_dirs=$(echo ${ignore_dirs})
special_dirs=$(echo ${special_dirs})
}
# Entry point: parses options, loads the three config files, derives the
# makefile variable name from root_dir (or -p PREFIX), generates the
# fragment for root_dir, and optionally (-r) recurses into subdirectories.
main()
{
# -- BEGIN GLOBAL VARIABLE DECLARATIONS --
# Define these makefile template "anchors" used in gen_mkfile().
mkfile_fragment_cur_dir_name_anchor="_mkfile_fragment_cur_dir_name_"
mkfile_fragment_sub_dir_names_anchor="_mkfile_fragment_sub_dir_names_"
mkfile_fragment_local_src_files_anchor="_mkfile_fragment_local_src_files_"
mkfile_fragment_src_var_name_anchor="_mkfile_fragment_src_var_name_"
# The name of the script, stripped of any preceeding path.
script_name=${0##*/}
# The prefix for all makefile variables.
src_var_name_prefix='MK'
# The variable that always holds the string that will be passed to
# gen_mkfile() as the source variable to insert into the fragment.mk.
src_var_name=''
# The suffix appended to all makefile fragment source variables.
SRC='SRC'
# The list of source file suffixes to add to the makefile variables.
src_file_suffixes=''
# The lists of directories to ignore and that are special.
ignore_dirs=''
special_dirs=''
# The arguments to this function. They'll get assigned meaningful
# values after getopts.
mkfile_frag_tmpl_path=""
root_dir=""
suffix_file=""
ignore_file=""
special_file=""
# Flags set by getopts.
dry_run_flag=""
hide_flag=""
recursive_flag=""
output_name=""
prefix_flag=""
verbose_flag=""
# -- END GLOBAL VARIABLE DECLARATIONS --
# Local variable declarations.
local item sub_items this_dir
# Process our command line options (see print_usage for their meaning).
while getopts ":dho:p:rv:" opt; do
case $opt in
d ) dry_run_flag="1" ;;
h ) hide_flag="1" ;;
r ) recursive_flag="1" ;;
o ) output_name=$OPTARG ;;
p ) prefix_flag=$OPTARG ;;
v ) verbose_flag=$OPTARG ;;
\? ) print_usage
esac
done
shift $(($OPTIND - 1))
# Make sure that verboseness level is valid (default to 1 otherwise).
if [ "$verbose_flag" != "0" ] &&
[ "$verbose_flag" != "1" ] &&
[ "$verbose_flag" != "2" ]; then
verbose_flag="1"
fi
# Check the number of arguments after command line option processing.
if [ $# != "5" ]; then
print_usage
fi
# If an output script name was given, overwrite script_name with it.
if [ -n "${output_name}" ]; then
script_name="${output_name}"
fi
# Extract our arguments.
root_dir=$1
mkfile_frag_tmpl_path=$2
suffix_file=$3
ignore_file=$4
special_file=$5
# Read the makefile config files to be used in the makefile fragment
# generation.
read_mkfile_config
# Strip / from end of directory path, if there is one.
root_dir=${root_dir%/}
# Initialize the name of the makefile source variable.
if [ -n "$prefix_flag" ]; then
# If prefix_flag is not null, then we construct src_var_name using
# it instead of root_dir. So if the prefix is 'junk', we will get
# makefile variables that begin with 'MK_JUNK'.
root_dir_upper=$(echo "$prefix_flag" | tr '[:lower:]' '[:upper:]')
src_var_name="${src_var_name_prefix}_${root_dir_upper}"
else
# Otherwise, we use root_dir. If the root directory is 'foo' then
# makefile variables will begin with 'MK_FOO'.
# We are also careful to convert forward slashes into underscore so
# root directories such as foo/bar result in makefile variables
# that begin with 'MK_FOO_BAR'.
root_dir_upper=$(echo "$root_dir" | tr '[:lower:]' '[:upper:]')
root_dir_upper=$(echo "$root_dir_upper" | tr '/' '_')
src_var_name="${src_var_name_prefix}_${root_dir_upper}"
fi
# Be verbose if level 2 was requested.
if [ "$verbose_flag" = "2" ]; then
echo ">>>" $script_name ${src_var_name}_$SRC $root_dir
elif [ "$verbose_flag" = "1" ]; then
echo "$script_name: creating makefile fragment in $root_dir"
fi
# Call our function to generate a makefile in the root directory given.
gen_mkfile "${src_var_name}_$SRC" $root_dir
# If we were asked to act recursively, then continue processing
# root_dir's contents.
if [ -n "$recursive_flag" ]; then
# Get a listing of the directories in $directory.
sub_items=$(ls $root_dir)
# Descend into the contents of root_dir to generate the makefile
# fragments.
for item in $sub_items; do
# If item is a directory, and it's not in the ignore list, descend into it.
#if [ -d "$root_dir/$item" ] && ! should_ignore $item ; then
if [ -d "$root_dir/$item" ] && ! is_in_list $item "$ignore_dirs" ; then
this_dir=$root_dir/$item
gen_mkfiles $this_dir
fi
done
fi
# Exit peacefully.
return 0
}
# Return 0 if the word $1 occurs in the whitespace-separated list $2,
# 1 otherwise.
is_in_list()
{
	local candidate
	# The unquoted expansion of $2 word-splits the list into candidates.
	for candidate in $2; do
		if [ "$candidate" = "$1" ]; then
			# Found it.
			return 0
		fi
	done
	# Exhausted the list without a match.
	return 1
}
# The script's main entry point, passing all parameters given.
# (Kept at the bottom so all functions are defined before use.)
main "$@"
|
scibuilder/blis
|
build/gen-make-frags/gen-make-frag.sh
|
Shell
|
bsd-3-clause
| 16,471 |
#!/bin/bash
# Fetch a fresh set of TLE (two-line element) satellite data files from
# CelesTrak and concatenate them, in order, into new.tle.
base=http://celestrak.com/NORAD/elements
# Truncate the output file, then append each group's data to it.
: > new.tle
for group in noaa goes gps-ops galileo visual amateur iridium tle-new geo; do
	GET $base/$group.txt >> new.tle
done
|
skorjevec/stel-n9
|
stellarium/plugins/Satellites/util/get.sh
|
Shell
|
gpl-2.0
| 570 |
#!/bin/bash
# a script to install server dependencies
# Installs and configures the full ELMSLN server stack on an Amazon Linux
# host (httpd, mysql, php-fpm, varnish), then runs the hands-free installer.
# provide messaging colors for output to console
txtbld=$(tput bold) # Bold
bldgrn=$(tput setaf 2) # green
bldred=${txtbld}$(tput setaf 1) # red
txtreset=$(tput sgr0)
# Print $1 in green (informational message).
elmslnecho(){
echo "${bldgrn}$1${txtreset}"
}
# Print $1 in bold red (warning message).
elmslnwarn(){
echo "${bldred}$1${txtreset}"
}
# Define seconds timestamp
timestamp(){
date +"%s"
}
start="$(timestamp)"
# make sure we're up to date
yes | yum update
# using yum to install the main packages
yes | yum -y install curl uuid patch git nano gcc make mysql55-server httpd24
# amazon packages on 56
yes | yum -y install php56 php56-common php56-opcache php56-fpm php56-pecl-apcu php56-cli php56-pdo php56-mysqlnd php56-gd php56-mbstring php56-mcrypt php56-xml php56-devel php56-pecl-ssh2 --skip-broken
yes | yum groupinstall 'Development Tools'
pecl channel-update pecl.php.net
# set httpd_can_sendmail so drupal mails go out
setsebool -P httpd_can_sendmail on
# start mysql to ensure that it is running
# todo pass some stuff in here... cause it's weird for amazon.
service mysqld restart
#install varnish
# Varnish fronts port 80; apache is moved to 8080 behind it (next 2 seds).
yum install varnish -y
sed -i 's/VARNISH_LISTEN_PORT=6081/VARNISH_LISTEN_PORT=80/g' /etc/sysconfig/varnish
sed -i 's/Listen 80/Listen 8080/g' /etc/httpd/conf/httpd.conf
# Replace the default VCL with the project's own config.
cat /dev/null > /etc/varnish/default.vcl
cat /var/www/elmsln/scripts/server/varnish.txt > /etc/varnish/default.vcl
service varnish start
chkconfig varnish on
# optimize apc
echo "" >> /etc/php.d/40-apcu.ini
echo "apc.rfc1867=1" >> /etc/php.d/40-apcu.ini
echo "apc.rfc1867_prefix=upload_" >> /etc/php.d/40-apcu.ini
echo "apc.rfc1867_name=APC_UPLOAD_PROGRESS" >> /etc/php.d/40-apcu.ini
echo "apc.rfc1867_freq=0" >> /etc/php.d/40-apcu.ini
echo "apc.rfc1867_ttl=3600" >> /etc/php.d/40-apcu.ini
# optimize opcodecache for php 5.5
echo "opcache.enable=1" >> /etc/php.d/10-opcache.ini
echo "opcache.memory_consumption=256" >> /etc/php.d/10-opcache.ini
echo "opcache.max_accelerated_files=100000" >> /etc/php.d/10-opcache.ini
echo "opcache.max_wasted_percentage=10" >> /etc/php.d/10-opcache.ini
echo "opcache.revalidate_freq=2" >> /etc/php.d/10-opcache.ini
echo "opcache.validate_timestamps=1" >> /etc/php.d/10-opcache.ini
echo "opcache.fast_shutdown=1" >> /etc/php.d/10-opcache.ini
echo "opcache.interned_strings_buffer=8" >> /etc/php.d/10-opcache.ini
echo "opcache.enable_cli=1" >> /etc/php.d/10-opcache.ini
# Make sure apache knows what you are tyring to do with host files.
echo IncludeOptional conf.sites.d/*.conf >> /etc/httpd/conf/httpd.conf
echo 'ProxyTimeout 1800' >> /etc/httpd/conf/httpd.conf
# make an admin group
groupadd admin
groupadd elmsln
# run the handsfree installer that's the same for all deployments
# kick off hands free deployment
# NOTE(review): $3 is passed three times and $5 twice -- presumably the
# installer expects repeated host/password values; confirm against
# handsfree-install.sh's argument list.
bash /var/www/elmsln/scripts/install/handsfree/handsfree-install.sh 3 $1 $2 $3 $3 $3 data- $4 $5 $5 elmsln $6
chkconfig mysqld on
service mysqld restart
## very smart of ami to have the php-fpm fallback already in place so you can just kill mod_php
rm /etc/httpd/conf.modules.d/10-php.conf -rf
#This needs to happen again after you remove mod_php
chkconfig httpd on
service httpd restart
#Turn on php-fpm service
chkconfig php-fpm on
service php-fpm start
cd $HOME
source .bashrc
end="$(timestamp)"
elmslnecho "This took $(expr $end - $start) seconds to complete the whole thing!"
|
proconnor15/elmsln
|
scripts/install/handsfree/amazon/amazon-install.sh
|
Shell
|
gpl-3.0
| 3,318 |
#!/bin/bash
# Conda post-link hook: download the SomatiCAData Bioconductor tarball from
# one of several mirrors, verify its md5, and install it into conda's R
# library. Relies on conda-provided env vars: PREFIX, PKG_NAME,
# PKG_VERSION, PKG_BUILDNUM.
FN="SomatiCAData_1.32.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.14/data/experiment/src/contrib/SomatiCAData_1.32.0.tar.gz"
  "https://bioarchive.galaxyproject.org/SomatiCAData_1.32.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-somaticadata/bioconductor-somaticadata_1.32.0_src_all.tar.gz"
)
MD5="4102770e4a3216e572a689774573c489"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"
# Try each mirror in turn until one yields a tarball with the expected md5.
SUCCESS=0
for URL in "${URLS[@]}"; do
  # Skip to the next mirror if the transfer itself fails (checking curl's
  # exit status directly instead of the fragile "[[ $? == 0 ]]" pattern).
  curl "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks. (The original's nested
  # "else if ... fi fi" is rewritten as elif; behavior is unchanged.)
  if [[ $(uname -s) == "Linux" ]]; then
    if md5sum -c <<<"$MD5 $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    # "md5 <file>" prints "MD5 (<file>) = <hash>"; field 4 is the hash.
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done
if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi
# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
cokelaer/bioconda-recipes
|
recipes/bioconductor-somaticadata/post-link.sh
|
Shell
|
mit
| 1,312 |
#!/bin/bash
###
# basic install and run test for atomic registry quickstart image
# run with "uninstall" argument to test tear down after test
###
# $1 is optional hostname override
# Fail fast, propagate pipeline failures, and trace every command.
set -o errexit
set -o pipefail
set -x
TEST_IMAGE=atomic-registry-quickstart
# we're going to use this for testing
# node ports aren't working with boxes default hostname localdomain.localhost
LOCALHOST=${1:-`hostname`}
# All oc commands run inside the "origin" container.
CMD="docker exec -it origin"
USER=mary
PROJ=mary-project
# Log in as $USER, create a project, and verify a full docker
# pull/tag/push round trip against the registry succeeds.
function test_push() {
# login as $USER and do a basic docker workflow
$CMD oc login -u ${USER} -p test
$CMD oc new-project ${PROJ}
TOKEN=$($CMD oc whoami -t)
docker login -p ${TOKEN} -u unused -e [email protected] ${LOCALHOST}:5000
docker pull busybox
docker tag busybox ${LOCALHOST}:5000/${PROJ}/busybox
docker push ${LOCALHOST}:5000/${PROJ}/busybox
docker rmi busybox ${LOCALHOST}:5000/${PROJ}/busybox
docker logout
}
# Verify that a different user (bob) can pull from $PROJ but is denied
# push access; a successful push is a test failure.
function test_cannot_push() {
# in shared mode...
# we pull $USERS's image, tag and try to push
# bob shouldn't be able to push
$CMD oc login -u bob -p test
TOKEN=$($CMD oc whoami -t)
docker login -p ${TOKEN} -u unused -e [email protected] ${LOCALHOST}:5000
docker pull ${LOCALHOST}:5000/${PROJ}/busybox
docker tag ${LOCALHOST}:5000/${PROJ}/busybox ${LOCALHOST}:5000/${PROJ}/busybox:evil
if docker push ${LOCALHOST}:5000/${PROJ}/busybox:evil; then
echo "registry-viewer user should not have been able to push to repo"
docker logout
exit 1
fi
docker rmi ${LOCALHOST}:5000/${PROJ}/busybox ${LOCALHOST}:5000/${PROJ}/busybox:evil
docker logout
}
# first we need to patch for the vagrant port mapping 443 -> 1443
$CMD oc login -u system:admin
$CMD oc patch oauthclient cockpit-oauth-client -p '{ "redirectURIs": [ "https://'"${LOCALHOST}"':1443" ] }'
test_push
test_cannot_push
|
tmckayus/oshinko-rest
|
vendor/github.com/openshift/origin/examples/atomic-registry/test.sh
|
Shell
|
apache-2.0
| 1,817 |
#!/bin/bash
# this file is mostly meant to be used by the author himself.
# Wrapper around ngx-build that configures an nginx tree with the
# drizzle module and its companion modules, relative to the cwd.
root=`pwd`
# $1: nginx version to build; $2: optional "force" flag for ngx-build.
version=$1
home=~
force=$2
# NOTE(review): $LIBDRIZZLE_LIB, $LUAJIT_LIB and $opts are not set in
# this script -- presumably inherited from the caller's environment;
# confirm before running standalone.
ngx-build $force $version \
--with-ld-opt="-Wl,-rpath,$LIBDRIZZLE_LIB:$LUAJIT_LIB" \
--without-mail_pop3_module \
--without-mail_imap_module \
--without-mail_smtp_module \
--without-http_upstream_ip_hash_module \
--without-http_empty_gif_module \
--without-http_memcached_module \
--without-http_referer_module \
--without-http_autoindex_module \
--without-http_auth_basic_module \
--without-http_userid_module \
--add-module=$root/../echo-nginx-module \
--add-module=$root/../lua-nginx-module \
--add-module=$root/../rds-json-nginx-module \
--add-module=$root/../headers-more-nginx-module \
--add-module=$root $opts \
--add-module=$root/../ndk-nginx-module \
--add-module=$root/../set-misc-nginx-module \
--with-select_module \
--with-poll_module \
--with-debug
#--with-cc-opt="-g3 -O0"
#--add-module=$home/work/nginx_eval_module-1.0.1 \
#--add-module=$root/../echo-nginx-module \
#--without-http_ssi_module # we cannot disable ssi because echo_location_async depends on it (i dunno why?!)
|
LomoX-Offical/nginx-openresty-windows
|
src/drizzle-nginx-module-0.1.10/util/build.sh
|
Shell
|
bsd-2-clause
| 1,373 |
# Usage: ./update.sh [blink-core-source-directory]
#
# Copies the needed files from a directory containing the original
# Decimal.h and Decimal.cpp source that we need.
# If [blink-core-source-directory] is not specified, this script will
# attempt to download the latest versions using svn.
# This was last updated with svn r148833
# Abort on the first failing command.
set -e
# Files to import from Blink, relative to its core/ directory.
FILES=(
"LICENSE-APPLE"
"LICENSE-LGPL-2"
"LICENSE-LGPL-2.1"
"platform/Decimal.h"
"platform/Decimal.cpp"
)
OWN_NAME=`basename $0`
if [ $# -gt 1 ]; then
echo "$OWN_NAME: Too many arguments">&2
exit 1
fi
if [ $# -eq 1 ]; then
# Local-copy mode: validate that every needed file exists before
# copying any of them, so a partial source tree fails cleanly.
BLINK_CORE_DIR="$1"
for F in "${FILES[@]}"
do
P="$BLINK_CORE_DIR/$F"
if [ ! -f "$P" ]; then
echo "$OWN_NAME: Couldn't find file: $P">&2
exit 1
fi
done
for F in "${FILES[@]}"
do
P="$BLINK_CORE_DIR/$F"
cp "$P" .
done
else
# Download mode: export each file at a single pinned revision so all
# files come from the same snapshot of the Blink repo.
SVN="svn --non-interactive --trust-server-cert"
REPO_PATH="https://src.chromium.org/blink/trunk/Source/core"
#REPO_PATH="https://svn.webkit.org/repository/webkit/trunk/Source/WebCore"
printf "Looking up latest Blink revision number..."
LATEST_REV=`$SVN info $REPO_PATH | grep '^Revision: ' | cut -c11-`
echo done.
for F in "${FILES[@]}"
do
printf "Exporting r$LATEST_REV of `basename $F`..."
$SVN export -r $LATEST_REV $REPO_PATH/$F 2>/dev/null 1>&2
echo done.
done
fi
# Apply patches:
# (applied in this order on top of the freshly imported upstream files)
patch -p3 < floor-ceiling.patch
patch -p3 < zero-serialization.patch
patch -p3 < comparison-with-nan.patch
patch -p3 < mfbt-abi-markers.patch
patch -p3 < to-moz-dependencies.patch
|
kostaspl/SpiderMonkey38
|
mfbt/decimal/update.sh
|
Shell
|
mpl-2.0
| 1,550 |
#!/bin/sh
# copied from projects/compose, with fixes for shellcheck
# shellcheck disable=SC2039
set -e
#########
#
# wait for docker socket to be ready, then run the rest of the command
#
########
# RETRIES: how many times to attempt the connection (-1 = retry forever).
RETRIES=${RETRIES:-"-1"}
# WAIT: seconds to sleep between attempts.
# NOTE(review): ${WAIT:=10} already assigns the default by itself; the
# outer "WAIT=" re-assignment is redundant but harmless.
WAIT=${WAIT:=10}
# Enable shell tracing when DEBUG is set.
[ -n "$DEBUG" ] && set -x
retry_or_fail() {
local retry_count=0
local success=1
local cmd=$1
local retryMax=$2
local retrySleep=$3
local message=$4
until [ "$retry_count" -ge "$retryMax" ] && [ "$retryMax" -ne -1 ]; do
echo "trying to $message"
set +e
$cmd
success=$?
set -e
[ $success -eq 0 ] && break
retry_count=$(( retry_count+1 )) || true
echo "attempt number $retry_count failed to $message, sleeping $retrySleep seconds..."
sleep "$retrySleep"
done
# did we succeed?
if [ $success -ne 0 ]; then
echo "failed to $message after $retryMax tries. Exiting..." >&2
exit 1
fi
}
# Succeed only when the docker socket exists and the engine API answers
# a containers/json request over it.
connect_to_docker() {
	if [ ! -S /var/run/docker.sock ]; then
		return 1
	fi
	if ! curl --unix-socket /var/run/docker.sock http://localhost/containers/json >/dev/null 2>&1; then
		return 1
	fi
}
# try to connect to docker, retrying per RETRIES/WAIT
retry_or_fail connect_to_docker "$RETRIES" "$WAIT" "connect to docker"
# if we got here, we succeeded; replace this shell with the real command
exec "$@"
|
justincormack/linuxkit
|
pkg/cadvisor/waitfordocker.sh
|
Shell
|
apache-2.0
| 1,253 |
# Decide whether the cached Capistrano task list (.cap_tasks~) must be
# regenerated. Returns 0 ("yes, regenerate") when the cache file is missing.
# Otherwise it compares modification times: `expr a '>=' b` prints 1 when the
# cache is at least as new as config/deploy.rb, so the function returns 1
# ("no") in that case and 0 when deploy.rb is newer.
# NOTE(review): `stat -f%m` is the BSD/macOS form; GNU stat needs `-c %Y` —
# confirm the target platform before reusing this elsewhere.
function _cap_does_task_list_need_generating () {
if [ ! -f .cap_tasks~ ]; then return 0;
else
accurate=$(stat -f%m .cap_tasks~)
changed=$(stat -f%m config/deploy.rb)
return $(expr $accurate '>=' $changed)
fi
}
# Completion helper for `cap`: (re)build the cached task list when stale and
# feed the cached names to compadd. The cache file .cap_tasks~ lives next to
# config/deploy.rb in the project root.
function _cap () {
  if [ -f config/deploy.rb ]; then
    if _cap_does_task_list_need_generating; then
      echo "\nGenerating .cap_tasks~..." > /dev/stderr
      # The redirection must be on the same line as the pipeline; previously
      # it sat on its own line, which truncated .cap_tasks~ to empty and
      # dumped the task list to stdout instead of caching it.
      cap show_tasks -q | cut -d " " -f 1 | sed -e '/^ *$/D' -e '1,2D' > .cap_tasks~
    fi
    compadd `cat .cap_tasks~`
  fi
}
# Register _cap as the zsh completion function for the `cap` command.
compctl -K _cap cap
|
TheDahv/dotfiles
|
zsh/oh-my-zsh/plugins/cap/cap.plugin.zsh
|
Shell
|
mit
| 538 |
#!/usr/bin/env bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Script to produce a tarball release of the C-library and associated C API
# header file.
# Produces: lib_package/libtensorflow-gpu-darwin-x86_64.tar.gz
set -ex
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# See comments at the top of this file for details.
source "${SCRIPT_DIR}/../builds/libtensorflow.sh"
# Configure script
# Non-interactive answers to TensorFlow's ./configure prompts:
# build with CUDA, without GCP/HDFS/OpenCL support.
export TF_NEED_CUDA=1
# Make the CUDA runtime and CUPTI profiling libraries resolvable at build time.
export LD_LIBRARY_PATH="/usr/local/cuda/lib:/usr/local/cuda/extras/CUPTI/lib:${LD_LIBRARY_PATH}"
export PYTHON_BIN_PATH="/usr/bin/python"
export TF_NEED_GCP=0
export TF_NEED_HDFS=0
export TF_NEED_OPENCL=0
# NOTE(review): COMPUTECPP_PATH is presumably read by configure even with
# OpenCL disabled — confirm against ../builds/libtensorflow.sh.
export COMPUTECPP_PATH="/usr/local"
# Pin PATH to a fixed toolchain ordering with the CUDA tools first.
export PATH="/usr/local/cuda/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin"
# build_libtensorflow_tarball comes from the sourced libtensorflow.sh; the
# argument becomes the platform suffix of the produced tarball name.
build_libtensorflow_tarball "-darwin-gpu-$(uname -m)"
|
scenarios/tensorflow
|
tensorflow/tools/ci_build/osx/libtensorflow_gpu.sh
|
Shell
|
apache-2.0
| 1,453 |
#!/bin/bash
# Whitebox test dispatcher for the "digits" benchmark.
# Usage: whitebox_test.sh <variant> <test-id>
#   $1 - program variant, passed through to genprog_tests.py
#   $2 - test id: p1..p9 (positive tests), n1 (negative test)
# Exits 0 when the selected test passes, 1 otherwise (including unknown ids).
DIR=$(dirname "$0")
# Map each test id to its whitebox fixture number (N.in / N.out).
case "$2" in
  p1) N=9;;
  p2) N=8;;
  p3) N=3;;
  p4) N=2;;
  p5) N=1;;
  p6) N=7;;
  p7) N=6;;
  p8) N=5;;
  p9) N=4;;
  n1) N=10;;
  *) exit 1;;
esac
# Quoted paths so variants checked out under paths with spaces still work.
"$DIR/../../../bin/genprog_tests.py" --program digits "$1" \
  "$DIR/../../tests/whitebox/$N.out" "$DIR/../../tests/whitebox/$N.in" && exit 0
exit 1
|
ProgramRepair/IntroClass
|
digits/ca94e3756cbf8d1490bad660c06307f5d678e3675bbea85359523809a4f06b370066767ea2d2d76d270e4712b464924f12e19dbf1a12d28b75d367ceb202dbb9/007/whitebox_test.sh
|
Shell
|
bsd-3-clause
| 1,395 |
#!/bin/bash
#
# Deploys the current Dagger website to the gh-pages branch of the GitHub
# repository. To test the site locally before deploying run `jekyll --server`
# in the website/ directory.
set -ex
# NOTE(review): the user part of this URL looks mangled (e-mail redaction
# artifact?) — confirm it should read git@github.com.
REPO="[email protected]:square/dagger.git"
GROUP_ID="com.squareup.dagger"
ARTIFACT_ID="dagger"
DIR=temp-dagger-clone
# Delete any existing temporary website clone
rm -rf $DIR
# Clone the current repo into temp folder
git clone $REPO $DIR
# Move working directory into temp folder
cd $DIR
# Checkout and track the gh-pages branch
git checkout -t origin/gh-pages
# Delete everything (the branch is regenerated wholesale on every deploy)
rm -rf *
# Copy website files from real repo
cp -R ../website/* .
# Download the latest javadoc
# NOTE(review): curl without -f exits 0 on HTTP error pages, so under set -e
# a 404 body could silently end up in javadoc.zip — consider `curl -L -f`.
curl -L "http://repository.sonatype.org/service/local/artifact/maven/redirect?r=central-proxy&g=$GROUP_ID&a=$ARTIFACT_ID&v=LATEST&c=javadoc" > javadoc.zip
mkdir javadoc
unzip javadoc.zip -d javadoc
rm javadoc.zip
# Stage all files in git and create a commit
git add .
git add -u
git commit -m "Website at $(date)"
# Push the new files up to GitHub
git push origin gh-pages
# Delete our temp folder
cd ..
rm -rf $DIR
|
goinstant/dagger
|
deploy_website.sh
|
Shell
|
apache-2.0
| 1,106 |
#!/bin/bash
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## Contains configuration values for the Ubuntu cluster
# Define all your cluster nodes; the MASTER node comes first,
# separated with blank space like <user_1@ip_1> <user_2@ip_2> <user_3@ip_3>
export nodes=${nodes:-"[email protected] [email protected] [email protected]"}
# Define each node's role: a(master), i(minion) or ai(both master and minion);
# the order must match the `nodes` list above.
role=${role:-"ai i i"}
# It is practically impossible to set an array as an environment variable
# from a script, so assume the variable is a string then convert it to an array
export roles=($role)
# Define minion numbers
export NUM_MINIONS=${NUM_MINIONS:-3}
# define the IP range used for service cluster IPs.
# according to rfc 1918 ref: https://tools.ietf.org/html/rfc1918 choose a private ip range here.
export SERVICE_CLUSTER_IP_RANGE=${SERVICE_CLUSTER_IP_RANGE:-192.168.3.0/24} # formerly PORTAL_NET
# define the IP range used for flannel overlay network, should not conflict with above SERVICE_CLUSTER_IP_RANGE
export FLANNEL_NET=${FLANNEL_NET:-172.16.0.0/16}
# Admission Controllers to invoke prior to persisting objects in cluster
export ADMISSION_CONTROL=NamespaceLifecycle,LimitRanger,ServiceAccount,ResourceQuota,SecurityContextDeny
# Port range from which NodePort services may allocate.
SERVICE_NODE_PORT_RANGE=${SERVICE_NODE_PORT_RANGE:-"30000-32767"}
# Optional: Enable node logging.
ENABLE_NODE_LOGGING=false
LOGGING_DESTINATION=${LOGGING_DESTINATION:-elasticsearch}
# Optional: When set to true, Elasticsearch and Kibana will be setup as part of the cluster bring up.
ENABLE_CLUSTER_LOGGING=false
ELASTICSEARCH_LOGGING_REPLICAS=${ELASTICSEARCH_LOGGING_REPLICAS:-1}
# Optional: When set to true, heapster, Influxdb and Grafana will be setup as part of the cluster bring up.
ENABLE_CLUSTER_MONITORING="${KUBE_ENABLE_CLUSTER_MONITORING:-true}"
# Extra options to set on the Docker command line. This is useful for setting
# --insecure-registry for local registries.
DOCKER_OPTS=${DOCKER_OPTS:-""}
# Optional: Install cluster DNS.
ENABLE_CLUSTER_DNS="${KUBE_ENABLE_CLUSTER_DNS:-true}"
# DNS_SERVER_IP must be an IP in SERVICE_CLUSTER_IP_RANGE
DNS_SERVER_IP=${DNS_SERVER_IP:-"192.168.3.10"}
DNS_DOMAIN=${DNS_DOMAIN:-"cluster.local"}
DNS_REPLICAS=${DNS_REPLICAS:-1}
# Optional: Install Kubernetes UI
ENABLE_CLUSTER_UI="${KUBE_ENABLE_CLUSTER_UI:-true}"
# Optional: Enable setting flags for kube-apiserver to turn on behavior in active-dev
#RUNTIME_CONFIG=""
# Optional: Add http or https proxy when downloading easy-rsa.
# Add environment variables separated with blank space like "http_proxy=http://10.x.x.x:8080 https_proxy=https://10.x.x.x:8443"
PROXY_SETTING=${PROXY_SETTING:-""}
|
pedro-r-marques/kubernetes
|
cluster/ubuntu/config-default.sh
|
Shell
|
apache-2.0
| 3,252 |
#!/bin/bash
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
set -x
BINDIR=$(dirname $0)
HUE_ROOT=$PWD
export REPO_TRACE=1
# Pull in shared helpers (build_hadoop, ...) from the sibling script.
. $BINDIR/build-functions
##############################
# Build prod tarball
##############################
# Use $SKIP_CLEAN if the cleaning is done outside of this script.
if [ -z "$SKIP_CLEAN" ]; then
echo "Cleaning repo."
git clean -xdf
rm -rf ext
git reset --hard HEAD
else
echo "Skipping cleaning of repo."
fi
build_hadoop
make prod
##############################
# Build Hue from within SDK dir
##############################
cd build/release/prod/
# Enter the versioned release directory (hue-X.Y), skipping the tarball.
cd $(ls -d hue-* | grep -v tgz)
make apps
##############################
# Smoke tests
##############################
# config_help exercises the packaged virtualenv and Hue entry point.
./build/env/bin/hue config_help
##############################
# Install
##############################
INSTALL_DIR=$(pwd)/../installdir make install
##############################
# Check install
##############################
cd ../installdir
./build/env/bin/hue config_help
|
rahul67/hue
|
tools/jenkins/jenkins-prod-tarball.sh
|
Shell
|
apache-2.0
| 1,725 |
#!/bin/bash
# SPDX-License-Identifier: GPL-2.0
# Copyright(c) 2020 Intel Corporation, Weqaar Janjua <[email protected]>
# AF_XDP selftests based on veth
#
# End-to-end AF_XDP over Veth test
#
# Topology:
# ---------
# -----------
# _ | Process | _
# / ----------- \
# / | \
# / | \
# ----------- | -----------
# | Thread1 | | | Thread2 |
# ----------- | -----------
# | | |
# ----------- | -----------
# | xskX | | | xskY |
# ----------- | -----------
# | | |
# ----------- | ----------
# | vethX | --------- | vethY |
# ----------- peer ----------
# | | |
# namespaceX | namespaceY
#
# AF_XDP is an address family optimized for high performance packet processing,
# it is XDP’s user-space interface.
#
# An AF_XDP socket is linked to a single UMEM which is a region of virtual
# contiguous memory, divided into equal-sized frames.
#
# Refer to AF_XDP Kernel Documentation for detailed information:
# https://www.kernel.org/doc/html/latest/networking/af_xdp.html
#
# Prerequisites setup by script:
#
# Set up veth interfaces as per the topology shown ^^:
# * setup two veth interfaces and one namespace
# ** veth<xxxx> in root namespace
# ** veth<yyyy> in af_xdp<xxxx> namespace
# ** namespace af_xdp<xxxx>
# * create a spec file veth.spec that includes this run-time configuration
# *** xxxx and yyyy are randomly generated 4 digit numbers used to avoid
# conflict with any existing interface
# * tests the veth and xsk layers of the topology
#
# See the source xdpxceiver.c for information on each test
#
# Kernel configuration:
# ---------------------
# See "config" file for recommended kernel config options.
#
# Turn on XDP sockets and veth support when compiling i.e.
# Networking support -->
# Networking options -->
# [ * ] XDP sockets
#
# Executing Tests:
# ----------------
# Must run with CAP_NET_ADMIN capability.
#
# Run (full color-coded output):
# sudo ./test_xsk.sh -c
#
# If running from kselftests:
# sudo make colorconsole=1 run_tests
#
# Run (full output without color-coding):
# sudo ./test_xsk.sh
# Shared helpers: test_status/test_exit, vethXDP*, execxdpxceiver, SPECFILE, ...
. xsk_prereqs.sh
# -c enables color-coded result output.
while getopts c flag
do
case "${flag}" in
c) colorconsole=1;;
esac
done
TEST_NAME="PREREQUISITES"
URANDOM=/dev/urandom
[ ! -e "${URANDOM}" ] && { echo "${URANDOM} not found. Skipping tests."; test_exit 1 1; }
# Random 4-digit suffixes keep the veth names from colliding with any
# interface that already exists on the host.
VETH0_POSTFIX=$(cat ${URANDOM} | tr -dc '0-9' | fold -w 256 | head -n 1 | head --bytes 4)
VETH0=ve${VETH0_POSTFIX}
VETH1_POSTFIX=$(cat ${URANDOM} | tr -dc '0-9' | fold -w 256 | head -n 1 | head --bytes 4)
VETH1=ve${VETH1_POSTFIX}
NS0=root
NS1=af_xdp${VETH1_POSTFIX}
MTU=1500
# Create the veth pair and the test namespace per the topology in the header:
# ${VETH0} stays in the root namespace, ${VETH1} is moved into ${NS1}.
# Both ends get MTU ${MTU} and are brought up.
setup_vethPairs() {
	printf 'setting up %s: namespace: %s\n' "${VETH0}" "${NS0}"
	ip netns add "${NS1}"
	ip link add "${VETH0}" type veth peer name "${VETH1}"
	# Disable IPv6 on the root-side interface when the kernel has IPv6.
	[ -f /proc/net/if_inet6 ] && echo 1 > /proc/sys/net/ipv6/conf/"${VETH0}"/disable_ipv6
	printf 'setting up %s: namespace: %s\n' "${VETH1}" "${NS1}"
	ip link set "${VETH1}" netns "${NS1}"
	ip netns exec "${NS1}" ip link set "${VETH1}" mtu "${MTU}"
	ip link set "${VETH0}" mtu "${MTU}"
	ip netns exec "${NS1}" ip link set "${VETH1}" up
	ip link set "${VETH0}" up
}
# Abort early unless running as root with veth and iproute2 available.
validate_root_exec
validate_veth_support ${VETH0}
validate_ip_utility
setup_vethPairs
retval=$?
if [ $retval -ne 0 ]; then
test_status $retval "${TEST_NAME}"
cleanup_exit ${VETH0} ${VETH1} ${NS1}
exit $retval
fi
# Record the run-time topology ("veth0:veth1,namespace") for later stages.
echo "${VETH0}:${VETH1},${NS1}" > ${SPECFILE}
validate_veth_spec_file
echo "Spec file created: ${SPECFILE}"
test_status $retval "${TEST_NAME}"
## START TESTS
statusList=()

### TEST 1
TEST_NAME="XSK KSELFTEST FRAMEWORK"
echo "Switching interfaces [${VETH0}, ${VETH1}] to XDP Generic mode"
vethXDPgeneric ${VETH0} ${VETH1} ${NS1}
retval=$?
if [ $retval -eq 0 ]; then
	echo "Switching interfaces [${VETH0}, ${VETH1}] to XDP Native mode"
	vethXDPnative ${VETH0} ${VETH1} ${NS1}
fi
# NOTE(review): when the Generic-mode switch fails, $? here is the status of
# the skipped `if` statement (0), so a Generic failure is not reported —
# confirm whether that is intentional before changing it.
retval=$?
test_status $retval "${TEST_NAME}"
statusList+=($retval)

### TESTS 2-9
# Each remaining test is one row of "name;attach mode;xdpxceiver flags".
# Driving them from a table replaces eight near-identical copies of the
# same five-line stanza.
xdp_tests=(
	"SKB NOPOLL;generic;-S"
	"SKB POLL;generic;-S -p"
	"DRV NOPOLL;native;-N"
	"DRV POLL;native;-N -p"
	"SKB SOCKET TEARDOWN;generic;-S -T"
	"DRV SOCKET TEARDOWN;native;-N -T"
	"SKB BIDIRECTIONAL SOCKETS;generic;-S -B"
	"DRV BIDIRECTIONAL SOCKETS;native;-N -B"
)
for spec in "${xdp_tests[@]}"; do
	IFS=';' read -r TEST_NAME mode flags <<< "$spec"
	if [ "$mode" = "generic" ]; then
		vethXDPgeneric ${VETH0} ${VETH1} ${NS1}
	else
		vethXDPnative ${VETH0} ${VETH1} ${NS1}
	fi
	# execxdpxceiver receives the *name* of the array holding the flags,
	# so the global array must keep the name "params".
	read -r -a params <<< "$flags"
	execxdpxceiver params
	retval=$?
	test_status $retval "${TEST_NAME}"
	statusList+=($retval)
done
## END TESTS
# Tear down the veth pair, the namespace and the spec file in all cases.
cleanup_exit ${VETH0} ${VETH1} ${NS1}
# Fail the whole selftest if any individual test failed.
for _status in "${statusList[@]}"
do
if [ $_status -ne 0 ]; then
test_exit $ksft_fail 0
fi
done
test_exit $ksft_pass 0
|
GuillaumeSeren/linux
|
tools/testing/selftests/bpf/test_xsk.sh
|
Shell
|
gpl-2.0
| 5,832 |
# Refresh the package index and bring the base image fully up to date.
apt-get update
apt-get -y upgrade
# Kernel headers for the running kernel, so out-of-tree modules can build.
# NOTE(review): assumption — confirm which later provisioner needs them.
apt-get -y install linux-headers-$(uname -r)
# After sudo's env_reset line, exempt the sudo group from environment reset.
sed -i -e '/Defaults\s\+env_reset/a Defaults\texempt_group=sudo' /etc/sudoers
# Allow passwordless sudo for the sudo group (typical for build images).
sed -i -e 's/%sudo ALL=(ALL:ALL) ALL/%sudo ALL=NOPASSWD:ALL/g' /etc/sudoers
# Skip reverse-DNS lookups on SSH logins to speed up connections.
echo "UseDNS no" >> /etc/ssh/sshd_config
|
Section9Labs/Cartero.packer
|
ubuntu/scripts/base.sh
|
Shell
|
lgpl-3.0
| 278 |
#!/usr/bin/env bash
# Copyright 2009 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# Generate Go code listing errors and other #defined constant
# values (ENAMETOOLONG etc.), by asking the preprocessor
# about the definitions.
# Force the C locale so compiler and tool output is stable and parseable.
unset LANG
export LC_ALL=C
export LC_CTYPE=C
# Both GOARCH and GOOS must be provided by the caller.
# (POSIX marks `test A -o B` obsolescent and ambiguous; use two tests with ||.)
if test -z "$GOARCH" || test -z "$GOOS"; then
	echo 1>&2 "GOARCH or GOOS not defined in environment"
	exit 1
fi
# Check that we are using the new build system if we should
if [[ "$GOOS" = "linux" ]] && [[ "$GOARCH" != "sparc64" ]]; then
	if [[ "$GOLANG_SYS_BUILD" != "docker" ]]; then
		echo 1>&2 "In the new build system, mkerrors should not be called directly."
		echo 1>&2 "See README.md"
		exit 1
	fi
fi
# Honor an externally supplied C compiler, defaulting to cc.
CC=${CC:-cc}
if [[ "$GOOS" = "solaris" ]]; then
	# Assumes GNU versions of utilities in PATH.
	export PATH=/usr/gnu/bin:$PATH
fi
uname=$(uname)
includes_Darwin='
#define _DARWIN_C_SOURCE
#define KERNEL
#define _DARWIN_USE_64_BIT_INODE
#include <stdint.h>
#include <sys/attr.h>
#include <sys/types.h>
#include <sys/event.h>
#include <sys/ptrace.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/sysctl.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/ip.h>
#include <termios.h>
'
includes_DragonFly='
#include <sys/types.h>
#include <sys/event.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/sysctl.h>
#include <sys/mman.h>
#include <sys/wait.h>
#include <sys/ioctl.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <termios.h>
#include <netinet/ip.h>
#include <net/ip_mroute/ip_mroute.h>
'
includes_FreeBSD='
#include <sys/capability.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/event.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/sysctl.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/wait.h>
#include <sys/ioctl.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <termios.h>
#include <netinet/ip.h>
#include <netinet/ip_mroute.h>
#include <sys/extattr.h>
#if __FreeBSD__ >= 10
#define IFT_CARP 0xf8 // IFT_CARP is deprecated in FreeBSD 10
#undef SIOCAIFADDR
#define SIOCAIFADDR _IOW(105, 26, struct oifaliasreq) // ifaliasreq contains if_data
#undef SIOCSIFPHYADDR
#define SIOCSIFPHYADDR _IOW(105, 70, struct oifaliasreq) // ifaliasreq contains if_data
#endif
'
includes_Linux='
#define _LARGEFILE_SOURCE
#define _LARGEFILE64_SOURCE
#ifndef __LP64__
#define _FILE_OFFSET_BITS 64
#endif
#define _GNU_SOURCE
// <sys/ioctl.h> is broken on powerpc64, as it fails to include definitions of
// these structures. We just include them copied from <bits/termios.h>.
#if defined(__powerpc__)
struct sgttyb {
char sg_ispeed;
char sg_ospeed;
char sg_erase;
char sg_kill;
short sg_flags;
};
struct tchars {
char t_intrc;
char t_quitc;
char t_startc;
char t_stopc;
char t_eofc;
char t_brkc;
};
struct ltchars {
char t_suspc;
char t_dsuspc;
char t_rprntc;
char t_flushc;
char t_werasc;
char t_lnextc;
};
#endif
#include <bits/sockaddr.h>
#include <sys/epoll.h>
#include <sys/eventfd.h>
#include <sys/inotify.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/prctl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/socket.h>
#include <sys/xattr.h>
#include <linux/if.h>
#include <linux/if_alg.h>
#include <linux/if_arp.h>
#include <linux/if_ether.h>
#include <linux/if_tun.h>
#include <linux/if_packet.h>
#include <linux/if_addr.h>
#include <linux/falloc.h>
#include <linux/filter.h>
#include <linux/fs.h>
#include <linux/keyctl.h>
#include <linux/netlink.h>
#include <linux/perf_event.h>
#include <linux/random.h>
#include <linux/reboot.h>
#include <linux/rtnetlink.h>
#include <linux/ptrace.h>
#include <linux/sched.h>
#include <linux/seccomp.h>
#include <linux/sockios.h>
#include <linux/wait.h>
#include <linux/icmpv6.h>
#include <linux/serial.h>
#include <linux/can.h>
#include <linux/vm_sockets.h>
#include <linux/taskstats.h>
#include <linux/genetlink.h>
#include <linux/stat.h>
#include <linux/watchdog.h>
#include <net/route.h>
#include <asm/termbits.h>
#ifndef MSG_FASTOPEN
#define MSG_FASTOPEN 0x20000000
#endif
#ifndef PTRACE_GETREGS
#define PTRACE_GETREGS 0xc
#endif
#ifndef PTRACE_SETREGS
#define PTRACE_SETREGS 0xd
#endif
#ifndef SOL_NETLINK
#define SOL_NETLINK 270
#endif
#ifdef SOL_BLUETOOTH
// SPARC includes this in /usr/include/sparc64-linux-gnu/bits/socket.h
// but it is already in bluetooth_linux.go
#undef SOL_BLUETOOTH
#endif
// Certain constants are missing from the fs/crypto UAPI
#define FS_KEY_DESC_PREFIX "fscrypt:"
#define FS_KEY_DESC_PREFIX_SIZE 8
#define FS_MAX_KEY_SIZE 64
'
includes_NetBSD='
#include <sys/types.h>
#include <sys/param.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/sysctl.h>
#include <sys/termios.h>
#include <sys/ttycom.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/in_systm.h>
#include <netinet/ip.h>
#include <netinet/ip_mroute.h>
#include <netinet/if_ether.h>
// Needed since <sys/param.h> refers to it...
#define schedppq 1
'
includes_OpenBSD='
#include <sys/types.h>
#include <sys/param.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/sysctl.h>
#include <sys/termios.h>
#include <sys/ttycom.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/if_var.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/in_systm.h>
#include <netinet/ip.h>
#include <netinet/ip_mroute.h>
#include <netinet/if_ether.h>
#include <net/if_bridge.h>
// We keep some constants not supported in OpenBSD 5.5 and beyond for
// the promise of compatibility.
#define EMUL_ENABLED 0x1
#define EMUL_NATIVE 0x2
#define IPV6_FAITH 0x1d
#define IPV6_OPTIONS 0x1
#define IPV6_RTHDR_STRICT 0x1
#define IPV6_SOCKOPT_RESERVED1 0x3
#define SIOCGIFGENERIC 0xc020693a
#define SIOCSIFGENERIC 0x80206939
#define WALTSIG 0x4
'
includes_SunOS='
#include <limits.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/mman.h>
#include <sys/wait.h>
#include <sys/ioctl.h>
#include <sys/mkdev.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_arp.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <termios.h>
#include <netinet/ip.h>
#include <netinet/ip_mroute.h>
'
includes='
#include <sys/types.h>
#include <sys/file.h>
#include <fcntl.h>
#include <dirent.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/ip.h>
#include <netinet/ip6.h>
#include <netinet/tcp.h>
#include <errno.h>
#include <sys/signal.h>
#include <signal.h>
#include <sys/resource.h>
#include <time.h>
'
ccflags="$@"
# Write go tool cgo -godefs input.
(
echo package unix
echo
echo '/*'
indirect="includes_$(uname)"
echo "${!indirect} $includes"
echo '*/'
echo 'import "C"'
echo 'import "syscall"'
echo
echo 'const ('
# The gcc command line prints all the #defines
# it encounters while processing the input
echo "${!indirect} $includes" | $CC -x c - -E -dM $ccflags |
awk '
$1 != "#define" || $2 ~ /\(/ || $3 == "" {next}
$2 ~ /^E([ABCD]X|[BIS]P|[SD]I|S|FL)$/ {next} # 386 registers
$2 ~ /^(SIGEV_|SIGSTKSZ|SIGRT(MIN|MAX))/ {next}
$2 ~ /^(SCM_SRCRT)$/ {next}
$2 ~ /^(MAP_FAILED)$/ {next}
$2 ~ /^ELF_.*$/ {next}# <asm/elf.h> contains ELF_ARCH, etc.
$2 ~ /^EXTATTR_NAMESPACE_NAMES/ ||
$2 ~ /^EXTATTR_NAMESPACE_[A-Z]+_STRING/ {next}
$2 !~ /^ETH_/ &&
$2 !~ /^EPROC_/ &&
$2 !~ /^EQUIV_/ &&
$2 !~ /^EXPR_/ &&
$2 ~ /^E[A-Z0-9_]+$/ ||
$2 ~ /^B[0-9_]+$/ ||
$2 ~ /^(OLD|NEW)DEV$/ ||
$2 == "BOTHER" ||
$2 ~ /^CI?BAUD(EX)?$/ ||
$2 == "IBSHIFT" ||
$2 ~ /^V[A-Z0-9]+$/ ||
$2 ~ /^CS[A-Z0-9]/ ||
$2 ~ /^I(SIG|CANON|CRNL|UCLC|EXTEN|MAXBEL|STRIP|UTF8)$/ ||
$2 ~ /^IGN/ ||
$2 ~ /^IX(ON|ANY|OFF)$/ ||
$2 ~ /^IN(LCR|PCK)$/ ||
$2 !~ "X86_CR3_PCID_NOFLUSH" &&
$2 ~ /(^FLU?SH)|(FLU?SH$)/ ||
$2 ~ /^C(LOCAL|READ|MSPAR|RTSCTS)$/ ||
$2 == "BRKINT" ||
$2 == "HUPCL" ||
$2 == "PENDIN" ||
$2 == "TOSTOP" ||
$2 == "XCASE" ||
$2 == "ALTWERASE" ||
$2 == "NOKERNINFO" ||
$2 ~ /^PAR/ ||
$2 ~ /^SIG[^_]/ ||
$2 ~ /^O[CNPFPL][A-Z]+[^_][A-Z]+$/ ||
$2 ~ /^(NL|CR|TAB|BS|VT|FF)DLY$/ ||
$2 ~ /^(NL|CR|TAB|BS|VT|FF)[0-9]$/ ||
$2 ~ /^O?XTABS$/ ||
$2 ~ /^TC[IO](ON|OFF)$/ ||
$2 ~ /^IN_/ ||
$2 ~ /^LOCK_(SH|EX|NB|UN)$/ ||
$2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|ICMP6|TCP|EVFILT|NOTE|EV|SHUT|PROT|MAP|PACKET|MSG|SCM|MCL|DT|MADV|PR)_/ ||
$2 ~ /^FALLOC_/ ||
$2 == "ICMPV6_FILTER" ||
$2 == "SOMAXCONN" ||
$2 == "NAME_MAX" ||
$2 == "IFNAMSIZ" ||
$2 ~ /^CTL_(HW|KERN|MAXNAME|NET|QUERY)$/ ||
$2 ~ /^KERN_(HOSTNAME|OS(RELEASE|TYPE)|VERSION)$/ ||
$2 ~ /^HW_MACHINE$/ ||
$2 ~ /^SYSCTL_VERS/ ||
$2 ~ /^(MS|MNT|UMOUNT)_/ ||
$2 ~ /^TUN(SET|GET|ATTACH|DETACH)/ ||
$2 ~ /^(O|F|E?FD|NAME|S|PTRACE|PT)_/ ||
$2 ~ /^LINUX_REBOOT_CMD_/ ||
$2 ~ /^LINUX_REBOOT_MAGIC[12]$/ ||
$2 !~ "NLA_TYPE_MASK" &&
$2 ~ /^(NETLINK|NLM|NLMSG|NLA|IFA|IFAN|RT|RTCF|RTN|RTPROT|RTNH|ARPHRD|ETH_P)_/ ||
$2 ~ /^SIOC/ ||
$2 ~ /^TIOC/ ||
$2 ~ /^TCGET/ ||
$2 ~ /^TCSET/ ||
$2 ~ /^TC(FLSH|SBRKP?|XONC)$/ ||
$2 !~ "RTF_BITS" &&
$2 ~ /^(IFF|IFT|NET_RT|RTM|RTF|RTV|RTA|RTAX)_/ ||
$2 ~ /^BIOC/ ||
$2 ~ /^RUSAGE_(SELF|CHILDREN|THREAD)/ ||
$2 ~ /^RLIMIT_(AS|CORE|CPU|DATA|FSIZE|LOCKS|MEMLOCK|MSGQUEUE|NICE|NOFILE|NPROC|RSS|RTPRIO|RTTIME|SIGPENDING|STACK)|RLIM_INFINITY/ ||
$2 ~ /^PRIO_(PROCESS|PGRP|USER)/ ||
$2 ~ /^CLONE_[A-Z_]+/ ||
$2 !~ /^(BPF_TIMEVAL)$/ &&
$2 ~ /^(BPF|DLT)_/ ||
$2 ~ /^CLOCK_/ ||
$2 ~ /^CAN_/ ||
$2 ~ /^CAP_/ ||
$2 ~ /^ALG_/ ||
$2 ~ /^FS_(POLICY_FLAGS|KEY_DESC|ENCRYPTION_MODE|[A-Z0-9_]+_KEY_SIZE|IOC_(GET|SET)_ENCRYPTION)/ ||
$2 ~ /^GRND_/ ||
$2 ~ /^KEY_(SPEC|REQKEY_DEFL)_/ ||
$2 ~ /^KEYCTL_/ ||
$2 ~ /^PERF_EVENT_IOC_/ ||
$2 ~ /^SECCOMP_MODE_/ ||
$2 ~ /^SPLICE_/ ||
$2 ~ /^(VM|VMADDR)_/ ||
$2 ~ /^IOCTL_VM_SOCKETS_/ ||
$2 ~ /^(TASKSTATS|TS)_/ ||
$2 ~ /^CGROUPSTATS_/ ||
$2 ~ /^GENL_/ ||
$2 ~ /^STATX_/ ||
$2 ~ /^UTIME_/ ||
$2 ~ /^XATTR_(CREATE|REPLACE)/ ||
$2 ~ /^ATTR_(BIT_MAP_COUNT|(CMN|VOL|FILE)_)/ ||
$2 ~ /^FSOPT_/ ||
$2 ~ /^WDIOC_/ ||
$2 !~ "WMESGLEN" &&
$2 ~ /^W[A-Z0-9]+$/ ||
$2 ~ /^BLK[A-Z]*(GET$|SET$|BUF$|PART$|SIZE)/ {printf("\t%s = C.%s\n", $2, $2)}
$2 ~ /^__WCOREFLAG$/ {next}
$2 ~ /^__W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", substr($2,3), $2)}
{next}
' | sort
echo ')'
) >_const.go
# Pull out the error names for later.
# Preprocess <errno.h> with -dM (dump every #define) and keep the E* names.
errors=$(
echo '#include <errno.h>' | $CC -x c - -E -dM $ccflags |
awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print $2 }' |
sort
)
# Pull out the signal names for later.
# Same trick for <signal.h>; SIGSTK*/SIGRT* are filtered out.
signals=$(
echo '#include <signal.h>' | $CC -x c - -E -dM $ccflags |
awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print $2 }' |
egrep -v '(SIGSTKSIZE|SIGSTKSZ|SIGRT)' |
sort
)
# Again, writing regexps to a file.
# _error.grep / _signal.grep hold anchored patterns ("^\tENAME[ \t]*=") used
# below to split the cgo output into error, signal and other constants.
echo '#include <errno.h>' | $CC -x c - -E -dM $ccflags |
awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print "^\t" $2 "[ \t]*=" }' |
sort >_error.grep
echo '#include <signal.h>' | $CC -x c - -E -dM $ccflags |
awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print "^\t" $2 "[ \t]*=" }' |
egrep -v '(SIGSTKSIZE|SIGSTKSZ|SIGRT)' |
sort >_signal.grep
# Emit the generated-file header for the Go source on stdout.
echo '// mkerrors.sh' "$@"
echo '// Code generated by the command above; see README.md. DO NOT EDIT.'
echo
echo "// +build ${GOARCH},${GOOS}"
echo
# Resolve every C constant referenced in _const.go to its value via cgo.
go tool cgo -godefs -- "$@" _const.go >_error.out
# Constants that are neither errors nor signals pass through unchanged.
cat _error.out | grep -vf _error.grep | grep -vf _signal.grep
echo
echo '// Errors'
echo 'const ('
# Errno constants are wrapped in syscall.Errno(...).
cat _error.out | grep -f _error.grep | sed 's/=\(.*\)/= syscall.Errno(\1)/'
echo ')'
echo
echo '// Signals'
echo 'const ('
# Signal constants are wrapped in syscall.Signal(...).
cat _error.out | grep -f _signal.grep | sed 's/=\(.*\)/= syscall.Signal(\1)/'
echo ')'
# Run C program to print error and syscall strings.
(
echo -E "
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <ctype.h>
#include <string.h>
#include <signal.h>
#define nelem(x) (sizeof(x)/sizeof((x)[0]))
enum { A = 'A', Z = 'Z', a = 'a', z = 'z' }; // avoid need for single quotes below
int errors[] = {
"
for i in $errors
do
echo -E ' '$i,
done
echo -E "
};
int signals[] = {
"
for i in $signals
do
echo -E ' '$i,
done
# Use -E because on some systems bash builtin interprets \n itself.
echo -E '
};
static int
intcmp(const void *a, const void *b)
{
return *(int*)a - *(int*)b;
}
int
main(void)
{
int i, e;
char buf[1024], *p;
printf("\n\n// Error table\n");
printf("var errors = [...]string {\n");
qsort(errors, nelem(errors), sizeof errors[0], intcmp);
for(i=0; i<nelem(errors); i++) {
e = errors[i];
if(i > 0 && errors[i-1] == e)
continue;
strcpy(buf, strerror(e));
// lowercase first letter: Bad -> bad, but STREAM -> STREAM.
if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z)
buf[0] += a - A;
printf("\t%d: \"%s\",\n", e, buf);
}
printf("}\n\n");
printf("\n\n// Signal table\n");
printf("var signals = [...]string {\n");
qsort(signals, nelem(signals), sizeof signals[0], intcmp);
for(i=0; i<nelem(signals); i++) {
e = signals[i];
if(i > 0 && signals[i-1] == e)
continue;
strcpy(buf, strsignal(e));
// lowercase first letter: Bad -> bad, but STREAM -> STREAM.
if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z)
buf[0] += a - A;
// cut trailing : number.
p = strrchr(buf, ":"[0]);
if(p)
*p = '\0';
printf("\t%d: \"%s\",\n", e, buf);
}
printf("}\n\n");
return 0;
}
'
) >_errors.c
$CC $ccflags -o _errors _errors.c && $GORUN ./_errors && rm -f _errors.c _errors _const.go _error.grep _signal.grep _error.out
|
fagongzi/netproxy
|
vendor/golang.org/x/sys/unix/mkerrors.sh
|
Shell
|
apache-2.0
| 14,259 |
#!/usr/bin/env bash
echo 'begin deploy'
# Build the tarball from inside .tmp so archive paths are relative to it.
# Run in a subshell so the working directory is restored even when tar
# fails; previously a failed tar skipped the `cd ..` and the deploy script
# then ran from inside .tmp.
# if hidden files are added, capture them with .??*
(cd .tmp && tar -czvf ../dist.tar.gz *)
node scripts/deploy
|
thunder033/pulsar-api
|
scripts/deploy.sh
|
Shell
|
isc
| 162 |
#!/bin/bash
# This script dumps the current site
# Can be called with the following arguments:
# 1. Type: dump/backup - is this a dump for sync, or a backup
# 2. Environment - if this is called on a server we might need to add the environment name, as specific paths etc. might
# need to be taken into account there - use "LOCAL" for local/default environment
# 3. Dump name - dumps can be named - a named backup will never be automatically deleted - supply "false" for default
# 4. Skip: skipfiles - if the fourth parameter supplied is called "skipfiles", then files will be skipped in the dump
#You need to supply either 'dump' or 'backup' as type
# Require the dump type argument.
if [ -z "${1}" ]; then
echo "Please specify which type of dump - dump/backup";
exit;
fi
DUMPTYPE=$1
# Repo root: three levels up from this script's directory.
BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd ../../.. && pwd )";
MODULEDIR="$BASEDIR/ttools/sitesync-core";
#sourcing variables
source $MODULEDIR/lib/vars.sh; #$DUMP_PATH_DEFAULT is defined here
DUMP_PATH=$DUMP_PATH_DEFAULT;
#getting configuration variables
# NOTE(review): the output of vars.sh is eval'd verbatim — it must remain a
# trusted, repo-controlled script.
VARS="$BASEDIR/ttools/core/lib/vars.sh"
eval `$VARS`
#Script can be called with a second environment parameter
ENV=LOCAL;
if [ "${2}" ]; then
ENV=$2;
fi
ENVVARS="$BASEDIR/ttools/core/lib/vars-for-env.sh $ENV"
eval `$ENVVARS`
#specifics for backup type
if [[ "$DUMPTYPE" == "backup" ]]; then
DUMP_PATH="$BASEDIR/temp/dumps/$BACKUP_NAME";
#if a backup path has been set for the environment, use that instead
#(indirect expansion: the variable name is built from the environment name)
backupPathToEval="Environments_"$ENV"_Sitesync_BackupPath"
if [ "${!backupPathToEval}" != "" ]; then
DUMP_PATH="${!backupPathToEval}";
mkdir -p $DUMP_PATH
fi
DUMP_NAME=$(date +"%Y-%m-%d_%H-%M%Z");
#dump name can be called with a third backup name parameter
#in this case any above settings are overridden
if [[ "${3}" ]]; then
if [[ "${3}" != "false" ]]; then
DUMP_PATH="$BASEDIR/temp/dumps/$BACKUP_NAMED_NAME";
DUMP_NAME=$(date +"%Y-%m-%d_")$3;
fi
fi
fi
#making sure dump path exists
mkdir -p $DUMP_PATH/$DUMP_NAME;
echo "Dumping db and assets to $DUMP_PATH/$DUMP_NAME";
DBNAME="$DUMP_PATH/$DUMP_NAME/$DUMP_DBNAME";
FILESDIR="$DUMP_PATH/$DUMP_NAME/$DUMP_FILESDIR";
# skipping files if requested (4th argument == "skipfiles")
if [[ "${4}" == "skipfiles" ]]; then
FILESDIR='false'
fi
#This is handled by each framework module individually
$BASEDIR/$Sitesync_FrameworkModule/lib/dump-current-site.sh $DBNAME $FILESDIR $ENV
#dump compression has been taken out for now
#echo "...and compressing the dump";
#
#cd $DUMP_PATH/$DUMP_NAME;
#nice -n 19 tar -zcf ../$DUMP_NAME.tar.gz *;
#
##we don't want to keep all the uncompressed versions for backups
##so we'll delete the backup directory, and only keep the tar
#if [[ "$DUMPTYPE" == "backup" ]]; then
# rm -rf $DUMP_PATH/$DUMP_NAME
#fi
#specifics for backup type - only keep x backups
#default is 6 but can be configured through config.yml
KEEP=$BACKUP_KEEP_DEFAULT;
if [ "$Sitesync_DumpBackupKeep" ]; then
KEEP=$Sitesync_DumpBackupKeep
fi
if [[ "$DUMPTYPE" == "backup" ]]; then
#only clean up if the type is backup and no name parameter has been submitted
if [ -z "${3}" ]; then
echo ""
echo "Keeping $KEEP latest backups"
echo ""
#regulating...
KEEP=$(($KEEP+1));
cd $DUMP_PATH;
#from http://stackoverflow.com/questions/6024088/linux-save-only-recent-10-folders-and-delete-the-rest
ls -dt */ | tail -n +$KEEP | xargs rm -rf
fi
fi
|
titledk/ttools-sitesync-core
|
lib/dump-current-site.sh
|
Shell
|
mit
| 3,354 |
# Prepend personal tool directories and local bins to the search path.
export PATH=".:bin:/usr/local/bin:/usr/local/sbin:$ZSH/bin:$HOME/Dropbox/tools:$PATH"
# Make locally-installed man pages (mysql, git) discoverable.
export MANPATH="/usr/local/man:/usr/local/mysql/man:/usr/local/git/man:$MANPATH"
|
antonsutton/dotfiles
|
system/path.zsh
|
Shell
|
mit
| 168 |
# Build-and-redeploy helper for the Windmeal webapp (not a real install script,
# but running it should be enough).
# WARNING: it wipes the whole tomcat webapps directory.
# elasticsearch is started by hand because configuring it the way we wanted via
# /etc/init.d/elasticsearch did not work; repeated runs may therefore leave
# several instances behind (ps -ef + kill is the only workaround we found).
# The application tests fill the database, so running them a second time causes
# errors and prevents the .war from being built; those integration tests are
# only executed at configure time, hence -DskipTests below.
/etc/init.d/mongodb stop
/etc/init.d/mongodb start
/etc/init.d/elasticsearch stop
/usr/share/elasticsearch/bin/elasticsearch -f -D es.config=/usr/share/elasticsearch/config/elasticsearch.yml -Des.index.store.type=memory &
mvn package -DskipTests
/etc/init.d/tomcat7 stop
mkdir -p /var/lib/tomcat7/webapps # was "webapps/*": the glob made mkdir a no-op (or created a literal '*' directory)
rm -rvf /var/lib/tomcat7/webapps/windmeal.war
rm -rvf /var/lib/tomcat7/webapps/*
cp -rf target/windmeal-1.0-SNAPSHOT.war /var/lib/tomcat7/webapps/windmeal.war
/etc/init.d/tomcat7 start
|
DDeis/Windmeal
|
run.sh
|
Shell
|
mit
| 1,125 |
#!/bin/bash
# Profile every Excel workbook in Input/, transform the per-file stats and
# merge everything into a single output workbook.
#   prof - 1 (regular docs) or 2 (unregular docs), passed to transform.py
#   name - base name of the final .xls output
read -r -p 'Enter 1-2 for the case of regular-unregular docs in Input/' prof
read -r -p 'Output name (csv) ' name
# Iterate with a glob instead of parsing `ls` (safe for odd filenames);
# ${file%.*}/${file##*/} replace the old `echo | cut | cut` pipelines.
for file in Input/*.xlsx
do
base=${file%.*}
base=${base##*/}
echo "$base"
echo -e "\n\n\t==>\033[31m $file \033[0m\n"
python profile_excel.py -t -u 10000 -s 0 "$file" > "$base.csv"
mv "$base.csv" "stats/"
done
for file in stats/*.csv
do
base=${file%.*}
base=${base##*/}
echo "$base"
echo -e "\n\n\t==>\033[31m $file \033[0m\n"
python transform.py "$file" "$prof" > "${base}Transformed.csv"
mv "${base}Transformed.csv" "transform/"
done
# Collect all transformed CSVs; FOO stays a space-separated string because
# csv_to_excel.py relies on word-splitting to receive one argument per file.
FOO=""
for file in transform/*.csv
do
base=${file%.*}
base=${base##*/}
echo "$base"
FOO="$FOO $file"
done
echo -e "\n\n\t==>\033[31m $FOO \033[0m\n"
python csv_to_excel.py $FOO
python xlsTransform.py -f "output.xls" -s 1 -r 0 -o "$name.xls"
# rm "output.xls"
|
SimoRihani/DataProfiling
|
script.sh
|
Shell
|
mit
| 1,114 |
#!/bin/bash
#
# Description : VSCode
# Author : Jose Cerrejon Gonzalez (ulysess@gmail_dot._com)
# Version : 1.2.0 (28/Feb/21)
# Compatible : Raspberry Pi 4 (tested)
# Help : https://gitlab.com/paulcarroty/vscodium-deb-rpm-repo
#
# Load the shared PiKISS helpers from wherever the script is run; as a last
# resort download helper.sh from the repository.
. ../helper.sh || . ./scripts/helper.sh || . ./helper.sh || wget -q 'https://github.com/jmcerrejon/PiKISS/raw/master/scripts/helper.sh'
clear
check_board || { echo "Missing file helper.sh. I've tried to download it for you. Try to run the script again." && exit 1; }
# Microsoft's redirect URLs for the latest .deb builds (32-bit and 64-bit ARM).
readonly INSTALL_URL="https://aka.ms/linux-armhf-deb"
readonly INSTALL_64_BITS_URL="https://aka.ms/linux-arm64-deb"
# Temp file where dialog stores the selected menu item ($$ = this shell's PID).
INPUT=/tmp/vscod.$$
install_essential_extensions_pack() {
    # Install the curated extension set, one `code --install-extension`
    # invocation per id, in the same order as before.
    local extension
    for extension in \
        alefragnani.bookmarks \
        coenraads.bracket-pair-colorizer-2 \
        naumovs.color-highlight \
        equinusocio.vsc-community-material-theme \
        mrmlnc.vscode-duplicate \
        liamhammett.inline-parameters \
        christian-kohler.path-intellisense \
        foxundermoon.shell-format \
        mads-hartmann.bash-ide-vscode \
        timonwong.shellcheck \
        tabnine.tabnine-vscode \
        glavin001.unibeautify-vscode; do
        code --install-extension "$extension"
    done
}
post_install() {
# Clean up the downloaded package, refresh the LXDE panel, show the installed
# version and offer the curated extension pack (installed only on 'y'/'Y').
rm "$HOME"/code.deb
lxpanelctl restart
# Show code version
get_vscode_installed_version
# Ask to Install default extensions
echo "Now you can choose to install the next extensions:
· Bookmarks (alefragnani.bookmarks).
· Bracket Pair Colorizer 2 (coenraads.bracket-pair-colorizer-2).
· Color Highlight (naumovs.color-highlight).
· Community Material Theme (equinusocio.vsc-community-material-theme).
· Duplicate action (mrmlnc.vscode-duplicate).
· Inline Parameters for VSCode (liamhammett.inline-parameters).
· Path Intellisense (christian-kohler.path-intellisense).
· shell-format (foxundermoon.shell-format).
· shellcheck (timonwong.shellcheck).
· Bash IDE (mads-hartmann.bash-ide-vscode).
· TabNine (tabnine.tabnine-vscode).
· Unibeautify - Universal Formatter (glavin001.unibeautify-vscode).
"
read -p "Do you want to install those extensions (y/N)? " response
if [[ $response =~ [Yy] ]]; then
install_essential_extensions_pack
fi
}
# VSCode
uninstall_vscode() {
# Offer to remove an existing VSCode install (package plus user config).
# Returns 0 only when code is absent; otherwise always ends via exit_message.
if [[ ! -f /usr/bin/code ]]; then
return 0
fi
echo -e "VSCode already installed."
read -p "Do you want to uninstall VSCode (y/N)? " response
if [[ $response =~ [Yy] ]]; then
sudo apt remove -y code/now && rm -rf ~/.vscode ~/.config/Code/
# If the config directory survived the rm above, removal failed.
if [[ -e ~/.config/Code/ ]]; then
echo -e "I hate when this happens. I could not find the directory, Try to uninstall manually. Apologies."
exit_message
fi
echo -e "\nSuccessfully uninstalled."
exit_message
fi
exit_message
}
get_vscode_installed_version() {
    # Report the version string of the currently installed `code` binary
    # (first line of `code --version`, stderr folded into stdout).
    local version_line
    version_line=$(code --version 2>&1 | head -n 1)
    echo -e "\nVersion installed: $version_line\n"
}
install_vscode() {
# Download the latest VSCode .deb straight from Microsoft (URL chosen by
# kernel bitness) and install it with dpkg. May exit early if the user
# chooses to uninstall an existing copy instead.
uninstall_vscode
install_script_message
echo "
VSCode for Raspberry Pi
=======================
· Get the latest version from Microsoft's website (not using or adding source list repository).
· 32 or 64 bits.
· ~220 Mb occupied with no extensions.
· Ask if you want to install what I considered essential extensions, cause I'm a cool dev :)
"
read -p "Press [Enter] to continue..."
local VSCODE_INSTALL
echo -e "\nInstalling, please wait...\n"
# is_kernel_64_bits comes from helper.sh sourced at the top of the script.
if ! is_kernel_64_bits; then
VSCODE_INSTALL="$INSTALL_URL"
else
VSCODE_INSTALL="$INSTALL_64_BITS_URL"
fi
wget -q --show-progress "$VSCODE_INSTALL" -O "$HOME"/code.deb
echo
sudo dpkg -i "$HOME"/code.deb
post_install
echo -e "\nVSCode installed!. Go to Menu > Programming > Visual Studio Code or type code on a terminal."
exit_message
}
# VSCodium
uninstall_vscodium() {
# Offer to remove VSCodium plus its apt source list and keyring.
# Returns 0 only when codium is absent; otherwise always ends via exit_message.
if [[ ! -f /usr/bin/codium ]]; then
return 0
fi
echo -e "VSCodium already installed."
read -p "Do you want to uninstall it (y/N)? " response
if [[ $response =~ [Yy] ]]; then
sudo apt remove -y codium && rm -rf ~/.config/VSCodium/
sudo rm /etc/apt/sources.list.d/vscodium.list /etc/apt/trusted.gpg.d/vscodium-archive-keyring.gpg
if [[ -e ~/.config/VSCodium/ ]]; then
echo -e "I hate when this happens. I could not find the directory, Try to uninstall manually. Apologies."
exit_message
fi
echo -e "\nSuccessfully uninstalled."
exit_message
fi
exit_message
}
add_repo_vscodium() {
# Import the VSCodium signing key and register its apt repository, then
# refresh the package index.
echo -e "Adding PHP & new repository /etc/apt/sources.list.d/vscodium.list..."
wget -qO - https://gitlab.com/paulcarroty/vscodium-deb-rpm-repo/raw/master/pub.gpg |
gpg --dearmor |
sudo dd of=/etc/apt/trusted.gpg.d/vscodium-archive-keyring.gpg
echo 'deb [signed-by=/etc/apt/trusted.gpg.d/vscodium-archive-keyring.gpg] https://paulcarroty.gitlab.io/vscodium-deb-rpm-repo/debs/ vscodium main' |
sudo tee /etc/apt/sources.list.d/vscodium.list
sudo apt update
}
install_vscodium() {
# Install VSCodium from its apt repository (added above). May exit early if
# the user chooses to uninstall an existing copy instead.
uninstall_vscodium
install_script_message
echo "
VSCodium for Raspberry Pi
=========================
· Get the latest version of VSCode removing the Telemetry.
· Add /etc/apt/sources.list.d/vscodium.list for future updates (If you uninstall VSCodium with PiKISS the repo is removed, too).
· 32 or 64 bits.
· ~220 Mb occupied with no extensions.
"
read -p "Press [Enter] to continue..."
echo -e "\nInstalling, please wait...\n"
add_repo_vscodium
sudo apt install -y codium
echo -e "\nVSCodium installed!. Go to Menu > Programming > VSCodium or type codium on a terminal."
exit_message
}
menu() {
# dialog-based chooser; loops until an install routine runs or the user exits.
# The selected item is read back from the $INPUT temp file.
while true; do
dialog --clear \
--title "[ VSCode/ium ]" \
--menu "Choose IDE:" 11 100 3 \
VSCodium "Free/Libre Open Source Software Binaries of VSCode" \
VSCode "VSCode is a freeware source-code editor made by Microsoft " \
Exit "Back to main menu" 2>"${INPUT}"
menuitem=$(<"${INPUT}")
case $menuitem in
VSCodium) clear && install_vscodium && return 0 ;;
VSCode) clear && install_vscode && return 0 ;;
Exit) exit 0 ;;
esac
done
}
# Entry point: show the VSCode/VSCodium selection menu.
menu
|
jmcerrejon/PiKISS
|
scripts/devs/vscode.sh
|
Shell
|
mit
| 6,502 |
# Template of environment variables for the donations app; copy and fill in
# before sourcing. Empty exports must be populated per deployment.
# commented are part of the API but not yet implemented
# export SPACE_LOG_ENABLED=
# export SPACE_WORKQUEUE_MONGO_URL=
# export SPACE_WORKQUEUE_MONGO_OPLOG_URL=
# export SPACE_WORKQUEUE_COLLECTION_NAME=
# export SPACE_ES_LOG_ENABLED=
export SPACE_ES_COMMITS_MONGO_URL=
export SPACE_ES_COMMITS_MONGO_OPLOG_URL=
export SPACE_ES_COMMITS_COLLECTION_NAME=
#export SPACE_ES_COMMITS_PROCESSING_TIMEOUT=
export SPACE_ES_SNAPSHOTTING_ENABLED=
export SPACE_ES_SNAPSHOTTING_FREQUENCY=
export SPACE_ES_SNAPSHOTTING_COLLECTION_NAME=
export EXAMPLE_ORG_EMAIL=''
export EXAMPLE_ORG_PASSWORD=''
export DEFAULT_LANGUAGE='en'
# Kadira APM credentials (leave empty to disable).
export KADIRA_APP_ID=
export KADIRA_APP_SECRET=
|
meteor-space/donations
|
environment.example.sh
|
Shell
|
mit
| 658 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-1463-2
#
# Security announcement date: 2012-06-15 00:00:00 UTC
# Script generation date: 2017-01-01 21:02:39 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - unity-2d-panel:5.12.0-0ubuntu1.1
#
# Last versions recommanded by security team:
# - unity-2d-panel:5.12.0-0ubuntu1.1
#
# CVE List:
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade only if the package is already installed, pinned to the fixed version.
sudo apt-get install --only-upgrade unity-2d-panel=5.12.0-0ubuntu1.1 -y
|
Cyberwatch/cbw-security-fixes
|
Ubuntu_12.04_LTS/x86_64/2012/USN-1463-2.sh
|
Shell
|
mit
| 623 |
#!/usr/bin/env bash
# Deploy leftover vim configuration: link the vim-speeddating override into
# the plugin's after/ directory and ensure the persistent-undo dir exists.
cd $(dirname $(readlink -f $0))
source ../../utils/libdeploy.bash
VIM_SPEEDDATING_CUSTOM_PATH="${HOME}/.vim/plugged/vim-speeddating/after/plugin"
mkdir -p ${VIM_SPEEDDATING_CUSTOM_PATH}
mkdir -p ${HOME}/.vim/undodir
# link_package is provided by libdeploy.bash sourced above.
link_package \
./config/speeddating.vim ${VIM_SPEEDDATING_CUSTOM_PATH}/speeddating.vim
|
tiborsimon/dotfiles
|
configs/vim/04_deploy_remaining_config.bash
|
Shell
|
mit
| 328 |
#!/bin/bash
# Demonstrates how IFS affects word splitting: with the default IFS the IP
# stays one word; after IFS="." each octet becomes a separate word.
IP="168.126.63.1"
for i in $IP; do
echo "word: $i"
done
# Switch the field separator to '.' so the unquoted $IP splits into octets.
IFS="."
for i in $IP; do
echo "word: $i"
done
|
honux77/practice
|
bash/naveru/ifs_test.sh
|
Shell
|
mit
| 120 |
#!/bin/bash -xe
# Provision a three-container (juju / mysql / frontend) LXC environment and
# deploy a LAMP stack with Juju (manual provider). Must run as root on an
# Ubuntu trusty host. Traces and aborts on error (-xe).
date "+%Y-%m-%d %H:%M:%S"
apt-get update
apt-get -y --force-yes install software-properties-common jq curl
add-apt-repository --yes ppa:juju/stable
apt-get -y --force-yes update
apt-get -y --force-yes install juju-core sudo lxc git-core aufs-tools mysql-client
useradd -G sudo -s /bin/bash -m -d /home/ubuntu ubuntu
mkdir -p /root/.ssh
test -f /root/.ssh/juju || ssh-keygen -t rsa -b 4096 -f /root/.ssh/juju -N ''
echo "ubuntu ALL=(ALL) NOPASSWD:ALL" > "/etc/sudoers.d/90-cloud-init-users"
# One base container, then aufs-backed snapshot clones for every role.
lxc-info -n trusty-base || lxc-create -t ubuntu-cloud -n trusty-base -- -r trusty -S /root/.ssh/juju
lxc-info -n juju || lxc-clone -s -B aufs trusty-base juju
lxc-info -n mysql || lxc-clone -s -B aufs trusty-base mysql
lxc-info -n frontend || lxc-clone -s -B aufs trusty-base frontend
for d in juju mysql frontend; do
lxc-start -d -n $d;
done
# Wait until each container has been assigned an IP address.
# Fix: the pipe before awk was missing ("lxc-info -n $d -i awk ..."), so the
# comparison ran against lxc-info's error output and never actually waited.
for d in juju mysql frontend; do
while (true) ; do
if [ "$(lxc-info -n $d -i | awk '{print $2}')" != "" ]; then
break
fi
sleep 10s;
done
done
sleep 60s;
# Prepare SSH access inside every container and make them autostart.
for d in juju mysql frontend; do
lxc-attach -n $d -- /usr/bin/ssh-keygen -A
lxc-attach -n $d -- /usr/sbin/service ssh restart
lxc-attach -n $d -- mkdir -p /home/ubuntu/.ssh/
cat /root/.ssh/juju.pub > /var/lib/lxc/$d/delta0/home/ubuntu/.ssh/authorized_keys
grep -q "lxc.start.auto" /var/lib/lxc/$d/config || echo "lxc.start.auto = 1" >> /var/lib/lxc/$d/config
grep -q "lxc.start.delay" /var/lib/lxc/$d/config || echo "lxc.start.delay = 5" >> /var/lib/lxc/$d/config
done
mkdir -p /home/ubuntu/.ssh/
cat /root/.ssh/juju.pub >> /home/ubuntu/.ssh/authorized_keys
chown -R ubuntu /home/ubuntu
juju generate-config
juju switch manual
JUJU_IP=$(lxc-info -n juju -i | awk '{print $2}')
FE_IP=$(lxc-info -n frontend -i | awk '{print $2}')
MYSQL_IP=$(lxc-info -n mysql -i | awk '{print $2}')
cat <<_EOF_ > /root/.juju/environments.yaml
default: manual
lxc-clone: true
lxc-clone-aufs: true
environments:
manual:
type: manual
bootstrap-host: ${JUJU_IP}
lxc-clone: true
lxc-clone-aufs: true
local:
type: local
default-series: trusty
lxc-clone: true
lxc-clone-aufs: true
_EOF_
mkdir -p /root/.juju/ssh/
cp /root/.ssh/juju /root/.juju/ssh/juju_id_rsa
cp /root/.ssh/juju.pub /root/.juju/ssh/juju_id_rsa.pub
juju bootstrap --debug
# NOTE(review): the original address for machine 1 was mangled in the dump;
# machine 1 should be the juju container, matching machines 2 and 3 below.
juju add-machine ssh:ubuntu@${JUJU_IP} #1
juju add-machine ssh:ubuntu@${FE_IP} #2
juju add-machine ssh:ubuntu@${MYSQL_IP} #3
mkdir -p charms/trusty
test -d charms/trusty/mysql || git clone -b trusty https://github.com/vtolstov/charm-mysql charms/trusty/mysql
test -d charms/trusty/lamp || git clone https://github.com/charms/lamp.git charms/trusty/lamp
# Deploys are retried once before giving up.
juju deploy --repository=charms/ local:trusty/mysql --to 3 || juju deploy --repository=charms/ local:trusty/mysql --to 3 || exit 1;
test -d charms/trusty/haproxy || git clone -b trusty https://github.com/vtolstov/charm-haproxy charms/trusty/haproxy
juju set mysql dataset-size=50%
juju set mysql query-cache-type=ON
juju set mysql query-cache-size=-1
juju deploy --repository=charms/ local:trusty/lamp --to 2 || juju deploy --repository=charms/ local:trusty/lamp --to 2 || exit 1;
juju deploy --repository=charms/ local:trusty/haproxy --to 1 || juju deploy --repository=charms/ local:trusty/haproxy --to 1 || exit 1;
juju add-relation haproxy lamp
# Wait for every unit to report an agent-state.
for s in mysql lamp haproxy; do
while true; do
juju status $s/0 --format=json| jq ".services.$s.units" | grep -q 'agent-state' && break
echo "waiting 5s"
sleep 5s
done
done
#iptables -t nat -A PREROUTING -i eth0 -p tcp -m tcp --dport 80 -j DNAT --to-destination ${FE_IP}:80
#iptables -A FORWARD -i eth0 -d ${FE_IP} -p tcp --dport 80 -j ACCEPT
# Smoke-test: wait until the frontend serves the default Apache page.
while true; do
curl -L -s http://${FE_IP} 2>&1 | grep -q "Apache" && break
echo "waiting 5s"
sleep 5s
done
date "+%Y-%m-%d %H:%M:%S"
fstrim -v /
|
ealekseev/packer-charms
|
scripts/lamp.sh
|
Shell
|
mit
| 3,856 |
#!/bin/bash
# Run one IGC simulation batch (geometric parameter 1.0, simulation number 60).
python RunSimulation.py --Geo 1.0 --sim_num 60
|
xji3/IGCCodonSimulation
|
ShFiles/YDR418W_YEL054C_IGCgeo_1.0_sim_60.sh
|
Shell
|
mit
| 59 |
#!/usr/bin/env sh
# Launch (or resume) Caffe training for the FLIC pose model, stage 2.
#   $1 - optional iteration number; when given, resume from the matching
#        .solverstate snapshot, otherwise start from the iter-0 .caffemodel.
ProjectDir="../../../../../"
cd $ProjectDir
export LD_LIBRARY_PATH=../tool/cuddn/v2/lib/:$LD_LIBRARY_PATH
CaffeRootDir="caffe/"
# build/tools/
Tools="build/tools/"
Tools=$CaffeRootDir$Tools
# models/Pose/FLIC/cp.pt.d302/lecun-8x-2b-sd5_5-tmarks_output_dependent-with_input/
ProtosDir="models/Pose/"
SubProtosDir="FLIC/cp.pt.d302/"
ExperName="lecun-8x-2b-sd5_5-tmarks_output_dependent-with_input/"
ExperPath=$ProtosDir$SubProtosDir$ExperName
mkdir -p $ExperPath
# models/Pose/FLIC/cp.pt.d302/lecun-8x-2b-sd5_5-tmarks_output_dependent-with_input/s2_solver.pt
solver_proto="s2_solver.pt"
solver_proto=$ExperPath$solver_proto
echo $solver_proto
# ../asserts/models/Pose/FLIC/cp.pt.d302/lecun-8x-2b-sd5_5-tmarks_output_dependent-with_input/
ModelsDir="../asserts/"
ModelsDir=$ModelsDir$ExperPath
mkdir -p $ModelsDir
# ../asserts/.../s2_models/ - snapshot directory
model_path="s2_models/"
model_path=$ModelsDir$model_path
mkdir -p $model_path
echo $model_path
# ../asserts/.../s2_log/ - training log directory
log_path="s2_log/"
log_path=$ModelsDir$log_path
mkdir -p $log_path
# prefix -- log file, timestamped per run
cur_file_prefix="flic_"
cur_log_filename=$(date -d "today" +"%Y-%m-%d-%H-%M-%S")
log_filename=$log_path$cur_file_prefix$cur_log_filename".log"
# execute file
caffe_bin="caffe"
caffe_bin=$Tools$caffe_bin
echo
echo "######################################"
echo
echo "Usage: "
echo " sh train_val_.sh [re_iter]"
echo
echo "######################################"
echo
sleep_time=1
sleep $sleep_time
# resume model file
# [ -z ... ] replaces the double-negative [ ! -n ... ] (same behavior, clearer)
if [ -z "$1" ] ;then
re_iter=0
resume_model_file="flic_iter_"$re_iter".caffemodel"
else
re_iter=$1
resume_model_file="flic_iter_"$re_iter".solverstate"
fi
resume_model_file=$model_path$resume_model_file
echo
echo "re_iter:" $re_iter
echo "snapshot path:" $resume_model_file
echo
# run & log command: fresh runs load weights, resumed runs load solver state
if [ -z "$1" ] ;then
$caffe_bin train --solver=$solver_proto --weights=$resume_model_file 2>&1 | tee -a $log_filename
else
$caffe_bin train --solver=$solver_proto --snapshot=$resume_model_file 2>&1 | tee -a $log_filename
fi
echo "Done!"
|
zimenglan-sysu-512/pose_action_caffe
|
models/Pose/FLIC/cp.pt.d302/lecun-8x-2b-sd5_5-tmarks_output_dependent-with_input/s2_train.sh
|
Shell
|
mit
| 2,184 |
#!/usr/bin/env bash
# Import the full product CSV into commercetools, matching variants by SKU.
# Credentials/project come from CT_PROJECT / CT_CLIENT_ID / CT_CLIENT_SECRET.
# You can optionally do a full delete and recreate of the products
# - e.g. to completely re-do category mapping
# - e.g. because in certain circumstances it is faster than a sync and requires less RAM on your local machine.
# first unpublish (you do need this step, otherwise the delete will fail):
# product-csv-sync state --changeTo unpublish --projectKey $CT_PROJECT --clientId $CT_CLIENT_ID --clientSecret $CT_CLIENT_SECRET
# then delete:
# product-csv-sync state --changeTo delete --projectKey $CT_PROJECT --clientId $CT_CLIENT_ID --clientSecret $CT_CLIENT_SECRET
# for testing: import just the subset:
#./node_modules/.bin/product-csv-sync --projectKey $CT_PROJECT --clientId $CT_CLIENT_ID --clientSecret $CT_CLIENT_SECRET import --csv data/products4commercetools.subset.csv --matchBy sku --language de --allowRemovalOfVariants --publish --continueOnProblems
# import all:
./node_modules/.bin/product-csv-sync --projectKey $CT_PROJECT --clientId $CT_CLIENT_ID --clientSecret $CT_CLIENT_SECRET import --csv data/products4commercetools.csv --matchBy sku --language de --allowRemovalOfVariants --publish --continueOnProblems
|
nkuehn/commercetools-demo-site-setup
|
import-csv.sh
|
Shell
|
mit
| 1,157 |
# Upload the built .ipa to Sauce Labs app storage and export the returned
# file id for later Bitrise steps via envman.
# NOTE(review): "-sb -H" makes curl treat "-H" as the argument of -b (cookie
# data); presumably only -s (silent) was intended — confirm against CI logs.
fileId=$(curl -sb -H -u "$SAUCELABS_USERNAME:$SAUCELABS_ACCESS_KEY" --location \
--request POST 'https://api.us-west-1.saucelabs.com/v1/storage/upload' \
--form 'payload=@"../build/ExampleSwift.ipa"' \
--form 'name="ExampleSwift.ipa"' | json item.id)
# Quote the id so an empty or whitespace-containing value cannot break envman's argv.
envman add --key SAUCELABS_FILE_ID_UPLOADED --value "$fileId"
|
mercadopago/px-ios
|
.fastlane/scripts/saucelabs.sh
|
Shell
|
mit
| 312 |
#!/bin/bash
set -x # echo on
# IntelliJ IDEA is a Java integrated development environment (IDE) for developing computer software.
# It is developed by JetBrains (formerly known as IntelliJ), and is available as an Apache 2 Licensed community edition, and in a proprietary commercial edition.
# Both can be used for commercial development.
# Usage:
# $ ./ideaIC.sh
# $ ./ideaIC.sh --version="2017.2.4" --jdk=yes
# $ ./ideaIC.sh --version="2017.2" --jdk=no
version="2017.2.4"
without_jdk=""
# Parse --version/-v and --jdk/-j flags; --jdk=no selects the "-no-jdk" build.
for i in "$@"
do
case $i in
-v=*|--version=*)
version="${i#*=}"
shift # past argument=value
;;
-j=*|--jdk=*)
case "${i#*=}" in
("yes") without_jdk="" ;;
("no") without_jdk="-no-jdk" ;;
(*) without_jdk="" ;;
esac
shift # past argument=value
;;
esac
done
# Prompt (in Russian): confirm downloading the tarball and unpacking it into /opt/jetbrains/.
read -p "Вы уверены, что хотите загрузить файл ideaIC-$version$without_jdk.tar.gz, который потом будет распакован в /opt/jetbrains/? " -n 1 -r
echo # (optional) move to a new line
if [[ $REPLY =~ ^[Yy]$ ]]
then
# do dangerous stuff
cur_dir=$(pwd)
# The first time pushd dir is called, pushd pushes the current directory onto the stack, then cds to dir and pushes it onto the stack.
pushd "$cur_dir"
# Download tarball and its checksum, verify, then unpack into /opt/jetbrains.
wget "https://download.jetbrains.com/idea/ideaIC-$version$without_jdk.tar.gz" -P /tmp
wget "https://download.jetbrains.com/idea/ideaIC-$version$without_jdk.tar.gz.sha256" -P /tmp
cd /tmp
sha256sum -c "ideaIC-$version$without_jdk.tar.gz.sha256" 2>&1 | grep OK
cd /opt/
sudo mkdir jetbrains
cd /opt/jetbrains
sudo tar xf "/tmp/ideaIC-$version$without_jdk.tar.gz"
rm "/tmp/ideaIC-$version$without_jdk.tar.gz"
rm "/tmp/ideaIC-$version$without_jdk.tar.gz.sha256"
# echo "nohup /opt/jetbrains/idea-IC-172.4155.36/bin/idea.sh </dev/null >/dev/null 2>&1 &"
# popd removes the top directory off the stack, revealing a new top. Then it cds to the new top directory.
popd
fi
|
gusenov/auto-soft-install
|
install/ideaIC.sh
|
Shell
|
mit
| 2,025 |
#!/bin/bash
# Run NetDissect on the 768-unit fully-convolutional AlexNet, detached from
# the terminal (nohup + &) so it survives logout; output goes to nohup.out.
cd ..
nohup ./script/rundissect.sh --model alexnet_imagenet_full_conv_768 --layers "conv1 conv2 conv3 conv4 conv5" --dataset dataset/broden1_larger/ --workdir dissection_test --resolution 384 --force pid --probebatch 16 &
|
bonyuta0204/NetDissec
|
research/alexnet_imagenet_full_conv_768_1028.sh
|
Shell
|
mit
| 234 |
# Install macOS GUI applications via Homebrew Cask, grouped by purpose.
# `info` (used at the bottom) comes from the sourced functions.sh.
. functions.sh
# Install Cask
# brew tap caskroom/cask
# brew tap caskroom/versions
# brew tap caskroom/fonts
export HOMEBREW_CASK_OPTS="--appdir=/Applications"
# Core
brew cask install vlc
brew cask install google-chrome
brew cask install google-chrome-canary
brew cask install firefox
brew cask install firefoxnightly
# brew cask install vivaldi
brew cask install torbrowser
# Development
brew cask install sublime-text3 # the very best and fastest code editor
brew cask install visual-studio-code # next gen code editor
# brew cask install atom # (too slow)
brew cask install gitup # the absolute best git Gui for mac
brew cask install p4merge # merge, diff and resolve git conflicts
# brew cask install github-desktop
# brew cask install sourcetree
# brew cask install cyberduck
# brew cask install filezilla
brew cask install imageoptim
brew cask install codekit # needs licence
# brew cask install mamp
brew cask install java
brew cask install sequel-pro # view mysql database
brew cask install postgres
brew cask install owasp-zap # pen test web apps
# brew cask install dash # available via the appstore
# brew cask install virtualbox
brew cask install docker
# Utility
# brew cask install flux
brew cask install appcleaner # clean up after app uninstall
brew cask install spectacle # resize apps to maximise desktop space
brew cask install macpaw-gemini # needs licence
brew cask install blockblock # security app - warns about demons or agents that install themselves permanently
brew cask install onyx # mac utility toolbox
brew cask install gpgtools # pgp key management
# brew cask install keybase
brew cask install cryptomator
# brew cask install togglDesktop
# brew cask install caffeine # available via the appstore
brew cask install taskexplorer
brew cask install knockknock
brew cask install etrecheck # generate a mac status report to debug problems
brew cask install viscosity # needs licence
brew cask install paw # http api, needs licence
brew cask install keka # unzip 7zip
# Games
# brew cask install minecraft # needs licence
brew cask install steam
# Music/Video
brew cask install spotify
brew cask install adapter
brew cask install handbrake
brew cask install burn
# Productivity
brew cask install utox
# brew cask install pushpal
# brew cask install skype
# brew cask install hipchat
# brew cask install teamviewer
# brew cask install slack # available via the appstore
# clean-up
info "Cleaning brew-cask cache"
brew cask cleanup
|
publicarray/dotfiles
|
setup/Caskfile-old.sh
|
Shell
|
mit
| 2,468 |
#!/bin/bash
# Configures Git global configuration settings...
#   $1 - git user.name  (default: kbeckman)
#   $2 - git user.email (default below)
# Picks the DiffMerge binary path based on the host-os helper function.
function git-setup {
local defaultUser="kbeckman"
local defaultEmail="[email protected]"
git config --global user.name "${1:-$defaultUser}"
git config --global user.email "${2:-$defaultEmail}"
if [[ $(host-os) == "mac" ]]; then
diff_str="/Applications/DiffMerge.app/Contents/MacOS/diffmerge \$LOCAL \$REMOTE"
merge_str="/Applications/DiffMerge.app/Contents/MacOS/diffmerge --merge --result=\$MERGED \$LOCAL \$BASE \$REMOTE"
else
diff_str="/usr/bin/diffmerge \$LOCAL \$REMOTE"
merge_str="/usr/bin/diffmerge --merge --result=\$MERGED \$LOCAL \$BASE \$REMOTE"
fi
# Register DiffMerge as both diff and merge tool; placeholders are expanded by git.
git config --global diff.tool diffmerge
git config --global difftool.diffmerge.cmd "${diff_str}"
git config --global merge.tool diffmerge
git config --global mergetool.diffmerge.cmd "${merge_str}"
git config --global mergetool.keepBackup false
git config --global color.ui true
}
# Configures Git at a repository level...
function git-setup-repo
{
    # Same identity defaults as git-setup, applied only to the current repo.
    git config user.name "${1:-kbeckman}"
    git config user.email "${2:[email protected]}"
}
|
kbeckman/Scripts-Settings
|
Bash/FuncLib/functions-git.sh
|
Shell
|
mit
| 1,236 |
#!/bin/bash
# Assemble dotfiles: concatenate per-topic fragments into the real rc files.
mkdir -p ~/.ssh
cat ssh/config* > ~/.ssh/config
cat shell/.gitconfig > ~/.gitconfig
cat shell/.gitignore_global > ~/.gitignore_global
# macOS profile plus shared aliases form the login profile.
cat osx/.profile shell/aliases > ~/.profile
|
pgodel/dotfiles
|
install.sh
|
Shell
|
mit
| 193 |
function little_dotty {
    # Symlink ~/dotfiles/FILE to ~/.FILE, but only when run from ~/dotfiles,
    # the target is not already a symlink, and the source file exists.
    DOTTY_PWD_BASENAME=$(basename "$PWD")
    if [[ $ARGC -eq 0 ]]; then
        echo "USAGE: $0 FILE";
    elif [[ $DOTTY_PWD_BASENAME != "dotfiles" ]]; then
        echo "you are not in ~/dotfiles, im scared...";
    elif [ -h ~/.$1 ]; then
        echo "already a symlink: ";
        ls -l ~/.$1;
    elif [[ -a $1 ]]; then
        ln -s ~/dotfiles/$1 ~/.$1;
        ls -l ~/.$1;
    else
        echo "FILE $1 NOT FOUND";
    fi
};
echo " \n !!!!!!!!!!!!!!!!!!!!!!!\n !! SOURCED: !! \n !!\tlittle_dotty !!\n !!!!!!!!!!!!!!!!!!!!!!!\n"
|
talvdav/dotfiles
|
myzshlib/little_dotty.zsh
|
Shell
|
mit
| 742 |
#!/bin/sh
#
# Start the node app under the `forever` process manager, logging to /log.
cd /app
#update packages
npm install
#run app
#pm2 start bin/www --watch --name app --log /log/access.log --error /log/error.log --output /log/output.log --no-daemon
forever -o /log/out.log -e /log/err.log bin/www
|
mosluce/dockerfiles
|
forever/run.sh
|
Shell
|
mit
| 228 |
# Render a Markdown file ($1) to HTML with the toc/tables/def_list extensions
# and preview the result in lynx.
python -m markdown \
-x markdown.extensions.toc \
-x markdown.extensions.tables \
-x markdown.extensions.def_list \
"$1" | lynx -stdin
|
jmcguire/adventure-boilerplate
|
bin/test_md_in_lynx.sh
|
Shell
|
mit
| 162 |
# Show details for every registered Ironic node: strip the table header
# (first 3 lines) and trailing border line of `ironic node-list`, then take
# column 2 (the node UUID) and feed it to `ironic node-show`.
for nodes in $(ironic node-list | tail -n +4 | head -n -1 | awk -F "| " '{print $2}');
do ironic node-show $nodes;
done
|
ibravo/tripleo
|
files/dothis.sh
|
Shell
|
mit
| 122 |
#--------------------------------------------------------------------[ COMMAND ]
# Dispatch `clv scm <subcommand> [args]` to the matching helper function.
function clv-scm () {
    cmd=$1
    shift
    case $cmd in
        "type")
            clv-scm-type "$@"   # quoted so directory args with spaces survive
            ;;
        "update")
            clv-scm-update "$@"
            ;;
        *)
            echo "Usage: clv scm (type|update) [dir]"
            return 1
            ;;
    esac
}
#-----------------------------------------------------------------------{ type }
# NB: Has a weakness where only the parent repository type will be detected
# So a mercurial repository inside of a git one will be detected as git
# I've only encountered this with pyenv and cpython repo but it does happen
# Prints "git", "mercurial" or "subversion" for the directory ($1, default
# $PWD) and returns 0; returns 1 (printing nothing) when no VCS is detected
# or the directory does not exist. Detection order: git, hg, svn.
function clv-scm-type () {
dir=${1:-$PWD}
if [[ -d $dir ]]; then
# git
if type git > /dev/null; then
if [[ $(cd $dir && git rev-parse --is-inside-work-tree 2> /dev/null) = 'true' ]]; then
echo git
return 0
fi
fi
# mercurial
if type hg > /dev/null; then
hg --cwd $dir root &> /dev/null
if [[ $? -eq 0 ]]; then
echo 'mercurial'
return 0
fi
fi
# subversion
if type svn > /dev/null; then
svn info $dir &> /dev/null
if [[ $? -eq 0 ]]; then
echo 'subversion'
return 0
fi
fi
# Unknown or not a directory
return 1
else
echo "No such directory $dir"
return 1
fi
}
autoload clv-scm-type
#---------------------------------------------------------------------{ update }
# Pull the latest changes for the repository in $1 (default $PWD), using the
# VCS reported by clv-scm-type; subshells keep the caller's cwd unchanged.
function clv-scm-update () {
dir=${1:-$PWD}
case $(clv-scm-type $1) in
"git")
(cd $dir && git pull)
;;
"mercurial")
(cd $dir && hg pull && hg update)
;;
"subversion")
(cd $dir && svn update)
;;
*)
echo "Cannot find scm repository"
return 1
;;
esac
}
autoload clv-scm-update
|
cleversoap/clv
|
lib/clv-scm.zsh
|
Shell
|
mit
| 2,092 |
#!/bin/sh
# Container entrypoint: run the OpenEx player jar in the foreground.
# Go to the right directory
cd /opt/openex-player/target
# Launch the worker
java -jar player-2.0.0.jar
|
Luatix/OpenEx
|
openex-player/entrypoint.sh
|
Shell
|
mit
| 116 |
#!/bin/sh
# Build the hello.modules sample with JDK 13, jlink a minimal runtime image
# and smoke-test it by running the HelloWorld entry point.
set -e
# NOTE(review): alias expansion in non-interactive shells is shell-dependent
# (dash expands these, non-interactive bash does not by default) — confirm the
# target /bin/sh honours them, otherwise the system javac/jlink are used.
alias javac='~/.sdkman/candidates/java/13.0.1-open/bin/javac'
alias jlink='~/.sdkman/candidates/java/13.0.1-open/bin/jlink'
# Sanity-check that the JDK 13 toolchain is the one answering.
javac -version | grep 'javac 13'
jlink --version | grep 13
OUT=myjre
rm -rf $OUT
javac --module-source-path src \
--module-version 0.1 \
-d out -m hello.modules
jlink --module-path out --add-modules hello.modules,java.base --output $OUT
./$OUT/bin/java -m hello.modules/hello.HelloWorld
|
zxh0/jvm.go
|
test/hw_module/test.sh
|
Shell
|
mit
| 440 |
#!/bin/bash
# Bootstrap a fresh macOS machine: Homebrew, CLI utilities, Python, Go and
# personal jEdit settings. Aborts on any error (-euo pipefail) with tracing.
set -euxo pipefail
IFS=$'\n\t'
# Set up homebrew
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
brew analytics off
# Utilities
# shellcheck disable=SC2006,SC2046
brew install \
bash-completion `: autocompletion for bash terminal` \
git `: more up-to-date git than what macos provides` \
grep `: faster gnu grep` \
htop `: better top` \
iftop `: top for network I/O` \
jq `: parse and prettify json` \
less `: more up-to-date less than what macos provides` \
ngrep `: read network traffic` \
nmap `: network map` \
coreutils `: utility for dereferencing symlinks` \
tree `: recursive ls` \
wget `: curl alternative` \
vim `: install vim 8.0` \
# System monitoring
gem install iStats
# Python
brew install python
brew install python3
# Install go
brew install go
# Go Tools
curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
go get -u github.com/golang/lint/golint
# jEdit
# Replace any existing jEdit settings with the personal settings repo.
git clone [email protected]:albertyw/jEdit-settings
rm -r ~/Library/jEdit
mv jEdit-settings ~/Library/jEdit
|
albertyw/dotfiles
|
scripts/install_macos.sh
|
Shell
|
mit
| 1,248 |
#!/bin/sh
# CocoaPods-generated script: copy each pod framework into the app bundle,
# strip architectures the target cannot use and re-sign when required.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copy one framework ($1) into the app's Frameworks folder, following
# symlinks, then strip/codesign it as needed.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
# Optionally run signing in the background so multiple pods sign in parallel.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# Removes any slice from the fat binary ($1) that is not in VALID_ARCHS.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the pod framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/PSNetworkUtils/PSNetworkUtils.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/PSNetworkUtils/PSNetworkUtils.framework"
fi
# Wait for any background codesign jobs started above.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
HParis/PSNetworkUtils
|
Example/Pods/Target Support Files/Pods-PSNetworkUtils_Example/Pods-PSNetworkUtils_Example-frameworks.sh
|
Shell
|
mit
| 3,731 |
# Run the async KBase job wrapper: read the auth token from the job work
# directory and hand it to the runner along with the input/output JSON paths.
script_dir=$(dirname "$(readlink -f "$0")")
export KB_DEPLOYMENT_CONFIG=$script_dir/../deploy.cfg
WD=/kb/module/work
if [ -f $WD/token ]; then
# NOTE(review): xargs passes the token as a command-line argument, which is
# visible in `ps` output — consider reading it from the file/stdin instead.
cat $WD/token | xargs sh $script_dir/../bin/run_integrate_probable_annotation_async_job.sh $WD/input.json $WD/output.json
else
echo "File $WD/token doesn't exist, aborting."
exit 1
fi
|
janakagithub/IntegrateMissingAnnotation
|
scripts/run_async.sh
|
Shell
|
mit
| 339 |
#!/bin/bash
# Send an HTML email through the SendGrid v3 API.
# Usage: echo "<p>body</p>" | sg.bash recipient@example.com "Subject"
# NOTE(review): never commit a real API key; allow it to come from the
# environment instead of hard-coding it here.
SENDGRID_API_KEY="${SENDGRID_API_KEY:-}"
EMAIL_TO="$1"
FROM_EMAIL="root@$HOSTNAME"
FROM_NAME="$HOSTNAME"
SUBJECT="$2"
# Read the HTML body (first line of stdin); -r keeps backslashes literal.
read -r bodyHTML
maildata='{"personalizations": [{"to": [{"email": "'${EMAIL_TO}'"}]}],"from": {"email": "'${FROM_EMAIL}'",
"name": "'${FROM_NAME}'"},"subject": "'${SUBJECT}'","content": [{"type": "text/html", "value": "'${bodyHTML}'"}]}'
# FIX: --data was previously passed as "'$maildata'", which wrapped the
# JSON payload in literal single quotes and produced an invalid request
# body. Pass the variable plainly, double-quoted.
curl --request POST \
  --url https://api.sendgrid.com/v3/mail/send \
  --header "Authorization: Bearer $SENDGRID_API_KEY" \
  --header 'Content-Type: application/json' \
  --data "$maildata"
|
countable-web/satchel
|
bin/sg.bash
|
Shell
|
mit
| 548 |
#!/bin/bash -ev
#
# Installation Script
# Written by: Tommy Lincoln <[email protected]>
# Github: https://github.com/pajamapants3000
# Legal: See LICENSE in parent directory
#
#
# Dependencies
#**************
# Begin Required
#xml__simple-2.20
# End Required
# Begin Recommended
# End Recommended
# Begin Optional
# End Optional
# Begin Kernel
# End Kernel
#
# Installation
#**************
# Check for previous installation:
PROCEED="yes"
REINSTALL=0
# If the package name is already on this machine's install list, prompt
# before reinstalling. NOTE(review): `((\!$?))` re-tests grep's status in
# arithmetic context — it is redundant after `&&` but preserved as-is.
grep icon_naming_utils-0.8.90 /list-$CHRISTENED"-"$SURNAME > /dev/null && ((\!$?)) &&\
REINSTALL=1 && echo "Previous installation detected, proceed?" && read PROCEED
[ $PROCEED = "yes" ] || [ $PROCEED = "y" ] || exit 0
# Download:
wget http://tango.freedesktop.org/releases/icon-naming-utils-0.8.90.tar.bz2
#
# md5sum:
# The subshell exit propagates md5sum's pipeline status so `-e` aborts on
# a checksum mismatch.
echo "dd8108b56130b9eedc4042df634efa66 icon-naming-utils-0.8.90.tar.bz2" | md5sum -c ;\
( exit ${PIPESTATUS[0]} )
#
tar -xvf icon-naming-utils-0.8.90.tar.bz2
cd icon-naming-utils-0.8.90
./configure --prefix=/usr
make
#
# `as_root` is an external helper (sudo wrapper) provided by the BLFS
# script environment.
as_root make install
cd ..
as_root rm -rf icon-naming-utils-0.8.90
#
# Add to installed list for this computer:
echo "icon_naming_utils-0.8.90" >> /list-$CHRISTENED"-"$SURNAME
#
###################################################
|
pajamapants3000/BLFS_scripts_etc
|
scripts/icon_naming_utils-0.8.90.sh
|
Shell
|
mit
| 1,234 |
#!/usr/bin/env bash
#
# cli-test: Tests for god
#
# (C) 2013 Unitech.io Inc.
# MIT LICENSE
#
# Yes, we have tests in bash. How mad science is that?
# Resolve the node binary and build the pm2 CLI invocation used by every
# test below; fixtures live under test/fixtures.
node="`type -P node`"
nodeVersion="`$node -v`"
pm2="`type -P node` `pwd`/bin/pm2"
script="echo"
file_path="test/fixtures"
# Print a red failure marker with the given message and abort the run.
function fail {
  printf '######## \033[31m ✘ %s\033[0m\n' "$1"
  exit 1
}
# Print a green success marker with the given message.
function success {
  printf '\033[32m------------> ✔ %s\033[0m\n' "$1"
}
# Assert that the *previous* command exited 0; report $1 as the test name.
# NOTE: this reads $? on entry, so nothing may run between the command
# under test and the call to spec.
function spec {
[ $? -eq 0 ] || fail "$1"
success "$1"
}
# Inverse of spec: assert that the *previous* command exited with status 1.
# Same caveat: $? is read on entry.
function ispec {
[ $? -eq 1 ] || fail "$1"
success "$1"
}
echo -e "\033[1mRunning tests:\033[0m"
echo "####################### DEBUG ############################"
echo "PM2 Command = " $pm2
# NOTE(review): this echoes the literal text "$pm2 -V" instead of running
# `$pm2 -V`; the version is never actually printed.
echo "PM2 version = " $pm2 -V
echo "Node version = " $nodeVersion
$node -e "var os = require('os'); console.log('arch : %s\nplatform : %s\nrelease : %s\ntype : %s\nmem : %d', os.arch(), os.platform(), os.release(), os.type(), os.totalmem())"
echo "###################### !DEBUG! ###########################"
cd $file_path
$pm2 kill
spec "kill daemon"
$pm2 start eyayimfake
ispec "should fail if script doesnt exist"
$pm2
ispec "No argument"
$pm2 start cluster-pm2.json
spec "Should start well formated json with name for file prefix"
$pm2 list
spec "Should list processes succesfully"
$pm2 start multi-echo.json
spec "Should start multiple applications"
$pm2 generate echo
spec "Should generate echo sample json"
$pm2 start echo-pm2.json -f
spec "Should start echo service"
$pm2 logs &
spec "Should display logs"
TMPPID=$!
sleep 1
kill $!
spec "Should kill logs"
$pm2 web
spec "Should start web interface"
sleep 0.3
JSON_FILE='/tmp/web-json'
# Poll the pm2 web interface and inspect the JSON it serves.
wget -q http://localhost:9615/ -O $JSON_FILE
cat $JSON_FILE | grep "HttpInterface.js" > /dev/null
spec "Should get the right JSON with HttpInterface file launched"
$pm2 flush
spec "Should clean logs"
cat ~/.pm2/logs/echo-out.log | wc -l
spec "File Log should be cleaned"
sleep 0.3
wget -q http://localhost:9615/ -O $JSON_FILE
cat $JSON_FILE | grep "restart_time\":0" > /dev/null
spec "Should get the right JSON with HttpInterface file launched"
#
# Restart only one process
#
$pm2 restart 1
sleep 0.3
wget -q http://localhost:9615/ -O $JSON_FILE
OUT=`cat $JSON_FILE | grep -o "restart_time\":1" | wc -l`
# NOTE(review): "$1" is empty at top level here, so these fail/success
# calls report a blank test name.
[ $OUT -eq 1 ] || fail "$1"
success "$1"
#
# Restart all processes
#
$pm2 restartAll
spec "Should restart all processes"
sleep 0.3
wget -q http://localhost:9615/ -O $JSON_FILE
OUT=`cat $JSON_FILE | grep -o "restart_time\":1" | wc -l`
[ $OUT -eq 7 ] || fail "$1"
success "$1"
$pm2 list
$pm2 dump
spec "Should dump current processes"
ls ~/.pm2/dump.pm2
spec "Dump file should be present"
$pm2 stopAll
spec "Should stop all processes"
$pm2 kill
$pm2 resurrect
spec "Should resurect all apps"
$pm2 stopAll
spec "Should stop all processes"
$pm2 kill
spec "Should kill daemon"
|
1stvamp/pm2
|
test/cli.sh
|
Shell
|
mit
| 2,829 |
#!/usr/bin/env bash
# Base16 Chalk - Gnome Terminal color scheme install script
# Chris Kempson (http://chriskempson.com)
# All knobs can be overridden from the environment; defaults below.
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="Base 16 Chalk 256"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="base-16-chalk-256"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen
# Write a dconf key under $PROFILE_KEY.
# NOTE(review): "$type" is never assigned in this function (it looks like
# a leftover from the gconf variant below), so the string-wrapping branch
# only fires if a global $type happens to be set — callers here already
# pass pre-quoted values, which masks this.
dset() {
local key="$1"; shift
local val="$1"; shift
if [[ "$type" == "string" ]]; then
val="'$val'"
fi
"$DCONF" write "$PROFILE_KEY/$key" "$val"
}
# Because dconf still doesn't have "append"
# Appends 'val' to the dconf list at 'key', de-duplicating any existing
# occurrence. The pipeline rebuilds the "[a, b, c]" list syntax by hand;
# `head -c-1` drops the trailing comma.
dlist_append() {
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "'$val'"
} | head -c-1 | tr "\n" ,
)"
"$DCONF" write "$key" "[$entries]"
}
# Newest versions of gnome-terminal use dconf
# Preferred path: clone the default profile under a fresh UUID, register
# it, then overwrite its colors. Exits 0 on success so the gconf fallback
# below never runs.
if which "$DCONF" > /dev/null 2>&1; then
# Check that uuidgen is available
type $UUIDGEN >/dev/null 2>&1 || { echo >&2 "Requires uuidgen but it's not installed. Aborting!"; exit 1; }
[[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:
if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then
if which "$UUIDGEN" > /dev/null 2>&1; then
PROFILE_SLUG=`uuidgen`
fi
if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
else
DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
fi
DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"
# Copy existing settings from default profile
$DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"
# Add new copy to list of profiles
dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"
# Update profile values with theme options
dset visible-name "'$PROFILE_NAME'"
dset palette "['#151515', '#fb9fb1', '#acc267', '#ddb26f', '#6fc2ef', '#e1a3ee', '#12cfc0', '#d0d0d0', '#505050', '#fb9fb1', '#acc267', '#ddb26f', '#6fc2ef', '#e1a3ee', '#12cfc0', '#f5f5f5']"
dset background-color "'#151515'"
dset foreground-color "'#d0d0d0'"
dset bold-color "'#d0d0d0'"
dset bold-color-same-as-fg "true"
dset cursor-colors-set "true"
dset cursor-background-color "'#d0d0d0'"
dset cursor-foreground-color "'#151515'"
dset use-theme-colors "false"
dset use-theme-background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
exit 0
fi
fi
# Fallback for Gnome 2 and early Gnome 3
# Reached only when dconf is unavailable or has no profile list.
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles
PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"
# Write one typed gconf key under $PROFILE_KEY.
# $1 - gconf type (string/bool/...), $2 - key name, $3 - value.
gset() {
  local vtype="$1" key="$2" val="$3"
  "$GCONFTOOL" --set --type "$vtype" "$PROFILE_KEY/$key" -- "$val"
}
# Because gconftool doesn't have "append"
# Appends 'val' (of gconf list element type $1) to the list at 'key',
# removing any previous occurrence first; mirrors dlist_append above.
glist_append() {
local type="$1"; shift
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "$val"
} | head -c-1 | tr "\n" ,
)"
"$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}
# Append the Base16 profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"
gset string visible_name "$PROFILE_NAME"
gset string palette "#151515:#fb9fb1:#acc267:#ddb26f:#6fc2ef:#e1a3ee:#12cfc0:#d0d0d0:#505050:#fb9fb1:#acc267:#ddb26f:#6fc2ef:#e1a3ee:#12cfc0:#f5f5f5"
gset string background_color "#151515"
gset string foreground_color "#d0d0d0"
gset string bold_color "#d0d0d0"
gset bool bold_color_same_as_fg "true"
# NOTE(review): the four cursor lines below use dash-separated key names
# and dconf-style quoted values, unlike the underscore keys used above —
# they look copy-pasted from the dconf section; verify against the Gnome 2
# terminal gconf schema before relying on them.
gset bool cursor-colors-set "true"
gset string cursor-background-color "'#d0d0d0'"
gset string cursor-foreground-color "'#151515'"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
|
dcelasun/dotfiles
|
config/base16-gnome-terminal/color-scripts/base16-chalk-256.sh
|
Shell
|
mit
| 4,159 |
#!/bin/bash
# Reset the smartcontrol database: drop and recreate it, run Django's
# syncdb non-interactively, then load the initial data fixture.
# NOTE(review): the root password is hard-coded; prefer reading it from
# the environment or a protected ~/.my.cnf.
mysql -u root --password="root_password" -e "drop database db;"
mysql -u root --password="root_password" -e "create database db;"
echo "no" | python /var/www/smartcontrol/manage.py syncdb
echo "import scripts.init_db" | python /var/www/smartcontrol/manage.py shell
# FIX: `echo "\n"` printed a literal backslash-n (bash echo does not
# interpret escapes without -e); a bare echo emits the intended newline.
echo
|
miccrun/smartcontrol
|
scripts/reset.sh
|
Shell
|
mit
| 288 |
#!/bin/bash
#PBS -q workq
#PBS -l nodes=1:ppn=20
#PBS -l walltime=12:00:00
# PBS batch job: walk a CSV list of matrices ($A, format "name, status")
# and run the tpetra solver on each unsolved one, updating the status in
# place (-1 = in progress, 1 = done) so interrupted runs can resume.
export OMP_NUM_THREADS=1
INPUT=$A
OUTDIR=$B
MATDIR=/home/pmotter/work/UF_Collection_Matrix-Market
EXEDIR=/home/pmotter/work/trilinos-prediction/tpetra_solvers
echo $INPUT
echo $OUTDIR
echo $MATDIR
echo $EXEDIR
mkdir -p $OUTDIR
OLDIFS=$IFS
IFS=,
[ ! -f $INPUT ] && { echo "$INPUT file not found"; exit 99; }
# Read via fd 3 so the sed -i rewrites of $A do not disturb the loop's
# own input stream.
while read matrix solved <&3
do
COUNT=$((COUNT + 1))
if [ "$solved" -ne 1 ]
then
echo "$COUNT solving $matrix"
sed -i "s/${matrix}, 0/${matrix}, -1/g" "$A"
mpirun -np 12 ${EXEDIR}/tpetra_solvers ${MATDIR}/${matrix} -d ${OUTDIR} &&
sed -i "s/${matrix}, -1/${matrix}, 1/g" "$A"
else
echo "$COUNT : skipping $matrix"
fi
done 3< "${INPUT}"
IFS="${OLDIFS}"
|
patemotter/trilinos-prediction
|
tpetra_solvers/batch_scripts/supermic/supermic_np12.sh
|
Shell
|
mit
| 804 |
#! /bin/sh
# Launcher for the 'adventures' Erlang app. With no argument it boots the
# node; with "shell" it attaches a remote shell to the running node.
BASEDIR=`dirname $0`
BASEDIR=`readlink -f $BASEDIR`
cd $BASEDIR
# Make the app and its deps visible to the Erlang code server.
ERL_LIBS=$BASEDIR:$BASEDIR/deps
export ERL_LIBS
SNAME=adventures@localhost
if [ "$1" = "shell" ]; then
erl -remsh $SNAME -sname rem
else
exec erl +K true -noinput -noshell \
-sasl errlog_type error \
-sname $SNAME \
-s adventures_app
fi
|
joekinley/adventures-in-erlang
|
run.sh
|
Shell
|
mit
| 348 |
#!/bin/bash
# Bootstrap installer for the dotfiles repo: constants for the repo
# origin, tarball and remote utils locations.
declare -r GITHUB_REPOSITORY="makabde/dotfiles"
declare -r DOTFILES_ORIGIN="[email protected]:$GITHUB_REPOSITORY.git"
declare -r DOTFILES_TARBALL_URL="https://github.com/$GITHUB_REPOSITORY/tarball/master"
declare -r DOTFILES_UTILS_URL="https://raw.githubusercontent.com/$GITHUB_REPOSITORY/master/src/os/utils.sh"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
declare dotfilesDirectory="$HOME/Dotfiles"
declare skipQuestions=false
# ----------------------------------------------------------------------
# | Helper Functions                                                   |
# ----------------------------------------------------------------------
# Fetch a URL into a local file, preferring curl and falling back to
# wget; returns non-zero when neither tool exists or the transfer fails.
# $1 - url, $2 - output path.
download() {
    local url="$1"
    local output="$2"
    if command -v "curl" &> /dev/null; then
        # -L follow redirects, -s silent, -S still show errors, -o file
        curl -LsSo "$output" "$url" &> /dev/null
        return $?
    fi
    if command -v "wget" &> /dev/null; then
        # -q quiet, -O write to file
        wget -qO "$output" "$url" &> /dev/null
        return $?
    fi
    return 1
}
# Download the repo tarball, interactively choose (or confirm) the target
# directory, extract into it, and cd into its src/os directory.
# Relies on helpers from utils.sh: print_in_purple, print_result, ask,
# ask_for_confirmation, answer_is_yes, get_answer.
download_dotfiles() {
local tmpFile=""
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
print_in_purple "\n • Download and extract archive\n\n"
tmpFile="$(mktemp /tmp/XXXXX)"
download "$DOTFILES_TARBALL_URL" "$tmpFile"
print_result $? "Download archive" "true"
printf "\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if ! $skipQuestions; then
ask_for_confirmation "Do you want to store the dotfiles in '$dotfilesDirectory'?"
if ! answer_is_yes; then
dotfilesDirectory=""
while [ -z "$dotfilesDirectory" ]; do
ask "Please specify another location for the dotfiles (path): "
dotfilesDirectory="$(get_answer)"
done
fi
# Ensure the `dotfiles` directory is available
while [ -e "$dotfilesDirectory" ]; do
ask_for_confirmation "'$dotfilesDirectory' already exists, do you want to overwrite it?"
if answer_is_yes; then
rm -rf "$dotfilesDirectory"
break
else
dotfilesDirectory=""
while [ -z "$dotfilesDirectory" ]; do
ask "Please specify another location for the dotfiles (path): "
dotfilesDirectory="$(get_answer)"
done
fi
done
printf "\n"
else
rm -rf "$dotfilesDirectory" &> /dev/null
fi
mkdir -p "$dotfilesDirectory"
print_result $? "Create '$dotfilesDirectory'" "true"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Extract archive in the `dotfiles` directory.
extract "$tmpFile" "$dotfilesDirectory"
print_result $? "Extract archive" "true"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
rm -rf "$tmpFile"
print_result $? "Remove archive"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
cd "$dotfilesDirectory/src/os" \
|| return 1
}
# Fetch utils.sh from the repo into a temp file, source it into the
# current shell, then remove the temp file. Returns 1 on any failure.
download_utils() {
local tmpFile=""
tmpFile="$(mktemp /tmp/XXXXX)"
download "$DOTFILES_UTILS_URL" "$tmpFile" \
&& . "$tmpFile" \
&& rm -rf "$tmpFile" \
&& return 0
return 1
}
# Unpack a gzipped tarball ($1) into $2, dropping the archive's single
# top-level directory. Returns non-zero if tar is missing or fails.
extract() {
    local archive="$1"
    local outputDir="$2"
    command -v "tar" &> /dev/null || return 1
    tar -zxf "$archive" --strip-components 1 -C "$outputDir"
}
# Ensure we are on macOS >= 10.10 or Ubuntu >= 18.04.
# Uses get_os / get_os_version / is_supported_version from utils.sh.
# Returns 0 when supported, 1 (after printing a message) otherwise.
verify_os() {
declare -r MINIMUM_MACOS_VERSION="10.10"
declare -r MINIMUM_UBUNTU_VERSION="18.04"
local os_name="$(get_os)"
local os_version="$(get_os_version)"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Check if the OS is `macOS` and
# it's above the required version.
if [ "$os_name" == "macos" ]; then
if is_supported_version "$os_version" "$MINIMUM_MACOS_VERSION"; then
return 0
else
printf "Sorry, this script is intended only for macOS %s+" "$MINIMUM_MACOS_VERSION"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Check if the OS is `Ubuntu` and
# it's above the required version.
elif [ "$os_name" == "ubuntu" ]; then
if is_supported_version "$os_version" "$MINIMUM_UBUNTU_VERSION"; then
return 0
else
printf "Sorry, this script is intended only for Ubuntu %s+" "$MINIMUM_UBUNTU_VERSION"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
else
printf "Sorry, this script is intended only for macOS and Ubuntu!"
fi
return 1
}
# ----------------------------------------------------------------------
# | Main                                                               |
# ----------------------------------------------------------------------
# Orchestrates the full setup: load utils (local or remote), verify the
# OS, then run each provisioning sub-script in order.
main() {
# Ensure that the following actions
# are made relative to this file's path.
cd "$(dirname "${BASH_SOURCE[0]}")" \
|| exit 1
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Load utils
if [ -x "utils.sh" ]; then
. "utils.sh" || exit 1
else
download_utils || exit 1
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Ensure the OS is supported and
# it's above the required version.
verify_os \
|| exit 1
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
skip_questions "$@" \
&& skipQuestions=true
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
ask_for_sudo
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Check if this script was run directly (./<path>/setup.sh),
# and if not, it most likely means that the dotfiles were not
# yet set up, and they will need to be downloaded.
printf "%s" "${BASH_SOURCE[0]}" | grep "setup.sh" &> /dev/null \
|| download_dotfiles
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
./create_directories.sh
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
./create_symbolic_links.sh "$@"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
./create_local_config_files.sh
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
./install/main.sh
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# ./preferences/main.sh
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if cmd_exists "git"; then
if [ "$(git config --get remote.origin.url)" != "$DOTFILES_ORIGIN" ]; then
./initialize_git_repository.sh "$DOTFILES_ORIGIN"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if ! $skipQuestions; then
./update_content.sh
fi
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if ! $skipQuestions; then
./restart.sh
fi
}
main "$@"
|
makabde/dotfiles
|
src/os/setup.sh
|
Shell
|
mit
| 7,381 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DLA-394-1
#
# Security announcement date: 2016-01-18 00:00:00 UTC
# Script generation date: 2017-01-01 21:09:06 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - passenger:2.2.11debian-2+deb6u1
#
# Last versions recommanded by security team:
# - passenger:2.2.11debian-2+deb6u1
#
# CVE List:
# - CVE-2015-7519
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Pin-upgrade passenger to the patched Debian 6 version; -y skips prompts.
sudo apt-get install --only-upgrade passenger=2.2.11debian-2+deb6u1 -y
|
Cyberwatch/cbw-security-fixes
|
Debian_6_(Squeeze)/x86_64/2016/DLA-394-1.sh
|
Shell
|
mit
| 641 |
#!/bin/bash
#
# This script configures PostgreSQL, generates passwords,
# and stores the generated database info into
# $REPO_DIR/server/config/settings.py
## Load configuration
# find the scripts directory (note the /..)
DIR="$( builtin cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
source $DIR/load_config.sh
cd "$REPO_DIR"
echo "Setting up PostgreSQL ($PSQL_VERSION)..."
## Generate random passwords
# 16-char alphanumeric passwords for the DB roles; 64 chars for Django's
# SECRET_KEY. /dev/urandom is filtered to the safe character class.
DB_PASS=$(< /dev/urandom tr -dc A-Z-a-z-0-9 | head -c16)
USER_PSQL_PASS=$(< /dev/urandom tr -dc A-Z-a-z-0-9 | head -c16)
SECRET_KEY=$(< /dev/urandom tr -dc A-Z-a-z-0-9 | head -c64)
# increase OS shared memory
# Remove any lines from a previous run of this installer, then re-append
# the patched values and apply them to the running kernel.
sudo sed -e "/^# Patched by labelmaterial installer:/d" \
         -e "/^kernel.shmall/d" \
         -e "/^kernel.shmmax/d" \
         /etc/sysctl.conf > tmp.conf
sudo mv tmp.conf /etc/sysctl.conf
# FIX: the appends previously ran without privileges (`echo ... >>
# /etc/sysctl.conf`), which fails for any non-root user because the
# redirection is performed by the unprivileged shell. Route the text
# through `sudo tee -a` instead.
{
    echo "# Patched by labelmaterial installer:"
    echo "kernel.shmall = $KERNEL_SHMALL"
    echo "kernel.shmmax = $KERNEL_SHMMAX"
} | sudo tee -a /etc/sysctl.conf > /dev/null
# apply new settings to kernel
sudo sysctl -p
# set up new cluster if it's not already there
if [ "$(sudo pg_lsclusters | grep $PSQL_VERSION | grep -c $DB_CLUSTER)" -lt "1" ]; then
if [[ -z "$DB_DIR" ]]; then
# default directory
sudo pg_createcluster $PSQL_VERSION $DB_CLUSTER
else
# custom directory
sudo mkdir -p $DB_DIR
sudo chown postgres:postgres $DB_DIR
sudo pg_createcluster -d $DB_DIR $PSQL_VERSION $DB_CLUSTER
fi
fi
# start if not already started
set +e
sudo pg_ctlcluster $PSQL_VERSION $DB_CLUSTER start
set -e
# find port
DB_PORT=$(sudo pg_lsclusters | grep $PSQL_VERSION | grep $DB_CLUSTER | awk '{print $3}')
if [[ -z "$DB_PORT" ]]; then
sudo pg_lsclusters
echo "Error: cannot find PostgreSQL port"
else
echo "PostgreSQL running on port $DB_PORT"
fi
# find config
POSTGRESQL_CONF=/etc/postgresql/$PSQL_VERSION/$DB_CLUSTER/postgresql.conf
if [[ ! -s "$POSTGRESQL_CONF" ]]; then
echo "Error: cannot find PostgreSQL config file: \"$POSTGRESQL_CONF\""
exit 1
fi
# comment out old configuration
# The sed chain disables any previous values for the three Django-related
# settings and strips our old marker before re-adding a clean block below.
sudo sed -r "s/^(\s*client_encoding\s*=.*)$/#\1/" $POSTGRESQL_CONF | \
sudo sed -r "s/^#+\s*client_encoding\s*=\s*'UTF8'\s*$//" | \
sudo sed -r "s/^(\s*default_transaction_isolation\s*=.*)$/#\1/" | \
sudo sed -r "s/^#+\s*default_transaction_isolation\s*=\s*'read committed'\s*$//" | \
sudo sed -r "s/^(\s*timezone\s*=.*)$/#\1/" | \
sudo sed -r "s/^#+\s*timezone\s*=\s*'UTC'\s*$//" | \
sudo sed -r "s/^# config for $PROJECT_NAME:$//" | \
sudo sed -r '/^$/N;/\n$/D' \
> tmp.conf
# add django configuration
echo "# config for $PROJECT_NAME:" >> tmp.conf
echo "client_encoding = 'UTF8'" >> tmp.conf
echo "default_transaction_isolation = 'read committed'" >> tmp.conf
echo "timezone = 'UTC'" >> tmp.conf
sudo mv -f tmp.conf $POSTGRESQL_CONF
# increase memory from the small default of 24MB
sudo sed -r "s/^shared_buffers\s+=.*/shared_buffers = $PSQL_SHARED_BUFFERS/" $POSTGRESQL_CONF > tmp.conf
sudo mv -f tmp.conf $POSTGRESQL_CONF
sudo grep shared_buffers $POSTGRESQL_CONF
# restart postgres with updated shared_buffers
if sudo service postgresql restart; then
echo "success"
else
echo "Could not restart the database server.  To fix this, you can try:"
echo "Edit the config file:"
echo "    $DIR/config.sh"
echo "and decrease the value of PSQL_SHARED_BUFFERS ($PSQL_SHARED_BUFFERS)."
echo "Re-run the installer to try again."
exit 1
fi
# setup pgpass password file
# ~/.pgpass lets psql authenticate non-interactively below.
rm -f ~/.pgpass
echo "*:*:$DB_NAME:$DB_USER:$DB_PASS" >> ~/.pgpass
echo "*:*:*:$USER:$USER_PSQL_PASS" >> ~/.pgpass
chmod 600 ~/.pgpass
# create $USER as a superuser if it does not exist
echo "Creating a PostgreSQL superuser to match your username ($USER)."
echo "CREATE USER \"$USER\" WITH SUPERUSER LOGIN PASSWORD '$USER_PSQL_PASS'" | sudo -u postgres psql -p $DB_PORT
echo "ALTER USER \"$USER\" WITH SUPERUSER LOGIN PASSWORD '$USER_PSQL_PASS'" | sudo -u postgres psql -p $DB_PORT
set +e # this will complain if run a second time, so ignore the error
createdb -p $DB_PORT $USER --encoding=UTF8
set -e
# create project user if it does not exist
echo "CREATE USER \"$DB_USER\" WITH LOGIN PASSWORD '$DB_PASS'" | psql -p $DB_PORT
echo "ALTER USER \"$DB_USER\" WITH LOGIN PASSWORD '$DB_PASS'" | psql -p $DB_PORT
# create database
echo "Destroying any existing database with name '$DB_NAME'"
set +e # this will complain if run a second time, so ignore the error
dropdb -p $DB_PORT $DB_NAME
set -e
createdb -p $DB_PORT --owner=$DB_USER --encoding=UTF8 $DB_NAME
# set up hba conf
HBA_CONF=$(sudo ls -1 /etc/postgresql/$PSQL_VERSION/$DB_CLUSTER/pg_hba.conf | tail -n 1)
if [[ ! -s "$HBA_CONF" ]]; then
echo "Error: cannot find PostgreSQL config file: \"$HBA_CONF\""
exit 1
fi
# Allow the project user to connect locally with md5 password auth.
if [[ $(sudo grep -cE "local\s+$DB_NAME\s+$DB_USER\s+md5" $HBA_CONF) -eq 0 ]]; then
sudo bash -c "echo '' >> $HBA_CONF"
sudo bash -c "echo 'local $DB_NAME $DB_USER md5' >> $HBA_CONF"
fi
# correct 'all' permissions in hbaconf
if [[ $(sudo grep -cE 'local\s+all\s+all\s+peer' $HBA_CONF) -eq 1 ]]; then
sudo sed -r 's/local\s+all\s+all\s+peer/local all all md5/' $HBA_CONF > tmp.conf
sudo mv -f tmp.conf $HBA_CONF
fi
# show resulting conf file
sudo tail $HBA_CONF
# restart postgres with new settings
sudo service postgresql restart
# load initial database data
echo "Loading initial data..."
if [[ ! -s "$PSQL_DUMP_FILE" ]]; then
echo "Downloading data: $PSQL_DUMP_URL --> $PSQL_DUMP_FILE..."
wget $PSQL_DUMP_URL -O "$PSQL_DUMP_FILE"
fi
if [[ -s "$PSQL_DUMP_FILE" ]]; then
gunzip -c "$PSQL_DUMP_FILE" | psql -p $DB_PORT $DB_NAME $DB_USER
else
echo "Note: could not find pg_dump file \"$PSQL_DUMP_FILE\""
fi
##
## Fill in settings
##
echo "Filling in Django settings..."
# Set up settings_local.py
# Substitute the generated credentials into the Django settings template.
sed -e "s|'ADMIN_NAME'|'$ADMIN_NAME'|g" \
-e "s|'ADMIN_EMAIL'|'$ADMIN_EMAIL'|g" \
-e "s|'DB_NAME'|'$DB_NAME'|g" \
-e "s|'DB_USER'|'$DB_USER'|g" \
-e "s|'DB_PASS'|'$DB_PASS'|g" \
-e "s|'DB_PORT'|'$DB_PORT'|g" \
-e "s|'SRC_DIR'|'$SRC_DIR'|g" \
-e "s|'DATA_DIR'|'$DATA_DIR'|g" \
-e "s|'PROJECT_NAME'|'$PROJECT_NAME'|g" \
-e "s|'SERVER_NAME'|'$SERVER_NAME'|g" \
-e "s|'SERVER_IP'|'$SERVER_IP'|g" \
-e "s|'TIME_ZONE'|'$TIME_ZONE'|g" \
-e "s|'SECRET_KEY'|'$SECRET_KEY'|g" \
$SRC_DIR/config/settings_local_template.py > \
$SRC_DIR/config/settings_local.py
# (this has to be after settings_local.py is set up)
echo "Running migrations..."
bash "$DIR/migrate_database.sh"
echo "Setting up database cache..."
set +e
$VENV_DIR/bin/python $SRC_DIR/manage.py createcachetable db-cache
set -e
echo "$0: done"
|
seanbell/opensurfaces
|
scripts/install/install_postgres.sh
|
Shell
|
mit
| 6,517 |
#!/bin/bash
# ============================================================================== #
# MIT License #
# #
# Copyright (c) 2017 Donato Rimenti #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in #
# all copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
# ============================================================================== #
# #
# DESCRIPTION : Solution for MongoDB University M102's Homework 5-1. #
# AUTHOR : Donato Rimenti #
# COPYRIGHT : Copyright (c) 2017 Donato Rimenti #
# LICENSE : MIT #
# #
# ============================================================================== #
# Creates the data directories.
# One dbpath per replica-set member.
mkdir 1
mkdir 2
mkdir 3
# Starts the servers for the replica set.
mongod --port 27001 --replSet rs --dbpath 1 --fork
mongod --port 27002 --replSet rs --dbpath 2 --fork
mongod --port 27003 --replSet rs --dbpath 3 --fork
# Waits for the servers to go up.
sleep 5
# Initializes the replica set.
# Two data-bearing members plus one arbiter; prints the arbiter's state
# (expected 7 = ARBITER) as the homework solution.
mongo --port 27001 --eval "rs.initiate({ _id: 'rs', members: [{ _id: 0, host : 'localhost:27001' }, { _id: 1, host : 'localhost:27002' }, { _id: 2, host : 'localhost:27003', arbiterOnly: true }]}); sleep(5000); print('Solution : ' + rs.status().members[2].state);"
|
aurasphere/mongodb-university-classes
|
M102 - MongoDB for DBAs/Chapter 5 - Replication Part 2/Homework 5.1/Homework 5.1 Solution.sh
|
Shell
|
mit
| 3,095 |
#!/bin/bash
# RQS quality-score compression pipeline: convert FASTQ -> fake SAM,
# build a dictionary, sparsify, threshold, then measure the bzip2 size of
# the surviving quality column. $1 = input FASTQ, $2 = output directory.
# echo First convert FASTQ files to a fake SAM format:
$RQS/fastq2fsam.sh $1
output=$2/`basename $1`.fsam
mv $1.fsam $output
# echo Generate our dictionary with read multiplicity r=2:
#../generate_dict 2 dict.db $1.fsam
$RQS/generate_dict 2 $2/dict.db $output
# echo In this example, we compress the corpus we used to generate the dictionary:
$RQS/sparsify $2/dict.db $output
# echo After sparsification, we still need to cut-off high quality scores:
# echo "Let's choose a threshold of 'I', or Phred quality 40 under most encodings"
# echo "../threshold 'I' *.fsam.filtered"
$RQS/threshold 'I' $output.filtered
# echo Compression is now done, but let\'s see how well we did using BZIP2:
# echo "We'll cut out column 11 (the quality scores) and pipe it through BZIP2"
# echo and then compute the bits per quality value needed.
for filename in $output.filtered.reduced
do
file=`basename $filename`
cut -f11 $filename > $2/$file.qual
# Byte count minus line count = number of quality characters (newlines
# excluded) used as the denominator of the bits/score metric.
orig_size=`wc -c < $2/$file.qual`
orig_lines=`wc -l < $2/$file.qual`
orig_size=`echo "$orig_size - $orig_lines" | bc`
bzip2 -f $2/$file.qual
new_size=`wc -c < $2/$file.qual.bz2`
#rm $file.qual.bz2
echo -e $file:'\t' `echo "scale=4; 1/( $orig_size / ( $new_size * 8)) " | bc` bits / quality score
done
# echo "Although we're basically done, you might want to convert your files back"
# echo "from this fake SAM format to a FASTQ file:"
# echo "../fsam2fastq.sh *.filtered.reduced"
$RQS/fsam2fastq.sh $output.filtered.reduced
mv $output.filtered.reduced.fastq $2/`basename $1`
|
cmhill/q-compression
|
src/run_rqs.sh
|
Shell
|
mit
| 1,596 |
#!/usr/bin/env sh
# Fine-tune the caffenet model on GPU 0, initialising from the
# iteration-10000 snapshot of the previous fine-tuning stage.
~/documents/caffe/build/tools/caffe train \
    --solver=./solver_fine4.prototxt \
    --weights=./models/caffenet_train_fine2_iter_10000.caffemodel \
    --gpu 0
|
JasonTam/ndsb2015
|
fine_tune.sh
|
Shell
|
mit
| 181 |
#!/bin/bash
# Extract per-iteration convergence values (temperature, density,
# velocity) from the solver log, plot them on a log-scaled y axis with
# gnuplot, and clean up the intermediate data files.
grep -e 'Temperature' log |cut -d '=' -f 2 |nl >tlog.dat
grep -e '^Density' log |cut -d '=' -f 2 |nl >rlog.dat
grep -e '^Velocity' log |cut -d '=' -f 2 |nl >ulog.dat
gnuplot <<- EOF
set xlabel "t"
set ylabel "log(Error)"
set logscale y
#set format y "%s*10^{%S}"
set format y "%.2e"
set term png
set output "log.png"
plot "tlog.dat" using 1:2 with l title "rho Convergence rate", \
"rlog.dat" using 1:2 with l title "T Convergence rate", \
"ulog.dat" using 1:2 with l title "U Convergence rate"
EOF
rm *log.dat
|
zhulianhua/dugksFoam
|
src/scripts/plotLog.sh
|
Shell
|
mit
| 580 |
source "$HOME/.dotfiles/common/functions.sh"
# Symlink the Sublime Text CLI helper into /usr/local/bin (macOS).
if [ -f "/usr/local/bin/subl" ]; then
  msg_checking "ln for Sublime"
else
  # FIX: the target path was both double-quoted AND backslash-escaped,
  # which embedded a literal backslash ("Sublime\ Text.app") in the link
  # target and produced a dangling symlink. Inside double quotes the
  # space needs no escaping.
  ln -s "/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl" /usr/local/bin/subl
fi
|
andrevvalle/dotfiles
|
sublime/global.sh
|
Shell
|
mit
| 237 |
# Generate the two launcher scripts used by the platform in the current
# directory:
#   sail.sh  - starts the sailor run loop
#   serve.sh - starts the service runner, forwarding its first three args
prepare_sailor() {
  printf '%s\n' 'node ./node_modules/elasticio-sailor-nodejs/run.js' > sail.sh
  printf '%s\n' 'node ./node_modules/elasticio-sailor-nodejs/runService.js ${1} ${2} ${3}' > serve.sh
}
# Stamp component.json with the runtime language and the installed sailor
# version (read from the sailor package.json via the buildpack's
# read_json helper); the JSON rewrite is delegated to a small ruby
# one-liner so formatting stays pretty-printed.
update_component_json() {
comp_file="component.json"
sailor_ver=`read_json "$build_dir/node_modules/elasticio-sailor-nodejs/package.json" ".version"`
ruby_command="require 'json';"
ruby_command+="obj = JSON.parse(File.read('$comp_file'));"
ruby_command+="obj['language'] = 'nodejs';"
ruby_command+="obj['sailor_version'] = '$sailor_ver';"
ruby_command+="File.open('$comp_file', 'w'){ |f| f << JSON.pretty_generate(obj)};"
ruby -e "$ruby_command"
}
# Run `npm test` when the component's package.json declares a test
# script; otherwise emit a warning (helper from the buildpack).
run_tests() {
if [[ $(read_json "$build_dir/package.json" ".scripts.test") != "" ]]; then
npm test
else
warning "No tests specified. Please do it ASAP!"
fi
}
# Drop devDependencies from node_modules before packaging the slug.
remove_dev_dependences() {
npm prune --production
}
# Verify that the installed sailor version supports the selected Node.js
# engine, using the buildpack's bundled compatibility checker.
# Reads globals: build_dir, bp_dir, node_engine.
validate_compatibility() {
sailor_ver=`read_json "$build_dir/node_modules/elasticio-sailor-nodejs/package.json" ".version"`
info "Validating Sailor version $sailor_ver compatibility with Node.js version $node_engine"
pushd "$bp_dir/vendor/compatibility" >/dev/null
npm install --unsafe-perm --quiet --userconfig $build_dir/.npmrc 2>&1 | indent
popd >/dev/null
info "Installed required dependencies"
node $bp_dir/vendor/compatibility/validate.js -n $node_engine -s $sailor_ver 2>&1
}
|
elasticio/elasticio-buildpack-nodejs
|
lib/elasticio.sh
|
Shell
|
mit
| 1,393 |
# Warning: make sure iphas-dr2-arxiv.bbl exists and is an up-to-date bbl file
# Bundle the paper source, class files and every referenced figure into
# arxiv.tar.gz for an arXiv submission.
tar -cvf arxiv.tar iphas-dr2-arxiv.tex iphas-dr2-arxiv.bbl mn2e.bst mn2e.cls aas_macros.sty tables/columns.tex figures/footprint/footprint_small.png figures/depth/depth_r.pdf figures/depth/depth_i.pdf figures/depth/depth_h.pdf figures/seeing/seeing_r.pdf figures/seeing/seeing_i.pdf figures/seeing/seeing_ha.pdf figures/caldiagram/ccd-uncalibrated.pdf figures/caldiagram/ccd-calibrated.pdf figures/calibration/APASS-IPHAS-DR2_ishift.pdf figures/calibration/APASS-IPHAS-DR2_rshift.pdf figures/calibration/colourbar_apass_r.pdf figures/calibration/colourbar_apass_i.pdf figures/calibration/SDSS-IPHAS_rshift.pdf figures/calibration/colourbar_sdss_r.pdf figures/calibration/SDSS-IPHAS_ishift.pdf figures/calibration/colourbar_sdss_i.pdf figures/magdist/magdist-r.pdf figures/uncertainties/uncertainties.pdf figures/repeatability/repeatability.pdf figures/repeatability/repeatability-reliable.pdf figures/sourcecount/sourcecount.pdf figures/diagrams/ccd-* figures/diagrams/cmd-* figures/sh2-82/sh2-82-*
gzip arxiv.tar
|
barentsen/iphas-dr2
|
paper/tar-arxiv.sh
|
Shell
|
mit
| 1,091 |
# Install libcurl development headers (build dependency for node-red packages).
sudo apt-get install libcurl3-dev
|
Anton04/OpenEnergyPlayground
|
nodered/packets.sh
|
Shell
|
mit
| 34 |
# Install a freshly-unpacked ".new" config file without clobbering user
# edits: if no existing config, activate the .new file; if identical to
# the existing one, drop the redundant copy; otherwise leave the .new
# file in place for the admin to merge by hand.
config() {
  NEW="$1"
  OLD="$(dirname "$NEW")/$(basename "$NEW" .new)"
  # If there's no config file by that name, mv it over:
  if [ ! -r "$OLD" ]; then
    mv "$NEW" "$OLD"
  elif [ "$(md5sum < "$OLD")" = "$(md5sum < "$NEW")" ]; then
    # toss the redundant copy (contents identical)
    rm "$NEW"
  fi
  # Otherwise, we leave the .new copy for the admin to consider...
}
# Apply the keep-user-edits policy to each packaged config file.
config etc/wvdial.conf.new
config etc/ppp/peers/wvdial-pipe.new
config etc/ppp/peers/wvdial.new
|
panosmdma/SlackOnly-SlackBuilds
|
network/wvdial/doinst.sh
|
Shell
|
mit
| 445 |
#!/bin/sh
# This hook compiles a style.scss file after each BufWritePost event on
# files ending in "scss".
sass style.scss style.css
|
ahw/vim-hooks
|
examples/scss.bufwritepost.vimhook.recompile-sass.sh
|
Shell
|
mit
| 136 |
#!/bin/bash
# Small script to switch the apt-get mirrors; this makes downloads much
# faster than the overloaded US servers :-(
# zf191002.1023
# source:
# NOTE: the script currently bails out immediately on purpose — the
# message below ("Ne fonctionne pas pour l'instant" = "does not work for
# now") is shown and nothing is changed.
echo -e "
Ne fonctionne pas pour l'instant ! zf191002.1023
"
exit
#sed -i -e "s/http:\/\/archive.ubuntu.com/http:\/\/mirror.switch.ch\/ftp\/mirror\/ubuntu/g" /etc/apt/sources.list
sudo sed -i -e "s/http:\/\/archive.ubuntu.com/http:\/\/ubuntu.ethz.ch/g" /etc/apt/sources.list
|
zuzu59/deploy-proxmox
|
change_sources_depots.sh
|
Shell
|
mit
| 460 |
# Get script's dir (works for both executed and sourced invocations).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Ask which template exporters to prepare; dialog prints the selected tags
# (space separated) on stdout.
CHOICES=$(dialog --stdout --checklist 'Select what you want to setup' \
0 0 4 linux 'Prepares the machine to export 32 and 64 bits linux templates' on \
android 'Prepares the machine to export android templates' on \
windows 'Prepares the machine to export 32 and 64 bits windows templates' on \
html5 'Prepares the machine to export HTML5 templates' on )
# FIX: quote expansions so selections and paths survive word-splitting
# (unquoted $DIR broke installs from directories containing spaces).
echo "$CHOICES"
echo "$PWD"
# Run the setup script for every selected platform.
if [[ "$CHOICES" =~ linux ]];
then
  source "$DIR/linux-setup.sh"
fi
if [[ "$CHOICES" =~ android ]];
then
  source "$DIR/android-setup.sh"
fi
if [[ "$CHOICES" =~ windows ]];
then
  source "$DIR/windows-setup.sh"
fi
if [[ "$CHOICES" =~ html5 ]];
then
  source "$DIR/html5-setup.sh"
fi
exit 0
|
brunosxs/godotbox
|
scripts/main-menu.sh
|
Shell
|
mit
| 791 |
#!/bin/bash
# Cross-compile the GUY-qt wallet for 32-bit Windows using a static MXE
# (M cross environment) toolchain mounted at /mnt/mxe.
MXE_INCLUDE_PATH=/mnt/mxe/usr/i686-w64-mingw32.static/include
MXE_LIB_PATH=/mnt/mxe/usr/i686-w64-mingw32.static/lib
# Generate Makefiles with MXE's qmake; each VAR=value pair below points the
# .pro file at the cross-compiled boost/openssl/bdb/miniupnpc libraries.
i686-w64-mingw32.static-qmake-qt5 \
BOOST_LIB_SUFFIX=-mt \
BOOST_THREAD_LIB_SUFFIX=_win32-mt \
BOOST_INCLUDE_PATH=$MXE_INCLUDE_PATH/boost \
BOOST_LIB_PATH=$MXE_LIB_PATH \
OPENSSL_INCLUDE_PATH=$MXE_INCLUDE_PATH/openssl \
OPENSSL_LIB_PATH=$MXE_LIB_PATH \
BDB_INCLUDE_PATH=$MXE_INCLUDE_PATH \
BDB_LIB_PATH=$MXE_LIB_PATH \
MINIUPNPC_INCLUDE_PATH=$MXE_INCLUDE_PATH \
MINIUPNPC_LIB_PATH=$MXE_LIB_PATH \
QMAKE_LRELEASE=/mnt/mxe/usr/i686-w64-mingw32.static/qt5/bin/lrelease GUY-qt.pro
# Build the release target produced by qmake above.
make -f Makefile.Release
|
anonfgc/GUY
|
compile.sh
|
Shell
|
mit
| 643 |
#!/bin/bash
# Byte-compile every Emacs Lisp file under ~/.emacs.d, one emacs batch
# invocation per file (NUL-delimited, so paths with spaces are safe).
find ~/.emacs.d/ -iname '*.el' -print0 | xargs -I{} -0 emacs -batch -f batch-byte-compile {}
|
morgen-peschke/bash-config
|
bin/compile-emacs-libraries.sh
|
Shell
|
mit
| 105 |
#!/usr/bin/env bash
# NOTE(review): every provisioning step below is commented out, so running
# this script is currently a no-op. Kept as a reference recipe for a
# Selenium/Chrome/Xvfb test rig plus SonarQube, Keycloak and Nexus in Docker.
# wget -c http://chromedriver.storage.googleapis.com/2.24/chromedriver_linux64.zip
# sudo ln -s /home/vagrant/node_modules node_modules
# Run SONARQUBE
# sudo docker run -d --name sonarqube -p 9000:9000 -p 9092:9092 sonarqube
# # docker run -d -p 9000:9000 -p 9092:9092 sonarqube
# # Run keycloak
# docker run -e KEYCLOAK_USER=admin -e KEYCLOAK_PASSWORD=admin -p 9990:9990 -p 9998:8080 jboss/keycloak
# # Run Nexus
# mkdir /home/vagrant/data
# mkdir /home/vagrant/data/nexus-data && chown -R 200 /home/vagrant/data/nexus-data
# docker run -d -p 8081:8081 --name nexus -v /srv/nexus-data:/sonatype-work sonatype/nexus
# if [ -e /.installed ]; then
#   echo 'Already installed.'
# else
#   echo ''
#   echo 'INSTALLING'
#   echo '----------'
#   # Add Google public key to apt
#   wget -q -O - "https://dl-ssl.google.com/linux/linux_signing_key.pub" | sudo apt-key add -
#   # Add Google to the apt-get source list
#   echo "deb http://dl.google.com/linux/chrome/deb/ stable main" | tee -a /etc/apt/sources.list
#   # Update app-get
#   apt-get update -yq
#   # Install Java, Chrome, Xvfb, and unzip
#   apt-get -y install google-chrome-stable unzip
#   # Download and copy the ChromeDriver to /usr/local/bin
#   cd /tmp
#   wget "https://chromedriver.googlecode.com/files/chromedriver_linux64_2.2.zip"
#   wget "https://selenium.googlecode.com/files/selenium-server-standalone-2.35.0.jar"
#   unzip chromedriver_linux64_2.2.zip
#   mv chromedriver /usr/local/bin
#   mv selenium-server-standalone-2.35.0.jar /usr/local/bin
#   # So that running `vagrant provision` doesn't redownload everything
#   touch /.installed
# fi
# # Start Xvfb, Chrome, and Selenium in the background
# export DISPLAY=:10
# cd /vagrant
# echo "Starting Xvfb ..."
# Xvfb :10 -screen 0 1366x768x24 -ac &
# echo "Starting Google Chrome ..."
# google-chrome --remote-debugging-port=9222 &
# echo "Starting Selenium ..."
# cd /usr/local/bin
# nohup java -jar ./selenium-server-standalone-2.35.0.jar &
|
OElabed/ice-microservices
|
vagrant/provision/setup.sh
|
Shell
|
mit
| 2,079 |
#!/bin/bash -xe
#
# First-time init for dhcpd
# Remove any stale lock file
# (the PID file left behind by an unclean container shutdown would stop
# dhcpd from starting again)
rm -f /run/dhcpd.pid
# Create the leases file
# (dhcpd refuses to start if the leases database does not exist)
touch /var/lib/dhcp/dhcpd.leases
|
zultron/docker-provisioning
|
dhcpd/init.sh
|
Shell
|
mit
| 156 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for CESA-2010:0061
#
# Security announcement date: 2010-01-22 14:11:11 UTC
# Script generation date: 2017-01-01 21:10:04 UTC
#
# Operating System: CentOS 5
# Architecture: i386
#
# NOTE(review): generated metadata looks inconsistent — the header says
# CentOS 5 / i386 but the fixed versions are el4 builds and an x86_64
# package is installed; confirm against the upstream advisory.
#
# Vulnerable packages fix on version:
# - gzip.x86_64:1.3.3-18.el4_8.1
# - gzip.i386:1.3.3-18.el4_8.1
#
# Last versions recommended by security team:
# - gzip.x86_64:1.3.3-18.el4_8.1
# - gzip.i386:1.3.3-18.el4_8.1
#
# CVE List:
# - CVE-2010-0001
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo yum install gzip.x86_64-1.3.3 -y
sudo yum install gzip.i386-1.3.3 -y
|
Cyberwatch/cbw-security-fixes
|
CentOS_5/i386/2010/CESA-2010:0061.sh
|
Shell
|
mit
| 699 |
# Bootstrap personal dotfiles: install oh-my-zsh if it is missing, then
# symlink this repo's .zshrc and .gitconfig into $HOME, backing up any
# pre-existing files first.
# FIX: quote $0 and ${BASEDIR} so the script works when checked out into a
# path containing spaces.
BASEDIR=$(dirname "$0")
# First run only: fetch and execute the oh-my-zsh installer.
if [ ! -d ~/.oh-my-zsh ];
then
  wget --no-check-certificate http://install.ohmyz.sh -O - | sh
fi
# Back up an existing .zshrc before linking ours.
if [ -f ~/.zshrc ];
then
  mv ~/.zshrc ~/.zshrc.bak
fi
ln -s "${BASEDIR}/.zshrc" ~/.zshrc
# Back up an existing .gitconfig before linking ours.
if [ -f ~/.gitconfig ];
then
  mv ~/.gitconfig ~/.gitconfig.bak
fi
ln -s "${BASEDIR}/.gitconfig" ~/.gitconfig
|
mitchellolsthoorn/dotfiles
|
setup.sh
|
Shell
|
mit
| 328 |
#!/bin/bash
# manage.sh — install/start/stop/clean/cert helper for this server project.
# env.conf supplies deployment settings ($SERVER_NAME, $SERVER_FQDN, ...).
. env.conf
# Absolute directory of this script, independent of the caller's cwd.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Debian/Ubuntu package manager used by do_install.
PACKAGE_MANAGER="apt-get"
# Install host prerequisites: cron entries, git/make, and the Docker engine
# from the (legacy) apt.dockerproject.org repository.
# Only Ubuntu 14.04 and 16.04 are supported; anything else aborts.
# Must be run as root (installs packages, writes apt sources, starts docker).
do_install() {
  printf "Installing... "
  crontab crontab
  sh -c "$PACKAGE_MANAGER -y update"
  sh -c "$PACKAGE_MANAGER -y install git"
  sh -c "$PACKAGE_MANAGER -y install make"
  if [ -n "$(python -mplatform | grep -i Ubuntu)" ]; then
    # Ubuntu
    apt-get -y update
    apt-get -y install apt-transport-https ca-certificates
    apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D
    # Only for 14.04 (change for any other ubuntu version):
    if [ -n "$(python -mplatform | grep -i 14.04)" ]; then
      echo "deb https://apt.dockerproject.org/repo ubuntu-trusty main" | sudo tee /etc/apt/sources.list.d/docker.list
    elif [ -n "$(python -mplatform | grep -i 16.04)" ]; then
      echo "deb https://apt.dockerproject.org/repo ubuntu-xenial main" | sudo tee /etc/apt/sources.list.d/docker.list
    else
      echo "Unsupported ubuntu version." >&2
      exit 1;
    fi
    apt-get -y update
    # Remove the old docker package name before installing docker-engine.
    apt-get -y purge lxc-docker
    apt-cache policy docker-engine
    apt-get install -y linux-image-extra-$(uname -r)
    apt-get install -y --force-yes docker-engine
    service docker start
    # Smoke-test the docker installation.
    docker run hello-world
  else
    echo "Unsupported OS." >&2
    exit 1;
  fi
  git submodule update --init
  printf "Installed\n"
}
# Build and launch the service via the project Makefile, creating the
# log directory on first run.
do_start() {
  echo "Starting..."
  # Folders:
  mkdir -p $DIR/logs
  # Start components:
  make build run
  echo "Started"
}
# Print the current UTC time as an ISO-8601 timestamp,
# e.g. 2024-01-31T09:15:00Z (used to name rotated log files).
get_date() {
  # %F = %Y-%m-%d and %T = %H:%M:%S, so the output is identical to the
  # long-form "+%Y-%m-%dT%H:%M:%SZ" format string.
  date -u '+%FT%TZ'
}
# Stop the service and archive its output under logs/, stamped with the
# current UTC time (see get_date).
do_stop() {
  echo "Stopping... "
  make stop LOG="$DIR/logs/$SERVER_NAME-$(get_date).log"
  echo "Stopped"
}
# Remove build artifacts via the project Makefile's clean target.
do_clean() {
  echo "Cleaning... "
  make clean
  echo "Clean"
}
# Obtain a TLS certificate for $SERVER_FQDN via Let's Encrypt (standalone
# challenge) and stage it into $DIR/certs. If issuance fails (e.g. no public
# DNS), fall back to generating a self-signed certificate instead.
do_gen_cert() {
  cd $DIR/letsencrypt
  if ./letsencrypt-auto certonly --standalone -d $SERVER_FQDN ; then
    mkdir -p $DIR/certs
    rm $DIR/certs/*
    cp /etc/letsencrypt/live/$SERVER_FQDN/fullchain.pem $DIR/certs/cert.pem
    cp /etc/letsencrypt/live/$SERVER_FQDN/privkey.pem $DIR/certs/privkey.pem
  else
    mkdir -p $DIR/certs
    rm $DIR/certs/*
    # Self-signed fallback, valid for one year, no passphrase.
    openssl req -x509 -newkey rsa:2048 -keyout $DIR/certs/privkey.pem -out $DIR/certs/cert.pem -days 365 -nodes
  fi
  cd $DIR
}
# Dispatch on the requested action (first CLI argument).
case "$1" in
  install)
    do_install;
    ;;
  start)
    do_start;
    ;;
  restart|reload|force-reload)
    do_stop;
    do_start;
    ;;
  stop)
    do_stop;
    ;;
  clean)
    do_clean;
    ;;
  generate-cert)
    do_gen_cert;
    ;;
  *)
    # Unknown/missing action: print usage; exit 3 matches LSB "unimplemented".
    echo "Usage: $0 start|stop|install|clean|generate-cert" >&2
    exit 3
    ;;
esac
|
kkleidal/PoshServerTemplate
|
manage.sh
|
Shell
|
mit
| 2,835 |
#!/usr/bin/env bash
# Create an nginx vhost for a CodeIgniter site with a self-signed TLS cert.
# Usage: create-vhost-codeigniter.sh <server_name> <doc_root> [http_port] [https_port]
# e.g.:  create-vhost-codeigniter.sh "centbox.app" "/var/www/default"
# (the "laravel" name in the original usage example was a copy-paste leftover)
sudo mkdir -p /etc/nginx/sites_enabled 2>/dev/null
sudo mkdir -p /etc/nginx/ssl 2>/dev/null
PATH_SSL="/etc/nginx/ssl"
PATH_KEY="${PATH_SSL}/${1}.key"
PATH_CSR="${PATH_SSL}/${1}.csr"
PATH_CRT="${PATH_SSL}/${1}.crt"
# Generate key/CSR/cert only once per server name; reuse on later runs.
if [ ! -f $PATH_KEY ] || [ ! -f $PATH_CSR ] || [ ! -f $PATH_CRT ]
then
  sudo openssl genrsa -out "$PATH_KEY" 2048 2>/dev/null
  sudo openssl req -new -key "$PATH_KEY" -out "$PATH_CSR" -subj "/CN=$1/O=Vagrant/C=UK" 2>/dev/null
  sudo openssl x509 -req -days 365 -in "$PATH_CSR" -signkey "$PATH_KEY" -out "$PATH_CRT" 2>/dev/null
fi
# nginx server block; $3/$4 default the listen ports to 80/443.
block="server {
    listen ${3:-80};
    listen ${4:-443} ssl http2;
    server_name $1;
    root \"$2\";
    ssl_certificate /etc/nginx/ssl/$1.crt;
    ssl_certificate_key /etc/nginx/ssl/$1.key;
    include /etc/nginx/sites_conf/common_general.conf;
    include /etc/nginx/sites_conf/common_codeigniter.conf;
    include /etc/nginx/sites_conf/common_errors.conf;
    include /etc/nginx/sites_conf/common_log.conf;
    include /etc/nginx/sites_conf/common_php.conf;
}
"
# (Re)write the vhost file, overwriting any previous definition.
sudo test -e /etc/nginx/sites_enabled/$1.conf || sudo touch /etc/nginx/sites_enabled/$1.conf
sudo sh -c "echo '$block' > /etc/nginx/sites_enabled/$1.conf"
|
danydavila/CentBox
|
scripts/create-vhost-codeigniter.sh
|
Shell
|
mit
| 1,274 |
#!/bin/bash
# Top-level dotfiles installer: symlinks configs, runs macOS-specific setup,
# creates working directories, and switches the login shell to zsh.
echo "Installing dotfiles"
source install/link.sh
# macOS-only section (Homebrew, defaults, nvm, nginx).
if [ "$(uname)" == "Darwin" ]; then
    echo "Running on OSX"
    echo "Brewing all the things"
    source install/install_tools.sh
    echo "Updating OSX settings"
    source install/osx.sh
    echo "Installing node (from nvm)"
    source install/nvm.sh
    echo "Configuring nginx"
    # create a backup of the original nginx.conf
    mv /usr/local/etc/nginx/nginx.conf /usr/local/etc/nginx/nginx.original
    ln -s ~/.dotfiles/nginx/nginx.conf /usr/local/etc/nginx/nginx.conf
    # symlink the code.dev from dotfiles
    ln -s ~/.dotfiles/nginx/code.dev /usr/local/etc/nginx/sites-enabled/code.dev
fi
echo "creating vim directories"
mkdir -p ~/.vim-tmp
echo "Creating Sites, Code, Chef, Notes directories! :D Its the little things!"
mkdir -p ~/Documents/Code
mkdir -p ~/Documents/Code/Sites
mkdir -p ~/Documents/Code/Chef_Projects
mkdir -p ~/Documents/Notes
echo "Creating personalizable exports i.e for work duh..api keys?"
# ~/.localrc holds machine-local secrets sourced by .zshrc; never committed.
touch ~/.localrc
echo "Configuring zsh as default shell"
chsh -s $(which zsh)
echo "Done."
|
hacker1db/Dotfiles
|
install.sh
|
Shell
|
mit
| 1,105 |
#!/usr/bin/env node
/*global require, console, process*/
// CLI wrapper around RepoDiffReporter: diffs a Lively Kernel git checkout
// against a Webwerkstatt svn checkout and writes a JSON report.
// Requires
var optparse = require('optparse'),
    RepoDiffReporter = require('./diffReporter').RepoDiffReporter;
// shell options
var switches = [
    ['-h', '--help', "Shows this help section."],
    ['--lk DIR', "Root directory of the Lively Kernel git repository"],
    ['--ww DIR', "Root directory of the Webwerksatt svn repository"],
    ['--output FILE', "JSON file to write the diff report into"]],
    parser = new optparse.OptionParser(switches),
    lkDir, wwDir, outFile;
// --help prints usage and exits before any work is done.
parser.on("help", function() {
    console.log(parser.toString());
    process.exit(0);
});
parser.on("lk", function(name, value) { lkDir = value });
parser.on("ww", function(name, value) { wwDir = value });
parser.on("output", function(name, value) { outFile = value });
parser.parse(process.argv);
// now do stuff
// NOTE(review): missing --lk/--ww/--output leave these undefined; presumably
// RepoDiffReporter validates its settings — confirm.
var settings = {
    lk: {root: lkDir, updateMethod: "updateGIT"},
    ww: {root: wwDir, updateMethod: "updateSVN"},
    reportFile: outFile
};
RepoDiffReporter.createReport(settings);
|
LivelyKernel/livelykernel-scripts
|
scripts/ww-diff/runDiff.sh
|
Shell
|
mit
| 1,058 |
#!/bin/bash -x
#
# Generated - do not edit!
# (MPLAB X packaging step: stages the built firmware image into a temp tree
# and tars it. IMAGE_TYPE and OUTPUT_SUFFIX are supplied by the caller's
# environment — TODO confirm against the generating Makefile.)
#
# Macros
TOP=`pwd`
CND_CONF=Max32
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/Strip2.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=Strip2.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=strip2.x/
# Functions
# Abort the whole script if the previous command failed.
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/strip2.x/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/strip2.x.tar
cd ${TMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/strip2.x.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
|
MattAtHazmat/PIC32LEDStrip
|
PIC32LEDStrip2/Strip2/firmware/Strip2.X/nbproject/Package-Max32.bash
|
Shell
|
cc0-1.0
| 1,361 |
#!/bin/bash
# Irradiation-mapping driver for Radivity: prepares the module work tree,
# fetches climate data, and renders irradiation images into out/.
# Depends on helpers (get_climate, get_options_irrad, ...) sourced below
# from $RADIVSCRIPTS.
. $RADIVSCRIPTS/funcs/functions_radivity.sh
. $RADIVSCRIPTS/funcs/functions_mod.sh
. $RADIVSCRIPTS/funcs/functions_irrad.sh
. $RADIVSCRIPTS/funcs/progbar.sh
runtime_initial=$(date +"%s")
expert_mode=false
path_to_module=$PWD"/modules/irrad"
root_path=$(echo $PWD)
# Positional args: $1 = log file, $2 = full user name, $3 (if set) enables
# expert mode.
logfile=$1
fullusername=$2
if [ ! -z $3 ]; then
	expert_mode=true
fi
#time_run=$(echo $(date) | sed -e 's/, /_/g;s/ /_/g;s/:/-/g')
time_run=$(date +%F_%H-%M-%S)
# Timestamped output directory for this run's images.
outd=$root_path/out/"irrad_images_"$time_run
mkdir $outd
echo -e "******************\nIrradiation Mapping:\n******************" >> $logfile
# Create (or empty) the module's working sub-directories.
if [ ! -d $path_to_module ];then
	mkdir -p $path_to_module/tmp
fi
if [ ! -d $path_to_module/tmp ];then
	mkdir $path_to_module/tmp
else
	rm -fr $path_to_module/tmp/*
fi
if [ ! -d $path_to_module/climate ];then
	mkdir $path_to_module/climate
else
	rm -fr $path_to_module/climate/*
fi
if [ ! -d $path_to_module/rotated_sky ];then
	mkdir $path_to_module/rotated_sky
fi
if [ ! -d $path_to_module/config ];then
	mkdir $path_to_module/config
fi
if [ ! -d $path_to_module/octrees ];then
	mkdir $path_to_module/octrees
fi
if [ ! -d $path_to_module/out ];then
	mkdir $path_to_module/out
fi
# Fetch climate data, then run the irradiation calculation; exit code 666 is
# the helpers' user-abort signal and is propagated unchanged.
get_climate irr
climate_file=$(ls $path_to_module/climate/*.dat)
climate_irrad=$path_to_module"/climate/"$(basename $climate_file .dat)"_irrad.dat"
get_options_irrad $climate_irrad $logfile
irrad_options.sh $options --log $logfile --expert $expert_mode 2> /dev/null
if [ "$?" -eq 666 ]; then
	exit 666
fi
# Collect results (colour scale, renderings, animated gifs) into $outd.
cp $path_to_module/tmp/scale.bmp $outd
mkdir $outd/renderings
cp $path_to_module/out/*.pic $outd/renderings
mv $path_to_module/out/*.gif $outd
|
wolfris/radivity
|
radivity/scripts/irr.sh
|
Shell
|
gpl-2.0
| 1,661 |
#!/bin/sh
# Install the quarter-tiling KWin script for the current user and register
# its metadata so it appears in KWin's script settings (KDE Plasma 5).
plasmapkg2 --type kwinscript -i .
mkdir -p ~/.local/share/kservices5
ln -sf ~/.local/share/kwin/scripts/quarter-tiling/metadata.desktop ~/.local/share/kservices5/kwin-script-quarter-tiling.desktop
|
Jazqa/kwin-quarter-tiling
|
install.sh
|
Shell
|
gpl-2.0
| 208 |
#! /bin/sh -e
# tup - A file-based build system
#
# Copyright (C) 2011-2020 Mike Shal <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Make sure we can export environment variables.
# Regression test: rules see an env var only after an `export` directive in
# the Tupfile, and the dependency on it is tracked/removed correctly.
export FOO="hey"
. ./tup.sh
check_no_windows shell
cat > Tupfile << HERE
: |> sh ok.sh > %o |> out.txt
HERE
cat > ok.sh << HERE
echo "foo is \$FOO"
HERE
update
# Without an export directive the rule must NOT see $FOO.
echo 'foo is ' | diff - out.txt
cat > Tupfile << HERE
export FOO
: |> sh ok.sh > %o |> out.txt
HERE
tup touch Tupfile
update
echo 'foo is hey' | diff - out.txt
# Changing the exported variable must trigger a rebuild with the new value.
export FOO="yo"
update
echo 'foo is yo' | diff - out.txt
cat > Tupfile << HERE
export FOO
: |> ^ run script > %o^ sh ok.sh > %o |> out.txt
HERE
tup touch Tupfile
update
echo 'foo is yo' | diff - out.txt
# The env dependency must attach to the command node, not the directory.
tup_dep_exist . ok.sh . 'sh ok.sh > out.txt'
tup_dep_no_exist $ FOO 0 .
tup_dep_exist $ FOO . 'sh ok.sh > out.txt'
cat > Tupfile << HERE
: |> ^ run script > %o^ sh ok.sh > %o |> out.txt
HERE
tup touch Tupfile
update
echo 'foo is ' | diff - out.txt
# Have to modify the environment variable before tup removes it.
export FOO="latest"
update
tup_object_no_exist $ FOO
eotup
|
ppannuto/tup
|
test/t4057-environ3.sh
|
Shell
|
gpl-2.0
| 1,702 |
# Encrypt-and-tar counterpart of dtgz; still a stub.
etgz(){
  printf '%s\n' "not implemented"
}
# Decrypt an AES-256-CBC encrypted tarball and extract it.
#   $1 - encrypted archive (openssl prompts for the passphrase)
#   $2 - destination directory (defaults to /tmp)
# FIX: the original wrote ${2:/tmp} (substring expansion, not a default) and
# then ignored the variable, passing unquoted $2 to tar — so the /tmp
# fallback never worked. Use the correct ${2:-/tmp} and quote the paths.
dtgz(){
  local directory=${2:-/tmp}
  openssl enc -d -aes-256-cbc -in "$1" | tar zvx -C "$directory"
}
|
agustim/myscripts
|
endecode.sh
|
Shell
|
gpl-2.0
| 126 |
#!/bin/sh
# Open a Thunderbird compose window for the address/mailto URL in $1,
# echoing the argument for logging and detaching Thunderbird from the shell.
echo "$1"
/usr/bin/thunderbird -compose "$1" > /dev/null 2>&1 &
|
gchiesa/sp2html-server
|
bin/mail.sh
|
Shell
|
gpl-2.0
| 76 |
#!/bin/sh
#
# Migrate records from ARMS 1B to 2A.
#
#
# Usage: arms1b-2a.sh
#
# Copyright (C) 2014, QCIF Ltd.
#----------------------------------------------------------------
# usage:
#   arms1b-2a nodename
#
# Filters a node's records out of the ReDBox storage tree, rewrites their
# jsonConfigPid, injects updated rule files, and tars the result for the
# node operator.
# Print usage to stdout. (The heredoc text is runtime output and is kept
# verbatim, including the original "stroage" typo.)
help()
{
	cat << eof
Working directory has to be one level above stroage: e.g. /opt/redbox
Usage: $0 nodename
eof
}
# Print an error message to stderr and abort with status 1.
# FIX: the original `echo $@` left the message unquoted, so it was
# word-split and glob-expanded; "$*" emits the arguments verbatim.
die() {
	echo "$*" >&2
	exit 1
}
# Require the node name argument.
if [ -z $1 ]; then
	help
	die "Node name is needed"
fi
NodeName=$1
workingDir=`pwd`
# Fixed storage hash for the 1B record tree.
storageDir='storage/1793582ab247f6442162a75562dcc548'
# NOTE(review): hard-coded developer path; must exist on the machine running
# the migration.
scriptDir=/home/li/Documents/redbox-dev/ARMS333
if [ ! -d $storageDir ]; then
  echo "$storageDir does not exist in $workingDir"
  exit 0
fi
# Extract only this node's records into $workingDir/$NodeName/.
PATTERN="jsonConfigPid=arms-"
echo "grep -rl $PATTERN $storageDir/*"
grep -r $PATTERN $storageDir/* | python $scriptDir/filter_node.py $NodeName
if [ $? -gt 0 ]; then
	die "Failed to filter records for $NodeName. Fix previous error. Maybe just no records?"
fi
# Rewrite every "arms-<node>" config pid to the plain "arms" pid in place.
grep -rl arms-$NodeName $workingDir/$NodeName/$storageDir/* | xargs sed -i s/arms-$NodeName/arms/
# grep exiting 1 means no matches remain, i.e. the rewrite succeeded.
grep -rl arms-$NodeName $workingDir/$NodeName/$storageDir/*
if [ $? -eq 1 ]; then
	echo "job might successfully completed: no arms-$NodeName found from $workingDir/$NodeName/$storageDir"
fi
# Inject the updated harvest rule files into the migrated tree.
tar zxf $scriptDir/rule_files.tar.gz -C $workingDir/$NodeName/$storageDir/ || die "failed to extract rule files"
# Prepare package for node people
cp -rp $workingDir/solr $workingDir/$NodeName/
cd $NodeName/
tar zcf $NodeName.tar.gz *
cd -
mv $NodeName/$NodeName.tar.gz .
echo "Give this to node operator"
ls -l $NodeName.tar.gz
|
qcif/rdsi-arms
|
support/1B-2AMigration/arms1b-2a.sh
|
Shell
|
gpl-2.0
| 1,534 |
#!/bin/bash
# Kernel build script for the HTC One (m7, Google Play Edition): builds the
# kernel for every config under m7-configs, collects zImage + modules into a
# flashable AnyKernel-style zip per config.
# Colorize and add text parameters
red=$(tput setaf 1)             #  red
grn=$(tput setaf 2)             #  green
cya=$(tput setaf 6)             #  cyan
txtbld=$(tput bold)             # Bold
bldred=${txtbld}$(tput setaf 1) #  red
bldgrn=${txtbld}$(tput setaf 2) #  green
bldblu=${txtbld}$(tput setaf 4) #  blue
bldcya=${txtbld}$(tput setaf 6) #  cyan
txtrst=$(tput sgr0)             # Reset
# Get Build Startup Time (gdate = GNU date when cross-building on macOS)
if [ -z "$OUT_TARGET_HOST" ]
then
res1=$(date +%s.%N)
else
res1=$(gdate +%s.%N)
fi
# Path to build your kernel
k=~/m7-gpe
# Directory for the any kernel updater
t=$k/packages
# Date to add to zip
today=$(date +"%m%d%Y")
# Clean Kernel
echo "${bldcya}Clean ${bldcya}Kernel${txtrst}"
make clean
# Clean old builds
echo "${bldred}Clean ${bldred}Out ${bldred}Folder${txtrst}"
rm -rf $k/out
# Setup the build: one pass per defconfig in m7-configs.
cd $k/arch/arm/configs/m7-configs
for c in *
do
cd $k
# Setup output directory
mkdir -p "out/$c"
cp -R "$t/system" out/$c
cp -R "$t/META-INF" out/$c
cp -R "$t/boot" out/$c
cp -R "$t/config" out/$c
cp -R "$t/l2m" out/$c
cp -R "$t/no_l2m" out/$c
mkdir -p "out/$c/system/lib/modules/"
m=$k/out/$c/system/lib/modules
TOOLCHAIN=/home/tal/linaro-toolchains/arm-cortex_a15/arm-cortex_a15-linux-gnueabihf-linaro_4.9.1-2014.06/bin/arm-cortex_a15-linux-gnueabihf-
export ARCH=arm
export SUBARCH=arm
# make mrproper
#make CROSS_COMPILE=$TOOLCHAIN -j`grep 'processor' /proc/cpuinfo | wc -l` mrproper
# remove backup files
find ./ -name '*~' | xargs rm
# rm compile.log
# make kernel (parallel jobs = CPU count)
make 'm7_defconfig'
make -j`grep 'processor' /proc/cpuinfo | wc -l` CROSS_COMPILE=$TOOLCHAIN #>> compile.log 2>&1 || exit -1
# Grab modules & zImage
echo ""
echo "<<>><<>>  ${bldred}Collecting ${bldred}modules ${bldred}and ${bldred}zimage${txtrst} <<>><<>>"
echo ""
cp $k/arch/arm/boot/zImage out/$c/boot/zImage
for mo in $(find . -name "*.ko"); do
cp "${mo}" $m
done
# Build Zip
clear
echo "${bldcya}Creating ${bldcya}$z.zip${txtrst}"
# Version Number to add to zip (maintained by the kernel build in .version)
echo "${bldblu}Version ${bldblu}Number${txtrst}"
VERSION=$(cat '.version')
z=$c-"r${VERSION}"-$today
cd $k/out/$c/
7z a "$z.zip"
mv $z.zip $k/out/$z.zip
# cp $k/out/$z.zip $db/$z.zip
# rm -rf $k/out/$c
# Line below for debugging purposes, uncomment to stop script after each config is run
#read this
done
# Get Build Time
if [ -z "$OUT_TARGET_HOST" ]
then
res2=$(date +%s.%N)
else
res2=$(gdate +%s.%N)
fi
echo "${bldgrn}Total ${bldblu}time ${bldred}elapsed: ${txtrst}${grn}$(echo "($res2 - $res1) / 60"|bc ) minutes ($(echo "$res2 - $res1"|bc ) seconds) ${txtrst}"
echo "************************************************************************"
# NOTE(review): ${bldylw} below is never defined, so it expands to nothing.
echo "${bldylw}${bldred}Build ${bldcya}Numba ${bldblu}${VERSION} ${txtrst}"
echo "${bldylw}${bldred}My ${bldcya}Kernels ${bldblu}Build ${bldred}Fast${txtrst}"
echo "************************************************************************"
|
talnoah/m7-gpe
|
build.sh
|
Shell
|
gpl-2.0
| 3,043 |
#!/bin/tcsh -xef
# Group-level mixed-effects meta-analysis (AFNI 3dMEMA) comparing the
# memory-judgement contrast between controls (CTL) and MDD patients, within
# a Talairach template mask. For each subject the two sub-bricks [17]/[18]
# of the rbuck.recallGAM dataset are passed as the effect estimate and its
# t-statistic — presumably beta and t for the "judge" regressor; TODO
# confirm the sub-brick labels with 3dinfo.
# Options: drop voxels with >33% zero inputs, model outlying subjects,
# allow unequal group variances, and write residual Z maps.
3dMEMA -prefix memJudge_MDD-CTL_tlrcMSK \
	-jobs 4 \
	-groups CTL MDD\
	-mask tlrcTemplate_mask+tlrc \
	-set judge_CTL \
	PARC_sub_2699 rbuck.recallGAM.PARC_sub_2699+tlrc'[17]' rbuck.recallGAM.PARC_sub_2699+tlrc'[18]' \
	PARC_sub_2754 rbuck.recallGAM.PARC_sub_2754+tlrc'[17]' rbuck.recallGAM.PARC_sub_2754+tlrc'[18]' \
	PARC_sub_2778 rbuck.recallGAM.PARC_sub_2778+tlrc'[17]' rbuck.recallGAM.PARC_sub_2778+tlrc'[18]' \
	PARC_sub_2784 rbuck.recallGAM.PARC_sub_2784+tlrc'[17]' rbuck.recallGAM.PARC_sub_2784+tlrc'[18]' \
	PARC_sub_2786 rbuck.recallGAM.PARC_sub_2786+tlrc'[17]' rbuck.recallGAM.PARC_sub_2786+tlrc'[18]' \
	PARC_sub_2792 rbuck.recallGAM.PARC_sub_2792+tlrc'[17]' rbuck.recallGAM.PARC_sub_2792+tlrc'[18]' \
	PARC_sub_2796 rbuck.recallGAM.PARC_sub_2796+tlrc'[17]' rbuck.recallGAM.PARC_sub_2796+tlrc'[18]' \
	PARC_sub_2825 rbuck.recallGAM.PARC_sub_2825+tlrc'[17]' rbuck.recallGAM.PARC_sub_2825+tlrc'[18]' \
	PARC_sub_2834 rbuck.recallGAM.PARC_sub_2834+tlrc'[17]' rbuck.recallGAM.PARC_sub_2834+tlrc'[18]' \
	PARC_sub_2841 rbuck.recallGAM.PARC_sub_2841+tlrc'[17]' rbuck.recallGAM.PARC_sub_2841+tlrc'[18]' \
	PARC_sub_2848 rbuck.recallGAM.PARC_sub_2848+tlrc'[17]' rbuck.recallGAM.PARC_sub_2848+tlrc'[18]' \
	PARC_sub_2885 rbuck.recallGAM.PARC_sub_2885+tlrc'[17]' rbuck.recallGAM.PARC_sub_2885+tlrc'[18]' \
	PARC_sub_2945 rbuck.recallGAM.PARC_sub_2945+tlrc'[17]' rbuck.recallGAM.PARC_sub_2945+tlrc'[18]' \
	PARC_sub_2955 rbuck.recallGAM.PARC_sub_2955+tlrc'[17]' rbuck.recallGAM.PARC_sub_2955+tlrc'[18]' \
	PARC_sub_2958 rbuck.recallGAM.PARC_sub_2958+tlrc'[17]' rbuck.recallGAM.PARC_sub_2958+tlrc'[18]' \
	PARC_sub_2987 rbuck.recallGAM.PARC_sub_2987+tlrc'[17]' rbuck.recallGAM.PARC_sub_2987+tlrc'[18]' \
	PARC_sub_2993 rbuck.recallGAM.PARC_sub_2993+tlrc'[17]' rbuck.recallGAM.PARC_sub_2993+tlrc'[18]' \
	PARC_sub_3010 rbuck.recallGAM.PARC_sub_3010+tlrc'[17]' rbuck.recallGAM.PARC_sub_3010+tlrc'[18]' \
	-set judge_MDD \
	PARC_sub_2718 rbuck.recallGAM.PARC_sub_2718+tlrc'[17]' rbuck.recallGAM.PARC_sub_2718+tlrc'[18]' \
	PARC_sub_2726 rbuck.recallGAM.PARC_sub_2726+tlrc'[17]' rbuck.recallGAM.PARC_sub_2726+tlrc'[18]' \
	PARC_sub_2736 rbuck.recallGAM.PARC_sub_2736+tlrc'[17]' rbuck.recallGAM.PARC_sub_2736+tlrc'[18]' \
	PARC_sub_2747 rbuck.recallGAM.PARC_sub_2747+tlrc'[17]' rbuck.recallGAM.PARC_sub_2747+tlrc'[18]' \
	PARC_sub_2759 rbuck.recallGAM.PARC_sub_2759+tlrc'[17]' rbuck.recallGAM.PARC_sub_2759+tlrc'[18]' \
	PARC_sub_2761 rbuck.recallGAM.PARC_sub_2761+tlrc'[17]' rbuck.recallGAM.PARC_sub_2761+tlrc'[18]' \
	PARC_sub_2787 rbuck.recallGAM.PARC_sub_2787+tlrc'[17]' rbuck.recallGAM.PARC_sub_2787+tlrc'[18]' \
	PARC_sub_2788 rbuck.recallGAM.PARC_sub_2788+tlrc'[17]' rbuck.recallGAM.PARC_sub_2788+tlrc'[18]' \
	PARC_sub_2799 rbuck.recallGAM.PARC_sub_2799+tlrc'[17]' rbuck.recallGAM.PARC_sub_2799+tlrc'[18]' \
	PARC_sub_2829 rbuck.recallGAM.PARC_sub_2829+tlrc'[17]' rbuck.recallGAM.PARC_sub_2829+tlrc'[18]' \
	PARC_sub_2838 rbuck.recallGAM.PARC_sub_2838+tlrc'[17]' rbuck.recallGAM.PARC_sub_2838+tlrc'[18]' \
	PARC_sub_2853 rbuck.recallGAM.PARC_sub_2853+tlrc'[17]' rbuck.recallGAM.PARC_sub_2853+tlrc'[18]' \
	PARC_sub_2865 rbuck.recallGAM.PARC_sub_2865+tlrc'[17]' rbuck.recallGAM.PARC_sub_2865+tlrc'[18]' \
	PARC_sub_2874 rbuck.recallGAM.PARC_sub_2874+tlrc'[17]' rbuck.recallGAM.PARC_sub_2874+tlrc'[18]' \
	PARC_sub_2879 rbuck.recallGAM.PARC_sub_2879+tlrc'[17]' rbuck.recallGAM.PARC_sub_2879+tlrc'[18]' \
	PARC_sub_2903 rbuck.recallGAM.PARC_sub_2903+tlrc'[17]' rbuck.recallGAM.PARC_sub_2903+tlrc'[18]' \
	PARC_sub_2917 rbuck.recallGAM.PARC_sub_2917+tlrc'[17]' rbuck.recallGAM.PARC_sub_2917+tlrc'[18]' \
	PARC_sub_2927 rbuck.recallGAM.PARC_sub_2927+tlrc'[17]' rbuck.recallGAM.PARC_sub_2927+tlrc'[18]' \
	PARC_sub_2938 rbuck.recallGAM.PARC_sub_2938+tlrc'[17]' rbuck.recallGAM.PARC_sub_2938+tlrc'[18]' \
	PARC_sub_2939 rbuck.recallGAM.PARC_sub_2939+tlrc'[17]' rbuck.recallGAM.PARC_sub_2939+tlrc'[18]' \
	PARC_sub_2956 rbuck.recallGAM.PARC_sub_2956+tlrc'[17]' rbuck.recallGAM.PARC_sub_2956+tlrc'[18]' \
	-max_zeros .33 \
	-model_outliers \
	-unequal_variance \
	-residual_Z
|
sorensje/dissertation
|
3dmem_test_CTL-MDD_memJudge.sh
|
Shell
|
gpl-2.0
| 4,204 |
#!/usr/bin/bash
# build_tempsys.sh
# -------------------------------------------------------
# R3VRSL: Behavioral.Code.Memory
# build: 1.0
# Github: https://github.com/r3vrsl
# ---------------------------------------------------
# R3VRSL: 2-12-2016
# Copyright (c) 2016: R3VRSL Development
# URL: https://r3vrsl.com
# --------------------------------
# License: GPL-2.0+
# URL: http://opensource.org/licenses/gpl-license.php
# ---------------------------------------------------
# R3VRSL is free software:
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License as published by the Free Software
# Foundation, either version 2 of the License, or (at your discretion)
# any later version.
# ------------------
# Builds the LFS-style temporary toolchain (binutils/gcc cross pass 1).
###########################################
##---------------------------------------##
## BEGIN - INITIAL VARIABLE DECLARATIONS ##
##---------------------------------------##
###########################################
# Set build variables (standard LFS toolchain environment: clean hashing,
# POSIX locale, target triplet, and /tools-first PATH).
set +h
umask 022
R3VRSL=/mnt/r3vrsl
LC_ALL=POSIX
R3VRSL_TGT=$(uname -m)-r3vrsl-linux-gnu
PATH=/tools/bin:/bin:/usr/bin
export R3VRSL LC_ALL R3VRSL_TGT PATH
# Sets a logging timestamp
TIMESTAMP="$(date +"%Y%m%d-%H%M%S")"
# Sets build mount point
export R3VRSL=/mnt/r3vrsl
# Sets terminal colors
BLINK="\e[5m"
BLUE="\e[1m\e[34m"
CYAN="\e[1m\e[36m"
GREEN="\e[1m\e[32m"
GREY="\e[1m\e[30m"
NOCOLOR="\e[0m"
RED="\e[1m\e[31m"
UNDERLINE_TEXT="\e[4m"
WHITE="\e[1m\e[37m"
YELLOW="\e[1m\e[33m"
#########################################
##-------------------------------------##
## END - INITIAL VARIABLE DECLARATIONS ##
##-------------------------------------##
#########################################
##############################
##--------------------------##
## BEGIN - SCRIPT FUNCTIONS ##
##--------------------------##
##############################
#----------------------------------#
# BEGIN - DISPLAY LAYOUT FUNCTIONS #
#----------------------------------#
# Creates uniform look during script execution when called after clear command
# (prints the R3VRSL banner using the color variables defined above).
HEADER1 () {
  echo -e "  ${GREY}__________________________________________${NOCOLOR}"
  echo -e "\n  ${GREY}R${RED}3${GREY}VRSL${NOCOLOR} ${WHITE}Behavioral.Code.Memory Build:${NOCOLOR}1.0"
  echo -e "  ${GREY}__________________________________________${NOCOLOR}\n"
}
# Simple divider
# $1 selects the color (BLUE/CYAN/GREEN/GREY/RED/WHITE/YELLOW);
# anything else prints an uncolored divider.
DIVIDER1 () {
  case $1 in
    BLUE) echo -e "\n\n  ${BLUE}----------------------------------------------------------${NOCOLOR}\n\n";;
    CYAN) echo -e "\n\n  ${CYAN}----------------------------------------------------------${NOCOLOR}\n\n";;
    GREEN) echo -e "\n\n  ${GREEN}----------------------------------------------------------${NOCOLOR}\n\n";;
    GREY) echo -e "\n\n  ${GREY}----------------------------------------------------------${NOCOLOR}\n\n";;
    RED) echo -e "\n\n  ${RED}----------------------------------------------------------${NOCOLOR}\n\n";;
    WHITE) echo -e "\n\n  ${WHITE}----------------------------------------------------------${NOCOLOR}\n\n";;
    YELLOW) echo -e "\n\n  ${YELLOW}----------------------------------------------------------${NOCOLOR}\n\n";;
    *) echo -e "\n\n  ----------------------------------------------------------\n\n";;
  esac
}
# Clears $ amount of lines when called
CLEARLINE () {
  # To use, set CLINES=$# before function if you need to clear more than one line
  if [ -z "$CLINES" ]; then
    tput cuu 1 && tput el
  else
    tput cuu "$CLINES" && tput el
    unset CLINES
  fi
}
# Creates a $ line gap for easier log review
# $1: 15 or 10 blank lines; any other value prints 5.
SPACER () {
  case $1 in
    15) printf "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n";;
    10) printf "\n\n\n\n\n\n\n\n\n\n";;
    *) printf "\n\n\n\n\n";;
  esac
}
#--------------------------------#
# END - DISPLAY LAYOUT FUNCTIONS #
#--------------------------------#
#--------------------------------------------------#
# BEGIN - TEMPORARY SYSTEM PACKAGE BUILD FUNCTIONS #
#--------------------------------------------------#
# Cross-binutils pass 1 of the LFS-style toolchain bootstrap: configures
# binutils against /tools with the R3VRSL sysroot, builds, installs, and
# removes the build trees. Expects to start in $R3VRSL/sources.
BUILD_BINUTILS_PASS1 () {
  clear && HEADER1
  echo -e "  ${GREEN}Building binutils-2.25.1 PASS 1...${NOCOLOR}\n\n"
  sleep 5
  #####################
  ## Binutils-2.25.1 ##
  ## =============== ##
  ##     PASS -1-    ##
  #############################################################################################################
  ## To determine SBUs, use the following command:                                                           ##
  ## =============================================                                                           ##
  ## time { ../binutils-2.25.1/configure --prefix=/tools --with-sysroot=$R3VRSL --with-lib-path=/tools/lib \ ##
  ## --target=$R3VRSL_TGT --disable-nls --disable-werror && make && case $(uname -m) in \                    ##
  ## x86_64) mkdir -v /tools/lib && ln -sv lib /tools/lib64 ;; esac && make install; }                       ##
  ## =================================================================================                       ##
  ## Example results for a single SBU measurement with the following hardware:                               ##
  ## =========================================================================                               ##
  ## 8GB Memory, Intel Core i3, SSD:                                                                         ##
  ## real - 2m 1.212s                                                                                        ##
  ## user - 1m 32.530s                                                                                       ##
  ## sys - 0m 5.540s                                                                                         ##
  ## ================                                                                                        ##
  #############################################################################################################
  ## Example results for full temporary system build with the following hardware:                            ##
  ## ============================================================================                            ##
  ## 16GB Memory, Intel Core i3, SSD:                                                                        ##
  ## real - 38m 13.192s                                                                                      ##
  ## user - 35m 39.140s                                                                                      ##
  ## sys - 2m 20.787s                                                                                        ##
  ## ==================                                                                                      ##
  #############################################################################################################
  tar xf binutils-2.25.1.tar.gz &&
  cd binutils-2.25.1/
  mkdir -v ../binutils-build
  cd ../binutils-build
  ../binutils-2.25.1/configure \
    --prefix=/tools \
    --with-sysroot=$R3VRSL \
    --with-lib-path=/tools/lib \
    --target=$R3VRSL_TGT \
    --disable-nls \
    --disable-werror &&
  make &&
  case $(uname -m) in
    x86_64) mkdir -v /tools/lib && ln -sv lib /tools/lib64 ;;
  esac &&
  make install &&
  cd "$R3VRSL"/sources
  rm -rf binutils-2.25.1 binutils-build/
  printf "\n\n"
  echo -e "  ${GREEN}binutils-2.25.1 PASS 1 completed...${NOCOLOR}"
  SPACER
  sleep 5
}
# Cross-gcc pass 1: unpacks gcc plus in-tree mpfr/gmp/mpc, retargets the
# default startfile/dynamic-linker paths at /tools, then configures a
# C/C++-only bootstrap compiler without threads/libstdc++ (standard LFS
# pass-1 recipe). Expects to start in $R3VRSL/sources.
BUILD_GCC_PASS1 () {
  clear && HEADER1
  echo -e "  ${GREEN}Building gcc-5.2.0 PASS 1...${NOCOLOR}\n\n" && sleep 3
  ###############
  ## Gcc-5.2.0 ##
  ## ========= ##
  ##  PASS -1- ##
  ###############
  tar xf gcc-5.2.0.tar.gz
  cd gcc-5.2.0/
  tar -xf ../mpfr-3.1.3.tar.gz
  mv -v mpfr-3.1.3 mpfr
  tar -xf ../gmp-6.0.0.tar.gz
  mv -v gmp-6.0.0 gmp
  tar -xf ../mpc-1.0.3.tar.gz
  mv -v mpc-1.0.3 mpc
  # Point gcc's hard-wired /lib and /usr paths at /tools (the echo appends
  # literal preprocessor overrides to each config header).
  for file in $(find gcc/config -name linux64.h -o -name linux.h -o -name sysv4.h); do
    cp -uv $file{,.orig}
    sed -e 's@/lib\(64\)\?\(32\)\?/ld@/tools&@g' -e 's@/usr@/tools@g' $file.orig > $file
    echo '
#undef STANDARD_STARTFILE_PREFIX_1
#undef STANDARD_STARTFILE_PREFIX_2
#define STANDARD_STARTFILE_PREFIX_1 "/tools/lib/"
#define STANDARD_STARTFILE_PREFIX_2 ""' >> $file
    touch $file.orig
  done
  mkdir -v ../gcc-build
  cd ../gcc-build
  ../gcc-5.2.0/configure \
    --target=$R3VRSL_TGT \
    --prefix=/tools \
    --with-glibc-version=2.11 \
    --with-sysroot=$R3VRSL \
    --with-newlib \
    --without-headers \
    --with-local-prefix=/tools \
    --with-native-system-header-dir=/tools/include \
    --disable-nls \
    --disable-shared \
    --disable-multilib \
    --disable-decimal-float \
    --disable-threads \
    --disable-libatomic \
    --disable-libgomp \
    --disable-libquadmath \
    --disable-libssp \
    --disable-libvtv \
    --disable-libstdcxx \
    --enable-languages=c,c++ &&
  make &&
  make install &&
  cd "$R3VRSL"/sources
  rm -rf gcc-5.2.0 gcc-build/
  echo -e "\n\n  ${GREEN}gcc-5.2.0 PASS 1 completed${NOCOLOR}"
  SPACER 15
  sleep 5
}
# In Development
|
InterGenOS/r3vrsl_dev
|
r3vrsl_core_buildscripts/build_tempsys.sh
|
Shell
|
gpl-2.0
| 9,918 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
# NOTE(review): IMAGE_TYPE and OUTPUT_SUFFIX are expected to come from the
# environment (the MPLAB X build harness) -- they are not defined here.
TOP=`pwd`
CND_CONF=default
CND_DISTDIR=dist
# Staging area under the build tree where the package layout is assembled.
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
# Full path and basename of the image produced by the build.
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/LedBlinking.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=LedBlinking.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
# Top-level directory name used inside the generated tar archive.
PACKAGE_TOP_DIR=ledblinking.x/
# Functions
# Abort the whole script with the status of the most recently executed
# command if that command failed; otherwise fall through and continue.
function checkReturnCode
{
    # Capture $? immediately -- any other command would overwrite it.
    rc=$?
    [ "$rc" -eq 0 ] || exit "$rc"
}
# Create a directory (including parents), optionally applying a mode.
# $1 directory path
# $2 permission (optional)
function makeDirectory
{
    mkdir -p "$1"
    checkReturnCode
    # Apply the mode only when the caller supplied one.
    if [ -n "$2" ]
    then
        chmod "$2" "$1"
        checkReturnCode
    fi
}
# Copy one file into the packaging staging tree, optionally setting a mode.
# $1 from-file path
# $2 to-file path
# $3 permission (optional)
function copyFileToTmpDir
{
    cp "$1" "$2"
    checkReturnCode
    # Apply the mode only when the caller supplied one.
    if [ -n "$3" ]
    then
        chmod "$3" "$2"
        checkReturnCode
    fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
# Start from a clean staging directory on every run.
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/ledblinking.x/bin
# 0755: the packaged image must be readable/executable after extraction.
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/ledblinking.x.tar
cd ${TMPDIR}
# ../../../../ climbs back out of build/<conf>/<image>/tmp-packaging to $TOP.
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/ledblinking.x.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
|
nemesyslab/ee1001
|
labs/MPLAB/LedBlinking.X/nbproject/Package-default.bash
|
Shell
|
gpl-2.0
| 1,393 |
# Launch the Java side first; start the server only if it succeeded
# (&& short-circuits, and the script's exit status reflects the failure).
sh startjava.sh && sh startserver.sh
|
tcslab/Syndesi
|
start.sh
|
Shell
|
gpl-2.0
| 37 |
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2015 Xilinx, Inc. All Rights Reserved.
#
# Put the Xilinx ISE/Vivado tool directories at the front of PATH,
# handling the (unlikely) case of an unset/empty PATH.
if [ -z "$PATH" ]; then
  PATH=/afs/ece/support/xilinx/xilinx.release/Vivado/ids_lite/ISE/bin/lin64:/afs/ece/support/xilinx/xilinx.release/Vivado/bin
else
  PATH=/afs/ece/support/xilinx/xilinx.release/Vivado/ids_lite/ISE/bin/lin64:/afs/ece/support/xilinx/xilinx.release/Vivado/bin:$PATH
fi
export PATH

# Same treatment for LD_LIBRARY_PATH so the 64-bit ISE libraries resolve.
if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=/afs/ece/support/xilinx/xilinx.release/Vivado/ids_lite/ISE/lib/lin64
else
  LD_LIBRARY_PATH=/afs/ece/support/xilinx/xilinx.release/Vivado/ids_lite/ISE/lib/lin64:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH

# Always run from the directory containing this script; log everything
# to runme.log next to it.
HD_PWD=`dirname "$0"`
cd "$HD_PWD"

HD_LOG=runme.log
/bin/touch $HD_LOG

# ISEWrap.sh wraps each tool invocation (it receives the log file name
# followed by the command line to run).
ISEStep="./ISEWrap.sh"
# Run one flow step through the wrapper, appending stdout+stderr to the
# log, and stop the script if the step fails.
# NOTE(review): 'exit' runs after the successful '[ ]' test, so $? is 0 at
# that point and the script exits with status 0 even on failure -- this
# appears intentional in the generated script, but confirm before relying
# on the exit code.
EAStep()
{
     $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
     if [ $? -ne 0 ]
     then
         exit
     fi
}

# pre-commands:
/bin/touch .write_bitstream.begin.rst
EAStep vivado -log adau1761_test.vdi -applog -m64 -messageDb vivado.pb -mode batch -source adau1761_test.tcl -notrace
|
545/Atari7800
|
lab3sound/lab3sound.runs/impl_1/runme.sh
|
Shell
|
gpl-2.0
| 1,080 |
#!/bin/sh
# Build an OpenSCAD release DMG for macOS, sanity-check the resulting app
# bundle, and publish the image to a public Dropbox folder.
#
# Fix: the original used '[[ ... ]]', a bashism that fails under a strict
# POSIX /bin/sh (e.g. dash); the portable '[ ... ]' test is used instead.

VERSION=`date "+%Y.%m.%d"`
#VERSION=2010.05

# This is the same location as DEPLOYDIR in macosx-build-dependencies.sh
export MACOSX_DEPLOY_DIR=$PWD/../libraries/install

# Build the release; abort on failure.
`dirname $0`/release-common.sh -v $VERSION
if [ $? -ne 0 ]; then
  exit 1
fi

echo "Sanity check of the app bundle..."
`dirname $0`/macosx-sanity-check.py OpenSCAD.app/Contents/MacOS/OpenSCAD
if [ $? -ne 0 ]; then
  exit 1
fi

# Publish: copy the dated DMG and refresh the "latest" symlink.
cp OpenSCAD-$VERSION.dmg ~/Documents/Dropbox/Public
ln -sf OpenSCAD-$VERSION.dmg ~/Documents/Dropbox/Public/OpenSCAD-latest.dmg
echo "Upload in progress..."
|
theorbtwo/openscad-enhanced
|
scripts/publish-macosx.sh
|
Shell
|
gpl-2.0
| 567 |
#!/bin/bash
# Word List Manipulator (wlm)
# Version 0.9 last edit 13-05-2013 23:00
# Build: 0905
# NOTE: CURR_VERS/CURR_BUILD below parse lines 3 and 4 of this file with
# sed/cut, so nothing may be inserted above this comment.
# Credits to ;
# ============
# Gitsnik, because he's awesome :)
# Pureh@te as used and learned a lot from his wordlist_tool script.
# Members at unix.com, have always received expert help there.
# http://cfajohnson.com/shell/ssr/08-The-Dating-Game.shtml for datelist updates.
# Google ;)
# =============
# Google code source: http://code.google.com/p/wordlist-manipulator/source/browse/wlm
#
#FIXED SETTINGS
# Terminal color/attribute sequences used for all prompts and messages.
RED=$(tput setaf 1 && tput bold)
GREEN=$(tput setaf 2 && tput bold)
STAND=$(tput sgr0)
BLUE=$(tput setaf 6 && tput bold)
# Self-inspect this script for the version/build strings on lines 3 and 4.
CURR_VERS=$(sed -n 3p $0 | cut -c 11-13)
CURR_BUILD=$(sed -n 4p $0 | cut -c 10-13)
#
#Check if running as root
# Warn (but allow override) when not root: several operations below use
# sudo and may need elevated rights to write output files.
if [[ $UID -ne 0 ]]; then
echo "$GREEN$0$STAND should be run as root for best / most stable results."
echo -ne "Continue anyway ? y/n "
read proceed
if [[ "$proceed" == "y" || "$proceed" == "Y" ]] ; then
echo
else
exit 1
fi
fi
#
#
#--------------
# MENU ITEM 1
#==============
# CASE OPTIONS
###############
# Interactive sub-menu: change the letter case of words in a wordlist.
# f_inout (defined elsewhere) prompts for and sets $wlm_infile and
# $wlm_outfile; f_complete shows the result; f_menu returns to the
# main menu. Results are produced with sed/tr run under sudo.
function f_case {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Case options"
echo $STAND"--------------------"
echo "1 Change case of first letter
2 Change case of last letter
3 Change all lower case to upper case
4 Change all upper case to lower case
5 Invert case (lower to upper, upper to lower)
Q Back to menu
"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read case_menu
if [ "$case_menu" == "q" ] || [ "$case_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$case_menu" != [1-5] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_case
fi
#
# Option 1
# Changing first letter to lower or upper case
# --------------------------------------------
if [ $case_menu = "1" ] ; then
echo ""
echo $BLUE"Change first letter to lower case or upper case"$STAND
echo $STAND""
f_inout
echo -ne $STAND"Change all first letters to upper case or lower case ? U / L "$GREEN
read first_letter
until [[ "$first_letter" == "u" ]] || [[ "$first_letter" == "l" ]] || [[ "$first_letter" == "U" ]] || [[ "$first_letter" == "L" ]] ; do
echo -ne $RED"Please enter either U or L for upper or lower case$STAND U / L "$GREEN
read first_letter
done
echo $STAND"Working .."
# \l& / \u& are GNU sed extensions: lower/upper-case the matched character.
if [ "$first_letter" == "l" ] || [ "$first_letter" == "L" ] ; then
sudo sed 's/^./\l&/' $wlm_infile > $wlm_outfile
elif [ "$first_letter" == "u" ] || [ "$first_letter" == "U" ] ; then
sudo sed 's/^./\u&/' $wlm_infile > $wlm_outfile
fi
echo $STAND""
f_complete
#
# Option 2
# Changing last letter to lower or upper case
# -------------------------------------------
elif [ $case_menu = "2" ] ; then
echo ""
echo $BLUE"Change last letter to lower case or upper case"$STAND
echo $STAND""
f_inout
echo -ne $STAND"Change all last letters to upper case or lower case ? U / L "$GREEN
read last_letter
until [[ "$last_letter" == "u" ]] || [[ "$last_letter" == "l" ]] || [[ "$last_letter" == "U" ]] || [[ "$last_letter" == "L" ]] ; do
echo -ne $RED"Please enter either U or L for upper or lower case$STAND U / L "$GREEN
read last_letter
done
echo $STAND"Working .."
if [ "$last_letter" == "l" ] || [ "$last_letter" == "L" ] ; then
sudo sed 's/.$/\l&/' $wlm_infile > $wlm_outfile
elif [ "$last_letter" == "u" ] || [ "$last_letter" == "U" ] ; then
sudo sed 's/.$/\u&/' $wlm_infile > $wlm_outfile
fi
echo $STAND""
f_complete
#
# Option 3
# Change all lower case to upper case
# -----------------------------------
elif [ $case_menu = "3" ] ; then
echo ""
echo $BLUE"Change all lower case to Upper case"$STAND
echo $STAND""
f_inout
echo $STAND"Working .."
sudo tr '[:lower:]' '[:upper:]' < $wlm_infile > $wlm_outfile
echo $STAND""
f_complete
#
# Option 4
# Change all upper case to lower case
# -----------------------------------
elif [ $case_menu = "4" ] ; then
echo ""
echo $BLUE"Change all Upper case to lower case"$STAND
echo $STAND""
f_inout
echo $STAND"Working .."
sudo tr '[:upper:]' '[:lower:]' < $wlm_infile > $wlm_outfile
echo $STAND""
f_complete
#
# Option 5
# Invert case from original input
# --------------------------------
elif [ $case_menu = "5" ] ; then
echo ""
echo $BLUE"Invert case from original input"$STAND
echo $STAND""
f_inout
echo $STAND"Working .."
sudo tr 'a-z A-Z' 'A-Z a-z' < $wlm_infile > $wlm_outfile
echo $STAND""
f_complete
fi
}
#
#
#--------------
# MENU ITEM 2
#==============
# COMBINATION OPTIONS
#####################
# Interactive sub-menu: combine wordlists.
# Option 1 appends every word of a second list to every word of the first
# (cartesian concatenation via awk); option 2 concatenates every file in a
# directory into one list. f_inout/f_complete/f_menu defined elsewhere.
f_combine () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Combination options"
echo $STAND"--------------------"
echo "1 Combine words from 1 list to each word in another list
2 Combine all wordlists in a directory to 1 wordlist
Q Return to menu
"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read comb_menu
if [ "$comb_menu" == "q" ] || [ "$comb_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$comb_menu" != [1-2] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_combine
fi
#
# Option 1
# Combine words from 1 list to each word in another list
# ------------------------------------------------------
if [ "$comb_menu" == "1" ] ; then
echo ""
echo $BLUE"Combine words from one wordlist to all words in another wordlist"
echo $STAND""
echo -ne $STAND"Enter /path/to/wordlist to which you want words appended: "$GREEN
read comb_infile1
while [ ! -f $comb_infile1 ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read comb_infile1
done
echo -ne $STAND"Enter /path/to/wordlist to append to $BLUE$comb_infile1$STAND: "$GREEN
read comb_infile2
while [ ! -f $comb_infile2 ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read comb_infile2
done
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
if [ -f $wlm_outfile ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$wlm_outfile$RED will be overwritten"
sleep 1
else
echo $STAND"Process cancelled, returning to menu"
f_menu
fi
fi
echo $STAND"Working .."
# First pass (NR==FNR) loads $comb_infile2 into array l2; second pass
# prints every line of $comb_infile1 suffixed with each stored word.
sudo awk > $wlm_outfile 'NR == FNR {
l2[FNR] = $0
fnr = FNR; next
}
{
for (i = 0; ++i <= fnr;)
print $0 l2[i]
}' $comb_infile2 $comb_infile1
echo $STAND""
f_complete
#
# Option 2
# Combine all wordlists in a directory
# ------------------------------------
elif [ "$comb_menu" == "2" ] ; then
echo ""
echo $BLUE"Combine all wordlists in a directory to 1 wordlist."
echo $STAND""
echo -ne $STAND"Enter directory where the wordlists are stored \n(ie. /root/wordlists) : "$GREEN
read directory
while [ ! -d "$directory" ] || [ "$directory" == "" ] ; do
echo $RED"Directory does not exist or cannot be found"$STAND
echo -ne $STAND"Enter existing directory: "
read directory
done
# files_temp is only used to show the user what will be combined.
ls $directory > files_temp
echo $STAND"! Note that ALL files in directory $GREEN$directory$STAND will be combined;"$BLUE
cat files_temp
echo $STAND""
echo -ne $STAND"Continue or Quit ? C / Q "$GREEN
read go_for_it
if [ "$go_for_it" == "c" ] || [ "$go_for_it" == "C" ] ; then
rm files_temp
echo $STAND ""
else
echo $STAND""
echo "Quitting .."
rm files_temp
sleep 0.5
exit
fi
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
if [ -f $wlm_outfile ] ; then
echo -ne $RED"File already exists, add data to existing file ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $STAND"Working.."
sleep 1
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
# >> so an existing output file is appended to (per the prompt above).
sudo cat $directory/* >> "$wlm_outfile"
echo $STAND""
f_complete
fi
}
#
#
#--------------
# MENU ITEM 3
#============
# PREPENDING / PREFIXING OPTIONS
################################
# Interactive sub-menu: prepend (prefix) data to every word in a wordlist.
# Options 1/2 generate numeric prefixes with seq -f (variable vs fixed
# width); option 3 prefixes a literal string with sed.
# f_inout/f_complete/f_menu and the color variables are defined elsewhere.
#
# Fix: in option 2 the retry loop read into "$pref_numf" (the variable
# *named by* the bad path) instead of pref_numf itself, so a mistyped
# path looped forever; it now reads pref_numf.
f_prefix () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Prefix options"
echo $STAND"--------------------"
echo "1 Prefix numeric values in sequence to a wordlist (ie. 0 - 99999)
2 Prefix fixed number of numeric values in sequence to a wordlist (ie. 00000 - 99999)
3 Prefix word / characters to a wordlist
Q Back to menu"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read pref_menu
if [ "$pref_menu" == "q" ] || [ "$pref_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$pref_menu" != [1-3] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_prefix
fi
#
# Option 1
# Prefix numbers in sequence to a list
# ------------------------------------
if [ "$pref_menu" == "1" ] ; then
echo $STAND""
echo $BLUE"Prefix numeric values in sequence to a wordlist (ie. 0 - 99999)"
echo $STAND""
echo -ne $STAND"Enter /path/to/wordlist to prefix numbers to: "$GREEN
read pref_nums
while [ ! -f "$pref_nums" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read pref_nums
done
#Check if any '%' characters in the file which could cause errors
# ('%' would be interpreted by the seq -f format string below).
grep "%" $pref_nums > prefnums_errors
exist=$(sed -n '$=' prefnums_errors)
if [ "$exist" == "" ] ; then
rm prefnums_errors
elif [ "$exist" != "" ] ; then
echo $RED"Lines with '%' character exist in file which will not be processed"
echo -ne $STAND"View these lines ? y/n "$GREEN
read view
if [ "$view" == "y" ] || [ "$view" == "Y" ] ; then
cat prefnums_errors
else
echo $STAND""
fi
rm prefnums_errors
fi
#
#Enter output file to write the changes to
echo -ne $STAND"Enter desired output file name: "$GREEN
read pref_nums_out
if [ -f "$pref_nums_out" ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$pref_nums_out$RED will be overwritten"$STAND
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
echo -ne $STAND"Enter how many numeric values you want to Prefix (max 5): "$GREEN
read numbs
echo $STAND"Working .."
# seq -f "%01.0f$i": the generated number comes first, the word ($i) after.
if [ "$numbs" == 1 ] ; then
for i in $(cat $pref_nums); do seq -f "%01.0f$i" 0 9; done > "$pref_nums_out"
elif [ "$numbs" == 2 ] ; then
for i in $(cat $pref_nums); do seq -f "%01.0f$i" 0 99; done > "$pref_nums_out"
elif [ "$numbs" == 3 ] ; then
for i in $(cat $pref_nums); do seq -f "%01.0f$i" 0 999; done > "$pref_nums_out"
elif [ "$numbs" == 4 ] ; then
for i in $(cat $pref_nums); do seq -f "%01.0f$i" 0 9999; done > "$pref_nums_out"
elif [ "$numbs" == 5 ] ; then
for i in $(cat $pref_nums); do seq -f "%01.0f$i" 0 99999; done > "$pref_nums_out"
fi
echo $STAND""
echo "$GREEN$pref_nums_out$STAND has been created; "
head -n 3 $pref_nums_out
echo ".."
tail -n 3 $pref_nums_out
echo $STAND""
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
elif [ "$return" == "q" ] || [ "$return" == "Q" ]; then
echo $STAND""
exit
fi
#
# Option 2
# Prefix fixed number of numberic values to a list
# ------------------------------------------------
elif [ "$pref_menu" == "2" ] ; then
echo $STAND""
echo $BLUE"Prefix fixed number of numeric values in sequence to a wordlist (ie. 00000 - 99999)"
echo $STAND""
echo -ne $STAND"Enter /path/to/wordlist to prefix numbers to: "$GREEN
read pref_numf
while [ ! -f $pref_numf ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
# BUGFIX: was 'read $pref_numf' (indirect, never updated pref_numf),
# which made this loop infinite on a mistyped path.
read pref_numf
done
#Check if any '%' characters in the file which could cause errors
grep "%" $pref_numf > prefnumf_errors
exist=$(sed -n '$=' prefnumf_errors)
if [ "$exist" == "" ] ; then
rm prefnumf_errors
elif [ "$exist" != "" ] ; then
echo $RED"Lines with '%' character exist in file which will not be processed"
echo -ne $STAND"View these lines ? y/n "$GREEN
read view
if [ "$view" == "y" ] || [ "$view" == "Y" ] ; then
cat prefnumf_errors
else
echo $STAND""
fi
rm prefnumf_errors
fi
#
#Enter output file to write the changes to
echo -ne $STAND"Enter desired output file name: "$GREEN
read pref_numf_out
if [ -f $pref_numf_out ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$pref_numf_out$RED will be overwritten"$STAND
else
echo $STAND"Process cancelled, returning to menu "
sleep 1
f_menu
fi
fi
echo -ne $STAND"Enter how many numeric values you want to Prefix (max 5): "$GREEN
read numbf
echo $STAND"Working .."
# "%0$numbf.0f" zero-pads every generated number to a fixed width.
if [ "$numbf" == 1 ] ; then
for i in $(cat $pref_numf); do seq -f "%0$numbf.0f$i" 0 9; done > "$pref_numf_out"
elif [ "$numbf" == 2 ] ; then
for i in $(cat $pref_numf); do seq -f "%0$numbf.0f$i" 0 99; done > "$pref_numf_out"
elif [ "$numbf" == 3 ] ; then
for i in $(cat $pref_numf); do seq -f "%0$numbf.0f$i" 0 999; done > "$pref_numf_out"
elif [ "$numbf" == 4 ] ; then
for i in $(cat $pref_numf); do seq -f "%0$numbf.0f$i" 0 9999; done > "$pref_numf_out"
elif [ "$numbf" == 5 ] ; then
for i in $(cat $pref_numf); do seq -f "%0$numbf.0f$i" 0 99999; done > "$pref_numf_out"
fi
echo $STAND""
echo "$GREEN$pref_numf_out$STAND has been created; "
head -n 3 $pref_numf_out
echo ".."
tail -n 3 $pref_numf_out
echo $STAND""
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
elif [ "$return" == "q" ] || [ "$return" == "Q" ]; then
echo $STAND""
exit
fi
#
# Option 3
# Prefix word / characters to a list
# ----------------------------------
elif [ "$pref_menu" == "3" ] ; then
echo $STAND""
echo $BLUE"Prefix word / characters to a wordlist"
echo $STAND""
f_inout
echo -ne $STAND"Enter word/characters you want prefixed: "$GREEN
read pref_char
echo $STAND"Working .."
sudo sed "s/^./"$pref_char"&/" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
fi
}
#
#
#------------
# MENU ITEM 4
#============
# APPENDING / SUFFIXING OPTIONS
###############################
# Interactive sub-menu: append (suffix) data to every word in a wordlist.
# Mirrors f_prefix: options 1/2 generate numeric suffixes with seq -f
# (variable vs fixed width); option 3 appends a literal string with sed.
# f_inout/f_complete/f_menu and the color variables are defined elsewhere.
f_suffix () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Suffix options"
echo $STAND"--------------------"
echo "1 Suffix numeric values in sequence to a wordlist (ie. 0 - 99999)
2 Suffix fixed number of numeric values in sequence to a wordlist (ie. 00000 - 99999)
3 Suffix word / characters to a wordlist
Q Back to menu
"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read suf_menu
if [ "$suf_menu" == "q" ] || [ "$suf_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$suf_menu" != [1-3] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_suffix
fi
#
# Option 1
# Suffix numbers in sequence to a list
# ------------------------------------
if [ "$suf_menu" == "1" ] ; then
echo $STAND""
echo $BLUE"Suffix numeric values in sequence to a wordlist (ie. 0 - 99999)"
echo $STAND""
echo -ne $STAND"Enter /path/to/wordlist to suffix numbers to: "$GREEN
read suf_nums
while [ ! -f $suf_nums ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read suf_nums
done
#Check if any '%' characters in the file which could cause errors
# ('%' would be interpreted by the seq -f format string below).
grep "%" $suf_nums > sufnums_errors
exist=$(sed -n '$=' sufnums_errors)
if [[ "$exist" == "" ]] ; then
rm sufnums_errors
elif [ "$exist" != "" ] ; then
echo $RED"Lines with '%' character exist in file which will not be processed"
echo -ne $STAND"View these lines ? y/n "$GREEN
read view
if [ "$view" == "y" ] || [ "$view" == "Y" ] ; then
cat sufnums_errors
else
echo $STAND""
fi
rm sufnums_errors
fi
#Enter output file to write the changes to
echo -ne $STAND"Enter desired output file name: "$GREEN
read suf_nums_out
if [ -f $suf_nums_out ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$suf_nums_out$RED will be overwritten"$STAND
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
echo -ne $STAND"Enter how many numeric values you want to suffix (max 5): "$GREEN
read numbs
echo $STAND"Working .."
# seq -f "$i%01.0f": the word ($i) comes first, the generated number after.
if [ "$numbs" == 1 ] ; then
for i in $(cat $suf_nums); do seq -f "$i%01.0f" 0 9; done > "$suf_nums_out"
elif [ "$numbs" == 2 ] ; then
for i in $(cat $suf_nums); do seq -f "$i%01.0f" 0 99; done > "$suf_nums_out"
elif [ "$numbs" == 3 ] ; then
for i in $(cat $suf_nums); do seq -f "$i%01.0f" 0 999; done > "$suf_nums_out"
elif [ "$numbs" == 4 ] ; then
for i in $(cat $suf_nums); do seq -f "$i%01.0f" 0 9999; done > "$suf_nums_out"
elif [ "$numbs" == 5 ] ; then
for i in $(cat $suf_nums); do seq -f "$i%01.0f" 0 99999; done > "$suf_nums_out"
fi
echo $STAND""
echo "$GREEN$suf_nums_out$STAND has been created; "
head -n 3 $suf_nums_out
echo ".."
tail -n 3 $suf_nums_out
echo $STAND""
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
elif [ "$return" == "q" ] || [ "$return" == "Q" ]; then
echo $STAND""
exit
fi
#
# Option 2
# Suffix fixed number of numberic values to a list
# ------------------------------------------------
elif [ "$suf_menu" == "2" ] ; then
echo $STAND""
echo $BLUE"Suffix fixed number of numeric values in sequence to a wordlist (ie. 00000 - 99999)"
echo $STAND""
echo -ne $STAND"Enter /path/to/wordlist to suffix numbers to: "$GREEN
read suf_numf
while [ ! -f $suf_numf ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read suf_numf
done
#Check if any '%' characters in the file which could cause errors
grep "%" $suf_numf > sufnumf_errors
exist=$(sed -n '$=' sufnumf_errors)
if [ "$exist" == "" ] ; then
rm sufnumf_errors
elif [ "$exist" != "" ] ; then
echo $RED"Lines with '%' character exist in file which will not be processed"
echo -ne $STAND"View these lines ? y/n "$GREEN
read view
if [ "$view" == "y" ] || [ "$view" == "Y" ] ; then
cat sufnumf_errors
else
echo $STAND""
fi
rm sufnumf_errors
fi
#Enter output file to write the changes to
echo -ne $STAND"Enter desired output file name: "$GREEN
read suf_numf_out
if [ -f $suf_numf_out ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$suf_numf_out$RED will be overwritten"$STAND
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
echo -ne $STAND"Enter how many numeric values you want to Suffix (max 5): "$GREEN
read numbf
echo $STAND"Working .."
# "%0$numbf.0f" zero-pads every generated number to a fixed width.
if [ "$numbf" == 1 ] ; then
for i in $(cat $suf_numf); do seq -f "$i%0$numbf.0f" 0 9; done > "$suf_numf_out"
elif [ "$numbf" == 2 ] ; then
for i in $(cat $suf_numf); do seq -f "$i%0$numbf.0f" 0 99; done > "$suf_numf_out"
elif [ "$numbf" == 3 ] ; then
for i in $(cat $suf_numf); do seq -f "$i%0$numbf.0f" 0 999; done > "$suf_numf_out"
elif [ "$numbf" == 4 ] ; then
for i in $(cat $suf_numf); do seq -f "$i%0$numbf.0f" 0 9999; done > "$suf_numf_out"
elif [ "$numbf" == 5 ] ; then
for i in $(cat $suf_numf); do seq -f "$i%0$numbf.0f" 0 99999; done > "$suf_numf_out"
fi
echo $STAND""
echo "$GREEN$suf_numf_out$STAND has been created; "
head -n 3 $suf_numf_out
echo ".."
tail -n 3 $suf_numf_out
echo $STAND""
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
elif [ "$return" == "q" ] || [ "$return" == "Q" ]; then
echo $STAND""
exit
fi
#
# Option 3
# Suffix word / characters to a list
# ----------------------------------
elif [ "$suf_menu" == "3" ] ; then
echo $STAND""
echo $BLUE"Suffix word / characters to a wordlist"
echo $STAND""
f_inout
echo -ne $STAND"Enter word/characters you want suffixed: "$GREEN
read suf_char
echo $STAND"Working .."
sudo sed "s/.$/&"$suf_char"/" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
fi
}
#
#
#------------
# MENU ITEM 5
#============
# INCLUDING CHARACTERS /WORD
############################
# Interactive sub-menu: insert characters at a given offset inside each
# word, counted either from the start or from the end of the word.
# f_inout sets $wlm_infile / $wlm_outfile; insertion is done with sed.
f_inclu () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Inclusion options"
echo $STAND"--------------------"
echo "1 Include characters/word as from a certain position from START of word.
2 Include characters as from a certain position from END of word.
Q Back to menu
"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read incl_menu
if [ "$incl_menu" == "q" ] || [ "$incl_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$incl_menu" != [1-2] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_inclu
fi
#
# Option 1
# Include characters from start of word
# -------------------------------------
if [ "$incl_menu" == "1" ] ; then
echo $STAND""
echo $BLUE"Include characters/word as from a certain position from START of word"
echo $STAND""
f_inout
echo -ne $STAND"Enter the word/characters you want included in each word: "$GREEN
read inclu_char
echo -ne $STAND"Enter from what position (after how many characters)
the word/characters should be included: "$GREEN
read inclus_pos
echo $STAND"Working .."
# Match the first $inclus_pos chars and re-emit them (&) with the insert.
sudo sed "s/^.\{$inclus_pos\}/&$inclu_char/" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 2
# Include Characters
# ------------------
elif [ "$incl_menu" == "2" ] ; then
echo $STAND""
echo $BLUE"Include characters as from a certain position from END of word"
echo $STAND
f_inout
echo -ne $STAND"Enter the word/characters you want included in each word: "$GREEN
read inclu_char
echo -ne $STAND"Enter before what position (before how many characters before end of word)
the word/characters should be included: "$GREEN
read inclus_pos
echo $STAND"Working .."
# Anchor at end of line: insert before the last $inclus_pos characters.
sudo sed "s/.\{$inclus_pos\}$/$inclu_char&/" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
fi
}
#
#
#------------
# MENU ITEM 6
#============
# SUBSTITION OPTIONS
####################
# Interactive sub-menu: replace characters in each word of a wordlist --
# a fixed count from the start, from the end, or a span at an arbitrary
# position. f_inout sets $wlm_infile / $wlm_outfile; work is done by sed.
f_subs () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Substitution options"
echo $STAND"--------------------"
echo "1 Substitute/Replace characters from START of word.
2 Substitute/Replace characters from END of word.
3 Substitute/Replace characters at a certain position.
Q Back to menu
"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read subs_menu
if [ "$subs_menu" == "q" ] || [ "$subs_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$subs_menu" != [1-3] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_subs
fi
#
# Option 1
# Substitute characters from start of word
# ----------------------------------------
if [ "$subs_menu" == "1" ] ; then
echo $STAND""
echo $BLUE"Substitute/Replace characters from START of word"
echo $STAND""
f_inout
echo -ne $STAND"Enter the word/characters you want to replace substituted characters with: "$GREEN
read subs_char
echo -ne $STAND"Enter the number of characters from start of word to replace: "$GREEN
read subs_num
echo $STAND"Working .."
sudo sed "s/^.\{$subs_num\}/$subs_char/" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 2
# Substitute characters before end of word
# ----------------------------------------
elif [ "$subs_menu" == "2" ] ; then
echo $STAND""
echo $BLUE"Substitute/Replace characters from END of word"
echo $STAND""
f_inout
echo -ne $STAND"Enter the word/characters you want to replace the sustituted characters with: "$GREEN
read subs_char
echo -ne $STAND"Enter the number of characters at the end of word you want to replace: "$GREEN
read subs_num
echo $STAND"Working .."
sudo sed "s/.\{$subs_num\}$/$subs_char/" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 3
# Substitute / replace characters in a certain position
# -----------------------------------------------------
elif [ "$subs_menu" == "3" ] ; then
echo $STAND""
echo $BLUE"Substitute/Replace characters at a certain position"
echo $STAND""
f_inout
echo -ne $STAND"Enter the word/characters you want to replace the sustituted characters with: "$GREEN
read subs_char
echo -ne $STAND"Enter the start position of characters you want to replace (ie. 2)
(position 1 will start from 2nd character, position 4 will start from 5th character, etc): "$GREEN
read subs_poss
echo -ne $STAND"Enter how many characters after start position you want to replace (ie.2); "$GREEN
read subs_pose
echo $STAND"Working .."
# ERE (-r): keep the first $subs_poss chars (\1), replace the next
# $subs_pose chars with the user's string.
sudo sed -r "s/^(.{$subs_poss})(.{$subs_pose})/\1$subs_char/" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
fi
}
#
#
#------------
# MENU ITEM 7
#============
# OPTIMIZATION OPTIONS
######################
# Interactive sub-menu: clean up / optimize a wordlist.
# Uses pw-inspector (ships with THC-Hydra) for length filtering and
# dos2unix for line-ending normalization; f_inout sets the file names.
f_tidy () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Optimization options"
echo $STAND"--------------------"
echo "1 Full optimization of wordlist.
2 Optimize wordlist for WPA.
3 Sort wordlist on length of words.
Q Back to menu
"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read tidy_menu
if [ "$tidy_menu" == "q" ] || [ "$tidy_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$tidy_menu" != [1-3] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_tidy
fi
#
# Option 1
# Full optimization of wordlist
# -----------------------------
if [ "$tidy_menu" == "1" ] ; then
echo $STAND""
echo $BLUE"Full optimization of wordlist"
echo $STAND""
f_inout
##full optimize##
echo -en $STAND"Enter a minimum password length: "$GREEN
read min
echo -en $STAND"Enter a maximum password length: "$GREEN
read max
echo $STAND""
echo -en $STAND"Hit return to start processing the file "$STAND
read return
if [ "$return" == "" ]; then
echo $GREEN">$STAND Removing duplicates from the file.."
# uniq alone only drops *adjacent* duplicates; the sort|uniq pass at the
# end catches the remainder.
cat $wlm_infile | uniq > working.tmp
echo $GREEN">$STAND Deleting words which do not meet length requirement.."
pw-inspector -i working.tmp -o working1.tmp -m $min -M $max
echo $GREEN">$STAND Removing all non ascii chars if they exist.."
# Keep only tab (\11), newline (\12) and printable ASCII (\40-\176).
tr -cd '\11\12\40-\176' < working1.tmp > working.tmp
echo $GREEN">$STAND Removing all comments.."
sed '1p; /^[[:blank:]]*#/d; s/[[:blank:]][[:blank:]]*#.*//' working.tmp > working1.tmp
echo $GREEN">$STAND Removing any leading white spaces, tabs and CRLF from the file.."
sed -e 's/^[ \t]*//' working1.tmp > working.tmp
dos2unix -f -q working.tmp
echo $GREEN">$STAND One more pass to sort and remove any duplicates.."
cat working.tmp | sort | uniq > working1.tmp
sudo mv working1.tmp $wlm_outfile
echo $GREEN">$STAND Cleaning up temporary files.."
rm -rf working*.tmp
fi
echo $STAND""
f_complete
#
# Option 2
# Optimization of wordlist for WPA
# --------------------------------
elif [ "$tidy_menu" == "2" ] ; then
echo $STAND""
echo $BLUE"Optimization of wordlist for WPA/WPA2"
echo $STAND""
f_inout
echo "Working .."
# WPA/WPA2 passphrases must be 8-63 characters long.
pw-inspector -i $wlm_infile -o /root/temp_outfile -m 8 -M 63
sudo cat /root/temp_outfile | sort | uniq > $wlm_outfile
rm -rf /root/temp_outfile
echo $STAND""
f_complete
#
# Option 3
# --------
elif [ "$tidy_menu" == "3" ] ; then
echo $STAND""
echo $BLUE"Sort wordlist based on wordsize/length"$STAND
echo "(can speed up cracking process with some programmes)"
echo $STAND""
f_inout
echo "Working .."
# Prefix each line with its length, numeric-sort, then strip the prefix.
sudo awk '{ print length(), $0 | "sort -n" }' $wlm_infile | sed 's/[^ ]* //' > $wlm_outfile
f_complete
fi
}
#
#
#------------
# MENU ITEM 8
#============
# SPLIT FUNCTIONS
##################
# -----------------------------------------------------------------------------
# f_split - interactive sub-menu to split a wordlist into several smaller
# files, either by a maximum line count per file (option 1) or by a maximum
# size in megabytes per file (option 2).
# Relies on f_menu (defined elsewhere in this file) to return to the main
# menu, and on the colour variables $STAND/$BLUE/$GREEN/$RED set at startup.
# -----------------------------------------------------------------------------
f_split () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Split wordlists"
echo $STAND"--------------------"
echo "1 Split wordlists into user defined max linecount per split file.
2 Split wordlists into user defined max sizes per split file.
Q Back to menu
"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read split_menu
if [ "$split_menu" == "q" ] || [ "$split_menu" == "Q" ] ; then
echo $STAND""
f_menu
# BUGFIX: this menu only has options 1 and 2; the old pattern [1-3] silently
# accepted 3 and then fell straight through doing nothing.
elif [[ "$split_menu" != [1-2] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_split
fi
#
# Option 1
# Split files by linecount
#-------------------------
if [ "$split_menu" == "1" ] ; then
echo $STAND""
echo $BLUE"Split wordlists into user defined max linecount per split file"
echo $STAND""
echo -ne $STAND"Enter /path/to/wordlist to split : "$GREEN
read split_in
while [ ! -f "$split_in" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read split_in
done
#Enter the prefix used to name the generated split files
echo -ne $STAND"Enter output files' prefix: "$GREEN
read split_out
echo $STAND""
#
# Warn about (and optionally delete) files already matching the prefix,
# since 'split' would otherwise silently overwrite them.
echo "Checking for existing files in working directory with same pre-fix.."
sleep 0.5
find $split_out* > exist_temp
exist=$(sed -n '$=' exist_temp)
if [ "$exist" == "" ] ; then
echo $GREEN"No files with same prefix found in working directory, proceding.."
rm exist_temp
echo $STAND""
elif [ "$exist" != "" ] ; then
echo $RED"Files with same prefix found in working directory; "$STAND
cat exist_temp
echo $STAND""
echo -ne $STAND"Delete above files before proceding ? y/n "$GREEN
read delete
if [ "$delete" == "y" ] || [ "$delete" == "Y" ] ; then
echo $STAND"deleting existing files.."
sleep 0.5
echo $STAND""
# read line-by-line so filenames containing spaces are removed correctly
while IFS= read -r line ; do
rm "$line"
done < exist_temp
else
echo ""
echo $STAND"Returning to menu.."
rm exist_temp
sleep 1
f_split
fi
rm -f exist_temp
fi
#
# Report size and line count of the input list
B=$( stat -c %s "$split_in" )
KB=$( echo "scale=2;$B / 1024" | bc )
MB=$( echo "scale=2;($B/1024)/1024" | bc )
GB=$( echo "scale=2;(($B/1024)/1024)/1024" | bc )
echo -e $STAND"Wordlist $GREEN$split_in$STAND size: $KB KB$STAND $GREEN$MB MB$STAND $GB GB$STAND"
linecount=$(wc -l "$split_in" | cut -d " " -f 1)
echo "Wordlist $GREEN$split_in$STAND Linecount: $GREEN$linecount$STAND"
echo ""
echo -ne $STAND"Enter number of lines you want per each split file: "$GREEN
read lines_in
#Calculate the number of files that will result from the user's input.
est_count=$(echo "scale=3;$linecount / $lines_in" | bc)
# BUGFIX: glob matching needs [[ ]]; the old '[ "$est_count" != *.000 ]'
# compared against the literal string "*.000" and was therefore always true,
# over-estimating the file count by one whenever the division was exact.
if [[ "$est_count" == *.000 ]] ; then
size=$(echo "$linecount/$lines_in" | bc)
else
size=$(echo "$linecount/$lines_in+1" | bc)
fi
#
echo -ne $STAND"This will result in an estimated $GREEN$size$STAND files, continue ? y/n "$GREEN
read go_for_it
if [ "$go_for_it" == "y" ] || [ "$go_for_it" == "Y" ] ; then
echo ""
echo $STAND"Working .."
else echo $STAND"Quitting to menu"
sleep 0.5
f_split
fi
# Use as many numeric suffix digits as the estimated file count requires.
SFX=$(echo $size | wc -c)
SFX=$((SFX - 1))
split -a"$SFX" -d -l "$lines_in" "$split_in" "$split_out"
echo ""
ls $split_out* > split_out_temp
echo $STAND ""
echo $STAND"The following files have been created"
echo $STAND"-------------------------------------"$STAND
while IFS= read -r line ; do
B=$( stat -c %s "$line" )
KB=$( echo "scale=2;$B / 1024" | bc )
MB=$( echo "scale=2;($B/1024)/1024" | bc )
GB=$( echo "scale=2;(($B/1024)/1024)/1024" | bc )
echo -e "$GREEN$line$STAND $KB KB \t $GREEN$MB MB$STAND \t $GB GB$STAND"
done < split_out_temp
echo $STAND""
rm split_out_temp
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
f_menu
elif [ "$return" == "q" ] || [ "$return" == "Q" ] ; then
echo $STAND""
exit
fi
echo $STAND""
#
# Option 2
# Split files by size
#--------------------
elif [ "$split_menu" == "2" ] ; then
echo $STAND""
echo $BLUE"Split wordlists into user defined max size (in MB) per split file"
echo $STAND""
echo -ne $STAND"Enter /path/to/wordlist to split : "$GREEN
read split_in
while [ ! -f "$split_in" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read split_in
done
#Enter the prefix used to name the generated split files
echo -ne $STAND"Enter output files' prefix: "$GREEN
read split_out
echo $STAND""
#
# Warn about (and optionally delete) files already matching the prefix
echo "Checking for existing files in working directory with same pre-fix.."
sleep 0.5
find $split_out* > exist_temp
exist=$(sed -n '$=' exist_temp)
if [ "$exist" == "" ] ; then
echo $GREEN"No files with same prefix found in working directory, proceding.."
rm exist_temp
echo $STAND""
elif [ "$exist" != "" ] ; then
echo $RED"Files with same prefix found in working directory; "$STAND
cat exist_temp
echo $STAND""
echo -ne $STAND"Delete above files before proceding ? y/n "$GREEN
read delete
if [ "$delete" == "y" ] || [ "$delete" == "Y" ] ; then
echo $STAND"deleting existing files.."
sleep 0.5
echo $STAND""
# read line-by-line so filenames containing spaces are removed correctly
while IFS= read -r line ; do
rm "$line"
done < exist_temp
else
echo ""
echo $STAND"Returning to menu.."
rm exist_temp
sleep 1
# BUGFIX: return to this split menu; the old code jumped to f_misc (the
# miscellaneous menu) by mistake.
f_split
fi
rm -f exist_temp
fi
#Wordlist size
B=$( stat -c %s "$split_in" )
KB=$( echo "scale=2;$B / 1024" | bc )
MB=$( echo "scale=2;($B/1024)/1024" | bc )
GB=$( echo "scale=2;(($B/1024)/1024)/1024" | bc )
echo $STAND"File size of $GREEN$split_in$STAND ;"
echo $STAND"Bytes = $RED$B"
echo $STAND"Kilobytes = $RED$KB"
echo $STAND"Megabytes = $RED$MB"
echo $STAND"Gigabytes = $RED$GB"
echo $STAND""
echo -ne "Enter max size of each split file in Megabytes (whole numbers only!): "$GREEN
read split_size
est_size=$(echo "scale=3;$MB / $split_size" | bc)
# BUGFIX: [[ ]] glob match, as in option 1 above
if [[ "$est_size" == *.000 ]] ; then
size=$(echo "$MB/$split_size" | bc)
else
size=$(echo "$MB/$split_size+1" | bc)
fi
echo -ne $STAND"This will result in an estimated $GREEN$size$STAND files, continue ? y/n "$GREEN
read go_for_it
if [ "$go_for_it" == "y" ] || [ "$go_for_it" == "Y" ] ; then
echo ""
echo $STAND"Working .."
else echo $STAND"Quitting to menu"
sleep 1
f_split
fi
# -C keeps whole lines together while capping each chunk at split_size MB
split -d -C "$split_size"M "$split_in" "$split_out"
ls $split_out* > split_out_temp
echo $STAND ""
echo $STAND"The following files have been created"
echo $STAND"-------------------------------------"$STAND
while IFS= read -r line ; do
B=$( stat -c %s "$line" )
KB=$( echo "scale=2;$B / 1024" | bc )
MB=$( echo "scale=2;($B/1024)/1024" | bc )
GB=$( echo "scale=2;(($B/1024)/1024)/1024" | bc )
echo -e "$GREEN$line$STAND $KB KB \t $GREEN$MB MB$STAND \t $GB GB$STAND"
done < split_out_temp
echo $STAND""
rm split_out_temp
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
f_menu
elif [ "$return" == "q" ] || [ "$return" == "Q" ] ; then
echo $STAND""
exit
fi
echo $STAND""
fi
}
#
#
#------------
# MENU ITEM 9
#============
# REMOVAL / DELETION OPTIONS
############################
# -----------------------------------------------------------------------------
# f_delete - interactive sub-menu offering ten character/word removal options
# for a wordlist (strip leading/trailing characters, drop words containing or
# repeating certain characters, diff two lists, filter by length, etc.).
# Relies on f_inout (prompts for $wlm_infile / $wlm_outfile), f_complete and
# f_menu, all defined elsewhere in this file; colour vars set at startup.
# Most filters run under sudo, preserving the original behaviour.
# -----------------------------------------------------------------------------
f_delete () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Removal/Character removal options"
echo $STAND"---------------------------------"
echo "1 Remove X number of characters from start of word.
2 Remove X number of characters from end of word.
3 Remove specific characters globally from words.
4 Remove words containing specific characters.
5 Remove lines with X number of identical adjacent characters.
6 Remove lines existing in 1 list from another list.
(dont use on large lists, work in progress)
7 Remove words which do NOT have X number of numeric values.
8 Removing words which have X number of repeated characters.
9 Remove words of a certain length.
10 Remove characters from and including specified character.
Q Back to menu
"
#Check to ensure correct menu entry
echo -ne $STAND"Enter choice from above menu: "$GREEN
read del_menu
if [ "$del_menu" == "q" ] || [ "$del_menu" == "Q" ] ; then
echo $STAND""
f_menu
# BUGFIX: validate against the actual menu entries 1-10; the old numeric test
# '[[ "$del_menu" -gt 10 ]]' evaluated empty or non-numeric input as 0, so
# garbage input slipped past the check and the function exited silently.
elif [[ "$del_menu" != [1-9] && "$del_menu" != "10" ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_delete
fi
# Option 1
# Removing X number of characters from start of word
# --------------------------------------------------
if [ "$del_menu" == "1" ] ; then
echo $STAND""
echo $BLUE"Remove X number of characters from start of word"$STAND
echo $STAND""
f_inout
echo -ne $STAND"Enter how many characters you want to remove from start of word: "$GREEN
read dels_char
echo $STAND"Working .."
sudo sed "s/^.\{$dels_char\}//" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 2
# Removing X number of characters from end of word
# ------------------------------------------------
elif [ "$del_menu" == "2" ] ; then
echo $STAND""
echo $BLUE"Remove X number of characters from end of word"$STAND
echo $STAND""
f_inout
echo -ne "Enter how many characters you want to remove from end of word: "
read pos_char
echo $STAND"Working .."
sudo sed "s/.\{$pos_char\}$//" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 3
# Removing specific characters globally from wordlist
# ---------------------------------------------------
elif [ "$del_menu" == "3" ] ; then
echo $STAND""
echo $BLUE"Remove specific character globally from words in wordlist"
echo $STAND""
f_inout
echo -ne "Enter the character you want removed globally from wordlist: "$GREEN
read char_remove
# grep -c counts matching lines directly (no temp file needed); '--' and
# quoting keep characters such as '-' or '*' from being parsed as options
# or globs.
rem_count=$(grep -c -- "$char_remove" "$wlm_infile")
if [ "$rem_count" == "0" ] ; then
echo $STAND"Character $GREEN$char_remove$STAND was not found in $GREEN$wlm_infile$STAND."
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
f_delete
elif [ "$return" == "q" ] || [ "$return" == "Q" ] ; then
echo $STAND""
exit
fi
fi
echo $STAND"Working .."
# NOTE(review): characters special to sed (/ . * \) are not escaped here;
# entering one of those will alter the substitution - confirm acceptable.
sudo sed "s/$char_remove//g" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 4
# Removing words containing specific characters from wordlist
# -----------------------------------------------------------
elif [ "$del_menu" == "4" ] ; then
echo $STAND""
echo $BLUE"Remove words containing specific character from wordlist"
echo $STAND""
f_inout
echo -ne $STAND"Enter the character to check for: "$GREEN
read char_remove
# Count affected lines up front so the user can be told how many go.
rem_count=$(grep -c -- "$char_remove" "$wlm_infile")
if [ "$rem_count" == "0" ] ; then
echo $STAND"Character $GREEN$char_remove$STAND was not found in $GREEN$wlm_infile$STAND"
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
f_delete
elif [ "$return" == "q" ] || [ "$return" == "Q" ] ; then
echo $STAND""
exit
fi
fi
echo "$GREEN$rem_count$STAND words will be removed."
echo $STAND"Working .."
# NOTE(review): $char_remove is used unescaped as a sed address pattern
sudo sed "/$char_remove/d" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 5
# Remove words with more than X number of identical adjacent characters from wordlist
# -----------------------------------------------------------------------------------
elif [ "$del_menu" == "5" ] ; then
echo $STAND""
echo $BLUE"Remove words with more than X number of identical adjacent charaters from wordlist"
echo $STAND""
f_inout
echo -ne $STAND"Enter how many identical adjacent characters should be allowed: "$GREEN
read ident_numb
echo $STAND"Working .."
# Delete any line where one character (captured in \1) repeats more than
# $ident_numb times in a row.
sudo sed "/\([^A-Za-z0-9_]\|[A-Za-z0-9]\)\1\{$ident_numb,\}/d" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 6
# Remove words existing in one list from another list
# ---------------------------------------------------
elif [ "$del_menu" == "6" ] ; then
echo $STAND""
echo $BLUE"Remove words existing in 1 list from another list"
echo "Very simple/bad coding on this..use on SMALL files only"
echo $STAND""
#Enter wordlist file to process
echo -ne $STAND"Enter /path/to/wordlist to process: "$GREEN
read wlm_infile
while [ ! -f "$wlm_infile" ] || [ "$wlm_infile" == "" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read wlm_infile
done
echo $STAND"Enter /path/to/wordlist which contains the words to check for"
echo -ne $STAND"(Words in this list will be removed from wordlist to process): "$GREEN
read read_in
#Enter output file to write the changes to
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
if [ -f "$wlm_outfile" ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$wlm_outfile$RED will be overwritten"$STAND
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
echo "Working .."
# -x whole-line match, -f pattern file, -v invert: keep only lines of
# $wlm_infile that do not appear in $read_in.
sudo grep -v -x -f "$read_in" "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 7
# Removing words which do not have X number of numeric values
# -----------------------------------------------------------
elif [ "$del_menu" == "7" ] ; then
echo $STAND""
echo $BLUE"Remove words which do not have X number of numeric values"
echo $STAND""
f_inout
echo -ne $STAND"Enter how many numeric values should be allowed: "$GREEN
read ident_numb
echo $STAND"Working .."
# BUGFIX: use awk (nawk is absent on most Linux systems) and pass the count
# with -v instead of splicing it into the program text.  gsub() returns the
# number of digit substitutions, i.e. the number of digits in the line.
sudo awk -v n="$ident_numb" 'gsub("[0-9]","&",$0)==n' "$wlm_infile" > "$wlm_outfile"
echo $STAND""
f_complete
#
# Option 8
# Removing words which have N number of repeated characters
# ----------------------------------------------------------
elif [ "$del_menu" == "8" ] ; then
echo $STAND""
echo $BLUE"Remove words which have X number of repeated characters"
echo $STAND""
f_inout
#Enter characters to check for
echo $STAND"Enter the character you don't want repeated more than N times"
echo -ne "(Hit enter for any character): " $GREEN
read rep_char
if [ "$rep_char" == "" ] ; then
#Enter how many times it may occur in the words
echo -ne $STAND"How many times may characters be repeated in a word: "$GREEN
read rep_time
echo ""
echo "Working.."
# Drop lines where any character (adjacent or not) recurs > $rep_time times
sudo sed "/\(.\)\(.*\1\)\{$rep_time,\}/d" "$wlm_infile" > "$wlm_outfile"
fi
if [ "$rep_char" != "" ] ; then
echo -ne $STAND"How many times may $GREEN$rep_char$STAND be repeated in a word: "$GREEN
read rep_time
echo ""
echo $STAND"Working.."
sudo sed "/\($rep_char\)\(.*\1\)\{$rep_time,\}/d" "$wlm_infile" > "$wlm_outfile"
fi
echo $STAND""
f_complete
#
# Option 9
# Removing words which have a certain length
# ------------------------------------------
elif [ "$del_menu" == "9" ] ; then
echo $STAND""
echo $BLUE"Remove all words with X length from the list"
echo $STAND""
f_inout
#Enter the length of words you want removed
echo -ne $STAND"Enter the length of words you want removed from wordlist: "$GREEN
read LEN_REM
echo $STAND"Working.."
# Pass the length via -v rather than interpolating it into the awk program
sudo awk -v n="$LEN_REM" 'length != n' "$wlm_infile" > "$wlm_outfile"
echo ""
f_complete
#
# Option 10
# Remove all characters after and including a certain character
# example: password:12345, remove as from : --> password
# -------------------------------------------------------------
elif [ "$del_menu" == "10" ] ; then
echo $STAND""
echo $BLUE"Remove all characters after and including a specified character"
echo $STAND""
f_inout
#Enter the character from which the remainder should be stripped
echo -ne $STAND"Enter the character from when you want the rest removed: "$GREEN
read REM_REM
echo $STAND"Working.."
# NOTE(review): $REM_REM is used unescaped inside the sed pattern
sudo sed "s/\($REM_REM\).*//" "$wlm_infile" > "$wlm_outfile"
echo ""
f_complete
fi
}
#
#
#-------------
# MENU ITEM 10
#=============
# MISCELLANEOUS OPTIONS
#######################
f_misc () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Miscellaneous Fun"
echo $STAND"--------------------"
echo "1 Check what size a crunch created wordlist would be.
2 Create a date wordlist
3 Strip SSIDs from a kismet generated .nettxt file.
4 Basic leetify options for wordlists.
5 Create all possible (leetify) permutations of a wordlist (Gitsnik's permute.pl).
6 Decode / Encode text with ROT18
7 Decode / Encode text with ROT47
8 Check all possible shift values to decode a Caesar cipher shifted text
Q Back to menu
"
echo -ne $STAND"Enter choice from above menu: "$GREEN
read misc_menu
if [ "$misc_menu" == "q" ] || [ "$misc_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$misc_menu" != [1-8] ]]; then
echo $RED"must be an entry from the above menu $STAND"
sleep 1
f_misc
fi
##
## Option 1
## CRUNCH_SIZE
##============
if [ "$misc_menu" == "1" ] ; then
clear
echo $BLUE"Crunch_Size ;)"$STAND
echo $STAND"Check what size a newly created wordlist would be"
echo "when creating a wordlist with for instance 'crunch'."
echo "This only calculates based on the same min max word length"
echo $STAND""
echo $STAND"Choose the number of characters that will be used making the wordlist"
echo $STAND"====================================================================="
echo "Example ;"
echo $RED"10 $STAND = Numeric only"
echo $RED"16 $STAND = Hexadecimal"
echo $RED"26 $STAND = Alpha only"
echo $RED"33 $STAND = Special characters including space"
echo $RED"36 $STAND = Alpha + Numeric"
echo $RED"52 $STAND = Lowercase+Uppercase alpha"
echo $RED"62 $STAND = Lower+Uppercase alpha + Numeric"
echo $RED"95 $STAND = Lower+Uppercase alpha +Numeric+SpecialCharacters including space"
echo
echo -ne $STAND"Enter number of characters to be used: "$RED
read X
echo -ne $STAND"Enter length of words/passphrases: "$RED
read Y
echo $STAND"How many passwords/second can your system handle ?"$STAND
echo -ne $STAND"(or hit Enter to simply ignore this query) "$RED
read passec
# Calculations based on binary sizes ;
# For comma seperated values for groups of 3 digits pipe the below calculation out through sed ;
# sed -r ':L;s=\b([0-9]+)([0-9]{3})\b=\1,\2=g;t L'
B=$( echo "scale=3;($X^$Y)*($Y+1)" | bc )
KB=$( echo "scale=3;($X^$Y)*($Y+1) / 1024" | bc )
MB=$( echo "scale=3;(($X^$Y)*($Y+1)/1024)/1024" | bc )
GB=$( echo "scale=3;((($X^$Y)*($Y+1)/1024)/1024)/1024" | bc )
TB=$( echo "scale=3;(((($X^$Y)*($Y+1)/1024)/1024)/1024)/1024" | bc )
PB=$( echo "scale=3;((((($X^$Y)*($Y+1)/1024)/1024)/1024)/1024)/1024" | bc )
#
# Calculation for number of results ;
# For comma seperated values for groups of 3 digits pipe the below calculation out through sed ;
# sed -r ':L;s=\b([0-9]+)([0-9]{3})\b=\1,\2=g;t L'
NMBR=$( echo "($X^$Y)" | bc )
echo $STAND""
#
# Outcome of calculations ;
if [ "$passec" == "" ] ; then
echo $STAND"Estimated number of words/passphrases in wordlist: $GREEN$NMBR$STAND"
echo $STAND""
elif [ "$passec" != "" ] ; then
hours=$( echo "scale=2;((($NMBR/$passec)/60)/60)" | bc )
days=$( echo "scale=2;(((($NMBR/$passec)/60)/60)/24)" | bc )
echo $STAND"Estimated number of words/passphrases in wordlist: $GREEN$NMBR$STAND"
echo $STAND"Estimated duration to go through full list: $GREEN$hours$STAND hours ($GREEN$days$STAND days)"
echo $STAND""
fi
#
echo $STAND"Estimated wordlist size ; "
echo $GREEN"B $STAND(Bytes) = $GREEN$B"
echo $GREEN"KB $STAND(Kilobytes) = $GREEN$KB"
echo $GREEN"MB $STAND(Megabytes) = $GREEN$MB"
echo $GREEN"GB $STAND(Gigabytes) = $GREEN$GB"
echo $GREEN"TB $STAND(Terabytes) = $GREEN$TB"
echo $GREEN"PB $STAND(Petabytes) = $GREEN$PB"
echo $STAND""
#
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
elif [ "$return" == "q" ] || [ "$return" == "Q" ] ; then
echo $STAND""
exit
fi
##
## Option 2
## Datelist
## ========
elif [ "$misc_menu" == "2" ] ; then
clear
echo $BLUE"TAPE's
| | | | | (_) | |
__| | __ _| |_ ___| |_ ___| |_
/ _ |/ _ | __|/ _ \ | / __| __|
| (_| | (_| | |_ | __/ | \__ \ |_
\____|\____|\__|\___|_|_|___/\__|
v0.7a$STAND"
echo $BLUE"30 days hath September, April, June and November.."
echo $STAND""
echo $BLUE"Create a wordlist from a range of dates"
echo $STAND"======================================="
#Enter startdate
echo -ne $STAND"Enter startdate in format yyyy-mm-dd: "$GREEN
read startdate
startyear=$(echo $startdate | cut -d - -f 1)
startmonth=$(echo $startdate | cut -d - -f 2)
startday=$(echo $startdate | cut -d - -f 3)
#Check for incorrect start date entry
syear_len=$(echo "$startyear" | wc -L)
if [[ "$syear_len" -ne 4 ]] ; then
echo $RED"Begin year error: $startyear$STAND, year entries must have 4 digits"
sleep 2
f_misc
fi
if [[ "$startmonth" -lt "01" || "$startmonth" -gt "12" ]] ; then
echo $RED"Begin month error: $startmonth$STAND, months can only be between 01 - 12"
sleep 2
f_misc
fi
if [[ "$startday" -lt "01" || "$startday" -gt "31" ]] ; then
echo $RED"Begin day error: $startday$STAND, days can only be between 01 - 31"
sleep 2
f_misc
fi
#
#Enter enddate
echo -ne $STAND"Enter enddate in formate yyyy-mm-dd: "$GREEN
read enddate
endyear=$(echo $enddate | cut -d - -f 1)
endmonth=$(echo $enddate | cut -d - -f 2)
endday=$(echo $enddate | cut -d - -f 3)
#Check for incorrect end date entry
eyear_len=$(echo "$endyear" | wc -L)
if [[ "$eyear_len" -ne 4 ]] ; then
echo $RED"End year error: $endyear$STAND, year entries must have 4 digits"
sleep 2
f_misc
fi
if [[ "$endmonth" -lt "01" || "$endmonth" -gt "12" ]] ; then
echo $RED"End month error: $endmonth$STAND, months can only be between 01 - 12"
sleep 2
f_misc
fi
if [[ "$endday" -lt "01" || "$endday" -gt "31" ]] ; then
echo $RED"End day error: $endday$STAND, days can only be between 01 - 31"
sleep 2
f_misc
fi
#
#
#
# Output file to save the date wordlist to
echo -ne $STAND"Enter desired output file name: "$GREEN
read date_outfile
while [ "$date_outfile" == "" ] ; do
echo -ne $RED"Enter desired output file name: "$GREEN
read date_outfile
done
if [ -f $date_outfile ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$date_outfile$RED will be overwritten$STAND"
else
echo $STAND"Process cancelled, quitting"
sleep 1
exit
fi
fi
#
#
#
# Desired output format
echo $STAND""
echo $STAND"Enter desired output format as below;"
echo -ne $STAND"ddmmyy / ddmmyyyy / mmddyy / mmddyyyy / yymmdd / yyyymmdd: "$GREEN
read format
until [ "$format" == "ddmmyy" ] || [ "$format" == "ddmmyyyy" ] || [ "$format" == "yymmdd" ] || [ "$format" == "yyyymmdd" ] || [ "$format" == "mmddyyyy" ] || [ "$format" == "mmddyy" ]; do
echo $RED"Please enter a correct output format;"
echo -ne $STAND"ddmmyy / ddmmyyyy / mmddyy / mmddyyyy / yymmdd / yyyymmdd: "$GREEN
read format
done
#
#
#
# Desired Spacing character, if any
echo $STAND""
echo -ne $STAND"Enter spacing character or hit enter for no spacing character: "$GREEN
read space
echo $STAND"Working .."
#
#
#
#List the years
echo $startyear > dates_years
while [ "$startyear" != "$endyear" ] ; do
startyear=$(expr $startyear + 1)
echo $startyear >> dates_years
done
#
#
#
echo "$GREEN>$STAND Listing range of years and months .."
#Add a '-' spacer to simplify later manipulations
sed 's/^.\{4\}/&-/' -i dates_years
#Add months to list of years
for i in $(cat dates_years) ; do seq -f $i%02.0f 01 12 ; done > dates_months
sed 's/.$/&-/' -i dates_months
#
#
#
#Add days to list of years & months
echo "$GREEN>$STAND Checking for leapyears and listing correct days per month .."
for i in $(cat dates_months)
do
mnth=$(echo $i | cut -d - -f 2)
year=$(echo $i | cut -d - -f 1)
if [[ "$mnth" == "02" ]] ; then
if [[ `expr "$year" % 4` == 0 && `expr "$year" % 100` != 0 ]] ; then
seq -f $i%02.0f 01 29
elif [[ `expr "$year" % 4` == 0 && `expr "$year" % 100` != 0 && `expr "$year" % 400` == 0 ]] ; then
seq -f $i%02.0f 01 29
else
seq -f $i%02.0f 01 28
fi
elif [[ "$mnth" == "04" || "$mnth" == "06" || "$mnth" == "09" || "$mnth" == "11" ]] ; then
seq -f $i%02.0f 01 30
elif [[ "$mnth" == "01" || "$mnth" == "03" || "$mnth" == "05" || "$mnth" == "07" || "$mnth" == "08" || "$mnth" == "10"|| "$mnth" == "12" ]] ; then
seq -f $i%02.0f 01 31
fi
done > datelist_temp
#
#
#
#Remove dates before/after start/end date.
sed -n "/$startdate/,/$enddate/p" datelist_temp > date_list1_temp
#
#
# Ensure correct format and spacing character in output
echo "$GREEN>$STAND Creating desired format with spacing character (if any) .. "
# format ddmmyy
if [ "$format" == "ddmmyy" ] ; then
if [ -n "$space" ] && [ "$space" == "/" ] ; then
sed 's/^..//' -i date_list1_temp
awk -F- '{print $3 $2 $1}' date_list1_temp > dates_sort.txt
sudo sed 's/\(.\{2\}\)/&\//;s/\(.\{5\}\)/&\//' dates_sort.txt > "$date_outfile"
rm date_list1_temp && rm dates_sort.txt
elif [ -n "$space" ] ; then
sed 's/^..//' -i date_list1_temp
awk -F- '{print $3 $2 $1}' date_list1_temp > dates_sort.txt
sudo sed "s/\(.\{2\}\)/&"$space"/;s/\(.\{5\}\)/&$space/" dates_sort.txt > "$date_outfile"
rm date_list1_temp && rm dates_sort.txt
elif [ -z "$space" ] ; then
sed 's/^..//' -i date_list1_temp
sudo awk -F- '{print $3 $2 $1}' date_list1_temp > "$date_outfile"
rm date_list1_temp
fi
# format ddmmyyyy
elif [ "$format" == "ddmmyyyy" ] ; then
if [ -n "$space" ] && [ "$space" == "/" ] ; then
awk -F- '{print $3 $2 $1}' date_list1_temp > dates_sort.txt
sudo sed 's/\(.\{2\}\)/&\//;s/\(.\{5\}\)/&\//' dates_sort.txt > "$date_outfile"
rm date_list1_temp && rm dates_sort.txt
elif [ -n "$space" ] ; then
awk -F- '{print $3 $2 $1}' date_list1_temp > dates_sort.txt
sudo sed "s/\(.\{2\}\)/&"$space"/;s/\(.\{5\}\)/&$space/" dates_sort.txt > "$date_outfile"
rm date_list1_temp && rm dates_sort.txt
elif [ -z "$space" ] ; then
sudo awk -F- '{print $3 $2 $1}' date_list1_temp > "$date_outfile"
rm date_list1_temp
fi
# format yymmdd
elif [ "$format" == "yymmdd" ] ; then
if [ -n "$space" ] && [ "$space" == "/" ] ; then
sed 's/^..//' -i date_list1_temp
sudo sed 's/-/\//g' date_list1_temp > "$date_outfile"
rm date_list1_temp
elif [ -n "$space" ] ; then
sed 's/^..//' -i date_list1_temp
sudo sed "s/-/$space/g" date_list1_temp > "$date_outfile"
rm date_list1_temp
elif [ -z "$space" ] ; then
sed 's/^..//' -i date_list1_temp
sudo awk -F- '{print $1 $2 $3}' date_list1_temp > "$date_outfile"
rm date_list1_temp
fi
# format yyyymmdd
elif [ "$format" == "yyyymmdd" ] ; then
if [ -n "$space" ] && [ "$space" == "/" ] ; then
sudo sed 's/-/\//g' date_list1_temp > "$date_outfile"
rm date_list1_temp
elif [ -n "$space" ] ; then
sudo sed "s/-/$space/g" date_list1_temp > "$date_outfile"
rm date_list1_temp
elif [ -z "$space" ] ; then
sudo awk -F- '{print $1 $2 $3}' date_list1_temp > "$date_outfile"
rm date_list1_temp
fi
#format mmddyyyy
elif [ "$format" == "mmddyyyy" ] ; then
if [ -n "$space" ] && [ "$space" == "/" ] ; then
awk -F- '{print $2 $3 $1}' date_list1_temp > dates_sort.txt
sudo sed 's/\(.\{2\}\)/&\//;s/\(.\{5\}\)/&\//' dates_sort.txt > "$date_outfile"
rm date_list1_temp && rm dates_sort.txt
elif [ -n "$space" ] ; then
awk -F- '{print $2 $3 $1}' date_list1_temp > dates_sort.txt
sudo sed "s/\(.\{2\}\)/&"$space"/;s/\(.\{5\}\)/&$space/" dates_sort.txt > "$date_outfile"
rm date_list1_temp && rm dates_sort.txt
elif [ -z "$space" ] ; then
sudo awk -F- '{print $2 $3 $1}' date_list1_temp > "$date_outfile"
rm date_list1_temp
fi
#format mmddyy
elif [ "$format" == "mmddyy" ] ; then
if [ -n "$space" ] && [ "$space" == "/" ] ; then
sed 's/^..//' -i date_list1_temp
awk -F- '{print $2 $3 $1}' date_list1_temp > dates_sort.txt
sudo sed 's/\(.\{2\}\)/&\//;s/\(.\{5\}\)/&\//' dates_sort.txt > "$date_outfile"
rm date_list1_temp && rm dates_sort.txt
elif [ -n "$space" ] ; then
sed 's/^..//' -i date_list1_temp
awk -F- '{print $2 $3 $1}' date_list1_temp > dates_sort.txt
sudo sed "s/\(.\{2\}\)/&"$space"/;s/\(.\{5\}\)/&$space/" dates_sort.txt > "$date_outfile"
rm date_list1_temp && rm dates_sort.txt
elif [ -z "$space" ] ; then
sed 's/^..//' -i date_list1_temp
sudo awk -F- '{print $2 $3 $1}' date_list1_temp > "$date_outfile"
rm date_list1_temp
fi
fi
# Remove created temp files
echo "$GREEN>$STAND Tidying up .."
rm dates_years
rm dates_months
rm datelist_temp
#
echo $STAND""
echo "Datelist $GREEN$date_outfile$STAND has been created ;"
head -n 3 $date_outfile
echo ".."
tail -n 3 $date_outfile
echo $STAND ""
#
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
f_menu
elif [ "$return" == "q" ] || [ "$return" == "Q" ] ; then
echo $STAND""
exit
fi
##
## Option 3
## ========
elif [ "$misc_menu" == "3" ] ; then
clear
echo $BLUE" _____ _____ _____ _____ _ _
/ ____|/ ____|_ _| __ \ | | (_)
| (___ | (___ | | | | | |___| |_ _ __ _ _ __
\___ \ \___ \ | | | | | / __| __| '__| | '_ \\
____) |____) |_| |_| |__| \__ \ |_| | | | |_) |
|_____/|_____/|_____|_____/|___/\__|_| |_| .__/
v0.2.1 by TAPE | |
|_|$STAND
Strip SSIDs from kismet generated .nettxt files"
echo $STAND""
echo -ne $STAND"Enter /path/to/file.nettxt to process: "$GREEN
read ssid_infile
while [ ! -f $ssid_infile ] || [ "$ssid_infile" == "" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read ssid_infile
done
echo -ne $STAND"Enter desired output file name: "$GREEN
read ssid_outfile
if [ -f $ssid_outfile ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$ssid_outfile$RED will be overwritten"$STAND
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
echo "Working .."
#stripping the SSIDs from nettxt file
sudo grep SSID $ssid_infile | egrep -v 'BSSID|SSID [0-9]' | cut -c 18- | sed 's/"//g' | sed 's/ *$//g' | sort -fu > $ssid_outfile
WC=$(cat $ssid_outfile | wc -l)
echo $STAND""
echo "$GREEN$ssid_outfile$STAND has been created with $GREEN$WC$STAND entries;"
head -n 3 $ssid_outfile
echo ".."
tail -n 3 $ssid_outfile
echo ""
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
f_menu
elif [ "$return" == "q" ] || [ "$return" == "Q" ] ; then
echo $STAND""
exit
fi
echo $STAND""
##
##
## Option 4
## ========
elif [ "$misc_menu" == "4" ] ; then
clear
echo $BLUE"Basic leetifying options of wordlist"$STAND
echo "------------------------------------"
echo
f_inout
#
echo $STAND""
echo "Enter alteration set to use to leetify wordlist"
echo "(For more simultaneous alterations, see Option 5)"
echo "------------------------------------------------------------------------------
aA bB cC dD eE fF gG hH iI jJ kK lL mM nN oO pP qQ rR sS tT uU vV wW xX yY zZ
1) @4 bB cC dD 33 fF 9G hH iI jJ kK 11 mM nN 00 pP qQ rR 5\$ 77 uU vV wW xX yY zZ
2) 44 68 cC dD 33 fF 9G hH iI jJ kK 11 mM nN 00 pP qQ rR 55 77 uU vV wW xX yY 22
3) @4 b8 cC dD 33 fF 9G hH iI jJ kK 11 mM nN 00 pP qQ rR 5\$ 77 uU vV wW xX yY 22
4) @@ bB cC dD 33 fF gG hH iI jJ kK 11 mM nN 00 pP qQ rR \$\$ 77 uU vV wW xX yY zZ
5) 44 bB cC dD 33 fF gG hH iI jJ kK 11 mM nN 00 pP qQ rR \$\$ 77 uU vV wW xX yY zZ
6) 44 bB cC dD 33 fF gG hH iI jJ kK 11 mM nN 00 pP qQ rR 55 77 uU vV wW xX yY zZ
"
echo -ne $STAND"Enter choice from above menu (1-6): "$GREEN
read char_menu
if [ "$char_menu" == "q" ] || [ "$char_menu" == "Q" ] ; then
echo $STAND""
f_menu
elif [[ "$char_menu" != [1-6] ]]; then
echo $RED"must be an entry from the above charset menu $STAND"
sleep 1
f_misc
fi
echo "Working .."
if [ "$char_menu" == "1" ] ; then
echo "Leetified wordlist using charset;
@4 bB cC dD 33 fF 9G hH iI jJ kK 11 mM nN 00 pP qQ rR 5\$ 77 uU vV wW xX yY zZ
-----------------------------------------------------------------------------" > $wlm_outfile
for i in $(cat $wlm_infile) ; do
echo $i | sed -e 's/a/@/g' -e 's/A/4/g' -e 's/e/3/g' -e 's/E/3/g' -e 's/l/1/g' -e 's/L/1/g' -e 's/o/0/g' -e 's/O/0/g' -e 's/s/5/g' -e 's/S/\$/g' -e 's/t/7/g' -e 's/T/7/g' >> "$wlm_outfile"
done
elif [ "$char_menu" == "2" ] ; then
echo "Leetified wordlist using charset;
44 68 cC dD 33 fF 9G hH iI jJ kK 11 mM nN 00 pP qQ rR 55 77 uU vV wW xX yY 22
-----------------------------------------------------------------------------" > $wlm_outfile
for i in $(cat $wlm_infile) ; do
echo $i | sed -e 's/a/4/g' -e 's/A/4/g' -e 's/b/6/g' -e 's/B/8/g' -e 's/e/3/g' -e 's/E/3/g' -e 's/g/9/g' -e 's/l/1/g' -e 's/L/1/g' -e 's/o/0/g' -e 's/O/0/g' -e 's/s/5/g' -e 's/S/5/g' -e 's/t/7/g' -e 's/T/7/g' -e 's/z/2/g' -e 's/Z/2/g' >> "$wlm_outfile"
done
elif [ "$char_menu" == "3" ] ; then
echo "Leetified wordlist using charset;
@4 b8 cC dD 33 fF 9G hH iI jJ kK 11 mM nN 00 pP qQ rR 5\$ 77 uU vV wW xX yY 22
-----------------------------------------------------------------------------" > $wlm_outfile
for i in $(cat $wlm_infile) ; do
echo $i | sed -e 's/a/@/g' -e 's/A/4/g' -e 's/B/8/g' -e 's/e/3/g' -e 's/E/3/g' -e 's/g/9/g' -e 's/l/1/g' -e 's/L/1/g' -e 's/o/0/g' -e 's/O/0/g' -e 's/s/5/g' -e 's/S/\$/g' -e 's/t/7/g' -e 's/T/7/g' -e 's/z/2/g' -e 's/Z/2/g' >> "$wlm_outfile"
done
elif [ "$char_menu" == "4" ] ; then
echo "Leetified wordlist using charset;
@@ bB cC dD 33 fF gG hH iI jJ kK 11 mM nN 00 pP qQ rR \$\$ 77 uU vV wW xX yY zZ
-----------------------------------------------------------------------------" > $wlm_outfile
for i in $(cat $wlm_infile) ; do
echo $i | sed -e 's/a/@/g' -e 's/A/@/g' -e 's/e/3/g' -e 's/E/3/g' -e 's/l/1/g' -e 's/L/1/g' -e 's/o/0/g' -e 's/O/0/g' -e 's/s/\$/g' -e 's/S/\$/g' -e 's/t/7/g' -e 's/T/7/g' >> "$wlm_outfile"
done
elif [ "$char_menu" == "5" ] ; then
echo "Leetified wordlist using charset;
44 bB cC dD 33 fF gG hH iI jJ kK 11 mM nN 00 pP qQ rR \$\$ 77 uU vV wW xX yY zZ
-------------------------------------------------------------------------------" > $wlm_outfile
for i in $(cat $wlm_infile) ; do
echo $i | sed -e 's/a/4/g' -e 's/A/4/g' -e 's/e/3/g' -e 's/E/3/g' -e 's/l/1/g' -e 's/L/1/g' -e 's/o/0/g' -e 's/O/0/g' -e 's/s/\$/g' -e 's/S/\$/g' -e 's/t/7/g' -e 's/T/7/g' >> "$wlm_outfile"
done
elif [ "$char_menu" == "6" ] ; then
echo "Leetified wordlist using charset;
44 bB cC dD 33 fF gG hH iI jJ kK 11 mM nN 00 pP qQ rR 55 77 uU vV wW xX yY zZ
-----------------------------------------------------------------------------" > $wlm_outfile
for i in $(cat $wlm_infile) ; do
echo $i | sed -e 's/a/4/g' -e 's/A/4/g' -e 's/e/3/g' -e 's/E/3/g' -e 's/l/1/g' -e 's/L/1/g' -e 's/o/0/g' -e 's/O/0/g' -e 's/s/5/g' -e 's/S/5/g' -e 's/t/7/g' -e 's/T/7/g' >> "$wlm_outfile"
done
fi
echo $STAND""
f_complete
##
##
## Option 5
## ========
elif [ "$misc_menu" == "5" ] ; then
echo $STAND""
echo $BLUE"Gitsnik's permute.pl script"$STAND
echo $BLUE"Create all possible Leetify permutations of words in file"$STAND
echo $RED"WARNING! Will massively increase wordlist size!"$STAND
echo $STAND""
f_inout
echo "Working .."
echo '
#!/usr/bin/perl
use strict;
use warnings;
my %permution = (
"a" => [ "a", "4", "@", "&", "A" ],
"b" => "bB",
"c" => "cC",
"d" => "dD",
"e" => "3Ee",
"f" => "fF",
"g" => "gG9",
"h" => "hH",
"i" => "iI!|1",
"j" => "jJ",
"k" => "kK",
"l" => "lL!71|",
"m" => "mM",
"n" => "nN",
"o" => "oO0",
"p" => "pP",
"q" => "qQ",
"r" => "rR",
"s" => "sS5\$",
"t" => "tT71+",
"u" => "uU",
"v" => "vV",
"w" => ["w", "W", "\\/\\/"],
"x" => "xX",
"y" => "yY",
"z" => "zZ2",
);
# End config
while(my $word = <>) {
chomp $word;
my @string = split //, lc($word);
&permute(0, @string);
}
sub permute {
my $num = shift;
my @str = @_;
my $len = @str;
if($num >= $len) {
foreach my $char (@str) {
print $char;
}
print "\n";
return;
}
my $per = $permution{$str[$num]};
if($per) {
my @letters = ();
if(ref($per) eq "ARRAY") {
@letters = @$per;
} else {
@letters = split //, $per;
}
$per = "";
foreach $per (@letters) {
my $s = "";
for(my $i = 0; $i < $len; $i++) {
if($i eq 0) {
if($i eq $num) {
$s = $per;
} else {
$s = $str[0];
}
} else {
if($i eq $num) {
$s .= $per;
} else {
$s .= $str[$i];
}
}
}
my @st = split //, $s;
&permute(($num + 1), @st);
}
} else {
&permute(($num + 1), @str);
}
}
' > wlm_permute.pl
sudo perl wlm_permute.pl $wlm_infile > $wlm_outfile
rm wlm_permute.pl
echo $STAND""
f_complete
##
##
## Option 6
## ========
elif [ "$misc_menu" == "6" ] ; then
clear
echo $BLUE"Decode/Encode text with ROT18"$STAND
echo "-----------------------------"
echo
# Input file
echo -ne $STAND"Enter /path/to/textfile you want to decode/encode: "$GREEN
read wlm_infile
while [ ! -f $wlm_infile ] || [ "$wlm_infile" == "" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read wlm_infile
done
# Output file to save the decoded/encoded text to
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
while [ "$wlm_outfile" == "" ] ; do
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
done
if [ -f $wlm_outfile ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$wlm_outfile$RED will be overwritten$STAND"
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
echo "Working.."
sudo cat "$wlm_infile" | tr a-zA-Z0-45-9 n-za-mN-ZA-M5-90-4 > "$wlm_outfile"
echo ""
echo $STAND"rot18 decoding/encoding of file $GREEN$wlm_infile$STAND complete"
echo
f_complete
##
##
## Option 7
## ========
elif [ "$misc_menu" == "7" ] ; then
clear
echo $BLUE"Decode/Encode text with ROT47"$STAND
echo "-----------------------------"
echo
# Input file
echo -ne $STAND"Enter /path/to/textfile you want to decode/encode: "$GREEN
read wlm_infile
while [ ! -f $wlm_infile ] || [ "$wlm_infile" == "" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read wlm_infile
done
# Output file to save the decoded/encoded text to
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
while [ "$wlm_outfile" == "" ] ; do
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
done
if [ -f $wlm_outfile ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$wlm_outfile$RED will be overwritten$STAND"
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
sudo cat "$wlm_infile" | tr '!-~' 'P-~!-O' > "$wlm_outfile"
echo ""
echo $STAND"rot47 decoding/encoding of file $GREEN$wlm_infile$STAND complete"
echo
f_complete
##
##Option 8
##Check all Caesar shift possibilities on section of text
########################################################
elif [ "$misc_menu" == "8" ] ; then
clear
echo $BLUE"Check all possible shift values to decode Caesar shift text"$STAND
echo "-----------------------------------------------------------"
echo
echo -ne "Enter some text to check Caesar shifts on: "$GREEN
read CAESAR_INPUT
echo $STAND""
IFS=""
for SHIFT in {1..26} ; do SHIFT=$((expr $SHIFT - 26)|sed 's/-//') & printf "[$BLUE%2d$STAND]--> %s" "$SHIFT" && echo "$CAESAR_INPUT" |
while IFS= read -r -n1 c
do
c=$(printf "%d\n" \'$c)
if (($c>=65 && $c<=90)) || (($c>=97 && $c<=122)) ; then
enc=$(expr $c + $SHIFT)
if (($c>=65 && $c<=90)) && (($enc>90)) ; then
enc=$(expr $c + $SHIFT - 26)
elif (($c>=97 && $c<=122)) && (($enc>122)) ; then
enc=$(expr $c + $SHIFT - 26)
elif (($c<65)) || (($c>90 && $c<97)) || (($c>122)) ; then
enc=$c
fi
else enc=$c
fi
printf $GREEN"\x$(printf %x $enc)"$STAND
done
echo
done
echo
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
elif [ "$return" == "q" ] || [ "$return" == "Q" ]; then
echo $STAND""
exit
fi
fi
}
#------------
# MENU ITEM F
#============
# INFORMATION ON FILE
#####################
# Display information about a wordlist file: file type, line count,
# longest line, size in several units and a sample of the first/last
# three entries. Prompts interactively for the path and loops until an
# existing file is supplied.
f_info () {
clear
echo $BLUE"__ __ _ __ __
\ \/\/ /| |__ | \/ |
\_/\_/ |____||_|\/|_|$STAND
by TAPE"
echo ""
echo $STAND"WordList Manipulator"
echo $BLUE"File information"
echo $STAND"--------------------"
echo -ne "Enter /path/to/wordlist: "$GREEN
read info
# Quote "$info": an unquoted test breaks on paths containing spaces
# and on an empty reply.
while [ ! -f "$info" ] || [ "$info" == "" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read info
done
echo $STAND""
echo "Gathering information on file, please be patient.."
echo ""
# Number of lines (wordcount); cut keeps only the count field.
count=$(wc -l "$info" | cut -d " " -f 1)
# Longest line (GNU wc extension -L).
length=$(wc -L "$info" | cut -d " " -f 1)
# General file type information.
file_info=$( file "$info" | cut -d ":" -f 2 )
#
echo $STAND"File type:$GREEN$file_info$STAND"
echo $STAND""
echo $STAND"Wordcount/number of lines: "$GREEN$count$STAND
echo $STAND""
echo $STAND"Maximum word/line length: $GREEN$length$STAND"
echo $STAND""
echo $STAND"File size"
echo $STAND"---------"
B=$( stat -c %s "$info" )
KB=$( echo "scale=2;$B / 1024" | bc )
MB=$( echo "scale=2;($B/1024)/1024" | bc )
GB=$( echo "scale=2;(($B/1024)/1024)/1024" | bc )
TB=$( echo "scale=2;((($B/1024)/1024)/1024)/1024" | bc )
echo $GREEN" B $STAND(Bytes) = $GREEN$B"
echo $GREEN"KB $STAND(Kilobytes) = $GREEN$KB"
echo $GREEN"MB $STAND(Megabytes) = $GREEN$MB"
echo $GREEN"GB $STAND(Gigabytes) = $GREEN$GB"
# TB was previously computed but never shown; display it like the rest.
echo $GREEN"TB $STAND(Terabytes) = $GREEN$TB"
echo $STAND""
echo $STAND"Example of file entries"
echo $STAND"-----------------------"$GREEN
head -n 3 "$info"
echo ".."
tail -n 3 "$info"
echo $STAND""
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
elif [ "$return" == "q" ] || [ "$return" == "Q" ]; then
echo $STAND""
exit
fi
}
#
#
#------------
# MENU ITEM H
#============
# HELP INFORMATION
##################
# Show the help/usage text: version banner, then (after the user hits
# Enter) the full description of every menu option, paged through less.
# The quoted text below is user-facing output and is left untouched.
f_help () {
clear
echo $BLUE"__ __ _ __ __
\ \/\/ /| |__ | \/ |
\_/\_/ |____||_|\/|_|$STAND
by TAPE"
echo ""
echo $BLUE"WordList Manipulator$RED v$CURR_VERS$STAND build $RED$CURR_BUILD$STAND"
echo $STAND""
echo -ne "Hit enter to continue "
# Only a bare Enter opens the help text; any other input returns silently.
read continue
if [ "$continue" == "" ] ; then
# Entire help document is one double-quoted string piped to less.
echo "
q/Q to quit;
Introduction
============
Why did I spend hours doing this ?
Well, I just suck at remembering code for the simple things..
Normally you would use a tool like crunch or maskprocessor to create
any wordlist you want, however on occasion, you will need to alter what
is already available.
WLM provides an easy menu interface listing the most frequently used
manipulation options.
So in short;
RUNNING SCRIPT
--------------
The file needs to be made executable, so if script is not running;
> chmod 755 'filename'
or
> chmod +x 'filename'
BASIC USAGE
-----------
Choose an option from the main menu and then an option from the
sub menu.
Depending on the choice made, you will be prompted for an input file
to work on.
WLM will verify whether the input file exists, if not you will be
prompted to enter the correct filename (or correct /path/to/file)
You will (depending on the option chosen) be prompted to give a
desired output file name.
If this exists, you will be prompted to confirm whether to overwrite
the existing file or else to quit to main menu.
BUGS / LIMITATIONS
==================
This script was written for use on Backtrack and has been tested on Backtrack as from v5.
The script has also been tested on BackBox Linux with good results.
No other platforms have been tested.
When prefixing or suffixing numbers to a wordlist, an error message
will be given if there are '%' characters in the wordlist and that
line will be skipped.
Including a 'space' does not work for;
Prefixing, Suffixing, Inclusion and Substitution options.
Splitting files based on size only accepts whole numbers
(so 2 / 25 / 100 not 2.5 / 10.6 etc)
Probably many more, please let me know what you find !
tape dot rulez at gmail dot com
ALL OPTIONS
-----------
Running the script followed by a word will create all possible permutations of that
word (Gitsnik's permute.pl script).
Running the script without any input will offer a menu with the below items ;
1. Case Options;
1.1 Change case of first letter of each word in the wordlist.
1.2 Change case of last letter of each word in the wordlist.
1.3 Change all lower case to upper case.
1.4 Change all upper case to lower case.
1.5 Invert case of each letter in each word.
2. Combination options;
2.1 Combine words from 1 list to all words in another list.
2.2 Combine all wordlists in a directory into 1 wordlist.
3. Prefix characters to wordlist;
3.1 Prefix numeric values in sequence (ie. 0-999)
3.2 Prefix fixed number of numeric values in sequence (ie. 000-999)
3.3 Prefix a word or characters to wordlist.
Some characters will require you to escape them using backslash (\)
also space does not work, so this function has some limitations.
4. Append / Suffix characters to wordlist;
4.1 Suffix numeric values in sequence (ie. 0-999)
4.2 Suffix fixed number of numeric values in sequence (ie. 000-999)
4.3 Suffix a word or characters to wordlist.
Some characters will require you to escape them using backslash (\)
also space does not work, so this function has some limitations.
5. Include characters
5.1 Include characters from a certain postion from start of word.
5.2 Include characters from a certain postion from end of word.
Some characters will require you to escape them using backslash (\)
also space does not work, so this function has some limitations.
6. Substitute/Replace characters
6.1 Substitute/Replace characters from start of word.
6.2 Substitute/Replace characters from end of word.
6.3 Substitute/Replace characters at specified positions in list.
Some characters will require you to escape them using backslash (\)
also space does not work, so this function has some limitations.
7. Optimize / tidy up wordlist.
7.1 Full optimization of wordlist.
7.2 Optimize for WPA (min 8 chars max 63 chars).
7.3 Sort words based on wordlength
(can help process speed with some programmes [cRARk])
8. Split options
8.1 Split wordlists based on a user defined max linecount in each slit file.
8.2 Split wordlists based on a user defined max size of each split file.
9. Removal / Deletion options
9.1 Remove characters at a certain position from start of word.
9.2 Remove characters at a certain position before end of word.
9.3 Remove specific characters globally from words.
9.4 Remove words containing specific characters from wordlist.
9.5 Remove words with more than X number of identical adjacent characters from wordlist.
9.6 Remove words existing in 1 list from another list (test version only for small lists).
9.7 Remove words that do not have X number of numeric values.
9.8 Remove words that have X number of repeated characters.
9.9 Remove words of a certain length.
9.10 Remove all characters after a specified character.
10. Miscellaneous fun
10.1 Check possible wordlist sizes (with same min-max length only).
10.2 Create a wordlist from a range of dates (datelist).
10.3 Strip SSIDs from a kismet generated .nettxt file.
10.4 Basic leetify options for wordlist.
10.5 Leetify/Permute wordlist (Gitsnik's permute.pl script).
10.6 ROT18 encode/decode text
10.7 ROT47 encode/decode text
10.8 Check all possible shifts to decode a Caesar cipher shifted text.
f. File information
Gives information on aspects of selected file ;
- Filetype
- Wordcount of file
- Longest line
- File Size
- first 3 and last 3 lines of file
h. This information.
u. Check for updates to the script.
q/Q to quit
" | less
fi
}
#
##
### Update function
####################
# Self-update: probe Internet connectivity, download the latest script,
# compare its version/build numbers against the running copy and, on
# confirmation, install the new version (with BackBox-specific paths).
# NOTE(review): googlecode.com has been shut down -- this download URL
# cannot work any more; the update source needs repointing.
f_update () {
clear
echo $STAND"Wordlist Manipulator"
echo $BLUE"Check for updates"
echo $STAND"--------------------"
echo
echo -ne $STAND"Check the latest version available ? y/n "$GREEN
read UPD
echo $STAND""
LOC=$(pwd)
if [[ $UPD == "y" || $UPD == "Y" ]] ; then
echo $GREEN">$STAND Checking Internet connection.."
# Connectivity probe: fetch google.com; an empty result file => offline.
wget -q --tries=10 --timeout=5 http://www.google.com -O /tmp/index.google &> /dev/null
if [ ! -s /tmp/index.google ];then
echo $RED"No internet connection found..$STAND"
sleep 2
f_menu
fi
echo $GREEN">$STAND Downloading latest version for checking.."
wget -q http://wordlist-manipulator.googlecode.com/svn/wlm -O $LOC/wlm.tmp
echo $GREEN">$STAND Checking if latest version in use.."
# Version and build are parsed from fixed line/column positions in the
# downloaded file (lines 3 and 4).
NEW_VERS=$(sed -n 3p $LOC/wlm.tmp | cut -c 11-13)
NEW_BUILD=$(sed -n 4p $LOC/wlm.tmp | cut -c 10-13)
if [ $CURR_VERS != $NEW_VERS ] || [ $CURR_BUILD -lt $NEW_BUILD ] ; then
echo -ne $STAND"Version in use is $GREEN$CURR_VERS$STAND build $GREEN$CURR_BUILD$STAND
Latest available is $GREEN$NEW_VERS$STAND build $GREEN$NEW_BUILD$STAND, update ? y/n "$GREEN
read UPD1
if [[ $UPD1 == "y" || $UPD1 == "Y" ]] ; then
# BackBox installs live in fixed system paths; copy there via sudo,
# show the changelog tail since the current version, and exit.
if [ -d /opt/backbox ] ; then
sudo cp $LOC/wlm.tmp /opt/wlm/wlm
sudo cp $LOC/wlm.tmp /usr/bin/wlm
sudo cp $LOC/wlm.tmp /menu/auditing/miscellaneous/wlm
echo $STAND""
tail -n 30 /usr/bin/wlm | sed -n "/$CURR_VERS/,\$p"
echo $STAND""
echo "Please restart$GREEN wlm$STAND script"
echo $STAND""
rm $LOC/wlm.tmp
exit
fi
# Non-BackBox: replace the local copy in the current directory.
chmod +x $LOC/wlm.tmp
mv $LOC/wlm.tmp $LOC/wlm
echo $STAND""
echo $STAND"Latest WLM version has been saved to $GREEN$LOC/wlm$STAND"
echo $STAND""
tail -n 30 $LOC/wlm | sed -n "/$CURR_VERS/,\$p"
echo $STAND""
echo $STAND""
echo "Please restart$GREEN wlm$STAND script"
echo $STAND""
exit
else
echo $STAND""
rm $LOC/wlm.tmp
f_menu
fi
elif [ $CURR_VERS == $NEW_VERS ] && [ $CURR_BUILD == $NEW_BUILD ] ; then
echo $RED"Current version in use is the latest version available;$GREEN v$NEW_VERS$STAND build $GREEN$NEW_BUILD$STAND"
sleep 3
rm $LOC/wlm.tmp
f_menu
fi
else
echo $STAND""
f_menu
fi
}
#
##
### Read infile and outfile
###########################
# Prompt for an input wordlist and an output file name.
# Sets the globals $wlm_infile and $wlm_outfile for the calling option.
# Loops until the input file exists and the output name is non-empty;
# offers to overwrite an existing output file, returning to the main
# menu if the user declines.
f_inout () {
# Input file to alter. Quote "$wlm_infile": an unquoted test breaks on
# paths containing spaces and on an empty reply.
echo -ne $STAND"Enter /path/to/wordlist you want to edit: "$GREEN
read wlm_infile
while [ ! -f "$wlm_infile" ] || [ "$wlm_infile" == "" ] ; do
echo -ne $RED"File does not exist, enter /path/to/file: "$GREEN
read wlm_infile
done
# Output file to save the edited wordlist to.
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
while [ "$wlm_outfile" == "" ] ; do
echo -ne $STAND"Enter desired output file name: "$GREEN
read wlm_outfile
done
# Confirm before clobbering an existing output file.
if [ -f "$wlm_outfile" ] ; then
echo -ne $RED"File already exists, overwrite ? y/n "$GREEN
read over
if [ "$over" == "y" ] || [ "$over" == "Y" ] ; then
echo $RED"Existing file $GREEN$wlm_outfile$RED will be overwritten$STAND"
else
echo $STAND"Process cancelled, returning to menu"
sleep 1
f_menu
fi
fi
}
#
##
### Creation completion and return or quit option
#################################################
# Report successful creation of $wlm_outfile, show its first and last
# three entries, then let the user return to the menu (Enter) or quit
# (q/Q).
f_complete () {
echo "$GREEN$wlm_outfile$STAND has been created;"
# Quote "$wlm_outfile" so head/tail work on paths containing spaces.
head -n 3 "$wlm_outfile"
echo ".."
tail -n 3 "$wlm_outfile"
echo ""
echo -ne $STAND"hit Enter to return to menu or q/Q to quit "$GREEN
read return
if [ "$return" == "" ] ; then
echo $STAND""
elif [ "$return" == "q" ] || [ "$return" == "Q" ]; then
echo $STAND""
exit
fi
}
#
##
### MENU
########
# Main menu: loop forever displaying the option list and dispatching to
# the chosen sub-function; q/Q exits the script, anything unknown
# flashes an error and redraws the menu.
f_menu () {
while :
do
clear
echo $BLUE"__ __ _ __ __
\ \/\/ /| |__ | \/ |
\_/\_/ |____||_|\/|_|$STAND
by TAPE"
echo $BLUE"WordList Manipulator"
echo $STAND"===================="
# Heredoc: the menu text below is emitted verbatim up to the '!' marker.
cat << !
1 Case options
2 Combinations
3 Prepend / Prefix
4 Append / Suffix
5 Inclusion Options
6 Substitution Options
7 Tidy up / optimize wordlist
8 Split files
9 Removal / Deletion options
10 Miscellaneous Fun
f File information
h Version and listing of all functions
u Update
Q Exit
!
echo ""
echo -ne $STAND"Choose from the above menu: "$GREEN
read menu
# Dispatch to the handler function for the selected entry.
case $menu in
1) f_case ;;
2) f_combine ;;
3) f_prefix ;;
4) f_suffix ;;
5) f_inclu ;;
6) f_subs ;;
7) f_tidy ;;
8) f_split ;;
9) f_delete ;;
10) f_misc ;;
f) f_info ;;
h) f_help ;;
u) f_update ;;
q) echo $STAND"" && exit ;;
Q) echo $STAND"" && exit ;;
*) echo $RED"\"$menu\" is not a valid menu item "$STAND; sleep 0.5 ;;
esac
done
}
#
##
### TEST FOR DIRECT WORD INPUT
##############################
# Entry dispatch: when the script is invoked with arguments, skip the
# menu entirely -- write Gitsnik's permute.pl to disk, run every
# leet/case permutation of the given word(s) through it, then clean up.
# With no arguments, start the interactive menu instead.
if [ $# -ne 0 ]; then
INPUT=$(echo "$@")
# The single-quoted block below is the literal Perl source and must be
# written out unmodified.
echo '
#!/usr/bin/perl
use strict;
use warnings;
my %permution = (
"a" => [ "a", "4", "@", "&", "A" ],
"b" => "bB",
"c" => "cC",
"d" => "dD",
"e" => "3Ee",
"f" => "fF",
"g" => "gG9",
"h" => "hH",
"i" => "iI!|1",
"j" => "jJ",
"k" => "kK",
"l" => "lL!71|",
"m" => "mM",
"n" => "nN",
"o" => "oO0",
"p" => "pP",
"q" => "qQ",
"r" => "rR",
"s" => "sS5\$",
"t" => "tT71+",
"u" => "uU",
"v" => "vV",
"w" => ["w", "W", "\\/\\/"],
"x" => "xX",
"y" => "yY",
"z" => "zZ2",
);
# End config
while(my $word = <>) {
chomp $word;
my @string = split //, lc($word);
&permute(0, @string);
}
sub permute {
my $num = shift;
my @str = @_;
my $len = @str;
if($num >= $len) {
foreach my $char (@str) {
print $char;
}
print "\n";
return;
}
my $per = $permution{$str[$num]};
if($per) {
my @letters = ();
if(ref($per) eq "ARRAY") {
@letters = @$per;
} else {
@letters = split //, $per;
}
$per = "";
foreach $per (@letters) {
my $s = "";
for(my $i = 0; $i < $len; $i++) {
if($i eq 0) {
if($i eq $num) {
$s = $per;
} else {
$s = $str[0];
}
} else {
if($i eq $num) {
$s .= $per;
} else {
$s .= $str[$i];
}
}
}
my @st = split //, $s;
&permute(($num + 1), @st);
}
} else {
&permute(($num + 1), @str);
}
}
' > wlm_permute.pl
# Feed the command-line words through the generated permuter.
echo $INPUT | perl wlm_permute.pl
echo $STAND"
Thanks to Gitsnik's permute.pl script
-------------------------------------
"
# Remove the temporary Perl helper from the current directory.
rm wlm_permute.pl
else
f_menu
fi
# Version History
#0.1 Released 11-10-2011
#
#0.2 Released 18-10-2011
# > Fixed bugs with suffixing numeric values to lists
# > Increased speed of checking file information by using wc
# > Included wordlist size checker (crunch_size)
# > Included a split option
# > Tidied up code and updated help
#
#0.3 Released 18-12-2011
# > Updated crunch_size to include estimated time based on user input on expected pwds/sec on their system.
# > Updated Split options to include splitting by filesize.
# > Tidied up how estimated number of files is calculated for split function.
# > Tidied up various bits of code and made the code more 'uniform' in approach.
# > Included msg to advise of lines which wont be processed for the prefixing/suffixing of numbers (due to % char)
# > Included removal/deletion options.
# > Included date wordlist creation option with an updated datelist script.
# > Included SSIDstrip to create SSID wordlists from kismet generated .nettxt files.
#
#0.4 Released 09-06-2012
# > Included possibility to invert the case in words (lower->upper & upper->lower).
# > Included basic error checks to the datelist script to avoid erroneous input.
# > Included possibility to remove words that do not have X number of numeric values.
# > Included possibility to remove words with N number of repeated characters.
# > Included basic leetifying options, not terribly happy with how that is done, but suppose better than nothing.
# > Temporary (? ;) ) inclusion of Gitsnik's great permute.pl script also able to run on direct input
# pending my pitiful bash endeavours to reproduce the same thing..
#
#0.5 Released 20-08-2012
# > Fixed bug in datelist script that ignored July month -- Thanks to Stepking2
# > Fixed bug in datelist leapyear script that caused whole century years to ignore February -- Thanks to Stepking2
# > Fixed bug in removal of last characters in word in menu option 9
# > Replaced repetetive code with functions
#
#0.6
# > Made all menus and queries, which weren't already, uniformly presented where possible.
# > Included 7.3 Wordlist optimization option (sort on word/string length)
# > Deleted the unused 8.3 menu option.
# > Included option to delete words of a certain length from file.
#
#0.7 Released 21-10-2012
# > Updated split options by including the -a switch variable to allow for sufficient suffix numbers
# depending on for files which will be created when splitting files on linecount based on user input.
# > Included rudimentary update function. To be improved..
#
#0.8 Released 24-10-2012
# Build 0801
# - Improved update function by introducing builds for easier future update checks.
# Build 0802 25-10-2012
# - Included mention of this list of changes when updating from current version.
# Build 0803 26-10-2012
# - Included an 'echo' when quitting from main menu to prevent colour from being altered
# in terminal upon quitting in BBox.
# - Corrected a few typos
# - Included build number when calling for version info.
# Build 0804 27-10-2012
# - Included more 'sudos' to allow for better funcionality when using in BBox.
# - Updated update function for BBox
#
#0.9 Released 31-03-2013
# Build 0901
# - Included rot18 decoding/encoding option under menu item 10; 10.6
# - Included rot47 decoding/encoding option under menu item 10; 10.7
# - Updated wordlist optimization (7.1) script for better performance and added dos2unix run.
# - Included check for running as root at script startup.
# Build 0902 12-05-2013
# - Included option 9.10 to allow deletion of characters as from and including a specified character.
# Build 0903 12-05-2013
# - Included option to check all possible shift values on a Caesar cipher text under menu item 10; 10.8
# Build 0904 13-05-2013
# - Fixed spacing not being done in Caesar cipher shift check
# Build 0905 14-05-2013
# - Fixed ROT18/47 commands not printing out first and last 3 words in created list.
#
#
# To Do List
# ----------
# * Better include sudo requirements.
# * Verify OS/Distro - presence of required tools (pw-inspector) ?
# * Include reverse option ?
# * ? Continue to make it better ? ;)
|
ausarbluhd/EternalLLC
|
scripts/wlm.sh
|
Shell
|
gpl-2.0
| 84,631 |
#!/bin/bash
#
# Script by Christoph Daniel Miksche
# License: GNU General Public License
#
# Contact:
# > http://christoph.miksche.org
# > Twitter: CMiksche
# > GitHub: CMiksche
#
# Configuration values sourced by the other webserver setup scripts.
# Default User
defaultuser=""
# Default User Password
# NOTE(review): stored in plain text -- restrict read permissions on this file.
defaultpass=""
# Name of your Website
website=""
# Database
# Options: mysql
database="mysql"
# FTP-Server
# Options: vsftpd
ftpserver="vsftpd"
|
CMiksche/DebianWebserverConfig
|
01-config.sh
|
Shell
|
gpl-2.0
| 378 |
#!/bin/sh /etc/rc.common
# Init script (OpenWrt rc.common style): on shutdown, hold the system
# until any running firmware upgrade process has completed.
STOP=01
# Source the shared config (provides CONFIG_IFX_MODEL_NAME) only once.
if [ ! "$CONFIGLOADED" ]; then
if [ -r /etc/rc.d/config.sh ]; then
sync
. /etc/rc.d/config.sh 2>/dev/null
CONFIGLOADED="1"
fi
fi
stop ()
{
# Marker file signalling to other components that shutdown has begun.
touch /tmp/upgrade_chk.txt
sync;
# NOTE(review): extra settle delay for this board model -- confirm why 3s.
if [ "$CONFIG_IFX_MODEL_NAME" = "ARX382_GW_EL_ADSL" ] ; then
sleep 3;
fi
# Poll /proc/*/stat for a process named "(upgrade)"; while one exists,
# wait 3s and warn on the console, otherwise break out and continue.
while : ; do
grep -q "(upgrade)" /proc/*/stat && {
sleep 3;
echo -en "\n ####################################\n"
echo -en "\n Hold until upgrade process completes\n"
echo -en "\n ####################################\n"
} || break
done
# Additional hold when a device-management reboot check file is present.
if [ "$CONFIG_IFX_MODEL_NAME" = "ARX382_GW_EL_ADSL" -a -f "/tmp/devm_reboot_chk.txt" ] ; then
sleep 20;
fi
sync;
}
|
kbridgers/VOLTE4GFAX
|
package/feeds/ltq_feeds_netcomp_cpe/ifx_config_common_features/files/etc/init.d/upgradesync.sh
|
Shell
|
gpl-2.0
| 669 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.