code
stringlengths 2
1.05M
| repo_name
stringlengths 5
110
| path
stringlengths 3
922
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 2
1.05M
|
---|---|---|---|---|---|
#! /usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script executes a program that will forward some or all of the logs to a running instance of Chainsaw v2.
# To use this script, start Chainsaw on a host and create a new XMLSocketReceiver. This script
# accepts the following command line parameters
#
# host [required] - host running Chainsaw. Must be accessible via the network from this server
# port [required] - port that XMLSocketReceiver is listening on.
# filter [optional] - filter for log file names, * and ? are valid wildcards
# start [optional] - filter log messages beginning at this time (format is yyyyMMddHHmmss)
# end [optional] - filter log messages ending at this time (default is now, format is yyyyMMddHHmmss)
# level [optional] - filter log messages with this level and higher
# regex [optional] - filter log messages that match this regex (follows java.util.regex.Pattern syntax)
#
#
# Example:
#
# LogForwarder.sh -h 127.0.0.1 -p 4448 -f tserver* -s 20100101000001 -e 20100101235959 -l INFO -m .*scan.*
#
# Start: Resolve Script Directory
# Follow any chain of symlinks so $bin ends up as the real, physical
# directory containing this script (cd -P resolves "..", symlinked dirs).
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
bin=$( cd -P "$( dirname "$SOURCE" )" && pwd )
SOURCE=$(readlink "$SOURCE")
[[ $SOURCE != /* ]] && SOURCE="$bin/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
bin=$( cd -P "$( dirname "$SOURCE" )" && pwd )
script=$( basename "$SOURCE" )
# Stop: Resolve Script Directory
# NOTE(review): $bin and $script look unused below, but the sourced
# config.sh presumably reads them -- confirm before renaming or removing.
. "$bin"/config.sh
# Run SendLogToChainsaw with the remaining CLI args (-h/-p/-f/-s/-e/-l/-m).
# NOTE(review): -cp points at the lib directory itself, not lib/*; the real
# classpath is presumably assembled by config.sh via environment -- verify.
"${JAVA_HOME}/bin/java" -cp "$ACCUMULO_HOME/lib" org.apache.accumulo.server.util.SendLogToChainsaw -d "$ACCUMULO_LOG_DIR" "$@"
|
adamjshook/accumulo
|
assemble/bin/LogForwarder.sh
|
Shell
|
apache-2.0
| 2,457 |
#!/bin/bash
# Print an informational message to stdout with a colored "[INFO]" prefix.
# All arguments are joined into one message. "$*" is quoted: the original
# unquoted $@ was subject to word splitting and pathname expansion, so a
# message containing "*" could be replaced by directory contents.
function INFO(){
    echo -e "\e[104m\e[97m[INFO]\e[49m\e[39m $*"
}
# Print a highlighted message to stdout with a colored "[IMPORTANT]" prefix.
# "$*" is quoted to avoid the original unquoted $@'s word splitting and
# pathname expansion of the message text.
function IMPORTANT(){
    echo -e "\e[105m\e[97m[IMPORTANT]\e[49m\e[39m $*"
}
# Sleep for $1 seconds, logging before and after.
# $1 - number of seconds to sleep.
# Quoting added: the original unquoted $(INFO ...) re-split and re-globbed
# the formatted message, and unquoted ${1} would break on odd values.
function SLEEP(){
    echo -n "$(INFO "Sleeping(${1} secs)..")"
    sleep "${1}"
    echo "Done"
}
# Block until the operator removes a sentinel temp file.
# Creates /tmp/namazu-wait.XXXXX and polls once per second until it is
# deleted. "${TMP}" is now quoted in the -e test (the original unquoted use
# would misparse if mktemp ever returned a path with spaces).
function PAUSE(){
    TMP=$(mktemp /tmp/namazu-wait.XXXXX)
    IMPORTANT "PAUSING. remove ${TMP} to continue"
    while [ -e "${TMP}" ]; do
        sleep 1
    done
}
|
osrg/namazu
|
example/template/materials/lib.sh
|
Shell
|
apache-2.0
| 419 |
#!/bin/bash
# NOTE(review): shebang changed from /bin/sh to /bin/bash -- this script
# relies on bash-only features ([[ ]], arrays, shopt, local) and fails under
# a strictly POSIX /bin/sh such as dash.
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script signs the Chromoting binaries, builds the Chrome Remote Desktop
# installer and then packages it into a .dmg. It requires that Iceberg be
# installed (for 'freeze').
#
# usage: sign_and_build.sh output_dir input_dir codesign_keychain codesign_id
#
# The final disk image (dmg) is placed in |output_dir|.

# Abort on the first failing command and on any use of an unset variable.
set -e -u
# Binaries to sign (paths relative to |input_dir|).
ME2ME_HOST='PrivilegedHelperTools/org.chromium.chromoting.me2me_host'
UNINSTALLER='Applications/@@HOST_UNINSTALLER_NAME@@.app'
# Iceberg creates this directory to write its output.
PKG_DIR=build
# The Chromoting Host installer is a meta-package that consists of 3
# components:
# * Chromoting Host Service package
# * Chromoting Host Uninstaller package
# * Keystone package(GoogleSoftwareUpdate - for Official builds only)
PKGPROJ_HOST='ChromotingHost.packproj'
PKGPROJ_HOST_SERVICE='ChromotingHostService.packproj'
PKGPROJ_HOST_UNINSTALLER='ChromotingHostUninstaller.packproj'
# Final (user-visible) mpkg name. "@@...@@" placeholders are substituted by
# the build before this script runs.
PKG_FINAL='@@HOST_PKG@@.mpkg'
# Scratch directory where the dmg contents are staged.
DMG_TEMP=dmg_tmp
DMG_NAME='@@DMG_NAME@@'
DMG_DIR="${DMG_TEMP}/${DMG_NAME}"
DMG_FILENAME='@@DMG_NAME@@.dmg'
# Basename of this script, used in the usage message.
ME="$(basename "${0}")"
readonly ME
# Log a timestamped message to stderr.
# Arguments: message words, joined with single spaces.
# Uses ${*} instead of the original ${@}: expanding "$@" inside a larger
# double-quoted string produces multiple words (ShellCheck SC2145); the
# printed output is the same but ${*} expresses the intent correctly.
err() {
  echo "[$(date +'%Y-%m-%d %H:%M:%S%z')]: ${*}" >&2
}
# Log a timestamped error via err(), then abort the whole script with
# exit status 1. Use for unrecoverable failures.
err_exit() {
  err "${@}"
  exit 1
}
# shell_safe_path ensures that |path| is safe to pass to tools as a
# command-line argument. If the first character in |path| is "-", "./" is
# prepended to it. The possibly-modified |path| is output.
# Emit |path| in a form safe to pass to tools as a positional argument:
# a leading "-" would be parsed as an option, so such paths get a "./"
# prefix. Everything else is echoed unchanged.
shell_safe_path() {
  local p="${1}"
  case "${p}" in
    -*) echo "./${p}" ;;
    *)  echo "${p}" ;;
  esac
}
# Ensure |dir| exists and is empty, creating it if absent.
# Aborts the script if the directory contains any entry, including
# dotfiles. Uses err_exit for consistency with the rest of this file
# (the original open-coded err + exit 1 here).
verify_empty_dir() {
  local dir="${1}"
  if [[ ! -d "${dir}" ]]; then
    mkdir "${dir}"
  fi
  # nullglob: an empty dir yields a zero-length array instead of the
  # literal pattern; dotglob: hidden files count as contents too.
  shopt -s nullglob dotglob
  local dir_contents=("${dir}"/*)
  shopt -u nullglob dotglob
  if [[ ${#dir_contents[@]} -ne 0 ]]; then
    err_exit "Output directory must be empty"
  fi
}
# Code-sign one binary and verify the result.
#   $1 - path of the binary to sign
#   $2 - keychain containing the identity
#   $3 - signing identity name
# Aborts via err_exit when the input file is missing.
sign() {
  local name="${1}"
  local keychain="${2}"
  local id="${3}"
  [[ -e "${name}" ]] || err_exit "Input file doesn't exist: ${name}"
  echo Signing "${name}"
  codesign -vv -s "${id}" --keychain "${keychain}" "${name}"
  codesign -v "${name}"
}
# Sign both shipped binaries (the me2me host, then the uninstaller app)
# found under |input_dir|, using the given keychain and identity.
sign_binaries() {
  local input_dir="${1}"
  local keychain="${2}"
  local id="${3}"
  local binary
  for binary in "${ME2ME_HOST}" "${UNINSTALLER}"; do
    sign "${input_dir}/${binary}" "${keychain}" "${id}"
  done
}
# Build one .pkg by running Iceberg's "freeze" on the given .packproj file.
build_package() {
  local project="${1}"
  echo "Building .pkg from ${project}"
  freeze "${project}"
}
# Build the component packages first, then the enclosing meta-package.
# Order matters: the host meta-package bundles the two component pkgs.
build_packages() {
  local input_dir="${1}"
  local project
  for project in "${PKGPROJ_HOST_SERVICE}" "${PKGPROJ_HOST_UNINSTALLER}" "${PKGPROJ_HOST}"; do
    build_package "${input_dir}/${project}"
  done
}
# Stage the installer and the Keystone upgrade hook into a directory, then
# wrap that directory into the final disk image in |output_dir|.
#   $1 - input_dir: tree containing the built .mpkg and Scripts/
#   $2 - output_dir: where the .dmg is written
build_dmg() {
local input_dir="${1}"
local output_dir="${2}"
# TODO(garykac): Change this to use the pkg-dmg script.
# Create the .dmg.
echo "Building .dmg..."
mkdir -p "${input_dir}/${DMG_DIR}/${PKG_FINAL}"
# Copy .mpkg installer.
echo "Copying ${input_dir}/${PKG_DIR}/${PKG_FINAL}"
ditto "${input_dir}/${PKG_DIR}/${PKG_FINAL}" \
"${input_dir}/${DMG_DIR}/${PKG_FINAL}"
# Copy .keystone_install script to top level of .dmg.
# Keystone calls this script during upgrades.
cp "${input_dir}/Scripts/keystone_install.sh" \
"${input_dir}/${DMG_DIR}/.keystone_install"
# Build the .dmg from the directory.
# -ov overwrites an existing image; hdiutil may still fail without creating
# the file, so existence is checked explicitly below (set -e does not see
# a "success with no output" case).
hdiutil create "${output_dir}/${DMG_FILENAME}" \
-srcfolder "${input_dir}/${DMG_DIR}" -ov -quiet
if [[ ! -f "${output_dir}/${DMG_FILENAME}" ]]; then
err_exit "Unable to create disk image: ${DMG_FILENAME}"
fi
}
# Print a one-line usage synopsis on stderr.
usage() {
  printf '%s\n' "Usage: ${ME}: output_dir input_dir codesign_keychain codesign_id" >&2
}
# Entry point: normalize the four CLI arguments, then drive the
# sign -> package -> dmg pipeline.
#   $1 - output directory (must be empty or absent)
#   $2 - input directory holding binaries and .packproj files
#   $3 - keychain containing the signing identity
#   $4 - codesign identity name
main() {
  # Declare and assign separately: "local x=$(cmd)" masks a failure of cmd
  # because set -e only sees local's own (successful) exit status (SC2155).
  local output_dir input_dir codesign_keychain codesign_id
  output_dir="$(shell_safe_path "${1}")"
  input_dir="$(shell_safe_path "${2}")"
  codesign_keychain="$(shell_safe_path "${3}")"
  codesign_id="${4}"
  verify_empty_dir "${output_dir}"
  sign_binaries "${input_dir}" "${codesign_keychain}" "${codesign_id}"
  build_packages "${input_dir}"
  # TODO(garykac): Sign final .mpkg.
  build_dmg "${input_dir}" "${output_dir}"
}
# Require exactly four positional arguments before doing any work.
if [[ $# -ne 4 ]]; then
  usage
  exit 1
fi
main "$@"
exit $?
|
gx1997/chrome-loongson
|
remoting/host/installer/mac/do_signing.sh
|
Shell
|
bsd-3-clause
| 4,260 |
#!/bin/sh
# Deploy-time database tasks: create the database (no-op if it already
# exists per rails db:create semantics), then apply pending migrations.
# set -e aborts on the first failure so a failed create stops the migrate.
# NOTE(review): assumes it runs from the Rails app root with bundler
# available -- confirm against the deploy pipeline.
set -e
bundle exec rails db:create
bundle exec rails db:migrate
|
gazayas/tebukuro
|
script/deploy-tasks.sh
|
Shell
|
bsd-3-clause
| 76 |
# Benchmark puma's TLS throughput: start puma serving a realistic response
# over SSL in the background, give it time to boot, load-test it with wrk
# for 30 seconds, then tear the server down.
# NOTE(review): expects to run from the puma repo root with wrk installed.
bundle exec ruby bin/puma \
-t 4 -b "ssl://localhost:9292?key=examples%2Fpuma%2Fpuma_keypair.pem&cert=examples%2Fpuma%2Fcert_puma.pem&verify_mode=none" \
test/rackup/realistic_response.ru &
# Remember the background server's PID so it can be killed after the run.
PID1=$!
# Crude startup wait; no readiness probe is performed.
sleep 5
wrk -c 4 -d 30 --latency https://localhost:9292
kill $PID1
|
looker/puma
|
benchmarks/wrk/ssl_realistic_response.sh
|
Shell
|
bsd-3-clause
| 300 |
#!/bin/bash
# Verify that the working tree modifies no sql files relative to the main
# branch. This will always pass on a deploy because the working tree is an
# unmodified copy of the main branch.

# Compare against the CI-provided target SHA when set, otherwise against
# the merge base with main.
target_treeish=${HAIL_TARGET_SHA:-$(git merge-base main HEAD)}
modified_sql_file_list=$(mktemp)
# Keep every change under sql/ except additions (A) and modifications (M)
# of the two generated files that are expected to churn.
git diff --name-status "$target_treeish" sql \
    | grep -Ev $'^A|^M\t[^/]+/sql/(estimated-current.sql|delete-[^ ]+-tables.sql)' \
    > "$modified_sql_file_list"
# wc -l < file avoids the useless cat and keeps the output a bare number.
if [ "$(wc -l < "$modified_sql_file_list")" -ne 0 ]
then
    cat "$modified_sql_file_list"
    echo 'At least one migration file was modified. See above.'
    exit 1
fi
|
hail-is/hail
|
check-sql.sh
|
Shell
|
mit
| 636 |
#!/bin/bash
#################################################################################################
#
# Licensed Materials - Property of IBM
# © Copyright IBM Corporation 2014. All Rights Reserved.
# This sample program is provided AS IS and may be used, executed, copied and modified without
# royalty payment by customer (a) for its own instruction and study, (b) in order to develop
# applications designed to run with an IBM product, either for customer's own internal use or for
# redistribution by customer, as part of such an application, in customer's own products.
#
################################################################################################
# Directory containing this script; all relative paths resolve from it.
SCRIPT_DIR=`dirname "$0"`
# Lets setup all our known variables
OPTIND=1
# Repository root, one level above the scripts directory.
BASE_DIR=${SCRIPT_DIR}/..
# Files that embed the application name and must be rewritten on rename.
# NOTE(review): "WTIH" is a typo for "WITH", but the variable is referenced
# later in this script -- rename only in a coordinated change.
declare -a FILES_WTIH_APP_NAME=( ${BASE_DIR}/Android/Summit/app/src/main/res/values/strings.xml ${BASE_DIR}/iOS/ReadyAppRetail/ReadyAppRetail/en.lproj/Main.storyboard ${BASE_DIR}/docs/index.html ${BASE_DIR}/html/index.html)
# Current application name, persisted in appname.txt by the previous run.
APP_NAME=`cat "${SCRIPT_DIR}/appname.txt"`
# Sentinel default; replaced by -n. If left unchanged, validation below fails.
NEW_APP_NAME="NewName"
###############################################################################
#
# Functions Section
#
###############################################################################
################################################################################################
## Standard usage statement. Lets make sure they know how to call the script.
################################################################################################
# Print the invocation synopsis for this script (two lines on stdout).
usage() {
  printf '%s\n' "changeAppName.sh -n <app-name>"
  printf '%s\n' " -n The new name of the application"
}
################################################################################################
## Standard failure method...lets print a message out then fail with the "appropriate" return
## code
################################################################################################
# Print a message framed by blank lines, show usage, and exit.
#   $1 - message text
#   $2 - exit status to terminate with
fail() {
  MESSAGE=${1}
  RC=${2}
  printf '\n%s\n\n' "${MESSAGE}"
  usage
  exit ${RC}
}
################################################################################################
## Function will take a file name, a string to search for and a replacement string. The function
## will loop through the lines in the file and replace any instances of the search string with
## the replacement string. This can be used to replace any string with any other string in
## any text file.
################################################################################################
################################################################################################
## Replace every occurrence of a search string with a replacement string in
## one text file, in place (via a temporary .bak copy).
##   $1 - file to edit
##   $2 - text to search for
##   $3 - replacement text
## NOTE(review): like the original, $2/$3 are interpolated raw into the sed
## program, so "/" or regex metacharacters in them will break the edit.
################################################################################################
updateFile() {
	local FILE="$1"
	local TXT_TO_REPLACE="$2"
	local REPLACEMENT_TXT="$3"
	local BACKUP_FILE="${FILE}.bak"
	echo "Changing '${TXT_TO_REPLACE}' to '${REPLACEMENT_TXT}' in file: ${FILE}"
	cp "${FILE}" "${BACKUP_FILE}"
	# Write sed's output straight back over the original; the pointless
	# output=`...` capture and unused cmdrc of the original are dropped.
	sed "s/${TXT_TO_REPLACE}/${REPLACEMENT_TXT}/g" "${BACKUP_FILE}" > "${FILE}"
	rm -f "${BACKUP_FILE}"
}
###############################################################################
#
# Lets parse our arguments.
# Basically just saying lets parse args, im aware of h and n, and n requires an argument.
#
###############################################################################
# Parse flags: -h prints usage and exits 0; -n sets the new application
# name. The leading ":" in the optstring selects getopts' silent error mode.
# NOTE(review): there is no \?) arm, so unknown options are silently
# ignored rather than rejected.
while getopts ":hn:" opt; do
case "${opt}" in
h) usage
exit 0
;;
n) NEW_APP_NAME=$OPTARG
;;
esac
done
# Discard the parsed options; any remaining words become positional args.
shift $((OPTIND-1))
[ "$1" = "--" ] && shift
#echo "name=$APP_NAME, leftovers: '$@'"
###############################################################################
#
# Now lets do some real work.
#
###############################################################################
################################################################################################
## Checking to see if the passed in application name is valid...should not be
## the original string nor some empty string
################################################################################################
################################################################################################
## Validate the requested name, rewrite it into every known file, then
## persist it for the next run.
################################################################################################
# Two separate tests instead of the original deprecated/ambiguous "-o"
# inside a single [ ].
if [ -z "${NEW_APP_NAME}" ] || [ "${NEW_APP_NAME}" = "NewName" ]; then
	fail "You need to supply a new application name. The -n parameter cannot be empty." 1
fi
# Quote the array expansion so entries survive even if a path gains spaces.
for file in "${FILES_WTIH_APP_NAME[@]}" ; do
	updateFile "${file}" "${APP_NAME}" "${NEW_APP_NAME}"
done
echo "${NEW_APP_NAME}" > "${SCRIPT_DIR}/appname.txt"
exit 0
|
IBM-MIL/IBM-Ready-App-for-Retail
|
scripts/changeAppName.sh
|
Shell
|
epl-1.0
| 4,183 |
#!/bin/sh
# Shrink a macOS .app bundle: remove the hicolor icon theme and strip all
# bundled dylibs. Prompts for confirmation before doing anything destructive.
# usage: cleanup.sh /path/to/App.app
if [ -z "$1" ]; then
	echo "need a filename"
	exit 1
fi
echo "Is $1 the Application you want to clean up? [y/N]"
read -r A
case "$A" in
	[Yy]* )
		echo "* Running cleanup"
		;;
	[Nn]* )
		echo "Aborting..."
		exit 1
		;;
	* )
		# BUG FIX: the original printed this message and then FELL THROUGH
		# to the destructive steps. The prompt's default is "N", so any
		# unrecognized answer must abort.
		echo you did not say yes or no
		exit 1
		;;
esac
# "$1" is quoted throughout so bundle paths with spaces (common for .app
# names) are handled correctly.
echo "* Pre cleanup size:" "$(du -sh "$1" | awk '{print $1}')"
echo "* Removing hicolors icon theme"
rm -rf "$1/Contents/Resources/share/icons/hicolor"
echo "* Stripping libraries"
find "$1" -name '*.dylib' -exec strip -u -r {} +
echo "* Done, size:" "$(du -sh "$1" | awk '{print $1}')"
|
dffischer/Abraca
|
macosx/cleanup.sh
|
Shell
|
gpl-2.0
| 549 |
#!/bin/sh
# Probe whether database $2 is usable via client command $1 (presumably a
# mysql client invocation -- confirm against callers): run "use <db>"
# silently and print the client's exit status (0 means the database exists
# and is accessible).
# NOTE(review): $1 and $2 are unquoted and interpolated directly; intended
# only for trusted invocation from the surrounding utils.
$1 -e "use $2" 1>/dev/null 2>&1
echo $?
|
churchcommunitybuilder/bombard
|
utils/check.sh
|
Shell
|
gpl-2.0
| 54 |
## This script updates htaccess files to limit entry (11-8-12).
## purpose: user can direct password protection of admin/conf directories; entire xGDBvm website; or change MySQL password for user 'gdbuser'
## usage: sudo /xGDBvm/scripts/AdminSecurityUpdate.sh
## Assign directory ownership to Apache (deliberately NOT recursive, no -R) so scripts can copy or create .htaccess files from template dist.htaccess here.
chown apache:xgdb /xGDBvm
chown apache:xgdb /xGDBvm/admin/
chown apache:xgdb /xGDBvm/XGDB/conf/
## Change permissions on /xGDBvm/admin/dbpass so that Apache can write mysql password to it.
## (664 = owner/group writable; group xgdb keeps it readable by the VM user.)
chmod 664 /xGDBvm/admin/dbpass
chown apache:xgdb /xGDBvm/admin/dbpass
|
vpbrendel/xGDBvm
|
scripts/AdminSecurityUpdate.sh
|
Shell
|
gpl-3.0
| 673 |
#!/bin/sh
# When the test harness was configured with profiling (enable_prof=1),
# export jemalloc's MALLOC_CONF so the prof_stats unit test runs with
# profiling, sampling of every allocation, and stats collection enabled.
if [ "${enable_prof}" = "1" ] ; then
  export MALLOC_CONF="prof:true,prof_active:true,lg_prof_sample:0,prof_stats:true"
fi
|
arangodb/arangodb
|
3rdParty/jemalloc/v5.2.1/test/unit/prof_stats.sh
|
Shell
|
apache-2.0
| 136 |
#!/bin/sh
# Sort input lines while honoring trailing "# 0x<hex>" annotations: the
# first sed moves the hex token to the front of each annotated line so that
# "sort" orders those lines by that value; the second sed moves the token
# back to the end. Lines without a trailing hex comment pass through both
# seds unchanged and sort by their full text.
sed -e 's/\(^.*#\) \(0x\w\+\)$/\2 \1/' "$@" | sort | sed -e 's/\(0x\w\+\) \(.*#\)$/\2 \1/'
|
xranby/apitrace
|
specs/scripts/sort.sh
|
Shell
|
mit
| 101 |
#!/bin/bash
# Fetch GitHub pull request #ID into a local branch test-$ID, rebase it on
# upstream/master, run the unit test suite, then remove the branch again.
#
# usage: test_pull_request.sh ID

# Make sure the id is here
if [ -z "$1" ]; then
    # printf, not echo: plain bash echo does not interpret "\n", so the
    # original usage text printed the backslash sequences literally.
    printf 'Usage:\n test_pull_request.sh ID\n\n'
    exit 1
fi
ID=$1
CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
# "realpath" utility is not installed by default on all Linuxen and we need the true path
# NOTE(review): hard dependency on python2.7 -- worth migrating to
# "readlink -f" or python3 in a follow-up.
SCRIPTDIR=$(python2.7 -c 'import os,sys;print os.path.dirname(os.path.realpath(sys.argv[1]))' "$0")
# Make sure we're in the correct place to run git commands no matter where
# we were called from; abort rather than run git in the wrong directory.
cd "$(dirname "$SCRIPTDIR")" || exit 1
# Make sure we have upstream
if ! git remote | grep -q upstream; then
    git remote add upstream https://github.com/hacklab-fi/asylum.git
fi
git fetch upstream master
# Local branch exists, remove it
if git rev-parse --verify "test-$ID"; then
    git branch -D "test-$ID"
fi
if ! git fetch upstream "pull/$ID/head:test-$ID"; then
    exit 1
fi
# Store current branch
BRANCH=$(git rev-parse --abbrev-ref HEAD)
# From here on any failure should abort immediately.
set -e
git checkout "test-$ID"
git rebase upstream/master
# Run tests
"$SCRIPTDIR/run_unit_tests.sh"
git checkout "$BRANCH"
git branch -D "test-$ID"
echo "Temporary branch test-$ID was removed, to check it out again run: git fetch upstream pull/$ID/head:test-$ID"
|
rambo/asylum
|
docker/test_pull_request.sh
|
Shell
|
mit
| 1,199 |
#!/usr/bin/env bash
# vim: set ft=sh sw=4 sts=4 et :
# Remove the world_TEST_dir scratch directory left behind by the test run.
# Exits 0 whether or not the directory existed.
if [ -d world_TEST_dir ] ; then
    rm -fr world_TEST_dir
fi
|
impulze/paludis
|
paludis/environments/paludis/world_TEST_cleanup.sh
|
Shell
|
gpl-2.0
| 130 |
# ltmain.sh - Provide generalized library-building support services.
# NOTE: Changing this file will not affect anything until you rerun ltconfig.
#
# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001
# Free Software Foundation, Inc.
# Originally by Gordon Matzigkeit <[email protected]>, 1996
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# Check that we have a working $echo.
# NOTE(review): $echo, $SHELL and friends are injected by ltconfig when this
# file is assembled into the final "libtool"; they are unset if ltmain.sh is
# run standalone.
if test "X$1" = X--no-reexec; then
# Discard the --no-reexec flag, and continue.
shift
elif test "X$1" = X--fallback-echo; then
# Avoid inline document here, it may be left over
:
elif test "X`($echo '\t') 2>/dev/null`" = 'X\t'; then
# Yippee, $echo works!
:
else
# Restart under the correct shell, and then maybe $echo will work.
exec $SHELL "$0" --no-reexec ${1+"$@"}
fi
# When re-invoked with --fallback-echo, act as a minimal echo replacement:
# print the remaining arguments verbatim via a here-document and exit.
if test "X$1" = X--fallback-echo; then
# used as fallback echo
shift
cat <<EOF
$*
EOF
exit 0
fi
# The name of this program.
progname=`$echo "$0" | sed 's%^.*/%%'`
modename="$progname"
# Constants.
PROGRAM=ltmain.sh
PACKAGE=libtool
VERSION=1.4a-GCC3.0
TIMESTAMP=" (1.641.2.256 2001/05/28 20:09:07 with GCC-local changes)"
default_mode=
help="Try \`$progname --help' for more information."
magic="%%%MAGIC variable%%%"
mkdir="mkdir"
mv="mv -f"
rm="rm -f"
# Sed substitution that helps us do robust quoting. It backslashifies
# metacharacters that are still active within double-quoted strings.
Xsed='sed -e 1s/^X//'
sed_quote_subst='s/\([\\`\\"$\\\\]\)/\\\1/g'
# Translators between spaces and newlines (octal \040 = space, \012 = NL).
SP2NL='tr \040 \012'
NL2SP='tr \015\012 \040\040'
# NLS nuisances.
# Only set LANG and LC_ALL to C if already set.
# These must not be set unconditionally because not all systems understand
# e.g. LANG=C (notably SCO).
# We save the old values to restore during execute mode.
if test "${LC_ALL+set}" = set; then
save_LC_ALL="$LC_ALL"; LC_ALL=C; export LC_ALL
fi
if test "${LANG+set}" = set; then
save_LANG="$LANG"; LANG=C; export LANG
fi
# Sanity checks: LTCONFIG_VERSION, build_libtool_libs and build_old_libs
# are spliced in by ltconfig; refuse to run if the configuration is absent
# or from a different libtool version.
if test "$LTCONFIG_VERSION" != "$VERSION"; then
echo "$modename: ltconfig version \`$LTCONFIG_VERSION' does not match $PROGRAM version \`$VERSION'" 1>&2
echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2
exit 1
fi
if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
echo "$modename: not configured to build any kind of library" 1>&2
echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2
exit 1
fi
# Global variables.
mode=$default_mode
nonopt=
prev=
prevopt=
run=
show="$echo"
show_help=
execute_dlfiles=
# sed programs converting between .lo (libtool object) and real object names.
lo2o="s/\\.lo\$/.${objext}/"
o2lo="s/\\.${objext}\$/.lo/"
taglist=
# Parse our command line options once, thoroughly.
# Consumes libtool's own options until the first non-option word (the
# wrapped command), which is left in $nonopt for mode inference below.
while test $# -gt 0
do
arg="$1"
shift
# Split "--opt=value" style arguments; $optarg holds the value part.
case $arg in
-*=*) optarg=`$echo "X$arg" | $Xsed -e 's/[-_a-zA-Z0-9]*=//'` ;;
*) optarg= ;;
esac
# If the previous option needs an argument, assign it.
if test -n "$prev"; then
case $prev in
execute_dlfiles)
execute_dlfiles="$execute_dlfiles $arg"
;;
tag)
tagname="$arg"
# Check whether tagname contains only valid characters
case $tagname in
*[!-_A-Za-z0-9,/]*)
echo "$progname: invalid tag name: $tagname" 1>&2
exit 1
;;
esac
case $tagname in
CC)
# Don't test for the "default" C tag, as we know, it's there, but
# not specially marked.
taglist="$taglist $tagname"
;;
*)
# Tagged configurations live as commented sections appended to this
# very script ($0); extract and evaluate the matching one.
if grep "^### BEGIN LIBTOOL TAG CONFIG: $tagname$" < "$0" > /dev/null; then
taglist="$taglist $tagname"
# Evaluate the configuration.
eval "`sed -n -e '/^### BEGIN LIBTOOL TAG CONFIG: '$tagname'$/,/^### END LIBTOOL TAG CONFIG: '$tagname'$/p' < $0`"
else
echo "$progname: ignoring unknown tag $tagname" 1>&2
fi
;;
esac
;;
*)
eval "$prev=\$arg"
;;
esac
prev=
prevopt=
continue
fi
# Have we seen a non-optional argument yet?
case $arg in
--help)
show_help=yes
;;
--version)
echo "$PROGRAM (GNU $PACKAGE) $VERSION$TIMESTAMP"
exit 0
;;
--config)
sed -n -e '/^### BEGIN LIBTOOL CONFIG/,/^### END LIBTOOL CONFIG/p' < "$0"
# Now print the configurations for the tags.
for tagname in $taglist; do
sed -n -e "/^### BEGIN LIBTOOL TAG CONFIG: $tagname$/,/^### END LIBTOOL TAG CONFIG: $tagname$/p" < "$0"
done
exit 0
;;
--debug)
echo "$progname: enabling shell trace mode"
set -x
;;
--dry-run | -n)
run=:
;;
--features)
echo "host: $host"
if test "$build_libtool_libs" = yes; then
echo "enable shared libraries"
else
echo "disable shared libraries"
fi
if test "$build_old_libs" = yes; then
echo "enable static libraries"
else
echo "disable static libraries"
fi
exit 0
;;
--finish) mode="finish" ;;
--mode) prevopt="--mode" prev=mode ;;
--mode=*) mode="$optarg" ;;
--quiet | --silent)
show=:
;;
--tag) prevopt="--tag" prev=tag ;;
--tag=*)
# Rewrite "--tag=X" as "--tag X" by re-setting the positional params,
# then let the $prev machinery above consume the value.
set tag "$optarg" ${1+"$@"}
shift
prev=tag
;;
-dlopen)
prevopt="-dlopen"
prev=execute_dlfiles
;;
-*)
$echo "$modename: unrecognized option \`$arg'" 1>&2
$echo "$help" 1>&2
exit 1
;;
*)
nonopt="$arg"
break
;;
esac
done
# A dangling $prevopt means the last option never received its argument.
if test -n "$prevopt"; then
$echo "$modename: option \`$prevopt' requires an argument" 1>&2
$echo "$help" 1>&2
exit 1
fi
# If this variable is set in any of the actions, the command in it
# will be execed at the end. This prevents here-documents from being
# left over by shells.
# (Initialized empty here; mode handlers later assign to it.)
exec_cmd=
if test -z "$show_help"; then
# Infer the operation mode.
if test -z "$mode"; then
case $nonopt in
*cc | *++ | gcc* | *-gcc*)
mode=link
for arg
do
case $arg in
-c)
mode=compile
break
;;
esac
done
;;
*db | *dbx | *strace | *truss)
mode=execute
;;
*install*|cp|mv)
mode=install
;;
*rm)
mode=uninstall
;;
*)
# If we have no mode, but dlfiles were specified, then do execute mode.
test -n "$execute_dlfiles" && mode=execute
# Just use the default operation mode.
if test -z "$mode"; then
if test -n "$nonopt"; then
$echo "$modename: warning: cannot infer operation mode from \`$nonopt'" 1>&2
else
$echo "$modename: warning: cannot infer operation mode without MODE-ARGS" 1>&2
fi
fi
;;
esac
fi
# Only execute mode is allowed to have -dlopen flags.
if test -n "$execute_dlfiles" && test "$mode" != execute; then
$echo "$modename: unrecognized option \`-dlopen'" 1>&2
$echo "$help" 1>&2
exit 1
fi
# Change the help message to a mode-specific one.
generic_help="$help"
help="Try \`$modename --help --mode=$mode' for more information."
# These modes are in order of execution frequency so that they run quickly.
case $mode in
# libtool compile mode
compile)
modename="$modename: compile"
# Get the compilation command and the source file.
base_compile=
prev=
lastarg=
srcfile="$nonopt"
suppress_output=
user_target=no
for arg
do
case $prev in
"") ;;
xcompiler)
# Aesthetically quote the previous argument.
prev=
lastarg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
# Double-quote args containing other shell metacharacters.
# Many Bourne shells cannot handle close brackets correctly
# in scan sets, so we specify it separately.
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
# Add the previous argument to base_compile.
if test -z "$base_compile"; then
base_compile="$lastarg"
else
base_compile="$base_compile $lastarg"
fi
continue
;;
esac
# Accept any command-line options.
case $arg in
-o)
if test "$user_target" != "no"; then
$echo "$modename: you cannot specify \`-o' more than once" 1>&2
exit 1
fi
user_target=next
;;
-static)
build_old_libs=yes
continue
;;
-prefer-pic)
pic_mode=yes
continue
;;
-prefer-non-pic)
pic_mode=no
continue
;;
-Xcompiler)
prev=xcompiler
continue
;;
-Wc,*)
args=`$echo "X$arg" | $Xsed -e "s/^-Wc,//"`
lastarg=
IFS="${IFS= }"; save_ifs="$IFS"; IFS=','
for arg in $args; do
IFS="$save_ifs"
# Double-quote args containing other shell metacharacters.
# Many Bourne shells cannot handle close brackets correctly
# in scan sets, so we specify it separately.
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
lastarg="$lastarg $arg"
done
IFS="$save_ifs"
lastarg=`$echo "X$lastarg" | $Xsed -e "s/^ //"`
# Add the arguments to base_compile.
if test -z "$base_compile"; then
base_compile="$lastarg"
else
base_compile="$base_compile $lastarg"
fi
continue
;;
esac
case $user_target in
next)
# The next one is the -o target name
user_target=yes
continue
;;
yes)
# We got the output file
user_target=set
libobj="$arg"
continue
;;
esac
# Accept the current argument as the source file.
lastarg="$srcfile"
srcfile="$arg"
# Aesthetically quote the previous argument.
# Backslashify any backslashes, double quotes, and dollar signs.
# These are the only characters that are still specially
# interpreted inside of double-quoted scrings.
lastarg=`$echo "X$lastarg" | $Xsed -e "$sed_quote_subst"`
# Double-quote args containing other shell metacharacters.
# Many Bourne shells cannot handle close brackets correctly
# in scan sets, so we specify it separately.
case $lastarg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
lastarg="\"$lastarg\""
;;
esac
# Add the previous argument to base_compile.
if test -z "$base_compile"; then
base_compile="$lastarg"
else
base_compile="$base_compile $lastarg"
fi
done
case $user_target in
set)
;;
no)
# Get the name of the library object.
libobj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%'`
;;
*)
$echo "$modename: you must specify a target with \`-o'" 1>&2
exit 1
;;
esac
# Recognize several different file suffixes.
# If the user specifies -o file.o, it is replaced with file.lo
xform='[cCFSfmso]'
case $libobj in
*.ada) xform=ada ;;
*.adb) xform=adb ;;
*.ads) xform=ads ;;
*.asm) xform=asm ;;
*.c++) xform=c++ ;;
*.cc) xform=cc ;;
*.class) xform=class ;;
*.cpp) xform=cpp ;;
*.cxx) xform=cxx ;;
*.f90) xform=f90 ;;
*.for) xform=for ;;
*.java) xform=java ;;
esac
libobj=`$echo "X$libobj" | $Xsed -e "s/\.$xform$/.lo/"`
case $libobj in
*.lo) obj=`$echo "X$libobj" | $Xsed -e "$lo2o"` ;;
*)
$echo "$modename: cannot determine name of library object from \`$libobj'" 1>&2
exit 1
;;
esac
# Infer tagged configuration to use if any are available and
# if one wasn't chosen via the "--tag" command line option.
# Only attempt this if the compiler in the base compile
# command doesn't match the default compiler.
if test -n "$available_tags" && test -z "$tagname"; then
# APPLE LOCAL begin handle ~ in pathnames 2002-01-14 sts
# Since CC may have args with shell metachars in them, add
# doublequotes to args so it looks the same as $base_compile.
qCC=
for argu in $CC; do
case $argu in
# Double-quote args containing other shell metacharacters.
# Many Bourne shells cannot handle close brackets correctly
# in scan sets, so we specify it separately.
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
argu="\"$argu\""
;;
esac
# Add the previous argument to qCC.
if test -z "$qCC"; then
qCC="$argu"
else
qCC="$qCC $argu"
fi
done
# APPLE LOCAL end handle ~ in pathnames 2002-01-14 sts
case $base_compile in
# APPLE LOCAL handle ~ in pathnames 2002-01-14 sts
"$qCC "*) ;;
# Blanks in the command may have been stripped by the calling shell,
# but not from the CC environment variable when ltconfig was run.
# APPLE LOCAL handle ~ in pathnames 2002-01-14 sts
"`$echo $qCC` "*) ;;
*)
for z in $available_tags; do
if grep "^### BEGIN LIBTOOL TAG CONFIG: $z$" < "$0" > /dev/null; then
# Evaluate the configuration.
eval "`sed -n -e '/^### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^### END LIBTOOL TAG CONFIG: '$z'$/p' < $0`"
case $base_compile in
"$CC "*)
# The compiler in the base compile command matches
# the one in the tagged configuration.
# Assume this is the tagged configuration we want.
tagname=$z
break
;;
"`$echo $CC` "*)
tagname=$z
break
;;
esac
fi
done
# If $tagname still isn't set, then no tagged configuration
# was found and let the user know that the "--tag" command
# line option must be used.
if test -z "$tagname"; then
echo "$modename: unable to infer tagged configuration"
echo "$modename: specify a tag with \`--tag'" 1>&2
exit 1
# else
# echo "$modename: using $tagname tagged configuration"
fi
;;
esac
fi
objname=`$echo "X$obj" | $Xsed -e 's%^.*/%%'`
xdir=`$echo "X$obj" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$obj"; then
xdir=
else
xdir=$xdir/
fi
lobj=${xdir}$objdir/$objname
if test -z "$base_compile"; then
$echo "$modename: you must specify a compilation command" 1>&2
$echo "$help" 1>&2
exit 1
fi
# Delete any leftover library objects.
if test "$build_old_libs" = yes; then
removelist="$obj $lobj $libobj ${libobj}T"
else
removelist="$lobj $libobj ${libobj}T"
fi
$run $rm $removelist
trap "$run $rm $removelist; exit 1" 1 2 15
# On Cygwin there's no "real" PIC flag so we must build both object types
case $host_os in
cygwin* | mingw* | pw32* | os2*)
pic_mode=default
;;
esac
if test $pic_mode = no && test "$deplibs_check_method" != pass_all; then
# non-PIC code in shared libraries is not supported
pic_mode=default
fi
# Calculate the filename of the output object if compiler does
# not support -o with -c
if test "$compiler_c_o" = no; then
output_obj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%' -e 's%\.[^.]*$%%'`.${objext}
lockfile="$output_obj.lock"
removelist="$removelist $output_obj $lockfile"
trap "$run $rm $removelist; exit 1" 1 2 15
else
output_obj=
need_locks=no
lockfile=
fi
# Lock this critical section if it is needed
# We use this script file to make the link, it avoids creating a new file
if test "$need_locks" = yes; then
until $run ln "$0" "$lockfile" 2>/dev/null; do
$show "Waiting for $lockfile to be removed"
sleep 2
done
elif test "$need_locks" = warn; then
if test -f "$lockfile"; then
echo "\
*** ERROR, $lockfile exists and contains:
`cat $lockfile 2>/dev/null`
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support \`-c' and \`-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$run $rm $removelist
exit 1
fi
echo $srcfile > "$lockfile"
fi
if test -n "$fix_srcfile_path"; then
eval srcfile=\"$fix_srcfile_path\"
fi
# Remove stale libtool object files (final and temporary) before rebuilding.
$run $rm "$libobj" "${libobj}T"
# Create a libtool object file (analogous to a ".la" file),
# but don't create it if we're doing a dry run.
test -z "$run" && cat > ${libobj}T <<EOF
# $libobj - a libtool object file
# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
#
# Please DO NOT delete this file!
# It is necessary for linking the library.
# Name of the PIC object.
EOF
# Only build a PIC object if we are building libtool libraries.
if test "$build_libtool_libs" = yes; then
# Without this assignment, base_compile gets emptied.
fbsd_hideous_sh_bug=$base_compile
if test "$pic_mode" != no; then
command="$base_compile $srcfile $pic_flag"
else
# Don't build PIC code
command="$base_compile $srcfile"
fi
# Ensure the destination object directory exists; tolerate the race where
# another process created it between the test and the mkdir.
if test ! -d ${xdir}$objdir; then
$show "$mkdir ${xdir}$objdir"
$run $mkdir ${xdir}$objdir
status=$?
if test $status -ne 0 && test ! -d ${xdir}$objdir; then
exit $status
fi
fi
if test -z "$output_obj"; then
# Place PIC objects in $objdir
command="$command -o $lobj"
fi
# Remove any previous PIC object, run the compile, clean up on failure.
$run $rm "$lobj" "$output_obj"
$show "$command"
if $run eval "$command"; then :
else
test -n "$output_obj" && $run $rm $removelist
exit 1
fi
# Verify nobody clobbered our lockfile while the compiler was running.
if test "$need_locks" = warn &&
test x"`cat $lockfile 2>/dev/null`" != x"$srcfile"; then
echo "\
*** ERROR, $lockfile contains:
`cat $lockfile 2>/dev/null`
but it should contain:
$srcfile
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support \`-c' and \`-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$run $rm $removelist
exit 1
fi
# Just move the object if needed, then go on to compile the next one
if test -n "$output_obj" && test "x$output_obj" != "x$lobj"; then
$show "$mv $output_obj $lobj"
if $run $mv $output_obj $lobj; then :
else
error=$?
$run $rm $removelist
exit $error
fi
fi
# Append the name of the PIC object to the libtool object file.
test -z "$run" && cat >> ${libobj}T <<EOF
pic_object='$objdir/$objname'
EOF
# Allow error messages only from the first compilation.
suppress_output=' >/dev/null 2>&1'
else
# No PIC object so indicate it doesn't exist in the libtool
# object file.
test -z "$run" && cat >> ${libobj}T <<EOF
pic_object=none
EOF
fi
# Only build a position-dependent object if we build old libraries.
if test "$build_old_libs" = yes; then
if test "$pic_mode" != yes; then
# Don't build PIC code
command="$base_compile $srcfile"
else
command="$base_compile $srcfile $pic_flag"
fi
if test "$compiler_c_o" = yes; then
command="$command -o $obj"
fi
# Suppress compiler output if we already did a PIC compilation.
command="$command$suppress_output"
$run $rm "$obj" "$output_obj"
$show "$command"
if $run eval "$command"; then :
else
$run $rm $removelist
exit 1
fi
# Re-check the lockfile after the non-PIC compile as well.
if test "$need_locks" = warn &&
test x"`cat $lockfile 2>/dev/null`" != x"$srcfile"; then
echo "\
*** ERROR, $lockfile contains:
`cat $lockfile 2>/dev/null`
but it should contain:
$srcfile
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support \`-c' and \`-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$run $rm $removelist
exit 1
fi
# Just move the object if needed
if test -n "$output_obj" && test "x$output_obj" != "x$obj"; then
$show "$mv $output_obj $obj"
if $run $mv $output_obj $obj; then :
else
error=$?
$run $rm $removelist
exit $error
fi
fi
# Append the name of the non-PIC object to the libtool object file.
# Only append if the libtool object file exists.
test -z "$run" && cat >> ${libobj}T <<EOF
# Name of the non-PIC object.
non_pic_object='$objname'
EOF
else
# Append the name of the non-PIC object to the libtool object file.
# Only append if the libtool object file exists.
test -z "$run" && cat >> ${libobj}T <<EOF
# Name of the non-PIC object.
non_pic_object=none
EOF
fi
# Atomically publish the completed libtool object file.
$run $mv "${libobj}T" "${libobj}"
# Unlock the critical section if it was locked
if test "$need_locks" != no; then
$run $rm "$lockfile"
fi
exit 0
;;
# libtool link mode
link | relink)
modename="$modename: link"
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*)
# It is impossible to link a dll without this setting, and
# we shouldn't force the makefile maintainer to figure out
# which system we are compiling for in order to pass an extra
# flag for every libtool invocation.
# allow_undefined=no
# FIXME: Unfortunately, there are problems with the above when trying
# to make a dll which has undefined symbols, in which case not
# even a static library is built. For now, we need to specify
# -no-undefined on the libtool link line when we can be certain
# that all symbols are satisfied, otherwise we get a static library.
allow_undefined=yes
;;
*)
allow_undefined=yes
;;
esac
# Seed the various command strings with $nonopt (the compiler name).
libtool_args="$nonopt"
base_compile="$nonopt"
compile_command="$nonopt"
finalize_command="$nonopt"
# State accumulated while parsing the link command line follows.
compile_rpath=
finalize_rpath=
compile_shlibpath=
finalize_shlibpath=
convenience=
old_convenience=
deplibs=
old_deplibs=
compiler_flags=
linker_flags=
dllsearchpath=
lib_search_path=`pwd`
avoid_version=no
dlfiles=
dlprefiles=
dlself=no
export_dynamic=no
export_symbols=
export_symbols_regex=
generated=
libobjs=
ltlibs=
module=no
no_install=no
objs=
non_pic_objects=
prefer_static_libs=no
preload=no
prev=
prevarg=
release=
rpath=
xrpath=
perm_rpath=
temp_rpath=
thread_safe=no
vinfo=
# We need to know -static, to get the right output filenames.
for arg
do
case $arg in
-all-static | -static)
if test "X$arg" = "X-all-static"; then
if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
$echo "$modename: warning: complete static linking is impossible in this configuration" 1>&2
fi
if test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
else
if test -z "$pic_flag" && test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
fi
# Either form of -static disables shared output entirely.
build_libtool_libs=no
build_old_libs=yes
prefer_static_libs=yes
break
;;
esac
done
# See if our shared archives depend on static archives.
test -n "$old_archive_from_new_cmds" && build_old_libs=yes
# Go through the arguments, transforming them on the way.
while test $# -gt 0; do
arg="$1"
base_compile="$base_compile $arg"
shift
# Quote args containing shell metacharacters, since the commands built
# from them are re-evaled later.
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
qarg=\"`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`\" ### testsuite: skip nested quoting test
;;
*) qarg=$arg ;;
esac
libtool_args="$libtool_args $qarg"
# If the previous option needs an argument, assign it.
if test -n "$prev"; then
case $prev in
output)
compile_command="$compile_command @OUTPUT@"
finalize_command="$finalize_command @OUTPUT@"
;;
esac
case $prev in
dlfiles|dlprefiles)
if test "$preload" = no; then
# Add the symbol object into the linking commands.
compile_command="$compile_command @SYMFILE@"
finalize_command="$finalize_command @SYMFILE@"
preload=yes
fi
case $arg in
*.la | *.lo) ;; # We handle these cases below.
force)
if test "$dlself" = no; then
dlself=needless
export_dynamic=yes
fi
prev=
continue
;;
self)
if test "$prev" = dlprefiles; then
dlself=yes
elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
dlself=yes
else
dlself=needless
export_dynamic=yes
fi
prev=
continue
;;
*)
if test "$prev" = dlfiles; then
dlfiles="$dlfiles $arg"
else
dlprefiles="$dlprefiles $arg"
fi
prev=
continue
;;
esac
;;
expsyms)
export_symbols="$arg"
if test ! -f "$arg"; then
$echo "$modename: symbol file \`$arg' does not exist"
exit 1
fi
prev=
continue
;;
expsyms_regex)
export_symbols_regex="$arg"
prev=
continue
;;
release)
release="-$arg"
prev=
continue
;;
# -objectlist FILE: each line of FILE names a .lo to be linked in.
objectlist)
if test -f "$arg"; then
save_arg=$arg
moreargs=
for fil in `cat $save_arg`
do
# moreargs="$moreargs $fil"
arg=$fil
# A libtool-controlled object.
# Check to see that this really is a libtool object.
if (sed -e '2q' $arg | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
pic_object=
non_pic_object=
# Read the .lo file
# If there is no directory component, then add one.
case $arg in
*/* | *\\*) . $arg ;;
*) . ./$arg ;;
esac
if test -z "$pic_object" || \
test -z "$non_pic_object" ||
test "$pic_object" = none && \
test "$non_pic_object" = none; then
$echo "$modename: cannot find name of object for \`$arg'" 1>&2
exit 1
fi
# Extract subdirectory from the argument.
xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$arg"; then
xdir=
else
xdir="$xdir/"
fi
if test "$pic_object" != none; then
# Prepend the subdirectory the object is found in.
pic_object="$xdir$pic_object"
if test "$prev" = dlfiles; then
if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
dlfiles="$dlfiles $pic_object"
prev=
continue
else
# If libtool objects are unsupported, then we need to preload.
prev=dlprefiles
fi
fi
# CHECK ME: I think I busted this. -Ossama
if test "$prev" = dlprefiles; then
# Preload the old-style object.
dlprefiles="$dlprefiles $pic_object"
prev=
fi
# A PIC object.
libobjs="$libobjs $pic_object"
arg="$pic_object"
fi
# Non-PIC object.
if test "$non_pic_object" != none; then
# Prepend the subdirectory the object is found in.
non_pic_object="$xdir$non_pic_object"
# A standard non-PIC object
non_pic_objects="$non_pic_objects $non_pic_object"
if test -z "$pic_object" || test "$pic_object" = none ; then
arg="$non_pic_object"
fi
fi
else
# Only an error if not doing a dry-run.
if test -z "$run"; then
$echo "$modename: \`$arg' is not a valid libtool object" 1>&2
exit 1
else
# Dry-run case.
# Extract subdirectory from the argument.
xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$arg"; then
xdir=
else
xdir="$xdir/"
fi
pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"`
non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"`
libobjs="$libobjs $pic_object"
non_pic_objects="$non_pic_objects $non_pic_object"
fi
fi
done
else
$echo "$modename: link input file \`$save_arg' does not exist"
exit 1
fi
arg=$save_arg
prev=
continue
;;
rpath | xrpath)
# We need an absolute path.
case $arg in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
$echo "$modename: only absolute run-paths are allowed" 1>&2
exit 1
;;
esac
if test "$prev" = rpath; then
case "$rpath " in
*" $arg "*) ;;
*) rpath="$rpath $arg" ;;
esac
else
case "$xrpath " in
*" $arg "*) ;;
*) xrpath="$xrpath $arg" ;;
esac
fi
prev=
continue
;;
xcompiler)
compiler_flags="$compiler_flags $qarg"
prev=
compile_command="$compile_command $qarg"
finalize_command="$finalize_command $qarg"
continue
;;
xlinker)
linker_flags="$linker_flags $qarg"
compiler_flags="$compiler_flags $wl$qarg"
prev=
compile_command="$compile_command $wl$qarg"
finalize_command="$finalize_command $wl$qarg"
continue
;;
*)
eval "$prev=\"\$arg\""
prev=
continue
;;
esac
fi # test -n $prev
# Remember the raw argument for the "option requires an argument" message.
prevarg="$arg"
case $arg in
-all-static)
if test -n "$link_static_flag"; then
compile_command="$compile_command $link_static_flag"
finalize_command="$finalize_command $link_static_flag"
fi
continue
;;
-allow-undefined)
# FIXME: remove this flag sometime in the future.
$echo "$modename: \`-allow-undefined' is deprecated because it is the default" 1>&2
continue
;;
-avoid-version)
avoid_version=yes
continue
;;
-dlopen)
prev=dlfiles
continue
;;
-dlpreopen)
prev=dlprefiles
continue
;;
-export-dynamic)
export_dynamic=yes
continue
;;
-export-symbols | -export-symbols-regex)
if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
$echo "$modename: more than one -exported-symbols argument is not allowed"
exit 1
fi
if test "X$arg" = "X-export-symbols"; then
prev=expsyms
else
prev=expsyms_regex
fi
continue
;;
# The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
# so, if we see these flags be careful not to treat them like -L
-L[A-Z][A-Z]*:*)
case $with_gcc/$host in
no/*-*-irix*)
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
;;
esac
continue
;;
-L*)
dir=`$echo "X$arg" | $Xsed -e 's/^-L//'`
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
absdir=`cd "$dir" && pwd`
if test -z "$absdir"; then
$echo "$modename: cannot determine absolute directory name of \`$dir'" 1>&2
exit 1
fi
dir="$absdir"
;;
esac
# Record the search directory only once.
case "$deplibs " in
*" -L$dir "*) ;;
*)
deplibs="$deplibs -L$dir"
lib_search_path="$lib_search_path $dir"
;;
esac
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*)
case :$dllsearchpath: in
*":$dir:"*) ;;
*) dllsearchpath="$dllsearchpath:$dir";;
esac
;;
esac
continue
;;
-l*)
if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
case $host in
*-*-cygwin* | *-*-pw32* | *-*-beos*)
# These systems don't actually have a C or math library (as such)
continue
;;
*-*-mingw* | *-*-os2*)
# These systems don't actually have a C library (as such)
test "X$arg" = "X-lc" && continue
;;
esac
fi
deplibs="$deplibs $arg"
continue
;;
-module)
module=yes
continue
;;
-no-fast-install)
fast_install=no
continue
;;
-no-install)
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*)
# The PATH hackery in wrapper scripts is required on Windows
# in order for the loader to find any dlls it needs.
$echo "$modename: warning: \`-no-install' is ignored for $host" 1>&2
$echo "$modename: warning: assuming \`-no-fast-install' instead" 1>&2
fast_install=no
;;
*) no_install=yes ;;
esac
continue
;;
-no-undefined)
allow_undefined=no
continue
;;
-objectlist)
prev=objectlist
continue
;;
-o) prev=output ;;
-release)
prev=release
continue
;;
-rpath)
prev=rpath
continue
;;
-R)
prev=xrpath
continue
;;
-R*)
dir=`$echo "X$arg" | $Xsed -e 's/^-R//'`
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
$echo "$modename: only absolute run-paths are allowed" 1>&2
exit 1
;;
esac
case "$xrpath " in
*" $dir "*) ;;
*) xrpath="$xrpath $dir" ;;
esac
continue
;;
-static)
# The effects of -static are defined in a previous loop.
# We used to do the same as -all-static on platforms that
# didn't have a PIC flag, but the assumption that the effects
# would be equivalent was wrong. It would break on at least
# Digital Unix and AIX.
continue
;;
-thread-safe)
thread_safe=yes
continue
;;
-version-info)
prev=vinfo
continue
;;
# -Wc,FLAG[,FLAG...]: pass comma-separated flags through to the compiler.
-Wc,*)
args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wc,//'`
arg=
IFS="${IFS= }"; save_ifs="$IFS"; IFS=','
for flag in $args; do
IFS="$save_ifs"
case $flag in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
flag="\"$flag\""
;;
esac
arg="$arg $wl$flag"
compiler_flags="$compiler_flags $flag"
done
IFS="$save_ifs"
arg=`$echo "X$arg" | $Xsed -e "s/^ //"`
;;
# -Wl,FLAG[,FLAG...]: pass comma-separated flags through to the linker.
-Wl,*)
args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wl,//'`
arg=
IFS="${IFS= }"; save_ifs="$IFS"; IFS=','
for flag in $args; do
IFS="$save_ifs"
case $flag in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
flag="\"$flag\""
;;
esac
arg="$arg $wl$flag"
compiler_flags="$compiler_flags $wl$flag"
linker_flags="$linker_flags $flag"
done
IFS="$save_ifs"
arg=`$echo "X$arg" | $Xsed -e "s/^ //"`
;;
-Xcompiler)
prev=xcompiler
continue
;;
-Xlinker)
prev=xlinker
continue
;;
# Some other compiler flag.
-* | +*)
# Unknown arguments in both finalize_command and compile_command need
# to be aesthetically quoted because they are evaled later.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
;;
*.$objext)
# A standard object.
objs="$objs $arg"
;;
*.lo)
# A libtool-controlled object.
# Check to see that this really is a libtool object.
if (sed -e '2q' $arg | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
pic_object=
non_pic_object=
# Read the .lo file
# If there is no directory component, then add one.
case $arg in
*/* | *\\*) . $arg ;;
*) . ./$arg ;;
esac
if test -z "$pic_object" || \
test -z "$non_pic_object" ||
test "$pic_object" = none && \
test "$non_pic_object" = none; then
$echo "$modename: cannot find name of object for \`$arg'" 1>&2
exit 1
fi
# Extract subdirectory from the argument.
xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$arg"; then
xdir=
else
xdir="$xdir/"
fi
if test "$pic_object" != none; then
# Prepend the subdirectory the object is found in.
pic_object="$xdir$pic_object"
if test "$prev" = dlfiles; then
if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
dlfiles="$dlfiles $pic_object"
prev=
continue
else
# If libtool objects are unsupported, then we need to preload.
prev=dlprefiles
fi
fi
# CHECK ME: I think I busted this. -Ossama
if test "$prev" = dlprefiles; then
# Preload the old-style object.
dlprefiles="$dlprefiles $pic_object"
prev=
fi
# A PIC object.
libobjs="$libobjs $pic_object"
arg="$pic_object"
fi
# Non-PIC object.
if test "$non_pic_object" != none; then
# Prepend the subdirectory the object is found in.
non_pic_object="$xdir$non_pic_object"
# A standard non-PIC object
non_pic_objects="$non_pic_objects $non_pic_object"
if test -z "$pic_object" || test "$pic_object" = none ; then
arg="$non_pic_object"
fi
fi
else
# Only an error if not doing a dry-run.
if test -z "$run"; then
$echo "$modename: \`$arg' is not a valid libtool object" 1>&2
exit 1
else
# Dry-run case.
# Extract subdirectory from the argument.
xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$arg"; then
xdir=
else
xdir="$xdir/"
fi
pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"`
non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"`
libobjs="$libobjs $pic_object"
non_pic_objects="$non_pic_objects $non_pic_object"
fi
fi
;;
*.$libext)
# An archive.
deplibs="$deplibs $arg"
old_deplibs="$old_deplibs $arg"
continue
;;
*.la)
# A libtool-controlled library.
if test "$prev" = dlfiles; then
# This library was specified with -dlopen.
dlfiles="$dlfiles $arg"
prev=
elif test "$prev" = dlprefiles; then
# The library was specified with -dlpreopen.
dlprefiles="$dlprefiles $arg"
prev=
else
deplibs="$deplibs $arg"
fi
continue
;;
# Some other compiler argument.
*)
# Unknown arguments in both finalize_command and compile_command need
# to be aesthetically quoted because they are evaled later.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
;;
esac # arg
# Now actually substitute the argument into the commands.
if test -n "$arg"; then
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
fi
done # argument parsing loop
# A pending option at end-of-args means its argument was never supplied.
if test -n "$prev"; then
$echo "$modename: the \`$prevarg' option requires an argument" 1>&2
$echo "$help" 1>&2
exit 1
fi
# Infer tagged configuration to use if any are available and
# if one wasn't chosen via the "--tag" command line option.
# Only attempt this if the compiler in the base link
# command doesn't match the default compiler.
if test -n "$available_tags" && test -z "$tagname"; then
case $base_compile in
"$CC "*) ;;
# Blanks in the command may have been stripped by the calling shell,
# but not from the CC environment variable when ltconfig was run.
"`$echo $CC` "*) ;;
*)
for z in $available_tags; do
if grep "^### BEGIN LIBTOOL TAG CONFIG: $z$" < "$0" > /dev/null; then
# Evaluate the configuration.
eval "`sed -n -e '/^### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^### END LIBTOOL TAG CONFIG: '$z'$/p' < $0`"
case $base_compile in
"$CC "*)
# The compiler in $compile_command matches
# the one in the tagged configuration.
# Assume this is the tagged configuration we want.
tagname=$z
break
;;
"`$echo $CC` "*)
tagname=$z
break
;;
esac
fi
done
# If $tagname still isn't set, then no tagged configuration
# was found and let the user know that the "--tag" command
# line option must be used.
if test -z "$tagname"; then
echo "$modename: unable to infer tagged configuration"
echo "$modename: specify a tag with \`--tag'" 1>&2
exit 1
# else
# echo "$modename: using $tagname tagged configuration"
fi
;;
esac
fi
if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
eval arg=\"$export_dynamic_flag_spec\"
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
fi
# calculate the name of the file, without its directory
outputname=`$echo "X$output" | $Xsed -e 's%^.*/%%'`
libobjs_save="$libobjs"
if test -n "$shlibpath_var"; then
# get the directories listed in $shlibpath_var
eval shlib_search_path=\`\$echo \"X\${$shlibpath_var}\" \| \$Xsed -e \'s/:/ /g\'\`
else
shlib_search_path=
fi
eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
# Derive the output directory; if $output has no slash, use plain $objdir.
output_objdir=`$echo "X$output" | $Xsed -e 's%/[^/]*$%%'`
if test "X$output_objdir" = "X$output"; then
output_objdir="$objdir"
else
output_objdir="$output_objdir/$objdir"
fi
# Create the object directory.
if test ! -d $output_objdir; then
$show "$mkdir $output_objdir"
$run $mkdir $output_objdir
status=$?
if test $status -ne 0 && test ! -d $output_objdir; then
exit $status
fi
fi
# Determine the type of output
case $output in
"")
$echo "$modename: you must specify an output file" 1>&2
$echo "$help" 1>&2
exit 1
;;
*.$libext) linkmode=oldlib ;;
*.lo | *.$objext) linkmode=obj ;;
*.la) linkmode=lib ;;
*) linkmode=prog ;; # Anything else should be a program.
esac
specialdeplibs=
libs=
# Find all interdependent deplibs by searching for libraries
# that are linked more than once (e.g. -la -lb -la)
for deplib in $deplibs; do
case "$libs " in
*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
esac
libs="$libs $deplib"
done
if test $linkmode = lib; then
libs="$predeps $libs $compiler_lib_search_path $postdeps"
# Compute libraries that are listed more than once in $predeps
# $postdeps and mark them as special (i.e., whose duplicates are
# not to be eliminated).
pre_post_deps=
for pre_post_dep in $predeps $postdeps; do
case "$pre_post_deps " in
# Bug fix: append the single duplicated library ($pre_post_dep),
# not the whole accumulated list ($pre_post_deps), which polluted
# specialdeplibs with every previously-seen dependency.
*" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_dep" ;;
esac
pre_post_deps="$pre_post_deps $pre_post_dep"
done
pre_post_deps=
fi
# Reset accumulators for the dependency-resolution passes below.
deplibs=
newdependency_libs=
newlib_search_path=
need_relink=no # whether we're linking any uninstalled libtool libraries
notinst_deplibs= # not-installed libtool libraries
notinst_path= # paths that contain not-installed libtool libraries
case $linkmode in
lib)
passes="conv link"
# Libraries may only -dlopen other libtool libraries.
for file in $dlfiles $dlprefiles; do
case $file in
*.la) ;;
*)
$echo "$modename: libraries can \`-dlopen' only libtool libraries: $file" 1>&2
exit 1
;;
esac
done
;;
prog)
compile_deplibs=
finalize_deplibs=
alldeplibs=no
newdlfiles=
newdlprefiles=
passes="conv scan dlopen dlpreopen link"
;;
*) passes="conv"
;;
esac
for pass in $passes; do
if test $linkmode = prog; then
# Determine which files to process
case $pass in
dlopen)
libs="$dlfiles"
save_deplibs="$deplibs" # Collect dlpreopened libraries
deplibs=
;;
dlpreopen) libs="$dlprefiles" ;;
link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
esac
fi
for deplib in $libs; do
lib=
found=no
case $deplib in
-l*)
# Bug fix: the original condition used `&&' ($linkmode cannot equal
# both oldlib and obj at once), so this warning was unreachable and
# -l flags were silently kept for archives/objects.  Use `||' so the
# flag really is warned about and skipped in those modes.
if test "$linkmode" = oldlib || test "$linkmode" = obj; then
$echo "$modename: warning: \`-l' is ignored for archives/objects: $deplib" 1>&2
continue
fi
if test $pass = conv; then
deplibs="$deplib $deplibs"
continue
fi
name=`$echo "X$deplib" | $Xsed -e 's/^-l//'`
# Look for a libtool .la wrapper for this -lNAME in every search path.
for searchdir in $newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path; do
# Search the libtool library
lib="$searchdir/lib${name}.la"
if test -f "$lib"; then
found=yes
break
fi
done
if test "$found" != yes; then
# deplib doesn't seem to be a libtool library
if test "$linkmode,$pass" = "prog,link"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
test $linkmode = lib && newdependency_libs="$deplib $newdependency_libs"
fi
continue
fi
;; # -l
-L*)
case $linkmode in
lib)
deplibs="$deplib $deplibs"
test $pass = conv && continue
newdependency_libs="$deplib $newdependency_libs"
newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`
;;
prog)
if test $pass = conv; then
deplibs="$deplib $deplibs"
continue
fi
if test $pass = scan; then
deplibs="$deplib $deplibs"
newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`
else
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
fi
;;
*)
$echo "$modename: warning: \`-L' is ignored for archives/objects: $deplib" 1>&2
;;
esac # linkmode
continue
;; # -L
-R*)
if test $pass = link; then
dir=`$echo "X$deplib" | $Xsed -e 's/^-R//'`
# Make sure the xrpath contains only unique directories.
case "$xrpath " in
*" $dir "*) ;;
*) xrpath="$xrpath $dir" ;;
esac
fi
deplibs="$deplib $deplibs"
continue
;;
*.la) lib="$deplib" ;;
*.$libext)
# A plain static archive given as a dependency.
if test $pass = conv; then
deplibs="$deplib $deplibs"
continue
fi
case $linkmode in
lib)
if test "$deplibs_check_method" != pass_all; then
echo
echo "*** Warning: This library needs some functionality provided by $deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have."
else
echo
echo "*** Warning: Linking the shared library $output against the"
echo "*** static library $deplib is not portable!"
deplibs="$deplib $deplibs"
fi
continue
;;
prog)
if test $pass != link; then
deplibs="$deplib $deplibs"
else
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
fi
continue
;;
esac # linkmode
;; # *.$libext
*.lo | *.$objext)
if test $pass = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
# If there is no dlopen support or we're linking statically,
# we need to preload.
newdlprefiles="$newdlprefiles $deplib"
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
newdlfiles="$newdlfiles $deplib"
fi
continue
;;
%DEPLIBS%)
alldeplibs=yes
continue
;;
esac # case $deplib
# Everything past this point deals with a .la file in $lib.
if test $found = yes || test -f "$lib"; then :
else
$echo "$modename: cannot find the library \`$lib'" 1>&2
exit 1
fi
# Check to see that this really is a libtool archive.
if (sed -e '2q' $lib | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then :
else
$echo "$modename: \`$lib' is not a valid libtool archive" 1>&2
exit 1
fi
ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'`
test "X$ladir" = "X$lib" && ladir="."
# Reset the variables the .la file is expected to (re)define before
# sourcing it below.
dlname=
dlopen=
dlpreopen=
libdir=
library_names=
old_library=
# If the library was installed with an old release of libtool,
# it will not redefine variable installed.
installed=yes
# Read the .la file
case $lib in
*/* | *\\*) . $lib ;;
*) . ./$lib ;;
esac
# Collect the dl[pre]opened files declared by the .la file when linking a
# library (link pass), scanning for a program, or building an
# archive/object.  Bug fix: the last clause was
# `test $linkmode = oldlib && test $linkmode = obj', which is always false
# (one variable cannot equal two values); rewritten so it holds for either
# oldlib or obj mode, as intended.
if test "$linkmode,$pass" = "lib,link" ||
test "$linkmode,$pass" = "prog,scan" ||
{ test "$linkmode" != prog && test "$linkmode" != lib; }; then
# Add dl[pre]opened files of deplib
test -n "$dlopen" && dlfiles="$dlfiles $dlopen"
test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen"
fi
if test $pass = conv; then
# Only check for convenience libraries
deplibs="$lib $deplibs"
# A .la with no $libdir is an uninstalled convenience library.
if test -z "$libdir"; then
if test -z "$old_library"; then
$echo "$modename: cannot find name of link library for \`$lib'" 1>&2
exit 1
fi
# It is a libtool convenience library, so add in its objects.
convenience="$convenience $ladir/$objdir/$old_library"
old_convenience="$old_convenience $ladir/$objdir/$old_library"
tmp_libs=
for deplib in $dependency_libs; do
deplibs="$deplib $deplibs"
case "$tmp_libs " in
*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
esac
tmp_libs="$tmp_libs $deplib"
done
elif test $linkmode != prog && test $linkmode != lib; then
$echo "$modename: \`$lib' is not a convenience library" 1>&2
exit 1
fi
continue
fi # $pass = conv
# Get the name of the library we link against.
# The loop leaves $linklib holding the last (most preferred) name.
linklib=
for l in $old_library $library_names; do
linklib="$l"
done
if test -z "$linklib"; then
$echo "$modename: cannot find name of link library for \`$lib'" 1>&2
exit 1
fi
# This library was specified with -dlopen.
if test $pass = dlopen; then
if test -z "$libdir"; then
$echo "$modename: cannot -dlopen a convenience library: \`$lib'" 1>&2
exit 1
fi
if test -z "$dlname" || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
# If there is no dlname, no dlopen support or we're linking
# statically, we need to preload.
dlprefiles="$dlprefiles $lib"
else
newdlfiles="$newdlfiles $lib"
fi
continue
fi # $pass = dlopen
# We need an absolute path.
case $ladir in
[\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
*)
abs_ladir=`cd "$ladir" && pwd`
if test -z "$abs_ladir"; then
$echo "$modename: warning: cannot determine absolute directory name of \`$ladir'" 1>&2
$echo "$modename: passing it literally to the linker, although it might fail" 1>&2
abs_ladir="$ladir"
fi
;;
esac
laname=`$echo "X$lib" | $Xsed -e 's%^.*/%%'`
# Find the relevant object directory and library name.
if test "X$installed" = Xyes; then
if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
$echo "$modename: warning: library \`$lib' was moved." 1>&2
dir="$ladir"
absdir="$abs_ladir"
libdir="$abs_ladir"
else
dir="$libdir"
absdir="$libdir"
fi
else
dir="$ladir/$objdir"
absdir="$abs_ladir/$objdir"
# Remove this search path later
notinst_path="$notinst_path $abs_ladir"
fi # $installed = yes
# Strip ".la" suffix and "lib" prefix to get the bare library name.
name=`$echo "X$laname" | $Xsed -e 's/\.la$//' -e 's/^lib//'`
# This library was specified with -dlpreopen.
if test $pass = dlpreopen; then
if test -z "$libdir"; then
$echo "$modename: cannot -dlpreopen a convenience library: \`$lib'" 1>&2
exit 1
fi
# Prefer using a static library (so that no silly _DYNAMIC symbols
# are required to link).
if test -n "$old_library"; then
newdlprefiles="$newdlprefiles $dir/$old_library"
# Otherwise, use the dlname, so that lt_dlopen finds it.
elif test -n "$dlname"; then
newdlprefiles="$newdlprefiles $dir/$dlname"
else
newdlprefiles="$newdlprefiles $dir/$linklib"
fi
fi # $pass = dlpreopen
if test -z "$libdir"; then
# Link the convenience library
if test $linkmode = lib; then
deplibs="$dir/$old_library $deplibs"
elif test "$linkmode,$pass" = "prog,link"; then
compile_deplibs="$dir/$old_library $compile_deplibs"
finalize_deplibs="$dir/$old_library $finalize_deplibs"
else
deplibs="$lib $deplibs"
fi
continue
fi
# Program pre-link passes: just record search paths and decide whether to
# link every dependency_lib now or defer it to newdependency_libs.
if test $linkmode = prog && test $pass != link; then
newlib_search_path="$newlib_search_path $ladir"
deplibs="$lib $deplibs"
linkalldeplibs=no
if test "$link_all_deplibs" != no || test -z "$library_names" ||
test "$build_libtool_libs" = no; then
linkalldeplibs=yes
fi
tmp_libs=
for deplib in $dependency_libs; do
case $deplib in
-L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test
esac
# Need to link against all dependency_libs?
if test $linkalldeplibs = yes; then
deplibs="$deplib $deplibs"
else
# Need to hardcode shared library paths
# or/and link against static libraries
newdependency_libs="$deplib $newdependency_libs"
fi
case "$tmp_libs " in
*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
esac
tmp_libs="$tmp_libs $deplib"
done # for deplib
continue
fi # $linkmode = prog...
link_static=no # Whether the deplib will be linked statically
if test -n "$library_names" &&
{ test "$prefer_static_libs" = no || test -z "$old_library"; }; then
# Link against this shared library
if test "$linkmode,$pass" = "prog,link" ||
{ test $linkmode = lib && test $hardcode_into_libs = yes; }; then
# Hardcode the library path.
# Skip directories that are in the system default run-time
# search path.
case " $sys_lib_dlsearch_path " in
*" $absdir "*) ;;
*)
case "$compile_rpath " in
*" $absdir "*) ;;
*) compile_rpath="$compile_rpath $absdir"
esac
;;
esac
case " $sys_lib_dlsearch_path " in
*" $libdir "*) ;;
*)
case "$finalize_rpath " in
*" $libdir "*) ;;
*) finalize_rpath="$finalize_rpath $libdir"
esac
;;
esac
if test $linkmode = prog; then
# We need to hardcode the library path
if test -n "$shlibpath_var"; then
# Make sure the rpath contains only unique directories.
case "$temp_rpath " in
*" $dir "*) ;;
*" $absdir "*) ;;
*) temp_rpath="$temp_rpath $dir" ;;
esac
fi
fi
fi # $linkmode,$pass = prog,link...
if test "$alldeplibs" = yes &&
{ test "$deplibs_check_method" = pass_all ||
{ test "$build_libtool_libs" = yes &&
test -n "$library_names"; }; }; then
# We only need to search for static libraries
continue
fi
# An uninstalled shared deplib forces a relink at install time.
if test "$installed" = no; then
notinst_deplibs="$notinst_deplibs $lib"
need_relink=yes
fi
# Windows-style platforms: build an import library from an exported
# symbol list, then link against that instead of the dll itself.
if test -n "$old_archive_from_expsyms_cmds"; then
# figure out the soname
set dummy $library_names
realname="$2"
shift; shift
libname=`eval \\$echo \"$libname_spec\"`
# use dlname if we got it. it's perfectly good, no?
if test -n "$dlname"; then
soname="$dlname"
elif test -n "$soname_spec"; then
# bleh windows
case $host in
*cygwin*)
major=`expr $current - $age`
versuffix="-$major"
;;
esac
eval soname=\"$soname_spec\"
else
soname="$realname"
fi
# Make a new name for the extract_expsyms_cmds to use
soroot="$soname"
soname=`echo $soroot | sed -e 's/^.*\///'`
newlib="libimp-`echo $soname | sed 's/^lib//;s/\.dll$//'`.a"
# If the library has no export list, then create one now
if test -f "$output_objdir/$soname-def"; then :
else
$show "extracting exported symbol list from \`$soname'"
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
eval cmds=\"$extract_expsyms_cmds\"
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
fi
# Create $newlib
if test -f "$output_objdir/$newlib"; then :; else
$show "generating import library for \`$soname'"
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
eval cmds=\"$old_archive_from_expsyms_cmds\"
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
fi
# make sure the library variables are pointing to the new library
dir=$output_objdir
linklib=$newlib
fi # test -n $old_archive_from_expsyms_cmds
if test $linkmode = prog || test "$mode" != relink; then
add_shlibpath=
add_dir=
add=
lib_linked=yes
case $hardcode_action in
immediate | unsupported)
if test "$hardcode_direct" = no; then
add="$dir/$linklib"
elif test "$hardcode_minus_L" = no; then
case $host in
*-*-sunos*) add_shlibpath="$dir" ;;
esac
add_dir="-L$dir"
add="-l$name"
elif test "$hardcode_shlibpath_var" = no; then
add_shlibpath="$dir"
add="-l$name"
else
lib_linked=no
fi
;;
relink)
if test "$hardcode_direct" = yes; then
add="$dir/$linklib"
elif test "$hardcode_minus_L" = yes; then
add_dir="-L$dir"
add="-l$name"
elif test "$hardcode_shlibpath_var" = yes; then
add_shlibpath="$dir"
add="-l$name"
else
lib_linked=no
fi
;;
*) lib_linked=no ;;
esac
if test "$lib_linked" != yes; then
$echo "$modename: configuration error: unsupported hardcode properties"
exit 1
fi
if test -n "$add_shlibpath"; then
case :$compile_shlibpath: in
*":$add_shlibpath:"*) ;;
*) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;;
esac
fi
if test $linkmode = prog; then
test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
test -n "$add" && compile_deplibs="$add $compile_deplibs"
else
test -n "$add_dir" && deplibs="$add_dir $deplibs"
test -n "$add" && deplibs="$add $deplibs"
if test "$hardcode_direct" != yes && \
test "$hardcode_minus_L" != yes && \
test "$hardcode_shlibpath_var" = yes; then
case :$finalize_shlibpath: in
*":$libdir:"*) ;;
*) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
esac
fi
fi
fi
if test $linkmode = prog || test "$mode" = relink; then
add_shlibpath=
add_dir=
add=
# Finalize command for both is simple: just hardcode it.
if test "$hardcode_direct" = yes; then
add="$libdir/$linklib"
elif test "$hardcode_minus_L" = yes; then
add_dir="-L$libdir"
add="-l$name"
elif test "$hardcode_shlibpath_var" = yes; then
case :$finalize_shlibpath: in
*":$libdir:"*) ;;
*) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
esac
add="-l$name"
else
# We cannot seem to hardcode it, guess we'll fake it.
add_dir="-L$libdir"
add="-l$name"
fi
if test $linkmode = prog; then
test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
else
test -n "$add_dir" && deplibs="$add_dir $deplibs"
test -n "$add" && deplibs="$add $deplibs"
fi
fi
elif test $linkmode = prog; then
if test "$alldeplibs" = yes &&
{ test "$deplibs_check_method" = pass_all ||
{ test "$build_libtool_libs" = yes &&
test -n "$library_names"; }; }; then
# We only need to search for static libraries
continue
fi
# Try to link the static library
# Here we assume that one of hardcode_direct or hardcode_minus_L
# is not unsupported. This is valid on all known static and
# shared platforms.
if test "$hardcode_direct" != unsupported; then
test -n "$old_library" && linklib="$old_library"
compile_deplibs="$dir/$linklib $compile_deplibs"
finalize_deplibs="$dir/$linklib $finalize_deplibs"
else
compile_deplibs="-l$name -L$dir $compile_deplibs"
finalize_deplibs="-l$name -L$dir $finalize_deplibs"
fi
elif test "$build_libtool_libs" = yes; then
# Not a shared library
if test "$deplibs_check_method" != pass_all; then
# We're trying to link a shared library against a static one
# but the system doesn't support it.
# Just print a warning and add the library to dependency_libs so
# that the program can be linked against the static library.
echo
echo "*** Warning: This library needs some functionality provided by $lib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have."
if test "$module" = yes; then
echo "*** Therefore, libtool will create a static module, that should work "
echo "*** as long as the dlopening application is linked with the -dlopen flag."
if test -z "$global_symbol_pipe"; then
echo
echo "*** However, this would only work if libtool was able to extract symbol"
echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
echo "*** not find such a program. So, this module is probably useless."
echo "*** \`nm' from GNU binutils and a full rebuild may help."
fi
if test "$build_old_libs" = no; then
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
fi
else
convenience="$convenience $dir/$old_library"
old_convenience="$old_convenience $dir/$old_library"
deplibs="$dir/$old_library $deplibs"
link_static=yes
fi
fi # link shared/static library?
if test $linkmode = lib; then
if test -n "$dependency_libs" &&
{ test $hardcode_into_libs != yes || test $build_old_libs = yes ||
test $link_static = yes; }; then
# Extract -R from dependency_libs
temp_deplibs=
for libdir in $dependency_libs; do
case $libdir in
-R*) temp_xrpath=`$echo "X$libdir" | $Xsed -e 's/^-R//'`
case " $xrpath " in
*" $temp_xrpath "*) ;;
*) xrpath="$xrpath $temp_xrpath";;
esac;;
*) temp_deplibs="$temp_deplibs $libdir";;
esac
done
dependency_libs="$temp_deplibs"
fi
newlib_search_path="$newlib_search_path $absdir"
# Link against this library
test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
# ... and its dependency_libs
tmp_libs=
for deplib in $dependency_libs; do
newdependency_libs="$deplib $newdependency_libs"
case "$tmp_libs " in
*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
esac
tmp_libs="$tmp_libs $deplib"
done
if test $link_all_deplibs != no; then
# Add the search paths of all dependency libraries
for deplib in $dependency_libs; do
case $deplib in
-L*) path="$deplib" ;;
*.la)
dir=`$echo "X$deplib" | $Xsed -e 's%/[^/]*$%%'`
test "X$dir" = "X$deplib" && dir="."
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
*)
absdir=`cd "$dir" && pwd`
if test -z "$absdir"; then
$echo "$modename: warning: cannot determine absolute directory name of \`$dir'" 1>&2
absdir="$dir"
fi
;;
esac
if grep "^installed=no" $deplib > /dev/null; then
path="-L$absdir/$objdir"
else
eval libdir=`sed -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
if test -z "$libdir"; then
$echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2
exit 1
fi
if test "$absdir" != "$libdir"; then
$echo "$modename: warning: \`$deplib' seems to be moved" 1>&2
fi
path="-L$absdir"
fi
;;
*) continue ;;
esac
case " $deplibs " in
*" $path "*) ;;
*) deplibs="$path $deplibs" ;;
esac
done
fi # link_all_deplibs != no
fi # linkmode = lib
done # for deplib in $libs
if test $pass = dlpreopen; then
# Link the dlpreopened libraries before other libraries
for deplib in $save_deplibs; do
deplibs="$deplib $deplibs"
done
fi
if test $pass != dlopen; then
test $pass != scan && dependency_libs="$newdependency_libs"
if test $pass != conv; then
# Make sure lib_search_path contains only unique directories.
lib_search_path=
for dir in $newlib_search_path; do
case "$lib_search_path " in
*" $dir "*) ;;
*) lib_search_path="$lib_search_path $dir" ;;
esac
done
newlib_search_path=
fi
if test "$linkmode,$pass" != "prog,link"; then
vars="deplibs"
else
vars="compile_deplibs finalize_deplibs"
fi
for var in $vars dependency_libs; do
# Add libraries to $var in reverse order
eval tmp_libs=\"\$$var\"
new_libs=
for deplib in $tmp_libs; do
case $deplib in
-L*) new_libs="$deplib $new_libs" ;;
*)
case " $specialdeplibs " in
*" $deplib "*) new_libs="$deplib $new_libs" ;;
*)
case " $new_libs " in
*" $deplib "*) ;;
*) new_libs="$deplib $new_libs" ;;
esac
;;
esac
;;
esac
done
tmp_libs=
for deplib in $new_libs; do
case $deplib in
-L*)
case " $tmp_libs " in
*" $deplib "*) ;;
*) tmp_libs="$tmp_libs $deplib" ;;
esac
;;
*) tmp_libs="$tmp_libs $deplib" ;;
esac
done
eval $var=\"$tmp_libs\"
done # for var
fi
if test "$pass" = "conv" &&
{ test "$linkmode" = "lib" || test "$linkmode" = "prog"; }; then
libs="$deplibs" # reset libs
deplibs=
fi
done # for pass
if test $linkmode = prog; then
dlfiles="$newdlfiles"
dlprefiles="$newdlprefiles"
fi
case $linkmode in
oldlib)
if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
$echo "$modename: warning: \`-dlopen' is ignored for archives" 1>&2
fi
if test -n "$rpath"; then
$echo "$modename: warning: \`-rpath' is ignored for archives" 1>&2
fi
if test -n "$xrpath"; then
$echo "$modename: warning: \`-R' is ignored for archives" 1>&2
fi
if test -n "$vinfo"; then
$echo "$modename: warning: \`-version-info' is ignored for archives" 1>&2
fi
if test -n "$release"; then
$echo "$modename: warning: \`-release' is ignored for archives" 1>&2
fi
if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
$echo "$modename: warning: \`-export-symbols' is ignored for archives" 1>&2
fi
# Now set the variables for building old libraries.
build_libtool_libs=no
oldlibs="$output"
objs="$objs$old_deplibs"
;;
lib)
# Make sure we only generate libraries of the form `libNAME.la'.
case $outputname in
lib*)
name=`$echo "X$outputname" | $Xsed -e 's/\.la$//' -e 's/^lib//'`
eval libname=\"$libname_spec\"
;;
*)
if test "$module" = no; then
$echo "$modename: libtool library \`$output' must begin with \`lib'" 1>&2
$echo "$help" 1>&2
exit 1
fi
if test "$need_lib_prefix" != no; then
# Add the "lib" prefix for modules if required
name=`$echo "X$outputname" | $Xsed -e 's/\.la$//'`
eval libname=\"$libname_spec\"
else
libname=`$echo "X$outputname" | $Xsed -e 's/\.la$//'`
fi
;;
esac
if test -n "$objs"; then
if test "$deplibs_check_method" != pass_all; then
$echo "$modename: cannot build libtool library \`$output' from non-libtool objects on this host:$objs" 2>&1
exit 1
else
echo
echo "*** Warning: Linking the shared library $output against the non-libtool"
echo "*** objects $objs is not portable!"
libobjs="$libobjs $objs"
fi
fi
if test "$dlself" != no; then
$echo "$modename: warning: \`-dlopen self' is ignored for libtool libraries" 1>&2
fi
set dummy $rpath
if test $# -gt 2; then
$echo "$modename: warning: ignoring multiple \`-rpath's for a libtool library" 1>&2
fi
install_libdir="$2"
oldlibs=
if test -z "$rpath"; then
if test "$build_libtool_libs" = yes; then
# Building a libtool convenience library.
# Some compilers have problems with a `.al' extension so
# convenience libraries should have the same extension that an
# archive normally would.
oldlibs="$output_objdir/$libname.$libext $oldlibs"
build_libtool_libs=convenience
build_old_libs=yes
fi
if test -n "$vinfo"; then
$echo "$modename: warning: \`-version-info' is ignored for convenience libraries" 1>&2
fi
if test -n "$release"; then
$echo "$modename: warning: \`-release' is ignored for convenience libraries" 1>&2
fi
else
# Parse the version information argument.
IFS="${IFS= }"; save_ifs="$IFS"; IFS=':'
set dummy $vinfo 0 0 0
IFS="$save_ifs"
if test -n "$8"; then
$echo "$modename: too many parameters to \`-version-info'" 1>&2
$echo "$help" 1>&2
exit 1
fi
current="$2"
revision="$3"
age="$4"
# Check that each of the things are valid numbers.
case $current in
0 | [1-9] | [1-9][0-9] | [1-9][0-9][0-9]) ;;
*)
$echo "$modename: CURRENT \`$current' is not a nonnegative integer" 1>&2
$echo "$modename: \`$vinfo' is not valid version information" 1>&2
exit 1
;;
esac
case $revision in
0 | [1-9] | [1-9][0-9] | [1-9][0-9][0-9]) ;;
*)
$echo "$modename: REVISION \`$revision' is not a nonnegative integer" 1>&2
$echo "$modename: \`$vinfo' is not valid version information" 1>&2
exit 1
;;
esac
case $age in
0 | [1-9] | [1-9][0-9] | [1-9][0-9][0-9]) ;;
*)
$echo "$modename: AGE \`$age' is not a nonnegative integer" 1>&2
$echo "$modename: \`$vinfo' is not valid version information" 1>&2
exit 1
;;
esac
if test $age -gt $current; then
$echo "$modename: AGE \`$age' is greater than the current interface number \`$current'" 1>&2
$echo "$modename: \`$vinfo' is not valid version information" 1>&2
exit 1
fi
# Calculate the version variables.
major=
versuffix=
verstring=
case $version_type in
none) ;;
darwin)
# Like Linux, but with the current version available in
# verstring for coding it into the library header
major=.`expr $current - $age`
versuffix="$major.$age.$revision"
# Darwin ld doesn't like 0 for these options...
minor_current=`expr $current + 1`
verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
;;
freebsd-aout)
major=".$current"
versuffix=".$current.$revision";
;;
freebsd-elf)
major=".$current"
versuffix=".$current";
;;
irix)
major=`expr $current - $age + 1`
verstring="sgi$major.$revision"
# Add in all the interfaces that we are compatible with.
loop=$revision
while test $loop != 0; do
iface=`expr $revision - $loop`
loop=`expr $loop - 1`
verstring="sgi$major.$iface:$verstring"
done
# Before this point, $major must not contain `.'.
major=.$major
versuffix="$major.$revision"
;;
linux)
major=.`expr $current - $age`
versuffix="$major.$age.$revision"
;;
osf)
major=.`expr $current - $age`
versuffix=".$current.$age.$revision"
verstring="$current.$age.$revision"
# Add in all the interfaces that we are compatible with.
loop=$age
while test $loop != 0; do
iface=`expr $current - $loop`
loop=`expr $loop - 1`
verstring="$verstring:${iface}.0"
done
# Make executables depend on our current version.
verstring="$verstring:${current}.0"
;;
sunos)
major=".$current"
versuffix=".$current.$revision"
;;
windows)
# Use '-' rather than '.', since we only want one
# extension on DOS 8.3 filesystems.
major=`expr $current - $age`
versuffix="-$major"
;;
*)
$echo "$modename: unknown library version type \`$version_type'" 1>&2
echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2
exit 1
;;
esac
# Clear the version info if we defaulted, and they specified a release.
if test -z "$vinfo" && test -n "$release"; then
major=
verstring="0.0"
if test "$need_version" = no; then
versuffix=
else
versuffix=".0.0"
fi
fi
# Remove version info from name if versioning should be avoided
if test "$avoid_version" = yes && test "$need_version" = no; then
major=
versuffix=
verstring=""
fi
# Check to see if the archive will have undefined symbols.
if test "$allow_undefined" = yes; then
if test "$allow_undefined_flag" = unsupported; then
$echo "$modename: warning: undefined symbols not allowed in $host shared libraries" 1>&2
build_libtool_libs=no
build_old_libs=yes
fi
else
# Don't allow undefined symbols.
allow_undefined_flag="$no_undefined_flag"
fi
fi
if test "$mode" != relink; then
# Remove our outputs, but don't remove object files since they
# may have been created when compiling PIC objects.
removelist=
tempremovelist=`echo "$output_objdir/*"`
for p in $tempremovelist; do
case $p in
*.$objext)
;;
$output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
removelist="$removelist $p"
;;
*) ;;
esac
done
if test -n "$removelist"; then
$show "${rm}r $removelist"
$run ${rm}r $removelist
fi
fi
# Now set the variables for building old libraries.
if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
oldlibs="$oldlibs $output_objdir/$libname.$libext"
# Transform .lo files to .o files.
oldobjs="$objs "`$echo "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}'$/d' -e "$lo2o" | $NL2SP`
fi
# Eliminate all temporary directories.
for path in $notinst_path; do
lib_search_path=`echo "$lib_search_path " | sed -e 's% $path % %g'`
deplibs=`echo "$deplibs " | sed -e 's% -L$path % %g'`
dependency_libs=`echo "$dependency_libs " | sed -e 's% -L$path % %g'`
done
if test -n "$xrpath"; then
# If the user specified any rpath flags, then add them.
temp_xrpath=
for libdir in $xrpath; do
temp_xrpath="$temp_xrpath -R$libdir"
case "$finalize_rpath " in
*" $libdir "*) ;;
*) finalize_rpath="$finalize_rpath $libdir" ;;
esac
done
if test $hardcode_into_libs != yes || test $build_old_libs = yes; then
dependency_libs="$temp_xrpath $dependency_libs"
fi
fi
# Make sure dlfiles contains only unique files that won't be dlpreopened
old_dlfiles="$dlfiles"
dlfiles=
for lib in $old_dlfiles; do
case " $dlprefiles $dlfiles " in
*" $lib "*) ;;
*) dlfiles="$dlfiles $lib" ;;
esac
done
# Make sure dlprefiles contains only unique files
old_dlprefiles="$dlprefiles"
dlprefiles=
for lib in $old_dlprefiles; do
case "$dlprefiles " in
*" $lib "*) ;;
*) dlprefiles="$dlprefiles $lib" ;;
esac
done
if test "$build_libtool_libs" = yes; then
if test -n "$rpath"; then
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos*)
# these systems don't actually have a c library (as such)!
;;
*-*-rhapsody* | *-*-darwin1.[012])
# Rhapsody C library is in the System framework
deplibs="$deplibs -framework System"
;;
*-*-netbsd*)
# Don't link with libc until the a.out ld.so is fixed.
;;
*)
# Add libc to deplibs on all other systems if necessary.
if test $build_libtool_need_lc = "yes"; then
deplibs="$deplibs -lc"
fi
;;
esac
fi
# Transform deplibs into only deplibs that can be linked in shared.
name_save=$name
libname_save=$libname
release_save=$release
versuffix_save=$versuffix
major_save=$major
# I'm not sure if I'm treating the release correctly. I think
# release should show up in the -l (ie -lgmp5) so we don't want to
# add it in twice. Is that correct?
release=""
versuffix=""
major=""
newdeplibs=
droppeddeps=no
case $deplibs_check_method in
pass_all)
# Don't check for shared/static. Everything works.
# This might be a little naive. We might want to check
# whether the library exists or not. But this is on
# osf3 & osf4 and I'm not really sure... Just
# implementing what was already the behaviour.
newdeplibs=$deplibs
;;
test_compile)
# This code stresses the "libraries are programs" paradigm to its
# limits. Maybe even breaks it. We compile a program, linking it
# against the deplibs as a proxy for the library. Then we can check
# whether they linked in statically or dynamically with ldd.
$rm conftest.c
cat > conftest.c <<EOF
int main() { return 0; }
EOF
$rm conftest
$LTCC -o conftest conftest.c $deplibs
if test $? -eq 0 ; then
ldd_output=`ldd conftest`
for i in $deplibs; do
name="`expr $i : '-l\(.*\)'`"
# If $name is empty we are operating on a -L argument.
if test -n "$name" && test "$name" != "0"; then
libname=`eval \\$echo \"$libname_spec\"`
deplib_matches=`eval \\$echo \"$library_names_spec\"`
set dummy $deplib_matches
deplib_match=$2
if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
newdeplibs="$newdeplibs $i"
else
droppeddeps=yes
echo
echo "*** Warning: This library needs some functionality provided by $i."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have."
fi
else
newdeplibs="$newdeplibs $i"
fi
done
else
# Error occurred in the first compile. Let's try to salvage the situation:
# Compile a separate program for each library.
for i in $deplibs; do
name="`expr $i : '-l\(.*\)'`"
# If $name is empty we are operating on a -L argument.
if test -n "$name" && test "$name" != "0"; then
$rm conftest
$LTCC -o conftest conftest.c $i
# Did it work?
if test $? -eq 0 ; then
ldd_output=`ldd conftest`
libname=`eval \\$echo \"$libname_spec\"`
deplib_matches=`eval \\$echo \"$library_names_spec\"`
set dummy $deplib_matches
deplib_match=$2
if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
newdeplibs="$newdeplibs $i"
else
droppeddeps=yes
echo
echo "*** Warning: This library needs some functionality provided by $i."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have."
fi
else
droppeddeps=yes
echo
echo "*** Warning! Library $i is needed by this library but I was not able to"
echo "*** make it link in! You will probably need to install it or some"
echo "*** library that it depends on before this library will be fully"
echo "*** functional. Installing it before continuing would be even better."
fi
else
newdeplibs="$newdeplibs $i"
fi
done
fi
;;
file_magic*)
set dummy $deplibs_check_method
file_magic_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"`
for a_deplib in $deplibs; do
name="`expr $a_deplib : '-l\(.*\)'`"
# If $name is empty we are operating on a -L argument.
if test -n "$name" && test "$name" != "0"; then
libname=`eval \\$echo \"$libname_spec\"`
for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
for potent_lib in $potential_libs; do
# Follow soft links.
if ls -lLd "$potent_lib" 2>/dev/null \
| grep " -> " >/dev/null; then
continue
fi
# The statement above tries to avoid entering an
# endless loop below, in case of cyclic links.
# We might still enter an endless loop, since a link
# loop can be closed while we follow links,
# but so what?
potlib="$potent_lib"
while test -h "$potlib" 2>/dev/null; do
potliblink=`ls -ld $potlib | sed 's/.* -> //'`
case $potliblink in
[\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
*) potlib=`$echo "X$potlib" | $Xsed -e 's,[^/]*$,,'`"$potliblink";;
esac
done
if eval $file_magic_cmd \"\$potlib\" 2>/dev/null \
| sed 10q \
| egrep "$file_magic_regex" > /dev/null; then
newdeplibs="$newdeplibs $a_deplib"
a_deplib=""
break 2
fi
done
done
if test -n "$a_deplib" ; then
droppeddeps=yes
echo
echo "*** Warning: This library needs some functionality provided by $a_deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have."
fi
else
# Add a -L argument.
newdeplibs="$newdeplibs $a_deplib"
fi
done # Gone through all deplibs.
;;
match_pattern*)
set dummy $deplibs_check_method
match_pattern_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"`
for a_deplib in $deplibs; do
name="`expr $a_deplib : '-l\(.*\)'`"
# If $name is empty we are operating on a -L argument.
if test -n "$name" && test "$name" != "0"; then
libname=`eval \\$echo \"$libname_spec\"`
for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
for potent_lib in $potential_libs; do
if eval echo \"$potent_lib\" 2>/dev/null \
| sed 10q \
| egrep "$match_pattern_regex" > /dev/null; then
newdeplibs="$newdeplibs $a_deplib"
a_deplib=""
break 2
fi
done
done
if test -n "$a_deplib" ; then
droppeddeps=yes
echo
echo "*** Warning: This library needs some functionality provided by $a_deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have."
fi
else
# Add a -L argument.
newdeplibs="$newdeplibs $a_deplib"
fi
done # Gone through all deplibs.
;;
none | unknown | *)
newdeplibs=""
if $echo "X $deplibs" | $Xsed -e 's/ -lc$//' \
-e 's/ -[LR][^ ]*//g' -e 's/[ ]//g' |
grep . >/dev/null; then
echo
if test "X$deplibs_check_method" = "Xnone"; then
echo "*** Warning: inter-library dependencies are not supported in this platform."
else
echo "*** Warning: inter-library dependencies are not known to be supported."
fi
echo "*** All declared inter-library dependencies are being dropped."
droppeddeps=yes
fi
;;
esac
versuffix=$versuffix_save
major=$major_save
release=$release_save
libname=$libname_save
name=$name_save
case $host in
*-*-rhapsody* | *-*-darwin1.[012])
# On Rhapsody, the C library is provided by the System framework
newdeplibs=`$echo "X $newdeplibs" | $Xsed -e 's/ -lc / -framework System /'`
;;
esac
if test "$droppeddeps" = yes; then
if test "$module" = yes; then
echo
echo "*** Warning: libtool could not satisfy all declared inter-library"
echo "*** dependencies of module $libname. Therefore, libtool will create"
echo "*** a static module, that should work as long as the dlopening"
echo "*** application is linked with the -dlopen flag."
if test -z "$global_symbol_pipe"; then
echo
echo "*** However, this would only work if libtool was able to extract symbol"
echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
echo "*** not find such a program. So, this module is probably useless."
echo "*** \`nm' from GNU binutils and a full rebuild may help."
fi
if test "$build_old_libs" = no; then
oldlibs="$output_objdir/$libname.$libext"
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
else
echo "*** The inter-library dependencies that have been dropped here will be"
echo "*** automatically added whenever a program is linked with this library"
echo "*** or is declared to -dlopen it."
if test $allow_undefined = no; then
echo
echo "*** Since this library must not contain undefined symbols,"
echo "*** because either the platform does not support them or"
echo "*** it was explicitly requested with -no-undefined,"
echo "*** libtool will only create a static version of it."
if test "$build_old_libs" = no; then
oldlibs="$output_objdir/$libname.$libext"
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
fi
fi
fi
# Done checking deplibs!
deplibs=$newdeplibs
fi
# All the library-specific variables (install_libdir is set above).
library_names=
old_library=
dlname=
# Test again, we may have decided not to build it any more
if test "$build_libtool_libs" = yes; then
if test $hardcode_into_libs = yes; then
# Hardcode the library paths
hardcode_libdirs=
dep_rpath=
rpath="$finalize_rpath"
test "$mode" != relink && rpath="$compile_rpath$rpath"
for libdir in $rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs="$libdir"
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
dep_rpath="$dep_rpath $flag"
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
*" $libdir "*) ;;
*) perm_rpath="$perm_rpath $libdir" ;;
esac
fi
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir="$hardcode_libdirs"
eval dep_rpath=\"$hardcode_libdir_flag_spec\"
fi
if test -n "$runpath_var" && test -n "$perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $perm_rpath; do
rpath="$rpath$dir:"
done
eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
fi
test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
fi
shlibpath="$finalize_shlibpath"
test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
if test -n "$shlibpath"; then
eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
fi
# Get the real and link names of the library.
eval library_names=\"$library_names_spec\"
set dummy $library_names
realname="$2"
shift; shift
if test -n "$soname_spec"; then
eval soname=\"$soname_spec\"
else
soname="$realname"
fi
test -z "$dlname" && dlname=$soname
lib="$output_objdir/$realname"
for link
do
linknames="$linknames $link"
done
# # Ensure that we have .o objects for linkers which dislike .lo
# # (e.g. aix) in case we are running --disable-static
# for obj in $libobjs; do
# xdir=`$echo "X$obj" | $Xsed -e 's%/[^/]*$%%'`
# if test "X$xdir" = "X$obj"; then
# xdir="."
# else
# xdir="$xdir"
# fi
# baseobj=`$echo "X$obj" | $Xsed -e 's%^.*/%%'`
# oldobj=`$echo "X$baseobj" | $Xsed -e "$lo2o"`
# if test ! -f $xdir/$oldobj && test "$baseobj" != "$oldobj"; then
# $show "(cd $xdir && ${LN_S} $baseobj $oldobj)"
# $run eval '(cd $xdir && ${LN_S} $baseobj $oldobj)' || exit $?
# fi
# done
# Use standard objects if they are pic
test -z "$pic_flag" && libobjs=`$echo "X$libobjs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
# Prepare the list of exported symbols
if test -z "$export_symbols"; then
if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
$show "generating symbol list for \`$libname.la'"
export_symbols="$output_objdir/$libname.exp"
$run $rm $export_symbols
eval cmds=\"$export_symbols_cmds\"
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
if test -n "$export_symbols_regex"; then
$show "egrep -e \"$export_symbols_regex\" \"$export_symbols\" > \"${export_symbols}T\""
$run eval 'egrep -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
$show "$mv \"${export_symbols}T\" \"$export_symbols\""
$run eval '$mv "${export_symbols}T" "$export_symbols"'
fi
fi
fi
if test -n "$export_symbols" && test -n "$include_expsyms"; then
$run eval '$echo "X$include_expsyms" | $SP2NL >> "$export_symbols"'
fi
if test -n "$convenience"; then
if test -n "$whole_archive_flag_spec"; then
save_libobjs=$libobjs
eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
else
gentop="$output_objdir/${outputname}x"
$show "${rm}r $gentop"
$run ${rm}r "$gentop"
$show "$mkdir $gentop"
$run $mkdir "$gentop"
status=$?
if test $status -ne 0 && test ! -d "$gentop"; then
exit $status
fi
generated="$generated $gentop"
for xlib in $convenience; do
# Extract the objects.
case $xlib in
[\\/]* | [A-Za-z]:[\\/]*) xabs="$xlib" ;;
*) xabs=`pwd`"/$xlib" ;;
esac
xlib=`$echo "X$xlib" | $Xsed -e 's%^.*/%%'`
xdir="$gentop/$xlib"
$show "${rm}r $xdir"
$run ${rm}r "$xdir"
$show "$mkdir $xdir"
$run $mkdir "$xdir"
status=$?
if test $status -ne 0 && test ! -d "$xdir"; then
exit $status
fi
$show "(cd $xdir && $AR x $xabs)"
$run eval "(cd \$xdir && $AR x \$xabs)" || exit $?
libobjs="$libobjs "`find $xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP`
done
fi
fi
if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
eval flag=\"$thread_safe_flag_spec\"
linker_flags="$linker_flags $flag"
fi
# Make a backup of the uninstalled library when relinking
if test "$mode" = relink; then
$run eval '(cd $output_objdir && $rm ${realname}U && $mv $realname ${realname}U)' || exit $?
fi
# Do each of the archive commands.
if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
eval cmds=\"$archive_expsym_cmds\"
else
eval cmds=\"$archive_cmds\"
fi
if len=`expr "X$cmds" : ".*"` &&
test $len -le $max_cmd_len; then
:
else
# The command line is too long to link in one step, link piecewise.
$echo "creating reloadable object files..."
# Save the value of $output and $libobjs because we want to
# use them later. If we have whole_archive_flag_spec, we
# want to use save_libobjs as it was before
# whole_archive_flag_spec was expanded, because we can't
# assume the linker understands whole_archive_flag_spec.
# This may have to be revisited, in case too many
# convenience libraries get linked in and end up exceeding
# the spec.
if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
save_libobjs=$libobjs
fi
save_output=$output
# Clear the reloadable object creation command queue and
# initialize k to one.
test_cmds=
concat_cmds=
objlist=
delfiles=
last_robj=
k=1
output=$output_objdir/$save_output-${k}.$objext
# Loop over the list of objects to be linked.
for obj in $save_libobjs
do
eval test_cmds=\"$reload_cmds $objlist $last_robj\"
if test "X$objlist" = X ||
{ len=`expr "X$test_cmds" : ".*"` &&
test $len -le $max_cmd_len; }; then
objlist="$objlist $obj"
else
# The command $test_cmds is almost too long, add a
# command to the queue.
if test $k -eq 1 ; then
# The first file doesn't have a previous command to add.
eval concat_cmds=\"$reload_cmds $objlist $last_robj\"
else
# All subsequent reloadable object files will link in
# the last one created.
eval concat_cmds=\"\$concat_cmds~$reload_cmds $objlist $last_robj\"
fi
last_robj=$output_objdir/$save_output-${k}.$objext
k=`expr $k + 1`
output=$output_objdir/$save_output-${k}.$objext
objlist=$obj
len=1
fi
done
# Handle the remaining objects by creating one last
# reloadable object file. All subsequent reloadable object
# files will link in the last one created.
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
eval concat_cmds=\"\${concat_cmds}$reload_cmds $objlist $last_robj\"
# Set up a command to remove the reloadale object files
# after they are used.
i=0
while test $i -lt $k
do
i=`expr $i + 1`
delfiles="$delfiles $output_objdir/$save_output-${i}.$objext"
done
$echo "creating a temporary reloadable object file: $output"
# Loop through the commands generated above and execute them.
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $concat_cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
libobjs=$output
# Restore the value of output.
output=$save_output
if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
fi
# Expand the library linking commands again to reset the
# value of $libobjs for piecewise linking.
# Do each of the archive commands.
if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
eval cmds=\"$archive_expsym_cmds\"
else
eval cmds=\"$archive_cmds\"
fi
# Append the command to remove the reloadable object files
# to the just-reset $cmds.
eval cmds=\"\$cmds~$rm $delfiles\"
fi
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
# Restore the uninstalled library and exit
if test "$mode" = relink; then
$run eval '(cd $output_objdir && $rm ${realname}T && $mv $realname ${realname}T && $mv "$realname"U $realname)' || exit $?
exit 0
fi
# Create links to the real library.
for linkname in $linknames; do
if test "$realname" != "$linkname"; then
$show "(cd $output_objdir && $rm $linkname && $LN_S $realname $linkname)"
$run eval '(cd $output_objdir && $rm $linkname && $LN_S $realname $linkname)' || exit $?
fi
done
# If -module or -export-dynamic was specified, set the dlname.
if test "$module" = yes || test "$export_dynamic" = yes; then
# On all known operating systems, these are identical.
dlname="$soname"
fi
fi
;;
# Link mode `obj': build a reloadable (partially linked) object, and
# optionally a libtool .lo object, from the accumulated objects and
# convenience libraries.  Library-only flags are ignored with a warning.
obj)
if test -n "$deplibs"; then
$echo "$modename: warning: \`-l' and \`-L' are ignored for objects" 1>&2
fi
if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
$echo "$modename: warning: \`-dlopen' is ignored for objects" 1>&2
fi
if test -n "$rpath"; then
$echo "$modename: warning: \`-rpath' is ignored for objects" 1>&2
fi
if test -n "$xrpath"; then
$echo "$modename: warning: \`-R' is ignored for objects" 1>&2
fi
if test -n "$vinfo"; then
$echo "$modename: warning: \`-version-info' is ignored for objects" 1>&2
fi
if test -n "$release"; then
$echo "$modename: warning: \`-release' is ignored for objects" 1>&2
fi
# Derive the plain object name: for a .lo output, $lo2o maps it to the
# corresponding .o name; otherwise the output itself is the object.
case $output in
*.lo)
if test -n "$objs$old_deplibs"; then
$echo "$modename: cannot build library object \`$output' from non-libtool objects" 1>&2
exit 1
fi
libobj="$output"
obj=`$echo "X$output" | $Xsed -e "$lo2o"`
;;
*)
libobj=
obj="$output"
;;
esac
# Delete the old objects.
$run $rm $obj $libobj
# Objects from convenience libraries. This assumes
# single-version convenience libraries. Whenever we create
# different ones for PIC/non-PIC, this we'll have to duplicate
# the extraction.
reload_conv_objs=
gentop=
# reload_cmds runs $LD directly, so let us get rid of
# -Wl from whole_archive_flag_spec
wl=
if test -n "$convenience"; then
if test -n "$whole_archive_flag_spec"; then
eval reload_conv_objs=\"\$reload_objs $whole_archive_flag_spec\"
else
# No whole-archive support: unpack each convenience archive with
# $AR into a scratch directory and collect its member objects.
gentop="$output_objdir/${obj}x"
$show "${rm}r $gentop"
$run ${rm}r "$gentop"
$show "$mkdir $gentop"
$run $mkdir "$gentop"
status=$?
if test $status -ne 0 && test ! -d "$gentop"; then
exit $status
fi
generated="$generated $gentop"
for xlib in $convenience; do
# Extract the objects.
case $xlib in
[\\/]* | [A-Za-z]:[\\/]*) xabs="$xlib" ;;
*) xabs=`pwd`"/$xlib" ;;
esac
xlib=`$echo "X$xlib" | $Xsed -e 's%^.*/%%'`
xdir="$gentop/$xlib"
$show "${rm}r $xdir"
$run ${rm}r "$xdir"
$show "$mkdir $xdir"
$run $mkdir "$xdir"
status=$?
if test $status -ne 0 && test ! -d "$xdir"; then
exit $status
fi
$show "(cd $xdir && $AR x $xabs)"
$run eval "(cd \$xdir && $AR x \$xabs)" || exit $?
# NOTE(review): each iteration rebuilds from $reload_objs plus this
# xlib's members; with several convenience libs only the last find's
# prefix is $reload_objs — appears to rely on upstream invariants.
reload_conv_objs="$reload_objs "`find $xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP`
done
fi
fi
# Create the old-style object.
reload_objs="$objs$old_deplibs "`$echo "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}$'/d' -e '/\.lib$/d' -e "$lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
output="$obj"
eval cmds=\"$reload_cmds\"
# Commands are '~'-separated; temporarily switch IFS to split them.
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
# Exit if we aren't doing a library object file.
if test -z "$libobj"; then
if test -n "$gentop"; then
$show "${rm}r $gentop"
$run ${rm}r $gentop
fi
exit 0
fi
if test "$build_libtool_libs" != yes; then
if test -n "$gentop"; then
$show "${rm}r $gentop"
$run ${rm}r $gentop
fi
# Create an invalid libtool object if no PIC, so that we don't
# accidentally link it into a program.
# $show "echo timestamp > $libobj"
# $run eval "echo timestamp > $libobj" || exit $?
exit 0
fi
if test -n "$pic_flag" || test "$pic_mode" != default; then
# Only do commands if we really have different PIC objects.
reload_objs="$libobjs $reload_conv_objs"
output="$libobj"
eval cmds=\"$reload_cmds\"
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
# else
# # Just create a symlink.
# $show $rm $libobj
# $run $rm $libobj
# xdir=`$echo "X$libobj" | $Xsed -e 's%/[^/]*$%%'`
# if test "X$xdir" = "X$libobj"; then
# xdir="."
# else
# xdir="$xdir"
# fi
# baseobj=`$echo "X$libobj" | $Xsed -e 's%^.*/%%'`
# oldobj=`$echo "X$baseobj" | $Xsed -e "$lo2o"`
# $show "(cd $xdir && $LN_S $oldobj $baseobj)"
# $run eval '(cd $xdir && $LN_S $oldobj $baseobj)' || exit $?
fi
if test -n "$gentop"; then
$show "${rm}r $gentop"
$run ${rm}r $gentop
fi
exit 0
;;
# Link mode `prog': link an executable.  This head section normalizes the
# output name, warns about library-only flags, patches platform-specific
# deplibs, and accumulates user-requested rpath directories.
prog)
case $host in
*cygwin*) output=`echo $output | sed -e 's,.exe$,,;s,$,.exe,'` ;;
esac
if test -n "$vinfo"; then
$echo "$modename: warning: \`-version-info' is ignored for programs" 1>&2
fi
if test -n "$release"; then
$echo "$modename: warning: \`-release' is ignored for programs" 1>&2
fi
if test "$preload" = yes; then
if test "$dlopen_support" = unknown && test "$dlopen_self" = unknown &&
test "$dlopen_self_static" = unknown; then
$echo "$modename: warning: \`AC_LIBTOOL_DLOPEN' not used. Assuming no dlopen support."
fi
fi
case $host in
*-*-rhapsody* | *-*-darwin1.[012])
# On Rhapsody replace the C library is the System framework
compile_deplibs=`$echo "X $compile_deplibs" | $Xsed -e 's/ -lc / -framework System /'`
finalize_deplibs=`$echo "X $finalize_deplibs" | $Xsed -e 's/ -lc / -framework System /'`
;;
esac
compile_command="$compile_command $compile_deplibs"
finalize_command="$finalize_command $finalize_deplibs"
if test -n "$rpath$xrpath"; then
# If the user specified any rpath flags, then add them.
for libdir in $rpath $xrpath; do
# This is the magic to use -rpath.
# De-duplicate: only append libdirs not already present.
case "$finalize_rpath " in
*" $libdir "*) ;;
*) finalize_rpath="$finalize_rpath $libdir" ;;
esac
done
fi
# Now hardcode the library paths
# First pass: turn $compile_rpath + $finalize_rpath into compile-time
# rpath flags (compile_rpath); second pass: $finalize_rpath alone into
# install-time flags (finalize_rpath).  When the platform supports a
# separator, unique libdirs are accumulated and expanded in one flag.
rpath=
hardcode_libdirs=
for libdir in $compile_rpath $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs="$libdir"
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
rpath="$rpath $flag"
fi
elif test -n "$runpath_var"; then
# No rpath flag support; record for the runtime path variable instead.
case "$perm_rpath " in
*" $libdir "*) ;;
*) perm_rpath="$perm_rpath $libdir" ;;
esac
fi
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*)
# Windows-ish hosts find DLLs via PATH; track the search path too.
case :$dllsearchpath: in
*":$libdir:"*) ;;
*) dllsearchpath="$dllsearchpath:$libdir";;
esac
;;
esac
done
# Substitute the hardcoded libdirs into the rpath.
# NOTE(review): $libdir is reused here as scratch input to the flag spec.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir="$hardcode_libdirs"
eval rpath=\" $hardcode_libdir_flag_spec\"
fi
compile_rpath="$rpath"
rpath=
hardcode_libdirs=
for libdir in $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs="$libdir"
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
rpath="$rpath $flag"
fi
elif test -n "$runpath_var"; then
case "$finalize_perm_rpath " in
*" $libdir "*) ;;
*) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;;
esac
fi
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir="$hardcode_libdirs"
eval rpath=\" $hardcode_libdir_flag_spec\"
fi
finalize_rpath="$rpath"
# Generate ${outputname}S.c: a C source holding lt_preloaded_symbols, the
# name->address table used to emulate dlsym() for dlpreopened files.  The
# symbol list is harvested with $NM piped through $global_symbol_pipe,
# then compiled and substituted for @SYMFILE@ in the link commands.
dlsyms=
if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
if test -n "$NM" && test -n "$global_symbol_pipe"; then
dlsyms="${outputname}S.c"
else
$echo "$modename: not configured to extract global symbols from dlpreopened files" 1>&2
fi
fi
if test -n "$dlsyms"; then
case $dlsyms in
"") ;;
*.c)
# Discover the nlist of each of the dlfiles.
nlist="$output_objdir/${outputname}.nm"
$show "$rm $nlist ${nlist}S ${nlist}T"
$run $rm "$nlist" "${nlist}S" "${nlist}T"
# Parse the name list into a source file.
$show "creating $output_objdir/$dlsyms"
test -z "$run" && $echo > "$output_objdir/$dlsyms" "\
/* $dlsyms - symbol resolution table for \`$outputname' dlsym emulation. */
/* Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP */
#ifdef __cplusplus
extern \"C\" {
#endif
/* Prevent the only kind of declaration conflicts we can make. */
#define lt_preloaded_symbols some_other_symbol
/* External symbol declarations for the compiler. */\
"
if test "$dlself" = yes; then
$show "generating symbol list for \`$output'"
test -z "$run" && $echo ': @PROGRAM@ ' > "$nlist"
# Add our own program objects to the symbol list.
progfiles="$objs$old_deplibs"
for arg in $progfiles; do
$show "extracting global C symbols from \`$arg'"
$run eval "$NM $arg | $global_symbol_pipe >> '$nlist'"
done
if test -n "$exclude_expsyms"; then
$run eval 'egrep -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
$run eval '$mv "$nlist"T "$nlist"'
fi
if test -n "$export_symbols_regex"; then
$run eval 'egrep -e "$export_symbols_regex" "$nlist" > "$nlist"T'
$run eval '$mv "$nlist"T "$nlist"'
fi
# Prepare the list of exported symbols
if test -z "$export_symbols"; then
export_symbols="$output_objdir/$output.exp"
$run $rm $export_symbols
$run eval "sed -n -e '/^: @PROGRAM@$/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
else
# Filter $nlist down to symbols matching the user-provided export list.
$run eval "sed -e 's/\([][.*^$]\)/\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$output.exp"'
$run eval 'grep -f "$output_objdir/$output.exp" < "$nlist" > "$nlist"T'
$run eval 'mv "$nlist"T "$nlist"'
fi
fi
for arg in $dlprefiles; do
$show "extracting global C symbols from \`$arg'"
name=`echo "$arg" | sed -e 's%^.*/%%'`
$run eval 'echo ": $name " >> "$nlist"'
$run eval "$NM $arg | $global_symbol_pipe >> '$nlist'"
done
if test -z "$run"; then
# Make sure we have at least an empty file.
test -f "$nlist" || : > "$nlist"
if test -n "$exclude_expsyms"; then
egrep -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
$mv "$nlist"T "$nlist"
fi
# Try sorting and uniquifying the output.
# NOTE(review): 'sort +2' is the obsolete key syntax; modern sort
# rejects it ('sort -k 3'), falling through to the unsorted branch.
if grep -v "^: " < "$nlist" | sort +2 | uniq > "$nlist"S; then
:
else
grep -v "^: " < "$nlist" > "$nlist"S
fi
if test -f "$nlist"S; then
eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$dlsyms"'
else
echo '/* NONE */' >> "$output_objdir/$dlsyms"
fi
$echo >> "$output_objdir/$dlsyms" "\
#undef lt_preloaded_symbols
#if defined (__STDC__) && __STDC__
# define lt_ptr_t void *
#else
# define lt_ptr_t char *
# define const
#endif
/* The mapping between symbol names and symbols. */
const struct {
const char *name;
lt_ptr_t address;
}
lt_preloaded_symbols[] =
{\
"
sed -n -e 's/^: \([^ ]*\) $/ {\"\1\", (lt_ptr_t) 0},/p' \
-e 's/^. \([^ ]*\) \([^ ]*\)$/ {"\2", (lt_ptr_t) \&\2},/p' \
< "$nlist" >> "$output_objdir/$dlsyms"
$echo >> "$output_objdir/$dlsyms" "\
{0, (lt_ptr_t) 0}
};
/* This works around a problem in FreeBSD linker */
#ifdef FREEBSD_WORKAROUND
static const void *lt_preloaded_setup() {
return lt_preloaded_symbols;
}
#endif
#ifdef __cplusplus
}
#endif\
"
fi
pic_flag_for_symtable=
case $host in
# compiling the symbol table file with pic_flag works around
# a FreeBSD bug that causes programs to crash when -lm is
# linked before any other PIC object. But we must not use
# pic_flag when linking with -static. The problem exists in
# FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
*-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
case "$compile_command " in
*" -static "*) ;;
*) pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND";;
esac;;
*-*-hpux*)
case "$compile_command " in
*" -static "*) ;;
*) pic_flag_for_symtable=" $pic_flag";;
esac
esac
# Now compile the dynamic symbol file.
$show "(cd $output_objdir && $LTCC -c$no_builtin_flag$pic_flag_for_symtable \"$dlsyms\")"
$run eval '(cd $output_objdir && $LTCC -c$no_builtin_flag$pic_flag_for_symtable "$dlsyms")' || exit $?
# Clean up the generated files.
$show "$rm $output_objdir/$dlsyms $nlist ${nlist}S ${nlist}T"
$run $rm "$output_objdir/$dlsyms" "$nlist" "${nlist}S" "${nlist}T"
# Transform the symbol file into the correct name.
compile_command=`$echo "X$compile_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%"`
finalize_command=`$echo "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%"`
;;
*)
$echo "$modename: unknown suffix for \`$dlsyms'" 1>&2
exit 1
;;
esac
else
# We keep going just in case the user didn't refer to
# lt_preloaded_symbols. The linker will fail if global_symbol_pipe
# really was required.
# Nullify the symbol file.
compile_command=`$echo "X$compile_command" | $Xsed -e "s% @SYMFILE@%%"`
finalize_command=`$echo "X$finalize_command" | $Xsed -e "s% @SYMFILE@%%"`
fi
# Fast path: no uninstalled library dependencies (or no shared libs at
# all), so link the final executable immediately and exit — no wrapper
# script is needed.
if test $need_relink = no || test "$build_libtool_libs" != yes; then
# Replace the output file specification.
compile_command=`$echo "X$compile_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'`
link_command="$compile_command$compile_rpath"
# We have no uninstalled library dependencies, so finalize right now.
$show "$link_command"
$run eval "$link_command"
status=$?
# Delete the generated files.
if test -n "$dlsyms"; then
$show "$rm $output_objdir/${outputname}S.${objext}"
$run $rm "$output_objdir/${outputname}S.${objext}"
fi
exit $status
fi
# Build the runtime search-path environment: absolute-ize $temp_rpath
# entries for $shlibpath_var, prefix the link commands with shlibpath
# settings, and assemble runpath_var prefixes for compile/finalize.
if test -n "$shlibpath_var"; then
# We should set the shlibpath_var
rpath=
for dir in $temp_rpath; do
case $dir in
[\\/]* | [A-Za-z]:[\\/]*)
# Absolute path.
rpath="$rpath$dir:"
;;
*)
# Relative path: add a thisdir entry.
rpath="$rpath\$thisdir/$dir:"
;;
esac
done
temp_rpath="$rpath"
fi
if test -n "$compile_shlibpath$finalize_shlibpath"; then
compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
fi
if test -n "$finalize_shlibpath"; then
finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
fi
compile_var=
finalize_var=
if test -n "$runpath_var"; then
if test -n "$perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $perm_rpath; do
rpath="$rpath$dir:"
done
compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
fi
if test -n "$finalize_perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $finalize_perm_rpath; do
rpath="$rpath$dir:"
done
finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
fi
fi
# Choose the link and relink commands based on install strategy:
# -no-install links directly; hardcode_action=relink defers final
# linking to installation; otherwise fast_install decides.
if test "$no_install" = yes; then
# We don't need to create a wrapper script.
link_command="$compile_var$compile_command$compile_rpath"
# Replace the output file specification.
link_command=`$echo "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'`
# Delete the old output file.
$run $rm $output
# Link the executable and exit
$show "$link_command"
$run eval "$link_command" || exit $?
exit 0
fi
if test "$hardcode_action" = relink; then
# Fast installation is not supported
link_command="$compile_var$compile_command$compile_rpath"
relink_command="$finalize_var$finalize_command$finalize_rpath"
$echo "$modename: warning: this platform does not like uninstalled shared libraries" 1>&2
$echo "$modename: \`$output' will be relinked during installation" 1>&2
else
if test "$fast_install" != no; then
link_command="$finalize_var$compile_command$finalize_rpath"
if test "$fast_install" = yes; then
relink_command=`$echo "X$compile_var$compile_command$compile_rpath" | $Xsed -e 's%@OUTPUT@%\$progdir/\$file%g'`
else
# fast_install is set to needless
relink_command=
fi
else
link_command="$compile_var$compile_command$compile_rpath"
relink_command="$finalize_var$finalize_command$finalize_rpath"
fi
fi
# Replace the output file specification.
# Link the real binary into $output_objdir, then quote the relink command
# and the $echo implementation for embedding in the wrapper script.
link_command=`$echo "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
# Delete the old output files.
$run $rm $output $output_objdir/$outputname $output_objdir/lt-$outputname
$show "$link_command"
$run eval "$link_command" || exit $?
# Now create the wrapper script.
$show "creating $output"
# Quote the relink command for shipping.
if test -n "$relink_command"; then
# Preserve any variables that may affect compiler behavior
for var in $variables_saved_for_relink; do
if eval test -z \"\${$var+set}\"; then
# Unset now -> make the relink keep it unset (or empty-exported).
relink_command="{ test -z \"\${$var+set}\" || unset $var || { $var=; export $var; }; }; $relink_command"
elif eval var_value=\$$var; test -z "$var_value"; then
relink_command="$var=; export $var; $relink_command"
else
var_value=`$echo "X$var_value" | $Xsed -e "$sed_quote_subst"`
relink_command="$var=\"$var_value\"; export $var; $relink_command"
fi
done
relink_command="cd `pwd`; $relink_command"
relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
fi
# Quote $echo for shipping.
if test "X$echo" = "X$SHELL $0 --fallback-echo"; then
case $0 in
[\\/]* | [A-Za-z]:[\\/]*) qecho="$SHELL $0 --fallback-echo";;
*) qecho="$SHELL `pwd`/$0 --fallback-echo";;
esac
qecho=`$echo "X$qecho" | $Xsed -e "$sed_quote_subst"`
else
qecho=`$echo "X$echo" | $Xsed -e "$sed_quote_subst"`
fi
# Only actually do things if our run command is non-null.
# Emit $output as a wrapper shell script that locates the real binary in
# $objdir, optionally relinks it (fast_install), sets the shared-library
# path, and execs it.  The emitted text below is quoted string data and
# must stay byte-exact: escaped \$vars expand when the wrapper runs,
# unescaped $vars are baked in now.
if test -z "$run"; then
# win32 will think the script is a binary if it has
# a .exe suffix, so we strip it off here.
case $output in
*.exe) output=`echo $output|sed 's,.exe$,,'` ;;
esac
# test for cygwin because mv fails w/o .exe extensions
case $host in
*cygwin*) exeext=.exe ;;
*) exeext= ;;
esac
$rm $output
trap "$rm $output; exit 1" 1 2 15
$echo > $output "\
#! $SHELL
# $output - temporary wrapper script for $objdir/$outputname
# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
#
# The $output program cannot be directly executed until all the libtool
# libraries that it depends on are installed.
#
# This wrapper script should never be moved out of the build directory.
# If it is, it will not operate correctly.
# Sed substitution that helps us do robust quoting. It backslashifies
# metacharacters that are still active within double-quoted strings.
Xsed='sed -e 1s/^X//'
sed_quote_subst='$sed_quote_subst'
# The HP-UX ksh and POSIX shell print the target directory to stdout
# if CDPATH is set.
if test \"\${CDPATH+set}\" = set; then CDPATH=:; export CDPATH; fi
relink_command=\"$relink_command\"
# This environment variable determines our operation mode.
if test \"\$libtool_install_magic\" = \"$magic\"; then
# install mode needs the following variable:
notinst_deplibs='$notinst_deplibs'
else
# When we are sourced in execute mode, \$file and \$echo are already set.
if test \"\$libtool_execute_magic\" != \"$magic\"; then
echo=\"$qecho\"
file=\"\$0\"
# Make sure echo works.
if test \"X\$1\" = X--no-reexec; then
# Discard the --no-reexec flag, and continue.
shift
elif test \"X\`(\$echo '\t') 2>/dev/null\`\" = 'X\t'; then
# Yippee, \$echo works!
:
else
# Restart under the correct shell, and then maybe \$echo will work.
exec $SHELL \"\$0\" --no-reexec \${1+\"\$@\"}
fi
fi\
"
$echo >> $output "\
# Find the directory that this script lives in.
thisdir=\`\$echo \"X\$file\" | \$Xsed -e 's%/[^/]*$%%'\`
test \"x\$thisdir\" = \"x\$file\" && thisdir=.
# Follow symbolic links until we get to the real thisdir.
file=\`ls -ld \"\$file\" | sed -n 's/.*-> //p'\`
while test -n \"\$file\"; do
destdir=\`\$echo \"X\$file\" | \$Xsed -e 's%/[^/]*\$%%'\`
# If there was a directory component, then change thisdir.
if test \"x\$destdir\" != \"x\$file\"; then
case \"\$destdir\" in
[\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
*) thisdir=\"\$thisdir/\$destdir\" ;;
esac
fi
file=\`\$echo \"X\$file\" | \$Xsed -e 's%^.*/%%'\`
file=\`ls -ld \"\$thisdir/\$file\" | sed -n 's/.*-> //p'\`
done
# Try to get the absolute directory name.
absdir=\`cd \"\$thisdir\" && pwd\`
test -n \"\$absdir\" && thisdir=\"\$absdir\"
"
# With fast_install the wrapper relinks the binary on demand into
# $progdir; otherwise it simply points at the already-linked binary.
if test "$fast_install" = yes; then
echo >> $output "\
program=lt-'$outputname'$exeext
progdir=\"\$thisdir/$objdir\"
if test ! -f \"\$progdir/\$program\" || \\
{ file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | sed 1q\`; \\
test \"X\$file\" != \"X\$progdir/\$program\"; }; then
file=\"\$\$-\$program\"
if test ! -d \"\$progdir\"; then
$mkdir \"\$progdir\"
else
$rm \"\$progdir/\$file\"
fi"
echo >> $output "\
# relink executable if necessary
if test -n \"\$relink_command\"; then
if relink_command_output=\`eval \$relink_command 2>&1\`; then :
else
$echo \"\$relink_command_output\" >&2
$rm \"\$progdir/\$file\"
exit 1
fi
fi
$mv \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
{ $rm \"\$progdir/\$program\";
$mv \"\$progdir/\$file\" \"\$progdir/\$program\"; }
$rm \"\$progdir/\$file\"
fi"
else
echo >> $output "\
program='$outputname'
progdir=\"\$thisdir/$objdir\"
"
fi
echo >> $output "\
if test -f \"\$progdir/\$program\"; then"
# Export our shlibpath_var if we have one.
if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
$echo >> $output "\
# Add our own library path to $shlibpath_var
$shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
# Some systems cannot cope with colon-terminated $shlibpath_var
# The second colon is a workaround for a bug in BeOS R4 sed
$shlibpath_var=\`\$echo \"X\$$shlibpath_var\" | \$Xsed -e 's/::*\$//'\`
export $shlibpath_var
"
fi
# fixup the dll searchpath if we need to.
if test -n "$dllsearchpath"; then
$echo >> $output "\
# Add the dll search path components to the executable PATH
PATH=$dllsearchpath:\$PATH
"
fi
$echo >> $output "\
if test \"\$libtool_execute_magic\" != \"$magic\"; then
# Run the actual program with our arguments.
"
case $host in
# win32 systems need to use the prog path for dll
# lookup to work
*-*-cygwin* | *-*-pw32*)
$echo >> $output "\
exec \$progdir/\$program \${1+\"\$@\"}
"
;;
# Backslashes separate directories on plain windows
*-*-mingw | *-*-os2*)
$echo >> $output "\
exec \$progdir\\\\\$program \${1+\"\$@\"}
"
;;
*)
$echo >> $output "\
# Export the path to the program.
PATH=\"\$progdir:\$PATH\"
export PATH
exec \$program \${1+\"\$@\"}
"
;;
esac
$echo >> $output "\
\$echo \"\$0: cannot exec \$program \${1+\"\$@\"}\"
exit 1
fi
else
# The program doesn't exist.
\$echo \"\$0: error: \$progdir/\$program does not exist\" 1>&2
\$echo \"This script is just a wrapper for \$program.\" 1>&2
echo \"See the $PACKAGE documentation for more information.\" 1>&2
exit 1
fi
fi\
"
chmod +x $output
fi
exit 0
;;
esac
# See if we need to build an old-fashioned archive.
# For each requested static archive: gather objects (including members
# extracted from convenience libraries), then run the archive commands —
# piecewise when the single command line would exceed $max_cmd_len.
for oldlib in $oldlibs; do
if test "$build_libtool_libs" = convenience; then
oldobjs="$libobjs_save"
addlibs="$convenience"
build_libtool_libs=no
else
if test "$build_libtool_libs" = module; then
oldobjs="$libobjs_save"
build_libtool_libs=no
else
oldobjs="$objs$old_deplibs $non_pic_objects"
fi
addlibs="$old_convenience"
fi
if test -n "$addlibs"; then
gentop="$output_objdir/${outputname}x"
$show "${rm}r $gentop"
$run ${rm}r "$gentop"
$show "$mkdir $gentop"
$run $mkdir "$gentop"
status=$?
if test $status -ne 0 && test ! -d "$gentop"; then
exit $status
fi
generated="$generated $gentop"
# Add in members from convenience archives.
for xlib in $addlibs; do
# Extract the objects.
case $xlib in
[\\/]* | [A-Za-z]:[\\/]*) xabs="$xlib" ;;
*) xabs=`pwd`"/$xlib" ;;
esac
xlib=`$echo "X$xlib" | $Xsed -e 's%^.*/%%'`
xdir="$gentop/$xlib"
$show "${rm}r $xdir"
$run ${rm}r "$xdir"
$show "$mkdir $xdir"
$run $mkdir "$xdir"
status=$?
if test $status -ne 0 && test ! -d "$xdir"; then
exit $status
fi
$show "(cd $xdir && $AR x $xabs)"
$run eval "(cd \$xdir && $AR x \$xabs)" || exit $?
oldobjs="$oldobjs "`find $xdir -name \*.${objext} -print | $NL2SP`
done
fi
# Do each command in the archive commands.
if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
eval cmds=\"$old_archive_from_new_cmds\"
else
# # Ensure that we have .o objects in place in case we decided
# # not to build a shared library, and have fallen back to building
# # static libs even though --disable-static was passed!
# for oldobj in $oldobjs; do
# if test ! -f $oldobj; then
# xdir=`$echo "X$oldobj" | $Xsed -e 's%/[^/]*$%%'`
# if test "X$xdir" = "X$oldobj"; then
# xdir="."
# else
# xdir="$xdir"
# fi
# baseobj=`$echo "X$oldobj" | $Xsed -e 's%^.*/%%'`
# obj=`$echo "X$baseobj" | $Xsed -e "$o2lo"`
# $show "(cd $xdir && ${LN_S} $obj $baseobj)"
# $run eval '(cd $xdir && ${LN_S} $obj $baseobj)' || exit $?
# fi
# done
eval cmds=\"$old_archive_cmds\"
if len=`expr "X$cmds" : ".*"` &&
test $len -le $max_cmd_len; then
:
else
# the command line is too long to link in one step, link in parts
$echo "using piecewise archive linking..."
save_RANLIB=$RANLIB
RANLIB=:
objlist=
concat_cmds=
save_oldobjs=$oldobjs
# GNU ar 2.10+ was changed to match POSIX; thus no paths are
# encoded into archives. This makes 'ar r' malfunction in
# this piecewise linking case whenever conflicting object
# names appear in distinct ar calls; check, warn and compensate.
if (for obj in $save_oldobjs
do
$echo "X$obj" | $Xsed -e 's%^.*/%%'
done | sort | sort -uc >/dev/null 2>&1); then
:
else
$echo "$modename: warning: object name conflicts; overriding AR_FLAGS to 'cq'" 1>&2
$echo "$modename: warning: to ensure that POSIX-compatible ar will work" 1>&2
AR_FLAGS=cq
fi
# Greedily pack objects into archive commands that fit $max_cmd_len;
# ranlib runs only once, on the final command (RANLIB restored below).
for obj in $save_oldobjs
do
oldobjs="$objlist $obj"
objlist="$objlist $obj"
eval test_cmds=\"$old_archive_cmds\"
if len=`expr "X$test_cmds" : ".*"` &&
test $len -le $max_cmd_len; then
:
else
# the above command should be used before it gets too long
oldobjs=$objlist
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
objlist=
fi
done
RANLIB=$save_RANLIB
oldobjs=$objlist
eval cmds=\"\$concat_cmds~$old_archive_cmds\"
fi
fi
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
done
# Remove all scratch extraction directories accumulated in $generated.
if test -n "$generated"; then
$show "${rm}r$generated"
$run ${rm}r$generated
fi
# Now create the libtool archive.
# Write the .la metadata file twice: once for in-tree use ($output) and
# once for installation ("${outputname}i", with uninstalled deplibs
# rewritten to their install locations), then symlink it out of $objdir.
case $output in
*.la)
old_library=
test "$build_old_libs" = yes && old_library="$libname.$libext"
$show "creating $output"
# Preserve any variables that may affect compiler behavior
for var in $variables_saved_for_relink; do
if eval test -z \"\${$var+set}\"; then
relink_command="{ test -z \"\${$var+set}\" || unset $var || { $var=; export $var; }; }; $relink_command"
elif eval var_value=\$$var; test -z "$var_value"; then
relink_command="$var=; export $var; $relink_command"
else
var_value=`$echo "X$var_value" | $Xsed -e "$sed_quote_subst"`
relink_command="$var=\"$var_value\"; export $var; $relink_command"
fi
done
# Quote the link command for shipping.
tagopts=
for tag in $taglist; do
tagopts="$tagopts --tag $tag"
done
relink_command="(cd `pwd`; $SHELL $0$tagopts --mode=relink $libtool_args)"
relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
# Only create the output if not a dry run.
if test -z "$run"; then
for installed in no yes; do
if test "$installed" = yes; then
if test -z "$install_libdir"; then
break
fi
output="$output_objdir/$outputname"i
# Replace all uninstalled libtool libraries with the installed ones
newdependency_libs=
for deplib in $dependency_libs; do
case $deplib in
*.la)
name=`$echo "X$deplib" | $Xsed -e 's%^.*/%%'`
# Read libdir= out of the dependency's own .la file.
eval libdir=`sed -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
if test -z "$libdir"; then
$echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2
exit 1
fi
newdependency_libs="$newdependency_libs $libdir/$name"
;;
*) newdependency_libs="$newdependency_libs $deplib" ;;
esac
done
dependency_libs="$newdependency_libs"
newdlfiles=
for lib in $dlfiles; do
name=`$echo "X$lib" | $Xsed -e 's%^.*/%%'`
eval libdir=`sed -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
if test -z "$libdir"; then
$echo "$modename: \`$lib' is not a valid libtool archive" 1>&2
exit 1
fi
newdlfiles="$newdlfiles $libdir/$name"
done
dlfiles="$newdlfiles"
newdlprefiles=
for lib in $dlprefiles; do
name=`$echo "X$lib" | $Xsed -e 's%^.*/%%'`
eval libdir=`sed -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
if test -z "$libdir"; then
$echo "$modename: \`$lib' is not a valid libtool archive" 1>&2
exit 1
fi
newdlprefiles="$newdlprefiles $libdir/$name"
done
dlprefiles="$newdlprefiles"
fi
$rm $output
# place dlname in correct position for cygwin
tdlname=$dlname
case $host,$output,$installed,$module,$dlname in
*cygwin*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;;
esac
$echo > $output "\
# $outputname - a libtool library file
# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
#
# Please DO NOT delete this file!
# It is necessary for linking the library.
# The name that we can dlopen(3).
dlname='$tdlname'
# Names of this library.
library_names='$library_names'
# The name of the static archive.
old_library='$old_library'
# Libraries that this one depends upon.
dependency_libs='$dependency_libs'
# Version information for $libname.
current=$current
age=$age
revision=$revision
# Is this an already installed library?
installed=$installed
# Files to dlopen/dlpreopen
dlopen='$dlfiles'
dlpreopen='$dlprefiles'
# Directory that this library needs to be installed in:
libdir='$install_libdir'"
if test "$installed" = no && test $need_relink = yes; then
$echo >> $output "\
relink_command=\"$relink_command\""
fi
done
fi
# Do a symbolic link so that the libtool archive can be found in
# LD_LIBRARY_PATH before the program is installed.
$show "(cd $output_objdir && $rm $outputname && $LN_S ../$outputname $outputname)"
$run eval '(cd $output_objdir && $rm $outputname && $LN_S ../$outputname $outputname)' || exit $?
;;
esac
exit 0
;;
# libtool install mode
install)
modename="$modename: install"
# There may be an optional sh(1) argument at the beginning of
# install_prog (especially on Windows NT).
if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
# Allow the use of GNU shtool's install command.
$echo "X$nonopt" | $Xsed | grep shtool > /dev/null; then
# Aesthetically quote it.
arg=`$echo "X$nonopt" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*)
arg="\"$arg\""
;;
esac
install_prog="$arg "
arg="$1"
shift
else
install_prog=
arg="$nonopt"
fi
# The real first argument should be the name of the installation program.
# Aesthetically quote it.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*)
arg="\"$arg\""
;;
esac
install_prog="$install_prog$arg"
# We need to accept at least all the BSD install flags.
dest=
files=
opts=
prev=
install_type=
isdir=no
stripme=
for arg
do
if test -n "$dest"; then
files="$files $dest"
dest="$arg"
continue
fi
case $arg in
-d) isdir=yes ;;
-f) prev="-f" ;;
-g) prev="-g" ;;
-m) prev="-m" ;;
-o) prev="-o" ;;
-s)
stripme=" -s"
continue
;;
-*) ;;
*)
# If the previous option needed an argument, then skip it.
if test -n "$prev"; then
prev=
else
dest="$arg"
continue
fi
;;
esac
# Aesthetically quote the argument.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*)
arg="\"$arg\""
;;
esac
install_prog="$install_prog $arg"
done
if test -z "$install_prog"; then
$echo "$modename: you must specify an install program" 1>&2
$echo "$help" 1>&2
exit 1
fi
if test -n "$prev"; then
$echo "$modename: the \`$prev' option requires an argument" 1>&2
$echo "$help" 1>&2
exit 1
fi
if test -z "$files"; then
if test -z "$dest"; then
$echo "$modename: no file or destination specified" 1>&2
else
$echo "$modename: you must specify a destination" 1>&2
fi
$echo "$help" 1>&2
exit 1
fi
# Strip any trailing slash from the destination.
dest=`$echo "X$dest" | $Xsed -e 's%/$%%'`
# Check to see that the destination is a directory.
test -d "$dest" && isdir=yes
if test "$isdir" = yes; then
destdir="$dest"
destname=
else
destdir=`$echo "X$dest" | $Xsed -e 's%/[^/]*$%%'`
test "X$destdir" = "X$dest" && destdir=.
destname=`$echo "X$dest" | $Xsed -e 's%^.*/%%'`
# Not a directory, so check to see that there is only one file specified.
set dummy $files
if test $# -gt 2; then
$echo "$modename: \`$dest' is not a directory" 1>&2
$echo "$help" 1>&2
exit 1
fi
fi
case $destdir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
for file in $files; do
case $file in
*.lo) ;;
*)
$echo "$modename: \`$destdir' must be an absolute directory name" 1>&2
$echo "$help" 1>&2
exit 1
;;
esac
done
;;
esac
# This variable tells wrapper scripts just to set variables rather
# than running their programs.
libtool_install_magic="$magic"
staticlibs=
future_libdirs=
current_libdirs=
for file in $files; do
# Do each installation.
case $file in
*.$libext)
# Do the static libraries later.
staticlibs="$staticlibs $file"
;;
*.la)
# Check to see that this really is a libtool archive.
if (sed -e '2q' $file | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then :
else
$echo "$modename: \`$file' is not a valid libtool archive" 1>&2
$echo "$help" 1>&2
exit 1
fi
library_names=
old_library=
relink_command=
# If there is no directory component, then add one.
case $file in
*/* | *\\*) . $file ;;
*) . ./$file ;;
esac
# Add the libdir to current_libdirs if it is the destination.
if test "X$destdir" = "X$libdir"; then
case "$current_libdirs " in
*" $libdir "*) ;;
*) current_libdirs="$current_libdirs $libdir" ;;
esac
else
# Note the libdir as a future libdir.
case "$future_libdirs " in
*" $libdir "*) ;;
*) future_libdirs="$future_libdirs $libdir" ;;
esac
fi
dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`/
test "X$dir" = "X$file/" && dir=
dir="$dir$objdir"
if test -n "$relink_command"; then
$echo "$modename: warning: relinking \`$file'" 1>&2
$show "$relink_command"
if $run eval "$relink_command"; then :
else
$echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2
exit 1
fi
fi
# See the names of the shared library.
set dummy $library_names
if test -n "$2"; then
realname="$2"
shift
shift
srcname="$realname"
test -n "$relink_command" && srcname="$realname"T
# Install the shared library and build the symlinks.
$show "$install_prog $dir/$srcname $destdir/$realname"
$run eval "$install_prog $dir/$srcname $destdir/$realname" || exit $?
if test -n "$stripme" && test -n "$striplib"; then
$show "$striplib $destdir/$realname"
$run eval "$striplib $destdir/$realname" || exit $?
fi
if test $# -gt 0; then
# Delete the old symlinks, and create new ones.
for linkname
do
if test "$linkname" != "$realname"; then
$show "(cd $destdir && $rm $linkname && $LN_S $realname $linkname)"
$run eval "(cd $destdir && $rm $linkname && $LN_S $realname $linkname)"
fi
done
fi
# Do each command in the postinstall commands.
lib="$destdir/$realname"
eval cmds=\"$postinstall_cmds\"
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
fi
# Install the pseudo-library for information purposes.
name=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
instname="$dir/$name"i
$show "$install_prog $instname $destdir/$name"
$run eval "$install_prog $instname $destdir/$name" || exit $?
# Maybe install the static library, too.
test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library"
;;
*.lo)
# Install (i.e. copy) a libtool object.
# Figure out destination file name, if it wasn't already specified.
if test -n "$destname"; then
destfile="$destdir/$destname"
else
destfile=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
destfile="$destdir/$destfile"
fi
# Deduce the name of the destination old-style object file.
case $destfile in
*.lo)
staticdest=`$echo "X$destfile" | $Xsed -e "$lo2o"`
;;
*.$objext)
staticdest="$destfile"
destfile=
;;
*)
$echo "$modename: cannot copy a libtool object to \`$destfile'" 1>&2
$echo "$help" 1>&2
exit 1
;;
esac
# Install the libtool object if requested.
if test -n "$destfile"; then
$show "$install_prog $file $destfile"
$run eval "$install_prog $file $destfile" || exit $?
fi
# Install the old object if enabled.
if test "$build_old_libs" = yes; then
# Deduce the name of the old-style object file.
staticobj=`$echo "X$file" | $Xsed -e "$lo2o"`
$show "$install_prog $staticobj $staticdest"
$run eval "$install_prog \$staticobj \$staticdest" || exit $?
fi
exit 0
;;
*)
# Figure out destination file name, if it wasn't already specified.
if test -n "$destname"; then
destfile="$destdir/$destname"
else
destfile=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
destfile="$destdir/$destfile"
fi
# Do a test to see if this is really a libtool program.
if (sed -e '4q' $file | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
notinst_deplibs=
relink_command=
# If there is no directory component, then add one.
case $file in
*/* | *\\*) . $file ;;
*) . ./$file ;;
esac
# Check the variables that should have been set.
if test -z "$notinst_deplibs"; then
$echo "$modename: invalid libtool wrapper script \`$file'" 1>&2
exit 1
fi
finalize=yes
for lib in $notinst_deplibs; do
# Check to see that each library is installed.
libdir=
if test -f "$lib"; then
# If there is no directory component, then add one.
case $lib in
*/* | *\\*) . $lib ;;
*) . ./$lib ;;
esac
fi
libfile="$libdir/"`$echo "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test
if test -n "$libdir" && test ! -f "$libfile"; then
$echo "$modename: warning: \`$lib' has not been installed in \`$libdir'" 1>&2
finalize=no
fi
done
relink_command=
# If there is no directory component, then add one.
case $file in
*/* | *\\*) . $file ;;
*) . ./$file ;;
esac
outputname=
if test "$fast_install" = no && test -n "$relink_command"; then
if test "$finalize" = yes && test -z "$run"; then
tmpdir="/tmp"
test -n "$TMPDIR" && tmpdir="$TMPDIR"
tmpdir="$tmpdir/libtool-$$"
if $mkdir -p "$tmpdir" && chmod 700 "$tmpdir"; then :
else
$echo "$modename: error: cannot create temporary directory \`$tmpdir'" 1>&2
continue
fi
file=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
outputname="$tmpdir/$file"
# Replace the output file specification.
relink_command=`$echo "X$relink_command" | $Xsed -e 's%@OUTPUT@%'"$outputname"'%g'`
$show "$relink_command"
if $run eval "$relink_command"; then :
else
$echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2
${rm}r "$tmpdir"
continue
fi
file="$outputname"
else
$echo "$modename: warning: cannot relink \`$file'" 1>&2
fi
else
# Install the binary that we compiled earlier.
file=`$echo "X$file" | $Xsed -e "s%\([^/]*\)$%$objdir/\1%"`
fi
fi
# remove .exe since cygwin /usr/bin/install will append another
# one anyways
case $install_prog,$host in
*/usr/bin/install*,*cygwin*)
case $file:$destfile in
*.exe:*.exe)
# this is ok
;;
*.exe:*)
destfile=$destfile.exe
;;
*:*.exe)
destfile=`echo $destfile | sed -e 's,.exe$,,'`
;;
esac
;;
esac
$show "$install_prog$stripme $file $destfile"
$run eval "$install_prog\$stripme \$file \$destfile" || exit $?
test -n "$outputname" && ${rm}r "$tmpdir"
;;
esac
done
for file in $staticlibs; do
name=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
# Set up the ranlib parameters.
oldlib="$destdir/$name"
$show "$install_prog $file $oldlib"
$run eval "$install_prog \$file \$oldlib" || exit $?
if test -n "$stripme" && test -n "$striplib"; then
$show "$old_striplib $oldlib"
$run eval "$old_striplib $oldlib" || exit $?
fi
# Do each command in the postinstall commands.
eval cmds=\"$old_postinstall_cmds\"
# APPLE LOCAL begin handle ~ in pathnames 2002-01-14 sts
IFS="${IFS= }"; save_ifs="$IFS"; IFS='@'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
done
if test -n "$future_libdirs"; then
$echo "$modename: warning: remember to run \`$progname --finish$future_libdirs'" 1>&2
fi
if test -n "$current_libdirs"; then
# Maybe just do a dry run.
test -n "$run" && current_libdirs=" -n$current_libdirs"
exec_cmd='$SHELL $0 --finish$current_libdirs'
else
exit 0
fi
;;
# libtool finish mode
finish)
modename="$modename: finish"
libdirs="$nonopt"
admincmds=
if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
for dir
do
libdirs="$libdirs $dir"
done
for libdir in $libdirs; do
if test -n "$finish_cmds"; then
# Do each command in the finish commands.
eval cmds=\"$finish_cmds\"
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || admincmds="$admincmds
$cmd"
done
IFS="$save_ifs"
fi
if test -n "$finish_eval"; then
# Do the single finish_eval.
eval cmds=\"$finish_eval\"
$run eval "$cmds" || admincmds="$admincmds
$cmds"
fi
done
fi
# Exit here if they wanted silent mode.
test "$show" = ":" && exit 0
echo "----------------------------------------------------------------------"
echo "Libraries have been installed in:"
for libdir in $libdirs; do
echo " $libdir"
done
echo
echo "If you ever happen to want to link against installed libraries"
echo "in a given directory, LIBDIR, you must either use libtool, and"
echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
echo "flag during linking and do at least one of the following:"
if test -n "$shlibpath_var"; then
echo " - add LIBDIR to the \`$shlibpath_var' environment variable"
echo " during execution"
fi
if test -n "$runpath_var"; then
echo " - add LIBDIR to the \`$runpath_var' environment variable"
echo " during linking"
fi
if test -n "$hardcode_libdir_flag_spec"; then
libdir=LIBDIR
eval flag=\"$hardcode_libdir_flag_spec\"
echo " - use the \`$flag' linker flag"
fi
if test -n "$admincmds"; then
echo " - have your system administrator run these commands:$admincmds"
fi
if test -f /etc/ld.so.conf; then
echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
fi
echo
echo "See any operating system documentation about shared libraries for"
echo "more information, such as the ld(1) and ld.so(8) manual pages."
echo "----------------------------------------------------------------------"
exit 0
;;
# libtool execute mode
execute)
modename="$modename: execute"
# The first argument is the command name.
cmd="$nonopt"
if test -z "$cmd"; then
$echo "$modename: you must specify a COMMAND" 1>&2
$echo "$help"
exit 1
fi
# Handle -dlopen flags immediately.
for file in $execute_dlfiles; do
if test ! -f "$file"; then
$echo "$modename: \`$file' is not a file" 1>&2
$echo "$help" 1>&2
exit 1
fi
dir=
case $file in
*.la)
# Check to see that this really is a libtool archive.
if (sed -e '2q' $file | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then :
else
$echo "$modename: \`$lib' is not a valid libtool archive" 1>&2
$echo "$help" 1>&2
exit 1
fi
# Read the libtool library.
dlname=
library_names=
# If there is no directory component, then add one.
case $file in
*/* | *\\*) . $file ;;
*) . ./$file ;;
esac
# Skip this library if it cannot be dlopened.
if test -z "$dlname"; then
# Warn if it was a shared library.
test -n "$library_names" && $echo "$modename: warning: \`$file' was not linked with \`-export-dynamic'"
continue
fi
dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`
test "X$dir" = "X$file" && dir=.
if test -f "$dir/$objdir/$dlname"; then
dir="$dir/$objdir"
else
$echo "$modename: cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" 1>&2
exit 1
fi
;;
*.lo)
# Just add the directory containing the .lo file.
dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`
test "X$dir" = "X$file" && dir=.
;;
*)
$echo "$modename: warning \`-dlopen' is ignored for non-libtool libraries and objects" 1>&2
continue
;;
esac
# Get the absolute pathname.
absdir=`cd "$dir" && pwd`
test -n "$absdir" && dir="$absdir"
# Now add the directory to shlibpath_var.
if eval "test -z \"\$$shlibpath_var\""; then
eval "$shlibpath_var=\"\$dir\""
else
eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
fi
done
# This variable tells wrapper scripts just to set shlibpath_var
# rather than running their programs.
libtool_execute_magic="$magic"
# Check if any of the arguments is a wrapper script.
args=
for file
do
case $file in
-*) ;;
*)
# Do a test to see if this is really a libtool program.
if (sed -e '4q' $file | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
# If there is no directory component, then add one.
case $file in
*/* | *\\*) . $file ;;
*) . ./$file ;;
esac
# Transform arg to wrapped name.
file="$progdir/$program"
fi
;;
esac
# Quote arguments (to preserve shell metacharacters).
file=`$echo "X$file" | $Xsed -e "$sed_quote_subst"`
args="$args \"$file\""
done
if test -z "$run"; then
if test -n "$shlibpath_var"; then
# Export the shlibpath_var.
eval "export $shlibpath_var"
fi
# Restore saved enviroment variables
if test "${save_LC_ALL+set}" = set; then
LC_ALL="$save_LC_ALL"; export LC_ALL
fi
if test "${save_LANG+set}" = set; then
LANG="$save_LANG"; export LANG
fi
# Now prepare to actually exec the command.
exec_cmd='"$cmd"$args'
else
# Display what would be done.
if test -n "$shlibpath_var"; then
eval "\$echo \"\$shlibpath_var=\$$shlibpath_var\""
$echo "export $shlibpath_var"
fi
$echo "$cmd$args"
exit 0
fi
;;
# libtool clean and uninstall mode
clean | uninstall)
modename="$modename: $mode"
rm="$nonopt"
files=
rmforce=
exit_status=0
# This variable tells wrapper scripts just to set variables rather
# than running their programs.
libtool_install_magic="$magic"
for arg
do
case $arg in
-f) rm="$rm $arg"; rmforce=yes ;;
-*) rm="$rm $arg" ;;
*) files="$files $arg" ;;
esac
done
if test -z "$rm"; then
$echo "$modename: you must specify an RM program" 1>&2
$echo "$help" 1>&2
exit 1
fi
rmdirs=
for file in $files; do
dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`
if test "X$dir" = "X$file"; then
dir=.
objdir="$objdir"
else
objdir="$dir/$objdir"
fi
name=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
test $mode = uninstall && objdir="$dir"
# Remember objdir for removal later, being careful to avoid duplicates
if test $mode = clean; then
case " $rmdirs " in
*" $objdir "*) ;;
*) rmdirs="$rmdirs $objdir" ;;
esac
fi
# Don't error if the file doesn't exist and rm -f was used.
if (test -L "$file") >/dev/null 2>&1 \
|| (test -h "$file") >/dev/null 2>&1 \
|| test -f "$file"; then
:
elif test -d "$file"; then
exit_status=1
continue
elif test "$rmforce" = yes; then
continue
fi
rmfiles="$file"
case $name in
*.la)
# Possibly a libtool archive, so verify it.
if (sed -e '2q' $file | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
. $dir/$name
# Delete the libtool libraries and symlinks.
for n in $library_names; do
rmfiles="$rmfiles $objdir/$n"
done
test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library"
test $mode = clean && rmfiles="$rmfiles $objdir/$name $objdir/${name}i"
if test $mode = uninstall; then
if test -n "$library_names"; then
# Do each command in the postuninstall commands.
eval cmds=\"$postuninstall_cmds\"
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd"
if test $? != 0 && test "$rmforce" != yes; then
exit_status=1
fi
done
IFS="$save_ifs"
fi
if test -n "$old_library"; then
# Do each command in the old_postuninstall commands.
eval cmds=\"$old_postuninstall_cmds\"
IFS="${IFS= }"; save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd"
if test $? != 0 && test "$rmforce" != yes; then
exit_status=1
fi
done
IFS="$save_ifs"
fi
# FIXME: should reinstall the best remaining shared library.
fi
fi
;;
*.lo)
# Possibly a libtool object, so verify it.
if (sed -e '2q' $file | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
# Read the .lo file
. $dir/$name
# Add PIC object to the list of files to remove.
if test -n "$pic_object" \
&& test "$pic_object" != none; then
rmfiles="$rmfiles $dir/$pic_object"
fi
# Add non-PIC object to the list of files to remove.
if test -n "$non_pic_object" \
&& test "$non_pic_object" != none; then
rmfiles="$rmfiles $dir/$non_pic_object"
fi
fi
;;
*)
# Do a test to see if this is a libtool program.
if test $mode = clean &&
(sed -e '4q' $file | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
relink_command=
. $dir/$file
rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}"
if test "$fast_install" = yes && test -n "$relink_command"; then
rmfiles="$rmfiles $objdir/lt-$name"
fi
fi
;;
esac
$show "$rm $rmfiles"
$run $rm $rmfiles || exit_status=1
done
# Try to remove the ${objdir}s in the directories where we deleted files
for dir in $rmdirs; do
if test -d "$dir"; then
$show "rmdir $dir"
$run rmdir $dir >/dev/null 2>&1
fi
done
exit $exit_status
;;
"")
$echo "$modename: you must specify a MODE" 1>&2
$echo "$generic_help" 1>&2
exit 1
;;
esac
if test -z "$exec_cmd"; then
$echo "$modename: invalid operation mode \`$mode'" 1>&2
$echo "$generic_help" 1>&2
exit 1
fi
fi # test -z "$show_help"
if test -n "$exec_cmd"; then
eval exec $exec_cmd
exit 1
fi
# We need to display help for each of the modes.
case $mode in
"") $echo \
"Usage: $modename [OPTION]... [MODE-ARG]...
Provide generalized library-building support services.
--config show all configuration variables
--debug enable verbose shell tracing
-n, --dry-run display commands without modifying any files
--features display basic configuration information and exit
--finish same as \`--mode=finish'
--help display this help message and exit
--mode=MODE use operation mode MODE [default=inferred from MODE-ARGS]
--quiet same as \`--silent'
--silent don't print informational messages
--tag=TAG use configuration variables from tag TAG
--version print version information
MODE must be one of the following:
clean remove files from the build directory
compile compile a source file into a libtool object
execute automatically set library path, then run a program
finish complete the installation of libtool libraries
install install libraries or executables
link create a library or an executable
uninstall remove libraries from an installed directory
MODE-ARGS vary depending on the MODE. Try \`$modename --help --mode=MODE' for
a more detailed description of MODE."
exit 0
;;
clean)
$echo \
"Usage: $modename [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
Remove files from the build directory.
RM is the name of the program to use to delete files associated with each FILE
(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed
to RM.
If FILE is a libtool library, object or program, all the files associated
with it are deleted. Otherwise, only FILE itself is deleted using RM."
;;
compile)
$echo \
"Usage: $modename [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
Compile a source file into a libtool library object.
This mode accepts the following additional options:
-o OUTPUT-FILE set the output file name to OUTPUT-FILE
-prefer-pic try to building PIC objects only
-prefer-non-pic try to building non-PIC objects only
-static always build a \`.o' file suitable for static linking
COMPILE-COMMAND is a command to be used in creating a \`standard' object file
from the given SOURCEFILE.
The output file name is determined by removing the directory component from
SOURCEFILE, then substituting the C source code suffix \`.c' with the
library object suffix, \`.lo'."
;;
execute)
$echo \
"Usage: $modename [OPTION]... --mode=execute COMMAND [ARGS]...
Automatically set library path, then run a program.
This mode accepts the following additional options:
-dlopen FILE add the directory containing FILE to the library path
This mode sets the library path environment variable according to \`-dlopen'
flags.
If any of the ARGS are libtool executable wrappers, then they are translated
into their corresponding uninstalled binary, and any of their required library
directories are added to the library path.
Then, COMMAND is executed, with ARGS as arguments."
;;
finish)
$echo \
"Usage: $modename [OPTION]... --mode=finish [LIBDIR]...
Complete the installation of libtool libraries.
Each LIBDIR is a directory that contains libtool libraries.
The commands that this mode executes may require superuser privileges. Use
the \`--dry-run' option if you just want to see what would be executed."
;;
install)
$echo \
"Usage: $modename [OPTION]... --mode=install INSTALL-COMMAND...
Install executables or libraries.
INSTALL-COMMAND is the installation command. The first component should be
either the \`install' or \`cp' program.
The rest of the components are interpreted as arguments to that command (only
BSD-compatible install options are recognized)."
;;
link)
$echo \
"Usage: $modename [OPTION]... --mode=link LINK-COMMAND...
Link object files or libraries together to form another library, or to
create an executable program.
LINK-COMMAND is a command using the C compiler that you would use to create
a program from several object files.
The following components of LINK-COMMAND are treated specially:
-all-static do not do any dynamic linking at all
-avoid-version do not add a version suffix if possible
-dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime
-dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols
-export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
-export-symbols SYMFILE
try to export only the symbols listed in SYMFILE
-export-symbols-regex REGEX
try to export only the symbols matching REGEX
-LLIBDIR search LIBDIR for required installed libraries
-lNAME OUTPUT-FILE requires the installed library libNAME
-module build a library that can dlopened
-no-fast-install disable the fast-install mode
-no-install link a not-installable executable
-no-undefined declare that a library does not refer to external symbols
-o OUTPUT-FILE create OUTPUT-FILE from the specified objects
-objectlist FILE Use a list of object files found in FILE to specify objects
-release RELEASE specify package release information
-rpath LIBDIR the created library will eventually be installed in LIBDIR
-R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries
-static do not do any dynamic linking of libtool libraries
-version-info CURRENT[:REVISION[:AGE]]
specify library version info [each variable defaults to 0]
All other options (arguments beginning with \`-') are ignored.
Every other argument is treated as a filename. Files ending in \`.la' are
treated as uninstalled libtool libraries, other files are standard or library
object files.
If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
only library objects (\`.lo' files) may be specified, and \`-rpath' is
required, except when creating a convenience library.
If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
using \`ar' and \`ranlib', or on Windows using \`lib'.
If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
is created, otherwise an executable program is created."
;;
uninstall)
$echo \
"Usage: $modename [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
Remove libraries from an installation directory.
RM is the name of the program to use to delete files associated with each FILE
(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed
to RM.
If FILE is a libtool library, all the files associated with it are deleted.
Otherwise, only FILE itself is deleted using RM."
;;
*)
$echo "$modename: invalid operation mode \`$mode'" 1>&2
$echo "$help" 1>&2
exit 1
;;
esac
echo
$echo "Try \`$modename --help' for more information about other modes."
exit 0
# The TAGs below are defined such that we never get into a situation
# in which we disable both kinds of libraries. Given conflicting
# choices, we go for a static library, that is the most portable,
# since we can't tell whether shared libraries were disabled because
# the user asked for that or because the platform doesn't support
# them. This is particularly important on AIX, because we don't
# support having both static and shared libraries enabled at the same
# time on that platform, so we default to a shared-only configuration.
# If a disable-shared tag is given, we'll fallback to a static-only
# configuration. But we'll never go from static-only to shared-only.
### BEGIN LIBTOOL TAG CONFIG: disable-shared
build_libtool_libs=no
build_old_libs=yes
### END LIBTOOL TAG CONFIG: disable-shared
### BEGIN LIBTOOL TAG CONFIG: disable-static
build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
### END LIBTOOL TAG CONFIG: disable-static
# Local Variables:
# mode:shell-script
# sh-indentation:2
# End:
|
unofficial-opensource-apple/gccfast
|
ltmain.sh
|
Shell
|
gpl-2.0
| 157,743 |
#!/bin/sh
# Start HomeGenie under Mono with the soft-debugger agent listening on
# 10.0.1.10:10000, and keep restarting it for as long as it exits with
# status 1 (used by HomeGenie to request a restart).
#
# Optional argument:
#   $1 - directory to run HomeGenie from (output is silenced in that case)

# Run from the directory this script lives in.
cd "$(dirname "$0")" || exit 1

# Prefer the system mono, fall back to a /usr/local install.
if [ -f /usr/bin/mono ]
then
  MONO=/usr/bin/mono
else
  MONO=/usr/local/bin/mono
fi

EXITCODE="1"
while [ "$EXITCODE" = "1" ]; do
  if [ -z "$1" ]
  then
    sudo $MONO --debug --debugger-agent="address=10.0.1.10:10000,transport=dt_socket,server=y" HomeGenie.exe
  else
    # BUG FIX: quote "$1" so paths containing spaces work, and bail out
    # instead of silently running from the wrong directory if cd fails.
    cd "$1" || exit 1
    sudo $MONO --debug --debugger-agent="address=10.0.1.10:10000,transport=dt_socket,server=y" HomeGenie.exe >/dev/null 2>&1
  fi
  EXITCODE="$?"
  echo "Exit code: $EXITCODE"
done
|
mtanana/HomeGenie
|
BaseFiles/Linux/startup_debug.sh
|
Shell
|
gpl-3.0
| 482 |
#
# Get pool stats.
#
# Runs the vpooladmin utility in -stat mode and prints its human-readable
# output. Requires a pool to be selected first (require_pool).
#
function getStats() {
require_pool
# presumably makes runVcaApp run inside a session — TODO confirm in runVcaApp
useSession=1
# Use vpooladmin to get stat output.
runVcaApp vpooladmin -stat
# Reset so subsequent operations are not affected.
useSession=""
}
register_operation getStats 'Show pool statistics.'
# This operation needs the vpooladmin executable to be available.
require_exec vpooladmin
#
# Dump pool stats (machine-readable).
#
# Retrieves the 'statDump' string from the pool; used by getStat to look up
# individual statistics by name.
#
function dumpStats() {
getString statDump
}
register_operation dumpStats 'Dump pool statistics in a machine-readable format.'
#
# Get a particular statistic from pool stat dump.
#
# Arguments:
#   $1 - The name of the statistic to get.
#
function getStat() {
  local stat_name="$1"
  # Select the matching "name | ..." row from the dump and keep only the
  # value portion (space-delimited fields 3 onward).
  dumpStats \
    | grep "$stat_name | " \
    | cut -d ' ' -f 3-
}
register_operation getStat 'Get pool statistic by name.'
#
# Helper function for workerStats, generationStats, quickStats.
# Handles the boilerplate code.
# Assumes that <statsKey>Stats retrieves table data and that <statsKey>StatsHeader retrieves header for table.
#
# Arguments:
#   -k KEY - The IGetter key to use for stats retrieval, sans 'Stats' suffix.
#   -d     - Data-only, do not print header.
#
function statsTable() {
  local dataOnly=false
  local key
  local OPTIND=1
  while getopts 'k:d' OPTION; do
    case $OPTION in
      k)
        debug_inform "Found key $OPTARG in statsTable()."
        key=$OPTARG
        ;;
      d)
        debug_inform "Enabling data-only option."
        dataOnly=true
        ;;
      \?)
        # BUG FIX: the old message blamed workerStats; this helper is shared
        # by several *Stats operations, so name the actual function. Also
        # escape '?' so the arm matches only getopts' invalid-option marker
        # rather than any single character.
        warn "Unhandled option to statsTable: '-$OPTION'. Ignoring."
        ;;
    esac
  done
  shift $((OPTIND - 1))
  OPTIND=1
  # Make sure we have a stats key at least.
  [[ -z "$key" ]] && error "No stats key provided."
  # Show header unless dataOnly is true.
  $dataOnly || getString "${key}StatsHeader"
  # Get our stats and print them.
  getString "${key}Stats"
}
#
# Helper function for *Stats_help.
#
# Arguments:
#   $1 - The IGetter key to use for stats retrieval, sans 'Stats' suffix.
#
# Emits a word-wrapped usage blurb for the <key>Stats operation. The heredoc
# delimiter is unquoted, so $key and $POOLTESTER are expanded at call time.
#
function statsTable_help() {
local key=$1
word_wrap <<EOD
Dumps $key statistics from a pool in tabular format with one line (row) per $key. Also prints a header line for the statistics dump detailing what each column represents.
Optionally, the following arguments can be provided to the ${key}Stats operation:
-d Data-only mode; do not print header line.
Example: Dump $key statistics for pool 3 and suppress the header line.
$POOLTESTER -p 3 ${key}Stats -d
EOD
}
#
# Get pool stats (one-liner) with header.
#
# Arguments:
#   -d - Data-only, do not print header.
#
function quickStats() {
  # BUG FIX: quote "$@" so arguments containing whitespace are forwarded to
  # statsTable as single words instead of being re-split.
  statsTable -k quick "$@"
}
register_operation quickStats 'Show pool statistics all in one line.'
# Detailed usage text for the quickStats operation (word-wrapped heredoc;
# $POOLTESTER is expanded at call time).
function quickStats_help() {
word_wrap <<EOD
Retrieves a one-liner statistics dump from a pool. Also prints a header line for the statistics dump detailing what each column represents.
Optionally, the following arguments can be provided to the quickStats operation:
-d Data-only mode; do not print header line.
Example: Retrieve one-liner statistics for pool 2 and suppress the header line.
$POOLTESTER -p 2 quickStats -d
EOD
}
#
# Get pool stats (one-liner) header.
#
# Prints only the header row for the quickStats table (the 'quickStatsHeader'
# getter string).
#
function quickStatsHeader() {
getString quickStatsHeader
}
register_operation quickStatsHeader 'Show pool statistics header for all-in-one-line quick pool statistics.'
#
# Get worker stats.
#
# Arguments:
#   -d - Data-only, do not print header.
#
function workerStats() {
  # BUG FIX: quote "$@" so arguments containing whitespace survive forwarding.
  statsTable -k worker "$@"
}
register_operation workerStats 'Show worker statistics.'

# Detailed usage text for workerStats (shared statsTable_help boilerplate).
function workerStats_help() {
  statsTable_help worker
}
#
# Get generation stats.
#
# Arguments:
#   -d - Data-only, do not print header.
#
function generationStats() {
  # BUG FIX: quote "$@" so arguments containing whitespace survive forwarding.
  statsTable -k generation "$@"
}
register_operation generationStats 'Show generation statistics.'

# Detailed usage text for generationStats (shared statsTable_help boilerplate).
function generationStats_help() {
  statsTable_help generation
}
|
MichaelJCaruso/vision
|
software/testtools/poolTester/include/stats.sh
|
Shell
|
bsd-3-clause
| 3,883 |
#!/usr/bin/env bash
# Fetch the most recent tagged sockjs-client release, build it, globalify the
# result into this directory, and re-apply the local patches.
set -e

CURRENTDIR=$(pwd)
DESTDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
TEMPDIR=$(mktemp -d 2> /dev/null || mktemp -d -t 'tmp')

# Always return to the original directory and remove the checkout, even on
# interrupt or error.
cleanup () {
  cd "$CURRENTDIR"
  [ -d "$TEMPDIR" ] && rm -rf "$TEMPDIR"
}
trap cleanup INT TERM EXIT

git clone https://github.com/sockjs/sockjs-client.git "$TEMPDIR"
cd "$TEMPDIR"
# Build from the latest tag rather than the tip of the default branch.
git checkout "$(git describe --tags --abbrev=0)"
NODE_ENV=production npm install

"$DESTDIR/update_tools/globalify.sh" "$TEMPDIR"
cd "$DESTDIR"
# BUG FIX: quote the glob so find performs the match; an unquoted *.patch
# would be expanded by the shell against the current directory first.
find patches -name '*.patch' -exec patch -i {} \;
|
basarat/primus
|
transformers/sockjs/update.sh
|
Shell
|
mit
| 510 |
#!/bin/bash
# Regenerate each versioned Dockerfile from the template, pinning it to the
# newest downloadable Elasticsearch release matching the directory's version
# prefix, and refresh the entrypoint script alongside it.
set -e

cd "$(dirname "$(readlink -f "$BASH_SOURCE")")"

# Every subdirectory name is a version prefix (e.g. "1.7"); strip the slash.
versions=( */ )
versions=( "${versions[@]%/}" )

# Scrape the past-releases page for all published x.y.z version strings.
downloadable=$(curl -sSL 'https://www.elastic.co/downloads/past-releases' | sed -rn 's!.*?/downloads/past-releases/(elasticsearch-)?[0-9]+-[0-9]+-[0-9]+">Elasticsearch ([0-9]+\.[0-9]+\.[0-9]+)<.*!\2!gp')

for version in "${versions[@]}"; do
  # First (newest) release whose version string contains this prefix.
  recent=$(echo "$downloadable" | grep -m 1 "$version")
  sed 's/%%VERSION%%/'"$recent"'/' <Dockerfile.template >"$version/Dockerfile"
  # BUG FIX: quote $version so the destination cannot be word-split or
  # glob-expanded by the shell.
  cp -p docker-entrypoint.sh "$version"
done
|
vovimayhem/elasticsearch
|
update.sh
|
Shell
|
apache-2.0
| 533 |
#!/usr/bin/env bash
FAILED_CHECKS="0"

# Mark the current check as failed (FAILED_CHECKS later becomes the script's
# exit code) and print a blank line followed by a red [FAILED] marker.
function add_fail() {
  FAILED_CHECKS=1
  printf '\n\033[0;31m[FAILED]\033[0m\n'
}
# Print a blank line followed by a green [PASSED] marker; leaves
# FAILED_CHECKS untouched.
function add_pass() {
  printf '\n\033[0;32m[PASSED]\033[0m\n'
}
echo ""
echo "Checking Twig namespace aliases"
echo "-------------------------------"
echo ""
# A grep hit means a legacy Twig_* alias is still referenced; occurrences in
# @see / @expectedException annotations are excluded by the lookbehind.
grep -rn -P '((?<!@see|@expectedException)(^use T|\\T))(wig_\w+)\s*;' src/ tests/phpunit/ tests/codeception/
# grep exits 0 on a match, so: match found => fail, no match => pass.
[[ $? -eq 0 ]] && add_fail || add_pass
echo ""
echo "Checking PHPUnit namespace aliases"
echo "----------------------------------"
echo ""
# Legacy PHPUnit_* underscore class names (excluding @see references).
grep -rn -P '((?<!@see)(^use P|\\P))(HPUnit_(Framework|Util|Extensions|Runner|TextUI|Exception)_\w+)\s*;' tests/phpunit/ tests/codeception/
[[ $? -eq 0 ]] && add_fail || add_pass
echo ""
echo "Checking PHPUnit mocks"
echo "----------------------"
echo ""
# Direct $this->getMock() calls are flagged as deprecated by this check.
grep -rn -P 'this\->getMock\(' tests/phpunit/ tests/codeception/
[[ $? -eq 0 ]] && add_fail || add_pass
echo ""
# Non-zero exit if any of the checks above failed.
exit $FAILED_CHECKS
|
romulo1984/bolt
|
tests/scripts/deprecation-check.sh
|
Shell
|
mit
| 945 |
#!/bin/bash
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Launches a container and verifies it can be reached. Assumes that
# we're being called by hack/e2e-test.sh (we use some env vars it sets up).
# Fail fast: abort on any error, unset variable, or failed pipeline stage.
set -o errexit
set -o nounset
set -o pipefail
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/../..
# These may already be set by the caller (hack/e2e-test.sh); default otherwise.
: ${KUBE_VERSION_ROOT:=${KUBE_ROOT}}
: ${KUBECTL:="${KUBE_VERSION_ROOT}/cluster/kubectl.sh"}
: ${KUBE_CONFIG_FILE:="config-test.sh"}
export KUBECTL KUBE_CONFIG_FILE
# Pull in cluster helpers (prepare-e2e, detect-master, $GCLOUD, $ZONE, ...).
source "${KUBE_ROOT}/cluster/kube-env.sh"
source "${KUBE_VERSION_ROOT}/cluster/${KUBERNETES_PROVIDER}/util.sh"
prepare-e2e
# The certificate file layout checked below is only expected on GCE/GKE.
if [[ "${KUBERNETES_PROVIDER}" != "gce" ]] && [[ "${KUBERNETES_PROVIDER}" != "gke" ]]; then
echo "WARNING: Skipping certs.sh for cloud provider: ${KUBERNETES_PROVIDER}."
exit 0
fi
# Set KUBE_MASTER
detect-master
# IMPORTANT: there are upstream things that rely on these files.
# Do *not* fix this test by changing this path, unless you _really_ know
# what you are doing.
# errexit makes the script fail if any of the files is missing on the master.
for file in kubecfg.key kubecfg.crt ca.crt; do
echo "Checking for ${file}"
"${GCLOUD}" compute ssh --zone="${ZONE}" "${KUBE_MASTER}" --command "ls /srv/kubernetes/${file}"
done
|
shakamunyi/kubernetes
|
hack/e2e-suite/certs.sh
|
Shell
|
apache-2.0
| 1,698 |
#!/bin/bash
# Regenerate per-year citation CSVs: for each line of inputs.txt, run
# citations.py with the year ($1) and page count ($2) and append the output
# to <year>.csv.
#
# '-f' makes the cleanup idempotent: plain 'rm *.csv' printed an error and
# returned non-zero on a fresh checkout where no CSVs exist yet.
rm -f *.csv
# NOTE(review): assumes inputs.txt columns are <year> <pages> — confirm;
# fields are interpolated into a shell command, so inputs.txt must be trusted.
awk '{system("python citations.py -y " $1 " -p " $2 " >> " $1 ".csv")}' inputs.txt
|
joelsherrill/gci_tasks
|
2018/citations/doit.sh
|
Shell
|
bsd-2-clause
| 105 |
#!/bin/bash
# Conda post-link hook: download the pd.bovgene.1.1.st annotation tarball
# from the first mirror whose copy matches the expected md5, then install it
# into the environment's R library. PREFIX and PKG_* come from conda.
FN="pd.bovgene.1.1.st_3.12.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.14/data/annotation/src/contrib/pd.bovgene.1.1.st_3.12.0.tar.gz"
  "https://bioarchive.galaxyproject.org/pd.bovgene.1.1.st_3.12.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-pd.bovgene.1.1.st/bioconductor-pd.bovgene.1.1.st_3.12.0_src_all.tar.gz"
)
MD5="342246c0a76755fcca458cd723b3e98e"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
# All expansions are quoted: $PREFIX may contain spaces, which broke the
# original unquoted version.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"
SUCCESS=0
for URL in "${URLS[@]}"; do
  # An unreachable mirror makes curl fail; just try the next URL.
  curl "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks (GNU md5sum on Linux, BSD md5 on macOS).
  if [[ $(uname -s) == "Linux" ]]; then
    if md5sum -c <<<"$MD5 $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    # BSD md5 prints 'MD5 (file) = <sum>'; field 4 is the checksum.
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done
if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi
# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
cokelaer/bioconda-recipes
|
recipes/bioconductor-pd.bovgene.1.1.st/post-link.sh
|
Shell
|
mit
| 1,337 |
#!/bin/bash
# Conda post-link hook: download the pd.rg.u34a annotation tarball from the
# first mirror whose copy matches the expected md5, then install it into the
# environment's R library. PREFIX and PKG_* come from conda.
FN="pd.rg.u34a_3.12.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.14/data/annotation/src/contrib/pd.rg.u34a_3.12.0.tar.gz"
  "https://bioarchive.galaxyproject.org/pd.rg.u34a_3.12.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-pd.rg.u34a/bioconductor-pd.rg.u34a_3.12.0_src_all.tar.gz"
)
MD5="ac3a6cc203dabb8faab85b97f1a7ae3c"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
# All expansions are quoted: $PREFIX may contain spaces, which broke the
# original unquoted version.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"
SUCCESS=0
for URL in "${URLS[@]}"; do
  # An unreachable mirror makes curl fail; just try the next URL.
  curl "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks (GNU md5sum on Linux, BSD md5 on macOS).
  if [[ $(uname -s) == "Linux" ]]; then
    if md5sum -c <<<"$MD5 $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    # BSD md5 prints 'MD5 (file) = <sum>'; field 4 is the checksum.
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done
if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi
# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
cokelaer/bioconda-recipes
|
recipes/bioconductor-pd.rg.u34a/post-link.sh
|
Shell
|
mit
| 1,302 |
#!/bin/sh
# This script is designed to be used by Nagios. It checks for the availability of both Microsoft SQL Server 7 and 2000.
#
# Requirements:
#
# FreeTDS 6.0+ (http://www.freetds.org/)
#
# It was written by Tom De Blende ([email protected]) in 2003.
#
# Version 1.0.
# Version 1.1: Rewritten the initial script so that it not only works from the CLI but also from within Nagios. Always helpful...
# Version 1.2: Grouped output so things look a bit better.
# Version 2.0: Rewritten the plugin to support version 6.0+ of FreeTDS.
# Removed sqsh requirement as version 6.0+ of FreeTDS now offers its own CLI client: tsql.
# Older versions of FreeTDS are no longer supported.
#
#
# You might want to change these values:
# Full paths of the external tools, resolved once so the script also works
# from Nagios' restricted PATH.
tsqlcmd=`which tsql`
catcmd=`which cat`
grepcmd=`which grep`
rmcmd=`which rm`
mktempcmd=`which mktemp`
wccmd=`which wc`
sedcmd=`which sed`
trcmd=`which tr`
uniqcmd=`which uniq`
###################################################################################################################
# Positional arguments: host, login, password, SQL Server version (7|2000).
hostname=$1
usr=$2
pswd=$3
srv=$4
if [ ! "$#" == "4" ]; then
echo -e "\nYou did not supply enough arguments. \nUsage: $0 <host> <username> <password> <version> \n \n$0 checks Microsoft SQL Server connectivity. It works with versions 7 and 2000.\n\nYou need a working version of FreeTDS (http://www.freetds.org/) and tsql (included in FreeTDS 6.0+) to connect to the SQL server. \nIt was written by Tom De Blende ([email protected]) in 2003. \n\nExample:\n $0 dbserver sa f00bar 2000\n" && exit "3"
# NOTE(review): when tsql is absent, $tsqlcmd is empty and this unquoted
# test degenerates to '[ == "" ]', a test(1) syntax error rather than a
# match — should be [ "$tsqlcmd" == "" ]. Confirm before relying on it.
elif [ $tsqlcmd == "" ]; then
echo -e "tsql not found! Please verify you have a working version of tsql (included in the FreeTDS version 6.0+) and enter the full path in the script." && exit "3"
fi
# Default Nagios state: 3 = UNKNOWN.
exit="3"
# Creating the command file that contains the sql statement that has to be run on the SQL server.
tmpfile=`$mktempcmd /tmp/$hostname.XXXXXX`
# System processes occupy the low spids; the threshold differs per version,
# so filtering spid > $spid leaves only real user connections.
if [ $srv == "7" ]; then
spid=7
elif [ $srv == "2000" ]; then
spid=50
else
echo -e "$srv is not a supported MS SQL Server version!" && exit "3"
fi
echo -e "select loginame from sysprocesses where spid > $spid order by loginame asc\ngo" > $tmpfile
# Running tsql to get the results back.
resultfile=`$mktempcmd /tmp/$hostname.XXXXXX`
errorfile=`$mktempcmd /tmp/$hostname.XXXXXX`
$tsqlcmd -S $hostname -U $usr -P $pswd < $tmpfile 2>$errorfile > $resultfile
# Bad credentials: CRITICAL (exit 2).
$grepcmd -q "Login failed for user" $errorfile
if [ "$?" == "0" ]; then
$rmcmd -f $tmpfile $resultfile $errorfile;
echo CRITICAL - Could not make connection to SQL server. Login failed.;
exit 2;
fi
# Wrong host name or SQL service down: CRITICAL (exit 2).
$grepcmd -q "There was a problem connecting to the server" $errorfile
if [ "$?" == "0" ]; then
$rmcmd -f $tmpfile $resultfile $errorfile;
echo CRITICAL - Could not make connection to SQL server. Incorrect server name or SQL service not running.;
exit 2;
fi
# A 2-line result is just the tsql banner/prompt, i.e. no data came back.
resultfileln=`$catcmd $resultfile | $wccmd -l | $sedcmd 's/ //g'`
if [ "$resultfileln" == "2" ]; then
$rmcmd -f $tmpfile $resultfile $errorfile;
echo CRITICAL - Could not make connection to SQL server. No data received from host.;
exit 2;
else
# Strip tsql chrome (locale/charset/prompt lines), then count logins and
# build a "count login" summary joined with commas.
nmbr=`$catcmd $resultfile | $grepcmd -v locale | $grepcmd -v charset| $grepcmd -v 1\> | $sedcmd '/^$/d' | $sedcmd 's/ //g' | $wccmd -l | sed 's/ //g'`
users=`$catcmd $resultfile | $grepcmd -v locale | $grepcmd -v charset| $grepcmd -v 1\> | $sedcmd '/^$/d' | $sedcmd 's/ //g' | $uniqcmd -c | $trcmd \\\n , | $sedcmd 's/,$/./g' | $sedcmd 's/,/, /g' | $sedcmd 's/ //g' | $trcmd \\\t " " | $sedcmd 's/ \./\./g' | $sedcmd 's/ ,/,/g'`
$rmcmd -f $tmpfile $resultfile;
echo "OK - MS SQL Server $srv has $nmbr user(s) connected: $users" | sed 's/: $/./g';
exit 0;
fi
# NOTE(review): unreachable — every branch above exits first, so this
# cleanup never runs, and $stdio is never assigned anywhere.
# Cleaning up.
$rmcmd -f $tmpfile $resultfile $errorfile
echo $stdio
exit $exit
|
dnsmichi/nagiosplugins
|
contrib/check_mssql.sh
|
Shell
|
gpl-3.0
| 3,842 |
#!/bin/sh
# Ensure that rm -f existing-non-dir/anything exits successfully
# Copyright (C) 2006-2016 Free Software Foundation, Inc.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# init.sh provides the test harness (framework_failure_, fail=, Exit) and
# path_prepend_ puts the freshly built rm first on PATH.
. "${srcdir=.}/tests/init.sh"; path_prepend_ ./src
print_ver_ rm
# Root can unlink things mortals cannot, which would invalidate the test.
skip_if_root_
touch existing-non-dir || framework_failure_
# With coreutils-6.3, this would exit nonzero. It should not.
# Example from Andreas Schwab.
# ENOTDIR for a path under an existing non-directory must be ignorable
# under -f, exactly like ENOENT.
rm -f existing-non-dir/f > out 2>&1 || fail=1
Exit $fail
|
sunny256/coreutils
|
tests/rm/ignorable.sh
|
Shell
|
gpl-3.0
| 1,046 |
#!/bin/bash
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# igncr makes bash ignore CR in CRLF line endings (Cygwin); probed in a
# subshell first so shells without the option don't abort.
(set -o igncr) 2>/dev/null && set -o igncr; # comment is needed
# Do anything in this script that will help make sure the machine state is constant
##
# Bring in the environment variables
##
# environment.sh presumably defines $basedir used below — TODO confirm.
. ./environment.sh
## sync the private branch
../all/sync-private-branch.sh
## Set the performance suite to use the !desktop datasizes, a.k.a. "mobile"
echo ""
echo "Set the performance suite to use the !desktop datasizes, a.k.a. mobile"
cd $basedir/test/performance
# Keep the original args file as .orig and rewrite desktop=true -> false.
mv dir.asc_args dir.asc_args.orig
sed "s/CONFIG::desktop=true/CONFIG::desktop=false/g" dir.asc_args.orig > dir.asc_args
# Move back into the default location
cd $basedir/build/buildbot/slaves/scripts
|
adobe-flash/avmplus
|
build/buildbot/slaves/android-performance/scripts/prepare.sh
|
Shell
|
mpl-2.0
| 881 |
#!/bin/sh
# Xcode build-phase script: drive the Titanium CLI iPhone build.
# TITANIUM_CLI_XCODEBUILD is exported by the Titanium CLI when it launched
# xcodebuild itself; in that case colored output stays on, otherwise the
# --no-colors flag is passed.
#
# '=' (not '==') is the POSIX string comparison operator for [ ]; '==' is a
# bashism that makes dash-style /bin/sh fail with "unexpected operator".
if [ "x$TITANIUM_CLI_XCODEBUILD" = "x" ]; then NO_COLORS="--no-colors"; else NO_COLORS=""; fi
# $NO_COLORS is intentionally unquoted so an empty value vanishes instead of
# passing an empty argument.
/usr/local/bin/node "/usr/local/bin/titanium" build --platform iphone --sdk 3.1.3.GA --no-prompt --no-banner $NO_COLORS --xcode
exit $?
|
hopye/imcooldo
|
build/iphone/build/imcooldo.build/Debug-iphonesimulator/imcooldo.build/Script-241EAF36118E30260081A5BE.sh
|
Shell
|
apache-2.0
| 241 |
# rally.sh - DevStack extras script to install Rally
# DevStack sources extras.d hooks repeatedly, passing a phase in $1/$2;
# each phase maps to one step of Rally's setup. The helper functions
# (install_rally, configure_rally, ...) come from lib/rally sourced below.
if is_service_enabled rally; then
if [[ "$1" == "source" ]]; then
# Initial source
source $TOP_DIR/lib/rally
elif [[ "$1" == "stack" && "$2" == "install" ]]; then
echo_summary "Installing Rally"
install_rally
elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then
echo_summary "Configuring Rally"
configure_rally
create_rally_accounts
elif [[ "$1" == "stack" && "$2" == "extra" ]]; then
echo_summary "Initializing Rally"
init_rally
fi
fi
|
ytsarev/rally
|
contrib/devstack/extras.d/70-rally.sh
|
Shell
|
apache-2.0
| 587 |
#
# Build a virtual machine that can be booted directly in VMWare.
#
# Usage:
# option VMWareName name
# Name passed as the option's first argument; presumably consumed by later
# crochet build steps that package the VMWare guest — TODO confirm.
VMWARE_NAME="$1"
|
rm4rty/crochet-freebsd
|
board/VMWareGuest/option/VMWareName/setup.sh
|
Shell
|
bsd-2-clause
| 123 |
#!/bin/sh
# DAP4 remote-access test: fetch each file in $F from a d4ts test server via
# ncdump and either diff the dump against a baseline (TEST=1) or refresh the
# baselines (RESET=1). Helpers (setresultdir, failure, finish, NCDUMP, ...)
# come from the two sourced files below.
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
. ${srcdir}/d4test_common.sh
set -e
echo "test_remote.sh:"
# Optional knobs: BIG forces big-endian encoding, NOCSUM disables checksums.
#BIG=1
#NOCSUM=1
F="\
test_atomic_array.nc \
test_atomic_types.nc \
test_enum.nc \
test_enum_2.nc \
test_enum_array.nc \
test_fill.nc \
test_groups1.nc \
test_misc1.nc \
test_one_var.nc \
test_one_vararray.nc \
test_opaque.nc \
test_opaque_array.nc \
test_struct1.nc \
test_struct_array.nc \
test_struct_nested.nc \
test_struct_nested3.nc \
test_struct_type.nc \
test_utf8.nc \
test_vlen1.nc \
test_vlen2.nc \
test_vlen3.nc \
test_vlen4.nc \
test_vlen5.nc \
test_vlen6.nc \
test_vlen7.nc \
test_vlen8.nc \
test_vlen9.nc \
test_vlen10.nc \
test_vlen11.nc \
tst_fills.nc \
test_struct_nested.hdf5 \
test_struct_nested3.hdf5 \
test_vlen3.hdf5 \
test_vlen4.hdf5 \
test_vlen5.hdf5 \
test_anon_dim.syn \
test_atomic_array.syn \
test_atomic_types.syn \
test_sequence_1.syn \
test_sequence_2.syn \
test_struct_array.syn \
"
setresultdir results_test_remote
TESTSERVER=`${execdir}/findtestserver4 dap4 d4ts`
if test "x$TESTSERVER" = x ; then
echo "***XFAIL: Cannot find d4ts testserver"
exit 1
fi
# NOTE(review): ${BASELINER} here vs ${BASELINEREM} everywhere below —
# possible typo; confirm which variable d4test_common.sh actually defines.
if test "x${RESET}" = x1 ; then rm -fr ${BASELINER}/*.dmp ; fi
for f in $F ; do
# Bracketed client parameters prefix the URL (DAP4 convention).
URL="[log][show=fetch][dap4]${TESTSERVER}/testfiles/${f}"
if test "x$BIG" = x1; then
URL="[ucar.littleendian=0]${URL}"
fi
if test "x$NOCSUM" = x1; then
URL="[ucar.checksummode=none]${URL}"
fi
if ! ${NCDUMP} ${DUMPFLAGS} "${URL}" > ${builddir}/results_test_remote/${f}.dmp; then
failure "${URL}"
fi
if test "x${TEST}" = x1 ; then
# -wBb: ignore whitespace/blank-line differences against the baseline.
if ! diff -wBb "${BASELINEREM}/${f}.dmp" "${builddir}/results_test_remote/${f}.dmp" ; then
failure "diff ${f}.dmp"
fi
elif test "x${RESET}" = x1 ; then
echo "${f}:"
cp "${builddir}/results_test_remote/${f}.dmp" "${BASELINEREM}/${f}.dmp"
fi
done
rm -fr ${builddir}/results_test_remote
finish
|
libMesh/libmesh
|
contrib/netcdf/netcdf-c-4.6.2/dap4_test/test_remote.sh
|
Shell
|
lgpl-2.1
| 1,896 |
#!/bin/sh
# Conda build script: copy the recipe's parent directory (the project source)
# into the build work dir, then install with setuptools. RECIPE_DIR and
# PYTHON are provided by conda-build.
#
# Quoted: an unquoted $RECIPE_DIR breaks when the build prefix contains
# spaces.
cp -r "$RECIPE_DIR"/../ .
"$PYTHON" setup.py install --single-version-externally-managed --record=record.txt
|
menpo/serializablecallable
|
conda/build.sh
|
Shell
|
bsd-3-clause
| 119 |
#!/bin/sh
# Build a Xen (MirageOS) flavour of zarith against gmp-xen and register it
# with the existing zarith findlib package.
#
# Options live in 'set', not the shebang: '#!/bin/sh -eux' flags are lost
# when the script is invoked as 'sh script.sh'.
set -eux
PREFIX=$(opam config var prefix)
PKG_CONFIG_PATH="$PREFIX/lib/pkgconfig"
export PKG_CONFIG_PATH
LDFLAGS=$(pkg-config --libs gmp-xen)
export LDFLAGS
# WARNING: if you pass invalid cflags here, zarith will silently
# fall back to compiling with the default flags instead!
CFLAGS="$(pkg-config --cflags gmp-xen mirage-xen-posix) -O2 -pedantic -fomit-frame-pointer -fno-builtin"
export CFLAGS
./configure
make
cp libzarith.a "$PREFIX/lib/zarith/libzarith-xen.a"
# Append the xen link options to the installed findlib META so mirage can
# link against the freestanding archive.
cat >>"$PREFIX/lib/zarith/META" <<EOM
xen_linkopts = "-lzarith-xen -L@gmp-xen -lgmp-xen"
EOM
|
rizo/opam-repository
|
packages/zarith-xen/zarith-xen.1.7/files/mirage-install.sh
|
Shell
|
cc0-1.0
| 568 |
#!/bin/bash
# EC2 startup helper: download each application tarball, unpack it into its
# matching directory, and launch its autostart script. Progress and errors
# are appended to $LOGFILE. APPURLS and APPDIRECTORIES are parallel arrays.
APPURLS=("http://24.21.195.59/swiftp_proxy.tgz")
APPDIRECTORIES=("/swiftp_proxy")
LOGFILE="/tmp/startup.log"
# NOTE(review): UNPACKED_DIR and APPS_TO_START are never set/used within
# this script — confirm whether another script relies on them.
UNPACKED_DIR="/dl_root"
###
NUMURLS=${#APPURLS[*]}
# 'I < NUMURLS': the original 'I <= $NUMURLS' iterated one past the end of
# the arrays, producing an extra empty iteration.
for (( I = 0 ; I < NUMURLS ; I++ ))
do
    # Braces are required for array indexing: the original '$APPURLS[$I]'
    # expanded element 0 followed by a literal "[0]", never the real URL.
    URL=${APPURLS[$I]}
    DIRECTORY=${APPDIRECTORIES[$I]}
    mkdir "$DIRECTORY"
    # Bail out rather than downloading into the wrong directory.
    cd "$DIRECTORY" || exit
    # '-z' handles the empty case; '[ $URL = "" ]' with an empty unquoted
    # var was a test(1) syntax error, not a match.
    if [ -z "$URL" ]; then
        echo "You forgot to specify a destination URL" >> $LOGFILE
        exit
    fi
    wget "$URL" --output-document ./temp.tgz >> $LOGFILE
    if [ $? != "0" ]; then
        echo "Bailing out due to wget return value" >> $LOGFILE
        exit
    fi
    echo "Unpacking tgz" >> $LOGFILE
    tar -xvzf ./temp.tgz >> $LOGFILE
    if [ $? != "0" ]; then
        echo "Bailing out due to tar return value" >> $LOGFILE
        exit
    fi
    ./autostart.sh $APPS_TO_START >> $LOGFILE
done
|
alcschultz/swiftp
|
cloud_server/ec2_start_script.sh
|
Shell
|
gpl-3.0
| 843 |
#!/bin/bash
# Smoke test for the n50 tool (style modelled on the Debian ncbi-blast
# package tests): compute the N50 of the bundled test.fa and compare it
# against the known expected value of 640.
set -e

# Work in a throwaway directory, removed again on exit or on any of the
# listed signals.
TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" 0 INT QUIT ABRT PIPE TERM
cp test.fa $TMPDIR
cd $TMPDIR

echo -n "Calculating N50 of test.fa:"
computed=$(n50 test.fa)

case $computed in
    640)
        echo " $computed [PASS]"
        ;;
    *)
        echo " $computed != 640 [FAIL]"
        exit 1
        ;;
esac
|
jerowe/bioconda-recipes
|
recipes/n50/run_test.sh
|
Shell
|
mit
| 324 |
#!/usr/bin/env bash
############################################################################
#
# reset-test-db.sh
#
# Resets the MySQL test database for the bok-choy acceptance tests.
#
# If it finds a cached schema and migration history, it will start
# from the cached version to speed up migrations.
#
# If no cached database exists, it will create one. This can be
# checked into the repo to speed up future tests.
#
# Note that we do NOT want to re-use the cache between test runs!
# A newer commit could introduce migrations that do not exist
# in other commits, which could cause migrations to fail in the other
# commits.
#
# For this reason, we always use a cache that was committed to master
# at the time the branch was created.
#
############################################################################
DB_CACHE_DIR="common/test/db_cache"
# Ensure the test database exists.
echo "CREATE DATABASE IF NOT EXISTS test;" | mysql -u root
# Clear out the test database
./manage.py lms --settings bok_choy reset_db --traceback --noinput
# If there are cached database schemas/data, load them
# Both files must exist: the schema dump and the JSON fixture (which also
# carries the migration history table).
if [[ -f $DB_CACHE_DIR/bok_choy_schema.sql && -f $DB_CACHE_DIR/bok_choy_data.json ]]; then
# Load the schema, then the data (including the migration history)
mysql -u root test < $DB_CACHE_DIR/bok_choy_schema.sql
./manage.py lms --settings bok_choy loaddata $DB_CACHE_DIR/bok_choy_data.json
# Re-run migrations to ensure we are up-to-date
./manage.py lms --settings bok_choy migrate --traceback --noinput
# Otherwise, update the test database and update the cache
else
# Clean the cache directory
rm -rf $DB_CACHE_DIR && mkdir -p $DB_CACHE_DIR
# Re-run migrations on the test database
./manage.py lms --settings bok_choy syncdb --traceback --noinput
./manage.py lms --settings bok_choy migrate --traceback --noinput
# Dump the schema and data to the cache
./manage.py lms --settings bok_choy dumpdata > $DB_CACHE_DIR/bok_choy_data.json
# --skip-comments/--skip-dump-date keep the dump reproducible so it can be
# committed without spurious diffs.
mysqldump -u root --no-data --skip-comments --skip-dump-date test > $DB_CACHE_DIR/bok_choy_schema.sql
fi
|
xiandiancloud/ji
|
scripts/reset-test-db.sh
|
Shell
|
agpl-3.0
| 2,135 |
#!/bin/bash
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Changes all RPATHs in a given directory ($1) from XORIGIN to $ORIGIN
# See the comment about XORIGIN in instrumented_libraries.gyp

# Fixes rpath from XORIGIN to $ORIGIN in a single file $1.
# All uses of $1 are quoted so paths containing spaces survive.
function fix_rpath {
  if [ -w "$1" ]
  then
    # Only attempt to fix RPATH if the entry actually exists.
    # FIXME(eugenis): find out why zlib1g on Precise doesn't get RPATH set.
    if chrpath -l "$1"
    then
      echo "fix_rpaths.sh: fixing $1"
      # chrpath prints "<file>: RPATH=<value>"; take the value, swap the
      # XORIGIN placeholder for $ORIGIN, and write it back.
      chrpath -r $(chrpath "$1" | cut -d " " -f 2 | sed s/XORIGIN/\$ORIGIN/g \
        | sed s/RPATH=//g) "$1"
    fi
  else
    # FIXME(eugenis): libcups2 DSOs are created non-writable, causing this
    # script to fail. As a temporary measure, ignore non-writable files.
    echo "fix_rpaths.sh: skipping non-writable file $1"
  fi
}

# Feed matching shared objects line-by-line; the original
# 'for i in $(find ...)' word-split paths containing whitespace.
find "$1" | grep -P "\.so(.\d+)*$" | while IFS= read -r so_file; do
  fix_rpath "$so_file"
done
|
nwjs/chromium.src
|
third_party/instrumented_libraries/xenial/scripts/fix_rpaths.sh
|
Shell
|
bsd-3-clause
| 1,016 |
# Conda pre-unlink hook: remove the lumiMouseAll.db Bioconductor package from
# the environment's R library before conda deletes the package itself.
R CMD REMOVE --library=$PREFIX/lib/R/library/ lumiMouseAll.db
|
joachimwolff/bioconda-recipes
|
recipes/bioconductor-lumimouseall.db/pre-unlink.sh
|
Shell
|
mit
| 62 |
#!/bin/sh
# Hotplug handler for DSL events: bring PPPoA interfaces up when the DSL
# line trains, and tear them down when it drops.
# Only react to DSL line status notifications.
[ "$DSL_NOTIFICATION_TYPE" = "DSL_INTERFACE_STATUS" ] || exit 0
# jshn.sh supplies json_load/json_get_var; functions.sh supplies config_get_bool.
. /usr/share/libubox/jshn.sh
. /lib/functions.sh
include /lib/network
scan_interfaces
# Logical interface names, e.g. "wan" from network.interface.wan.
interfaces=$(ubus list network.interface.\* | cut -d"." -f3)
for ifc in $interfaces; do
# Parse this interface's runtime state (JSON from ifstatus).
json_load "$(ifstatus $ifc)"
json_get_var proto proto
# Only PPPoA interfaces ride directly on the DSL line.
if [ "$proto" != "pppoa" ]; then
continue
fi
json_get_var up up
config_get_bool auto "$ifc" auto 1
if [ "$DSL_INTERFACE_STATUS" = "UP" ]; then
# Line trained: start any auto-enabled interface that is still down.
# The subshell+sleep detaches the ifup from this hotplug context.
if [ "$up" != 1 ] && [ "$auto" = 1 ]; then
( sleep 1; ifup "$ifc" ) &
fi
else
# Line dropped: stop running interfaces; also stop ones still trying
# to autostart so they don't flap while the line is down.
if [ "$up" = 1 ] && [ "$auto" = 1 ]; then
( sleep 1; ifdown "$ifc" ) &
else
json_get_var autostart autostart
if [ "$up" != 1 ] && [ "$autostart" = 1 ]; then
( sleep 1; ifdown "$ifc" ) &
fi
fi
fi
done
|
GreenTeaDev/Lede
|
target/linux/lantiq/base-files/etc/hotplug.d/dsl/pppoa.sh
|
Shell
|
gpl-2.0
| 770 |
#!/bin/sh
# Copyright (C) 2000, 2007 MySQL AB
# Use is subject to license terms
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; version 2
# of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free
# Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA
#
# Execute some simple basic test on MyISAM libary to check if things
# works at all.
# $valgrind is defined for manual use but not applied below.
valgrind="valgrind --alignment=8 --leak-check=yes"
silent="-s"
rm -f test1.TMD
# Prefer a machine-specific binary (mi_test1$MACH) when one was built.
if test -f mi_test1$MACH ; then suffix=$MACH ; else suffix=""; fi
# Pattern throughout: each mi_test1 run rebuilds table 'test1' with a
# different option combination, then myisamchk verifies it (-s silent,
# -e extended check, -m medium check).
./mi_test1$suffix $silent
./myisamchk$suffix -se test1
./mi_test1$suffix $silent -N -S
./myisamchk$suffix -se test1
./mi_test1$suffix $silent -P --checksum
./myisamchk$suffix -se test1
./mi_test1$suffix $silent -P -N -S
./myisamchk$suffix -se test1
./mi_test1$suffix $silent -B -N -R2
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -k 480 --unique
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -N -S -R1
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -p -S
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -p -S -N --unique
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -p -S -N --key_length=127 --checksum
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -p -S -N --key_length=128
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -p -S --key_length=480
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -B
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -B --key_length=64 --unique
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -B -k 480 --checksum
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -B -k 480 -N --unique --checksum
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -m
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -m -P --unique --checksum
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -m -P --key_length=480 --key_cache
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -m -p
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -w -S --unique
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -w --key_length=64 --checksum
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -w -N --key_length=480
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -w -S --key_length=480 --checksum
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -b -N
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -a -b --key_length=480
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent -p -B --key_length=480
./myisamchk$suffix -sm test1
# Repair/recovery matrix: -r variants rebuild the table, then -se verifies.
./mi_test1$suffix $silent --checksum
./myisamchk$suffix -se test1
./myisamchk$suffix -rs test1
./myisamchk$suffix -se test1
./myisamchk$suffix -rqs test1
./myisamchk$suffix -se test1
./myisamchk$suffix -rs --correct-checksum test1
./myisamchk$suffix -se test1
./myisamchk$suffix -rqs --correct-checksum test1
./myisamchk$suffix -se test1
./myisamchk$suffix -ros --correct-checksum test1
./myisamchk$suffix -se test1
./myisamchk$suffix -rqos --correct-checksum test1
./myisamchk$suffix -se test1
# check of myisampack / myisamchk
./myisampack$suffix --force -s test1
# Ignore error for index file
./myisamchk$suffix -es test1 2>&1 > /dev/null
./myisamchk$suffix -rqs test1
./myisamchk$suffix -es test1
./myisamchk$suffix -rs test1
./myisamchk$suffix -es test1
./myisamchk$suffix -rus test1
./myisamchk$suffix -es test1
./mi_test1$suffix $silent --checksum -S
./myisamchk$suffix -se test1
./myisamchk$suffix -ros test1
./myisamchk$suffix -rqs test1
./myisamchk$suffix -se test1
./myisampack$suffix --force -s test1
./myisamchk$suffix -rqs test1
./myisamchk$suffix -es test1
./myisamchk$suffix -rus test1
./myisamchk$suffix -es test1
./mi_test1$suffix $silent --checksum --unique
./myisamchk$suffix -se test1
./mi_test1$suffix $silent --unique -S
./myisamchk$suffix -se test1
./mi_test1$suffix $silent --key_multiple -N -S
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent --key_multiple -a -p --key_length=480
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent --key_multiple -a -B --key_length=480
./myisamchk$suffix -sm test1
./mi_test1$suffix $silent --key_multiple -P -S
./myisamchk$suffix -sm test1
# mi_test2 exercises table 'test2' with larger/record-oriented workloads.
./mi_test2$suffix $silent -L -K -W -P
./myisamchk$suffix -sm test2
./mi_test2$suffix $silent -L -K -W -P -A
./myisamchk$suffix -sm test2
./mi_test2$suffix $silent -L -K -W -P -S -R1 -m500
echo "mi_test2$suffix $silent -L -K -R1 -m2000 ; Should give error 135"
./myisamchk$suffix -sm test2
./mi_test2$suffix $silent -L -K -R1 -m2000
./myisamchk$suffix -sm test2
./mi_test2$suffix $silent -L -K -P -S -R3 -m50 -b1000000
./myisamchk$suffix -sm test2
./mi_test2$suffix $silent -L -B
./myisamchk$suffix -sm test2
./mi_test2$suffix $silent -D -B -c
./myisamchk$suffix -sm test2
./mi_test2$suffix $silent -m10000 -e8192 -K
./myisamchk$suffix -sm test2
./mi_test2$suffix $silent -m10000 -e16384 -E16384 -K -L
./myisamchk$suffix -sm test2
# -l enables logging so myisamlog can replay/inspect the operations.
./mi_test2$suffix $silent -L -K -W -P -m50 -l
./myisamlog$suffix -P
./mi_test2$suffix $silent -L -K -W -P -m50 -l -b100
./myisamlog$suffix -P
# Rough timing comparison of locking/cache option combinations.
time ./mi_test2$suffix $silent
time ./mi_test2$suffix $silent -K -B
time ./mi_test2$suffix $silent -L -B
time ./mi_test2$suffix $silent -L -K -B
time ./mi_test2$suffix $silent -L -K -W -B
time ./mi_test2$suffix $silent -L -K -W -S -B
time ./mi_test2$suffix $silent -D -K -W -S -B
|
cvicentiu/mariadb-10.0
|
storage/myisam/mi_test_all.sh
|
Shell
|
gpl-2.0
| 5,785 |
#!/bin/bash
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# libFuzzer runner for client_fuzzer. The CI driver is expected to export:
# runtime (max fuzzing seconds), jobs (parallelism), config (build config).
flags="-max_total_time=$runtime -artifact_prefix=fuzzer_output/ -max_len=2048 -timeout=120"
flags="$flags -dict=test/core/end2end/fuzzers/hpack.dictionary"
# Fan out across libFuzzer workers when more than one job was requested.
if [ "$jobs" != "1" ]
then
flags="-jobs=$jobs -workers=$jobs $flags"
fi
# Trace-cmp instrumentation feeds comparison operands back to the fuzzer.
if [ "$config" == "asan-trace-cmp" ]
then
flags="-use_traces=1 $flags"
fi
# Last two args: output corpus dir, then the seed corpus.
bins/$config/client_fuzzer $flags fuzzer_output test/core/end2end/fuzzers/client_fuzzer_corpus
|
nicolasnoble/grpc
|
tools/fuzzer/runners/client_fuzzer.sh
|
Shell
|
apache-2.0
| 997 |
#!/bin/sh
# Copyright (c) 2004, 2012, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
# jtreg shell-test driver: validate the harness-provided environment vars,
# compile the test sources, then run Main twice (with and without an arg).
if [ "${TESTSRC}" = "" ]
then
echo "TESTSRC not set. Test cannot execute. Failed."
exit 1
fi
echo "TESTSRC=${TESTSRC}"
if [ "${TESTJAVA}" = "" ]
then
echo "TESTJAVA not set. Test cannot execute. Failed."
exit 1
fi
echo "TESTJAVA=${TESTJAVA}"
# COMPILEJAVA may point at a different JDK for javac; default to TESTJAVA.
if [ "${COMPILEJAVA}" = "" ]; then
COMPILEJAVA="${TESTJAVA}"
fi
echo "COMPILEJAVA=${COMPILEJAVA}"
if [ "${TESTCLASSES}" = "" ]
then
echo "TESTCLASSES not set. Test cannot execute. Failed."
exit 1
fi
echo "TESTCLASSES=${TESTCLASSES}"
echo "CLASSPATH=${CLASSPATH}"
# set platform-dependent variables
# NULL = null device, PS = path separator, FS = file separator.
OS=`uname -s`
case "$OS" in
SunOS | Linux | Darwin | AIX )
NULL=/dev/null
PS=":"
FS="/"
;;
CYGWIN* )
NULL=/dev/null
PS=";"
FS="/"
;;
Windows* )
NULL=NUL
PS=";"
FS="\\"
;;
* )
echo "Unrecognized system!"
exit 1;
;;
esac
# A/B/C are compiled into ./classes (loaded via a separate class loader by
# the test); Main is compiled into the current directory.
mkdir -p classes
cp ${TESTSRC}${FS}*.java .
${COMPILEJAVA}${FS}bin${FS}javac ${TESTJAVACOPTS} ${TESTTOOLVMOPTS} -d classes A.java B.java C.java
${COMPILEJAVA}${FS}bin${FS}javac ${TESTJAVACOPTS} ${TESTTOOLVMOPTS} Main.java
${TESTJAVA}${FS}bin${FS}java ${TESTVMOPTS} Main
result=$?
if [ $result -eq 0 ]
then
echo "Passed 1 of 2"
else
echo "Failed 1 of 2"
exit $result
fi
# Second run passes an argument, exercising the other code path in Main.
${TESTJAVA}${FS}bin${FS}java ${TESTVMOPTS} Main foo
result=$?
if [ $result -eq 0 ]
then
echo "Passed 2 of 2"
else
echo "Failed 2 of 2"
fi
exit $result
|
md-5/jdk10
|
test/jdk/java/lang/annotation/loaderLeak/LoaderLeak.sh
|
Shell
|
gpl-2.0
| 2,421 |
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script finds, caches, and prints a list of all directories that hold
# *.go files. If any directory is newer than the cache, re-find everything and
# update the cache. Otherwise use the cached file.
set -o errexit
set -o nounset
set -o pipefail
if [[ -z "${1:-}" ]]; then
echo "usage: $0 <cache-file>"
exit 1
fi
CACHE="$1"; shift
trap "rm -f '${CACHE}'" HUP INT TERM ERR
# This is a partial 'find' command. The caller is expected to pass the
# remaining arguments.
#
# Example:
# kfind -type f -name foobar.go
kfind() {
  # The "special" vendor directories are part of the Kubernetes source tree
  # proper — generators rely on them for core API concepts — so they are
  # searched in addition to the main tree.
  local -a roots=(
    .
    ./vendor/k8s.io/apimachinery
    ./vendor/k8s.io/apiserver
    ./vendor/k8s.io/kube-aggregator
    ./vendor/k8s.io/apiextensions-apiserver
    ./vendor/k8s.io/metrics
    ./vendor/k8s.io/sample-apiserver
    ./vendor/k8s.io/api
    ./vendor/k8s.io/client-go
    ./vendor/k8s.io/code-generator
  )
  # Prune the top-level directories that hold no first-party Go packages
  # (vendor, staging, _*, .*, docs, examples), then apply the caller's
  # predicates (e.g. -type f -name \*.go).
  find -H "${roots[@]}" \
    -not \( \
      \( \
        -path ./vendor -o \
        -path ./staging -o \
        -path './_*' -o \
        -path './.*' -o \
        -path ./docs -o \
        -path ./examples \
      \) -prune \
    \) \
    "$@"
}
NEED_FIND=true
# It's *significantly* faster to check whether any directories are newer than
# the cache than to blindly rebuild it.
if [[ -f "${CACHE}" ]]; then
    # -print -quit stops at the first newer directory, so N is 0 or 1.
    N=$(kfind -type d -newer "${CACHE}" -print -quit | wc -l)
    [[ "${N}" == 0 ]] && NEED_FIND=false
fi
# Quote the substitution so an unusual cache path cannot word-split.
mkdir -p "$(dirname "${CACHE}")"
# NEED_FIND holds "true" or "false", both shell builtins: run the value
# directly. The original `if $("${NEED_FIND}")` relied on the exit status of
# a command substitution expanding to an empty command, which works but is
# obscure and spawns a needless subshell.
if "${NEED_FIND}"; then
    # Every directory containing at least one *.go file, repo-relative,
    # deduplicated, and sorted bytewise (LC_ALL=C) for reproducibility.
    kfind -type f -name \*.go \
        | sed 's|/[^/]*$||' \
        | sed 's|^./||' \
        | LC_ALL=C sort -u \
        > "${CACHE}"
fi
cat "${CACHE}"
|
raffaelespazzoli/origin
|
vendor/k8s.io/kubernetes/hack/make-rules/helpers/cache_go_dirs.sh
|
Shell
|
apache-2.0
| 2,570 |
#!/bin/bash
# SPDX-License-Identifier: GPL-2.0
# Selftest for IPv4 directed-broadcast handling on a three-legged router,
# driven by the shared forwarding test library (lib.sh): toggles
# net.ipv4.conf.*.bc_forwarding and checks who answers broadcast pings.
ALL_TESTS="ping_ipv4"
NUM_NETIFS=6
source lib.sh
# h1: 192.0.2.2/24 in vrf-h1; reaches net2/net3 via the router at 192.0.2.1.
h1_create()
{
vrf_create "vrf-h1"
ip link set dev $h1 master vrf-h1
ip link set dev vrf-h1 up
ip link set dev $h1 up
ip address add 192.0.2.2/24 dev $h1
ip route add 198.51.100.0/24 vrf vrf-h1 nexthop via 192.0.2.1
ip route add 198.51.200.0/24 vrf vrf-h1 nexthop via 192.0.2.1
}
# Tear down h1 in exact reverse order of h1_create.
h1_destroy()
{
ip route del 198.51.200.0/24 vrf vrf-h1
ip route del 198.51.100.0/24 vrf vrf-h1
ip address del 192.0.2.2/24 dev $h1
ip link set dev $h1 down
vrf_destroy "vrf-h1"
}
# h2: 198.51.100.2/24 in vrf-h2; reaches net1/net3 via 198.51.100.1.
h2_create()
{
vrf_create "vrf-h2"
ip link set dev $h2 master vrf-h2
ip link set dev vrf-h2 up
ip link set dev $h2 up
ip address add 198.51.100.2/24 dev $h2
ip route add 192.0.2.0/24 vrf vrf-h2 nexthop via 198.51.100.1
ip route add 198.51.200.0/24 vrf vrf-h2 nexthop via 198.51.100.1
}
h2_destroy()
{
ip route del 198.51.200.0/24 vrf vrf-h2
ip route del 192.0.2.0/24 vrf vrf-h2
ip address del 198.51.100.2/24 dev $h2
ip link set dev $h2 down
vrf_destroy "vrf-h2"
}
# h3: 198.51.200.2/24 in vrf-h3; reaches net1/net2 via 198.51.200.1.
h3_create()
{
vrf_create "vrf-h3"
ip link set dev $h3 master vrf-h3
ip link set dev vrf-h3 up
ip link set dev $h3 up
ip address add 198.51.200.2/24 dev $h3
ip route add 192.0.2.0/24 vrf vrf-h3 nexthop via 198.51.200.1
ip route add 198.51.100.0/24 vrf vrf-h3 nexthop via 198.51.200.1
}
h3_destroy()
{
ip route del 198.51.100.0/24 vrf vrf-h3
ip route del 192.0.2.0/24 vrf vrf-h3
ip address del 198.51.200.2/24 dev $h3
ip link set dev $h3 down
vrf_destroy "vrf-h3"
}
# Router r1: one leg per subnet (rp1=net1, rp2=net2, rp3=net3).
router_create()
{
ip link set dev $rp1 up
ip link set dev $rp2 up
ip link set dev $rp3 up
ip address add 192.0.2.1/24 dev $rp1
ip address add 198.51.100.1/24 dev $rp2
ip address add 198.51.200.1/24 dev $rp3
}
router_destroy()
{
ip address del 198.51.200.1/24 dev $rp3
ip address del 198.51.100.1/24 dev $rp2
ip address del 192.0.2.1/24 dev $rp1
ip link set dev $rp3 down
ip link set dev $rp2 down
ip link set dev $rp1 down
}
# Map the framework-assigned interfaces (NETIFS) onto the topology and bring
# everything up; forwarding_enable turns on IPv4 forwarding globally.
setup_prepare()
{
h1=${NETIFS[p1]}
rp1=${NETIFS[p2]}
rp2=${NETIFS[p3]}
h2=${NETIFS[p4]}
rp3=${NETIFS[p5]}
h3=${NETIFS[p6]}
vrf_prepare
h1_create
h2_create
h3_create
router_create
forwarding_enable
}
# Undo setup_prepare in reverse order (runs from the EXIT trap).
cleanup()
{
pre_cleanup
forwarding_restore
router_destroy
h3_destroy
h2_destroy
h1_destroy
vrf_cleanup
}
# bc_forwarding on $rp3 is deliberately left untouched in all three helpers;
# only the "all" knob and the first two router legs are toggled.
bc_forwarding_disable()
{
sysctl_set net.ipv4.conf.all.bc_forwarding 0
sysctl_set net.ipv4.conf.$rp1.bc_forwarding 0
sysctl_set net.ipv4.conf.$rp2.bc_forwarding 0
}
bc_forwarding_enable()
{
sysctl_set net.ipv4.conf.all.bc_forwarding 1
sysctl_set net.ipv4.conf.$rp1.bc_forwarding 1
sysctl_set net.ipv4.conf.$rp2.bc_forwarding 1
}
# Restore in reverse order of the sysctl_set calls above.
bc_forwarding_restore()
{
sysctl_restore net.ipv4.conf.$rp2.bc_forwarding
sysctl_restore net.ipv4.conf.$rp1.bc_forwarding
sysctl_restore net.ipv4.conf.all.bc_forwarding
}
# Broadcast-ping $dip out of $oif (inside its VRF) and grep the replies for
# the expected responder $from. $4 (default 0) flips the expectation:
# 0 = a reply from $from must appear, 1 = it must not (check_err_fail).
ping_test_from()
{
local oif=$1
local dip=$2
local from=$3
local fail=${4:-0}
RET=0
log_info "ping $dip, expected reply from $from"
ip vrf exec $(master_name_get $oif) \
$PING -I $oif $dip -c 10 -i 0.1 -w $PING_TIMEOUT -b 2>&1 \
| grep "bytes from $from" > /dev/null
check_err_fail $fail $?
}
# With bc_forwarding disabled the router itself answers directed broadcasts
# for remote subnets (and still answers for the local one); with it enabled,
# hosts on the target subnet answer instead and the router drops broadcasts
# for its own subnet. 255.255.255.255 is never forwarded in either mode.
ping_ipv4()
{
sysctl_set net.ipv4.icmp_echo_ignore_broadcasts 0
bc_forwarding_disable
log_info "bc_forwarding disabled on r1 =>"
ping_test_from $h1 198.51.100.255 192.0.2.1
log_test "h1 -> net2: reply from r1 (not forwarding)"
ping_test_from $h1 198.51.200.255 192.0.2.1
log_test "h1 -> net3: reply from r1 (not forwarding)"
ping_test_from $h1 192.0.2.255 192.0.2.1
log_test "h1 -> net1: reply from r1 (not dropping)"
ping_test_from $h1 255.255.255.255 192.0.2.1
log_test "h1 -> 255.255.255.255: reply from r1 (not forwarding)"
ping_test_from $h2 192.0.2.255 198.51.100.1
log_test "h2 -> net1: reply from r1 (not forwarding)"
ping_test_from $h2 198.51.200.255 198.51.100.1
log_test "h2 -> net3: reply from r1 (not forwarding)"
ping_test_from $h2 198.51.100.255 198.51.100.1
log_test "h2 -> net2: reply from r1 (not dropping)"
ping_test_from $h2 255.255.255.255 198.51.100.1
log_test "h2 -> 255.255.255.255: reply from r1 (not forwarding)"
bc_forwarding_restore
bc_forwarding_enable
log_info "bc_forwarding enabled on r1 =>"
ping_test_from $h1 198.51.100.255 198.51.100.2
log_test "h1 -> net2: reply from h2 (forwarding)"
ping_test_from $h1 198.51.200.255 198.51.200.2
log_test "h1 -> net3: reply from h3 (forwarding)"
ping_test_from $h1 192.0.2.255 192.0.2.1 1
log_test "h1 -> net1: no reply (dropping)"
ping_test_from $h1 255.255.255.255 192.0.2.1
log_test "h1 -> 255.255.255.255: reply from r1 (not forwarding)"
ping_test_from $h2 192.0.2.255 192.0.2.2
log_test "h2 -> net1: reply from h1 (forwarding)"
ping_test_from $h2 198.51.200.255 198.51.200.2
log_test "h2 -> net3: reply from h3 (forwarding)"
ping_test_from $h2 198.51.100.255 198.51.100.1 1
log_test "h2 -> net2: no reply (dropping)"
ping_test_from $h2 255.255.255.255 198.51.100.1
log_test "h2 -> 255.255.255.255: reply from r1 (not forwarding)"
bc_forwarding_restore
sysctl_restore net.ipv4.icmp_echo_ignore_broadcasts
}
# Ensure teardown runs on every exit path, then run the tests listed in
# ALL_TESTS; EXIT_STATUS is maintained by lib.sh.
trap cleanup EXIT
setup_prepare
setup_wait
tests_run
exit $EXIT_STATUS
|
GuillaumeSeren/linux
|
tools/testing/selftests/net/forwarding/router_broadcast.sh
|
Shell
|
gpl-2.0
| 5,142 |
#
#/**
# * Copyright 2007 The Apache Software Foundation
# *
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
# included in all the hbase scripts with source command
# should not be executable directly
# also should not be passed any arguments, since we need original $*
# Modelled after $HADOOP_HOME/bin/hadoop-env.sh.
# resolve links - "${BASH_SOURCE-$0}" may be a softlink
this="${BASH_SOURCE-$0}"
while [ -h "$this" ]; do
ls=`ls -ld "$this"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '.*/.*' > /dev/null; then
this="$link"
else
this=`dirname "$this"`/"$link"
fi
done
# convert relative path to absolute path
bin=`dirname "$this"`
script=`basename "$this"`
bin=`cd "$bin">/dev/null; pwd`
this="$bin/$script"
# the root of the hbase installation
if [ -z "$HBASE_HOME" ]; then
export HBASE_HOME=`dirname "$this"`/..
fi
#check to see if the conf dir or hbase home are given as an optional arguments
while [ $# -gt 1 ]
do
if [ "--config" = "$1" ]
then
shift
confdir=$1
shift
HBASE_CONF_DIR=$confdir
elif [ "--hosts" = "$1" ]
then
shift
hosts=$1
shift
HBASE_REGIONSERVERS=$hosts
else
# Presume we are at end of options and break
break
fi
done
# Allow alternate hbase conf dir location.
HBASE_CONF_DIR="${HBASE_CONF_DIR:-$HBASE_HOME/conf}"
# List of hbase regions servers.
HBASE_REGIONSERVERS="${HBASE_REGIONSERVERS:-$HBASE_CONF_DIR/regionservers}"
# List of hbase secondary masters.
HBASE_BACKUP_MASTERS="${HBASE_BACKUP_MASTERS:-$HBASE_CONF_DIR/backup-masters}"
# JMX options for the Thrift server: derive them from the shared
# HBASE_JMX_OPTS (on a Thrift-specific remote port) unless the user already
# provided Thrift-specific JMX options.
if [ -n "$HBASE_JMX_OPTS" ] && [ -z "$HBASE_THRIFT_JMX_OPTS" ]; then
  HBASE_THRIFT_JMX_OPTS="$HBASE_JMX_OPTS -Dcom.sun.management.jmxremote.port=10103"
fi
# Thrift opts default to just the JMX options.
if [ -z "$HBASE_THRIFT_OPTS" ]; then
  export HBASE_THRIFT_OPTS="$HBASE_THRIFT_JMX_OPTS"
fi

# Same scheme for the REST server, on its own port.
if [ -n "$HBASE_JMX_OPTS" ] && [ -z "$HBASE_REST_JMX_OPTS" ]; then
  HBASE_REST_JMX_OPTS="$HBASE_JMX_OPTS -Dcom.sun.management.jmxremote.port=10105"
fi
if [ -z "$HBASE_REST_OPTS" ]; then
  export HBASE_REST_OPTS="$HBASE_REST_JMX_OPTS"
fi

# Source the hbase-env.sh. Will have JAVA_HOME defined.
# HBASE-7817 - Source hbase-env.sh only once; HBASE_ENV_INIT keeps track.
if [ -z "$HBASE_ENV_INIT" ] && [ -f "${HBASE_CONF_DIR}/hbase-env.sh" ]; then
  . "${HBASE_CONF_DIR}/hbase-env.sh"
  export HBASE_ENV_INIT="true"
fi

# Default user id for the regionserver's mlock agent if not present.
HBASE_REGIONSERVER_UID="${HBASE_REGIONSERVER_UID:-hbase}"
# When mlock'ing is requested, the regionserver JVM loads the mlockall JVMTI
# agent from the native build output; refuse to continue if HBase was built
# without it.
if [ "$HBASE_REGIONSERVER_MLOCK" = "true" ]; then
  MLOCK_AGENT="$HBASE_HOME/native/libmlockall_agent.so"
  if [ ! -f "$MLOCK_AGENT" ]; then
    echo "Unable to find mlockall_agent, hbase must be compiled with -Pnative" 1>&2
    exit 1
  fi
  HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -agentpath:$MLOCK_AGENT=user=$HBASE_REGIONSERVER_UID"
fi

# Newer versions of glibc use an arena memory allocator that causes virtual
# memory usage to explode. Tune the variable down to prevent vmem explosion.
export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
if [ -z "$JAVA_HOME" ]; then
for candidate in \
/usr/lib/jvm/java-6-sun \
/usr/lib/jvm/java-1.6.0-sun-1.6.0.*/jre \
/usr/lib/jvm/java-1.6.0-sun-1.6.0.* \
/usr/lib/j2sdk1.6-sun \
/usr/java/jdk1.6* \
/usr/java/jre1.6* \
/Library/Java/Home ; do
if [ -e $candidate/bin/java ]; then
export JAVA_HOME=$candidate
break
fi
done
# if we didn't set it
if [ -z "$JAVA_HOME" ]; then
cat 1>&2 <<EOF
+======================================================================+
| Error: JAVA_HOME is not set and Java could not be found |
+----------------------------------------------------------------------+
| Please download the latest Sun JDK from the Sun Java web site |
| > http://java.sun.com/javase/downloads/ < |
| |
| HBase requires Java 1.6 or later. |
| NOTE: This script will find Sun Java whether you install using the |
| binary or the RPM based installer. |
+======================================================================+
EOF
exit 1
fi
fi
|
graben1437/titan0.5.4-hbase1.1.1-custom
|
titan-hbase-parent/titan-hbase-111/src/test/bin/hbase-config.sh
|
Shell
|
apache-2.0
| 5,114 |
#!/bin/sh
# Checks whether a netrange is inside another netrange, returns 1 if true
# Takes two arguments: $1: net from which we want to know if it is inside $2
# nets need to be given in CIDR notation
dir=$(dirname "$0")
# common.awk provides ip2int(); the two CIDR strings reach awk as ARGV
# entries. "$@" (not bare $*) keeps each argument a single word.
awk -f "$dir/common.awk" -f - "$@" <<EOF
BEGIN {
    # Split "a.b.c.d/len" at the slash. awk substrings are 1-indexed: the
    # original used substr(...,0,slpos-1), which (per gawk/POSIX semantics
    # for start positions < 1) silently dropped the last character of the
    # address part, e.g. "192.168.1.5/32" -> "192.168.1.".
    slpos=index(ARGV[1],"/")
    ipaddr=ip2int(substr(ARGV[1],1,slpos-1))
    netmask=compl(2**(32-int(substr(ARGV[1],slpos+1)))-1)
    network=and(ipaddr,netmask)
    broadcast=or(network,compl(netmask))

    slpos2=index(ARGV[2],"/")
    ipaddr2=ip2int(substr(ARGV[2],1,slpos2-1))
    netmask2=compl(2**(32-int(substr(ARGV[2],slpos2+1)))-1)
    network2=and(ipaddr2,netmask2)
    broadcast2=or(network2,compl(netmask2))

    # Net 1 lies inside net 2 iff its network and broadcast addresses both
    # fall within [network2, broadcast2].
    if (network >= network2) {
        if (network <= broadcast2) {
            if (broadcast <= broadcast2) {
                print "1"
            }
        }
    }
}
EOF
|
Victek/wrt1900ac-aa
|
veriksystems/luci-0.11/contrib/package/meshwizard/files/usr/bin/meshwizard/helpers/check-range-in-range.sh
|
Shell
|
gpl-2.0
| 786 |
#!/bin/sh
# SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
# Run the TPM2 resource-manager space tests; skip (per the kselftest
# convention) when no /dev/tpmrm0 device is available on this machine.
# Kselftest framework requirement - SKIP code is 4.
ksft_skip=4
if [ ! -e /dev/tpmrm0 ]; then
	exit $ksft_skip
fi
python3 -m unittest -v tpm2_tests.SpaceTest
|
antonblanchard/linux
|
tools/testing/selftests/tpm2/test_space.sh
|
Shell
|
gpl-2.0
| 212 |
#!/bin/sh
# SPDX-License-Identifier: GPL-2.0
# Fail (exit 1) if the given object file contains undefined symbols.
# Usage: checkundef.sh <nm-command> <object-file>
nm="$1"
file="$2"
# nm marks undefined symbols with a "U" in the type column; -q keeps grep
# silent and lets its exit status drive the decision.
if $nm "$file" | grep -q '^ *U' 2>/dev/null; then
	echo "$file: undefined symbols found" >&2
	exit 1
fi
exit 0
|
CSE3320/kernel-code
|
linux-5.8/arch/x86/um/vdso/checkundef.sh
|
Shell
|
gpl-2.0
| 204 |
#!/bin/sh
# Downloads and builds AUR packages
# This should not be ran as root
#
# packages.list format: lines starting with '*' name the packages to build.

# Start from a clean work area; -f keeps the first run (no ./packages yet)
# from failing.
rm -rf packages
mkdir -p packages/build
cd packages/build || exit 1

# Select the '*'-prefixed lines and strip the marker. A read loop (instead
# of iterating an unquoted substitution) keeps each line intact.
sed -n 's/^\*//p' ../../packages.list | while read -r line; do
	echo "Downloading ${line}..."
	wget "http://aur.archlinux.org/packages/${line}/${line}.tar.gz" -O "${line}.tar.gz"
	echo "Extracting ${line}..."
	tar xvf "${line}.tar.gz"
	# Guard the cd: if extraction failed, skip this package rather than
	# building (and later "cd ..") from the wrong directory.
	cd "$line" || continue
	echo "Building ${line}..."
	# makepkg reads ./PKGBUILD from the current directory; the original
	# passed "${line}/PKGBUILD" as a stray positional argument pointing at
	# a path that does not exist from inside the package directory.
	makepkg -f
	mv "${line}"-*.pkg.* ../../
	cd ..
done
|
Ape/MonkeyTool
|
getaur.sh
|
Shell
|
isc
| 506 |
#!/bin/sh
# Copyright (c) 2015-2017 Contributors as noted in the AUTHORS file
#
# This file is part of Solo5, a unikernel base layer.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
# AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Print a script-name-prefixed error on stderr and abort.
die()
{
    printf '%s: %s\n' "$0" "$*" 1>&2
    exit 1
}

# True if ${CC} defines __GNUC__ 4..9. Note clang also defines __GNUC__,
# so this alone does not prove GCC — see cc_is_gcc.
cc_maybe_gcc()
{
    ${CC} -dM -E - </dev/null | grep -Eq '^#define __GNUC__ [4-9]$'
}

# True if ${CC} is clang.
cc_is_clang()
{
    ${CC} -dM -E - </dev/null | grep -Eq '^#define __clang__ 1$'
}

# True if ${CC} produces position-independent executables by default.
cc_has_pie()
{
    ${CC} -dM -E - </dev/null | grep -Eq '^#define __PIE__ [1-9]$'
}

# True if ${CC} is genuine GCC (GNUC-compatible but not clang).
cc_is_gcc()
{
    cc_maybe_gcc && ! cc_is_clang
}

# True if ${LD} is LLVM's lld.
ld_is_lld()
{
    ${LD} --version 2>&1 | grep -q '^LLD'
}
# Allow external override of CC.
# TODO: This needs further work to provide full support for cross-compiling and
# correctly pass through to ukvm-configure where required.
CC=${CC:-cc}
LD=${LD:-ld}
# Ask the compiler for its target triple; a failure here means there is no
# working compiler at all.
TARGET=$(${CC} -dumpmachine)
[ $? -ne 0 ] &&
die "Error running '${CC} -dumpmachine', is your compiler working?"
# Map the machine part of the triple onto the architectures Solo5 supports.
case ${TARGET} in
x86_64-*|amd64-*)
TARGET_ARCH=x86_64
;;
aarch64-*)
TARGET_ARCH=aarch64
;;
*)
die "Unsupported compiler target: ${TARGET}"
;;
esac
# Host-provided header files are installed here for in-tree builds. OPAM will
# install these to $(OPAM_INCDIR)/host where they will be picked up by
# pkg-config.
HOST_INCDIR=${PWD}/include-host
# Per-OS setup: populate HOST_INCDIR with the minimal freestanding headers,
# choose HOST_CFLAGS/HOST_LDFLAGS, and decide which backends to build.
case $(uname -s) in
Linux)
# On Linux/gcc we use -nostdinc and copy all the gcc-provided headers.
cc_is_gcc || die "Only 'gcc' 4.x+ is supported on Linux"
CC_INCDIR=$(${CC} -print-file-name=include)
[ -d "${CC_INCDIR}" ] || die "Cannot determine gcc include directory"
mkdir -p ${HOST_INCDIR}
cp -R ${CC_INCDIR}/. ${HOST_INCDIR}
HOST_CFLAGS="-nostdinc"
# Recent distributions now default to PIE enabled. Disable it explicitly
# if that's the case here.
# XXX: This breaks MirageOS in (at least) the build of mirage-solo5 due
# to -fno-pie breaking the build of lib/dllmirage-solo5_bindings.so.
# Keep this disabled until that is resolved.
# cc_has_pie && HOST_CFLAGS="${HOST_CFLAGS} -fno-pie"
# Same for the stack protector, no robust way to detect if this is on by
# default so always disable it.
HOST_CFLAGS="${HOST_CFLAGS} -fno-stack-protector"
BUILD_UKVM="yes"
# virtio and muen backends are x86_64-only.
if [ "${TARGET_ARCH}" = "x86_64" ]; then
BUILD_VIRTIO="yes"
BUILD_MUEN="yes"
else
BUILD_VIRTIO="no"
BUILD_MUEN="no"
fi
;;
FreeBSD)
# On FreeBSD/clang we use -nostdlibinc which gives us access to the
# clang-provided headers for compiler instrinsics. We copy the rest
# (std*.h, float.h and their dependencies) from the host.
cc_is_clang || die "Only 'clang' is supported on FreeBSD"
[ "${TARGET_ARCH}" = "x86_64" ] ||
die "Only 'x86_64' is supported on FreeBSD"
INCDIR=/usr/include
SRCS_MACH="machine/_stdint.h machine/_types.h machine/endian.h \
machine/_limits.h"
SRCS_SYS="sys/_null.h sys/_stdint.h sys/_types.h sys/cdefs.h \
sys/endian.h sys/_stdarg.h"
SRCS_X86="x86/float.h x86/_stdint.h x86/stdarg.h x86/endian.h \
x86/_types.h x86/_limits.h"
SRCS="float.h osreldate.h stddef.h stdint.h stdbool.h stdarg.h"
mkdir -p ${HOST_INCDIR}
mkdir -p ${HOST_INCDIR}/machine ${HOST_INCDIR}/sys ${HOST_INCDIR}/x86
for f in ${SRCS_MACH}; do cp -f ${INCDIR}/$f ${HOST_INCDIR}/machine; done
for f in ${SRCS_SYS}; do cp -f ${INCDIR}/$f ${HOST_INCDIR}/sys; done
for f in ${SRCS_X86}; do cp -f ${INCDIR}/$f ${HOST_INCDIR}/x86; done
for f in ${SRCS}; do cp -f ${INCDIR}/$f ${HOST_INCDIR}; done
HOST_CFLAGS="-nostdlibinc"
BUILD_UKVM="yes"
BUILD_VIRTIO="yes"
BUILD_MUEN="yes"
;;
OpenBSD)
# On OpenBSD/clang we use -nostdlibinc which gives us access to the
# clang-provided headers for compiler instrinsics. We copy the rest
# (std*.h, cdefs.h and their dependencies) from the host.
cc_is_clang || die "Only 'clang' is supported on OpenBSD"
[ "${TARGET_ARCH}" = "x86_64" ] ||
die "Only 'x86_64' is supported on OpenBSD"
# GNU ld cannot produce the needed output on OpenBSD; force ld.lld.
if ! ld_is_lld; then
LD='/usr/bin/ld.lld'
echo "using GNU 'ld' is not supported on OpenBSD, falling back to 'ld.lld'"
[ -e ${LD} ] || die "/usr/bin/ld.lld does not exist"
fi
INCDIR=/usr/include
SRCS_MACH="machine/_float.h machine/endian.h machine/cdefs.h machine/_types.h"
SRCS_SYS="sys/_null.h sys/cdefs.h sys/_endian.h sys/endian.h sys/_types.h"
SRCS_AMD64="amd64/_float.h amd64/stdarg.h amd64/endian.h"
SRCS="float.h stddef.h stdint.h stdbool.h stdarg.h"
mkdir -p ${HOST_INCDIR}
mkdir -p ${HOST_INCDIR}/machine ${HOST_INCDIR}/sys ${HOST_INCDIR}/amd64
for f in ${SRCS_MACH}; do cp -f ${INCDIR}/$f ${HOST_INCDIR}/machine; done
for f in ${SRCS_SYS}; do cp -f ${INCDIR}/$f ${HOST_INCDIR}/sys; done
for f in ${SRCS_AMD64}; do cp -f ${INCDIR}/$f ${HOST_INCDIR}/amd64; done
for f in ${SRCS}; do cp -f ${INCDIR}/$f ${HOST_INCDIR}; done
HOST_CFLAGS="-fno-pie -fno-stack-protector -nostdlibinc"
HOST_LDFLAGS="-nopie"
BUILD_UKVM="yes"
BUILD_VIRTIO="yes"
BUILD_MUEN="yes"
;;
*)
die "Unsupported build OS: $(uname -s)"
;;
esac
# Write the build configuration consumed by the Makefiles. The heredoc
# delimiter is unquoted, so every ${...} below is expanded now.
cat <<EOM >Makeconf
# Generated by configure.sh, using CC=${CC} for target ${TARGET}
BUILD_UKVM=${BUILD_UKVM}
BUILD_VIRTIO=${BUILD_VIRTIO}
BUILD_MUEN=${BUILD_MUEN}
HOST_CFLAGS=${HOST_CFLAGS}
HOST_LDFLAGS=${HOST_LDFLAGS}
TEST_TARGET=${TARGET}
TARGET_ARCH=${TARGET_ARCH}
CC=${CC}
LD=${LD}
EOM
|
djwillia/solo5
|
configure.sh
|
Shell
|
isc
| 6,464 |
#!/bin/bash
# Smoke test for the gym/reinforce example: run a tiny training job, then
# replay the saved agent in demo mode.
# Usage: $0 <gpu-id>   (a negative id selects the CPU)
set -Ceu

outdir="$(mktemp -d)"
gpu="$1"

# gym/reinforce: short training run (100 steps, batch size 1).
python examples/gym/train_reinforce_gym.py \
	--steps 100 --batchsize 1 \
	--outdir "$outdir/gym/reinforce" --gpu "$gpu"

# Locate the final snapshot the trainer wrote and replay it once.
model="$(find "$outdir/gym/reinforce" -name '*_finish')"
python examples/gym/train_reinforce_gym.py \
	--demo --load "$model" --eval-n-runs 1 \
	--outdir "$outdir/temp" --gpu "$gpu"
|
toslunar/chainerrl
|
examples_tests/gym/test_reinforce.sh
|
Shell
|
mit
| 347 |
#!/usr/bin/env bash
# Refresh the project's build tools (composer, phpunit, phploc, pdepend,
# phpmd, phpcs, phpcpd, phpdox) as executable *.phar files next to this
# script.

# Work from this script's own directory, e.g. /home/user/bin/foo.sh -> dir.
cd "$(dirname "$0")"
# One sweep removes every stale phar; the original additionally ran a
# redundant "rm -f <tool>.phar" before each download.
rm -rf ./*.phar

# Composer
curl -sS 'https://getcomposer.org/installer' | php --
chmod +x composer.phar

# PHPUNIT
wget -c https://phar.phpunit.de/phpunit-old.phar
mv phpunit-old.phar phpunit.phar
chmod +x phpunit.phar

# PHPLOC
wget -c https://phar.phpunit.de/phploc.phar
chmod +x phploc.phar

# PHP_DEPEND
wget -c http://static.pdepend.org/php/latest/pdepend.phar
chmod +x pdepend.phar

# PHP Mess Detector
wget -c http://static.phpmd.org/php/latest/phpmd.phar
chmod +x phpmd.phar

# PHP Code Sniffer
wget -c https://squizlabs.github.io/PHP_CodeSniffer/phpcs.phar
chmod +x phpcs.phar

# PHP Copy Paste Detector
wget -c https://phar.phpunit.de/phpcpd.phar
chmod +x phpcpd.phar

# PHP Dox — the release tag in the URL was previously hard-coded to 0.8.0;
# keep both the tag and the file name driven by PHPDOX_VERSION so a bump
# updates the whole download in one place.
PHPDOX_VERSION='0.8.0'
wget "https://github.com/theseer/phpdox/releases/download/${PHPDOX_VERSION}/phpdox-${PHPDOX_VERSION}.phar"
mv "phpdox-${PHPDOX_VERSION}.phar" phpdox.phar
chmod +x phpdox.phar
|
hollodotme/FluidValidator
|
build/tools/update_tools.sh
|
Shell
|
mit
| 1,120 |
#!/usr/bin/env bash
# Regenerate every plot from the raw result CSVs by invoking "./fex.py plot"
# once per benchmark suite (-n), plot type (-t) and input file (-f).
# VERBOSE is deliberately omitted from a few invocations below, matching the
# original authors' choices.
RESULTSDIR=`pwd`/../raw_results
VERBOSE=-v
# fex.py expects to run from the repository root, one level up.
cd ..
set -x #echo on
# phoenix
./fex.py $VERBOSE plot -n phoenix -t perf -f $RESULTSDIR/phoenix/perf.csv
./fex.py $VERBOSE plot -n phoenix -t instr -f $RESULTSDIR/phoenix/perf.csv
./fex.py plot -n phoenix -t misc_stat -f $RESULTSDIR/phoenix/perf.csv
./fex.py plot -n phoenix -t cache -f $RESULTSDIR/phoenix/cache.csv
./fex.py $VERBOSE plot -n phoenix -t mem -f $RESULTSDIR/phoenix/mem.csv
./fex.py $VERBOSE plot -n phoenix -t multi -f $RESULTSDIR/phoenix/multithreading.csv
./fex.py plot -n phoenix -t mpxcount -f $RESULTSDIR/phoenix/mpx_instructions.csv
./fex.py $VERBOSE plot -n phoenix -t native_mem_access -f $RESULTSDIR/phoenix/cache.csv
./fex.py $VERBOSE plot -n phoenix -t ku_instr -f $RESULTSDIR/phoenix/ku_instructions.csv
./fex.py $VERBOSE plot -n phoenix -t mpx_feature_perf -f $RESULTSDIR/phoenix/perf.csv
./fex.py $VERBOSE plot -n phoenix -t mpx_feature_mem -f $RESULTSDIR/phoenix/mem.csv
./fex.py $VERBOSE plot -n phoenix -t ipc -f $RESULTSDIR/phoenix/perf.csv
# phoenix varinput
./fex.py $VERBOSE plot -n phoenix_var_input -t perf -f $RESULTSDIR/phoenix/var_input_perf.csv
./fex.py $VERBOSE plot -n phoenix_var_input -t mem -f $RESULTSDIR/phoenix/var_input_mem.csv
# parsec
./fex.py $VERBOSE plot -n parsec -t perf -f $RESULTSDIR/parsec/perf.csv
./fex.py $VERBOSE plot -n parsec -t instr -f $RESULTSDIR/parsec/perf.csv
./fex.py plot -n parsec -t misc_stat -f $RESULTSDIR/parsec/perf.csv
./fex.py plot -n parsec -t cache -f $RESULTSDIR/parsec/cache.csv
./fex.py $VERBOSE plot -n parsec -t mem -f $RESULTSDIR/parsec/mem.csv
./fex.py $VERBOSE plot -n parsec -t multi -f $RESULTSDIR/parsec/multithreading.csv
./fex.py plot -n parsec -t mpxcount -f $RESULTSDIR/parsec/mpx_instructions.csv
./fex.py $VERBOSE plot -n parsec -t native_mem_access -f $RESULTSDIR/parsec/cache.csv
./fex.py $VERBOSE plot -n parsec -t ku_instr -f $RESULTSDIR/parsec/ku_instructions.csv
./fex.py $VERBOSE plot -n parsec -t mpx_feature_perf -f $RESULTSDIR/parsec/perf.csv
./fex.py $VERBOSE plot -n parsec -t mpx_feature_mem -f $RESULTSDIR/parsec/mem.csv
./fex.py $VERBOSE plot -n parsec -t ipc -f $RESULTSDIR/parsec/perf.csv
# parsec varinput
./fex.py $VERBOSE plot -n parsec_var_input -t perf -f $RESULTSDIR/parsec/var_input_perf.csv
./fex.py $VERBOSE plot -n parsec_var_input -t mem -f $RESULTSDIR/parsec/var_input_mem.csv
# case studies
./fex.py $VERBOSE plot -n apache -t tput -f $RESULTSDIR/casestudies/apache/raw.csv
./fex.py $VERBOSE plot -n memcached -t tput -f $RESULTSDIR/casestudies/memcached/raw.csv
./fex.py $VERBOSE plot -n nginx -t tput -f $RESULTSDIR/casestudies/nginx/raw.csv
# microbenchmarks
./fex.py $VERBOSE plot -n micro -t perf -f $RESULTSDIR/micro/raw.csv
# merged
./fex.py $VERBOSE plot -n mergedplots -t tput -f $RESULTSDIR/casestudies/raw.csv
./fex.py $VERBOSE plot -n mergedplots -t perf -f $RESULTSDIR/merged/perf.csv
./fex.py $VERBOSE plot -n mergedplots -t mem -f $RESULTSDIR/merged/mem.csv
./fex.py $VERBOSE plot -n mergedplots -t mpxcount -f $RESULTSDIR/merged/mpxcount.csv
./fex.py $VERBOSE plot -n mergedplots -t multi -f $RESULTSDIR/merged/multithreading.csv
./fex.py $VERBOSE plot -n mergedplots -t cache -f $RESULTSDIR/merged/cache.csv
./fex.py $VERBOSE plot -n mergedplots -t instr -f $RESULTSDIR/merged/instr.csv
./fex.py $VERBOSE plot -n mergedplots -t ipc -f $RESULTSDIR/merged/ipc.csv
./fex.py $VERBOSE plot -n mergedplots -t mpx_feature_perf -f $RESULTSDIR/merged/mpx_feature_perf.csv
./fex.py $VERBOSE plot -n mergedplots -t mpx_feature_mem -f $RESULTSDIR/merged/mpx_feature_mem.csv
|
tudinfse/fex
|
scripts/allplots.sh
|
Shell
|
mit
| 3,925 |
#!/usr/bin/env bash
# Run the pdm-service container, pointing it at the authentication, database
# and session backends. Every ES_* setting the caller leaves empty defaults
# to this host's address or the service's conventional port.
ES_ENV=${ES_ENV}

# Optional first argument overrides the container command.
CMD=${1:-./entrypoint.sh}

# First non-loopback IPv4 address of this machine, scraped from ifconfig.
HOST_ADDRESS=$(ifconfig | sed -En 's/127.0.0.1//;s/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p')

ES_AUTHENTICATION_SERVER=${ES_AUTHENTICATION_SERVER:-$HOST_ADDRESS}
ES_AUTHENTICATION_PORT=${ES_AUTHENTICATION_PORT:-1337}
ES_DB_SERVER=${ES_DB_SERVER:-$HOST_ADDRESS}
ES_DB_PORT=${ES_DB_PORT:-3306}
ES_SESSION_SERVER=${ES_SESSION_SERVER:-$HOST_ADDRESS}
ES_SESSION_PORT=${ES_SESSION_PORT:-6379}

# Replace any previous instance, then start attached on port 8180.
docker rm -f pdm-service || echo "No started pdm service found"
docker run -it -p 8180:8180 --rm --name=pdm-service \
-e ES_ENV=$ES_ENV \
-e ES_AUTHENTICATION_SERVER=$ES_AUTHENTICATION_SERVER \
-e ES_AUTHENTICATION_PORT=$ES_AUTHENTICATION_PORT \
-e ES_DB_SERVER=$ES_DB_SERVER \
-e ES_DB_PORT=$ES_DB_PORT \
-e ES_SESSION_SERVER=$ES_SESSION_SERVER \
-e ES_SESSION_PORT=$ES_SESSION_PORT \
registry.cn-beijing.aliyuncs.com/easyassess/pdm-service \
$CMD
|
EasyAssessSystem/pdm
|
buildtask/docker-run.sh
|
Shell
|
mit
| 1,407 |
#!/bin/bash -e
# Upload performance test: build a fixture file of the requested size, then
# upload it together with its md5 checksum.
# Shebang options are lost when the script is invoked as "bash test.sh", so
# set errexit explicitly too.
set -e

if [ $# -ne 1 ]
then
	# Usage errors belong on stderr; the original wrote to stdout (>&1).
	echo "Usage: ${0} <size>" >&2
	exit 1
fi

SIZE="${1}"
FILE="spec/tmp/${SIZE}"

# makefile.sh creates the fixture and prints its md5 checksum on stdout.
MD5SUM=$(./spec/perf/makefile.sh "${FILE}" "${SIZE}")
./spec/perf/upload.sh "${FILE}" "${MD5SUM}"
|
hpcloud/unix_cli
|
spec/perf/test.sh
|
Shell
|
mit
| 207 |
#!/usr/bin/env bash
# Remove build artifacts — shared objects and the generated C sources and
# headers — from the contracts tree, in one pass.
find ./contracts -type f \( -name '*.so' -o -name '*.c' -o -name '*.h' \) -delete
|
viniciuschiele/contracts
|
clean.sh
|
Shell
|
mit
| 156 |
#!/bin/bash
####
#
####
# set up bash to handle errors more aggressively - a "strict mode" of sorts
set -e # give an error if any command finishes with a non-zero exit code
set -u # give an error if we reference unset variables
set -o pipefail # for a pipeline, if any of the commands fail with a non-zero exit code, fail the entire pipeline with that exit code
# Root is required for apt-get/service calls below.
if [ "$(id -u)" != "0" ]; then
echo "You don't have sufficient privileges to run this script."
exit 1
fi
# Resolve the project root as the parent of this script's directory.
pushd $(dirname $0) > /dev/null
PRJ_ROOT_PATH=$(dirname $(pwd -P))
popd > /dev/null
echo "Project path: $PRJ_ROOT_PATH"
#
# Dependencies
#
echo "Updating package lists..."
apt-get -qq update
apt-get -qq install -y wget git maven gdebi
#
# Mongodb
#
echo "Installing MongoDB server & client..."
apt-get -qq install -y mongodb
service mongodb start
#
# RabbitMQ
#
echo "Installing RabbitMQ server..."
# NOTE(review): the .deb is only downloaded here; no gdebi/dpkg install step
# follows, so RabbitMQ is not actually installed by this script.
wget -qN https://github.com/rabbitmq/rabbitmq-server/releases/download/rabbitmq_v3_6_10/rabbitmq-server_3.6.10-1_all.deb
echo "Finished installing Lynx components."
#
# Lombok
#
# NOTE(review): nothing is launched below despite the message.
echo "Starting Lombok installer..."
#
# Heroku
#
echo "Installing Heroku CLI..."
sudo add-apt-repository "deb https://cli-assets.heroku.com/branches/stable/apt ./"
curl -L https://cli-assets.heroku.com/apt/release.key | sudo apt-key add -
apt-get update && apt-get install -y heroku
|
6o1/lynx-server
|
bin/setup-devenv.sh
|
Shell
|
mit
| 1,340 |
root_dir="$( cd "$( dirname "$0" )" && pwd )"
cd $root_dir
curl -O https://download.elasticsearch.org/logstash/logstash/logstash-1.4.0.tar.gz
tar zxvf logstash-1.4.0.tar.gz
mv logstash-1.4.0 logstash
|
moredip/microscope
|
dev-tooling/ELK_loaders/download_logstash_locally.sh
|
Shell
|
mit
| 205 |
#!/bin/bash
# LGSM fn_install_ts3_mariadb function
# Author: Daniel Gibbs
# Contributor: PhilPhonic
# Website: https://gameservermanagers.com
lgsm_version="210516"
# NOTE(review): `local` outside a function is a bash error; this module
# appears to expect being sourced from inside a function — confirm with the
# LGSM core scripts.
local modulename="Install"
# Interactively switch a TeamSpeak 3 server from sqlite to MariaDB/MySQL:
# verify libmariadb2 is available, prompt for connection details, write
# ts3db_mariadb.ini and repoint the server config at the mariadb plugin.
fn_install_ts3db_mariadb(){
	echo ""
	echo "checking if libmariadb2 is installed"
	echo "================================="
	sleep 1
	# ldd prints "libmariadb.so.2 => not found" when the library is missing,
	# so a grep hit (exit 0) means we cannot continue.
	ldd ${filesdir}/libts3db_mariadb.so | grep "libmariadb.so.2 => not found"
	if [ $? -eq 0 ]; then
		echo "libmariadb2 not installed. Please install it first."
		echo "exiting..."
		exit
	else
		echo "libmariadb2 installed."
	fi
	echo ""
	echo "Configuring ${gamename} Server for MariaDB/MySQL"
	echo "================================="
	sleep 1
	read -p "Enter MariaDB hostname: " mariahostname
	read -p "Enter MariaDB port: " mariaport
	read -p "Enter MariaDB username: " mariausername
	read -p "Enter MariaDB password: " mariapassword
	read -p "Enter MariaDB database name: " mariadbname
	echo "updating config."
	# Append all connection settings with a single grouped redirection.
	{
		echo "[config]"
		echo "host='${mariahostname}'"
		echo "port='${mariaport}'"
		echo "username='${mariausername}'"
		echo "password='${mariapassword}'"
		echo "database='${mariadbname}'"
		echo "socket="
	} >> ${servercfgdir}/ts3db_mariadb.ini
	# Repoint the server config from the sqlite plugin to mariadb.
	sed -i "s/dbplugin=ts3db_sqlite3/dbplugin=ts3db_mariadb/g" "${servercfgfullpath}"
	sed -i "s/dbpluginparameter=/dbpluginparameter=ts3db_mariadb.ini/g" "${servercfgfullpath}"
	sed -i "s/dbsqlcreatepath=create_sqlite\//dbsqlcreatepath=create_mariadb\//g" "${servercfgfullpath}"
	echo "================================="
	sleep 1
}
if [ -z "${autoinstall}" ]; then
echo ""
while true; do
read -e -i "n" -p "Do you want to use MariaDB/MySQL instead of sqlite (Database Server including user and database already has to be set up!)? [y/N]" yn
case $yn in
[Yy]* ) fn_install_ts3db_mariadb && break;;
[Nn]* ) break;;
* ) echo "Please answer yes or no.";;
esac
done
else
fn_print_warning_nl "./${selfname} auto-install is uses sqlite. For MariaDB/MySQL use ./${selfname} install"
fi
## Get privilege key
echo ""
echo "Getting privilege key"
echo "================================="
sleep 1
echo "IMPORANT! Save these details for later."
sleep 1
cd "${executabledir}"
./ts3server_startscript.sh start inifile=ts3-server.ini
sleep 5
./ts3server_startscript.sh stop
|
zunnu/linuxgsm
|
lgsm/functions/install_ts3db.sh
|
Shell
|
mit
| 2,472 |
#!/bin/bash
# Run the test suite (including expected-failure tests, via --runxfail)
# against the PR2 uchime reference database.
ref_db='/home/jklynch/host/project/muscope/pr2_gb203/pr2_gb203_version_4.5.fasta'
py.test . --runxfail --uchime-ref-db-fp "$ref_db"
|
hurwitzlab/muscope-18SV4
|
test/run_tests.sh
|
Shell
|
mit
| 133 |
#!/bin/sh
# Import photos from a Canon camera with gphoto2, convert CR2 raws to
# bounded-size JPEGs, and file everything into monthly folders under
# $WORKING_PATH (raws under <yyyymm>/raw, JPEGs and videos under <yyyymm>).
WORKING_PATH=/media/win4/Users/Sand/Pictures/ImportCanon
mkdir -p "$WORKING_PATH/tmp"
# Guard the cd so a failed mount cannot make us import into the wrong place.
cd "$WORKING_PATH/tmp" || exit 1
gphoto2 --get-all-files
#
# Convert CR2 raw files to jpg file (max 1920px width/height)
# then move the raw file to its monthly folder
#
# `IFS= read -r` plus quoting keeps file names with blanks or backslashes
# intact throughout (the original word-split and glob-expanded them).
find "$WORKING_PATH/tmp" -type f -name '*.CR2' -print | while IFS= read -r file
do
	ufraw-batch --overwrite --size=1920 --out-type=jpeg "$file"
	# Give the converted JPEG the raw file's timestamp.
	touch -r "$file" "${file%.CR2}.jpg"
	yearmonth=$(date -r "$file" "+%Y%m")
	dest=$WORKING_PATH/$yearmonth/raw
	mkdir -p "$dest"
	mv "$file" "$dest"
done
#
# Move temp file (convert file, video or gopro) to monthly folder
#
find "$WORKING_PATH/tmp" -type f -regex '.*\.\(JPG\|jpg\|MOV\)' -print | while IFS= read -r file
do
	yearmonth=$(date -r "$file" "+%Y%m")
	dest=$WORKING_PATH/$yearmonth
	mkdir -p "$dest"
	mv "$file" "$dest"
done
#
# Set convert file datetime to raw file datetime
# Remove raw file if convert file doesn't exists
#
find "$WORKING_PATH" -type d -name '*raw' -print | while IFS= read -r folder
do
	find "$folder" -type f -name '*.CR2' -print | while IFS= read -r image
	do
		basename=${image%.CR2}
		convert=$folder/..${basename#$folder}.jpg
		if [ -e "$convert" ]
		then
			touch -r "$image" "$convert"
		else
			# Quoted: unquoted `echo [removed]` was a live glob pattern.
			echo "[removed] $image"
			rm "$image"
		fi
	done
done
#gphoto2 --delete-all-files
|
alx/ImportCanonEOS
|
importCanon.sh
|
Shell
|
mit
| 1,325 |
#!/bin/bash
# Link this repository's dotfiles and i3 configuration into the user's
# environment. Fix: the repository path is captured once and quoted, so
# the links are created correctly even when the checkout path contains
# spaces (each unquoted $(pwd) previously word-split).
repo="$(pwd)"
# Initializes submodules
git submodule init
git submodule update
# Creates all necessary links to link this repository into the environment
ln -s "$repo/.wgetrc" ~/.wgetrc
ln -s "$repo/.bashrc" ~/.bashrc
ln -s "$repo/.nanorc" ~/.nanorc
ln -s "$repo/.inputrc" ~/.inputrc
ln -s "$repo/.Xresources" ~/.Xresources
ln -s "$repo/.dircolors" ~/.dircolors
ln -s "$repo/.face" ~/.face
ln -s "$repo/.face" ~/.face.icon
ln -s "$repo/.tmux.conf" ~/.tmux.conf
# Plasma integration of i3
mkdir -p ~/.config/plasma-workspace/env
ln -s "$repo/wm.sh" ~/.config/plasma-workspace/env
# Configuration of i3
mkdir -p ~/.config/i3
ln -s "$repo/i3/config" ~/.config/i3/config
ln -s "$repo/i3/i3blocks.conf" ~/.config/i3/i3blocks.conf
ln -s "$repo/i3blocks-contrib" ~/.config/i3/i3blocks-contrib
ln -s "$repo/rofi" ~/.config/rofi
ln -s "$repo/termite" ~/.config/termite
ln -s "$repo/powerline" ~/.config/powerline
# Path in rofi theme (only works for rofi-git/rofi >= 1.4)
sed 's|ROFI_OPTIONS=(-width -11 -location 3 -hide-scrollbar -bw 2)|ROFI_OPTIONS=(-width -11 -location 3 -hide-scrollbar -bw 2 -theme ~/.config/rofi/android_notification.rasi)|g' \
    i3blocks-contrib/shutdown_menu/shutdown_menu > shutdown_menu.bak
mv shutdown_menu.bak i3blocks-contrib/shutdown_menu/shutdown_menu
mkdir -p ~/.config/dunst
ln -s "$repo/dunstrc" ~/.config/dunst/dunstrc
|
DominikChmiel/dotfiles
|
setup_init.sh
|
Shell
|
mit
| 1,341 |
#!/usr/bin/env bash
# Travis CI entry point: install external dependencies, then run the
# unit test suite with nose.
sh ./install_external.sh
nosetests tests/unit_tests
|
FlintHill/SUAS-Competition
|
run_travis.sh
|
Shell
|
mit
| 74 |
#!/bin/bash
# Author: Robin Wen
# Date: 17:35:20 12/11/2014
# Desc: Auto deploy to remote tomcat via crontab.
#
# Each step runs only if every previous step succeeded:
#   -u  update the mall admin sources
#   -s  shut down the remote tomcat
#   -w  deploy the mall admin
#   -t  start the remote tomcat back up
#
# Fix: the original guarded the first step with `if [ $? -eq 0 ]` right
# after a plain variable assignment, which always succeeds — the guard was
# meaningless. An explicit && chain expresses the intended step-gating.
script_path=/home/deploy/auto-deploy/scripts/python-auto-deploy-to-tomcat
cd "$script_path" || exit 1
./auto_deploy_app_remote.py -u \
    && ./auto_deploy_app_remote.py -s \
    && ./auto_deploy_app_remote.py -w \
    && ./auto_deploy_app_remote.py -t
|
dbarobin/python-auto-deploy
|
auto_deploy_app_all_in_one/scripts/auto_deploy_mall_admin.sh
|
Shell
|
mit
| 589 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for CESA-2015:1852
#
# Security announcement date: 2015-10-01 21:53:20 UTC
# Script generation date: 2017-02-03 21:13:31 UTC
#
# Operating System: CentOS 7
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - thunderbird.x86_64:38.3.0-1.el7.centos
#
# Last versions recommended by security team:
# - thunderbird.x86_64:45.7.0-1.el7.centos
#
# CVE List:
# - CVE-2015-4500
# - CVE-2015-4509
# - CVE-2015-4517
# - CVE-2015-4519
# - CVE-2015-4520
# - CVE-2015-4521
# - CVE-2015-4522
# - CVE-2015-7174
# - CVE-2015-7175
# - CVE-2015-7176
# - CVE-2015-7177
# - CVE-2015-7180
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Apply the fix by upgrading thunderbird to the recommended version.
sudo yum install thunderbird.x86_64-45.7.0 -y
|
Cyberwatch/cbw-security-fixes
|
CentOS_7/x86_64/2015/CESA-2015:1852.sh
|
Shell
|
mit
| 846 |
#!/bin/bash
# Integration test suite for the mysql-5.6 Docker image: connectivity,
# backup/verify, master/slave replication, and restores from both
# master-side and slave-side dump files. Each scenario removes its own
# containers; the network, image and volumes are removed at the end.
# The grep -c pipelines act as assertions: under `set -e` a zero match
# count fails the pipeline and aborts the whole run.
# NOTE(review): the mysqladmin ping relies on BASE_1_PORT_3306_TCP_ADDR
# (docker-links style) being set inside the container — confirm the image
# provides it on user-defined networks.
set -e
echo "-- Building mysql 5.6 image"
docker build -t mysql-5.6 ../5.6/
docker network create mysql_test_net
DIR_VOLUME=$(pwd)/vol56
mkdir -p ${DIR_VOLUME}/backup
echo
echo "-- Testing mysql 5.6 is running"
docker run --name base_1 -d --net mysql_test_net -e MYSQL_USER=user -e 'MYSQL_PASS=test' mysql-5.6; sleep 10
docker run --name base_2 -d --net mysql_test_net mysql-5.6; sleep 10
docker exec -it base_2 bash -c 'mysqladmin -uuser -ptest -h${BASE_1_PORT_3306_TCP_ADDR} ping | grep -c "mysqld is alive"'
echo
echo "-- Clear"
docker rm -f -v base_1 base_2; sleep 5
echo
echo "-- Testing backup/checking on mysql 5.6"
docker run --name base_1 -d --net mysql_test_net -e MYSQL_USER=user -e 'MYSQL_PASS=test' -e 'DB_NAME=db_1,test_1' mysql-5.6; sleep 10
docker run -it --rm --net mysql_test_net -e 'MYSQL_MODE=backup' -e 'DB_REMOTE_HOST=base_1' -e 'DB_REMOTE_USER=user' -e 'DB_REMOTE_PASS=test' -v ${DIR_VOLUME}/backup:/tmp/backup mysql-5.6; sleep 10
# Checking a database present in the dump must succeed; a missing one must fail.
docker run -it --rm -e 'MYSQL_CHECK=default' -e 'DB_NAME=db_1' -v ${DIR_VOLUME}/backup:/tmp/backup mysql-5.6 | tail -n 1 | grep -c 'Success'; sleep 10
docker run -it --rm -e 'MYSQL_CHECK=/tmp/backup/backup.last.bz2' -e 'DB_NAME=test_1' -v ${DIR_VOLUME}/backup:/tmp/backup mysql-5.6 | tail -n 1 | grep -c 'Success'; sleep 10
docker run -it --rm -e 'MYSQL_CHECK=default' -e 'DB_NAME=db' -v ${DIR_VOLUME}/backup:/tmp/backup mysql-5.6 2>&1 | tail -n 1 | grep -c 'Fail'; sleep 10
echo
echo "-- Clear"
docker rm -f -v base_1; sleep 5
rm -rf ${DIR_VOLUME}
echo
echo
echo "-- Testing master/slave on mysql 5.6"
docker run --name base_1 -d --net mysql_test_net -e 'MYSQL_MODE=master' -e MYSQL_USER=user -e 'MYSQL_PASS=pass' -e 'DB_NAME=db_1,test_1' mysql-5.6; sleep 10
docker exec -it base_1 mysql -uroot -e 'CREATE TABLE test_1.foo (id INT NOT NULL AUTO_INCREMENT, name VARCHAR(100), PRIMARY KEY(id)) ENGINE = INNODB; INSERT INTO test_1.foo (name) VALUES ("Petr");'
echo
echo "-- Create slave"
docker run --name base_2 -d --net mysql_test_net -e 'MYSQL_MODE=slave' -e 'REPLICATION_HOST=base_1' -e 'DB_REMOTE_USER=user' -e 'DB_REMOTE_PASS=pass' -e MYSQL_USER=user -e 'MYSQL_PASS=pass' mysql-5.6; sleep 10
# A row inserted on the master must replicate to the slave.
docker exec -it base_1 mysql -uroot -e 'INSERT INTO test_1.foo (name) VALUES ("Linda");'; sleep 5
docker exec -it base_2 mysql -uroot -e 'SELECT * FROM test_1.foo;' | grep -c -w "Linda"
echo
echo "-- Backup master"
mkdir -p ${DIR_VOLUME}/backup
docker run -it --rm --net mysql_test_net -e 'MYSQL_MODE=backup' -e 'DB_REMOTE_HOST=base_1' -e 'DB_REMOTE_USER=user' -e 'DB_REMOTE_PASS=pass' -v ${DIR_VOLUME}/backup_master:/tmp/backup mysql-5.6 --master-data --single-transaction; sleep 10
echo
echo "-- Restore slave from master-file"
docker run --name base_3 -d --net mysql_test_net -e 'MYSQL_MODE=slave' -e 'REPLICATION_HOST=base_1' -e MYSQL_USER=user -e 'MYSQL_PASS=pass' -e 'MYSQL_RESTORE=/tmp/backup/backup.last.bz2' -v ${DIR_VOLUME}/backup_master:/tmp/backup mysql-5.6; sleep 10
docker exec -it base_1 mysql -uroot -e 'INSERT INTO test_1.foo (name) VALUES ("Tom");'; sleep 5
docker run --name base_4 -d --net mysql_test_net -e 'MYSQL_MODE=slave' -e 'REPLICATION_HOST=base_1' -e MYSQL_USER=user -e 'MYSQL_PASS=pass' -e 'MYSQL_RESTORE=default' -v ${DIR_VOLUME}/backup_master:/tmp/backup mysql-5.6; sleep 10
docker exec -it base_3 mysql -uroot -e 'SELECT * FROM test_1.foo;' | grep -c -w "Tom"
echo
echo "-- Backup slave"
docker run -it --rm --net mysql_test_net -e 'MYSQL_MODE=backup' -e 'DB_REMOTE_HOST=base_4' -e 'DB_REMOTE_USER=user' -e 'DB_REMOTE_PASS=pass' -v ${DIR_VOLUME}/backup_slave:/tmp/backup mysql-5.6 --dump-slave; sleep 15
echo
echo "-- Restore slave from slave-file"
docker run --name base_5 -d --net mysql_test_net -e 'MYSQL_MODE=slave' -e 'REPLICATION_HOST=base_1' -e MYSQL_USER=user -e 'MYSQL_PASS=pass' -e 'MYSQL_RESTORE=/tmp/backup/backup.last.bz2' -v ${DIR_VOLUME}/backup_slave:/tmp/backup mysql-5.6; sleep 15
docker exec -it base_1 mysql -uroot -e 'INSERT INTO test_1.foo (name) VALUES ("Bob");'; sleep 10
docker exec -it base_5 mysql -uroot -e 'SELECT * FROM test_1.foo;' | grep -c -w "Bob"
# All five servers must agree on the final row count.
docker exec -it base_1 mysql -uroot -e 'SELECT COUNT(*) FROM test_1.foo;' | grep -c -w "4";sleep 3
docker exec -it base_2 mysql -uroot -e 'SELECT COUNT(*) FROM test_1.foo;' | grep -c -w "4";sleep 3
docker exec -it base_3 mysql -uroot -e 'SELECT COUNT(*) FROM test_1.foo;' | grep -c -w "4";sleep 3
docker exec -it base_4 mysql -uroot -e 'SELECT COUNT(*) FROM test_1.foo;' | grep -c -w "4";sleep 3
docker exec -it base_5 mysql -uroot -e 'SELECT COUNT(*) FROM test_1.foo;' | grep -c -w "4"
echo
echo "-- Clear"
docker rm -f -v base_1 base_2 base_3 base_4 base_5; sleep 5
echo
echo "-- Restore master from master-file"
docker run --name restore_1 -d --net mysql_test_net -e 'MYSQL_MODE=master' -e MYSQL_USER=user -e 'MYSQL_PASS=pass' -e 'MYSQL_RESTORE=default' -v ${DIR_VOLUME}/backup_master:/tmp/backup mysql-5.6; sleep 15
docker run --name restore_2 -d --net mysql_test_net -e 'MYSQL_MODE=slave' -e 'REPLICATION_HOST=restore_1' -e MYSQL_USER=user -e 'MYSQL_PASS=pass' -e 'DB_REMOTE_USER=user' -e 'DB_REMOTE_PASS=pass' mysql-5.6; sleep 15
docker exec -it restore_1 mysql -uroot -e 'INSERT INTO test_1.foo (name) VALUES ("Romeo");'; sleep 5
docker exec -it restore_1 mysql -uroot -e 'SELECT * FROM test_1.foo;' | grep -c -w "Romeo";
docker exec -it restore_1 mysql -uroot -e 'SELECT COUNT(*) FROM test_1.foo;' | grep -c -w "3"
docker exec -it restore_2 mysql -uroot -e 'SELECT COUNT(*) FROM test_1.foo;' | grep -c -w "3"
echo
echo "-- Clear"
docker rm -f -v restore_1 restore_2; sleep 5
docker network rm mysql_test_net
docker rmi mysql-5.6; sleep 5
rm -rf ${DIR_VOLUME}
echo
echo "-- Done"
|
romeOz/docker-mysql
|
tests/56.sh
|
Shell
|
mit
| 5,676 |
#!/bin/bash
# Provisioning driver: runs every script in ./sequence in numeric order,
# teeing stdout into provision.log and stderr into both log files.
screenWorkaround=false
# Terminal correction for binaries that don't like screen.xterm as a terminal.
if [ "${TERM}" == "screen.xterm" ]; then
TERM="xterm"
screenWorkaround=true
fi
# Keep apt/dpkg from prompting during unattended provisioning.
export DEBIAN_FRONTEND=noninteractive
# Load functions
source "./functions/header.sh"
source "./functions/read_lst.sh"
# Export shared functions
export -f read_lst
export scriptPath="$(dirname -- "$(readlink -f -- "$0")")"
log=./provision.log
errorLog=./provision-errors.log
# Copy STDOUT to a log
exec > >(tee -a "${log}")
# Copy STDERR to log and STDERR only to errorLog
# NOTE(review): the second exec chains onto the first — by then fd 2 already
# points at the first tee, so stderr flows tee(errorLog) -> tee(log) ->
# original stderr, landing in both files. Confirm this ordering is intended
# before simplifying.
exec 2> >(tee -a "${log}" >&2)
exec 2> >(tee -a "${errorLog}" >&2)
cd $scriptPath
# Split only on newlines so the unquoted $(ls ...) expansion below keeps
# script paths containing spaces intact.
IFS=$'\n'
for script in $(ls ./sequence/*.sh | sort -n); do
header $(basename "${script}")
# Sequence scripts are executed directly, so each must carry the exec bit.
"${script}"
# A sequence script may change directory; return home for the next one.
cd "${scriptPath}"
done
# Clean-up exported variables and functions
unset DEBIAN_FRONTEND
unset scriptPath
unset -f read_lst
if ${screenWorkaround}; then
TERM="screen.xterm"
fi
|
xcjs/workstation-provision
|
ubuntu-14.04/dev-notebook/provision.sh
|
Shell
|
mit
| 949 |
#!/bin/sh
# Build a minimal system: configure the tree for httpd, build and install
# BusyBox, then build the misc tools and the C library.
make CFG=httpd configure
# Abort if the BusyBox tree is missing: otherwise make would run in the
# wrong directory and the trailing `cd ..` would escape the build tree.
cd busybox-1.22.1 || exit 1
make
make install
cd ..
make misc
make libc
|
emagii/Training-Labs
|
sysdev/tinysystem/build.sh
|
Shell
|
mit
| 99 |
# Refresh the package index, install pip, then pin Django to 1.10.5.
sudo apt-get update \
    && sudo apt-get install -y python-pip
sudo pip install Django==1.10.5
|
tfulmer1/tft_gm_tools
|
setup.sh
|
Shell
|
mit
| 89 |
# Build dunaWatchdog (release configuration) and assemble a self-contained
# "portable" directory next to the source tree.
# Guard the cd: if the release tree is missing, `make clean` would
# otherwise run in the current directory and `cd ../` would escape it.
cd release || exit 1
make clean
make
cd ../
mkdir -p portable
cp release/dunaWatchdog portable/
mkdir -p portable/etc
# -n: never overwrite an existing (possibly user-edited) configuration
cp -n etc/sample.duna.xml portable/etc/duna.xml
cp -n etc/sample.muttrc portable/etc/muttrc
cp -n etc/sample.msmtp.conf portable/etc/msmtp.conf
# msmtp expects its config (which may hold a password) to be private
chmod 600 portable/etc/msmtp.conf
|
emepetres/dunaWatchdog
|
install.sh
|
Shell
|
mit
| 286 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Set Hive and Hadoop environment variables here. These variables can be used
# to control the execution of Hive. It should be used by admins to configure
# the Hive installation (so that users do not have to set environment variables
# or set command line parameters to get correct behavior).
#
# The hive service being invoked (CLI/HWI etc.) is available via the environment
# variable SERVICE
# Hive Client memory usage can be an issue if a large number of clients
# are running at the same time. The flags below have been useful in
# reducing memory usage:
#
# if [ "$SERVICE" = "cli" ]; then
#   if [ -z "$DEBUG" ]; then
#     export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit"
#   else
#     export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit"
#   fi
# fi
# The heap size of the jvm stared by hive shell script can be controlled via:
#
# export HADOOP_HEAPSIZE=1024
#
# Larger heap size may be required when running queries over large number of files or partitions.
# By default hive shell scripts use a heap size of 256 (MB).  Larger heap size would also be
# appropriate for hive server (hwi etc).
# Set HADOOP_HOME to point to a specific hadoop install directory
# NOTE(review): HADOOP_HOME is assigned but not exported; this file is
# sourced by Hive's launcher scripts, so a plain assignment is visible to
# them — confirm nothing further down expects it in the environment.
HADOOP_HOME=/opt/hadoop
# Hive Configuration Directory can be controlled by:
export HIVE_CONF_DIR=/opt/hive/conf
# Folder containing extra libraries required for hive compilation/execution can be controlled by:
# export HIVE_AUX_JARS_PATH=
|
qrsforever/workspace
|
java/learn/hadoop/hive/__replace__/hive/derby/conf/hive-env.sh
|
Shell
|
mit
| 2,380 |
#!/bin/bash
# Set up routing and NAT for a second subnet (192.168.3.0/24).
# $1 (required): IP address of the Internet-facing interface, used as the
# SNAT source address.
# Fix: validate $1 up front — previously a missing argument left
# `--to` empty and iptables failed only after the route had already been
# added, leaving the system half-configured.
if [ -z "$1" ]; then
    echo "Usage: $0 <internet-facing-ip>" >&2
    exit 1
fi
# Create the route towards the second subnet (via gateway 10.42.0.29 on p35p1)
sudo route add -net 192.168.3.0/24 gw 10.42.0.29 p35p1
# Enable SNAT for the second subnet
sudo iptables -t nat -I POSTROUTING -s 192.168.3.0/24 -o wlp4s0 -j SNAT --to "$1"
# Accept packets destined for the second subnet
sudo iptables -t filter -I FORWARD -d 192.168.3.0/24 -j ACCEPT
# Accept packets originating from the second subnet
sudo iptables -t filter -I FORWARD -s 192.168.3.0/24 -j ACCEPT
# Accept without restriction packets destined for the first subnet
sudo iptables -t filter -I FORWARD -d 10.42.0.0/24 -j ACCEPT
|
Prometheus-ETSIIT/locaviewer
|
config_router.sh
|
Shell
|
mit
| 660 |
#!/bin/bash
# Travis CI build step: print the build configuration, run `make install`
# in the matching out-of-tree build directory, then run the unit tests
# from the install tree.
# print build configuration
echo "Build configuration:"
echo " - Compiler: ${COMPILER}"
echo " - Library mode: ${LIBRARY_MODE}"
echo " - Build Type: ${BUILD_TYPE}"
echo " - Native OpenGl: ${NATIVE_OPENGL}"
# Directory names use the lower-cased build type.
SMALL_BUILDTYPE=$(echo "${BUILD_TYPE}" | tr '[:upper:]' '[:lower:]')
OPENGL_POSTFIX=
if [ "${NATIVE_OPENGL}" == "On" ]; then
    # BUG FIX: was `OPENGL_POSTFIX-opengl` (a command, not an assignment),
    # so the -opengl directory postfix was never applied.
    OPENGL_POSTFIX=-opengl
fi
# goto build directory
cd "${TRAVIS_BUILD_DIR}/travis-build/${COMPILER}${OPENGL_POSTFIX}/${SMALL_BUILDTYPE}/${LIBRARY_MODE}"
# run build
make install
cd "${TRAVIS_BUILD_DIR}/travis-install/${COMPILER}${OPENGL_POSTFIX}/${SMALL_BUILDTYPE}/${LIBRARY_MODE}/tests/UnitTests"
chmod ugo+x UnitTest.sh
./UnitTest.sh
|
ZahlGraf/IrrIMGUI
|
scripts/travis_build.sh
|
Shell
|
mit
| 687 |
################################################################################
# Date functions for getting local time from a unix style timestamp in ns
################################################################################
nsdate() {
[[ -z $1 ]] && { echo "Usage: jpdate|hkdate|audate|utdate ns"; return; }
local st=$1
local nst=0
#1521740309.720
# 10 digits for seconds
local digits=$(echo -n $1|wc -c)
if [[ $digits -gt 10 ]]; then
local div=$(( 10 ** ($digits - 10) ))
st=$(( $1 / $div ))
nst=$(( $1 % $div ))
fi
if [[ $(uname -s) == Darwin ]]; then
datecmd=gdate
else
datecmd=date
fi
if ! command -v $datecmd >/dev/null 2>&1; then
>&2 echo "Missing $datecmd. Please install $datecmd."
return
fi
printf "%s.%09d\n" "$($datecmd -d @${st} +"%Y-%m-%d %H:%M:%S")" "${nst}"
}
alias jpdate="TZ='Asia/Tokyo' nsdate"
alias hkdate="TZ='Asia/Hong_Kong' nsdate"
alias audate="TZ='Australia/Sydney' nsdate"
alias utdate="TZ='UTC' nsdate"
|
shanematley/dotfiles
|
shrc.d/time.sh
|
Shell
|
mit
| 1,058 |
#!/usr/bin/env sh
# First-boot initialization for a Percona/MySQL container: if the system
# database is missing, bootstrap the data directory, secure the root
# account, create a remote admin user, then start mysqld_safe for real.
if [ ! -d /var/lib/mysql/mysql ]; then
echo 'Rebuilding mysql data dir'
chown -R mysql:mysql /var/lib/mysql
mysql_install_db > /dev/null
rm -rf /var/run/mysqld/*
echo 'Starting mysqld'
# The sleep 1 is there to make sure that inotifywait starts up before the socket is created
mysqld_safe &
echo 'Waiting for mysqld to come online'
# NOTE(review): -x tests execute permission on the socket path; -S (path
# exists and is a socket) would be the conventional test — confirm intent.
while [ ! -x /var/run/mysqld/mysqld.sock ]; do
sleep 1
done
echo 'Setting root password'
/usr/bin/mysqladmin -u root password 'secret'
mysql -u root --password=secret -e "UPDATE mysql.user SET Password=PASSWORD('secret') WHERE User='root'"
# Drop remote root access, anonymous users and the test databases.
mysql -u root --password=secret -e "DELETE FROM mysql.user WHERE User='root' AND Host NOT IN ('localhost', '127.0.0.1', '::1')"
mysql -u root --password=secret -e "DELETE FROM mysql.user WHERE User=''"
mysql -u root --password=secret -e "DELETE FROM mysql.db WHERE Db='test' OR Db='test\_%'"
mysql -u root --password=secret -e "FLUSH PRIVILEGES"
# Admin account that may connect from any host, with grant option.
echo "GRANT ALL ON *.* TO admin@'%' IDENTIFIED BY 'secret' WITH GRANT OPTION; FLUSH PRIVILEGES;" | mysql -u root --password=secret
echo 'Shutting down mysqld'
mysqladmin -u root --password=secret shutdown
sleep 10
fi
# Install the baked-in configuration and start the server in the background.
cp /etc/mysql/conf/my.cnf /etc/mysql/my.cnf
chmod 644 /etc/mysql/my.cnf
mysqld_safe &
|
masood09/docker-percona
|
build/99_percona.sh
|
Shell
|
mit
| 1,342 |
#!/bin/bash
# Docker entrypoint for e-cidade online: resolves DB connection settings
# (defaulting to docker-links env vars from a linked postgres container),
# patches the Apache and PHP configuration in place, writes the
# application's libs/db_conn.php, then starts Apache in the foreground.
set -e
# DB settings default to the linked container's POSTGRES_* variables.
: ${DB_INSTITUICAO:=1}
: ${DB_SERVIDOR:=$POSTGRES_PORT_5432_TCP_ADDR}
: ${DB_USUARIO:=${POSTGRES_ENV_POSTGRES_USER:-ecidade}}
: ${DB_SENHA:=${POSTGRES_ENV_POSTGRES_PASSWORD:-}}
: ${DB_PORTA:=${POSTGRES_PORT_5432_TCP_PORT:-5432}}
: ${DB_BASE:=${POSTGRES_ENV_POSTGRES_DB:=e-cidade}}
# Without a DB host there is nothing to configure: fail fast.
if [ -z "$DB_SERVIDOR" ]; then
echo >&2 'erro: é necessário linkar um container de banco de dados postgresql ou setar a variável DB_SERVIDOR'
exit 1
fi
echo 'Iniciando configuração do e-cidade online'
echo 'Configurando apache.conf'
# NOTE(review): the sed edits below address fixed line numbers and assume
# the stock config files of the base image — fragile across image upgrades.
sed -i '67s/.*/Timeout 300/' /etc/apache2/apache2.conf
# Append the e-cidade directives only once; line 238 acts as the sentinel.
if [ "$(sed -n '238p' /etc/apache2/apache2.conf)" != "# linhas adicionadas para o e-cidade" ]; then
echo '# linhas adicionadas para o e-cidade' >> /etc/apache2/apache2.conf
echo 'LimitRequestLine 16382' >> /etc/apache2/apache2.conf
echo 'LimitRequestFieldSize 16382' >> /etc/apache2/apache2.conf
echo 'AddDefaultCharset ISO-8859-1' >> /etc/apache2/apache2.conf
fi
echo 'Configurando login.defs'
sed -i '151s/.*/UMASK 002/' /etc/login.defs
echo 'Configurando php.ini'
sed -i '704s/.*/register_globals = On/' /etc/php5/apache2/php.ini
sed -i '714s/.*/register_long_arrays = On/' /etc/php5/apache2/php.ini
sed -i '729s/.*/register_argc_argv = On/' /etc/php5/apache2/php.ini
sed -i '741s/.*/post_max_size = 64M/' /etc/php5/apache2/php.ini
sed -i '757s/.*/magic_quotes_gpc = On/' /etc/php5/apache2/php.ini
sed -i '892s/.*/upload_max_filesize = 64M/' /etc/php5/apache2/php.ini
sed -i '920s/.*/default_socket_timeout = 60000/' /etc/php5/apache2/php.ini
sed -i '444s/.*/max_execution_time = 60000/' /etc/php5/apache2/php.ini
sed -i '454s/.*/max_input_time = 60000/' /etc/php5/apache2/php.ini
sed -i '465s/.*/memory_limit = 512M/' /etc/php5/apache2/php.ini
sed -i '334s/.*/allow_call_time_pass_reference = On/' /etc/php5/apache2/php.ini
sed -i '538s/.*/display_errors = Off/' /etc/php5/apache2/php.ini
sed -i '559s/.*/log_errors = On/' /etc/php5/apache2/php.ini
sed -i '646s/.*/error_log = \/var\/www\/log\/php-scripts.log/' /etc/php5/apache2/php.ini
sed -i '1516s/.*/session.gc_maxlifetime = 7200/' /etc/php5/apache2/php.ini
echo 'Configurando db_conn.php'
# Write the DB connection settings the application reads at runtime.
sed -i "28s/.*/\$DB_INSTITUICAO = $DB_INSTITUICAO;/" /var/www/e-cidadeonline/libs/db_conn.php
sed -i "29s/.*/\$DB_SERVIDOR = \"$DB_SERVIDOR\";/" /var/www/e-cidadeonline/libs/db_conn.php
sed -i "30s/.*/\$DB_BASEDADOS = \"$DB_BASE\";/" /var/www/e-cidadeonline/libs/db_conn.php
sed -i "31s/.*/\$DB_USUARIO = \"$DB_USUARIO\";/" /var/www/e-cidadeonline/libs/db_conn.php
sed -i "32s/.*/\$DB_SENHA = \"$DB_SENHA\";/" /var/www/e-cidadeonline/libs/db_conn.php
sed -i "33s/.*/\$DB_PORTA = \"$DB_PORTA\";/" /var/www/e-cidadeonline/libs/db_conn.php
echo 'Iniciando apache2'
# exec so apache2 becomes PID 1 and receives container signals.
source /etc/apache2/envvars && exec /usr/sbin/apache2 -DFOREGROUND
|
edsondewes/docker-ecidadeonline
|
docker-entrypoint.sh
|
Shell
|
mit
| 2,800 |
#!/bin/sh
# CocoaPods-generated "embed frameworks" build phase: copies each pod
# framework into the app's Frameworks folder, strips architectures not in
# VALID_ARCHS, and re-codesigns when the build settings require it.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework given in $1 into the app bundle, resolving
# symlinks, stripping unused architectures and codesigning the result.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the pod frameworks needed for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "Pods-MoscropSecondary/Bolts.framework"
install_framework "Pods-MoscropSecondary/Kingfisher.framework"
install_framework "Pods-MoscropSecondary/Parse.framework"
install_framework "Pods-MoscropSecondary/SwiftyJSON.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-MoscropSecondary/Bolts.framework"
install_framework "Pods-MoscropSecondary/Kingfisher.framework"
install_framework "Pods-MoscropSecondary/Parse.framework"
install_framework "Pods-MoscropSecondary/SwiftyJSON.framework"
fi
|
IvonLiu/moscrop-secondary-ios
|
Pods/Target Support Files/Pods-MoscropSecondary/Pods-MoscropSecondary-frameworks.sh
|
Shell
|
mit
| 3,926 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for CESA-2014:1636
#
# Security announcement date: 2014-10-20 18:15:08 UTC
# Script generation date: 2017-01-22 21:15:53 UTC
#
# Operating System: CentOS 6
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - java-1.8.0-openjdk-javadoc.noarch:1.8.0.25-1.b17.el6
# - java-1.8.0-openjdk.x86_64:1.8.0.25-1.b17.el6
# - java-1.8.0-openjdk-demo.x86_64:1.8.0.25-1.b17.el6
# - java-1.8.0-openjdk-devel.x86_64:1.8.0.25-1.b17.el6
# - java-1.8.0-openjdk-headless.x86_64:1.8.0.25-1.b17.el6
# - java-1.8.0-openjdk-src.x86_64:1.8.0.25-1.b17.el6
#
# Last versions recommended by security team:
# - java-1.8.0-openjdk-javadoc.noarch:1.8.0.121-0.b13.el6_8
# - java-1.8.0-openjdk.x86_64:1.8.0.121-0.b13.el6_8
# - java-1.8.0-openjdk-demo.x86_64:1.8.0.121-0.b13.el6_8
# - java-1.8.0-openjdk-devel.x86_64:1.8.0.121-0.b13.el6_8
# - java-1.8.0-openjdk-headless.x86_64:1.8.0.121-0.b13.el6_8
# - java-1.8.0-openjdk-src.x86_64:1.8.0.121-0.b13.el6_8
#
# CVE List:
# - CVE-2014-6457
# - CVE-2014-6468
# - CVE-2014-6502
# - CVE-2014-6504
# - CVE-2014-6506
# - CVE-2014-6511
# - CVE-2014-6512
# - CVE-2014-6517
# - CVE-2014-6519
# - CVE-2014-6531
# - CVE-2014-6558
# - CVE-2014-6562
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade every affected OpenJDK 8 sub-package to the recommended build,
# one yum transaction per package (same commands, same order as before).
packages="
java-1.8.0-openjdk-javadoc.noarch-1.8.0.121
java-1.8.0-openjdk.x86_64-1.8.0.121
java-1.8.0-openjdk-demo.x86_64-1.8.0.121
java-1.8.0-openjdk-devel.x86_64-1.8.0.121
java-1.8.0-openjdk-headless.x86_64-1.8.0.121
java-1.8.0-openjdk-src.x86_64-1.8.0.121
"
for package in $packages; do
    sudo yum install "$package" -y
done
|
Cyberwatch/cbw-security-fixes
|
CentOS_6/x86_64/2014/CESA-2014:1636.sh
|
Shell
|
mit
| 1,777 |
#!/bin/bash
#
# ---------------------------------------------------------
# Backs up $file_to_backup to the product version dir under a
# host-specific dir, plus a timestamped copy, and includes both
# relative paths in the packaging include file.
# Relies on framework helpers defined elsewhere: doLog, doExit,
# doRunCmdOrExit, doRunCmdAndLog.
# Env inputs: file_to_backup (required), product_instance_dir,
# backup_root_dir, include_file, files_to_backup_list_file,
# wrap_name, wrap_bash_dir, host_name.
# ---------------------------------------------------------
doSpecBackupFile(){
    cd $product_instance_dir
    doLog "START doBackupFile"
    test -z "$file_to_backup" && doExit 3 "no file to backup -> do export file_to_backup=<<file>>"
    cur_rel_file=cnf/hosts/$(hostname -s)/$file_to_backup
    # normalize a relative path to an absolute one under the product dir
    if [[ "$file_to_backup" != /* ]]
    then file_to_backup="$product_instance_dir/$file_to_backup"
    fi
    test -z "$backup_root_dir" && \
        backup_root_dir=$product_instance_dir/cnf/hosts/$(hostname -s) && \
        mkdir -p "$backup_root_dir"
    cmd="test -f $file_to_backup"
    set -e ; doRunCmdOrExit "$cmd" ; set +e
    # define the version; fall back to 1.0.0 when the file carries none
    file_version=$(grep 'export version' $file_to_backup | cut -d= -f2)
    test -z "$file_version" && file_version='1.0.0'
    #todo: parametrize
    mkdir -p $backup_root_dir/$(dirname $file_to_backup)
    cur_file=$backup_root_dir/$file_to_backup
    cmd="cp -v $file_to_backup $cur_file"
    doRunCmdAndLog "$cmd"
    # the file to be backed up should be included in the full package of the app
    test -z $include_file && \
        include_file="$product_instance_dir/met/.include.$wrap_name"
    # the timestamped backup copy is included in the full package as well
    ts_file=$file_to_backup.$file_version.$(date +%Y%m%d_%H%M%S).backup
    ts_rel_file=cnf/hosts/$(hostname -s)"$ts_file"
    ts_file=$backup_root_dir/$ts_file
    # copy the file to backup by preserving the file permissions
    cp -vp $file_to_backup $ts_file
    doLog "and verify"
    doLog $'\n'"#-----------------------------------------------------"$'\n'
    output="$output"$(stat -c "%a %U:%G %n" $backup_root_dir$file_to_backup* | sort -nr|uniq -u)
    output=$'\n\n'"$output"$'\n\n'
    doLog "$output"
    doLog $'\n'"#-----------------------------------------------------"$'\n'
    # if the files-to-backup list file is not configured set the default
    test -z "$files_to_backup_list_file" && \
        files_to_backup_list_file="$wrap_bash_dir/.$host_name.files-to-backup.lst"
    test -r $files_to_backup_list_file || touch $files_to_backup_list_file
    doLog "files_to_backup_list_file : $files_to_backup_list_file"
    # NOTE(review): grep -c prints "0" on no match, so this is empty only
    # when grep itself failed (e.g. the include file does not exist yet).
    flag_file_is_found=$(grep -c "$file_to_backup" "$include_file")
    msg="the file to backup : $file_to_backup was not found in this host\'s list of files to backup"
    msg="$msg adding it to the list of files to backup"
    doLog "flag_file_is_found:$flag_file_is_found"
    # add only once the
    test -z "$flag_file_is_found" && echo $cur_rel_file >> $include_file
    echo $ts_rel_file >> $include_file
    # BUG FIX: was `doLog "$msg"doLog "$msg"`, which passed the mangled
    # word "${msg}doLog" as the first argument instead of logging once.
    test -z "$flag_file_is_found" && doLog "$msg"
    test $flag_file_is_found -lt 1 && echo "$file_to_backup" >> "$files_to_backup_list_file"
    test $flag_file_is_found -eq 1 && doLog " only once do nothing"
    test $flag_file_is_found -gt 1 && doLog " more than once do nothing"
    #output=$(cat "$files_to_backup_list_file")
    #output=$'\n\n'"$output"$'\n\n'
    #doLog "$output"
    doLog "STOP doBackupFile"
}
#eof spec doBackupFile
|
ygeo/url-sniper
|
src/bash/url-sniper/specs/sys/backup-file.spec.sh
|
Shell
|
mit
| 3,125 |
#!/bin/sh
# Cross-compile the evaltmpl binary for linux/amd64, bake it into a local
# Docker image, and run the container interactively on port 3000.
app_name=evaltmpl
GOOS=linux GOARCH=amd64 go build "$app_name.go"
docker build -f Dockerfile.local -t "$app_name" .
docker run -it --rm -p 3000:3000 "$app_name"
|
sheercat/evaltmpl
|
bin/docker-run.sh
|
Shell
|
mit
| 144 |
#!/usr/bin/expect -f
# Expect script: telnet into a Thomson router and enable ethernet port 4
# (ethif4), then persist the change with saveall.
# NOTE(review): credentials are hard-coded in plain text below and telnet
# is unencrypted — consider sourcing them from the environment instead.
set timeout -1
spawn telnet -K 10.0.0.138
match_max 100000
expect "Username :"
send "guru\r"
expect "Password :"
send "guru\r"
#send "eth device ifconfig intf=ethif1 state=enabled\r";
#send "eth device ifconfig intf=ethif2 state=enabled\r";
#send "eth device ifconfig intf=ethif3 state=enabled\r";
send "eth device ifconfig intf=ethif4 state=enabled\r";
# persist the configuration change on the router
send "saveall\r";
#GabrielHounds
#send "wireless macacl modify ssid_id=0 hwaddr=8c:3a:e3:94:41:7b permission=allow\r"
#Orions-phone
#send "wireless macacl modify ssid_id=0 hwaddr=5c:f8:a1:a6:cc:48 permission=allow\r"
#OrionsPC
#send "wireless macacl modify ssid_id=0 hwaddr=94:de:80:c1:cf:9a permission=allow\r"
send "exit\r"
expect eof
|
k7n4n5t3w4rt/ThomsonKiller
|
scripts/orion_on.sh
|
Shell
|
mit
| 715 |
#!/bin/bash
# Rebuild the ggps source distribution and publish it to the local PyPi
# server, then list the server's package cache for verification.
pypi_cache=/Users/cjoakim/pypi-packages
echo ''
echo 'Removing ggps from local PyPi server cache:'
rm "${pypi_cache}"/ggps*
echo 'Creating sdist, uploading to local PyPi server:'
python setup.py sdist upload -r local
echo ''
echo 'Listing local PyPi server packages:'
ls -al "${pypi_cache}"
# python setup.py sdist
# python setup.py sdist upload
|
cjoakim/ggps
|
sdist-local.sh
|
Shell
|
mit
| 348 |
#!/bin/bash
# Script to rerun the same headers on another cv
machines_one=(160.98.22.10)
machines_seven=(160.98.22.21 160.98.22.22 160.98.22.23 160.98.22.24 160.98.22.25 160.98.22.8 160.98.22.9)
machines_sevenb=(160.98.22.10 160.98.22.11 160.98.22.12 160.98.22.13 160.98.22.14 160.98.22.15 160.98.22.16)
machines_threeb=(160.98.22.17 160.98.22.18 160.98.22.19)
machines_ten=(160.98.22.10 160.98.22.11 160.98.22.12 160.98.22.13 160.98.22.14 160.98.22.15 160.98.22.16 160.98.22.17 160.98.22.18 160.98.22.19)
if [ "$1" == "7" ]; then
machines=("${machines_seven[@]}")
elif [ "$1" == "1" ]; then
machines=("${machines_one[@]}")
elif [ "$1" == "7b" ]; then
machines=("${machines_sevenb[@]}")
elif [ "$1" == "3b" ]; then
machines=("${machines_threeb[@]}")
elif [ "$1" == "10" ]; then
machines=("${machines_ten[@]}")
else
echo "The first parameter must be one of [7,1,3b,7b,10]"
exit 1
fi
if [ "$2" == "third" ]; then
mode="third"
option="-third"
elif [ "$2" == "half" ]; then
mode="half"
option="-half"
elif [ "$2" == "full" ]; then
mode="full"
option=""
else
echo "The second parameter must be one of [full,half,third]"
exit 1
fi
set=$3
old_stamp=$4
if [ ! -d run/${old_stamp} ]; then
echo "Invalid old stamp"
exit
fi
options=""
dataset="washington"
get_train="false"
# Discards the first three parameters
shift 4
while [ "$1" ]
do
if [ "$1" == "all" ]; then
options="$options -all"
fi
if [ "$1" == "parzival" ]; then
options="$options -parzival"
dataset="parzival"
fi
if [ "$1" == "iam" ]; then
options="$options -iam"
dataset="iam"
fi
if [ "$1" == "eval_train" ]; then
get_train="true"
fi
if [ "$1" == "hmm" ]; then
options="$options -hmm -htk"
fi
if [ "$1" == "sub" ]; then
options="$options -sub"
fi
shift
done
options="$options $option"
config_file="config_${mode}.hpp"
user=wicht
password=`cat .passwd`
grep=/usr/bin/zgrep
mkdir -p run
cd run
if [ ! -f stamp ]; then
echo "1" >> stamp
fi
stamp=`cat stamp`
new_stamp=$((stamp+1))
echo "$new_stamp" > stamp
echo "Old Stamp: $old_stamp"
echo "Stamp: $stamp"
echo "Mode: $mode"
echo "Dataset: $dataset"
echo "Set: $set"
echo "Options: $options"
# Per-run output directory, named after the current stamp.
mkdir -p "$stamp"
# 1. Move all the files with scp
# For every machine (in parallel): push the old run's per-machine config as the
# remote build config, rebuild the spotter remotely, and archive a copy of the
# config under the new stamp.
for machine in ${!machines[@]}; do
(
sshpass -p "$password" scp ${old_stamp}/${machine}_config.hpp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/include/${config_file}
sshpass -p "$password" ssh ${user}@${machines[machine]} 'cd /home/wicht/dev/word_spotting; make clean; make -j9 release;'
cp ${old_stamp}/${machine}_config.hpp ${stamp}/${machine}_config.hpp
) &
done
wait
# 2. Execute all the scripts and get the output back
# For every machine (in parallel): run training remotely, then pull back the
# log, the .dat results, and the trec_eval rel/top files for the
# train (results/1, optional), validation (results/2) and test (results/3) sets.
for machine in ${!machines[@]}; do
(
echo "Start execution on ${machines[machine]}"
sshpass -p "$password" ssh ${user}@${machines[machine]} "cd ~/dev/word_spotting; rm -rf results/*; ./release/bin/spotter -2 -fix ${options} train ~/datasets/${dataset} ${set} > grid.log ;"
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/grid.log ${stamp}/${machine}.log
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/method_2_${mode}.dat ${stamp}/${machine}.dat
if [ "$get_train" == "true" ]; then
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/1/global_rel_file ${stamp}/${machine}_train_global_rel_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/1/global_top_file ${stamp}/${machine}_train_global_top_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/1/local_rel_file ${stamp}/${machine}_train_local_rel_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/1/local_top_file ${stamp}/${machine}_train_local_top_file
fi
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/2/global_rel_file ${stamp}/${machine}_valid_global_rel_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/2/global_top_file ${stamp}/${machine}_valid_global_top_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/2/local_rel_file ${stamp}/${machine}_valid_local_rel_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/2/local_top_file ${stamp}/${machine}_valid_local_top_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/3/global_rel_file ${stamp}/${machine}_test_global_rel_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/3/global_top_file ${stamp}/${machine}_test_global_top_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/3/local_rel_file ${stamp}/${machine}_test_local_rel_file
sshpass -p "$password" scp ${user}@${machines[machine]}:/home/wicht/dev/word_spotting/results/3/local_top_file ${stamp}/${machine}_test_local_top_file
# Score each rel/top pair with trec_eval (global and local variants).
if [ "$get_train" == "true" ]; then
~/dev/trec_eval/trec_eval -q ${stamp}/${machine}_train_global_rel_file ${stamp}/${machine}_train_global_top_file > ${stamp}/${machine}_train_global_eval
~/dev/trec_eval/trec_eval -q ${stamp}/${machine}_train_local_rel_file ${stamp}/${machine}_train_local_top_file > ${stamp}/${machine}_train_local_eval
fi
~/dev/trec_eval/trec_eval -q ${stamp}/${machine}_valid_global_rel_file ${stamp}/${machine}_valid_global_top_file > ${stamp}/${machine}_valid_global_eval
~/dev/trec_eval/trec_eval -q ${stamp}/${machine}_valid_local_rel_file ${stamp}/${machine}_valid_local_top_file > ${stamp}/${machine}_valid_local_eval
~/dev/trec_eval/trec_eval -q ${stamp}/${machine}_test_global_rel_file ${stamp}/${machine}_test_global_top_file > ${stamp}/${machine}_test_global_eval
~/dev/trec_eval/trec_eval -q ${stamp}/${machine}_test_local_rel_file ${stamp}/${machine}_test_local_top_file > ${stamp}/${machine}_test_local_eval
echo "Execution finished on machine $machine (${machines[machine]})"
# Print MAP / R-precision summaries ("all" rows, excluding cv1_ entries).
if [ "$get_train" == "true" ]; then
echo "Train results"
echo " G-MAP " `${grep} map ${stamp}/${machine}_train_global_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " G-RP " `${grep} R-prec ${stamp}/${machine}_train_global_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " L-MAP " `${grep} map ${stamp}/${machine}_train_local_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " L-RP " `${grep} R-prec ${stamp}/${machine}_train_local_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
fi
echo "Valid results"
echo " G-MAP " `${grep} map ${stamp}/${machine}_valid_global_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " G-RP " `${grep} R-prec ${stamp}/${machine}_valid_global_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " L-MAP " `${grep} map ${stamp}/${machine}_valid_local_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " L-RP " `${grep} R-prec ${stamp}/${machine}_valid_local_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo "Test results"
echo " G-MAP " `${grep} map ${stamp}/${machine}_test_global_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " G-RP " `${grep} R-prec ${stamp}/${machine}_test_global_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " L-MAP " `${grep} map ${stamp}/${machine}_test_local_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
echo " L-RP " `${grep} R-prec ${stamp}/${machine}_test_local_eval | ${grep} all | ${grep} -v cv1_ | cut -f3`
) &
done
wait
# 3. Final summary
echo "All machines have finished"
cd ..
bash ./scripts/summary.sh ${stamp}
|
wichtounet/word_spotting
|
scripts/rerun_all.sh
|
Shell
|
mit
| 7,987 |
set -e

# Compile assembly_review.cpp to annotated, tab-expanded assembly
# (assembly_review.s) for manual inspection; the raw compiler output
# goes to a temporary .t file that is removed afterwards.
raw=assembly_review.t
out=assembly_review.s

clang++ -I.. -DNDEBUG -O3 -c -S -fstrict-aliasing -fverbose-asm -o "$raw" assembly_review.cpp
expand "$raw" > "$out"
rm "$raw"
|
kosarev/z80
|
examples/assembly_review.sh
|
Shell
|
mit
| 189 |
#!/bin/sh
# Download, unpack, configure, build and install FFmpeg into ../ffmpeg-dev.
export FFMPEG_VERSION="3.3.2"
export FFMPEG_SRC_DIR="ffmpeg-$FFMPEG_VERSION"
export FFMPEG_SRC_ARC_FILENAME="$FFMPEG_SRC_DIR.tar.bz2"
export DOWNLOAD_URL="http://ffmpeg.org/releases/$FFMPEG_SRC_ARC_FILENAME"
# Download the archive only once.
if [ ! -f "$FFMPEG_SRC_ARC_FILENAME" ]; then
    wget "$DOWNLOAD_URL"
fi
# Unpack only if not already unpacked.
if [ ! -d "$FFMPEG_SRC_DIR" ]; then
    tar -jxvf "$FFMPEG_SRC_ARC_FILENAME"
fi
cd "$FFMPEG_SRC_DIR" || exit 1
export OUT_PREFIX=$(pwd)/../ffmpeg-dev
# Fix: the original had a trailing backslash after "--enable-shared", which made
# the following "make ..." line part of the ./configure command line.
./configure --prefix="$OUT_PREFIX" \
    --disable-gpl \
    --enable-static \
    --enable-shared
# Fix: use "&&" instead of "&" -- the original backgrounded the build and ran
# "make install" concurrently with (and typically before) a finished build.
make -j"$(grep -c processor /proc/cpuinfo)" && make install
cd ..
rm -rf "$FFMPEG_SRC_DIR"
|
GinRyan/ff_simple_tutorial
|
build_ffmpeg.sh
|
Shell
|
mit
| 608 |
#!/bin/sh
# CocoaPods-generated "embed frameworks" build phase script (regenerated by
# 'pod install'; avoid manual edits).
# NOTE(review): bash-only features ([[ ]], PIPESTATUS) appear later in this file
# despite the #!/bin/sh shebang; this works where /bin/sh is bash (macOS) --
# confirm for other environments.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active platform (used for Xcode < 7).
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework <path> -- copies a built framework into the app's
# Frameworks folder, strips architectures invalid for the current build,
# re-signs it, and (Xcode < 7 only) embeds linked Swift runtime dylibs.
install_framework()
{
# Resolve the framework source: built products dir by full path, by basename,
# or the literal argument.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked framework to its real location before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
# Locate the framework's executable: <name>.framework/<name>, falling back to
# a bare binary directly in the destination.
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
# (strip_invalid_archs and code_sign_if_enabled are defined later in this file;
# that is fine because install_framework is only invoked at the bottom.)
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
# No-op unless a code-sign identity is set and signing is both required and allowed.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
# strip_invalid_archs <binary> -- removes, in place via lipo, every architecture
# slice not listed in $VALID_ARCHS, and reports what was stripped.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Entry point: embed the pod framework for the active build configuration.
# (Both branches install the same framework; CocoaPods emits one per configuration.)
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "Pods-MSMarksButtonList_Example/MSMarksButtonList.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-MSMarksButtonList_Example/MSMarksButtonList.framework"
fi
|
LinkRober/MSMarksButtonList
|
Example/Pods/Target Support Files/Pods-MSMarksButtonList_Example/Pods-MSMarksButtonList_Example-frameworks.sh
|
Shell
|
mit
| 3,588 |
#!/usr/bin/env sh
# Install Emacs, APEL, and the trr22 typing trainer on Ubuntu 12.04 / 14.04,
# translating the trr text menu (CONTENTS) from Japanese to English.
# install emacs
if ! [ -f /usr/bin/emacs ]; then
sudo apt-get install emacs
fi
# get ubuntu version
UBUNTU_VER=`lsb_release -r | awk '{print $0 = substr($0, 10)}'`
if [ "$UBUNTU_VER" = "14.04" ]; then
EMACS_VER="24.3"
elif [ "$UBUNTU_VER" = "12.04" ]; then
EMACS_VER="23.3"
else
echo "Unsupported os version."
# Fix: "return" is only valid inside a function (or a sourced script); at
# script top level it is an error in many shells. Exit with failure instead.
exit 1
fi
# install apel (download/unpack happen unconditionally; only the build+install
# step is skipped when poe.el is already present)
wget http://git.chise.org/elisp/dist/apel/apel-10.8.tar.gz
tar zxf apel-10.8.tar.gz
if ! [ -f /usr/local/share/emacs/${EMACS_VER}/site-lisp/emu/poe.el ]; then
cd apel-10.8
make clean
make
sudo make install
cd ..
fi
# install trr
if ! [ -d /usr/share/emacs/site-lisp/trr22 ]; then
# download trr
# NOTE(review): googlecode.com file hosting is defunct (Google Code shut down);
# this URL likely needs to point at a mirror now.
wget https://trr22.googlecode.com/files/trr22_0.99-5.tar.gz
tar zxf trr22_0.99-5.tar.gz
cd trr22-0.99
# install & use nkf (convert CONTENTS to UTF-8 before the sed translations)
if ! [ -f /usr/bin/nkf ]; then
sudo apt-get install nkf
fi
sudo nkf -w --overwrite CONTENTS
# translate Japanese to English
sed -i "s/ふつう/Normal/g" CONTENTS
sed -i "s/やや難/Hard/g" CONTENTS
sed -i "s/やや何/Hard/g" CONTENTS
sed -i "s/推奨/Recommend/g" CONTENTS
sed -i "s/安定してる/Stable/g" CONTENTS
sed -i "s/見出しが多い/Lots_of_headers/g" CONTENTS
sed -i "s/日本国憲法/Japan_Constitution/g" CONTENTS
sed -i "s/合衆国憲法/USA_Constitution/g" CONTENTS
sed -i "s/C言語/C_programs/g" CONTENTS
sed -i "s/括弧が多い/Lots_of_parentheses/g" CONTENTS
sed -i "s/Java言語/Java_programs/g" CONTENTS
sed -i "s/いくつかの記号/Some_symbols/g" CONTENTS
sed -i "s/Python言語/Python_programs/g" CONTENTS
# wrong text filename
sed -i "s/EmacsLisp/Elisp_programs/g" CONTENTS
# change Makefile
sed -i "s/japanese = t/japanese = nil/g" Makefile
cp ../apel-10.8/*.el .
make clean
# MAKE_FLAGS is deliberately expanded unquoted below so each line becomes a
# separate make variable assignment.
MAKE_FLAGS="installer=${USER}
TRRDIR=/var/lib/trr22
LISPDIR=/usr/share/emacs/site-lisp/trr22
INFODIR=/usr/share/info
BINDIR=/usr/share/emacs/site-lisp/trr22
SED=/bin/sed GREP=/bin/grep"
make all ${MAKE_FLAGS}
sudo make install ${MAKE_FLAGS}
sudo cp -r record /var/lib/trr22/
sudo cp -r text /var/lib/trr22
fi
# add trr and apel lisp files to lisp-path
cat <<-EOF
Okay, the installation was successfully ended.
Finally, please add lines below to your emacs config file. (ex. ~/.emacs.d/init.el or ~/.emacs)
(add-to-list 'load-path "/usr/share/emacs/site-lisp/trr22")
(add-to-list 'load-path "/usr/local/share/emacs/${EMACS_VER}/site-lisp/emu")
(autoload 'trr "/usr/share/emacs/site-lisp/trr22/trr" nil t)
Now you can play trr on your emacs by "$ emacs -f trr"
EOF
|
wkentaro/Install-trr
|
install.sh
|
Shell
|
mit
| 2,650 |
#!/bin/bash
# Extract (email, full-name) record pairs from contacts.vcf into a temp file.
# Each accepted record is written as three lines: email, name, "@@@@@".
# NOTE(review): the temp file path is never printed or removed here --
# presumably a later stage consumes it; confirm against the caller.
tempaddy=$(mktemp)
# Fix: "read -r" so backslash sequences in vCard data are not mangled, and
# "$tempaddy" quoted in every redirection.
while read -r line; do
    # using case to match nicely
    case "$line" in
        "FN:"*)
            # Read the line following FN: (presumably the structured N: field --
            # confirm against the vcf layout) and swap "last;first" to "first last".
            read -r line
            tmpfn=$(echo "$line" | cut -d ":" -f 2 | awk -F ';' '{print $2" "$1}')
            # The next line is expected to carry the email property.
            read -r line
            tmpemail=$(echo "$line" | sed 's/"\(.*\)"/\1/g' | awk -F ':' '{print $2}')
            case $tmpemail in
                *"@"*) # vaguely legit email format
                    if [ "$tmpfn" != " " ]; then
                        tmpfn=${tmpfn//$'\n'/} # Remove all newlines.
                    else
                        tmpfn="No Name Provided"
                    fi
                    tmpemail=${tmpemail//$'\n'/} # Remove all newlines.
                    tmpemail=${tmpemail//[[:space:]]/}
                    echo "$tmpemail" >> "$tempaddy"
                    echo "$tmpfn" >> "$tempaddy"
                    echo "@@@@@" >> "$tempaddy"
                    ;;
            esac
            ;;
    esac
done < contacts.vcf
|
uriel1998/gpgfinder
|
test.sh
|
Shell
|
mit
| 787 |
#!/bin/sh
# Run as root (required because of the chown / chgrp operations below).
# End-to-end exercise of treedeploy: deploy a tree, verify it, damage one file,
# then check and repair. Leading backslashes bypass any shell aliases.
echo "#-- remove srcFolder, destFolder"
\rm -fr srcFolder destFolder
\tar zxfp srcFolder.tgz
echo "#-- srcFoler を作成しました"
cd srcFolder
# Record the reference tree listing of srcFolder/cont.
\tree -iQfaplug cont > ../tree-cont.txt
echo "#-- srcFoler/cont の tree 結果を作成しました (tree-cont.txt)"
\chmod -R 777 cont
\chown -R root cont
\chgrp -R wheel cont
echo "#-- srcFoler/cont のファイル属性(オーナー、グループ、プロテクション) を (root, wheel, 777) に変更しました。"
# NOTE(review): this creates srcFolder/XXX.txt (the current dir is srcFolder),
# while the message below says srcFolder/cont/XXX.txt -- confirm which is intended.
\echo > XXX.txt
echo "#-- srcFoler/cont/XXX.txt を追加しました。"
echo "#-- [srcFoler/cont/XXX.txt は ファイル一覧表には含まれていないので、 destFolder/cont には 配置されません。]"
cd ..
# Deploy srcFolder/cont into destFolder/cont according to the recorded listing.
ruby ../lib/treedeploy/treedeploy.rb deploy srcFolder destFolder cont tree-cont.txt
echo "#-- srcFoldle/cont 内容を destFolder/cont に treedeploy をつかって複製しました"
cd destFolder
tree -iQfaplug cont > ../tree-cont-dest.txt
echo "#-- destFolder/cont の tree 結果を作成しました (tree-cont-dest.txt)"
cd ..
# Compare the two listings; an empty diff means the deploy reproduced the tree.
echo "#-- 2 つの tree 結果を比較します。(↓ に何も表示されなければ OK です)"
echo "#--- diff tree-cont.txt tree-cont-dest.txt"
\diff tree-cont.txt tree-cont-dest.txt
# Deliberately change one file's mode, then check / repair / re-check.
chmod 777 destFolder/cont/444.txt
echo "#--- destFolder/cont/444.txt を chmod 777 にしました"
\echo "#--- destFoldre をチェックします。(↓ に4444.txt について報告されていれば OK です)"
ruby ../lib/treedeploy/treedeploy.rb check destFolder cont tree-cont.txt
\echo "#--- destFoldre を修復します。(↓ に 444.txt について報告されていれば OK です)"
ruby ../lib/treedeploy/treedeploy.rb repair destFolder cont tree-cont.txt
\echo "#--- destFoldre をチェックします。(↓ に何も報告されなければ OK です)"
ruby ../lib/treedeploy/treedeploy.rb check destFolder cont tree-cont.txt
#--- End of File ---
|
katoy/treedeploy
|
work/run-sample.sh
|
Shell
|
mit
| 1,995 |
#!/bin/bash
# Transfer-learning experiments: fine-tune the "our" model from the Wiki_1.1
# checkpoint, five repetitions per dataset; each run gets a distinct --uid so
# results do not collide.
# our model BBN dataset
for ((i=1; i<=5; i++)); do
time python main_our.py --dataset=BBN --data_directory=~/EACL-2017/fnet/data/processed/f3/ --char_embedding_size=200 --rnn_hidden_neurons=100 --char_rnn_hidden_neurons=200 --keep_prob=0.5 --learning_rate=0.0005 --joint_embedding_size=500 --epochs=10 --batch_size=1000 --use_clean --use_mention --finetune --finetune_directory=../ckpt/Wiki_1.1 --uid=T_BBN_model.$i
done
# our model OntoNotes dataset (fewer epochs, smaller batches than BBN)
for ((i=1; i<=5; i++)); do
time python main_our.py --dataset=OntoNotes --data_directory=~/EACL-2017/fnet/data/processed/f3/ --char_embedding_size=200 --rnn_hidden_neurons=100 --char_rnn_hidden_neurons=200 --keep_prob=0.5 --learning_rate=0.0005 --joint_embedding_size=500 --epochs=5 --batch_size=800 --use_clean --use_mention --finetune --finetune_directory=../ckpt/Wiki_1.1 --uid=T_OntoNotes_model.$i
done
|
abhipec/fnet
|
src/scripts/transfer_learning_model.bash
|
Shell
|
mit
| 875 |
#!/usr/bin/env bash
# This script generates a package and submits it to the Python Package Index
# Copyright (C) 2017 Marc Bourqui
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#==============================================================================
#title :pypi_packager.sh
#description :This script will build a source distribution and wheel. It
# converts a README.md to README.rst for nice rendering on PyPI.
#author :https://github.com/mbourqui
#licence :GNU GPL-3.0
#date :20170526
#version :1.1
#usage :bash pypi_packager.sh
#requires :pandoc
#notes :In case of submission to PyPI, ~/.pypirc must be set
# accordingly
#==============================================================================
# Script identity and package naming, derived from the working directory.
PROGRAM_NAME=$(basename "$0")
VERSION=1.1
# Fix: quote "$(pwd)" so project paths containing spaces don't break basename.
PROJECT_NAME=$(basename "$(pwd)")
PACKAGE_NAME=${PROJECT_NAME//-/_} # Replace all - with _ (python package naming)
# Print the GPL notice plus the usage/options text to stdout.
usage() {
echo "$PROGRAM_NAME Copyright (C) 2017 Marc Bourqui
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it under certain
conditions, see <http://www.gnu.org/licenses/> for details.
Script to build source distribution and wheel for a python package. Also
converts a README.md to README.rst thanks to pandoc for nice rendering on PyPI.
Usage: $PROGRAM_NAME [-h,--help,-v,--version] [-s,--submit|-t,--test]
Options:
-s, --submit upload the package to PyPI. Requires ~/.pypirc to be set.
-t, --test upload the package to TestPyPI. Requires ~/.pypirc to be
set.
-h, --help display this help and exit
-v, --version output version information and exit"
}
# Parse arguments with GNU getopt (supports the long options above).
TEMP=$(getopt -n "$PROGRAM_NAME" -o sthv --long submit,test,help,version -- "$@")
if [ $? != 0 ] ; then usage >&2 ; exit 1 ; fi
eval set -- "$TEMP"
while true; do
    case $1 in
        -s|--submit)
            SUBMIT=1; shift; continue
            ;;
        -t|--test)
            TEST=1; shift; continue
            ;;
        -h|--help)
            usage
            exit
            ;;
        -v|--version)
            echo "$VERSION"
            exit
            ;;
        --)
            # no more arguments to parse
            shift
            break
            ;;
        *)
            break
            ;;
    esac
done
eval set -- "$@"
# -s and -t target different indexes and are mutually exclusive.
# Fix: replaced the deprecated/ambiguous "[ ... -a ... ]" with "[[ ... && ... ]]".
if [[ -n "$SUBMIT" && -n "$TEST" ]]; then
    echo "ERROR: Incompatible options"
    echo
    usage
    exit 1
fi
# Clear previous compilations to prevent potential issues and limit disk space
# usage
rm -f README.rst
rm -rf dist/ build/ "${PACKAGE_NAME}.egg-info/"
# Generate doc as restructured text for nice PyPI rendering
pandoc --from=markdown --to=rst --output=README.rst README.md
# Source distribution
python setup.py sdist
# Wheel
python setup.py bdist_wheel
if [ -n "$SUBMIT" ]; then
    # Pre-registration to PyPI is no longer required or supported, upload
    # directly
    twine upload dist/*
elif [ -n "$TEST" ]; then
    # Upload to TestPyPI, then verify the package installs from there
    python setup.py register -r https://testpypi.python.org/pypi
    twine upload dist/* -r testpypi
    pip install -i https://testpypi.python.org/pypi "$PACKAGE_NAME"
fi
|
mbourqui/django-publications-bootstrap
|
pypi_packager.sh
|
Shell
|
mit
| 3,815 |
#!/bin/sh
#
# Just a wrapper around autoreconf to generate the configuration
# scripts after a fresh repository clone/checkout.
#
# This script does *not* call configure (as usually done in other
# projects) because this would prevent VPATH builds.
# -i installs missing auxiliary files, -s symlinks them, -Wall enables all warnings.
autoreconf -is -Wall
printf "Now run configure to customize your building\n"
|
ntd/luasyslog
|
autogen.sh
|
Shell
|
mit
| 328 |
#!/bin/sh
# Concatenate the jQuery environment bundle into a single minified file,
# with a newline separator after each source so line comments can't swallow
# the following file's first statement.
OUTPUT_FILE="jquery-env.min.js"
# Fix: 'echo "\n"' is non-portable -- bash writes a literal backslash-n into
# the bundle while dash writes two newlines; printf '\n' emits exactly one
# newline everywhere. The repeated cat/echo pairs are folded into a loop.
: > "$OUTPUT_FILE"
for src in \
    jquery/jquery-3.1.0.min.js \
    jquery/moment.min.js \
    jquery/jquery-daterangepicker.min.js \
    jquery/jquery.jplayer.min.js
do
    cat "$src" >> "$OUTPUT_FILE"
    printf '\n' >> "$OUTPUT_FILE"
done
|
bfrigon/yaam
|
root/include/js/concat.sh
|
Shell
|
mit
| 346 |
#! /bin/bash
# Reads data file into Hive, then makes tables out
# of everything. Outputs aggregates to a file, converted to CSV.
hive -f t_hive.sql
hive -e 'select * from group_aggs' > aggs.tsv
# Convert the tab-separated dump to quoted CSV:
# double embedded quotes, wrap each line in quotes, turn tabs into ",".
# (perl reads the file directly; the original "cat | perl" was a useless use of cat)
perl -lpe 's/"/""/g; s/^|$/"/g; s/\t/","/g' aggs.tsv > aggs.csv
rm aggs.tsv
|
christopheraden/Explorations-into-Computational-Statistics
|
HW2/Hive/get_group_means.sh
|
Shell
|
mit
| 396 |
# config <path>.new -- standard Slackware doinst.sh config installer.
# If <path> does not exist yet, the shipped .new file is moved into place.
# If <path> exists with identical content, the redundant .new copy is removed.
# Otherwise the .new copy is left for the admin to merge manually.
# Fix: all expansions are now quoted, so paths with spaces no longer break.
config() {
  NEW="$1"
  OLD="$(dirname "$NEW")/$(basename "$NEW" .new)"
  # If there's no config file by that name, mv it over:
  if [ ! -r "$OLD" ]; then
    mv "$NEW" "$OLD"
  elif [ "$(md5sum < "$OLD")" = "$(md5sum < "$NEW")" ]; then
    # toss the redundant copy
    rm "$NEW"
  fi
  # Otherwise, we leave the .new copy for the admin to consider...
}
# Install/merge each shipped .new config file (paths are relative to the
# package install root, the usual doinst.sh convention).
config etc/xdg/bashrun2/bashrun2.rc.new
config etc/xdg/bashrun2/plugins/terminal.rc.new
config etc/bash_completion.d/bashrun2.new
|
panosmdma/SlackOnly-SlackBuilds
|
desktop/bashrun2/doinst.sh
|
Shell
|
mit
| 478 |
#!/usr/bin/env bash
# Placeholder startup hook: the redis-server launch is intentionally disabled.
# Uncomment the two lines below to start redis during provisioning.
#echo "Start redis-server"
#su - root -c "redis-server"
|
savchukoleksii/beaversteward
|
puphpet/files/startup-always/start_redis.sh
|
Shell
|
mit
| 75 |
#!/bin/bash
# Beta deployment driver: load shared vars, run pre/main deploy steps, and
# always run the post-deploy hook on exit.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# Fix: the original executed "$DIR/vars.sh" as a child process, so nothing it
# set (e.g. DEPLOY_DIR) could reach this script even though $DEPLOY_DIR is used
# on the very next line; sourcing is almost certainly the intent.
source "$DIR/vars.sh"
export DEPLOY_DIR=$DEPLOY_DIR/beta_deploy
function finish {
  /var/www/ssg/post_deploy.sh
}
# Fix: register the EXIT trap *before* the deploy steps; the original only
# registered it after run_deploy.sh had already completed, so post_deploy
# never fired if the deploy aborted early.
trap finish EXIT
"$DIR/pre_deploy.sh"
"$DIR/run_deploy.sh"
|
MarshallOfSound/deployment-tools
|
bash/live_deploy.sh
|
Shell
|
mit
| 230 |
#!/bin/bash
# Print the machine's first LAN IPv4 address, prefixed with "ⓛ".
# BSD/macOS path uses ifconfig; Linux path uses iproute2's "ip addr".
dir=$( cd "`dirname $0`"; pwd )
# shell.sh provides the shell_is_bsd / shell_is_osx platform predicates.
source "$dir/shell.sh" 2>/dev/null || exit 1
if shell_is_bsd || shell_is_osx ; then
# List non-loopback interface names, then take the first interface that has
# an inet (IPv4) address.
all_nics=$(ifconfig 2>/dev/null | awk -F':' '/^[a-z]/ && !/^lo/ { print $1 }')
for nic in ${all_nics[@]}; do
ipv4s_on_nic=$(ifconfig ${nic} 2>/dev/null | awk '$1 == "inet" { print $2 }')
for lan_ip in ${ipv4s_on_nic[@]}; do
[[ -n "${lan_ip}" ]] && break
done
[[ -n "${lan_ip}" ]] && break
done
else
# Get the names of all attached NICs.
all_nics="$(ip addr show | cut -d ' ' -f2 | tr -d :)"
all_nics=(${all_nics[@]//lo/}) # Remove lo interface.
for nic in "${all_nics[@]}"; do
# Parse IP address for the NIC.
lan_ip="$(ip addr show ${nic} | grep '\<inet\>' | tr -s ' ' | cut -d ' ' -f3)"
# Trim the CIDR suffix.
lan_ip="${lan_ip%/*}"
# Only display the last entry
lan_ip="$(echo "$lan_ip" | tail -1)"
[ -n "$lan_ip" ] && break
done
fi
# Fall back to "N/a" when no interface yielded an address.
echo "ⓛ ${lan_ip-N/a}"
|
jeoygin/gadget
|
shell/lan-ip.sh
|
Shell
|
mit
| 1,020 |
#!/bin/sh
# CocoaPods-generated "embed frameworks" build phase script (regenerated by
# 'pod install'; avoid manual edits). Copies each pod framework into the app
# bundle, strips invalid architectures, re-signs, and embeds Swift runtime
# dylibs on Xcode < 7.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework <path> -- copy one framework into the Frameworks folder,
# strip, re-sign, and (Xcode < 7) embed Swift runtime libraries.
install_framework()
{
# Resolve the source: built products dir by full path, by basename, or literal.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked framework to its real location before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
# Locate the framework's executable, falling back to a bare binary.
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
# No-op unless an identity is set and signing is both required and allowed.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
# Removes (via lipo, in place) every slice not listed in $VALID_ARCHS.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Entry point: embed the pod framework for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "Pods-CAHImageCacher_Tests/CAHImageCacher.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-CAHImageCacher_Tests/CAHImageCacher.framework"
fi
|
chaseholland/CAHImageCacher
|
Example/Pods/Target Support Files/Pods-CAHImageCacher_Tests/Pods-CAHImageCacher_Tests-frameworks.sh
|
Shell
|
mit
| 3,572 |
#!/bin/bash
#title :sync_remote_laravel.sh
#description :The script will sync for any updates
#author :Ajay Krishna Teja Kavuri
#date :02062017
#version :0.1
#==============================================================================
# Sunc the folders
sudo rsync -tr /home/pseudoaj/GitHubRepos/MyInsightRepo/laravel/auth/* /var/www/html/auth/
# run the mix to update style
cd /var/www/html/auth/
sudo npm run dev
# Comeback
cd /home/pseudoaj/GitHubRepos/MyInsightRepo/setup
|
PseudoAj/MyInsightRepo
|
setup/sync_local_laravel.sh
|
Shell
|
mit
| 524 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.