| code (string, 2-1.05M chars) | repo_name (string, 5-110 chars) | path (string, 3-922 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 2-1.05M) |
|---|---|---|---|---|---|
#!/bin/sh
echo "PPA repos are going to be installed. Proceed with installation pressing Enter."
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
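# The key added above and the sources entry below register Google's Chrome apt repository.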
sudo sh -c 'echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list'
sudo apt-get update
echo "Installing GNOME Panel, vim, Guake, Chrome and additional tools..."
sudo apt-get -y install git gnome-panel vim guake google-chrome-stable xsel ibus-mozc
echo "Launching ibus-setup..."
ibus-setup
echo "Logout this session and re-login with GNOME Panel."
| puhitaku/ubuntify | base.sh | Shell | unlicense | 584 |
#!/bin/bash
# Copyright 2014-2015 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
GIT_STATUS_NOT_CLEAN_MSG="Git status of current directory is not clean"
GIT_STATUS_CONSIDER_CLEAN_MSG="Consider removing all untracked files, locally committing all changes and running $0 again"
clear
gitignore_files_list=`find . -name .gitignore`
if [ "$gitignore_files_list" != "./.gitignore" ]
then
echo -e "\n\e[1;33mInvalid .gitignore configuration\e[0m\n"
echo -e -n ".gitignore files list:\t"
echo $gitignore_files_list
echo
exit 1
fi
if [ "`git status --porcelain 2>&1 | wc -l`" != "0" ]
then
echo -e "\n \e[1;90m$GIT_STATUS_NOT_CLEAN_MSG:\n"
git status
echo -e "\n\n $GIT_STATUS_CONSIDER_CLEAN_MSG.\e[0m\n"
fi
ok_to_push=1
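# Check whether the current branch has a remote-tracking counterpart on origin.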
current_branch=`git branch | grep "^* " | cut -d ' ' -f 2`
git branch -r | grep "^ *origin/$current_branch$" 2>&1 > /dev/null
have_remote=$?
if [ $have_remote -eq 0 ]
then
base_ref="origin/$current_branch"
echo "Pulling..."
make pull
status_code=$?
if [ $status_code -ne 0 ]
then
echo "Pull failed"
exit 1
fi
else
base_ref=`git merge-base master $current_branch`
status_code=$?
if [ $status_code -ne 0 ]
then
echo "Cannot determine merge-base for '$current_branch' and 'master' branches."
exit 1
fi
fi
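# Build a space-separated list of the commit hashes between the base ref and the branch tip, oldest first.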
commits_to_push=`git log $base_ref..$current_branch | grep "^commit [0-9a-f]*$" | awk 'BEGIN { s = ""; } { s = $2" "s; } END { print s; }'`
echo $commits_to_push | grep "[^ ]" >&/dev/null
status_code=$?
if [ $status_code -ne 0 ]
then
echo "Nothing to push"
exit 0
fi
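# On Ctrl-C, return to the original branch before exiting.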
trap ctrl_c INT
function ctrl_c() {
git checkout $current_branch >&/dev/null
exit 1
}
echo
echo "===== Starting pre-push commit testing series ====="
echo
echo "Commits list: $commits_to_push"
echo
for commit_hash in $commits_to_push
do
git checkout $commit_hash >&/dev/null
status_code=$?
if [ $status_code -ne 0 ]
then
echo "git checkout $commit_hash failed"
exit 1
fi
echo " > Testing $commit_hash"
echo -n " > "
git log --pretty=format:"%H %s" | grep $commit_hash | grep -o " .*"
echo
make -s -j precommit 2>&1
status_code=$?
if [ $status_code -ne 0 ]
then
echo "Pre-commit quality testing for '$commit_hash' failed"
echo
ok_to_push=0
break
fi
echo "Pre-commit quality testing for '$commit_hash' passed successfully"
done
git checkout $current_branch >&/dev/null
echo
echo "Pre-commit testing passed successfully"
echo
if [ $ok_to_push -eq 1 ]
then
if [ "`git status --porcelain 2>&1 | wc -l`" == "0" ]
then
echo "Pushing..."
echo
git push -u origin $current_branch
status_code=$?
if [ $status_code -eq 0 ]
then
echo -e "\n\e[0;32m Pushed successfully\e[0m\n"
else
echo -e "\n\e[1;33m Push failed\e[0m"
fi
exit $status_code
else
echo -e "\e[1;33m $GIT_STATUS_NOT_CLEAN_MSG. $GIT_STATUS_CONSIDER_CLEAN_MSG.\e[0m\n"
exit 1
fi
else
echo -e "\e[1;33mPre-commit testing not passed. Cancelling push.\e[0m"
exit 1
fi
| qdk0901/iotjs-openwrt | deps/jerry/tools/git-scripts/push.sh | Shell | apache-2.0 | 3,559 |
#!/bin/bash
# show the usage
if [ $# -ne 5 ]; then
echo "Usage: orion_update_context.sh <orion_host> <orion_port> <auth_token> <chatroom> <message>";
exit 1;
fi
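# Example (hypothetical values): ./orion_update_context.sh orion.example.com 1026 "$TOKEN" chatroom1 "hello"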
# get the arguments
HOST=$1
PORT=$2
TOKEN=$3
CHATROOM=$4
MSG=$5
# do the append
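# The heredoc below is an NGSI v1 updateContext payload that sets the "msg" attribute of the chatroom entity.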
(curl $HOST:$PORT/v1/updateContext -s -S --header "X-Auth-Token: $TOKEN" --header 'Content-Type: application/json' --header 'Accept: application/json' -d @-) <<EOF
{
"contextElements": [
{
"type": "ChatMessageLastOnly",
"isPattern": "false",
"id": "$CHATROOM",
"attributes": [
{
"name": "msg",
"type": "string",
"value": "$MSG"
}
]
}
],
"updateAction": "UPDATE"
}
EOF
| frbattid/fiware-chatrooms-demo | scripts/orion_update_context.sh | Shell | apache-2.0 | 773 |
#!/bin/sh
# Copyright Istio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
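# Generate two self-signed test certificates: cert.pem/key.pem with CN=localhost, and cert2.pem/key2.pem with a different CN; both carry a subjectAltName of DNS:localhost.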
openssl req -new -newkey rsa:4096 -x509 -sha256 \
-days 3650 -nodes -out cert.pem -keyout key.pem \
-subj "/C=US/ST=Denial/L=Ether/O=Dis/CN=localhost/SAN=localhost" \
-addext "subjectAltName = DNS:localhost"
openssl req -new -newkey rsa:4096 -x509 -sha256 \
-days 3650 -nodes -out cert2.pem -keyout key2.pem \
-subj "/C=US/ST=Denial/L=Ether/O=Dis/CN=anotherhost" \
-addext "subjectAltName = DNS:localhost"
| istio/istio | pilot/pkg/model/test/testcert/generate.sh | Shell | apache-2.0 | 1,038 |
#!/bin/bash
#
# Runs all tests with Kudu server in docker containers.
#
# ./run_kudu_tests.sh <numberOfTabletServers> <inferSchemaPrefix>
#
# <inferSchemaPrefix> InferSchema setup: "null" = disabled,
# "" = enabled, using empty prefix,
# "presto::" = enabled, using standard prefix
#
# If arguments are missing, defaults are 1 tablet server and disabled inferSchema.
set -euo pipefail -x
export KUDU_VERSION="1.10.0"
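# Use the Docker bridge gateway address so tests running on the host can reach the Kudu masters exposed by the containers.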
export KUDU_CLUSTERIP=$(docker network inspect bridge --format='{{index .IPAM.Config 0 "Gateway"}}')
PROJECT_ROOT="${BASH_SOURCE%/*}/../.."
DOCKER_COMPOSE_LOCATION="${BASH_SOURCE%/*}/../conf/docker-compose.yml"
# emulate schemas ?
if [ $# -lt 2 ]
then
TEST_SCHEMA_EMULATION_PREFIX=null
else
TEST_SCHEMA_EMULATION_PREFIX=$2
fi
function start_docker_container() {
# stop already running containers
docker-compose -f "${DOCKER_COMPOSE_LOCATION}" down || true
# start containers
docker-compose -f "${DOCKER_COMPOSE_LOCATION}" up -d
}
function cleanup_docker_container() {
docker-compose -f "${DOCKER_COMPOSE_LOCATION}" down
}
start_docker_container
# run product tests
pushd ${PROJECT_ROOT}
# sleep to allow cluster be up
sleep 15s
set +e
./mvnw -pl presto-kudu test -P integration \
-Dkudu.client.master-addresses=${KUDU_CLUSTERIP}:7051,${KUDU_CLUSTERIP}:7151,${KUDU_CLUSTERIP}:7251 \
-Dkudu.schema-emulation.prefix=${TEST_SCHEMA_EMULATION_PREFIX}
EXIT_CODE=$?
set -e
popd
cleanup_docker_container
exit ${EXIT_CODE}
| wyukawa/presto | presto-kudu/bin/run_kudu_tests.sh | Shell | apache-2.0 | 1,527 |
#!/bin/bash
# Copyright 2018 Verily Life Sciences Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Setup file to be sourced into e2e_logging_paths*tasks.sh tests.
readonly LOGGING_PATHS_TASKS_FILE_TMPL="${TEST_DIR}/logging_paths_tasks.tsv.tmpl"
# Several of the logging paths tests specifically test the log file name
# generated, which includes the job-id which is based on the job-name,
# thus we cannot use the new --unique-job-id flag when launching these
# test jobs.
#
# This leads to flaky tests as sometimes jobs are launched concurrently
# and generate the same job identifier.
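# A per-run UUID label lets dstat queries target only the jobs launched by this test run.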
readonly LOGGING_PATHS_UNIQUE_ID="$(uuidgen)"
function logging_paths_tasks_setup::write_tasks_file() {
cat "${LOGGING_PATHS_TASKS_FILE_TMPL}" \
| util::expand_tsv_fields \
> "${TASKS_FILE}"
}
readonly -f logging_paths_tasks_setup::write_tasks_file
function logging_paths_tasks_setup::get_job_name() {
# Generate a job name from the test replacing "logging_paths" with "lp"
#
# dsub turns the job name into a Google label and turns the underscores
# into dashes, so let's start with our job names in that form.
#
# Truncate the test name at 10 characters, since that is what dsub will do
# when it generates the job-id and these logging_paths_* tests are
# specifically checking that the output log file name is generated correctly.
echo "lp_${TEST_NAME#logging_paths_}" | tr '_' '-' | cut -c1-10
}
readonly -f logging_paths_tasks_setup::get_job_name
function logging_paths_tasks_setup::run_dsub() {
run_dsub \
--name "${JOB_NAME}" \
--tasks "${TASKS_FILE}" \
--command 'echo "Test"' \
--label unique_id="${LOGGING_PATHS_UNIQUE_ID}" \
"${@}"
}
readonly -f logging_paths_tasks_setup::run_dsub
function logging_paths_tasks_setup::dstat_get_logging() {
local job_id="${1}"
local task_id="${2}"
local dstat_out=$(\
run_dstat \
--jobs "${job_id}" \
--label unique_id="${LOGGING_PATHS_UNIQUE_ID}" \
--status "*" \
--full \
--format json)
# Tasks are listed in reverse order, so use -${task_id}.
python "${SCRIPT_DIR}"/get_data_value.py \
"json" "${dstat_out}" "[-${task_id}].logging"
}
readonly -f logging_paths_tasks_setup::dstat_get_logging
function logging_paths_tasks_setup::ddel_task() {
local job_id="${1}"
run_ddel --jobs "${job_id}"
}
readonly -f logging_paths_tasks_setup::ddel_task
| DataBiosphere/dsub | test/integration/logging_paths_tasks_setup.sh | Shell | apache-2.0 | 2,915 |
#!/bin/bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e
set -x
source tensorflow/tools/ci_build/release/common.sh
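# common.sh provides the install_bazelisk and install_macos_pip_deps_no_venv helpers used below.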
install_bazelisk
# Pick a more recent version of xcode
export DEVELOPER_DIR=/Applications/Xcode_10.3.app/Contents/Developer
sudo xcode-select -s "${DEVELOPER_DIR}"
# Set up and install MacOS pip dependencies.
install_macos_pip_deps_no_venv python3.8
# Export required variables for running pip_new.sh
export OS_TYPE="MACOS"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python3.8'
export TF_BUILD_BOTH_CPU_PACKAGES=1
# Export optional variables for running pip.sh
export TF_BUILD_FLAGS="--config=release_cpu_macos --repo_env=PYTHON_BIN_PATH="$(which ${TF_PYTHON_VERSION})""
export TF_TEST_FLAGS="--define=no_tensorflow_py_deps=true --test_lang_filters=py --test_output=errors --verbose_failures=true --keep_going --test_env=TF2_BEHAVIOR=1"
export TF_TEST_TARGETS="//tensorflow/python/..."
export TF_PIP_TESTS="test_pip_virtualenv_non_clean test_pip_virtualenv_clean"
export TF_TEST_FILTER_TAGS='-nomac,-no_mac,-no_oss,-oss_serial,-no_oss_py38,-v1only,-gpu,-tpu,-benchmark-test'
#export IS_NIGHTLY=0 # Not nightly; uncomment if building from tf repo.
export TF_PROJECT_NAME="tensorflow"
export TF_PIP_TEST_ROOT="pip_test"
./tensorflow/tools/ci_build/builds/pip_new.sh
| petewarden/tensorflow | tensorflow/tools/ci_build/rel/macos/cpu_py38_pip.sh | Shell | apache-2.0 | 1,947 |
#!/bin/bash
#
# Clear out all data except seed data from the DW before
# dumping it to update the init sql script:
# (mysqldump --routines -u root mifos_ppi_dw > load_mifos_datawarehouse.sql)
#
#usage: clear_down_dw_tables.sh <DB_NAME>
DB_NAME=$1
mysql -u root ${DB_NAME} < ClearDownDWTables.sql
| mifos/bi | ETL/MifosDataWarehouseETL/clear_down_dw_tables.sh | Shell | apache-2.0 | 296 |
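-- Bulk-load TPC-H (100 GB scale) lineitem data chunks into the lineitem table via the sqlite3 shell's .import command.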
.import /local/data/tpch100g/lineitem/lineitem0000 lineitem
.import /local/data/tpch100g/lineitem/lineitem0001 lineitem
.import /local/data/tpch100g/lineitem/lineitem0002 lineitem
.import /local/data/tpch100g/lineitem/lineitem0003 lineitem
.import /local/data/tpch100g/lineitem/lineitem0004 lineitem
.import /local/data/tpch100g/lineitem/lineitem0005 lineitem
.import /local/data/tpch100g/lineitem/lineitem0006 lineitem
.import /local/data/tpch100g/lineitem/lineitem0007 lineitem
.import /local/data/tpch100g/lineitem/lineitem0008 lineitem
.import /local/data/tpch100g/lineitem/lineitem0009 lineitem
.import /local/data/tpch100g/lineitem/lineitem0010 lineitem
.import /local/data/tpch100g/lineitem/lineitem0011 lineitem
.import /local/data/tpch100g/lineitem/lineitem0012 lineitem
.import /local/data/tpch100g/lineitem/lineitem0013 lineitem
.import /local/data/tpch100g/lineitem/lineitem0014 lineitem
.import /local/data/tpch100g/lineitem/lineitem0015 lineitem
.import /local/data/tpch100g/lineitem/lineitem0016 lineitem
.import /local/data/tpch100g/lineitem/lineitem0017 lineitem
.import /local/data/tpch100g/lineitem/lineitem0018 lineitem
.import /local/data/tpch100g/lineitem/lineitem0019 lineitem
.import /local/data/tpch100g/lineitem/lineitem0020 lineitem
.import /local/data/tpch100g/lineitem/lineitem0021 lineitem
.import /local/data/tpch100g/lineitem/lineitem0022 lineitem
.import /local/data/tpch100g/lineitem/lineitem0023 lineitem
.import /local/data/tpch100g/lineitem/lineitem0024 lineitem
.import /local/data/tpch100g/lineitem/lineitem0025 lineitem
.import /local/data/tpch100g/lineitem/lineitem0026 lineitem
.import /local/data/tpch100g/lineitem/lineitem0027 lineitem
.import /local/data/tpch100g/lineitem/lineitem0028 lineitem
.import /local/data/tpch100g/lineitem/lineitem0029 lineitem
.import /local/data/tpch100g/lineitem/lineitem0030 lineitem
.import /local/data/tpch100g/lineitem/lineitem0031 lineitem
.import /local/data/tpch100g/lineitem/lineitem0032 lineitem
.import /local/data/tpch100g/lineitem/lineitem0033 lineitem
.import /local/data/tpch100g/lineitem/lineitem0034 lineitem
.import /local/data/tpch100g/lineitem/lineitem0035 lineitem
.import /local/data/tpch100g/lineitem/lineitem0036 lineitem
.import /local/data/tpch100g/lineitem/lineitem0037 lineitem
.import /local/data/tpch100g/lineitem/lineitem0038 lineitem
.import /local/data/tpch100g/lineitem/lineitem0039 lineitem
.import /local/data/tpch100g/lineitem/lineitem0040 lineitem
.import /local/data/tpch100g/lineitem/lineitem0041 lineitem
.import /local/data/tpch100g/lineitem/lineitem0042 lineitem
.import /local/data/tpch100g/lineitem/lineitem0043 lineitem
.import /local/data/tpch100g/lineitem/lineitem0044 lineitem
.import /local/data/tpch100g/lineitem/lineitem0045 lineitem
.import /local/data/tpch100g/lineitem/lineitem0046 lineitem
.import /local/data/tpch100g/lineitem/lineitem0047 lineitem
.import /local/data/tpch100g/lineitem/lineitem0048 lineitem
.import /local/data/tpch100g/lineitem/lineitem0049 lineitem
.import /local/data/tpch100g/lineitem/lineitem0050 lineitem
.import /local/data/tpch100g/lineitem/lineitem0051 lineitem
.import /local/data/tpch100g/lineitem/lineitem0052 lineitem
.import /local/data/tpch100g/lineitem/lineitem0053 lineitem
.import /local/data/tpch100g/lineitem/lineitem0054 lineitem
.import /local/data/tpch100g/lineitem/lineitem0055 lineitem
.import /local/data/tpch100g/lineitem/lineitem0056 lineitem
.import /local/data/tpch100g/lineitem/lineitem0057 lineitem
.import /local/data/tpch100g/lineitem/lineitem0058 lineitem
.import /local/data/tpch100g/lineitem/lineitem0059 lineitem
.import /local/data/tpch100g/lineitem/lineitem0060 lineitem
.import /local/data/tpch100g/lineitem/lineitem0061 lineitem
.import /local/data/tpch100g/lineitem/lineitem0062 lineitem
.import /local/data/tpch100g/lineitem/lineitem0063 lineitem
.import /local/data/tpch100g/lineitem/lineitem0064 lineitem
.import /local/data/tpch100g/lineitem/lineitem0065 lineitem
.import /local/data/tpch100g/lineitem/lineitem0066 lineitem
.import /local/data/tpch100g/lineitem/lineitem0067 lineitem
.import /local/data/tpch100g/lineitem/lineitem0068 lineitem
.import /local/data/tpch100g/lineitem/lineitem0069 lineitem
.import /local/data/tpch100g/lineitem/lineitem0070 lineitem
.import /local/data/tpch100g/lineitem/lineitem0071 lineitem
.import /local/data/tpch100g/lineitem/lineitem0072 lineitem
.import /local/data/tpch100g/lineitem/lineitem0073 lineitem
.import /local/data/tpch100g/lineitem/lineitem0074 lineitem
.import /local/data/tpch100g/lineitem/lineitem0075 lineitem
.import /local/data/tpch100g/lineitem/lineitem0076 lineitem
.import /local/data/tpch100g/lineitem/lineitem0077 lineitem
.import /local/data/tpch100g/lineitem/lineitem0078 lineitem
.import /local/data/tpch100g/lineitem/lineitem0079 lineitem
.import /local/data/tpch100g/lineitem/lineitem0080 lineitem
.import /local/data/tpch100g/lineitem/lineitem0081 lineitem
.import /local/data/tpch100g/lineitem/lineitem0082 lineitem
.import /local/data/tpch100g/lineitem/lineitem0083 lineitem
.import /local/data/tpch100g/lineitem/lineitem0084 lineitem
.import /local/data/tpch100g/lineitem/lineitem0085 lineitem
.import /local/data/tpch100g/lineitem/lineitem0086 lineitem
.import /local/data/tpch100g/lineitem/lineitem0087 lineitem
.import /local/data/tpch100g/lineitem/lineitem0088 lineitem
.import /local/data/tpch100g/lineitem/lineitem0089 lineitem
.import /local/data/tpch100g/lineitem/lineitem0090 lineitem
.import /local/data/tpch100g/lineitem/lineitem0091 lineitem
.import /local/data/tpch100g/lineitem/lineitem0092 lineitem
.import /local/data/tpch100g/lineitem/lineitem0093 lineitem
.import /local/data/tpch100g/lineitem/lineitem0094 lineitem
.import /local/data/tpch100g/lineitem/lineitem0095 lineitem
.import /local/data/tpch100g/lineitem/lineitem0096 lineitem
.import /local/data/tpch100g/lineitem/lineitem0097 lineitem
.import /local/data/tpch100g/lineitem/lineitem0098 lineitem
.import /local/data/tpch100g/lineitem/lineitem0099 lineitem
.import /local/data/tpch100g/lineitem/lineitem0100 lineitem
.import /local/data/tpch100g/lineitem/lineitem0101 lineitem
.import /local/data/tpch100g/lineitem/lineitem0102 lineitem
.import /local/data/tpch100g/lineitem/lineitem0103 lineitem
.import /local/data/tpch100g/lineitem/lineitem0104 lineitem
.import /local/data/tpch100g/lineitem/lineitem0105 lineitem
.import /local/data/tpch100g/lineitem/lineitem0106 lineitem
.import /local/data/tpch100g/lineitem/lineitem0107 lineitem
.import /local/data/tpch100g/lineitem/lineitem0108 lineitem
.import /local/data/tpch100g/lineitem/lineitem0109 lineitem
.import /local/data/tpch100g/lineitem/lineitem0110 lineitem
.import /local/data/tpch100g/lineitem/lineitem0111 lineitem
.import /local/data/tpch100g/lineitem/lineitem0112 lineitem
.import /local/data/tpch100g/lineitem/lineitem0113 lineitem
.import /local/data/tpch100g/lineitem/lineitem0114 lineitem
.import /local/data/tpch100g/lineitem/lineitem0115 lineitem
.import /local/data/tpch100g/lineitem/lineitem0116 lineitem
.import /local/data/tpch100g/lineitem/lineitem0117 lineitem
.import /local/data/tpch100g/lineitem/lineitem0118 lineitem
.import /local/data/tpch100g/lineitem/lineitem0119 lineitem
.import /local/data/tpch100g/lineitem/lineitem0120 lineitem
.import /local/data/tpch100g/lineitem/lineitem0121 lineitem
.import /local/data/tpch100g/lineitem/lineitem0122 lineitem
.import /local/data/tpch100g/lineitem/lineitem0123 lineitem
.import /local/data/tpch100g/lineitem/lineitem0124 lineitem
.import /local/data/tpch100g/lineitem/lineitem0125 lineitem
.import /local/data/tpch100g/lineitem/lineitem0126 lineitem
.import /local/data/tpch100g/lineitem/lineitem0127 lineitem
.import /local/data/tpch100g/lineitem/lineitem0128 lineitem
.import /local/data/tpch100g/lineitem/lineitem0129 lineitem
.import /local/data/tpch100g/lineitem/lineitem0130 lineitem
.import /local/data/tpch100g/lineitem/lineitem0131 lineitem
.import /local/data/tpch100g/lineitem/lineitem0132 lineitem
.import /local/data/tpch100g/lineitem/lineitem0133 lineitem
.import /local/data/tpch100g/lineitem/lineitem0134 lineitem
.import /local/data/tpch100g/lineitem/lineitem0135 lineitem
.import /local/data/tpch100g/lineitem/lineitem0136 lineitem
.import /local/data/tpch100g/lineitem/lineitem0137 lineitem
.import /local/data/tpch100g/lineitem/lineitem0138 lineitem
.import /local/data/tpch100g/lineitem/lineitem0139 lineitem
.import /local/data/tpch100g/lineitem/lineitem0140 lineitem
.import /local/data/tpch100g/lineitem/lineitem0141 lineitem
.import /local/data/tpch100g/lineitem/lineitem0142 lineitem
.import /local/data/tpch100g/lineitem/lineitem0143 lineitem
.import /local/data/tpch100g/lineitem/lineitem0144 lineitem
.import /local/data/tpch100g/lineitem/lineitem0145 lineitem
.import /local/data/tpch100g/lineitem/lineitem0146 lineitem
.import /local/data/tpch100g/lineitem/lineitem0147 lineitem
.import /local/data/tpch100g/lineitem/lineitem0148 lineitem
.import /local/data/tpch100g/lineitem/lineitem0149 lineitem
.import /local/data/tpch100g/lineitem/lineitem0150 lineitem
.import /local/data/tpch100g/lineitem/lineitem0151 lineitem
.import /local/data/tpch100g/lineitem/lineitem0152 lineitem
.import /local/data/tpch100g/lineitem/lineitem0153 lineitem
.import /local/data/tpch100g/lineitem/lineitem0154 lineitem
.import /local/data/tpch100g/lineitem/lineitem0155 lineitem
.import /local/data/tpch100g/lineitem/lineitem0156 lineitem
.import /local/data/tpch100g/lineitem/lineitem0157 lineitem
.import /local/data/tpch100g/lineitem/lineitem0158 lineitem
.import /local/data/tpch100g/lineitem/lineitem0159 lineitem
.import /local/data/tpch100g/lineitem/lineitem0160 lineitem
.import /local/data/tpch100g/lineitem/lineitem0161 lineitem
.import /local/data/tpch100g/lineitem/lineitem0162 lineitem
.import /local/data/tpch100g/lineitem/lineitem0163 lineitem
.import /local/data/tpch100g/lineitem/lineitem0164 lineitem
.import /local/data/tpch100g/lineitem/lineitem0165 lineitem
.import /local/data/tpch100g/lineitem/lineitem0166 lineitem
.import /local/data/tpch100g/lineitem/lineitem0167 lineitem
.import /local/data/tpch100g/lineitem/lineitem0168 lineitem
.import /local/data/tpch100g/lineitem/lineitem0169 lineitem
.import /local/data/tpch100g/lineitem/lineitem0170 lineitem
.import /local/data/tpch100g/lineitem/lineitem0171 lineitem
.import /local/data/tpch100g/lineitem/lineitem0172 lineitem
.import /local/data/tpch100g/lineitem/lineitem0173 lineitem
.import /local/data/tpch100g/lineitem/lineitem0174 lineitem
.import /local/data/tpch100g/lineitem/lineitem0175 lineitem
.import /local/data/tpch100g/lineitem/lineitem0176 lineitem
.import /local/data/tpch100g/lineitem/lineitem0177 lineitem
.import /local/data/tpch100g/lineitem/lineitem0178 lineitem
.import /local/data/tpch100g/lineitem/lineitem0179 lineitem
.import /local/data/tpch100g/lineitem/lineitem0180 lineitem
.import /local/data/tpch100g/lineitem/lineitem0181 lineitem
.import /local/data/tpch100g/lineitem/lineitem0182 lineitem
.import /local/data/tpch100g/lineitem/lineitem0183 lineitem
.import /local/data/tpch100g/lineitem/lineitem0184 lineitem
.import /local/data/tpch100g/lineitem/lineitem0185 lineitem
.import /local/data/tpch100g/lineitem/lineitem0186 lineitem
.import /local/data/tpch100g/lineitem/lineitem0187 lineitem
.import /local/data/tpch100g/lineitem/lineitem0188 lineitem
.import /local/data/tpch100g/lineitem/lineitem0189 lineitem
.import /local/data/tpch100g/lineitem/lineitem0190 lineitem
.import /local/data/tpch100g/lineitem/lineitem0191 lineitem
.import /local/data/tpch100g/lineitem/lineitem0192 lineitem
.import /local/data/tpch100g/lineitem/lineitem0193 lineitem
.import /local/data/tpch100g/lineitem/lineitem0194 lineitem
.import /local/data/tpch100g/lineitem/lineitem0195 lineitem
.import /local/data/tpch100g/lineitem/lineitem0196 lineitem
.import /local/data/tpch100g/lineitem/lineitem0197 lineitem
.import /local/data/tpch100g/lineitem/lineitem0198 lineitem
.import /local/data/tpch100g/lineitem/lineitem0199 lineitem
.import /local/data/tpch100g/lineitem/lineitem0200 lineitem
.import /local/data/tpch100g/lineitem/lineitem0201 lineitem
.import /local/data/tpch100g/lineitem/lineitem0202 lineitem
.import /local/data/tpch100g/lineitem/lineitem0203 lineitem
.import /local/data/tpch100g/lineitem/lineitem0204 lineitem
.import /local/data/tpch100g/lineitem/lineitem0205 lineitem
.import /local/data/tpch100g/lineitem/lineitem0206 lineitem
.import /local/data/tpch100g/lineitem/lineitem0207 lineitem
.import /local/data/tpch100g/lineitem/lineitem0208 lineitem
.import /local/data/tpch100g/lineitem/lineitem0209 lineitem
.import /local/data/tpch100g/lineitem/lineitem0210 lineitem
.import /local/data/tpch100g/lineitem/lineitem0211 lineitem
.import /local/data/tpch100g/lineitem/lineitem0212 lineitem
.import /local/data/tpch100g/lineitem/lineitem0213 lineitem
.import /local/data/tpch100g/lineitem/lineitem0214 lineitem
.import /local/data/tpch100g/lineitem/lineitem0215 lineitem
.import /local/data/tpch100g/lineitem/lineitem0216 lineitem
.import /local/data/tpch100g/lineitem/lineitem0217 lineitem
.import /local/data/tpch100g/lineitem/lineitem0218 lineitem
.import /local/data/tpch100g/lineitem/lineitem0219 lineitem
.import /local/data/tpch100g/lineitem/lineitem0220 lineitem
.import /local/data/tpch100g/lineitem/lineitem0221 lineitem
.import /local/data/tpch100g/lineitem/lineitem0222 lineitem
.import /local/data/tpch100g/lineitem/lineitem0223 lineitem
.import /local/data/tpch100g/lineitem/lineitem0224 lineitem
.import /local/data/tpch100g/lineitem/lineitem0225 lineitem
.import /local/data/tpch100g/lineitem/lineitem0226 lineitem
.import /local/data/tpch100g/lineitem/lineitem0227 lineitem
.import /local/data/tpch100g/lineitem/lineitem0228 lineitem
.import /local/data/tpch100g/lineitem/lineitem0229 lineitem
.import /local/data/tpch100g/lineitem/lineitem0230 lineitem
.import /local/data/tpch100g/lineitem/lineitem0231 lineitem
.import /local/data/tpch100g/lineitem/lineitem0232 lineitem
.import /local/data/tpch100g/lineitem/lineitem0233 lineitem
.import /local/data/tpch100g/lineitem/lineitem0234 lineitem
.import /local/data/tpch100g/lineitem/lineitem0235 lineitem
.import /local/data/tpch100g/lineitem/lineitem0236 lineitem
.import /local/data/tpch100g/lineitem/lineitem0237 lineitem
.import /local/data/tpch100g/lineitem/lineitem0238 lineitem
.import /local/data/tpch100g/lineitem/lineitem0239 lineitem
.import /local/data/tpch100g/lineitem/lineitem0240 lineitem
.import /local/data/tpch100g/lineitem/lineitem0241 lineitem
.import /local/data/tpch100g/lineitem/lineitem0242 lineitem
.import /local/data/tpch100g/lineitem/lineitem0243 lineitem
.import /local/data/tpch100g/lineitem/lineitem0244 lineitem
.import /local/data/tpch100g/lineitem/lineitem0245 lineitem
.import /local/data/tpch100g/lineitem/lineitem0246 lineitem
.import /local/data/tpch100g/lineitem/lineitem0247 lineitem
.import /local/data/tpch100g/lineitem/lineitem0248 lineitem
.import /local/data/tpch100g/lineitem/lineitem0249 lineitem
.import /local/data/tpch100g/lineitem/lineitem0250 lineitem
.import /local/data/tpch100g/lineitem/lineitem0251 lineitem
.import /local/data/tpch100g/lineitem/lineitem0252 lineitem
.import /local/data/tpch100g/lineitem/lineitem0253 lineitem
.import /local/data/tpch100g/lineitem/lineitem0254 lineitem
.import /local/data/tpch100g/lineitem/lineitem0255 lineitem
.import /local/data/tpch100g/lineitem/lineitem0256 lineitem
.import /local/data/tpch100g/lineitem/lineitem0257 lineitem
.import /local/data/tpch100g/lineitem/lineitem0258 lineitem
.import /local/data/tpch100g/lineitem/lineitem0259 lineitem
.import /local/data/tpch100g/lineitem/lineitem0260 lineitem
.import /local/data/tpch100g/lineitem/lineitem0261 lineitem
.import /local/data/tpch100g/lineitem/lineitem0262 lineitem
.import /local/data/tpch100g/lineitem/lineitem0263 lineitem
.import /local/data/tpch100g/lineitem/lineitem0264 lineitem
.import /local/data/tpch100g/lineitem/lineitem0265 lineitem
.import /local/data/tpch100g/lineitem/lineitem0266 lineitem
.import /local/data/tpch100g/lineitem/lineitem0267 lineitem
.import /local/data/tpch100g/lineitem/lineitem0268 lineitem
.import /local/data/tpch100g/lineitem/lineitem0269 lineitem
.import /local/data/tpch100g/lineitem/lineitem0270 lineitem
.import /local/data/tpch100g/lineitem/lineitem0271 lineitem
.import /local/data/tpch100g/lineitem/lineitem0272 lineitem
.import /local/data/tpch100g/lineitem/lineitem0273 lineitem
.import /local/data/tpch100g/lineitem/lineitem0274 lineitem
.import /local/data/tpch100g/lineitem/lineitem0275 lineitem
.import /local/data/tpch100g/lineitem/lineitem0276 lineitem
.import /local/data/tpch100g/lineitem/lineitem0277 lineitem
.import /local/data/tpch100g/lineitem/lineitem0278 lineitem
.import /local/data/tpch100g/lineitem/lineitem0279 lineitem
.import /local/data/tpch100g/lineitem/lineitem0280 lineitem
.import /local/data/tpch100g/lineitem/lineitem0281 lineitem
.import /local/data/tpch100g/lineitem/lineitem0282 lineitem
.import /local/data/tpch100g/lineitem/lineitem0283 lineitem
.import /local/data/tpch100g/lineitem/lineitem0284 lineitem
.import /local/data/tpch100g/lineitem/lineitem0285 lineitem
.import /local/data/tpch100g/lineitem/lineitem0286 lineitem
.import /local/data/tpch100g/lineitem/lineitem0287 lineitem
.import /local/data/tpch100g/lineitem/lineitem0288 lineitem
.import /local/data/tpch100g/lineitem/lineitem0289 lineitem
.import /local/data/tpch100g/lineitem/lineitem0290 lineitem
.import /local/data/tpch100g/lineitem/lineitem0291 lineitem
.import /local/data/tpch100g/lineitem/lineitem0292 lineitem
.import /local/data/tpch100g/lineitem/lineitem0293 lineitem
.import /local/data/tpch100g/lineitem/lineitem0294 lineitem
.import /local/data/tpch100g/lineitem/lineitem0295 lineitem
.import /local/data/tpch100g/lineitem/lineitem0296 lineitem
.import /local/data/tpch100g/lineitem/lineitem0297 lineitem
.import /local/data/tpch100g/lineitem/lineitem0298 lineitem
.import /local/data/tpch100g/lineitem/lineitem0299 lineitem
.import /local/data/tpch100g/lineitem/lineitem0300 lineitem
.import /local/data/tpch100g/lineitem/lineitem0301 lineitem
.import /local/data/tpch100g/lineitem/lineitem0302 lineitem
.import /local/data/tpch100g/lineitem/lineitem0303 lineitem
.import /local/data/tpch100g/lineitem/lineitem0304 lineitem
.import /local/data/tpch100g/lineitem/lineitem0305 lineitem
.import /local/data/tpch100g/lineitem/lineitem0306 lineitem
.import /local/data/tpch100g/lineitem/lineitem0307 lineitem
.import /local/data/tpch100g/lineitem/lineitem0308 lineitem
.import /local/data/tpch100g/lineitem/lineitem0309 lineitem
.import /local/data/tpch100g/lineitem/lineitem0310 lineitem
.import /local/data/tpch100g/lineitem/lineitem0311 lineitem
.import /local/data/tpch100g/lineitem/lineitem0312 lineitem
.import /local/data/tpch100g/lineitem/lineitem0313 lineitem
.import /local/data/tpch100g/lineitem/lineitem0314 lineitem
.import /local/data/tpch100g/lineitem/lineitem0315 lineitem
.import /local/data/tpch100g/lineitem/lineitem0316 lineitem
.import /local/data/tpch100g/lineitem/lineitem0317 lineitem
.import /local/data/tpch100g/lineitem/lineitem0318 lineitem
.import /local/data/tpch100g/lineitem/lineitem0319 lineitem
.import /local/data/tpch100g/lineitem/lineitem0320 lineitem
.import /local/data/tpch100g/lineitem/lineitem0321 lineitem
.import /local/data/tpch100g/lineitem/lineitem0322 lineitem
.import /local/data/tpch100g/lineitem/lineitem0323 lineitem
.import /local/data/tpch100g/lineitem/lineitem0324 lineitem
.import /local/data/tpch100g/lineitem/lineitem0325 lineitem
.import /local/data/tpch100g/lineitem/lineitem0326 lineitem
.import /local/data/tpch100g/lineitem/lineitem0327 lineitem
.import /local/data/tpch100g/lineitem/lineitem0328 lineitem
.import /local/data/tpch100g/lineitem/lineitem0329 lineitem
.import /local/data/tpch100g/lineitem/lineitem0330 lineitem
.import /local/data/tpch100g/lineitem/lineitem0331 lineitem
.import /local/data/tpch100g/lineitem/lineitem0332 lineitem
.import /local/data/tpch100g/lineitem/lineitem0333 lineitem
.import /local/data/tpch100g/lineitem/lineitem0334 lineitem
.import /local/data/tpch100g/lineitem/lineitem0335 lineitem
.import /local/data/tpch100g/lineitem/lineitem0336 lineitem
.import /local/data/tpch100g/lineitem/lineitem0337 lineitem
.import /local/data/tpch100g/lineitem/lineitem0338 lineitem
.import /local/data/tpch100g/lineitem/lineitem0339 lineitem
.import /local/data/tpch100g/lineitem/lineitem0340 lineitem
.import /local/data/tpch100g/lineitem/lineitem0341 lineitem
.import /local/data/tpch100g/lineitem/lineitem0342 lineitem
.import /local/data/tpch100g/lineitem/lineitem0343 lineitem
.import /local/data/tpch100g/lineitem/lineitem0344 lineitem
.import /local/data/tpch100g/lineitem/lineitem0345 lineitem
.import /local/data/tpch100g/lineitem/lineitem0346 lineitem
.import /local/data/tpch100g/lineitem/lineitem0347 lineitem
.import /local/data/tpch100g/lineitem/lineitem0348 lineitem
.import /local/data/tpch100g/lineitem/lineitem0349 lineitem
.import /local/data/tpch100g/lineitem/lineitem0350 lineitem
.import /local/data/tpch100g/lineitem/lineitem0351 lineitem
.import /local/data/tpch100g/lineitem/lineitem0352 lineitem
.import /local/data/tpch100g/lineitem/lineitem0353 lineitem
.import /local/data/tpch100g/lineitem/lineitem0354 lineitem
.import /local/data/tpch100g/lineitem/lineitem0355 lineitem
.import /local/data/tpch100g/lineitem/lineitem0356 lineitem
.import /local/data/tpch100g/lineitem/lineitem0357 lineitem
.import /local/data/tpch100g/lineitem/lineitem0358 lineitem
.import /local/data/tpch100g/lineitem/lineitem0359 lineitem
.import /local/data/tpch100g/lineitem/lineitem0360 lineitem
.import /local/data/tpch100g/lineitem/lineitem0361 lineitem
.import /local/data/tpch100g/lineitem/lineitem0362 lineitem
.import /local/data/tpch100g/lineitem/lineitem0363 lineitem
.import /local/data/tpch100g/lineitem/lineitem0364 lineitem
.import /local/data/tpch100g/lineitem/lineitem0365 lineitem
.import /local/data/tpch100g/lineitem/lineitem0366 lineitem
.import /local/data/tpch100g/lineitem/lineitem0367 lineitem
.import /local/data/tpch100g/lineitem/lineitem0368 lineitem
.import /local/data/tpch100g/lineitem/lineitem0369 lineitem
.import /local/data/tpch100g/lineitem/lineitem0370 lineitem
.import /local/data/tpch100g/lineitem/lineitem0371 lineitem
.import /local/data/tpch100g/lineitem/lineitem0372 lineitem
.import /local/data/tpch100g/lineitem/lineitem0373 lineitem
.import /local/data/tpch100g/lineitem/lineitem0374 lineitem
.import /local/data/tpch100g/lineitem/lineitem0375 lineitem
.import /local/data/tpch100g/lineitem/lineitem0376 lineitem
.import /local/data/tpch100g/lineitem/lineitem0377 lineitem
.import /local/data/tpch100g/lineitem/lineitem0378 lineitem
.import /local/data/tpch100g/lineitem/lineitem0379 lineitem
.import /local/data/tpch100g/lineitem/lineitem0380 lineitem
.import /local/data/tpch100g/lineitem/lineitem0381 lineitem
.import /local/data/tpch100g/lineitem/lineitem0382 lineitem
.import /local/data/tpch100g/lineitem/lineitem0383 lineitem
.import /local/data/tpch100g/lineitem/lineitem0384 lineitem
.import /local/data/tpch100g/lineitem/lineitem0385 lineitem
.import /local/data/tpch100g/lineitem/lineitem0386 lineitem
.import /local/data/tpch100g/lineitem/lineitem0387 lineitem
.import /local/data/tpch100g/lineitem/lineitem0388 lineitem
.import /local/data/tpch100g/lineitem/lineitem0389 lineitem
.import /local/data/tpch100g/lineitem/lineitem0390 lineitem
.import /local/data/tpch100g/lineitem/lineitem0391 lineitem
.import /local/data/tpch100g/lineitem/lineitem0392 lineitem
.import /local/data/tpch100g/lineitem/lineitem0393 lineitem
.import /local/data/tpch100g/lineitem/lineitem0394 lineitem
.import /local/data/tpch100g/lineitem/lineitem0395 lineitem
.import /local/data/tpch100g/lineitem/lineitem0396 lineitem
.import /local/data/tpch100g/lineitem/lineitem0397 lineitem
.import /local/data/tpch100g/lineitem/lineitem0398 lineitem
.import /local/data/tpch100g/lineitem/lineitem0399 lineitem
.import /local/data/tpch100g/lineitem/lineitem0400 lineitem
.import /local/data/tpch100g/lineitem/lineitem0401 lineitem
.import /local/data/tpch100g/lineitem/lineitem0402 lineitem
.import /local/data/tpch100g/lineitem/lineitem0403 lineitem
.import /local/data/tpch100g/lineitem/lineitem0404 lineitem
.import /local/data/tpch100g/lineitem/lineitem0405 lineitem
.import /local/data/tpch100g/lineitem/lineitem0406 lineitem
.import /local/data/tpch100g/lineitem/lineitem0407 lineitem
.import /local/data/tpch100g/lineitem/lineitem0408 lineitem
.import /local/data/tpch100g/lineitem/lineitem0409 lineitem
.import /local/data/tpch100g/lineitem/lineitem0410 lineitem
.import /local/data/tpch100g/lineitem/lineitem0411 lineitem
.import /local/data/tpch100g/lineitem/lineitem0412 lineitem
.import /local/data/tpch100g/lineitem/lineitem0413 lineitem
.import /local/data/tpch100g/lineitem/lineitem0414 lineitem
.import /local/data/tpch100g/lineitem/lineitem0415 lineitem
.import /local/data/tpch100g/lineitem/lineitem0416 lineitem
.import /local/data/tpch100g/lineitem/lineitem0417 lineitem
.import /local/data/tpch100g/lineitem/lineitem0418 lineitem
.import /local/data/tpch100g/lineitem/lineitem0419 lineitem
.import /local/data/tpch100g/lineitem/lineitem0420 lineitem
.import /local/data/tpch100g/lineitem/lineitem0421 lineitem
.import /local/data/tpch100g/lineitem/lineitem0422 lineitem
.import /local/data/tpch100g/lineitem/lineitem0423 lineitem
.import /local/data/tpch100g/lineitem/lineitem0424 lineitem
.import /local/data/tpch100g/lineitem/lineitem0425 lineitem
.import /local/data/tpch100g/lineitem/lineitem0426 lineitem
.import /local/data/tpch100g/lineitem/lineitem0427 lineitem
.import /local/data/tpch100g/lineitem/lineitem0428 lineitem
.import /local/data/tpch100g/lineitem/lineitem0429 lineitem
.import /local/data/tpch100g/lineitem/lineitem0430 lineitem
.import /local/data/tpch100g/lineitem/lineitem0431 lineitem
.import /local/data/tpch100g/lineitem/lineitem0432 lineitem
.import /local/data/tpch100g/lineitem/lineitem0433 lineitem
.import /local/data/tpch100g/lineitem/lineitem0434 lineitem
.import /local/data/tpch100g/lineitem/lineitem0435 lineitem
.import /local/data/tpch100g/lineitem/lineitem0436 lineitem
.import /local/data/tpch100g/lineitem/lineitem0437 lineitem
.import /local/data/tpch100g/lineitem/lineitem0438 lineitem
.import /local/data/tpch100g/lineitem/lineitem0439 lineitem
.import /local/data/tpch100g/lineitem/lineitem0440 lineitem
.import /local/data/tpch100g/lineitem/lineitem0441 lineitem
.import /local/data/tpch100g/lineitem/lineitem0442 lineitem
.import /local/data/tpch100g/lineitem/lineitem0443 lineitem
.import /local/data/tpch100g/lineitem/lineitem0444 lineitem
.import /local/data/tpch100g/lineitem/lineitem0445 lineitem
.import /local/data/tpch100g/lineitem/lineitem0446 lineitem
.import /local/data/tpch100g/lineitem/lineitem0447 lineitem
.import /local/data/tpch100g/lineitem/lineitem0448 lineitem
.import /local/data/tpch100g/lineitem/lineitem0449 lineitem
.import /local/data/tpch100g/lineitem/lineitem0450 lineitem
.import /local/data/tpch100g/lineitem/lineitem0451 lineitem
.import /local/data/tpch100g/lineitem/lineitem0452 lineitem
.import /local/data/tpch100g/lineitem/lineitem0453 lineitem
.import /local/data/tpch100g/lineitem/lineitem0454 lineitem
.import /local/data/tpch100g/lineitem/lineitem0455 lineitem
.import /local/data/tpch100g/lineitem/lineitem0456 lineitem
.import /local/data/tpch100g/lineitem/lineitem0457 lineitem
.import /local/data/tpch100g/lineitem/lineitem0458 lineitem
.import /local/data/tpch100g/lineitem/lineitem0459 lineitem
.import /local/data/tpch100g/lineitem/lineitem0460 lineitem
.import /local/data/tpch100g/lineitem/lineitem0461 lineitem
.import /local/data/tpch100g/lineitem/lineitem0462 lineitem
.import /local/data/tpch100g/lineitem/lineitem0463 lineitem
.import /local/data/tpch100g/lineitem/lineitem0464 lineitem
.import /local/data/tpch100g/lineitem/lineitem0465 lineitem
.import /local/data/tpch100g/lineitem/lineitem0466 lineitem
.import /local/data/tpch100g/lineitem/lineitem0467 lineitem
.import /local/data/tpch100g/lineitem/lineitem0468 lineitem
.import /local/data/tpch100g/lineitem/lineitem0469 lineitem
.import /local/data/tpch100g/lineitem/lineitem0470 lineitem
.import /local/data/tpch100g/lineitem/lineitem0471 lineitem
.import /local/data/tpch100g/lineitem/lineitem0472 lineitem
.import /local/data/tpch100g/lineitem/lineitem0473 lineitem
.import /local/data/tpch100g/lineitem/lineitem0474 lineitem
.import /local/data/tpch100g/lineitem/lineitem0475 lineitem
.import /local/data/tpch100g/lineitem/lineitem0476 lineitem
.import /local/data/tpch100g/lineitem/lineitem0477 lineitem
.import /local/data/tpch100g/lineitem/lineitem0478 lineitem
.import /local/data/tpch100g/lineitem/lineitem0479 lineitem
.import /local/data/tpch100g/lineitem/lineitem0480 lineitem
.import /local/data/tpch100g/lineitem/lineitem0481 lineitem
.import /local/data/tpch100g/lineitem/lineitem0482 lineitem
.import /local/data/tpch100g/lineitem/lineitem0483 lineitem
.import /local/data/tpch100g/lineitem/lineitem0484 lineitem
.import /local/data/tpch100g/lineitem/lineitem0485 lineitem
.import /local/data/tpch100g/lineitem/lineitem0486 lineitem
.import /local/data/tpch100g/lineitem/lineitem0487 lineitem
.import /local/data/tpch100g/lineitem/lineitem0488 lineitem
.import /local/data/tpch100g/lineitem/lineitem0489 lineitem
.import /local/data/tpch100g/lineitem/lineitem0490 lineitem
.import /local/data/tpch100g/lineitem/lineitem0491 lineitem
.import /local/data/tpch100g/lineitem/lineitem0492 lineitem
.import /local/data/tpch100g/lineitem/lineitem0493 lineitem
.import /local/data/tpch100g/lineitem/lineitem0494 lineitem
.import /local/data/tpch100g/lineitem/lineitem0495 lineitem
.import /local/data/tpch100g/lineitem/lineitem0496 lineitem
.import /local/data/tpch100g/lineitem/lineitem0497 lineitem
.import /local/data/tpch100g/lineitem/lineitem0498 lineitem
.import /local/data/tpch100g/lineitem/lineitem0499 lineitem
.import /local/data/tpch100g/lineitem/lineitem0500 lineitem
.import /local/data/tpch100g/lineitem/lineitem0501 lineitem
.import /local/data/tpch100g/lineitem/lineitem0502 lineitem
.import /local/data/tpch100g/lineitem/lineitem0503 lineitem
.import /local/data/tpch100g/lineitem/lineitem0504 lineitem
.import /local/data/tpch100g/lineitem/lineitem0505 lineitem
.import /local/data/tpch100g/lineitem/lineitem0506 lineitem
.import /local/data/tpch100g/lineitem/lineitem0507 lineitem
.import /local/data/tpch100g/lineitem/lineitem0508 lineitem
.import /local/data/tpch100g/lineitem/lineitem0509 lineitem
.import /local/data/tpch100g/lineitem/lineitem0510 lineitem
.import /local/data/tpch100g/lineitem/lineitem0511 lineitem
.import /local/data/tpch100g/lineitem/lineitem0512 lineitem
.import /local/data/tpch100g/lineitem/lineitem0513 lineitem
.import /local/data/tpch100g/lineitem/lineitem0514 lineitem
.import /local/data/tpch100g/lineitem/lineitem0515 lineitem
.import /local/data/tpch100g/lineitem/lineitem0516 lineitem
.import /local/data/tpch100g/lineitem/lineitem0517 lineitem
.import /local/data/tpch100g/lineitem/lineitem0518 lineitem
.import /local/data/tpch100g/lineitem/lineitem0519 lineitem
.import /local/data/tpch100g/lineitem/lineitem0520 lineitem
.import /local/data/tpch100g/lineitem/lineitem0521 lineitem
.import /local/data/tpch100g/lineitem/lineitem0522 lineitem
.import /local/data/tpch100g/lineitem/lineitem0523 lineitem
.import /local/data/tpch100g/lineitem/lineitem0524 lineitem
.import /local/data/tpch100g/lineitem/lineitem0525 lineitem
.import /local/data/tpch100g/lineitem/lineitem0526 lineitem
.import /local/data/tpch100g/lineitem/lineitem0527 lineitem
.import /local/data/tpch100g/lineitem/lineitem0528 lineitem
.import /local/data/tpch100g/lineitem/lineitem0529 lineitem
.import /local/data/tpch100g/lineitem/lineitem0530 lineitem
.import /local/data/tpch100g/lineitem/lineitem0531 lineitem
.import /local/data/tpch100g/lineitem/lineitem0532 lineitem
.import /local/data/tpch100g/lineitem/lineitem0533 lineitem
.import /local/data/tpch100g/lineitem/lineitem0534 lineitem
.import /local/data/tpch100g/lineitem/lineitem0535 lineitem
.import /local/data/tpch100g/lineitem/lineitem0536 lineitem
.import /local/data/tpch100g/lineitem/lineitem0537 lineitem
.import /local/data/tpch100g/lineitem/lineitem0538 lineitem
.import /local/data/tpch100g/lineitem/lineitem0539 lineitem
.import /local/data/tpch100g/lineitem/lineitem0540 lineitem
.import /local/data/tpch100g/lineitem/lineitem0541 lineitem
.import /local/data/tpch100g/lineitem/lineitem0542 lineitem
.import /local/data/tpch100g/lineitem/lineitem0543 lineitem
.import /local/data/tpch100g/lineitem/lineitem0544 lineitem
.import /local/data/tpch100g/lineitem/lineitem0545 lineitem
.import /local/data/tpch100g/lineitem/lineitem0546 lineitem
.import /local/data/tpch100g/lineitem/lineitem0547 lineitem
.import /local/data/tpch100g/lineitem/lineitem0548 lineitem
.import /local/data/tpch100g/lineitem/lineitem0549 lineitem
.import /local/data/tpch100g/lineitem/lineitem0550 lineitem
.import /local/data/tpch100g/lineitem/lineitem0551 lineitem
.import /local/data/tpch100g/lineitem/lineitem0552 lineitem
.import /local/data/tpch100g/lineitem/lineitem0553 lineitem
.import /local/data/tpch100g/lineitem/lineitem0554 lineitem
.import /local/data/tpch100g/lineitem/lineitem0555 lineitem
.import /local/data/tpch100g/lineitem/lineitem0556 lineitem
.import /local/data/tpch100g/lineitem/lineitem0557 lineitem
.import /local/data/tpch100g/lineitem/lineitem0558 lineitem
.import /local/data/tpch100g/lineitem/lineitem0559 lineitem
.import /local/data/tpch100g/lineitem/lineitem0560 lineitem
.import /local/data/tpch100g/lineitem/lineitem0561 lineitem
.import /local/data/tpch100g/lineitem/lineitem0562 lineitem
.import /local/data/tpch100g/lineitem/lineitem0563 lineitem
.import /local/data/tpch100g/lineitem/lineitem0564 lineitem
.import /local/data/tpch100g/lineitem/lineitem0565 lineitem
.import /local/data/tpch100g/lineitem/lineitem0566 lineitem
.import /local/data/tpch100g/lineitem/lineitem0567 lineitem
.import /local/data/tpch100g/lineitem/lineitem0568 lineitem
.import /local/data/tpch100g/lineitem/lineitem0569 lineitem
.import /local/data/tpch100g/lineitem/lineitem0570 lineitem
.import /local/data/tpch100g/lineitem/lineitem0571 lineitem
.import /local/data/tpch100g/lineitem/lineitem0572 lineitem
.import /local/data/tpch100g/lineitem/lineitem0573 lineitem
.import /local/data/tpch100g/lineitem/lineitem0574 lineitem
.import /local/data/tpch100g/lineitem/lineitem0575 lineitem
.import /local/data/tpch100g/lineitem/lineitem0576 lineitem
.import /local/data/tpch100g/lineitem/lineitem0577 lineitem
.import /local/data/tpch100g/lineitem/lineitem0578 lineitem
.import /local/data/tpch100g/lineitem/lineitem0579 lineitem
.import /local/data/tpch100g/lineitem/lineitem0580 lineitem
.import /local/data/tpch100g/lineitem/lineitem0581 lineitem
.import /local/data/tpch100g/lineitem/lineitem0582 lineitem
.import /local/data/tpch100g/lineitem/lineitem0583 lineitem
.import /local/data/tpch100g/lineitem/lineitem0584 lineitem
.import /local/data/tpch100g/lineitem/lineitem0585 lineitem
.import /local/data/tpch100g/lineitem/lineitem0586 lineitem
.import /local/data/tpch100g/lineitem/lineitem0587 lineitem
.import /local/data/tpch100g/lineitem/lineitem0588 lineitem
.import /local/data/tpch100g/lineitem/lineitem0589 lineitem
.import /local/data/tpch100g/lineitem/lineitem0590 lineitem
.import /local/data/tpch100g/lineitem/lineitem0591 lineitem
.import /local/data/tpch100g/lineitem/lineitem0592 lineitem
.import /local/data/tpch100g/lineitem/lineitem0593 lineitem
.import /local/data/tpch100g/lineitem/lineitem0594 lineitem
.import /local/data/tpch100g/lineitem/lineitem0595 lineitem
.import /local/data/tpch100g/lineitem/lineitem0596 lineitem
.import /local/data/tpch100g/lineitem/lineitem0597 lineitem
.import /local/data/tpch100g/lineitem/lineitem0598 lineitem
.import /local/data/tpch100g/lineitem/lineitem0599 lineitem
.import /local/data/tpch100g/lineitem/lineitem0600 lineitem
.import /local/data/tpch100g/lineitem/lineitem0601 lineitem
.import /local/data/tpch100g/lineitem/lineitem0602 lineitem
.import /local/data/tpch100g/lineitem/lineitem0603 lineitem
.import /local/data/tpch100g/lineitem/lineitem0604 lineitem
.import /local/data/tpch100g/lineitem/lineitem0605 lineitem
.import /local/data/tpch100g/lineitem/lineitem0606 lineitem
.import /local/data/tpch100g/lineitem/lineitem0607 lineitem
.import /local/data/tpch100g/lineitem/lineitem0608 lineitem
.import /local/data/tpch100g/lineitem/lineitem0609 lineitem
.import /local/data/tpch100g/lineitem/lineitem0610 lineitem
.import /local/data/tpch100g/lineitem/lineitem0611 lineitem
.import /local/data/tpch100g/lineitem/lineitem0612 lineitem
.import /local/data/tpch100g/lineitem/lineitem0613 lineitem
.import /local/data/tpch100g/lineitem/lineitem0614 lineitem
.import /local/data/tpch100g/lineitem/lineitem0615 lineitem
.import /local/data/tpch100g/lineitem/lineitem0616 lineitem
.import /local/data/tpch100g/lineitem/lineitem0617 lineitem
.import /local/data/tpch100g/lineitem/lineitem0618 lineitem
.import /local/data/tpch100g/lineitem/lineitem0619 lineitem
.import /local/data/tpch100g/lineitem/lineitem0620 lineitem
.import /local/data/tpch100g/lineitem/lineitem0621 lineitem
.import /local/data/tpch100g/lineitem/lineitem0622 lineitem
.import /local/data/tpch100g/lineitem/lineitem0623 lineitem
.import /local/data/tpch100g/lineitem/lineitem0624 lineitem
.import /local/data/tpch100g/lineitem/lineitem0625 lineitem
.import /local/data/tpch100g/lineitem/lineitem0626 lineitem
.import /local/data/tpch100g/lineitem/lineitem0627 lineitem
.import /local/data/tpch100g/lineitem/lineitem0628 lineitem
.import /local/data/tpch100g/lineitem/lineitem0629 lineitem
.import /local/data/tpch100g/lineitem/lineitem0630 lineitem
.import /local/data/tpch100g/lineitem/lineitem0631 lineitem
.import /local/data/tpch100g/lineitem/lineitem0632 lineitem
.import /local/data/tpch100g/lineitem/lineitem0633 lineitem
.import /local/data/tpch100g/lineitem/lineitem0634 lineitem
.import /local/data/tpch100g/lineitem/lineitem0635 lineitem
.import /local/data/tpch100g/lineitem/lineitem0636 lineitem
.import /local/data/tpch100g/lineitem/lineitem0637 lineitem
.import /local/data/tpch100g/lineitem/lineitem0638 lineitem
.import /local/data/tpch100g/lineitem/lineitem0639 lineitem
.import /local/data/tpch100g/lineitem/lineitem0640 lineitem
.import /local/data/tpch100g/lineitem/lineitem0641 lineitem
.import /local/data/tpch100g/lineitem/lineitem0642 lineitem
.import /local/data/tpch100g/lineitem/lineitem0643 lineitem
.import /local/data/tpch100g/lineitem/lineitem0644 lineitem
.import /local/data/tpch100g/lineitem/lineitem0645 lineitem
.import /local/data/tpch100g/lineitem/lineitem0646 lineitem
.import /local/data/tpch100g/lineitem/lineitem0647 lineitem
.import /local/data/tpch100g/lineitem/lineitem0648 lineitem
.import /local/data/tpch100g/lineitem/lineitem0649 lineitem
.import /local/data/tpch100g/lineitem/lineitem0650 lineitem
.import /local/data/tpch100g/lineitem/lineitem0651 lineitem
.import /local/data/tpch100g/lineitem/lineitem0652 lineitem
.import /local/data/tpch100g/lineitem/lineitem0653 lineitem
.import /local/data/tpch100g/lineitem/lineitem0654 lineitem
.import /local/data/tpch100g/lineitem/lineitem0655 lineitem
.import /local/data/tpch100g/lineitem/lineitem0656 lineitem
.import /local/data/tpch100g/lineitem/lineitem0657 lineitem
.import /local/data/tpch100g/lineitem/lineitem0658 lineitem
.import /local/data/tpch100g/lineitem/lineitem0659 lineitem
.import /local/data/tpch100g/lineitem/lineitem0660 lineitem
.import /local/data/tpch100g/lineitem/lineitem0661 lineitem
.import /local/data/tpch100g/lineitem/lineitem0662 lineitem
.import /local/data/tpch100g/lineitem/lineitem0663 lineitem
.import /local/data/tpch100g/lineitem/lineitem0664 lineitem
.import /local/data/tpch100g/lineitem/lineitem0665 lineitem
.import /local/data/tpch100g/lineitem/lineitem0666 lineitem
.import /local/data/tpch100g/lineitem/lineitem0667 lineitem
.import /local/data/tpch100g/lineitem/lineitem0668 lineitem
.import /local/data/tpch100g/lineitem/lineitem0669 lineitem
.import /local/data/tpch100g/lineitem/lineitem0670 lineitem
.import /local/data/tpch100g/lineitem/lineitem0671 lineitem
.import /local/data/tpch100g/lineitem/lineitem0672 lineitem
.import /local/data/tpch100g/lineitem/lineitem0673 lineitem
.import /local/data/tpch100g/lineitem/lineitem0674 lineitem
.import /local/data/tpch100g/lineitem/lineitem0675 lineitem
.import /local/data/tpch100g/lineitem/lineitem0676 lineitem
.import /local/data/tpch100g/lineitem/lineitem0677 lineitem
.import /local/data/tpch100g/lineitem/lineitem0678 lineitem
.import /local/data/tpch100g/lineitem/lineitem0679 lineitem
.import /local/data/tpch100g/lineitem/lineitem0680 lineitem
.import /local/data/tpch100g/lineitem/lineitem0681 lineitem
.import /local/data/tpch100g/lineitem/lineitem0682 lineitem
.import /local/data/tpch100g/lineitem/lineitem0683 lineitem
.import /local/data/tpch100g/lineitem/lineitem0684 lineitem
.import /local/data/tpch100g/lineitem/lineitem0685 lineitem
.import /local/data/tpch100g/lineitem/lineitem0686 lineitem
.import /local/data/tpch100g/lineitem/lineitem0687 lineitem
.import /local/data/tpch100g/lineitem/lineitem0688 lineitem
.import /local/data/tpch100g/lineitem/lineitem0689 lineitem
.import /local/data/tpch100g/lineitem/lineitem0690 lineitem
.import /local/data/tpch100g/lineitem/lineitem0691 lineitem
.import /local/data/tpch100g/lineitem/lineitem0692 lineitem
.import /local/data/tpch100g/lineitem/lineitem0693 lineitem
.import /local/data/tpch100g/lineitem/lineitem0694 lineitem
.import /local/data/tpch100g/lineitem/lineitem0695 lineitem
.import /local/data/tpch100g/lineitem/lineitem0696 lineitem
.import /local/data/tpch100g/lineitem/lineitem0697 lineitem
.import /local/data/tpch100g/lineitem/lineitem0698 lineitem
.import /local/data/tpch100g/lineitem/lineitem0699 lineitem
.import /local/data/tpch100g/lineitem/lineitem0700 lineitem
.import /local/data/tpch100g/lineitem/lineitem0701 lineitem
.import /local/data/tpch100g/lineitem/lineitem0702 lineitem
.import /local/data/tpch100g/lineitem/lineitem0703 lineitem
.import /local/data/tpch100g/lineitem/lineitem0704 lineitem
.import /local/data/tpch100g/lineitem/lineitem0705 lineitem
.import /local/data/tpch100g/lineitem/lineitem0706 lineitem
.import /local/data/tpch100g/lineitem/lineitem0707 lineitem
.import /local/data/tpch100g/lineitem/lineitem0708 lineitem
.import /local/data/tpch100g/lineitem/lineitem0709 lineitem
.import /local/data/tpch100g/lineitem/lineitem0710 lineitem
.import /local/data/tpch100g/lineitem/lineitem0711 lineitem
.import /local/data/tpch100g/lineitem/lineitem0712 lineitem
.import /local/data/tpch100g/lineitem/lineitem0713 lineitem
.import /local/data/tpch100g/lineitem/lineitem0714 lineitem
.import /local/data/tpch100g/lineitem/lineitem0715 lineitem
.import /local/data/tpch100g/lineitem/lineitem0716 lineitem
.import /local/data/tpch100g/lineitem/lineitem0717 lineitem
.import /local/data/tpch100g/lineitem/lineitem0718 lineitem
.import /local/data/tpch100g/lineitem/lineitem0719 lineitem
.import /local/data/tpch100g/lineitem/lineitem0720 lineitem
.import /local/data/tpch100g/lineitem/lineitem0721 lineitem
.import /local/data/tpch100g/lineitem/lineitem0722 lineitem
.import /local/data/tpch100g/lineitem/lineitem0723 lineitem
.import /local/data/tpch100g/lineitem/lineitem0724 lineitem
.import /local/data/tpch100g/lineitem/lineitem0725 lineitem
.import /local/data/tpch100g/lineitem/lineitem0726 lineitem
.import /local/data/tpch100g/lineitem/lineitem0727 lineitem
.import /local/data/tpch100g/lineitem/lineitem0728 lineitem
.import /local/data/tpch100g/lineitem/lineitem0729 lineitem
.import /local/data/tpch100g/lineitem/lineitem0730 lineitem
.import /local/data/tpch100g/lineitem/lineitem0731 lineitem
.import /local/data/tpch100g/lineitem/lineitem0732 lineitem
.import /local/data/tpch100g/lineitem/lineitem0733 lineitem
.import /local/data/tpch100g/lineitem/lineitem0734 lineitem
.import /local/data/tpch100g/lineitem/lineitem0735 lineitem
.import /local/data/tpch100g/lineitem/lineitem0736 lineitem
.import /local/data/tpch100g/lineitem/lineitem0737 lineitem
.import /local/data/tpch100g/lineitem/lineitem0738 lineitem
.import /local/data/tpch100g/lineitem/lineitem0739 lineitem
.import /local/data/tpch100g/lineitem/lineitem0740 lineitem
.import /local/data/tpch100g/lineitem/lineitem0741 lineitem
.import /local/data/tpch100g/lineitem/lineitem0742 lineitem
.import /local/data/tpch100g/lineitem/lineitem0743 lineitem
.import /local/data/tpch100g/lineitem/lineitem0744 lineitem
.import /local/data/tpch100g/lineitem/lineitem0745 lineitem
.import /local/data/tpch100g/lineitem/lineitem0746 lineitem
.import /local/data/tpch100g/lineitem/lineitem0747 lineitem
.import /local/data/tpch100g/lineitem/lineitem0748 lineitem
.import /local/data/tpch100g/lineitem/lineitem0749 lineitem
.import /local/data/tpch100g/lineitem/lineitem0750 lineitem
.import /local/data/tpch100g/lineitem/lineitem0751 lineitem
.import /local/data/tpch100g/lineitem/lineitem0752 lineitem
.import /local/data/tpch100g/lineitem/lineitem0753 lineitem
.import /local/data/tpch100g/lineitem/lineitem0754 lineitem
.import /local/data/tpch100g/lineitem/lineitem0755 lineitem
.import /local/data/tpch100g/lineitem/lineitem0756 lineitem
.import /local/data/tpch100g/lineitem/lineitem0757 lineitem
.import /local/data/tpch100g/lineitem/lineitem0758 lineitem
.import /local/data/tpch100g/lineitem/lineitem0759 lineitem
.import /local/data/tpch100g/lineitem/lineitem0760 lineitem
.import /local/data/tpch100g/lineitem/lineitem0761 lineitem
.import /local/data/tpch100g/lineitem/lineitem0762 lineitem
.import /local/data/tpch100g/lineitem/lineitem0763 lineitem
.import /local/data/tpch100g/lineitem/lineitem0764 lineitem
.import /local/data/tpch100g/lineitem/lineitem0765 lineitem
.import /local/data/tpch100g/lineitem/lineitem0766 lineitem
.import /local/data/tpch100g/lineitem/lineitem0767 lineitem
.import /local/data/tpch100g/lineitem/lineitem0768 lineitem
.import /local/data/tpch100g/lineitem/lineitem0769 lineitem
.import /local/data/tpch100g/lineitem/lineitem0770 lineitem
.import /local/data/tpch100g/lineitem/lineitem0771 lineitem
.import /local/data/tpch100g/lineitem/lineitem0772 lineitem
.import /local/data/tpch100g/lineitem/lineitem0773 lineitem
.import /local/data/tpch100g/lineitem/lineitem0774 lineitem
.import /local/data/tpch100g/lineitem/lineitem0775 lineitem
.import /local/data/tpch100g/lineitem/lineitem0776 lineitem
.import /local/data/tpch100g/lineitem/lineitem0777 lineitem
.import /local/data/tpch100g/lineitem/lineitem0778 lineitem
.import /local/data/tpch100g/lineitem/lineitem0779 lineitem
.import /local/data/tpch100g/lineitem/lineitem0780 lineitem
.import /local/data/tpch100g/lineitem/lineitem0781 lineitem
.import /local/data/tpch100g/lineitem/lineitem0782 lineitem
.import /local/data/tpch100g/lineitem/lineitem0783 lineitem
.import /local/data/tpch100g/lineitem/lineitem0784 lineitem
.import /local/data/tpch100g/lineitem/lineitem0785 lineitem
.import /local/data/tpch100g/lineitem/lineitem0786 lineitem
.import /local/data/tpch100g/lineitem/lineitem0787 lineitem
.import /local/data/tpch100g/lineitem/lineitem0788 lineitem
.import /local/data/tpch100g/lineitem/lineitem0789 lineitem
.import /local/data/tpch100g/lineitem/lineitem0790 lineitem
.import /local/data/tpch100g/lineitem/lineitem0791 lineitem
.import /local/data/tpch100g/lineitem/lineitem0792 lineitem
.import /local/data/tpch100g/lineitem/lineitem0793 lineitem
.import /local/data/tpch100g/lineitem/lineitem0794 lineitem
.import /local/data/tpch100g/lineitem/lineitem0795 lineitem
.import /local/data/tpch100g/lineitem/lineitem0796 lineitem
.import /local/data/tpch100g/lineitem/lineitem0797 lineitem
.import /local/data/tpch100g/lineitem/lineitem0798 lineitem
.import /local/data/tpch100g/lineitem/lineitem0799 lineitem
.import /local/data/tpch100g/lineitem/lineitem0800 lineitem
.import /local/data/tpch100g/lineitem/lineitem0801 lineitem
.import /local/data/tpch100g/lineitem/lineitem0802 lineitem
.import /local/data/tpch100g/lineitem/lineitem0803 lineitem
.import /local/data/tpch100g/lineitem/lineitem0804 lineitem
.import /local/data/tpch100g/lineitem/lineitem0805 lineitem
.import /local/data/tpch100g/lineitem/lineitem0806 lineitem
.import /local/data/tpch100g/lineitem/lineitem0807 lineitem
.import /local/data/tpch100g/lineitem/lineitem0808 lineitem
.import /local/data/tpch100g/lineitem/lineitem0809 lineitem
.import /local/data/tpch100g/lineitem/lineitem0810 lineitem
.import /local/data/tpch100g/lineitem/lineitem0811 lineitem
.import /local/data/tpch100g/lineitem/lineitem0812 lineitem
.import /local/data/tpch100g/lineitem/lineitem0813 lineitem
.import /local/data/tpch100g/lineitem/lineitem0814 lineitem
.import /local/data/tpch100g/lineitem/lineitem0815 lineitem
.import /local/data/tpch100g/lineitem/lineitem0816 lineitem
.import /local/data/tpch100g/lineitem/lineitem0817 lineitem
.import /local/data/tpch100g/lineitem/lineitem0818 lineitem
.import /local/data/tpch100g/lineitem/lineitem0819 lineitem
.import /local/data/tpch100g/lineitem/lineitem0820 lineitem
.import /local/data/tpch100g/lineitem/lineitem0821 lineitem
.import /local/data/tpch100g/lineitem/lineitem0822 lineitem
.import /local/data/tpch100g/lineitem/lineitem0823 lineitem
.import /local/data/tpch100g/lineitem/lineitem0824 lineitem
.import /local/data/tpch100g/lineitem/lineitem0825 lineitem
.import /local/data/tpch100g/lineitem/lineitem0826 lineitem
.import /local/data/tpch100g/lineitem/lineitem0827 lineitem
.import /local/data/tpch100g/lineitem/lineitem0828 lineitem
.import /local/data/tpch100g/lineitem/lineitem0829 lineitem
.import /local/data/tpch100g/lineitem/lineitem0830 lineitem
.import /local/data/tpch100g/lineitem/lineitem0831 lineitem
.import /local/data/tpch100g/lineitem/lineitem0832 lineitem
.import /local/data/tpch100g/lineitem/lineitem0833 lineitem
.import /local/data/tpch100g/lineitem/lineitem0834 lineitem
.import /local/data/tpch100g/lineitem/lineitem0835 lineitem
.import /local/data/tpch100g/lineitem/lineitem0836 lineitem
.import /local/data/tpch100g/lineitem/lineitem0837 lineitem
.import /local/data/tpch100g/lineitem/lineitem0838 lineitem
.import /local/data/tpch100g/lineitem/lineitem0839 lineitem
.import /local/data/tpch100g/lineitem/lineitem0840 lineitem
.import /local/data/tpch100g/lineitem/lineitem0841 lineitem
.import /local/data/tpch100g/lineitem/lineitem0842 lineitem
.import /local/data/tpch100g/lineitem/lineitem0843 lineitem
.import /local/data/tpch100g/lineitem/lineitem0844 lineitem
.import /local/data/tpch100g/lineitem/lineitem0845 lineitem
.import /local/data/tpch100g/lineitem/lineitem0846 lineitem
.import /local/data/tpch100g/lineitem/lineitem0847 lineitem
.import /local/data/tpch100g/lineitem/lineitem0848 lineitem
.import /local/data/tpch100g/lineitem/lineitem0849 lineitem
.import /local/data/tpch100g/lineitem/lineitem0850 lineitem
.import /local/data/tpch100g/lineitem/lineitem0851 lineitem
.import /local/data/tpch100g/lineitem/lineitem0852 lineitem
.import /local/data/tpch100g/lineitem/lineitem0853 lineitem
.import /local/data/tpch100g/lineitem/lineitem0854 lineitem
.import /local/data/tpch100g/lineitem/lineitem0855 lineitem
.import /local/data/tpch100g/lineitem/lineitem0856 lineitem
.import /local/data/tpch100g/lineitem/lineitem0857 lineitem
.import /local/data/tpch100g/lineitem/lineitem0858 lineitem
.import /local/data/tpch100g/lineitem/lineitem0859 lineitem
.import /local/data/tpch100g/lineitem/lineitem0860 lineitem
.import /local/data/tpch100g/lineitem/lineitem0861 lineitem
.import /local/data/tpch100g/lineitem/lineitem0862 lineitem
.import /local/data/tpch100g/lineitem/lineitem0863 lineitem
.import /local/data/tpch100g/lineitem/lineitem0864 lineitem
.import /local/data/tpch100g/lineitem/lineitem0865 lineitem
.import /local/data/tpch100g/lineitem/lineitem0866 lineitem
.import /local/data/tpch100g/lineitem/lineitem0867 lineitem
.import /local/data/tpch100g/lineitem/lineitem0868 lineitem
.import /local/data/tpch100g/lineitem/lineitem0869 lineitem
.import /local/data/tpch100g/lineitem/lineitem0870 lineitem
.import /local/data/tpch100g/lineitem/lineitem0871 lineitem
.import /local/data/tpch100g/lineitem/lineitem0872 lineitem
.import /local/data/tpch100g/lineitem/lineitem0873 lineitem
.import /local/data/tpch100g/lineitem/lineitem0874 lineitem
.import /local/data/tpch100g/lineitem/lineitem0875 lineitem
.import /local/data/tpch100g/lineitem/lineitem0876 lineitem
.import /local/data/tpch100g/lineitem/lineitem0877 lineitem
.import /local/data/tpch100g/lineitem/lineitem0878 lineitem
.import /local/data/tpch100g/lineitem/lineitem0879 lineitem
.import /local/data/tpch100g/lineitem/lineitem0880 lineitem
.import /local/data/tpch100g/lineitem/lineitem0881 lineitem
.import /local/data/tpch100g/lineitem/lineitem0882 lineitem
.import /local/data/tpch100g/lineitem/lineitem0883 lineitem
.import /local/data/tpch100g/lineitem/lineitem0884 lineitem
.import /local/data/tpch100g/lineitem/lineitem0885 lineitem
.import /local/data/tpch100g/lineitem/lineitem0886 lineitem
.import /local/data/tpch100g/lineitem/lineitem0887 lineitem
.import /local/data/tpch100g/lineitem/lineitem0888 lineitem
.import /local/data/tpch100g/lineitem/lineitem0889 lineitem
.import /local/data/tpch100g/lineitem/lineitem0890 lineitem
.import /local/data/tpch100g/lineitem/lineitem0891 lineitem
.import /local/data/tpch100g/lineitem/lineitem0892 lineitem
.import /local/data/tpch100g/lineitem/lineitem0893 lineitem
.import /local/data/tpch100g/lineitem/lineitem0894 lineitem
.import /local/data/tpch100g/lineitem/lineitem0895 lineitem
.import /local/data/tpch100g/lineitem/lineitem0896 lineitem
.import /local/data/tpch100g/lineitem/lineitem0897 lineitem
.import /local/data/tpch100g/lineitem/lineitem0898 lineitem
.import /local/data/tpch100g/lineitem/lineitem0899 lineitem
.import /local/data/tpch100g/lineitem/lineitem0900 lineitem
.import /local/data/tpch100g/lineitem/lineitem0901 lineitem
.import /local/data/tpch100g/lineitem/lineitem0902 lineitem
.import /local/data/tpch100g/lineitem/lineitem0903 lineitem
.import /local/data/tpch100g/lineitem/lineitem0904 lineitem
.import /local/data/tpch100g/lineitem/lineitem0905 lineitem
.import /local/data/tpch100g/lineitem/lineitem0906 lineitem
.import /local/data/tpch100g/lineitem/lineitem0907 lineitem
.import /local/data/tpch100g/lineitem/lineitem0908 lineitem
.import /local/data/tpch100g/lineitem/lineitem0909 lineitem
.import /local/data/tpch100g/lineitem/lineitem0910 lineitem
.import /local/data/tpch100g/lineitem/lineitem0911 lineitem
.import /local/data/tpch100g/lineitem/lineitem0912 lineitem
.import /local/data/tpch100g/lineitem/lineitem0913 lineitem
.import /local/data/tpch100g/lineitem/lineitem0914 lineitem
.import /local/data/tpch100g/lineitem/lineitem0915 lineitem
.import /local/data/tpch100g/lineitem/lineitem0916 lineitem
.import /local/data/tpch100g/lineitem/lineitem0917 lineitem
.import /local/data/tpch100g/lineitem/lineitem0918 lineitem
.import /local/data/tpch100g/lineitem/lineitem0919 lineitem
.import /local/data/tpch100g/lineitem/lineitem0920 lineitem
.import /local/data/tpch100g/lineitem/lineitem0921 lineitem
.import /local/data/tpch100g/lineitem/lineitem0922 lineitem
.import /local/data/tpch100g/lineitem/lineitem0923 lineitem
.import /local/data/tpch100g/lineitem/lineitem0924 lineitem
.import /local/data/tpch100g/lineitem/lineitem0925 lineitem
.import /local/data/tpch100g/lineitem/lineitem0926 lineitem
.import /local/data/tpch100g/lineitem/lineitem0927 lineitem
.import /local/data/tpch100g/lineitem/lineitem0928 lineitem
.import /local/data/tpch100g/lineitem/lineitem0929 lineitem
.import /local/data/tpch100g/lineitem/lineitem0930 lineitem
.import /local/data/tpch100g/lineitem/lineitem0931 lineitem
.import /local/data/tpch100g/lineitem/lineitem0932 lineitem
.import /local/data/tpch100g/lineitem/lineitem0933 lineitem
.import /local/data/tpch100g/lineitem/lineitem0934 lineitem
.import /local/data/tpch100g/lineitem/lineitem0935 lineitem
.import /local/data/tpch100g/lineitem/lineitem0936 lineitem
.import /local/data/tpch100g/lineitem/lineitem0937 lineitem
.import /local/data/tpch100g/lineitem/lineitem0938 lineitem
.import /local/data/tpch100g/lineitem/lineitem0939 lineitem
.import /local/data/tpch100g/lineitem/lineitem0940 lineitem
.import /local/data/tpch100g/lineitem/lineitem0941 lineitem
.import /local/data/tpch100g/lineitem/lineitem0942 lineitem
.import /local/data/tpch100g/lineitem/lineitem0943 lineitem
.import /local/data/tpch100g/lineitem/lineitem0944 lineitem
.import /local/data/tpch100g/lineitem/lineitem0945 lineitem
.import /local/data/tpch100g/lineitem/lineitem0946 lineitem
.import /local/data/tpch100g/lineitem/lineitem0947 lineitem
.import /local/data/tpch100g/lineitem/lineitem0948 lineitem
.import /local/data/tpch100g/lineitem/lineitem0949 lineitem
.import /local/data/tpch100g/lineitem/lineitem0950 lineitem
.import /local/data/tpch100g/lineitem/lineitem0951 lineitem
.import /local/data/tpch100g/lineitem/lineitem0952 lineitem
.import /local/data/tpch100g/lineitem/lineitem0953 lineitem
.import /local/data/tpch100g/lineitem/lineitem0954 lineitem
.import /local/data/tpch100g/lineitem/lineitem0955 lineitem
.import /local/data/tpch100g/lineitem/lineitem0956 lineitem
.import /local/data/tpch100g/lineitem/lineitem0957 lineitem
.import /local/data/tpch100g/lineitem/lineitem0958 lineitem
.import /local/data/tpch100g/lineitem/lineitem0959 lineitem
.import /local/data/tpch100g/lineitem/lineitem0960 lineitem
.import /local/data/tpch100g/lineitem/lineitem0961 lineitem
.import /local/data/tpch100g/lineitem/lineitem0962 lineitem
.import /local/data/tpch100g/lineitem/lineitem0963 lineitem
.import /local/data/tpch100g/lineitem/lineitem0964 lineitem
.import /local/data/tpch100g/lineitem/lineitem0965 lineitem
.import /local/data/tpch100g/lineitem/lineitem0966 lineitem
.import /local/data/tpch100g/lineitem/lineitem0967 lineitem
.import /local/data/tpch100g/lineitem/lineitem0968 lineitem
.import /local/data/tpch100g/lineitem/lineitem0969 lineitem
.import /local/data/tpch100g/lineitem/lineitem0970 lineitem
.import /local/data/tpch100g/lineitem/lineitem0971 lineitem
.import /local/data/tpch100g/lineitem/lineitem0972 lineitem
.import /local/data/tpch100g/lineitem/lineitem0973 lineitem
.import /local/data/tpch100g/lineitem/lineitem0974 lineitem
.import /local/data/tpch100g/lineitem/lineitem0975 lineitem
.import /local/data/tpch100g/lineitem/lineitem0976 lineitem
.import /local/data/tpch100g/lineitem/lineitem0977 lineitem
.import /local/data/tpch100g/lineitem/lineitem0978 lineitem
.import /local/data/tpch100g/lineitem/lineitem0979 lineitem
.import /local/data/tpch100g/lineitem/lineitem0980 lineitem
.import /local/data/tpch100g/lineitem/lineitem0981 lineitem
.import /local/data/tpch100g/lineitem/lineitem0982 lineitem
.import /local/data/tpch100g/lineitem/lineitem0983 lineitem
.import /local/data/tpch100g/lineitem/lineitem0984 lineitem
.import /local/data/tpch100g/lineitem/lineitem0985 lineitem
.import /local/data/tpch100g/lineitem/lineitem0986 lineitem
.import /local/data/tpch100g/lineitem/lineitem0987 lineitem
.import /local/data/tpch100g/lineitem/lineitem0988 lineitem
.import /local/data/tpch100g/lineitem/lineitem0989 lineitem
.import /local/data/tpch100g/lineitem/lineitem0990 lineitem
.import /local/data/tpch100g/lineitem/lineitem0991 lineitem
.import /local/data/tpch100g/lineitem/lineitem0992 lineitem
.import /local/data/tpch100g/lineitem/lineitem0993 lineitem
.import /local/data/tpch100g/lineitem/lineitem0994 lineitem
.import /local/data/tpch100g/lineitem/lineitem0995 lineitem
.import /local/data/tpch100g/lineitem/lineitem0996 lineitem
.import /local/data/tpch100g/lineitem/lineitem0997 lineitem
.import /local/data/tpch100g/lineitem/lineitem0998 lineitem
.import /local/data/tpch100g/lineitem/lineitem0999 lineitem
.import /local/data/tpch100g/lineitem/lineitem1000 lineitem
.import /local/data/tpch100g/lineitem/lineitem1001 lineitem
.import /local/data/tpch100g/lineitem/lineitem1002 lineitem
.import /local/data/tpch100g/lineitem/lineitem1003 lineitem
.import /local/data/tpch100g/lineitem/lineitem1004 lineitem
.import /local/data/tpch100g/lineitem/lineitem1005 lineitem
.import /local/data/tpch100g/lineitem/lineitem1006 lineitem
.import /local/data/tpch100g/lineitem/lineitem1007 lineitem
.import /local/data/tpch100g/lineitem/lineitem1008 lineitem
.import /local/data/tpch100g/lineitem/lineitem1009 lineitem
.import /local/data/tpch100g/lineitem/lineitem1010 lineitem
.import /local/data/tpch100g/lineitem/lineitem1011 lineitem
.import /local/data/tpch100g/lineitem/lineitem1012 lineitem
.import /local/data/tpch100g/lineitem/lineitem1013 lineitem
.import /local/data/tpch100g/lineitem/lineitem1014 lineitem
.import /local/data/tpch100g/lineitem/lineitem1015 lineitem
.import /local/data/tpch100g/lineitem/lineitem1016 lineitem
.import /local/data/tpch100g/lineitem/lineitem1017 lineitem
.import /local/data/tpch100g/lineitem/lineitem1018 lineitem
.import /local/data/tpch100g/lineitem/lineitem1019 lineitem
.import /local/data/tpch100g/lineitem/lineitem1020 lineitem
.import /local/data/tpch100g/lineitem/lineitem1021 lineitem
.import /local/data/tpch100g/lineitem/lineitem1022 lineitem
.import /local/data/tpch100g/lineitem/lineitem1023 lineitem
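-- customer table chunk imports.
--
-- A repetitive .import list like the ones in this script is typically
-- generated from the chunk files on disk; a minimal POSIX-shell sketch,
-- assuming the /local/data/tpch100g layout used here:
--
--   for f in /local/data/tpch100g/customer/customer[0-9][0-9][0-9][0-9]; do
--     printf '.import %s customer\n' "$f"
--   done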
.import /local/data/tpch100g/customer/customer0000 customer
.import /local/data/tpch100g/customer/customer0001 customer
.import /local/data/tpch100g/customer/customer0002 customer
.import /local/data/tpch100g/customer/customer0003 customer
.import /local/data/tpch100g/customer/customer0004 customer
.import /local/data/tpch100g/customer/customer0005 customer
.import /local/data/tpch100g/customer/customer0006 customer
.import /local/data/tpch100g/customer/customer0007 customer
.import /local/data/tpch100g/customer/customer0008 customer
.import /local/data/tpch100g/customer/customer0009 customer
.import /local/data/tpch100g/customer/customer0010 customer
.import /local/data/tpch100g/customer/customer0011 customer
.import /local/data/tpch100g/customer/customer0012 customer
.import /local/data/tpch100g/customer/customer0013 customer
.import /local/data/tpch100g/customer/customer0014 customer
.import /local/data/tpch100g/customer/customer0015 customer
.import /local/data/tpch100g/customer/customer0016 customer
.import /local/data/tpch100g/customer/customer0017 customer
.import /local/data/tpch100g/customer/customer0018 customer
.import /local/data/tpch100g/customer/customer0019 customer
.import /local/data/tpch100g/customer/customer0020 customer
.import /local/data/tpch100g/customer/customer0021 customer
.import /local/data/tpch100g/customer/customer0022 customer
.import /local/data/tpch100g/customer/customer0023 customer
.import /local/data/tpch100g/customer/customer0024 customer
.import /local/data/tpch100g/customer/customer0025 customer
.import /local/data/tpch100g/customer/customer0026 customer
.import /local/data/tpch100g/customer/customer0027 customer
.import /local/data/tpch100g/customer/customer0028 customer
.import /local/data/tpch100g/customer/customer0029 customer
.import /local/data/tpch100g/customer/customer0030 customer
.import /local/data/tpch100g/customer/customer0031 customer
.import /local/data/tpch100g/customer/customer0032 customer
.import /local/data/tpch100g/customer/customer0033 customer
.import /local/data/tpch100g/customer/customer0034 customer
.import /local/data/tpch100g/customer/customer0035 customer
.import /local/data/tpch100g/customer/customer0036 customer
.import /local/data/tpch100g/customer/customer0037 customer
.import /local/data/tpch100g/customer/customer0038 customer
.import /local/data/tpch100g/customer/customer0039 customer
.import /local/data/tpch100g/customer/customer0040 customer
.import /local/data/tpch100g/customer/customer0041 customer
.import /local/data/tpch100g/customer/customer0042 customer
.import /local/data/tpch100g/customer/customer0043 customer
.import /local/data/tpch100g/customer/customer0044 customer
.import /local/data/tpch100g/customer/customer0045 customer
.import /local/data/tpch100g/customer/customer0046 customer
.import /local/data/tpch100g/customer/customer0047 customer
.import /local/data/tpch100g/customer/customer0048 customer
.import /local/data/tpch100g/customer/customer0049 customer
.import /local/data/tpch100g/customer/customer0050 customer
.import /local/data/tpch100g/customer/customer0051 customer
.import /local/data/tpch100g/customer/customer0052 customer
.import /local/data/tpch100g/customer/customer0053 customer
.import /local/data/tpch100g/customer/customer0054 customer
.import /local/data/tpch100g/customer/customer0055 customer
.import /local/data/tpch100g/customer/customer0056 customer
.import /local/data/tpch100g/customer/customer0057 customer
.import /local/data/tpch100g/customer/customer0058 customer
.import /local/data/tpch100g/customer/customer0059 customer
.import /local/data/tpch100g/customer/customer0060 customer
.import /local/data/tpch100g/customer/customer0061 customer
.import /local/data/tpch100g/customer/customer0062 customer
.import /local/data/tpch100g/customer/customer0063 customer
.import /local/data/tpch100g/customer/customer0064 customer
.import /local/data/tpch100g/customer/customer0065 customer
.import /local/data/tpch100g/customer/customer0066 customer
.import /local/data/tpch100g/customer/customer0067 customer
.import /local/data/tpch100g/customer/customer0068 customer
.import /local/data/tpch100g/customer/customer0069 customer
.import /local/data/tpch100g/customer/customer0070 customer
.import /local/data/tpch100g/customer/customer0071 customer
.import /local/data/tpch100g/customer/customer0072 customer
.import /local/data/tpch100g/customer/customer0073 customer
.import /local/data/tpch100g/customer/customer0074 customer
.import /local/data/tpch100g/customer/customer0075 customer
.import /local/data/tpch100g/customer/customer0076 customer
.import /local/data/tpch100g/customer/customer0077 customer
.import /local/data/tpch100g/customer/customer0078 customer
.import /local/data/tpch100g/customer/customer0079 customer
.import /local/data/tpch100g/customer/customer0080 customer
.import /local/data/tpch100g/customer/customer0081 customer
.import /local/data/tpch100g/customer/customer0082 customer
.import /local/data/tpch100g/customer/customer0083 customer
.import /local/data/tpch100g/customer/customer0084 customer
.import /local/data/tpch100g/customer/customer0085 customer
.import /local/data/tpch100g/customer/customer0086 customer
.import /local/data/tpch100g/customer/customer0087 customer
.import /local/data/tpch100g/customer/customer0088 customer
.import /local/data/tpch100g/customer/customer0089 customer
.import /local/data/tpch100g/customer/customer0090 customer
.import /local/data/tpch100g/customer/customer0091 customer
.import /local/data/tpch100g/customer/customer0092 customer
.import /local/data/tpch100g/customer/customer0093 customer
.import /local/data/tpch100g/customer/customer0094 customer
.import /local/data/tpch100g/customer/customer0095 customer
.import /local/data/tpch100g/customer/customer0096 customer
.import /local/data/tpch100g/customer/customer0097 customer
.import /local/data/tpch100g/customer/customer0098 customer
.import /local/data/tpch100g/customer/customer0099 customer
.import /local/data/tpch100g/customer/customer0100 customer
.import /local/data/tpch100g/customer/customer0101 customer
.import /local/data/tpch100g/customer/customer0102 customer
.import /local/data/tpch100g/customer/customer0103 customer
.import /local/data/tpch100g/customer/customer0104 customer
.import /local/data/tpch100g/customer/customer0105 customer
.import /local/data/tpch100g/customer/customer0106 customer
.import /local/data/tpch100g/customer/customer0107 customer
.import /local/data/tpch100g/customer/customer0108 customer
.import /local/data/tpch100g/customer/customer0109 customer
.import /local/data/tpch100g/customer/customer0110 customer
.import /local/data/tpch100g/customer/customer0111 customer
.import /local/data/tpch100g/customer/customer0112 customer
.import /local/data/tpch100g/customer/customer0113 customer
.import /local/data/tpch100g/customer/customer0114 customer
.import /local/data/tpch100g/customer/customer0115 customer
.import /local/data/tpch100g/customer/customer0116 customer
.import /local/data/tpch100g/customer/customer0117 customer
.import /local/data/tpch100g/customer/customer0118 customer
.import /local/data/tpch100g/customer/customer0119 customer
.import /local/data/tpch100g/customer/customer0120 customer
.import /local/data/tpch100g/customer/customer0121 customer
.import /local/data/tpch100g/customer/customer0122 customer
.import /local/data/tpch100g/customer/customer0123 customer
.import /local/data/tpch100g/customer/customer0124 customer
.import /local/data/tpch100g/customer/customer0125 customer
.import /local/data/tpch100g/customer/customer0126 customer
.import /local/data/tpch100g/customer/customer0127 customer
.import /local/data/tpch100g/customer/customer0128 customer
.import /local/data/tpch100g/customer/customer0129 customer
.import /local/data/tpch100g/customer/customer0130 customer
.import /local/data/tpch100g/customer/customer0131 customer
.import /local/data/tpch100g/customer/customer0132 customer
.import /local/data/tpch100g/customer/customer0133 customer
.import /local/data/tpch100g/customer/customer0134 customer
.import /local/data/tpch100g/customer/customer0135 customer
.import /local/data/tpch100g/customer/customer0136 customer
.import /local/data/tpch100g/customer/customer0137 customer
.import /local/data/tpch100g/customer/customer0138 customer
.import /local/data/tpch100g/customer/customer0139 customer
.import /local/data/tpch100g/customer/customer0140 customer
.import /local/data/tpch100g/customer/customer0141 customer
.import /local/data/tpch100g/customer/customer0142 customer
.import /local/data/tpch100g/customer/customer0143 customer
.import /local/data/tpch100g/customer/customer0144 customer
.import /local/data/tpch100g/customer/customer0145 customer
.import /local/data/tpch100g/customer/customer0146 customer
.import /local/data/tpch100g/customer/customer0147 customer
.import /local/data/tpch100g/customer/customer0148 customer
.import /local/data/tpch100g/customer/customer0149 customer
.import /local/data/tpch100g/customer/customer0150 customer
.import /local/data/tpch100g/customer/customer0151 customer
.import /local/data/tpch100g/customer/customer0152 customer
.import /local/data/tpch100g/customer/customer0153 customer
.import /local/data/tpch100g/customer/customer0154 customer
.import /local/data/tpch100g/customer/customer0155 customer
.import /local/data/tpch100g/customer/customer0156 customer
.import /local/data/tpch100g/customer/customer0157 customer
.import /local/data/tpch100g/customer/customer0158 customer
.import /local/data/tpch100g/customer/customer0159 customer
.import /local/data/tpch100g/customer/customer0160 customer
.import /local/data/tpch100g/customer/customer0161 customer
.import /local/data/tpch100g/customer/customer0162 customer
.import /local/data/tpch100g/customer/customer0163 customer
.import /local/data/tpch100g/customer/customer0164 customer
.import /local/data/tpch100g/customer/customer0165 customer
.import /local/data/tpch100g/customer/customer0166 customer
.import /local/data/tpch100g/customer/customer0167 customer
.import /local/data/tpch100g/customer/customer0168 customer
.import /local/data/tpch100g/customer/customer0169 customer
.import /local/data/tpch100g/customer/customer0170 customer
.import /local/data/tpch100g/customer/customer0171 customer
.import /local/data/tpch100g/customer/customer0172 customer
.import /local/data/tpch100g/customer/customer0173 customer
.import /local/data/tpch100g/customer/customer0174 customer
.import /local/data/tpch100g/customer/customer0175 customer
.import /local/data/tpch100g/customer/customer0176 customer
.import /local/data/tpch100g/customer/customer0177 customer
.import /local/data/tpch100g/customer/customer0178 customer
.import /local/data/tpch100g/customer/customer0179 customer
.import /local/data/tpch100g/customer/customer0180 customer
.import /local/data/tpch100g/customer/customer0181 customer
.import /local/data/tpch100g/customer/customer0182 customer
.import /local/data/tpch100g/customer/customer0183 customer
.import /local/data/tpch100g/customer/customer0184 customer
.import /local/data/tpch100g/customer/customer0185 customer
.import /local/data/tpch100g/customer/customer0186 customer
.import /local/data/tpch100g/customer/customer0187 customer
.import /local/data/tpch100g/customer/customer0188 customer
.import /local/data/tpch100g/customer/customer0189 customer
.import /local/data/tpch100g/customer/customer0190 customer
.import /local/data/tpch100g/customer/customer0191 customer
.import /local/data/tpch100g/customer/customer0192 customer
.import /local/data/tpch100g/customer/customer0193 customer
.import /local/data/tpch100g/customer/customer0194 customer
.import /local/data/tpch100g/customer/customer0195 customer
.import /local/data/tpch100g/customer/customer0196 customer
.import /local/data/tpch100g/customer/customer0197 customer
.import /local/data/tpch100g/customer/customer0198 customer
.import /local/data/tpch100g/customer/customer0199 customer
.import /local/data/tpch100g/customer/customer0200 customer
.import /local/data/tpch100g/customer/customer0201 customer
.import /local/data/tpch100g/customer/customer0202 customer
.import /local/data/tpch100g/customer/customer0203 customer
.import /local/data/tpch100g/customer/customer0204 customer
.import /local/data/tpch100g/customer/customer0205 customer
.import /local/data/tpch100g/customer/customer0206 customer
.import /local/data/tpch100g/customer/customer0207 customer
.import /local/data/tpch100g/customer/customer0208 customer
.import /local/data/tpch100g/customer/customer0209 customer
.import /local/data/tpch100g/customer/customer0210 customer
.import /local/data/tpch100g/customer/customer0211 customer
.import /local/data/tpch100g/customer/customer0212 customer
.import /local/data/tpch100g/customer/customer0213 customer
.import /local/data/tpch100g/customer/customer0214 customer
.import /local/data/tpch100g/customer/customer0215 customer
.import /local/data/tpch100g/customer/customer0216 customer
.import /local/data/tpch100g/customer/customer0217 customer
.import /local/data/tpch100g/customer/customer0218 customer
.import /local/data/tpch100g/customer/customer0219 customer
.import /local/data/tpch100g/customer/customer0220 customer
.import /local/data/tpch100g/customer/customer0221 customer
.import /local/data/tpch100g/customer/customer0222 customer
.import /local/data/tpch100g/customer/customer0223 customer
.import /local/data/tpch100g/customer/customer0224 customer
.import /local/data/tpch100g/customer/customer0225 customer
.import /local/data/tpch100g/customer/customer0226 customer
.import /local/data/tpch100g/customer/customer0227 customer
.import /local/data/tpch100g/customer/customer0228 customer
.import /local/data/tpch100g/customer/customer0229 customer
.import /local/data/tpch100g/customer/customer0230 customer
.import /local/data/tpch100g/customer/customer0231 customer
.import /local/data/tpch100g/customer/customer0232 customer
.import /local/data/tpch100g/customer/customer0233 customer
.import /local/data/tpch100g/customer/customer0234 customer
.import /local/data/tpch100g/customer/customer0235 customer
.import /local/data/tpch100g/customer/customer0236 customer
.import /local/data/tpch100g/customer/customer0237 customer
.import /local/data/tpch100g/customer/customer0238 customer
.import /local/data/tpch100g/customer/customer0239 customer
.import /local/data/tpch100g/customer/customer0240 customer
.import /local/data/tpch100g/customer/customer0241 customer
.import /local/data/tpch100g/customer/customer0242 customer
.import /local/data/tpch100g/customer/customer0243 customer
.import /local/data/tpch100g/customer/customer0244 customer
.import /local/data/tpch100g/customer/customer0245 customer
.import /local/data/tpch100g/customer/customer0246 customer
.import /local/data/tpch100g/customer/customer0247 customer
.import /local/data/tpch100g/customer/customer0248 customer
.import /local/data/tpch100g/customer/customer0249 customer
.import /local/data/tpch100g/customer/customer0250 customer
.import /local/data/tpch100g/customer/customer0251 customer
.import /local/data/tpch100g/customer/customer0252 customer
.import /local/data/tpch100g/customer/customer0253 customer
.import /local/data/tpch100g/customer/customer0254 customer
.import /local/data/tpch100g/customer/customer0255 customer
.import /local/data/tpch100g/customer/customer0256 customer
.import /local/data/tpch100g/customer/customer0257 customer
.import /local/data/tpch100g/customer/customer0258 customer
.import /local/data/tpch100g/customer/customer0259 customer
.import /local/data/tpch100g/customer/customer0260 customer
.import /local/data/tpch100g/customer/customer0261 customer
.import /local/data/tpch100g/customer/customer0262 customer
.import /local/data/tpch100g/customer/customer0263 customer
.import /local/data/tpch100g/customer/customer0264 customer
.import /local/data/tpch100g/customer/customer0265 customer
.import /local/data/tpch100g/customer/customer0266 customer
.import /local/data/tpch100g/customer/customer0267 customer
.import /local/data/tpch100g/customer/customer0268 customer
.import /local/data/tpch100g/customer/customer0269 customer
.import /local/data/tpch100g/customer/customer0270 customer
.import /local/data/tpch100g/customer/customer0271 customer
.import /local/data/tpch100g/customer/customer0272 customer
.import /local/data/tpch100g/customer/customer0273 customer
.import /local/data/tpch100g/customer/customer0274 customer
.import /local/data/tpch100g/customer/customer0275 customer
.import /local/data/tpch100g/customer/customer0276 customer
.import /local/data/tpch100g/customer/customer0277 customer
.import /local/data/tpch100g/customer/customer0278 customer
.import /local/data/tpch100g/customer/customer0279 customer
.import /local/data/tpch100g/customer/customer0280 customer
.import /local/data/tpch100g/customer/customer0281 customer
.import /local/data/tpch100g/customer/customer0282 customer
.import /local/data/tpch100g/customer/customer0283 customer
.import /local/data/tpch100g/customer/customer0284 customer
.import /local/data/tpch100g/customer/customer0285 customer
.import /local/data/tpch100g/customer/customer0286 customer
.import /local/data/tpch100g/customer/customer0287 customer
.import /local/data/tpch100g/customer/customer0288 customer
.import /local/data/tpch100g/customer/customer0289 customer
.import /local/data/tpch100g/customer/customer0290 customer
.import /local/data/tpch100g/customer/customer0291 customer
.import /local/data/tpch100g/customer/customer0292 customer
.import /local/data/tpch100g/customer/customer0293 customer
.import /local/data/tpch100g/customer/customer0294 customer
.import /local/data/tpch100g/customer/customer0295 customer
.import /local/data/tpch100g/customer/customer0296 customer
.import /local/data/tpch100g/customer/customer0297 customer
.import /local/data/tpch100g/customer/customer0298 customer
.import /local/data/tpch100g/customer/customer0299 customer
.import /local/data/tpch100g/customer/customer0300 customer
.import /local/data/tpch100g/customer/customer0301 customer
.import /local/data/tpch100g/customer/customer0302 customer
.import /local/data/tpch100g/customer/customer0303 customer
.import /local/data/tpch100g/customer/customer0304 customer
.import /local/data/tpch100g/customer/customer0305 customer
.import /local/data/tpch100g/customer/customer0306 customer
.import /local/data/tpch100g/customer/customer0307 customer
.import /local/data/tpch100g/customer/customer0308 customer
.import /local/data/tpch100g/customer/customer0309 customer
.import /local/data/tpch100g/customer/customer0310 customer
.import /local/data/tpch100g/customer/customer0311 customer
.import /local/data/tpch100g/customer/customer0312 customer
.import /local/data/tpch100g/customer/customer0313 customer
.import /local/data/tpch100g/customer/customer0314 customer
.import /local/data/tpch100g/customer/customer0315 customer
.import /local/data/tpch100g/customer/customer0316 customer
.import /local/data/tpch100g/customer/customer0317 customer
.import /local/data/tpch100g/customer/customer0318 customer
.import /local/data/tpch100g/customer/customer0319 customer
.import /local/data/tpch100g/customer/customer0320 customer
.import /local/data/tpch100g/customer/customer0321 customer
.import /local/data/tpch100g/customer/customer0322 customer
.import /local/data/tpch100g/customer/customer0323 customer
.import /local/data/tpch100g/customer/customer0324 customer
.import /local/data/tpch100g/customer/customer0325 customer
.import /local/data/tpch100g/customer/customer0326 customer
.import /local/data/tpch100g/customer/customer0327 customer
.import /local/data/tpch100g/customer/customer0328 customer
.import /local/data/tpch100g/customer/customer0329 customer
.import /local/data/tpch100g/customer/customer0330 customer
.import /local/data/tpch100g/customer/customer0331 customer
.import /local/data/tpch100g/customer/customer0332 customer
.import /local/data/tpch100g/customer/customer0333 customer
.import /local/data/tpch100g/customer/customer0334 customer
.import /local/data/tpch100g/customer/customer0335 customer
.import /local/data/tpch100g/customer/customer0336 customer
.import /local/data/tpch100g/customer/customer0337 customer
.import /local/data/tpch100g/customer/customer0338 customer
.import /local/data/tpch100g/customer/customer0339 customer
.import /local/data/tpch100g/customer/customer0340 customer
.import /local/data/tpch100g/customer/customer0341 customer
.import /local/data/tpch100g/customer/customer0342 customer
.import /local/data/tpch100g/customer/customer0343 customer
.import /local/data/tpch100g/customer/customer0344 customer
.import /local/data/tpch100g/customer/customer0345 customer
.import /local/data/tpch100g/customer/customer0346 customer
.import /local/data/tpch100g/customer/customer0347 customer
.import /local/data/tpch100g/customer/customer0348 customer
.import /local/data/tpch100g/customer/customer0349 customer
.import /local/data/tpch100g/customer/customer0350 customer
.import /local/data/tpch100g/customer/customer0351 customer
.import /local/data/tpch100g/customer/customer0352 customer
.import /local/data/tpch100g/customer/customer0353 customer
.import /local/data/tpch100g/customer/customer0354 customer
.import /local/data/tpch100g/customer/customer0355 customer
.import /local/data/tpch100g/customer/customer0356 customer
.import /local/data/tpch100g/customer/customer0357 customer
.import /local/data/tpch100g/customer/customer0358 customer
.import /local/data/tpch100g/customer/customer0359 customer
.import /local/data/tpch100g/customer/customer0360 customer
.import /local/data/tpch100g/customer/customer0361 customer
.import /local/data/tpch100g/customer/customer0362 customer
.import /local/data/tpch100g/customer/customer0363 customer
.import /local/data/tpch100g/customer/customer0364 customer
.import /local/data/tpch100g/customer/customer0365 customer
.import /local/data/tpch100g/customer/customer0366 customer
.import /local/data/tpch100g/customer/customer0367 customer
.import /local/data/tpch100g/customer/customer0368 customer
.import /local/data/tpch100g/customer/customer0369 customer
.import /local/data/tpch100g/customer/customer0370 customer
.import /local/data/tpch100g/customer/customer0371 customer
.import /local/data/tpch100g/customer/customer0372 customer
.import /local/data/tpch100g/customer/customer0373 customer
.import /local/data/tpch100g/customer/customer0374 customer
.import /local/data/tpch100g/customer/customer0375 customer
.import /local/data/tpch100g/customer/customer0376 customer
.import /local/data/tpch100g/customer/customer0377 customer
.import /local/data/tpch100g/customer/customer0378 customer
.import /local/data/tpch100g/customer/customer0379 customer
.import /local/data/tpch100g/customer/customer0380 customer
.import /local/data/tpch100g/customer/customer0381 customer
.import /local/data/tpch100g/customer/customer0382 customer
.import /local/data/tpch100g/customer/customer0383 customer
.import /local/data/tpch100g/customer/customer0384 customer
.import /local/data/tpch100g/customer/customer0385 customer
.import /local/data/tpch100g/customer/customer0386 customer
.import /local/data/tpch100g/customer/customer0387 customer
.import /local/data/tpch100g/customer/customer0388 customer
.import /local/data/tpch100g/customer/customer0389 customer
.import /local/data/tpch100g/customer/customer0390 customer
.import /local/data/tpch100g/customer/customer0391 customer
.import /local/data/tpch100g/customer/customer0392 customer
.import /local/data/tpch100g/customer/customer0393 customer
.import /local/data/tpch100g/customer/customer0394 customer
.import /local/data/tpch100g/customer/customer0395 customer
.import /local/data/tpch100g/customer/customer0396 customer
.import /local/data/tpch100g/customer/customer0397 customer
.import /local/data/tpch100g/customer/customer0398 customer
.import /local/data/tpch100g/customer/customer0399 customer
.import /local/data/tpch100g/customer/customer0400 customer
.import /local/data/tpch100g/customer/customer0401 customer
.import /local/data/tpch100g/customer/customer0402 customer
.import /local/data/tpch100g/customer/customer0403 customer
.import /local/data/tpch100g/customer/customer0404 customer
.import /local/data/tpch100g/customer/customer0405 customer
.import /local/data/tpch100g/customer/customer0406 customer
.import /local/data/tpch100g/customer/customer0407 customer
.import /local/data/tpch100g/customer/customer0408 customer
.import /local/data/tpch100g/customer/customer0409 customer
.import /local/data/tpch100g/customer/customer0410 customer
.import /local/data/tpch100g/customer/customer0411 customer
.import /local/data/tpch100g/customer/customer0412 customer
.import /local/data/tpch100g/customer/customer0413 customer
.import /local/data/tpch100g/customer/customer0414 customer
.import /local/data/tpch100g/customer/customer0415 customer
.import /local/data/tpch100g/customer/customer0416 customer
.import /local/data/tpch100g/customer/customer0417 customer
.import /local/data/tpch100g/customer/customer0418 customer
.import /local/data/tpch100g/customer/customer0419 customer
.import /local/data/tpch100g/customer/customer0420 customer
.import /local/data/tpch100g/customer/customer0421 customer
.import /local/data/tpch100g/customer/customer0422 customer
.import /local/data/tpch100g/customer/customer0423 customer
.import /local/data/tpch100g/customer/customer0424 customer
.import /local/data/tpch100g/customer/customer0425 customer
.import /local/data/tpch100g/customer/customer0426 customer
.import /local/data/tpch100g/customer/customer0427 customer
.import /local/data/tpch100g/customer/customer0428 customer
.import /local/data/tpch100g/customer/customer0429 customer
.import /local/data/tpch100g/customer/customer0430 customer
.import /local/data/tpch100g/customer/customer0431 customer
.import /local/data/tpch100g/customer/customer0432 customer
.import /local/data/tpch100g/customer/customer0433 customer
.import /local/data/tpch100g/customer/customer0434 customer
.import /local/data/tpch100g/customer/customer0435 customer
.import /local/data/tpch100g/customer/customer0436 customer
.import /local/data/tpch100g/customer/customer0437 customer
.import /local/data/tpch100g/customer/customer0438 customer
.import /local/data/tpch100g/customer/customer0439 customer
.import /local/data/tpch100g/customer/customer0440 customer
.import /local/data/tpch100g/customer/customer0441 customer
.import /local/data/tpch100g/customer/customer0442 customer
.import /local/data/tpch100g/customer/customer0443 customer
.import /local/data/tpch100g/customer/customer0444 customer
.import /local/data/tpch100g/customer/customer0445 customer
.import /local/data/tpch100g/customer/customer0446 customer
.import /local/data/tpch100g/customer/customer0447 customer
.import /local/data/tpch100g/customer/customer0448 customer
.import /local/data/tpch100g/customer/customer0449 customer
.import /local/data/tpch100g/customer/customer0450 customer
.import /local/data/tpch100g/customer/customer0451 customer
.import /local/data/tpch100g/customer/customer0452 customer
.import /local/data/tpch100g/customer/customer0453 customer
.import /local/data/tpch100g/customer/customer0454 customer
.import /local/data/tpch100g/customer/customer0455 customer
.import /local/data/tpch100g/customer/customer0456 customer
.import /local/data/tpch100g/customer/customer0457 customer
.import /local/data/tpch100g/customer/customer0458 customer
.import /local/data/tpch100g/customer/customer0459 customer
.import /local/data/tpch100g/customer/customer0460 customer
.import /local/data/tpch100g/customer/customer0461 customer
.import /local/data/tpch100g/customer/customer0462 customer
.import /local/data/tpch100g/customer/customer0463 customer
.import /local/data/tpch100g/customer/customer0464 customer
.import /local/data/tpch100g/customer/customer0465 customer
.import /local/data/tpch100g/customer/customer0466 customer
.import /local/data/tpch100g/customer/customer0467 customer
.import /local/data/tpch100g/customer/customer0468 customer
.import /local/data/tpch100g/customer/customer0469 customer
.import /local/data/tpch100g/customer/customer0470 customer
.import /local/data/tpch100g/customer/customer0471 customer
.import /local/data/tpch100g/customer/customer0472 customer
.import /local/data/tpch100g/customer/customer0473 customer
.import /local/data/tpch100g/customer/customer0474 customer
.import /local/data/tpch100g/customer/customer0475 customer
.import /local/data/tpch100g/customer/customer0476 customer
.import /local/data/tpch100g/customer/customer0477 customer
.import /local/data/tpch100g/customer/customer0478 customer
.import /local/data/tpch100g/customer/customer0479 customer
.import /local/data/tpch100g/customer/customer0480 customer
.import /local/data/tpch100g/customer/customer0481 customer
.import /local/data/tpch100g/customer/customer0482 customer
.import /local/data/tpch100g/customer/customer0483 customer
.import /local/data/tpch100g/customer/customer0484 customer
.import /local/data/tpch100g/customer/customer0485 customer
.import /local/data/tpch100g/customer/customer0486 customer
.import /local/data/tpch100g/customer/customer0487 customer
.import /local/data/tpch100g/customer/customer0488 customer
.import /local/data/tpch100g/customer/customer0489 customer
.import /local/data/tpch100g/customer/customer0490 customer
.import /local/data/tpch100g/customer/customer0491 customer
.import /local/data/tpch100g/customer/customer0492 customer
.import /local/data/tpch100g/customer/customer0493 customer
.import /local/data/tpch100g/customer/customer0494 customer
.import /local/data/tpch100g/customer/customer0495 customer
.import /local/data/tpch100g/customer/customer0496 customer
.import /local/data/tpch100g/customer/customer0497 customer
.import /local/data/tpch100g/customer/customer0498 customer
.import /local/data/tpch100g/customer/customer0499 customer
.import /local/data/tpch100g/customer/customer0500 customer
.import /local/data/tpch100g/customer/customer0501 customer
.import /local/data/tpch100g/customer/customer0502 customer
.import /local/data/tpch100g/customer/customer0503 customer
.import /local/data/tpch100g/customer/customer0504 customer
.import /local/data/tpch100g/customer/customer0505 customer
.import /local/data/tpch100g/customer/customer0506 customer
.import /local/data/tpch100g/customer/customer0507 customer
.import /local/data/tpch100g/customer/customer0508 customer
.import /local/data/tpch100g/customer/customer0509 customer
.import /local/data/tpch100g/customer/customer0510 customer
.import /local/data/tpch100g/customer/customer0511 customer
.import /local/data/tpch100g/customer/customer0512 customer
.import /local/data/tpch100g/customer/customer0513 customer
.import /local/data/tpch100g/customer/customer0514 customer
.import /local/data/tpch100g/customer/customer0515 customer
.import /local/data/tpch100g/customer/customer0516 customer
.import /local/data/tpch100g/customer/customer0517 customer
.import /local/data/tpch100g/customer/customer0518 customer
.import /local/data/tpch100g/customer/customer0519 customer
.import /local/data/tpch100g/customer/customer0520 customer
.import /local/data/tpch100g/customer/customer0521 customer
.import /local/data/tpch100g/customer/customer0522 customer
.import /local/data/tpch100g/customer/customer0523 customer
.import /local/data/tpch100g/customer/customer0524 customer
.import /local/data/tpch100g/customer/customer0525 customer
.import /local/data/tpch100g/customer/customer0526 customer
.import /local/data/tpch100g/customer/customer0527 customer
.import /local/data/tpch100g/customer/customer0528 customer
.import /local/data/tpch100g/customer/customer0529 customer
.import /local/data/tpch100g/customer/customer0530 customer
.import /local/data/tpch100g/customer/customer0531 customer
.import /local/data/tpch100g/customer/customer0532 customer
.import /local/data/tpch100g/customer/customer0533 customer
.import /local/data/tpch100g/customer/customer0534 customer
.import /local/data/tpch100g/customer/customer0535 customer
.import /local/data/tpch100g/customer/customer0536 customer
.import /local/data/tpch100g/customer/customer0537 customer
.import /local/data/tpch100g/customer/customer0538 customer
.import /local/data/tpch100g/customer/customer0539 customer
.import /local/data/tpch100g/customer/customer0540 customer
.import /local/data/tpch100g/customer/customer0541 customer
.import /local/data/tpch100g/customer/customer0542 customer
.import /local/data/tpch100g/customer/customer0543 customer
.import /local/data/tpch100g/customer/customer0544 customer
.import /local/data/tpch100g/customer/customer0545 customer
.import /local/data/tpch100g/customer/customer0546 customer
.import /local/data/tpch100g/customer/customer0547 customer
.import /local/data/tpch100g/customer/customer0548 customer
.import /local/data/tpch100g/customer/customer0549 customer
.import /local/data/tpch100g/customer/customer0550 customer
.import /local/data/tpch100g/customer/customer0551 customer
.import /local/data/tpch100g/customer/customer0552 customer
.import /local/data/tpch100g/customer/customer0553 customer
.import /local/data/tpch100g/customer/customer0554 customer
.import /local/data/tpch100g/customer/customer0555 customer
.import /local/data/tpch100g/customer/customer0556 customer
.import /local/data/tpch100g/customer/customer0557 customer
.import /local/data/tpch100g/customer/customer0558 customer
.import /local/data/tpch100g/customer/customer0559 customer
.import /local/data/tpch100g/customer/customer0560 customer
.import /local/data/tpch100g/customer/customer0561 customer
.import /local/data/tpch100g/customer/customer0562 customer
.import /local/data/tpch100g/customer/customer0563 customer
.import /local/data/tpch100g/customer/customer0564 customer
.import /local/data/tpch100g/customer/customer0565 customer
.import /local/data/tpch100g/customer/customer0566 customer
.import /local/data/tpch100g/customer/customer0567 customer
.import /local/data/tpch100g/customer/customer0568 customer
.import /local/data/tpch100g/customer/customer0569 customer
.import /local/data/tpch100g/customer/customer0570 customer
.import /local/data/tpch100g/customer/customer0571 customer
.import /local/data/tpch100g/customer/customer0572 customer
.import /local/data/tpch100g/customer/customer0573 customer
.import /local/data/tpch100g/customer/customer0574 customer
.import /local/data/tpch100g/customer/customer0575 customer
.import /local/data/tpch100g/customer/customer0576 customer
.import /local/data/tpch100g/customer/customer0577 customer
.import /local/data/tpch100g/customer/customer0578 customer
.import /local/data/tpch100g/customer/customer0579 customer
.import /local/data/tpch100g/customer/customer0580 customer
.import /local/data/tpch100g/customer/customer0581 customer
.import /local/data/tpch100g/customer/customer0582 customer
.import /local/data/tpch100g/customer/customer0583 customer
.import /local/data/tpch100g/customer/customer0584 customer
.import /local/data/tpch100g/customer/customer0585 customer
.import /local/data/tpch100g/customer/customer0586 customer
.import /local/data/tpch100g/customer/customer0587 customer
.import /local/data/tpch100g/customer/customer0588 customer
.import /local/data/tpch100g/customer/customer0589 customer
.import /local/data/tpch100g/customer/customer0590 customer
.import /local/data/tpch100g/customer/customer0591 customer
.import /local/data/tpch100g/customer/customer0592 customer
.import /local/data/tpch100g/customer/customer0593 customer
.import /local/data/tpch100g/customer/customer0594 customer
.import /local/data/tpch100g/customer/customer0595 customer
.import /local/data/tpch100g/customer/customer0596 customer
.import /local/data/tpch100g/customer/customer0597 customer
.import /local/data/tpch100g/customer/customer0598 customer
.import /local/data/tpch100g/customer/customer0599 customer
.import /local/data/tpch100g/customer/customer0600 customer
.import /local/data/tpch100g/customer/customer0601 customer
.import /local/data/tpch100g/customer/customer0602 customer
.import /local/data/tpch100g/customer/customer0603 customer
.import /local/data/tpch100g/customer/customer0604 customer
.import /local/data/tpch100g/customer/customer0605 customer
.import /local/data/tpch100g/customer/customer0606 customer
.import /local/data/tpch100g/customer/customer0607 customer
.import /local/data/tpch100g/customer/customer0608 customer
.import /local/data/tpch100g/customer/customer0609 customer
.import /local/data/tpch100g/customer/customer0610 customer
.import /local/data/tpch100g/customer/customer0611 customer
.import /local/data/tpch100g/customer/customer0612 customer
.import /local/data/tpch100g/customer/customer0613 customer
.import /local/data/tpch100g/customer/customer0614 customer
.import /local/data/tpch100g/customer/customer0615 customer
.import /local/data/tpch100g/customer/customer0616 customer
.import /local/data/tpch100g/customer/customer0617 customer
.import /local/data/tpch100g/customer/customer0618 customer
.import /local/data/tpch100g/customer/customer0619 customer
.import /local/data/tpch100g/customer/customer0620 customer
.import /local/data/tpch100g/customer/customer0621 customer
.import /local/data/tpch100g/customer/customer0622 customer
.import /local/data/tpch100g/customer/customer0623 customer
.import /local/data/tpch100g/customer/customer0624 customer
.import /local/data/tpch100g/customer/customer0625 customer
.import /local/data/tpch100g/customer/customer0626 customer
.import /local/data/tpch100g/customer/customer0627 customer
.import /local/data/tpch100g/customer/customer0628 customer
.import /local/data/tpch100g/customer/customer0629 customer
.import /local/data/tpch100g/customer/customer0630 customer
.import /local/data/tpch100g/customer/customer0631 customer
.import /local/data/tpch100g/customer/customer0632 customer
.import /local/data/tpch100g/customer/customer0633 customer
.import /local/data/tpch100g/customer/customer0634 customer
.import /local/data/tpch100g/customer/customer0635 customer
.import /local/data/tpch100g/customer/customer0636 customer
.import /local/data/tpch100g/customer/customer0637 customer
.import /local/data/tpch100g/customer/customer0638 customer
.import /local/data/tpch100g/customer/customer0639 customer
.import /local/data/tpch100g/customer/customer0640 customer
.import /local/data/tpch100g/customer/customer0641 customer
.import /local/data/tpch100g/customer/customer0642 customer
.import /local/data/tpch100g/customer/customer0643 customer
.import /local/data/tpch100g/customer/customer0644 customer
.import /local/data/tpch100g/customer/customer0645 customer
.import /local/data/tpch100g/customer/customer0646 customer
.import /local/data/tpch100g/customer/customer0647 customer
.import /local/data/tpch100g/customer/customer0648 customer
.import /local/data/tpch100g/customer/customer0649 customer
.import /local/data/tpch100g/customer/customer0650 customer
.import /local/data/tpch100g/customer/customer0651 customer
.import /local/data/tpch100g/customer/customer0652 customer
.import /local/data/tpch100g/customer/customer0653 customer
.import /local/data/tpch100g/customer/customer0654 customer
.import /local/data/tpch100g/customer/customer0655 customer
.import /local/data/tpch100g/customer/customer0656 customer
.import /local/data/tpch100g/customer/customer0657 customer
.import /local/data/tpch100g/customer/customer0658 customer
.import /local/data/tpch100g/customer/customer0659 customer
.import /local/data/tpch100g/customer/customer0660 customer
.import /local/data/tpch100g/customer/customer0661 customer
.import /local/data/tpch100g/customer/customer0662 customer
.import /local/data/tpch100g/customer/customer0663 customer
.import /local/data/tpch100g/customer/customer0664 customer
.import /local/data/tpch100g/customer/customer0665 customer
.import /local/data/tpch100g/customer/customer0666 customer
.import /local/data/tpch100g/customer/customer0667 customer
.import /local/data/tpch100g/customer/customer0668 customer
.import /local/data/tpch100g/customer/customer0669 customer
.import /local/data/tpch100g/customer/customer0670 customer
.import /local/data/tpch100g/customer/customer0671 customer
.import /local/data/tpch100g/customer/customer0672 customer
.import /local/data/tpch100g/customer/customer0673 customer
.import /local/data/tpch100g/customer/customer0674 customer
.import /local/data/tpch100g/customer/customer0675 customer
.import /local/data/tpch100g/customer/customer0676 customer
.import /local/data/tpch100g/customer/customer0677 customer
.import /local/data/tpch100g/customer/customer0678 customer
.import /local/data/tpch100g/customer/customer0679 customer
.import /local/data/tpch100g/customer/customer0680 customer
.import /local/data/tpch100g/customer/customer0681 customer
.import /local/data/tpch100g/customer/customer0682 customer
.import /local/data/tpch100g/customer/customer0683 customer
.import /local/data/tpch100g/customer/customer0684 customer
.import /local/data/tpch100g/customer/customer0685 customer
.import /local/data/tpch100g/customer/customer0686 customer
.import /local/data/tpch100g/customer/customer0687 customer
.import /local/data/tpch100g/customer/customer0688 customer
.import /local/data/tpch100g/customer/customer0689 customer
.import /local/data/tpch100g/customer/customer0690 customer
.import /local/data/tpch100g/customer/customer0691 customer
.import /local/data/tpch100g/customer/customer0692 customer
.import /local/data/tpch100g/customer/customer0693 customer
.import /local/data/tpch100g/customer/customer0694 customer
.import /local/data/tpch100g/customer/customer0695 customer
.import /local/data/tpch100g/customer/customer0696 customer
.import /local/data/tpch100g/customer/customer0697 customer
.import /local/data/tpch100g/customer/customer0698 customer
.import /local/data/tpch100g/customer/customer0699 customer
.import /local/data/tpch100g/customer/customer0700 customer
.import /local/data/tpch100g/customer/customer0701 customer
.import /local/data/tpch100g/customer/customer0702 customer
.import /local/data/tpch100g/customer/customer0703 customer
.import /local/data/tpch100g/customer/customer0704 customer
.import /local/data/tpch100g/customer/customer0705 customer
.import /local/data/tpch100g/customer/customer0706 customer
.import /local/data/tpch100g/customer/customer0707 customer
.import /local/data/tpch100g/customer/customer0708 customer
.import /local/data/tpch100g/customer/customer0709 customer
.import /local/data/tpch100g/customer/customer0710 customer
.import /local/data/tpch100g/customer/customer0711 customer
.import /local/data/tpch100g/customer/customer0712 customer
.import /local/data/tpch100g/customer/customer0713 customer
.import /local/data/tpch100g/customer/customer0714 customer
.import /local/data/tpch100g/customer/customer0715 customer
.import /local/data/tpch100g/customer/customer0716 customer
.import /local/data/tpch100g/customer/customer0717 customer
.import /local/data/tpch100g/customer/customer0718 customer
.import /local/data/tpch100g/customer/customer0719 customer
.import /local/data/tpch100g/customer/customer0720 customer
.import /local/data/tpch100g/customer/customer0721 customer
.import /local/data/tpch100g/customer/customer0722 customer
.import /local/data/tpch100g/customer/customer0723 customer
.import /local/data/tpch100g/customer/customer0724 customer
.import /local/data/tpch100g/customer/customer0725 customer
.import /local/data/tpch100g/customer/customer0726 customer
.import /local/data/tpch100g/customer/customer0727 customer
.import /local/data/tpch100g/customer/customer0728 customer
.import /local/data/tpch100g/customer/customer0729 customer
.import /local/data/tpch100g/customer/customer0730 customer
.import /local/data/tpch100g/customer/customer0731 customer
.import /local/data/tpch100g/customer/customer0732 customer
.import /local/data/tpch100g/customer/customer0733 customer
.import /local/data/tpch100g/customer/customer0734 customer
.import /local/data/tpch100g/customer/customer0735 customer
.import /local/data/tpch100g/customer/customer0736 customer
.import /local/data/tpch100g/customer/customer0737 customer
.import /local/data/tpch100g/customer/customer0738 customer
.import /local/data/tpch100g/customer/customer0739 customer
.import /local/data/tpch100g/customer/customer0740 customer
.import /local/data/tpch100g/customer/customer0741 customer
.import /local/data/tpch100g/customer/customer0742 customer
.import /local/data/tpch100g/customer/customer0743 customer
.import /local/data/tpch100g/customer/customer0744 customer
.import /local/data/tpch100g/customer/customer0745 customer
.import /local/data/tpch100g/customer/customer0746 customer
.import /local/data/tpch100g/customer/customer0747 customer
.import /local/data/tpch100g/customer/customer0748 customer
.import /local/data/tpch100g/customer/customer0749 customer
.import /local/data/tpch100g/customer/customer0750 customer
.import /local/data/tpch100g/customer/customer0751 customer
.import /local/data/tpch100g/customer/customer0752 customer
.import /local/data/tpch100g/customer/customer0753 customer
.import /local/data/tpch100g/customer/customer0754 customer
.import /local/data/tpch100g/customer/customer0755 customer
.import /local/data/tpch100g/customer/customer0756 customer
.import /local/data/tpch100g/customer/customer0757 customer
.import /local/data/tpch100g/customer/customer0758 customer
.import /local/data/tpch100g/customer/customer0759 customer
.import /local/data/tpch100g/customer/customer0760 customer
.import /local/data/tpch100g/customer/customer0761 customer
.import /local/data/tpch100g/customer/customer0762 customer
.import /local/data/tpch100g/customer/customer0763 customer
.import /local/data/tpch100g/customer/customer0764 customer
.import /local/data/tpch100g/customer/customer0765 customer
.import /local/data/tpch100g/customer/customer0766 customer
.import /local/data/tpch100g/customer/customer0767 customer
.import /local/data/tpch100g/customer/customer0768 customer
.import /local/data/tpch100g/customer/customer0769 customer
.import /local/data/tpch100g/customer/customer0770 customer
.import /local/data/tpch100g/customer/customer0771 customer
.import /local/data/tpch100g/customer/customer0772 customer
.import /local/data/tpch100g/customer/customer0773 customer
.import /local/data/tpch100g/customer/customer0774 customer
.import /local/data/tpch100g/customer/customer0775 customer
.import /local/data/tpch100g/customer/customer0776 customer
.import /local/data/tpch100g/customer/customer0777 customer
.import /local/data/tpch100g/customer/customer0778 customer
.import /local/data/tpch100g/customer/customer0779 customer
.import /local/data/tpch100g/customer/customer0780 customer
.import /local/data/tpch100g/customer/customer0781 customer
.import /local/data/tpch100g/customer/customer0782 customer
.import /local/data/tpch100g/customer/customer0783 customer
.import /local/data/tpch100g/customer/customer0784 customer
.import /local/data/tpch100g/customer/customer0785 customer
.import /local/data/tpch100g/customer/customer0786 customer
.import /local/data/tpch100g/customer/customer0787 customer
.import /local/data/tpch100g/customer/customer0788 customer
.import /local/data/tpch100g/customer/customer0789 customer
.import /local/data/tpch100g/customer/customer0790 customer
.import /local/data/tpch100g/customer/customer0791 customer
.import /local/data/tpch100g/customer/customer0792 customer
.import /local/data/tpch100g/customer/customer0793 customer
.import /local/data/tpch100g/customer/customer0794 customer
.import /local/data/tpch100g/customer/customer0795 customer
.import /local/data/tpch100g/customer/customer0796 customer
.import /local/data/tpch100g/customer/customer0797 customer
.import /local/data/tpch100g/customer/customer0798 customer
.import /local/data/tpch100g/customer/customer0799 customer
.import /local/data/tpch100g/customer/customer0800 customer
.import /local/data/tpch100g/customer/customer0801 customer
.import /local/data/tpch100g/customer/customer0802 customer
.import /local/data/tpch100g/customer/customer0803 customer
.import /local/data/tpch100g/customer/customer0804 customer
.import /local/data/tpch100g/customer/customer0805 customer
.import /local/data/tpch100g/customer/customer0806 customer
.import /local/data/tpch100g/customer/customer0807 customer
.import /local/data/tpch100g/customer/customer0808 customer
.import /local/data/tpch100g/customer/customer0809 customer
.import /local/data/tpch100g/customer/customer0810 customer
.import /local/data/tpch100g/customer/customer0811 customer
.import /local/data/tpch100g/customer/customer0812 customer
.import /local/data/tpch100g/customer/customer0813 customer
.import /local/data/tpch100g/customer/customer0814 customer
.import /local/data/tpch100g/customer/customer0815 customer
.import /local/data/tpch100g/customer/customer0816 customer
.import /local/data/tpch100g/customer/customer0817 customer
.import /local/data/tpch100g/customer/customer0818 customer
.import /local/data/tpch100g/customer/customer0819 customer
.import /local/data/tpch100g/customer/customer0820 customer
.import /local/data/tpch100g/customer/customer0821 customer
.import /local/data/tpch100g/customer/customer0822 customer
.import /local/data/tpch100g/customer/customer0823 customer
.import /local/data/tpch100g/customer/customer0824 customer
.import /local/data/tpch100g/customer/customer0825 customer
.import /local/data/tpch100g/customer/customer0826 customer
.import /local/data/tpch100g/customer/customer0827 customer
.import /local/data/tpch100g/customer/customer0828 customer
.import /local/data/tpch100g/customer/customer0829 customer
.import /local/data/tpch100g/customer/customer0830 customer
.import /local/data/tpch100g/customer/customer0831 customer
.import /local/data/tpch100g/customer/customer0832 customer
.import /local/data/tpch100g/customer/customer0833 customer
.import /local/data/tpch100g/customer/customer0834 customer
.import /local/data/tpch100g/customer/customer0835 customer
.import /local/data/tpch100g/customer/customer0836 customer
.import /local/data/tpch100g/customer/customer0837 customer
.import /local/data/tpch100g/customer/customer0838 customer
.import /local/data/tpch100g/customer/customer0839 customer
.import /local/data/tpch100g/customer/customer0840 customer
.import /local/data/tpch100g/customer/customer0841 customer
.import /local/data/tpch100g/customer/customer0842 customer
.import /local/data/tpch100g/customer/customer0843 customer
.import /local/data/tpch100g/customer/customer0844 customer
.import /local/data/tpch100g/customer/customer0845 customer
.import /local/data/tpch100g/customer/customer0846 customer
.import /local/data/tpch100g/customer/customer0847 customer
.import /local/data/tpch100g/customer/customer0848 customer
.import /local/data/tpch100g/customer/customer0849 customer
.import /local/data/tpch100g/customer/customer0850 customer
.import /local/data/tpch100g/customer/customer0851 customer
.import /local/data/tpch100g/customer/customer0852 customer
.import /local/data/tpch100g/customer/customer0853 customer
.import /local/data/tpch100g/customer/customer0854 customer
.import /local/data/tpch100g/customer/customer0855 customer
.import /local/data/tpch100g/customer/customer0856 customer
.import /local/data/tpch100g/customer/customer0857 customer
.import /local/data/tpch100g/customer/customer0858 customer
.import /local/data/tpch100g/customer/customer0859 customer
.import /local/data/tpch100g/customer/customer0860 customer
.import /local/data/tpch100g/customer/customer0861 customer
.import /local/data/tpch100g/customer/customer0862 customer
.import /local/data/tpch100g/customer/customer0863 customer
.import /local/data/tpch100g/customer/customer0864 customer
.import /local/data/tpch100g/customer/customer0865 customer
.import /local/data/tpch100g/customer/customer0866 customer
.import /local/data/tpch100g/customer/customer0867 customer
.import /local/data/tpch100g/customer/customer0868 customer
.import /local/data/tpch100g/customer/customer0869 customer
.import /local/data/tpch100g/customer/customer0870 customer
.import /local/data/tpch100g/customer/customer0871 customer
.import /local/data/tpch100g/customer/customer0872 customer
.import /local/data/tpch100g/customer/customer0873 customer
.import /local/data/tpch100g/customer/customer0874 customer
.import /local/data/tpch100g/customer/customer0875 customer
.import /local/data/tpch100g/customer/customer0876 customer
.import /local/data/tpch100g/customer/customer0877 customer
.import /local/data/tpch100g/customer/customer0878 customer
.import /local/data/tpch100g/customer/customer0879 customer
.import /local/data/tpch100g/customer/customer0880 customer
.import /local/data/tpch100g/customer/customer0881 customer
.import /local/data/tpch100g/customer/customer0882 customer
.import /local/data/tpch100g/customer/customer0883 customer
.import /local/data/tpch100g/customer/customer0884 customer
.import /local/data/tpch100g/customer/customer0885 customer
.import /local/data/tpch100g/customer/customer0886 customer
.import /local/data/tpch100g/customer/customer0887 customer
.import /local/data/tpch100g/customer/customer0888 customer
.import /local/data/tpch100g/customer/customer0889 customer
.import /local/data/tpch100g/customer/customer0890 customer
.import /local/data/tpch100g/customer/customer0891 customer
.import /local/data/tpch100g/customer/customer0892 customer
.import /local/data/tpch100g/customer/customer0893 customer
.import /local/data/tpch100g/customer/customer0894 customer
.import /local/data/tpch100g/customer/customer0895 customer
.import /local/data/tpch100g/customer/customer0896 customer
.import /local/data/tpch100g/customer/customer0897 customer
.import /local/data/tpch100g/customer/customer0898 customer
.import /local/data/tpch100g/customer/customer0899 customer
.import /local/data/tpch100g/customer/customer0900 customer
.import /local/data/tpch100g/customer/customer0901 customer
.import /local/data/tpch100g/customer/customer0902 customer
.import /local/data/tpch100g/customer/customer0903 customer
.import /local/data/tpch100g/customer/customer0904 customer
.import /local/data/tpch100g/customer/customer0905 customer
.import /local/data/tpch100g/customer/customer0906 customer
.import /local/data/tpch100g/customer/customer0907 customer
.import /local/data/tpch100g/customer/customer0908 customer
.import /local/data/tpch100g/customer/customer0909 customer
.import /local/data/tpch100g/customer/customer0910 customer
.import /local/data/tpch100g/customer/customer0911 customer
.import /local/data/tpch100g/customer/customer0912 customer
.import /local/data/tpch100g/customer/customer0913 customer
.import /local/data/tpch100g/customer/customer0914 customer
.import /local/data/tpch100g/customer/customer0915 customer
.import /local/data/tpch100g/customer/customer0916 customer
.import /local/data/tpch100g/customer/customer0917 customer
.import /local/data/tpch100g/customer/customer0918 customer
.import /local/data/tpch100g/customer/customer0919 customer
.import /local/data/tpch100g/customer/customer0920 customer
.import /local/data/tpch100g/customer/customer0921 customer
.import /local/data/tpch100g/customer/customer0922 customer
.import /local/data/tpch100g/customer/customer0923 customer
.import /local/data/tpch100g/customer/customer0924 customer
.import /local/data/tpch100g/customer/customer0925 customer
.import /local/data/tpch100g/customer/customer0926 customer
.import /local/data/tpch100g/customer/customer0927 customer
.import /local/data/tpch100g/customer/customer0928 customer
.import /local/data/tpch100g/customer/customer0929 customer
.import /local/data/tpch100g/customer/customer0930 customer
.import /local/data/tpch100g/customer/customer0931 customer
.import /local/data/tpch100g/customer/customer0932 customer
.import /local/data/tpch100g/customer/customer0933 customer
.import /local/data/tpch100g/customer/customer0934 customer
.import /local/data/tpch100g/customer/customer0935 customer
.import /local/data/tpch100g/customer/customer0936 customer
.import /local/data/tpch100g/customer/customer0937 customer
.import /local/data/tpch100g/customer/customer0938 customer
.import /local/data/tpch100g/customer/customer0939 customer
.import /local/data/tpch100g/customer/customer0940 customer
.import /local/data/tpch100g/customer/customer0941 customer
.import /local/data/tpch100g/customer/customer0942 customer
.import /local/data/tpch100g/customer/customer0943 customer
.import /local/data/tpch100g/customer/customer0944 customer
.import /local/data/tpch100g/customer/customer0945 customer
.import /local/data/tpch100g/customer/customer0946 customer
.import /local/data/tpch100g/customer/customer0947 customer
.import /local/data/tpch100g/customer/customer0948 customer
.import /local/data/tpch100g/customer/customer0949 customer
.import /local/data/tpch100g/customer/customer0950 customer
.import /local/data/tpch100g/customer/customer0951 customer
.import /local/data/tpch100g/customer/customer0952 customer
.import /local/data/tpch100g/customer/customer0953 customer
.import /local/data/tpch100g/customer/customer0954 customer
.import /local/data/tpch100g/customer/customer0955 customer
.import /local/data/tpch100g/customer/customer0956 customer
.import /local/data/tpch100g/customer/customer0957 customer
.import /local/data/tpch100g/customer/customer0958 customer
.import /local/data/tpch100g/customer/customer0959 customer
.import /local/data/tpch100g/customer/customer0960 customer
.import /local/data/tpch100g/customer/customer0961 customer
.import /local/data/tpch100g/customer/customer0962 customer
.import /local/data/tpch100g/customer/customer0963 customer
.import /local/data/tpch100g/customer/customer0964 customer
.import /local/data/tpch100g/customer/customer0965 customer
.import /local/data/tpch100g/customer/customer0966 customer
.import /local/data/tpch100g/customer/customer0967 customer
.import /local/data/tpch100g/customer/customer0968 customer
.import /local/data/tpch100g/customer/customer0969 customer
.import /local/data/tpch100g/customer/customer0970 customer
.import /local/data/tpch100g/customer/customer0971 customer
.import /local/data/tpch100g/customer/customer0972 customer
.import /local/data/tpch100g/customer/customer0973 customer
.import /local/data/tpch100g/customer/customer0974 customer
.import /local/data/tpch100g/customer/customer0975 customer
.import /local/data/tpch100g/customer/customer0976 customer
.import /local/data/tpch100g/customer/customer0977 customer
.import /local/data/tpch100g/customer/customer0978 customer
.import /local/data/tpch100g/customer/customer0979 customer
.import /local/data/tpch100g/customer/customer0980 customer
.import /local/data/tpch100g/customer/customer0981 customer
.import /local/data/tpch100g/customer/customer0982 customer
.import /local/data/tpch100g/customer/customer0983 customer
.import /local/data/tpch100g/customer/customer0984 customer
.import /local/data/tpch100g/customer/customer0985 customer
.import /local/data/tpch100g/customer/customer0986 customer
.import /local/data/tpch100g/customer/customer0987 customer
.import /local/data/tpch100g/customer/customer0988 customer
.import /local/data/tpch100g/customer/customer0989 customer
.import /local/data/tpch100g/customer/customer0990 customer
.import /local/data/tpch100g/customer/customer0991 customer
.import /local/data/tpch100g/customer/customer0992 customer
.import /local/data/tpch100g/customer/customer0993 customer
.import /local/data/tpch100g/customer/customer0994 customer
.import /local/data/tpch100g/customer/customer0995 customer
.import /local/data/tpch100g/customer/customer0996 customer
.import /local/data/tpch100g/customer/customer0997 customer
.import /local/data/tpch100g/customer/customer0998 customer
.import /local/data/tpch100g/customer/customer0999 customer
.import /local/data/tpch100g/customer/customer1000 customer
.import /local/data/tpch100g/customer/customer1001 customer
.import /local/data/tpch100g/customer/customer1002 customer
.import /local/data/tpch100g/customer/customer1003 customer
.import /local/data/tpch100g/customer/customer1004 customer
.import /local/data/tpch100g/customer/customer1005 customer
.import /local/data/tpch100g/customer/customer1006 customer
.import /local/data/tpch100g/customer/customer1007 customer
.import /local/data/tpch100g/customer/customer1008 customer
.import /local/data/tpch100g/customer/customer1009 customer
.import /local/data/tpch100g/customer/customer1010 customer
.import /local/data/tpch100g/customer/customer1011 customer
.import /local/data/tpch100g/customer/customer1012 customer
.import /local/data/tpch100g/customer/customer1013 customer
.import /local/data/tpch100g/customer/customer1014 customer
.import /local/data/tpch100g/customer/customer1015 customer
.import /local/data/tpch100g/customer/customer1016 customer
.import /local/data/tpch100g/customer/customer1017 customer
.import /local/data/tpch100g/customer/customer1018 customer
.import /local/data/tpch100g/customer/customer1019 customer
.import /local/data/tpch100g/customer/customer1020 customer
.import /local/data/tpch100g/customer/customer1021 customer
.import /local/data/tpch100g/customer/customer1022 customer
.import /local/data/tpch100g/customer/customer1023 customer
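-- End of the customer chunks; the orders chunks follow below.
-- Side note (not part of the original script): a run of per-chunk .import
-- lines like the ones above can be generated mechanically instead of being
-- written out by hand. The sketch below is illustrative only; it assumes the
-- chunk files live under /local/data/tpch100g/<table>/ as shown above, and the
-- database file name "tpch100g.db" is an assumption, not taken from this script.
--
-- #!/bin/sh
-- # Minimal sketch: emit one ".import" line per chunk file and pipe the
-- # result into sqlite3. Any .separator/.mode settings that appear earlier
-- # in this script would need to be echoed first as well (assumed here).
-- for table in customer orders
-- do
--     for chunk in /local/data/tpch100g/$table/$table*
--     do
--         echo ".import $chunk $table"
--     done
-- done | sqlite3 tpch100g.db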
.import /local/data/tpch100g/orders/orders0000 orders
.import /local/data/tpch100g/orders/orders0001 orders
.import /local/data/tpch100g/orders/orders0002 orders
.import /local/data/tpch100g/orders/orders0003 orders
.import /local/data/tpch100g/orders/orders0004 orders
.import /local/data/tpch100g/orders/orders0005 orders
.import /local/data/tpch100g/orders/orders0006 orders
.import /local/data/tpch100g/orders/orders0007 orders
.import /local/data/tpch100g/orders/orders0008 orders
.import /local/data/tpch100g/orders/orders0009 orders
.import /local/data/tpch100g/orders/orders0010 orders
.import /local/data/tpch100g/orders/orders0011 orders
.import /local/data/tpch100g/orders/orders0012 orders
.import /local/data/tpch100g/orders/orders0013 orders
.import /local/data/tpch100g/orders/orders0014 orders
.import /local/data/tpch100g/orders/orders0015 orders
.import /local/data/tpch100g/orders/orders0016 orders
.import /local/data/tpch100g/orders/orders0017 orders
.import /local/data/tpch100g/orders/orders0018 orders
.import /local/data/tpch100g/orders/orders0019 orders
.import /local/data/tpch100g/orders/orders0020 orders
.import /local/data/tpch100g/orders/orders0021 orders
.import /local/data/tpch100g/orders/orders0022 orders
.import /local/data/tpch100g/orders/orders0023 orders
.import /local/data/tpch100g/orders/orders0024 orders
.import /local/data/tpch100g/orders/orders0025 orders
.import /local/data/tpch100g/orders/orders0026 orders
.import /local/data/tpch100g/orders/orders0027 orders
.import /local/data/tpch100g/orders/orders0028 orders
.import /local/data/tpch100g/orders/orders0029 orders
.import /local/data/tpch100g/orders/orders0030 orders
.import /local/data/tpch100g/orders/orders0031 orders
.import /local/data/tpch100g/orders/orders0032 orders
.import /local/data/tpch100g/orders/orders0033 orders
.import /local/data/tpch100g/orders/orders0034 orders
.import /local/data/tpch100g/orders/orders0035 orders
.import /local/data/tpch100g/orders/orders0036 orders
.import /local/data/tpch100g/orders/orders0037 orders
.import /local/data/tpch100g/orders/orders0038 orders
.import /local/data/tpch100g/orders/orders0039 orders
.import /local/data/tpch100g/orders/orders0040 orders
.import /local/data/tpch100g/orders/orders0041 orders
.import /local/data/tpch100g/orders/orders0042 orders
.import /local/data/tpch100g/orders/orders0043 orders
.import /local/data/tpch100g/orders/orders0044 orders
.import /local/data/tpch100g/orders/orders0045 orders
.import /local/data/tpch100g/orders/orders0046 orders
.import /local/data/tpch100g/orders/orders0047 orders
.import /local/data/tpch100g/orders/orders0048 orders
.import /local/data/tpch100g/orders/orders0049 orders
.import /local/data/tpch100g/orders/orders0050 orders
.import /local/data/tpch100g/orders/orders0051 orders
.import /local/data/tpch100g/orders/orders0052 orders
.import /local/data/tpch100g/orders/orders0053 orders
.import /local/data/tpch100g/orders/orders0054 orders
.import /local/data/tpch100g/orders/orders0055 orders
.import /local/data/tpch100g/orders/orders0056 orders
.import /local/data/tpch100g/orders/orders0057 orders
.import /local/data/tpch100g/orders/orders0058 orders
.import /local/data/tpch100g/orders/orders0059 orders
.import /local/data/tpch100g/orders/orders0060 orders
.import /local/data/tpch100g/orders/orders0061 orders
.import /local/data/tpch100g/orders/orders0062 orders
.import /local/data/tpch100g/orders/orders0063 orders
.import /local/data/tpch100g/orders/orders0064 orders
.import /local/data/tpch100g/orders/orders0065 orders
.import /local/data/tpch100g/orders/orders0066 orders
.import /local/data/tpch100g/orders/orders0067 orders
.import /local/data/tpch100g/orders/orders0068 orders
.import /local/data/tpch100g/orders/orders0069 orders
.import /local/data/tpch100g/orders/orders0070 orders
.import /local/data/tpch100g/orders/orders0071 orders
.import /local/data/tpch100g/orders/orders0072 orders
.import /local/data/tpch100g/orders/orders0073 orders
.import /local/data/tpch100g/orders/orders0074 orders
.import /local/data/tpch100g/orders/orders0075 orders
.import /local/data/tpch100g/orders/orders0076 orders
.import /local/data/tpch100g/orders/orders0077 orders
.import /local/data/tpch100g/orders/orders0078 orders
.import /local/data/tpch100g/orders/orders0079 orders
.import /local/data/tpch100g/orders/orders0080 orders
.import /local/data/tpch100g/orders/orders0081 orders
.import /local/data/tpch100g/orders/orders0082 orders
.import /local/data/tpch100g/orders/orders0083 orders
.import /local/data/tpch100g/orders/orders0084 orders
.import /local/data/tpch100g/orders/orders0085 orders
.import /local/data/tpch100g/orders/orders0086 orders
.import /local/data/tpch100g/orders/orders0087 orders
.import /local/data/tpch100g/orders/orders0088 orders
.import /local/data/tpch100g/orders/orders0089 orders
.import /local/data/tpch100g/orders/orders0090 orders
.import /local/data/tpch100g/orders/orders0091 orders
.import /local/data/tpch100g/orders/orders0092 orders
.import /local/data/tpch100g/orders/orders0093 orders
.import /local/data/tpch100g/orders/orders0094 orders
.import /local/data/tpch100g/orders/orders0095 orders
.import /local/data/tpch100g/orders/orders0096 orders
.import /local/data/tpch100g/orders/orders0097 orders
.import /local/data/tpch100g/orders/orders0098 orders
.import /local/data/tpch100g/orders/orders0099 orders
.import /local/data/tpch100g/orders/orders0100 orders
.import /local/data/tpch100g/orders/orders0101 orders
.import /local/data/tpch100g/orders/orders0102 orders
.import /local/data/tpch100g/orders/orders0103 orders
.import /local/data/tpch100g/orders/orders0104 orders
.import /local/data/tpch100g/orders/orders0105 orders
.import /local/data/tpch100g/orders/orders0106 orders
.import /local/data/tpch100g/orders/orders0107 orders
.import /local/data/tpch100g/orders/orders0108 orders
.import /local/data/tpch100g/orders/orders0109 orders
.import /local/data/tpch100g/orders/orders0110 orders
.import /local/data/tpch100g/orders/orders0111 orders
.import /local/data/tpch100g/orders/orders0112 orders
.import /local/data/tpch100g/orders/orders0113 orders
.import /local/data/tpch100g/orders/orders0114 orders
.import /local/data/tpch100g/orders/orders0115 orders
.import /local/data/tpch100g/orders/orders0116 orders
.import /local/data/tpch100g/orders/orders0117 orders
.import /local/data/tpch100g/orders/orders0118 orders
.import /local/data/tpch100g/orders/orders0119 orders
.import /local/data/tpch100g/orders/orders0120 orders
.import /local/data/tpch100g/orders/orders0121 orders
.import /local/data/tpch100g/orders/orders0122 orders
.import /local/data/tpch100g/orders/orders0123 orders
.import /local/data/tpch100g/orders/orders0124 orders
.import /local/data/tpch100g/orders/orders0125 orders
.import /local/data/tpch100g/orders/orders0126 orders
.import /local/data/tpch100g/orders/orders0127 orders
.import /local/data/tpch100g/orders/orders0128 orders
.import /local/data/tpch100g/orders/orders0129 orders
.import /local/data/tpch100g/orders/orders0130 orders
.import /local/data/tpch100g/orders/orders0131 orders
.import /local/data/tpch100g/orders/orders0132 orders
.import /local/data/tpch100g/orders/orders0133 orders
.import /local/data/tpch100g/orders/orders0134 orders
.import /local/data/tpch100g/orders/orders0135 orders
.import /local/data/tpch100g/orders/orders0136 orders
.import /local/data/tpch100g/orders/orders0137 orders
.import /local/data/tpch100g/orders/orders0138 orders
.import /local/data/tpch100g/orders/orders0139 orders
.import /local/data/tpch100g/orders/orders0140 orders
.import /local/data/tpch100g/orders/orders0141 orders
.import /local/data/tpch100g/orders/orders0142 orders
.import /local/data/tpch100g/orders/orders0143 orders
.import /local/data/tpch100g/orders/orders0144 orders
.import /local/data/tpch100g/orders/orders0145 orders
.import /local/data/tpch100g/orders/orders0146 orders
.import /local/data/tpch100g/orders/orders0147 orders
.import /local/data/tpch100g/orders/orders0148 orders
.import /local/data/tpch100g/orders/orders0149 orders
.import /local/data/tpch100g/orders/orders0150 orders
.import /local/data/tpch100g/orders/orders0151 orders
.import /local/data/tpch100g/orders/orders0152 orders
.import /local/data/tpch100g/orders/orders0153 orders
.import /local/data/tpch100g/orders/orders0154 orders
.import /local/data/tpch100g/orders/orders0155 orders
.import /local/data/tpch100g/orders/orders0156 orders
.import /local/data/tpch100g/orders/orders0157 orders
.import /local/data/tpch100g/orders/orders0158 orders
.import /local/data/tpch100g/orders/orders0159 orders
.import /local/data/tpch100g/orders/orders0160 orders
.import /local/data/tpch100g/orders/orders0161 orders
.import /local/data/tpch100g/orders/orders0162 orders
.import /local/data/tpch100g/orders/orders0163 orders
.import /local/data/tpch100g/orders/orders0164 orders
.import /local/data/tpch100g/orders/orders0165 orders
.import /local/data/tpch100g/orders/orders0166 orders
.import /local/data/tpch100g/orders/orders0167 orders
.import /local/data/tpch100g/orders/orders0168 orders
.import /local/data/tpch100g/orders/orders0169 orders
.import /local/data/tpch100g/orders/orders0170 orders
.import /local/data/tpch100g/orders/orders0171 orders
.import /local/data/tpch100g/orders/orders0172 orders
.import /local/data/tpch100g/orders/orders0173 orders
.import /local/data/tpch100g/orders/orders0174 orders
.import /local/data/tpch100g/orders/orders0175 orders
.import /local/data/tpch100g/orders/orders0176 orders
.import /local/data/tpch100g/orders/orders0177 orders
.import /local/data/tpch100g/orders/orders0178 orders
.import /local/data/tpch100g/orders/orders0179 orders
.import /local/data/tpch100g/orders/orders0180 orders
.import /local/data/tpch100g/orders/orders0181 orders
.import /local/data/tpch100g/orders/orders0182 orders
.import /local/data/tpch100g/orders/orders0183 orders
.import /local/data/tpch100g/orders/orders0184 orders
.import /local/data/tpch100g/orders/orders0185 orders
.import /local/data/tpch100g/orders/orders0186 orders
.import /local/data/tpch100g/orders/orders0187 orders
.import /local/data/tpch100g/orders/orders0188 orders
.import /local/data/tpch100g/orders/orders0189 orders
.import /local/data/tpch100g/orders/orders0190 orders
.import /local/data/tpch100g/orders/orders0191 orders
.import /local/data/tpch100g/orders/orders0192 orders
.import /local/data/tpch100g/orders/orders0193 orders
.import /local/data/tpch100g/orders/orders0194 orders
.import /local/data/tpch100g/orders/orders0195 orders
.import /local/data/tpch100g/orders/orders0196 orders
.import /local/data/tpch100g/orders/orders0197 orders
.import /local/data/tpch100g/orders/orders0198 orders
.import /local/data/tpch100g/orders/orders0199 orders
.import /local/data/tpch100g/orders/orders0200 orders
.import /local/data/tpch100g/orders/orders0201 orders
.import /local/data/tpch100g/orders/orders0202 orders
.import /local/data/tpch100g/orders/orders0203 orders
.import /local/data/tpch100g/orders/orders0204 orders
.import /local/data/tpch100g/orders/orders0205 orders
.import /local/data/tpch100g/orders/orders0206 orders
.import /local/data/tpch100g/orders/orders0207 orders
.import /local/data/tpch100g/orders/orders0208 orders
.import /local/data/tpch100g/orders/orders0209 orders
.import /local/data/tpch100g/orders/orders0210 orders
.import /local/data/tpch100g/orders/orders0211 orders
.import /local/data/tpch100g/orders/orders0212 orders
.import /local/data/tpch100g/orders/orders0213 orders
.import /local/data/tpch100g/orders/orders0214 orders
.import /local/data/tpch100g/orders/orders0215 orders
.import /local/data/tpch100g/orders/orders0216 orders
.import /local/data/tpch100g/orders/orders0217 orders
.import /local/data/tpch100g/orders/orders0218 orders
.import /local/data/tpch100g/orders/orders0219 orders
.import /local/data/tpch100g/orders/orders0220 orders
.import /local/data/tpch100g/orders/orders0221 orders
.import /local/data/tpch100g/orders/orders0222 orders
.import /local/data/tpch100g/orders/orders0223 orders
.import /local/data/tpch100g/orders/orders0224 orders
.import /local/data/tpch100g/orders/orders0225 orders
.import /local/data/tpch100g/orders/orders0226 orders
.import /local/data/tpch100g/orders/orders0227 orders
.import /local/data/tpch100g/orders/orders0228 orders
.import /local/data/tpch100g/orders/orders0229 orders
.import /local/data/tpch100g/orders/orders0230 orders
.import /local/data/tpch100g/orders/orders0231 orders
.import /local/data/tpch100g/orders/orders0232 orders
.import /local/data/tpch100g/orders/orders0233 orders
.import /local/data/tpch100g/orders/orders0234 orders
.import /local/data/tpch100g/orders/orders0235 orders
.import /local/data/tpch100g/orders/orders0236 orders
.import /local/data/tpch100g/orders/orders0237 orders
.import /local/data/tpch100g/orders/orders0238 orders
.import /local/data/tpch100g/orders/orders0239 orders
.import /local/data/tpch100g/orders/orders0240 orders
.import /local/data/tpch100g/orders/orders0241 orders
.import /local/data/tpch100g/orders/orders0242 orders
.import /local/data/tpch100g/orders/orders0243 orders
.import /local/data/tpch100g/orders/orders0244 orders
.import /local/data/tpch100g/orders/orders0245 orders
.import /local/data/tpch100g/orders/orders0246 orders
.import /local/data/tpch100g/orders/orders0247 orders
.import /local/data/tpch100g/orders/orders0248 orders
.import /local/data/tpch100g/orders/orders0249 orders
.import /local/data/tpch100g/orders/orders0250 orders
.import /local/data/tpch100g/orders/orders0251 orders
.import /local/data/tpch100g/orders/orders0252 orders
.import /local/data/tpch100g/orders/orders0253 orders
.import /local/data/tpch100g/orders/orders0254 orders
.import /local/data/tpch100g/orders/orders0255 orders
.import /local/data/tpch100g/orders/orders0256 orders
.import /local/data/tpch100g/orders/orders0257 orders
.import /local/data/tpch100g/orders/orders0258 orders
.import /local/data/tpch100g/orders/orders0259 orders
.import /local/data/tpch100g/orders/orders0260 orders
.import /local/data/tpch100g/orders/orders0261 orders
.import /local/data/tpch100g/orders/orders0262 orders
.import /local/data/tpch100g/orders/orders0263 orders
.import /local/data/tpch100g/orders/orders0264 orders
.import /local/data/tpch100g/orders/orders0265 orders
.import /local/data/tpch100g/orders/orders0266 orders
.import /local/data/tpch100g/orders/orders0267 orders
.import /local/data/tpch100g/orders/orders0268 orders
.import /local/data/tpch100g/orders/orders0269 orders
.import /local/data/tpch100g/orders/orders0270 orders
.import /local/data/tpch100g/orders/orders0271 orders
.import /local/data/tpch100g/orders/orders0272 orders
.import /local/data/tpch100g/orders/orders0273 orders
.import /local/data/tpch100g/orders/orders0274 orders
.import /local/data/tpch100g/orders/orders0275 orders
.import /local/data/tpch100g/orders/orders0276 orders
.import /local/data/tpch100g/orders/orders0277 orders
.import /local/data/tpch100g/orders/orders0278 orders
.import /local/data/tpch100g/orders/orders0279 orders
.import /local/data/tpch100g/orders/orders0280 orders
.import /local/data/tpch100g/orders/orders0281 orders
.import /local/data/tpch100g/orders/orders0282 orders
.import /local/data/tpch100g/orders/orders0283 orders
.import /local/data/tpch100g/orders/orders0284 orders
.import /local/data/tpch100g/orders/orders0285 orders
.import /local/data/tpch100g/orders/orders0286 orders
.import /local/data/tpch100g/orders/orders0287 orders
.import /local/data/tpch100g/orders/orders0288 orders
.import /local/data/tpch100g/orders/orders0289 orders
.import /local/data/tpch100g/orders/orders0290 orders
.import /local/data/tpch100g/orders/orders0291 orders
.import /local/data/tpch100g/orders/orders0292 orders
.import /local/data/tpch100g/orders/orders0293 orders
.import /local/data/tpch100g/orders/orders0294 orders
.import /local/data/tpch100g/orders/orders0295 orders
.import /local/data/tpch100g/orders/orders0296 orders
.import /local/data/tpch100g/orders/orders0297 orders
.import /local/data/tpch100g/orders/orders0298 orders
.import /local/data/tpch100g/orders/orders0299 orders
.import /local/data/tpch100g/orders/orders0300 orders
.import /local/data/tpch100g/orders/orders0301 orders
.import /local/data/tpch100g/orders/orders0302 orders
.import /local/data/tpch100g/orders/orders0303 orders
.import /local/data/tpch100g/orders/orders0304 orders
.import /local/data/tpch100g/orders/orders0305 orders
.import /local/data/tpch100g/orders/orders0306 orders
.import /local/data/tpch100g/orders/orders0307 orders
.import /local/data/tpch100g/orders/orders0308 orders
.import /local/data/tpch100g/orders/orders0309 orders
.import /local/data/tpch100g/orders/orders0310 orders
.import /local/data/tpch100g/orders/orders0311 orders
.import /local/data/tpch100g/orders/orders0312 orders
.import /local/data/tpch100g/orders/orders0313 orders
.import /local/data/tpch100g/orders/orders0314 orders
.import /local/data/tpch100g/orders/orders0315 orders
.import /local/data/tpch100g/orders/orders0316 orders
.import /local/data/tpch100g/orders/orders0317 orders
.import /local/data/tpch100g/orders/orders0318 orders
.import /local/data/tpch100g/orders/orders0319 orders
.import /local/data/tpch100g/orders/orders0320 orders
.import /local/data/tpch100g/orders/orders0321 orders
.import /local/data/tpch100g/orders/orders0322 orders
.import /local/data/tpch100g/orders/orders0323 orders
.import /local/data/tpch100g/orders/orders0324 orders
.import /local/data/tpch100g/orders/orders0325 orders
.import /local/data/tpch100g/orders/orders0326 orders
.import /local/data/tpch100g/orders/orders0327 orders
.import /local/data/tpch100g/orders/orders0328 orders
.import /local/data/tpch100g/orders/orders0329 orders
.import /local/data/tpch100g/orders/orders0330 orders
.import /local/data/tpch100g/orders/orders0331 orders
.import /local/data/tpch100g/orders/orders0332 orders
.import /local/data/tpch100g/orders/orders0333 orders
.import /local/data/tpch100g/orders/orders0334 orders
.import /local/data/tpch100g/orders/orders0335 orders
.import /local/data/tpch100g/orders/orders0336 orders
.import /local/data/tpch100g/orders/orders0337 orders
.import /local/data/tpch100g/orders/orders0338 orders
.import /local/data/tpch100g/orders/orders0339 orders
.import /local/data/tpch100g/orders/orders0340 orders
.import /local/data/tpch100g/orders/orders0341 orders
.import /local/data/tpch100g/orders/orders0342 orders
.import /local/data/tpch100g/orders/orders0343 orders
.import /local/data/tpch100g/orders/orders0344 orders
.import /local/data/tpch100g/orders/orders0345 orders
.import /local/data/tpch100g/orders/orders0346 orders
.import /local/data/tpch100g/orders/orders0347 orders
.import /local/data/tpch100g/orders/orders0348 orders
.import /local/data/tpch100g/orders/orders0349 orders
.import /local/data/tpch100g/orders/orders0350 orders
.import /local/data/tpch100g/orders/orders0351 orders
.import /local/data/tpch100g/orders/orders0352 orders
.import /local/data/tpch100g/orders/orders0353 orders
.import /local/data/tpch100g/orders/orders0354 orders
.import /local/data/tpch100g/orders/orders0355 orders
.import /local/data/tpch100g/orders/orders0356 orders
.import /local/data/tpch100g/orders/orders0357 orders
.import /local/data/tpch100g/orders/orders0358 orders
.import /local/data/tpch100g/orders/orders0359 orders
.import /local/data/tpch100g/orders/orders0360 orders
.import /local/data/tpch100g/orders/orders0361 orders
.import /local/data/tpch100g/orders/orders0362 orders
.import /local/data/tpch100g/orders/orders0363 orders
.import /local/data/tpch100g/orders/orders0364 orders
.import /local/data/tpch100g/orders/orders0365 orders
.import /local/data/tpch100g/orders/orders0366 orders
.import /local/data/tpch100g/orders/orders0367 orders
.import /local/data/tpch100g/orders/orders0368 orders
.import /local/data/tpch100g/orders/orders0369 orders
.import /local/data/tpch100g/orders/orders0370 orders
.import /local/data/tpch100g/orders/orders0371 orders
.import /local/data/tpch100g/orders/orders0372 orders
.import /local/data/tpch100g/orders/orders0373 orders
.import /local/data/tpch100g/orders/orders0374 orders
.import /local/data/tpch100g/orders/orders0375 orders
.import /local/data/tpch100g/orders/orders0376 orders
.import /local/data/tpch100g/orders/orders0377 orders
.import /local/data/tpch100g/orders/orders0378 orders
.import /local/data/tpch100g/orders/orders0379 orders
.import /local/data/tpch100g/orders/orders0380 orders
.import /local/data/tpch100g/orders/orders0381 orders
.import /local/data/tpch100g/orders/orders0382 orders
.import /local/data/tpch100g/orders/orders0383 orders
.import /local/data/tpch100g/orders/orders0384 orders
.import /local/data/tpch100g/orders/orders0385 orders
.import /local/data/tpch100g/orders/orders0386 orders
.import /local/data/tpch100g/orders/orders0387 orders
.import /local/data/tpch100g/orders/orders0388 orders
.import /local/data/tpch100g/orders/orders0389 orders
.import /local/data/tpch100g/orders/orders0390 orders
.import /local/data/tpch100g/orders/orders0391 orders
.import /local/data/tpch100g/orders/orders0392 orders
.import /local/data/tpch100g/orders/orders0393 orders
.import /local/data/tpch100g/orders/orders0394 orders
.import /local/data/tpch100g/orders/orders0395 orders
.import /local/data/tpch100g/orders/orders0396 orders
.import /local/data/tpch100g/orders/orders0397 orders
.import /local/data/tpch100g/orders/orders0398 orders
.import /local/data/tpch100g/orders/orders0399 orders
.import /local/data/tpch100g/orders/orders0400 orders
.import /local/data/tpch100g/orders/orders0401 orders
.import /local/data/tpch100g/orders/orders0402 orders
.import /local/data/tpch100g/orders/orders0403 orders
.import /local/data/tpch100g/orders/orders0404 orders
.import /local/data/tpch100g/orders/orders0405 orders
.import /local/data/tpch100g/orders/orders0406 orders
.import /local/data/tpch100g/orders/orders0407 orders
.import /local/data/tpch100g/orders/orders0408 orders
.import /local/data/tpch100g/orders/orders0409 orders
.import /local/data/tpch100g/orders/orders0410 orders
.import /local/data/tpch100g/orders/orders0411 orders
.import /local/data/tpch100g/orders/orders0412 orders
.import /local/data/tpch100g/orders/orders0413 orders
.import /local/data/tpch100g/orders/orders0414 orders
.import /local/data/tpch100g/orders/orders0415 orders
.import /local/data/tpch100g/orders/orders0416 orders
.import /local/data/tpch100g/orders/orders0417 orders
.import /local/data/tpch100g/orders/orders0418 orders
.import /local/data/tpch100g/orders/orders0419 orders
.import /local/data/tpch100g/orders/orders0420 orders
.import /local/data/tpch100g/orders/orders0421 orders
.import /local/data/tpch100g/orders/orders0422 orders
.import /local/data/tpch100g/orders/orders0423 orders
.import /local/data/tpch100g/orders/orders0424 orders
.import /local/data/tpch100g/orders/orders0425 orders
.import /local/data/tpch100g/orders/orders0426 orders
.import /local/data/tpch100g/orders/orders0427 orders
.import /local/data/tpch100g/orders/orders0428 orders
.import /local/data/tpch100g/orders/orders0429 orders
.import /local/data/tpch100g/orders/orders0430 orders
.import /local/data/tpch100g/orders/orders0431 orders
.import /local/data/tpch100g/orders/orders0432 orders
.import /local/data/tpch100g/orders/orders0433 orders
.import /local/data/tpch100g/orders/orders0434 orders
.import /local/data/tpch100g/orders/orders0435 orders
.import /local/data/tpch100g/orders/orders0436 orders
.import /local/data/tpch100g/orders/orders0437 orders
.import /local/data/tpch100g/orders/orders0438 orders
.import /local/data/tpch100g/orders/orders0439 orders
.import /local/data/tpch100g/orders/orders0440 orders
.import /local/data/tpch100g/orders/orders0441 orders
.import /local/data/tpch100g/orders/orders0442 orders
.import /local/data/tpch100g/orders/orders0443 orders
.import /local/data/tpch100g/orders/orders0444 orders
.import /local/data/tpch100g/orders/orders0445 orders
.import /local/data/tpch100g/orders/orders0446 orders
.import /local/data/tpch100g/orders/orders0447 orders
.import /local/data/tpch100g/orders/orders0448 orders
.import /local/data/tpch100g/orders/orders0449 orders
.import /local/data/tpch100g/orders/orders0450 orders
.import /local/data/tpch100g/orders/orders0451 orders
.import /local/data/tpch100g/orders/orders0452 orders
.import /local/data/tpch100g/orders/orders0453 orders
.import /local/data/tpch100g/orders/orders0454 orders
.import /local/data/tpch100g/orders/orders0455 orders
.import /local/data/tpch100g/orders/orders0456 orders
.import /local/data/tpch100g/orders/orders0457 orders
.import /local/data/tpch100g/orders/orders0458 orders
.import /local/data/tpch100g/orders/orders0459 orders
.import /local/data/tpch100g/orders/orders0460 orders
.import /local/data/tpch100g/orders/orders0461 orders
.import /local/data/tpch100g/orders/orders0462 orders
.import /local/data/tpch100g/orders/orders0463 orders
.import /local/data/tpch100g/orders/orders0464 orders
.import /local/data/tpch100g/orders/orders0465 orders
.import /local/data/tpch100g/orders/orders0466 orders
.import /local/data/tpch100g/orders/orders0467 orders
.import /local/data/tpch100g/orders/orders0468 orders
.import /local/data/tpch100g/orders/orders0469 orders
.import /local/data/tpch100g/orders/orders0470 orders
.import /local/data/tpch100g/orders/orders0471 orders
.import /local/data/tpch100g/orders/orders0472 orders
.import /local/data/tpch100g/orders/orders0473 orders
.import /local/data/tpch100g/orders/orders0474 orders
.import /local/data/tpch100g/orders/orders0475 orders
.import /local/data/tpch100g/orders/orders0476 orders
.import /local/data/tpch100g/orders/orders0477 orders
.import /local/data/tpch100g/orders/orders0478 orders
.import /local/data/tpch100g/orders/orders0479 orders
.import /local/data/tpch100g/orders/orders0480 orders
.import /local/data/tpch100g/orders/orders0481 orders
.import /local/data/tpch100g/orders/orders0482 orders
.import /local/data/tpch100g/orders/orders0483 orders
.import /local/data/tpch100g/orders/orders0484 orders
.import /local/data/tpch100g/orders/orders0485 orders
.import /local/data/tpch100g/orders/orders0486 orders
.import /local/data/tpch100g/orders/orders0487 orders
.import /local/data/tpch100g/orders/orders0488 orders
.import /local/data/tpch100g/orders/orders0489 orders
.import /local/data/tpch100g/orders/orders0490 orders
.import /local/data/tpch100g/orders/orders0491 orders
.import /local/data/tpch100g/orders/orders0492 orders
.import /local/data/tpch100g/orders/orders0493 orders
.import /local/data/tpch100g/orders/orders0494 orders
.import /local/data/tpch100g/orders/orders0495 orders
.import /local/data/tpch100g/orders/orders0496 orders
.import /local/data/tpch100g/orders/orders0497 orders
.import /local/data/tpch100g/orders/orders0498 orders
.import /local/data/tpch100g/orders/orders0499 orders
.import /local/data/tpch100g/orders/orders0500 orders
.import /local/data/tpch100g/orders/orders0501 orders
.import /local/data/tpch100g/orders/orders0502 orders
.import /local/data/tpch100g/orders/orders0503 orders
.import /local/data/tpch100g/orders/orders0504 orders
.import /local/data/tpch100g/orders/orders0505 orders
.import /local/data/tpch100g/orders/orders0506 orders
.import /local/data/tpch100g/orders/orders0507 orders
.import /local/data/tpch100g/orders/orders0508 orders
.import /local/data/tpch100g/orders/orders0509 orders
.import /local/data/tpch100g/orders/orders0510 orders
.import /local/data/tpch100g/orders/orders0511 orders
.import /local/data/tpch100g/orders/orders0512 orders
.import /local/data/tpch100g/orders/orders0513 orders
.import /local/data/tpch100g/orders/orders0514 orders
.import /local/data/tpch100g/orders/orders0515 orders
.import /local/data/tpch100g/orders/orders0516 orders
.import /local/data/tpch100g/orders/orders0517 orders
.import /local/data/tpch100g/orders/orders0518 orders
.import /local/data/tpch100g/orders/orders0519 orders
.import /local/data/tpch100g/orders/orders0520 orders
.import /local/data/tpch100g/orders/orders0521 orders
.import /local/data/tpch100g/orders/orders0522 orders
.import /local/data/tpch100g/orders/orders0523 orders
.import /local/data/tpch100g/orders/orders0524 orders
.import /local/data/tpch100g/orders/orders0525 orders
.import /local/data/tpch100g/orders/orders0526 orders
.import /local/data/tpch100g/orders/orders0527 orders
.import /local/data/tpch100g/orders/orders0528 orders
.import /local/data/tpch100g/orders/orders0529 orders
.import /local/data/tpch100g/orders/orders0530 orders
.import /local/data/tpch100g/orders/orders0531 orders
.import /local/data/tpch100g/orders/orders0532 orders
.import /local/data/tpch100g/orders/orders0533 orders
.import /local/data/tpch100g/orders/orders0534 orders
.import /local/data/tpch100g/orders/orders0535 orders
.import /local/data/tpch100g/orders/orders0536 orders
.import /local/data/tpch100g/orders/orders0537 orders
.import /local/data/tpch100g/orders/orders0538 orders
.import /local/data/tpch100g/orders/orders0539 orders
.import /local/data/tpch100g/orders/orders0540 orders
.import /local/data/tpch100g/orders/orders0541 orders
.import /local/data/tpch100g/orders/orders0542 orders
.import /local/data/tpch100g/orders/orders0543 orders
.import /local/data/tpch100g/orders/orders0544 orders
.import /local/data/tpch100g/orders/orders0545 orders
.import /local/data/tpch100g/orders/orders0546 orders
.import /local/data/tpch100g/orders/orders0547 orders
.import /local/data/tpch100g/orders/orders0548 orders
.import /local/data/tpch100g/orders/orders0549 orders
.import /local/data/tpch100g/orders/orders0550 orders
.import /local/data/tpch100g/orders/orders0551 orders
.import /local/data/tpch100g/orders/orders0552 orders
.import /local/data/tpch100g/orders/orders0553 orders
.import /local/data/tpch100g/orders/orders0554 orders
.import /local/data/tpch100g/orders/orders0555 orders
.import /local/data/tpch100g/orders/orders0556 orders
.import /local/data/tpch100g/orders/orders0557 orders
.import /local/data/tpch100g/orders/orders0558 orders
.import /local/data/tpch100g/orders/orders0559 orders
.import /local/data/tpch100g/orders/orders0560 orders
.import /local/data/tpch100g/orders/orders0561 orders
.import /local/data/tpch100g/orders/orders0562 orders
.import /local/data/tpch100g/orders/orders0563 orders
.import /local/data/tpch100g/orders/orders0564 orders
.import /local/data/tpch100g/orders/orders0565 orders
.import /local/data/tpch100g/orders/orders0566 orders
.import /local/data/tpch100g/orders/orders0567 orders
.import /local/data/tpch100g/orders/orders0568 orders
.import /local/data/tpch100g/orders/orders0569 orders
.import /local/data/tpch100g/orders/orders0570 orders
.import /local/data/tpch100g/orders/orders0571 orders
.import /local/data/tpch100g/orders/orders0572 orders
.import /local/data/tpch100g/orders/orders0573 orders
.import /local/data/tpch100g/orders/orders0574 orders
.import /local/data/tpch100g/orders/orders0575 orders
.import /local/data/tpch100g/orders/orders0576 orders
.import /local/data/tpch100g/orders/orders0577 orders
.import /local/data/tpch100g/orders/orders0578 orders
.import /local/data/tpch100g/orders/orders0579 orders
.import /local/data/tpch100g/orders/orders0580 orders
.import /local/data/tpch100g/orders/orders0581 orders
.import /local/data/tpch100g/orders/orders0582 orders
.import /local/data/tpch100g/orders/orders0583 orders
.import /local/data/tpch100g/orders/orders0584 orders
.import /local/data/tpch100g/orders/orders0585 orders
.import /local/data/tpch100g/orders/orders0586 orders
.import /local/data/tpch100g/orders/orders0587 orders
.import /local/data/tpch100g/orders/orders0588 orders
.import /local/data/tpch100g/orders/orders0589 orders
.import /local/data/tpch100g/orders/orders0590 orders
.import /local/data/tpch100g/orders/orders0591 orders
.import /local/data/tpch100g/orders/orders0592 orders
.import /local/data/tpch100g/orders/orders0593 orders
.import /local/data/tpch100g/orders/orders0594 orders
.import /local/data/tpch100g/orders/orders0595 orders
.import /local/data/tpch100g/orders/orders0596 orders
.import /local/data/tpch100g/orders/orders0597 orders
.import /local/data/tpch100g/orders/orders0598 orders
.import /local/data/tpch100g/orders/orders0599 orders
.import /local/data/tpch100g/orders/orders0600 orders
.import /local/data/tpch100g/orders/orders0601 orders
.import /local/data/tpch100g/orders/orders0602 orders
.import /local/data/tpch100g/orders/orders0603 orders
.import /local/data/tpch100g/orders/orders0604 orders
.import /local/data/tpch100g/orders/orders0605 orders
.import /local/data/tpch100g/orders/orders0606 orders
.import /local/data/tpch100g/orders/orders0607 orders
.import /local/data/tpch100g/orders/orders0608 orders
.import /local/data/tpch100g/orders/orders0609 orders
.import /local/data/tpch100g/orders/orders0610 orders
.import /local/data/tpch100g/orders/orders0611 orders
.import /local/data/tpch100g/orders/orders0612 orders
.import /local/data/tpch100g/orders/orders0613 orders
.import /local/data/tpch100g/orders/orders0614 orders
.import /local/data/tpch100g/orders/orders0615 orders
.import /local/data/tpch100g/orders/orders0616 orders
.import /local/data/tpch100g/orders/orders0617 orders
.import /local/data/tpch100g/orders/orders0618 orders
.import /local/data/tpch100g/orders/orders0619 orders
.import /local/data/tpch100g/orders/orders0620 orders
.import /local/data/tpch100g/orders/orders0621 orders
.import /local/data/tpch100g/orders/orders0622 orders
.import /local/data/tpch100g/orders/orders0623 orders
.import /local/data/tpch100g/orders/orders0624 orders
.import /local/data/tpch100g/orders/orders0625 orders
.import /local/data/tpch100g/orders/orders0626 orders
.import /local/data/tpch100g/orders/orders0627 orders
.import /local/data/tpch100g/orders/orders0628 orders
.import /local/data/tpch100g/orders/orders0629 orders
.import /local/data/tpch100g/orders/orders0630 orders
.import /local/data/tpch100g/orders/orders0631 orders
.import /local/data/tpch100g/orders/orders0632 orders
.import /local/data/tpch100g/orders/orders0633 orders
.import /local/data/tpch100g/orders/orders0634 orders
.import /local/data/tpch100g/orders/orders0635 orders
.import /local/data/tpch100g/orders/orders0636 orders
.import /local/data/tpch100g/orders/orders0637 orders
.import /local/data/tpch100g/orders/orders0638 orders
.import /local/data/tpch100g/orders/orders0639 orders
.import /local/data/tpch100g/orders/orders0640 orders
.import /local/data/tpch100g/orders/orders0641 orders
.import /local/data/tpch100g/orders/orders0642 orders
.import /local/data/tpch100g/orders/orders0643 orders
.import /local/data/tpch100g/orders/orders0644 orders
.import /local/data/tpch100g/orders/orders0645 orders
.import /local/data/tpch100g/orders/orders0646 orders
.import /local/data/tpch100g/orders/orders0647 orders
.import /local/data/tpch100g/orders/orders0648 orders
.import /local/data/tpch100g/orders/orders0649 orders
.import /local/data/tpch100g/orders/orders0650 orders
.import /local/data/tpch100g/orders/orders0651 orders
.import /local/data/tpch100g/orders/orders0652 orders
.import /local/data/tpch100g/orders/orders0653 orders
.import /local/data/tpch100g/orders/orders0654 orders
.import /local/data/tpch100g/orders/orders0655 orders
.import /local/data/tpch100g/orders/orders0656 orders
.import /local/data/tpch100g/orders/orders0657 orders
.import /local/data/tpch100g/orders/orders0658 orders
.import /local/data/tpch100g/orders/orders0659 orders
.import /local/data/tpch100g/orders/orders0660 orders
.import /local/data/tpch100g/orders/orders0661 orders
.import /local/data/tpch100g/orders/orders0662 orders
.import /local/data/tpch100g/orders/orders0663 orders
.import /local/data/tpch100g/orders/orders0664 orders
.import /local/data/tpch100g/orders/orders0665 orders
.import /local/data/tpch100g/orders/orders0666 orders
.import /local/data/tpch100g/orders/orders0667 orders
.import /local/data/tpch100g/orders/orders0668 orders
.import /local/data/tpch100g/orders/orders0669 orders
.import /local/data/tpch100g/orders/orders0670 orders
.import /local/data/tpch100g/orders/orders0671 orders
.import /local/data/tpch100g/orders/orders0672 orders
.import /local/data/tpch100g/orders/orders0673 orders
.import /local/data/tpch100g/orders/orders0674 orders
.import /local/data/tpch100g/orders/orders0675 orders
.import /local/data/tpch100g/orders/orders0676 orders
.import /local/data/tpch100g/orders/orders0677 orders
.import /local/data/tpch100g/orders/orders0678 orders
.import /local/data/tpch100g/orders/orders0679 orders
.import /local/data/tpch100g/orders/orders0680 orders
.import /local/data/tpch100g/orders/orders0681 orders
.import /local/data/tpch100g/orders/orders0682 orders
.import /local/data/tpch100g/orders/orders0683 orders
.import /local/data/tpch100g/orders/orders0684 orders
.import /local/data/tpch100g/orders/orders0685 orders
.import /local/data/tpch100g/orders/orders0686 orders
.import /local/data/tpch100g/orders/orders0687 orders
.import /local/data/tpch100g/orders/orders0688 orders
.import /local/data/tpch100g/orders/orders0689 orders
.import /local/data/tpch100g/orders/orders0690 orders
.import /local/data/tpch100g/orders/orders0691 orders
.import /local/data/tpch100g/orders/orders0692 orders
.import /local/data/tpch100g/orders/orders0693 orders
.import /local/data/tpch100g/orders/orders0694 orders
.import /local/data/tpch100g/orders/orders0695 orders
.import /local/data/tpch100g/orders/orders0696 orders
.import /local/data/tpch100g/orders/orders0697 orders
.import /local/data/tpch100g/orders/orders0698 orders
.import /local/data/tpch100g/orders/orders0699 orders
.import /local/data/tpch100g/orders/orders0700 orders
.import /local/data/tpch100g/orders/orders0701 orders
.import /local/data/tpch100g/orders/orders0702 orders
.import /local/data/tpch100g/orders/orders0703 orders
.import /local/data/tpch100g/orders/orders0704 orders
.import /local/data/tpch100g/orders/orders0705 orders
.import /local/data/tpch100g/orders/orders0706 orders
.import /local/data/tpch100g/orders/orders0707 orders
.import /local/data/tpch100g/orders/orders0708 orders
.import /local/data/tpch100g/orders/orders0709 orders
.import /local/data/tpch100g/orders/orders0710 orders
.import /local/data/tpch100g/orders/orders0711 orders
.import /local/data/tpch100g/orders/orders0712 orders
.import /local/data/tpch100g/orders/orders0713 orders
.import /local/data/tpch100g/orders/orders0714 orders
.import /local/data/tpch100g/orders/orders0715 orders
.import /local/data/tpch100g/orders/orders0716 orders
.import /local/data/tpch100g/orders/orders0717 orders
.import /local/data/tpch100g/orders/orders0718 orders
.import /local/data/tpch100g/orders/orders0719 orders
.import /local/data/tpch100g/orders/orders0720 orders
.import /local/data/tpch100g/orders/orders0721 orders
.import /local/data/tpch100g/orders/orders0722 orders
.import /local/data/tpch100g/orders/orders0723 orders
.import /local/data/tpch100g/orders/orders0724 orders
.import /local/data/tpch100g/orders/orders0725 orders
.import /local/data/tpch100g/orders/orders0726 orders
.import /local/data/tpch100g/orders/orders0727 orders
.import /local/data/tpch100g/orders/orders0728 orders
.import /local/data/tpch100g/orders/orders0729 orders
.import /local/data/tpch100g/orders/orders0730 orders
.import /local/data/tpch100g/orders/orders0731 orders
.import /local/data/tpch100g/orders/orders0732 orders
.import /local/data/tpch100g/orders/orders0733 orders
.import /local/data/tpch100g/orders/orders0734 orders
.import /local/data/tpch100g/orders/orders0735 orders
.import /local/data/tpch100g/orders/orders0736 orders
.import /local/data/tpch100g/orders/orders0737 orders
.import /local/data/tpch100g/orders/orders0738 orders
.import /local/data/tpch100g/orders/orders0739 orders
.import /local/data/tpch100g/orders/orders0740 orders
.import /local/data/tpch100g/orders/orders0741 orders
.import /local/data/tpch100g/orders/orders0742 orders
.import /local/data/tpch100g/orders/orders0743 orders
.import /local/data/tpch100g/orders/orders0744 orders
.import /local/data/tpch100g/orders/orders0745 orders
.import /local/data/tpch100g/orders/orders0746 orders
.import /local/data/tpch100g/orders/orders0747 orders
.import /local/data/tpch100g/orders/orders0748 orders
.import /local/data/tpch100g/orders/orders0749 orders
.import /local/data/tpch100g/orders/orders0750 orders
.import /local/data/tpch100g/orders/orders0751 orders
.import /local/data/tpch100g/orders/orders0752 orders
.import /local/data/tpch100g/orders/orders0753 orders
.import /local/data/tpch100g/orders/orders0754 orders
.import /local/data/tpch100g/orders/orders0755 orders
.import /local/data/tpch100g/orders/orders0756 orders
.import /local/data/tpch100g/orders/orders0757 orders
.import /local/data/tpch100g/orders/orders0758 orders
.import /local/data/tpch100g/orders/orders0759 orders
.import /local/data/tpch100g/orders/orders0760 orders
.import /local/data/tpch100g/orders/orders0761 orders
.import /local/data/tpch100g/orders/orders0762 orders
.import /local/data/tpch100g/orders/orders0763 orders
.import /local/data/tpch100g/orders/orders0764 orders
.import /local/data/tpch100g/orders/orders0765 orders
.import /local/data/tpch100g/orders/orders0766 orders
.import /local/data/tpch100g/orders/orders0767 orders
.import /local/data/tpch100g/orders/orders0768 orders
.import /local/data/tpch100g/orders/orders0769 orders
.import /local/data/tpch100g/orders/orders0770 orders
.import /local/data/tpch100g/orders/orders0771 orders
.import /local/data/tpch100g/orders/orders0772 orders
.import /local/data/tpch100g/orders/orders0773 orders
.import /local/data/tpch100g/orders/orders0774 orders
.import /local/data/tpch100g/orders/orders0775 orders
.import /local/data/tpch100g/orders/orders0776 orders
.import /local/data/tpch100g/orders/orders0777 orders
.import /local/data/tpch100g/orders/orders0778 orders
.import /local/data/tpch100g/orders/orders0779 orders
.import /local/data/tpch100g/orders/orders0780 orders
.import /local/data/tpch100g/orders/orders0781 orders
.import /local/data/tpch100g/orders/orders0782 orders
.import /local/data/tpch100g/orders/orders0783 orders
.import /local/data/tpch100g/orders/orders0784 orders
.import /local/data/tpch100g/orders/orders0785 orders
.import /local/data/tpch100g/orders/orders0786 orders
.import /local/data/tpch100g/orders/orders0787 orders
.import /local/data/tpch100g/orders/orders0788 orders
.import /local/data/tpch100g/orders/orders0789 orders
.import /local/data/tpch100g/orders/orders0790 orders
.import /local/data/tpch100g/orders/orders0791 orders
.import /local/data/tpch100g/orders/orders0792 orders
.import /local/data/tpch100g/orders/orders0793 orders
.import /local/data/tpch100g/orders/orders0794 orders
.import /local/data/tpch100g/orders/orders0795 orders
.import /local/data/tpch100g/orders/orders0796 orders
.import /local/data/tpch100g/orders/orders0797 orders
.import /local/data/tpch100g/orders/orders0798 orders
.import /local/data/tpch100g/orders/orders0799 orders
.import /local/data/tpch100g/orders/orders0800 orders
.import /local/data/tpch100g/orders/orders0801 orders
.import /local/data/tpch100g/orders/orders0802 orders
.import /local/data/tpch100g/orders/orders0803 orders
.import /local/data/tpch100g/orders/orders0804 orders
.import /local/data/tpch100g/orders/orders0805 orders
.import /local/data/tpch100g/orders/orders0806 orders
.import /local/data/tpch100g/orders/orders0807 orders
.import /local/data/tpch100g/orders/orders0808 orders
.import /local/data/tpch100g/orders/orders0809 orders
.import /local/data/tpch100g/orders/orders0810 orders
.import /local/data/tpch100g/orders/orders0811 orders
.import /local/data/tpch100g/orders/orders0812 orders
.import /local/data/tpch100g/orders/orders0813 orders
.import /local/data/tpch100g/orders/orders0814 orders
.import /local/data/tpch100g/orders/orders0815 orders
.import /local/data/tpch100g/orders/orders0816 orders
.import /local/data/tpch100g/orders/orders0817 orders
.import /local/data/tpch100g/orders/orders0818 orders
.import /local/data/tpch100g/orders/orders0819 orders
.import /local/data/tpch100g/orders/orders0820 orders
.import /local/data/tpch100g/orders/orders0821 orders
.import /local/data/tpch100g/orders/orders0822 orders
.import /local/data/tpch100g/orders/orders0823 orders
.import /local/data/tpch100g/orders/orders0824 orders
.import /local/data/tpch100g/orders/orders0825 orders
.import /local/data/tpch100g/orders/orders0826 orders
.import /local/data/tpch100g/orders/orders0827 orders
.import /local/data/tpch100g/orders/orders0828 orders
.import /local/data/tpch100g/orders/orders0829 orders
.import /local/data/tpch100g/orders/orders0830 orders
.import /local/data/tpch100g/orders/orders0831 orders
.import /local/data/tpch100g/orders/orders0832 orders
.import /local/data/tpch100g/orders/orders0833 orders
.import /local/data/tpch100g/orders/orders0834 orders
.import /local/data/tpch100g/orders/orders0835 orders
.import /local/data/tpch100g/orders/orders0836 orders
.import /local/data/tpch100g/orders/orders0837 orders
.import /local/data/tpch100g/orders/orders0838 orders
.import /local/data/tpch100g/orders/orders0839 orders
.import /local/data/tpch100g/orders/orders0840 orders
.import /local/data/tpch100g/orders/orders0841 orders
.import /local/data/tpch100g/orders/orders0842 orders
.import /local/data/tpch100g/orders/orders0843 orders
.import /local/data/tpch100g/orders/orders0844 orders
.import /local/data/tpch100g/orders/orders0845 orders
.import /local/data/tpch100g/orders/orders0846 orders
.import /local/data/tpch100g/orders/orders0847 orders
.import /local/data/tpch100g/orders/orders0848 orders
.import /local/data/tpch100g/orders/orders0849 orders
.import /local/data/tpch100g/orders/orders0850 orders
.import /local/data/tpch100g/orders/orders0851 orders
.import /local/data/tpch100g/orders/orders0852 orders
.import /local/data/tpch100g/orders/orders0853 orders
.import /local/data/tpch100g/orders/orders0854 orders
.import /local/data/tpch100g/orders/orders0855 orders
.import /local/data/tpch100g/orders/orders0856 orders
.import /local/data/tpch100g/orders/orders0857 orders
.import /local/data/tpch100g/orders/orders0858 orders
.import /local/data/tpch100g/orders/orders0859 orders
.import /local/data/tpch100g/orders/orders0860 orders
.import /local/data/tpch100g/orders/orders0861 orders
.import /local/data/tpch100g/orders/orders0862 orders
.import /local/data/tpch100g/orders/orders0863 orders
.import /local/data/tpch100g/orders/orders0864 orders
.import /local/data/tpch100g/orders/orders0865 orders
.import /local/data/tpch100g/orders/orders0866 orders
.import /local/data/tpch100g/orders/orders0867 orders
.import /local/data/tpch100g/orders/orders0868 orders
.import /local/data/tpch100g/orders/orders0869 orders
.import /local/data/tpch100g/orders/orders0870 orders
.import /local/data/tpch100g/orders/orders0871 orders
.import /local/data/tpch100g/orders/orders0872 orders
.import /local/data/tpch100g/orders/orders0873 orders
.import /local/data/tpch100g/orders/orders0874 orders
.import /local/data/tpch100g/orders/orders0875 orders
.import /local/data/tpch100g/orders/orders0876 orders
.import /local/data/tpch100g/orders/orders0877 orders
.import /local/data/tpch100g/orders/orders0878 orders
.import /local/data/tpch100g/orders/orders0879 orders
.import /local/data/tpch100g/orders/orders0880 orders
.import /local/data/tpch100g/orders/orders0881 orders
.import /local/data/tpch100g/orders/orders0882 orders
.import /local/data/tpch100g/orders/orders0883 orders
.import /local/data/tpch100g/orders/orders0884 orders
.import /local/data/tpch100g/orders/orders0885 orders
.import /local/data/tpch100g/orders/orders0886 orders
.import /local/data/tpch100g/orders/orders0887 orders
.import /local/data/tpch100g/orders/orders0888 orders
.import /local/data/tpch100g/orders/orders0889 orders
.import /local/data/tpch100g/orders/orders0890 orders
.import /local/data/tpch100g/orders/orders0891 orders
.import /local/data/tpch100g/orders/orders0892 orders
.import /local/data/tpch100g/orders/orders0893 orders
.import /local/data/tpch100g/orders/orders0894 orders
.import /local/data/tpch100g/orders/orders0895 orders
.import /local/data/tpch100g/orders/orders0896 orders
.import /local/data/tpch100g/orders/orders0897 orders
.import /local/data/tpch100g/orders/orders0898 orders
.import /local/data/tpch100g/orders/orders0899 orders
.import /local/data/tpch100g/orders/orders0900 orders
.import /local/data/tpch100g/orders/orders0901 orders
.import /local/data/tpch100g/orders/orders0902 orders
.import /local/data/tpch100g/orders/orders0903 orders
.import /local/data/tpch100g/orders/orders0904 orders
.import /local/data/tpch100g/orders/orders0905 orders
.import /local/data/tpch100g/orders/orders0906 orders
.import /local/data/tpch100g/orders/orders0907 orders
.import /local/data/tpch100g/orders/orders0908 orders
.import /local/data/tpch100g/orders/orders0909 orders
.import /local/data/tpch100g/orders/orders0910 orders
.import /local/data/tpch100g/orders/orders0911 orders
.import /local/data/tpch100g/orders/orders0912 orders
.import /local/data/tpch100g/orders/orders0913 orders
.import /local/data/tpch100g/orders/orders0914 orders
.import /local/data/tpch100g/orders/orders0915 orders
.import /local/data/tpch100g/orders/orders0916 orders
.import /local/data/tpch100g/orders/orders0917 orders
.import /local/data/tpch100g/orders/orders0918 orders
.import /local/data/tpch100g/orders/orders0919 orders
.import /local/data/tpch100g/orders/orders0920 orders
.import /local/data/tpch100g/orders/orders0921 orders
.import /local/data/tpch100g/orders/orders0922 orders
.import /local/data/tpch100g/orders/orders0923 orders
.import /local/data/tpch100g/orders/orders0924 orders
.import /local/data/tpch100g/orders/orders0925 orders
.import /local/data/tpch100g/orders/orders0926 orders
.import /local/data/tpch100g/orders/orders0927 orders
.import /local/data/tpch100g/orders/orders0928 orders
.import /local/data/tpch100g/orders/orders0929 orders
.import /local/data/tpch100g/orders/orders0930 orders
.import /local/data/tpch100g/orders/orders0931 orders
.import /local/data/tpch100g/orders/orders0932 orders
.import /local/data/tpch100g/orders/orders0933 orders
.import /local/data/tpch100g/orders/orders0934 orders
.import /local/data/tpch100g/orders/orders0935 orders
.import /local/data/tpch100g/orders/orders0936 orders
.import /local/data/tpch100g/orders/orders0937 orders
.import /local/data/tpch100g/orders/orders0938 orders
.import /local/data/tpch100g/orders/orders0939 orders
.import /local/data/tpch100g/orders/orders0940 orders
.import /local/data/tpch100g/orders/orders0941 orders
.import /local/data/tpch100g/orders/orders0942 orders
.import /local/data/tpch100g/orders/orders0943 orders
.import /local/data/tpch100g/orders/orders0944 orders
.import /local/data/tpch100g/orders/orders0945 orders
.import /local/data/tpch100g/orders/orders0946 orders
.import /local/data/tpch100g/orders/orders0947 orders
.import /local/data/tpch100g/orders/orders0948 orders
.import /local/data/tpch100g/orders/orders0949 orders
.import /local/data/tpch100g/orders/orders0950 orders
.import /local/data/tpch100g/orders/orders0951 orders
.import /local/data/tpch100g/orders/orders0952 orders
.import /local/data/tpch100g/orders/orders0953 orders
.import /local/data/tpch100g/orders/orders0954 orders
.import /local/data/tpch100g/orders/orders0955 orders
.import /local/data/tpch100g/orders/orders0956 orders
.import /local/data/tpch100g/orders/orders0957 orders
.import /local/data/tpch100g/orders/orders0958 orders
.import /local/data/tpch100g/orders/orders0959 orders
.import /local/data/tpch100g/orders/orders0960 orders
.import /local/data/tpch100g/orders/orders0961 orders
.import /local/data/tpch100g/orders/orders0962 orders
.import /local/data/tpch100g/orders/orders0963 orders
.import /local/data/tpch100g/orders/orders0964 orders
.import /local/data/tpch100g/orders/orders0965 orders
.import /local/data/tpch100g/orders/orders0966 orders
.import /local/data/tpch100g/orders/orders0967 orders
.import /local/data/tpch100g/orders/orders0968 orders
.import /local/data/tpch100g/orders/orders0969 orders
.import /local/data/tpch100g/orders/orders0970 orders
.import /local/data/tpch100g/orders/orders0971 orders
.import /local/data/tpch100g/orders/orders0972 orders
.import /local/data/tpch100g/orders/orders0973 orders
.import /local/data/tpch100g/orders/orders0974 orders
.import /local/data/tpch100g/orders/orders0975 orders
.import /local/data/tpch100g/orders/orders0976 orders
.import /local/data/tpch100g/orders/orders0977 orders
.import /local/data/tpch100g/orders/orders0978 orders
.import /local/data/tpch100g/orders/orders0979 orders
.import /local/data/tpch100g/orders/orders0980 orders
.import /local/data/tpch100g/orders/orders0981 orders
.import /local/data/tpch100g/orders/orders0982 orders
.import /local/data/tpch100g/orders/orders0983 orders
.import /local/data/tpch100g/orders/orders0984 orders
.import /local/data/tpch100g/orders/orders0985 orders
.import /local/data/tpch100g/orders/orders0986 orders
.import /local/data/tpch100g/orders/orders0987 orders
.import /local/data/tpch100g/orders/orders0988 orders
.import /local/data/tpch100g/orders/orders0989 orders
.import /local/data/tpch100g/orders/orders0990 orders
.import /local/data/tpch100g/orders/orders0991 orders
.import /local/data/tpch100g/orders/orders0992 orders
.import /local/data/tpch100g/orders/orders0993 orders
.import /local/data/tpch100g/orders/orders0994 orders
.import /local/data/tpch100g/orders/orders0995 orders
.import /local/data/tpch100g/orders/orders0996 orders
.import /local/data/tpch100g/orders/orders0997 orders
.import /local/data/tpch100g/orders/orders0998 orders
.import /local/data/tpch100g/orders/orders0999 orders
.import /local/data/tpch100g/orders/orders1000 orders
.import /local/data/tpch100g/orders/orders1001 orders
.import /local/data/tpch100g/orders/orders1002 orders
.import /local/data/tpch100g/orders/orders1003 orders
.import /local/data/tpch100g/orders/orders1004 orders
.import /local/data/tpch100g/orders/orders1005 orders
.import /local/data/tpch100g/orders/orders1006 orders
.import /local/data/tpch100g/orders/orders1007 orders
.import /local/data/tpch100g/orders/orders1008 orders
.import /local/data/tpch100g/orders/orders1009 orders
.import /local/data/tpch100g/orders/orders1010 orders
.import /local/data/tpch100g/orders/orders1011 orders
.import /local/data/tpch100g/orders/orders1012 orders
.import /local/data/tpch100g/orders/orders1013 orders
.import /local/data/tpch100g/orders/orders1014 orders
.import /local/data/tpch100g/orders/orders1015 orders
.import /local/data/tpch100g/orders/orders1016 orders
.import /local/data/tpch100g/orders/orders1017 orders
.import /local/data/tpch100g/orders/orders1018 orders
.import /local/data/tpch100g/orders/orders1019 orders
.import /local/data/tpch100g/orders/orders1020 orders
.import /local/data/tpch100g/orders/orders1021 orders
.import /local/data/tpch100g/orders/orders1022 orders
.import /local/data/tpch100g/orders/orders1023 orders
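-- partsupp: bulk-load the partsupp table from its chunked data files
-- (descriptive comment only; assumes sqlite3 shell input, which skips comment-only lines)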
.import /local/data/tpch100g/partsupp/partsupp0000 partsupp
.import /local/data/tpch100g/partsupp/partsupp0001 partsupp
.import /local/data/tpch100g/partsupp/partsupp0002 partsupp
.import /local/data/tpch100g/partsupp/partsupp0003 partsupp
.import /local/data/tpch100g/partsupp/partsupp0004 partsupp
.import /local/data/tpch100g/partsupp/partsupp0005 partsupp
.import /local/data/tpch100g/partsupp/partsupp0006 partsupp
.import /local/data/tpch100g/partsupp/partsupp0007 partsupp
.import /local/data/tpch100g/partsupp/partsupp0008 partsupp
.import /local/data/tpch100g/partsupp/partsupp0009 partsupp
.import /local/data/tpch100g/partsupp/partsupp0010 partsupp
.import /local/data/tpch100g/partsupp/partsupp0011 partsupp
.import /local/data/tpch100g/partsupp/partsupp0012 partsupp
.import /local/data/tpch100g/partsupp/partsupp0013 partsupp
.import /local/data/tpch100g/partsupp/partsupp0014 partsupp
.import /local/data/tpch100g/partsupp/partsupp0015 partsupp
.import /local/data/tpch100g/partsupp/partsupp0016 partsupp
.import /local/data/tpch100g/partsupp/partsupp0017 partsupp
.import /local/data/tpch100g/partsupp/partsupp0018 partsupp
.import /local/data/tpch100g/partsupp/partsupp0019 partsupp
.import /local/data/tpch100g/partsupp/partsupp0020 partsupp
.import /local/data/tpch100g/partsupp/partsupp0021 partsupp
.import /local/data/tpch100g/partsupp/partsupp0022 partsupp
.import /local/data/tpch100g/partsupp/partsupp0023 partsupp
.import /local/data/tpch100g/partsupp/partsupp0024 partsupp
.import /local/data/tpch100g/partsupp/partsupp0025 partsupp
.import /local/data/tpch100g/partsupp/partsupp0026 partsupp
.import /local/data/tpch100g/partsupp/partsupp0027 partsupp
.import /local/data/tpch100g/partsupp/partsupp0028 partsupp
.import /local/data/tpch100g/partsupp/partsupp0029 partsupp
.import /local/data/tpch100g/partsupp/partsupp0030 partsupp
.import /local/data/tpch100g/partsupp/partsupp0031 partsupp
.import /local/data/tpch100g/partsupp/partsupp0032 partsupp
.import /local/data/tpch100g/partsupp/partsupp0033 partsupp
.import /local/data/tpch100g/partsupp/partsupp0034 partsupp
.import /local/data/tpch100g/partsupp/partsupp0035 partsupp
.import /local/data/tpch100g/partsupp/partsupp0036 partsupp
.import /local/data/tpch100g/partsupp/partsupp0037 partsupp
.import /local/data/tpch100g/partsupp/partsupp0038 partsupp
.import /local/data/tpch100g/partsupp/partsupp0039 partsupp
.import /local/data/tpch100g/partsupp/partsupp0040 partsupp
.import /local/data/tpch100g/partsupp/partsupp0041 partsupp
.import /local/data/tpch100g/partsupp/partsupp0042 partsupp
.import /local/data/tpch100g/partsupp/partsupp0043 partsupp
.import /local/data/tpch100g/partsupp/partsupp0044 partsupp
.import /local/data/tpch100g/partsupp/partsupp0045 partsupp
.import /local/data/tpch100g/partsupp/partsupp0046 partsupp
.import /local/data/tpch100g/partsupp/partsupp0047 partsupp
.import /local/data/tpch100g/partsupp/partsupp0048 partsupp
.import /local/data/tpch100g/partsupp/partsupp0049 partsupp
.import /local/data/tpch100g/partsupp/partsupp0050 partsupp
.import /local/data/tpch100g/partsupp/partsupp0051 partsupp
.import /local/data/tpch100g/partsupp/partsupp0052 partsupp
.import /local/data/tpch100g/partsupp/partsupp0053 partsupp
.import /local/data/tpch100g/partsupp/partsupp0054 partsupp
.import /local/data/tpch100g/partsupp/partsupp0055 partsupp
.import /local/data/tpch100g/partsupp/partsupp0056 partsupp
.import /local/data/tpch100g/partsupp/partsupp0057 partsupp
.import /local/data/tpch100g/partsupp/partsupp0058 partsupp
.import /local/data/tpch100g/partsupp/partsupp0059 partsupp
.import /local/data/tpch100g/partsupp/partsupp0060 partsupp
.import /local/data/tpch100g/partsupp/partsupp0061 partsupp
.import /local/data/tpch100g/partsupp/partsupp0062 partsupp
.import /local/data/tpch100g/partsupp/partsupp0063 partsupp
.import /local/data/tpch100g/partsupp/partsupp0064 partsupp
.import /local/data/tpch100g/partsupp/partsupp0065 partsupp
.import /local/data/tpch100g/partsupp/partsupp0066 partsupp
.import /local/data/tpch100g/partsupp/partsupp0067 partsupp
.import /local/data/tpch100g/partsupp/partsupp0068 partsupp
.import /local/data/tpch100g/partsupp/partsupp0069 partsupp
.import /local/data/tpch100g/partsupp/partsupp0070 partsupp
.import /local/data/tpch100g/partsupp/partsupp0071 partsupp
.import /local/data/tpch100g/partsupp/partsupp0072 partsupp
.import /local/data/tpch100g/partsupp/partsupp0073 partsupp
.import /local/data/tpch100g/partsupp/partsupp0074 partsupp
.import /local/data/tpch100g/partsupp/partsupp0075 partsupp
.import /local/data/tpch100g/partsupp/partsupp0076 partsupp
.import /local/data/tpch100g/partsupp/partsupp0077 partsupp
.import /local/data/tpch100g/partsupp/partsupp0078 partsupp
.import /local/data/tpch100g/partsupp/partsupp0079 partsupp
.import /local/data/tpch100g/partsupp/partsupp0080 partsupp
.import /local/data/tpch100g/partsupp/partsupp0081 partsupp
.import /local/data/tpch100g/partsupp/partsupp0082 partsupp
.import /local/data/tpch100g/partsupp/partsupp0083 partsupp
.import /local/data/tpch100g/partsupp/partsupp0084 partsupp
.import /local/data/tpch100g/partsupp/partsupp0085 partsupp
.import /local/data/tpch100g/partsupp/partsupp0086 partsupp
.import /local/data/tpch100g/partsupp/partsupp0087 partsupp
.import /local/data/tpch100g/partsupp/partsupp0088 partsupp
.import /local/data/tpch100g/partsupp/partsupp0089 partsupp
.import /local/data/tpch100g/partsupp/partsupp0090 partsupp
.import /local/data/tpch100g/partsupp/partsupp0091 partsupp
.import /local/data/tpch100g/partsupp/partsupp0092 partsupp
.import /local/data/tpch100g/partsupp/partsupp0093 partsupp
.import /local/data/tpch100g/partsupp/partsupp0094 partsupp
.import /local/data/tpch100g/partsupp/partsupp0095 partsupp
.import /local/data/tpch100g/partsupp/partsupp0096 partsupp
.import /local/data/tpch100g/partsupp/partsupp0097 partsupp
.import /local/data/tpch100g/partsupp/partsupp0098 partsupp
.import /local/data/tpch100g/partsupp/partsupp0099 partsupp
.import /local/data/tpch100g/partsupp/partsupp0100 partsupp
.import /local/data/tpch100g/partsupp/partsupp0101 partsupp
.import /local/data/tpch100g/partsupp/partsupp0102 partsupp
.import /local/data/tpch100g/partsupp/partsupp0103 partsupp
.import /local/data/tpch100g/partsupp/partsupp0104 partsupp
.import /local/data/tpch100g/partsupp/partsupp0105 partsupp
.import /local/data/tpch100g/partsupp/partsupp0106 partsupp
.import /local/data/tpch100g/partsupp/partsupp0107 partsupp
.import /local/data/tpch100g/partsupp/partsupp0108 partsupp
.import /local/data/tpch100g/partsupp/partsupp0109 partsupp
.import /local/data/tpch100g/partsupp/partsupp0110 partsupp
.import /local/data/tpch100g/partsupp/partsupp0111 partsupp
.import /local/data/tpch100g/partsupp/partsupp0112 partsupp
.import /local/data/tpch100g/partsupp/partsupp0113 partsupp
.import /local/data/tpch100g/partsupp/partsupp0114 partsupp
.import /local/data/tpch100g/partsupp/partsupp0115 partsupp
.import /local/data/tpch100g/partsupp/partsupp0116 partsupp
.import /local/data/tpch100g/partsupp/partsupp0117 partsupp
.import /local/data/tpch100g/partsupp/partsupp0118 partsupp
.import /local/data/tpch100g/partsupp/partsupp0119 partsupp
.import /local/data/tpch100g/partsupp/partsupp0120 partsupp
.import /local/data/tpch100g/partsupp/partsupp0121 partsupp
.import /local/data/tpch100g/partsupp/partsupp0122 partsupp
.import /local/data/tpch100g/partsupp/partsupp0123 partsupp
.import /local/data/tpch100g/partsupp/partsupp0124 partsupp
.import /local/data/tpch100g/partsupp/partsupp0125 partsupp
.import /local/data/tpch100g/partsupp/partsupp0126 partsupp
.import /local/data/tpch100g/partsupp/partsupp0127 partsupp
.import /local/data/tpch100g/partsupp/partsupp0128 partsupp
.import /local/data/tpch100g/partsupp/partsupp0129 partsupp
.import /local/data/tpch100g/partsupp/partsupp0130 partsupp
.import /local/data/tpch100g/partsupp/partsupp0131 partsupp
.import /local/data/tpch100g/partsupp/partsupp0132 partsupp
.import /local/data/tpch100g/partsupp/partsupp0133 partsupp
.import /local/data/tpch100g/partsupp/partsupp0134 partsupp
.import /local/data/tpch100g/partsupp/partsupp0135 partsupp
.import /local/data/tpch100g/partsupp/partsupp0136 partsupp
.import /local/data/tpch100g/partsupp/partsupp0137 partsupp
.import /local/data/tpch100g/partsupp/partsupp0138 partsupp
.import /local/data/tpch100g/partsupp/partsupp0139 partsupp
.import /local/data/tpch100g/partsupp/partsupp0140 partsupp
.import /local/data/tpch100g/partsupp/partsupp0141 partsupp
.import /local/data/tpch100g/partsupp/partsupp0142 partsupp
.import /local/data/tpch100g/partsupp/partsupp0143 partsupp
.import /local/data/tpch100g/partsupp/partsupp0144 partsupp
.import /local/data/tpch100g/partsupp/partsupp0145 partsupp
.import /local/data/tpch100g/partsupp/partsupp0146 partsupp
.import /local/data/tpch100g/partsupp/partsupp0147 partsupp
.import /local/data/tpch100g/partsupp/partsupp0148 partsupp
.import /local/data/tpch100g/partsupp/partsupp0149 partsupp
.import /local/data/tpch100g/partsupp/partsupp0150 partsupp
.import /local/data/tpch100g/partsupp/partsupp0151 partsupp
.import /local/data/tpch100g/partsupp/partsupp0152 partsupp
.import /local/data/tpch100g/partsupp/partsupp0153 partsupp
.import /local/data/tpch100g/partsupp/partsupp0154 partsupp
.import /local/data/tpch100g/partsupp/partsupp0155 partsupp
.import /local/data/tpch100g/partsupp/partsupp0156 partsupp
.import /local/data/tpch100g/partsupp/partsupp0157 partsupp
.import /local/data/tpch100g/partsupp/partsupp0158 partsupp
.import /local/data/tpch100g/partsupp/partsupp0159 partsupp
.import /local/data/tpch100g/partsupp/partsupp0160 partsupp
.import /local/data/tpch100g/partsupp/partsupp0161 partsupp
.import /local/data/tpch100g/partsupp/partsupp0162 partsupp
.import /local/data/tpch100g/partsupp/partsupp0163 partsupp
.import /local/data/tpch100g/partsupp/partsupp0164 partsupp
.import /local/data/tpch100g/partsupp/partsupp0165 partsupp
.import /local/data/tpch100g/partsupp/partsupp0166 partsupp
.import /local/data/tpch100g/partsupp/partsupp0167 partsupp
.import /local/data/tpch100g/partsupp/partsupp0168 partsupp
.import /local/data/tpch100g/partsupp/partsupp0169 partsupp
.import /local/data/tpch100g/partsupp/partsupp0170 partsupp
.import /local/data/tpch100g/partsupp/partsupp0171 partsupp
.import /local/data/tpch100g/partsupp/partsupp0172 partsupp
.import /local/data/tpch100g/partsupp/partsupp0173 partsupp
.import /local/data/tpch100g/partsupp/partsupp0174 partsupp
.import /local/data/tpch100g/partsupp/partsupp0175 partsupp
.import /local/data/tpch100g/partsupp/partsupp0176 partsupp
.import /local/data/tpch100g/partsupp/partsupp0177 partsupp
.import /local/data/tpch100g/partsupp/partsupp0178 partsupp
.import /local/data/tpch100g/partsupp/partsupp0179 partsupp
.import /local/data/tpch100g/partsupp/partsupp0180 partsupp
.import /local/data/tpch100g/partsupp/partsupp0181 partsupp
.import /local/data/tpch100g/partsupp/partsupp0182 partsupp
.import /local/data/tpch100g/partsupp/partsupp0183 partsupp
.import /local/data/tpch100g/partsupp/partsupp0184 partsupp
.import /local/data/tpch100g/partsupp/partsupp0185 partsupp
.import /local/data/tpch100g/partsupp/partsupp0186 partsupp
.import /local/data/tpch100g/partsupp/partsupp0187 partsupp
.import /local/data/tpch100g/partsupp/partsupp0188 partsupp
.import /local/data/tpch100g/partsupp/partsupp0189 partsupp
.import /local/data/tpch100g/partsupp/partsupp0190 partsupp
.import /local/data/tpch100g/partsupp/partsupp0191 partsupp
.import /local/data/tpch100g/partsupp/partsupp0192 partsupp
.import /local/data/tpch100g/partsupp/partsupp0193 partsupp
.import /local/data/tpch100g/partsupp/partsupp0194 partsupp
.import /local/data/tpch100g/partsupp/partsupp0195 partsupp
.import /local/data/tpch100g/partsupp/partsupp0196 partsupp
.import /local/data/tpch100g/partsupp/partsupp0197 partsupp
.import /local/data/tpch100g/partsupp/partsupp0198 partsupp
.import /local/data/tpch100g/partsupp/partsupp0199 partsupp
.import /local/data/tpch100g/partsupp/partsupp0200 partsupp
.import /local/data/tpch100g/partsupp/partsupp0201 partsupp
.import /local/data/tpch100g/partsupp/partsupp0202 partsupp
.import /local/data/tpch100g/partsupp/partsupp0203 partsupp
.import /local/data/tpch100g/partsupp/partsupp0204 partsupp
.import /local/data/tpch100g/partsupp/partsupp0205 partsupp
.import /local/data/tpch100g/partsupp/partsupp0206 partsupp
.import /local/data/tpch100g/partsupp/partsupp0207 partsupp
.import /local/data/tpch100g/partsupp/partsupp0208 partsupp
.import /local/data/tpch100g/partsupp/partsupp0209 partsupp
.import /local/data/tpch100g/partsupp/partsupp0210 partsupp
.import /local/data/tpch100g/partsupp/partsupp0211 partsupp
.import /local/data/tpch100g/partsupp/partsupp0212 partsupp
.import /local/data/tpch100g/partsupp/partsupp0213 partsupp
.import /local/data/tpch100g/partsupp/partsupp0214 partsupp
.import /local/data/tpch100g/partsupp/partsupp0215 partsupp
.import /local/data/tpch100g/partsupp/partsupp0216 partsupp
.import /local/data/tpch100g/partsupp/partsupp0217 partsupp
.import /local/data/tpch100g/partsupp/partsupp0218 partsupp
.import /local/data/tpch100g/partsupp/partsupp0219 partsupp
.import /local/data/tpch100g/partsupp/partsupp0220 partsupp
.import /local/data/tpch100g/partsupp/partsupp0221 partsupp
.import /local/data/tpch100g/partsupp/partsupp0222 partsupp
.import /local/data/tpch100g/partsupp/partsupp0223 partsupp
.import /local/data/tpch100g/partsupp/partsupp0224 partsupp
.import /local/data/tpch100g/partsupp/partsupp0225 partsupp
.import /local/data/tpch100g/partsupp/partsupp0226 partsupp
.import /local/data/tpch100g/partsupp/partsupp0227 partsupp
.import /local/data/tpch100g/partsupp/partsupp0228 partsupp
.import /local/data/tpch100g/partsupp/partsupp0229 partsupp
.import /local/data/tpch100g/partsupp/partsupp0230 partsupp
.import /local/data/tpch100g/partsupp/partsupp0231 partsupp
.import /local/data/tpch100g/partsupp/partsupp0232 partsupp
.import /local/data/tpch100g/partsupp/partsupp0233 partsupp
.import /local/data/tpch100g/partsupp/partsupp0234 partsupp
.import /local/data/tpch100g/partsupp/partsupp0235 partsupp
.import /local/data/tpch100g/partsupp/partsupp0236 partsupp
.import /local/data/tpch100g/partsupp/partsupp0237 partsupp
.import /local/data/tpch100g/partsupp/partsupp0238 partsupp
.import /local/data/tpch100g/partsupp/partsupp0239 partsupp
.import /local/data/tpch100g/partsupp/partsupp0240 partsupp
.import /local/data/tpch100g/partsupp/partsupp0241 partsupp
.import /local/data/tpch100g/partsupp/partsupp0242 partsupp
.import /local/data/tpch100g/partsupp/partsupp0243 partsupp
.import /local/data/tpch100g/partsupp/partsupp0244 partsupp
.import /local/data/tpch100g/partsupp/partsupp0245 partsupp
.import /local/data/tpch100g/partsupp/partsupp0246 partsupp
.import /local/data/tpch100g/partsupp/partsupp0247 partsupp
.import /local/data/tpch100g/partsupp/partsupp0248 partsupp
.import /local/data/tpch100g/partsupp/partsupp0249 partsupp
.import /local/data/tpch100g/partsupp/partsupp0250 partsupp
.import /local/data/tpch100g/partsupp/partsupp0251 partsupp
.import /local/data/tpch100g/partsupp/partsupp0252 partsupp
.import /local/data/tpch100g/partsupp/partsupp0253 partsupp
.import /local/data/tpch100g/partsupp/partsupp0254 partsupp
.import /local/data/tpch100g/partsupp/partsupp0255 partsupp
.import /local/data/tpch100g/partsupp/partsupp0256 partsupp
.import /local/data/tpch100g/partsupp/partsupp0257 partsupp
.import /local/data/tpch100g/partsupp/partsupp0258 partsupp
.import /local/data/tpch100g/partsupp/partsupp0259 partsupp
.import /local/data/tpch100g/partsupp/partsupp0260 partsupp
.import /local/data/tpch100g/partsupp/partsupp0261 partsupp
.import /local/data/tpch100g/partsupp/partsupp0262 partsupp
.import /local/data/tpch100g/partsupp/partsupp0263 partsupp
.import /local/data/tpch100g/partsupp/partsupp0264 partsupp
.import /local/data/tpch100g/partsupp/partsupp0265 partsupp
.import /local/data/tpch100g/partsupp/partsupp0266 partsupp
.import /local/data/tpch100g/partsupp/partsupp0267 partsupp
.import /local/data/tpch100g/partsupp/partsupp0268 partsupp
.import /local/data/tpch100g/partsupp/partsupp0269 partsupp
.import /local/data/tpch100g/partsupp/partsupp0270 partsupp
.import /local/data/tpch100g/partsupp/partsupp0271 partsupp
.import /local/data/tpch100g/partsupp/partsupp0272 partsupp
.import /local/data/tpch100g/partsupp/partsupp0273 partsupp
.import /local/data/tpch100g/partsupp/partsupp0274 partsupp
.import /local/data/tpch100g/partsupp/partsupp0275 partsupp
.import /local/data/tpch100g/partsupp/partsupp0276 partsupp
.import /local/data/tpch100g/partsupp/partsupp0277 partsupp
.import /local/data/tpch100g/partsupp/partsupp0278 partsupp
.import /local/data/tpch100g/partsupp/partsupp0279 partsupp
.import /local/data/tpch100g/partsupp/partsupp0280 partsupp
.import /local/data/tpch100g/partsupp/partsupp0281 partsupp
.import /local/data/tpch100g/partsupp/partsupp0282 partsupp
.import /local/data/tpch100g/partsupp/partsupp0283 partsupp
.import /local/data/tpch100g/partsupp/partsupp0284 partsupp
.import /local/data/tpch100g/partsupp/partsupp0285 partsupp
.import /local/data/tpch100g/partsupp/partsupp0286 partsupp
.import /local/data/tpch100g/partsupp/partsupp0287 partsupp
.import /local/data/tpch100g/partsupp/partsupp0288 partsupp
.import /local/data/tpch100g/partsupp/partsupp0289 partsupp
.import /local/data/tpch100g/partsupp/partsupp0290 partsupp
.import /local/data/tpch100g/partsupp/partsupp0291 partsupp
.import /local/data/tpch100g/partsupp/partsupp0292 partsupp
.import /local/data/tpch100g/partsupp/partsupp0293 partsupp
.import /local/data/tpch100g/partsupp/partsupp0294 partsupp
.import /local/data/tpch100g/partsupp/partsupp0295 partsupp
.import /local/data/tpch100g/partsupp/partsupp0296 partsupp
.import /local/data/tpch100g/partsupp/partsupp0297 partsupp
.import /local/data/tpch100g/partsupp/partsupp0298 partsupp
.import /local/data/tpch100g/partsupp/partsupp0299 partsupp
.import /local/data/tpch100g/partsupp/partsupp0300 partsupp
.import /local/data/tpch100g/partsupp/partsupp0301 partsupp
.import /local/data/tpch100g/partsupp/partsupp0302 partsupp
.import /local/data/tpch100g/partsupp/partsupp0303 partsupp
.import /local/data/tpch100g/partsupp/partsupp0304 partsupp
.import /local/data/tpch100g/partsupp/partsupp0305 partsupp
.import /local/data/tpch100g/partsupp/partsupp0306 partsupp
.import /local/data/tpch100g/partsupp/partsupp0307 partsupp
.import /local/data/tpch100g/partsupp/partsupp0308 partsupp
.import /local/data/tpch100g/partsupp/partsupp0309 partsupp
.import /local/data/tpch100g/partsupp/partsupp0310 partsupp
.import /local/data/tpch100g/partsupp/partsupp0311 partsupp
.import /local/data/tpch100g/partsupp/partsupp0312 partsupp
.import /local/data/tpch100g/partsupp/partsupp0313 partsupp
.import /local/data/tpch100g/partsupp/partsupp0314 partsupp
.import /local/data/tpch100g/partsupp/partsupp0315 partsupp
.import /local/data/tpch100g/partsupp/partsupp0316 partsupp
.import /local/data/tpch100g/partsupp/partsupp0317 partsupp
.import /local/data/tpch100g/partsupp/partsupp0318 partsupp
.import /local/data/tpch100g/partsupp/partsupp0319 partsupp
.import /local/data/tpch100g/partsupp/partsupp0320 partsupp
.import /local/data/tpch100g/partsupp/partsupp0321 partsupp
.import /local/data/tpch100g/partsupp/partsupp0322 partsupp
.import /local/data/tpch100g/partsupp/partsupp0323 partsupp
.import /local/data/tpch100g/partsupp/partsupp0324 partsupp
.import /local/data/tpch100g/partsupp/partsupp0325 partsupp
.import /local/data/tpch100g/partsupp/partsupp0326 partsupp
.import /local/data/tpch100g/partsupp/partsupp0327 partsupp
.import /local/data/tpch100g/partsupp/partsupp0328 partsupp
.import /local/data/tpch100g/partsupp/partsupp0329 partsupp
.import /local/data/tpch100g/partsupp/partsupp0330 partsupp
.import /local/data/tpch100g/partsupp/partsupp0331 partsupp
.import /local/data/tpch100g/partsupp/partsupp0332 partsupp
.import /local/data/tpch100g/partsupp/partsupp0333 partsupp
.import /local/data/tpch100g/partsupp/partsupp0334 partsupp
.import /local/data/tpch100g/partsupp/partsupp0335 partsupp
.import /local/data/tpch100g/partsupp/partsupp0336 partsupp
.import /local/data/tpch100g/partsupp/partsupp0337 partsupp
.import /local/data/tpch100g/partsupp/partsupp0338 partsupp
.import /local/data/tpch100g/partsupp/partsupp0339 partsupp
.import /local/data/tpch100g/partsupp/partsupp0340 partsupp
.import /local/data/tpch100g/partsupp/partsupp0341 partsupp
.import /local/data/tpch100g/partsupp/partsupp0342 partsupp
.import /local/data/tpch100g/partsupp/partsupp0343 partsupp
.import /local/data/tpch100g/partsupp/partsupp0344 partsupp
.import /local/data/tpch100g/partsupp/partsupp0345 partsupp
.import /local/data/tpch100g/partsupp/partsupp0346 partsupp
.import /local/data/tpch100g/partsupp/partsupp0347 partsupp
.import /local/data/tpch100g/partsupp/partsupp0348 partsupp
.import /local/data/tpch100g/partsupp/partsupp0349 partsupp
.import /local/data/tpch100g/partsupp/partsupp0350 partsupp
.import /local/data/tpch100g/partsupp/partsupp0351 partsupp
.import /local/data/tpch100g/partsupp/partsupp0352 partsupp
.import /local/data/tpch100g/partsupp/partsupp0353 partsupp
.import /local/data/tpch100g/partsupp/partsupp0354 partsupp
.import /local/data/tpch100g/partsupp/partsupp0355 partsupp
.import /local/data/tpch100g/partsupp/partsupp0356 partsupp
.import /local/data/tpch100g/partsupp/partsupp0357 partsupp
.import /local/data/tpch100g/partsupp/partsupp0358 partsupp
.import /local/data/tpch100g/partsupp/partsupp0359 partsupp
.import /local/data/tpch100g/partsupp/partsupp0360 partsupp
.import /local/data/tpch100g/partsupp/partsupp0361 partsupp
.import /local/data/tpch100g/partsupp/partsupp0362 partsupp
.import /local/data/tpch100g/partsupp/partsupp0363 partsupp
.import /local/data/tpch100g/partsupp/partsupp0364 partsupp
.import /local/data/tpch100g/partsupp/partsupp0365 partsupp
.import /local/data/tpch100g/partsupp/partsupp0366 partsupp
.import /local/data/tpch100g/partsupp/partsupp0367 partsupp
.import /local/data/tpch100g/partsupp/partsupp0368 partsupp
.import /local/data/tpch100g/partsupp/partsupp0369 partsupp
.import /local/data/tpch100g/partsupp/partsupp0370 partsupp
.import /local/data/tpch100g/partsupp/partsupp0371 partsupp
.import /local/data/tpch100g/partsupp/partsupp0372 partsupp
.import /local/data/tpch100g/partsupp/partsupp0373 partsupp
.import /local/data/tpch100g/partsupp/partsupp0374 partsupp
.import /local/data/tpch100g/partsupp/partsupp0375 partsupp
.import /local/data/tpch100g/partsupp/partsupp0376 partsupp
.import /local/data/tpch100g/partsupp/partsupp0377 partsupp
.import /local/data/tpch100g/partsupp/partsupp0378 partsupp
.import /local/data/tpch100g/partsupp/partsupp0379 partsupp
.import /local/data/tpch100g/partsupp/partsupp0380 partsupp
.import /local/data/tpch100g/partsupp/partsupp0381 partsupp
.import /local/data/tpch100g/partsupp/partsupp0382 partsupp
.import /local/data/tpch100g/partsupp/partsupp0383 partsupp
.import /local/data/tpch100g/partsupp/partsupp0384 partsupp
.import /local/data/tpch100g/partsupp/partsupp0385 partsupp
.import /local/data/tpch100g/partsupp/partsupp0386 partsupp
.import /local/data/tpch100g/partsupp/partsupp0387 partsupp
.import /local/data/tpch100g/partsupp/partsupp0388 partsupp
.import /local/data/tpch100g/partsupp/partsupp0389 partsupp
.import /local/data/tpch100g/partsupp/partsupp0390 partsupp
.import /local/data/tpch100g/partsupp/partsupp0391 partsupp
.import /local/data/tpch100g/partsupp/partsupp0392 partsupp
.import /local/data/tpch100g/partsupp/partsupp0393 partsupp
.import /local/data/tpch100g/partsupp/partsupp0394 partsupp
.import /local/data/tpch100g/partsupp/partsupp0395 partsupp
.import /local/data/tpch100g/partsupp/partsupp0396 partsupp
.import /local/data/tpch100g/partsupp/partsupp0397 partsupp
.import /local/data/tpch100g/partsupp/partsupp0398 partsupp
.import /local/data/tpch100g/partsupp/partsupp0399 partsupp
.import /local/data/tpch100g/partsupp/partsupp0400 partsupp
.import /local/data/tpch100g/partsupp/partsupp0401 partsupp
.import /local/data/tpch100g/partsupp/partsupp0402 partsupp
.import /local/data/tpch100g/partsupp/partsupp0403 partsupp
.import /local/data/tpch100g/partsupp/partsupp0404 partsupp
.import /local/data/tpch100g/partsupp/partsupp0405 partsupp
.import /local/data/tpch100g/partsupp/partsupp0406 partsupp
.import /local/data/tpch100g/partsupp/partsupp0407 partsupp
.import /local/data/tpch100g/partsupp/partsupp0408 partsupp
.import /local/data/tpch100g/partsupp/partsupp0409 partsupp
.import /local/data/tpch100g/partsupp/partsupp0410 partsupp
.import /local/data/tpch100g/partsupp/partsupp0411 partsupp
.import /local/data/tpch100g/partsupp/partsupp0412 partsupp
.import /local/data/tpch100g/partsupp/partsupp0413 partsupp
.import /local/data/tpch100g/partsupp/partsupp0414 partsupp
.import /local/data/tpch100g/partsupp/partsupp0415 partsupp
.import /local/data/tpch100g/partsupp/partsupp0416 partsupp
.import /local/data/tpch100g/partsupp/partsupp0417 partsupp
.import /local/data/tpch100g/partsupp/partsupp0418 partsupp
.import /local/data/tpch100g/partsupp/partsupp0419 partsupp
.import /local/data/tpch100g/partsupp/partsupp0420 partsupp
.import /local/data/tpch100g/partsupp/partsupp0421 partsupp
.import /local/data/tpch100g/partsupp/partsupp0422 partsupp
.import /local/data/tpch100g/partsupp/partsupp0423 partsupp
.import /local/data/tpch100g/partsupp/partsupp0424 partsupp
.import /local/data/tpch100g/partsupp/partsupp0425 partsupp
.import /local/data/tpch100g/partsupp/partsupp0426 partsupp
.import /local/data/tpch100g/partsupp/partsupp0427 partsupp
.import /local/data/tpch100g/partsupp/partsupp0428 partsupp
.import /local/data/tpch100g/partsupp/partsupp0429 partsupp
.import /local/data/tpch100g/partsupp/partsupp0430 partsupp
.import /local/data/tpch100g/partsupp/partsupp0431 partsupp
.import /local/data/tpch100g/partsupp/partsupp0432 partsupp
.import /local/data/tpch100g/partsupp/partsupp0433 partsupp
.import /local/data/tpch100g/partsupp/partsupp0434 partsupp
.import /local/data/tpch100g/partsupp/partsupp0435 partsupp
.import /local/data/tpch100g/partsupp/partsupp0436 partsupp
.import /local/data/tpch100g/partsupp/partsupp0437 partsupp
.import /local/data/tpch100g/partsupp/partsupp0438 partsupp
.import /local/data/tpch100g/partsupp/partsupp0439 partsupp
.import /local/data/tpch100g/partsupp/partsupp0440 partsupp
.import /local/data/tpch100g/partsupp/partsupp0441 partsupp
.import /local/data/tpch100g/partsupp/partsupp0442 partsupp
.import /local/data/tpch100g/partsupp/partsupp0443 partsupp
.import /local/data/tpch100g/partsupp/partsupp0444 partsupp
.import /local/data/tpch100g/partsupp/partsupp0445 partsupp
.import /local/data/tpch100g/partsupp/partsupp0446 partsupp
.import /local/data/tpch100g/partsupp/partsupp0447 partsupp
.import /local/data/tpch100g/partsupp/partsupp0448 partsupp
.import /local/data/tpch100g/partsupp/partsupp0449 partsupp
.import /local/data/tpch100g/partsupp/partsupp0450 partsupp
.import /local/data/tpch100g/partsupp/partsupp0451 partsupp
.import /local/data/tpch100g/partsupp/partsupp0452 partsupp
.import /local/data/tpch100g/partsupp/partsupp0453 partsupp
.import /local/data/tpch100g/partsupp/partsupp0454 partsupp
.import /local/data/tpch100g/partsupp/partsupp0455 partsupp
.import /local/data/tpch100g/partsupp/partsupp0456 partsupp
.import /local/data/tpch100g/partsupp/partsupp0457 partsupp
.import /local/data/tpch100g/partsupp/partsupp0458 partsupp
.import /local/data/tpch100g/partsupp/partsupp0459 partsupp
.import /local/data/tpch100g/partsupp/partsupp0460 partsupp
.import /local/data/tpch100g/partsupp/partsupp0461 partsupp
.import /local/data/tpch100g/partsupp/partsupp0462 partsupp
.import /local/data/tpch100g/partsupp/partsupp0463 partsupp
.import /local/data/tpch100g/partsupp/partsupp0464 partsupp
.import /local/data/tpch100g/partsupp/partsupp0465 partsupp
.import /local/data/tpch100g/partsupp/partsupp0466 partsupp
.import /local/data/tpch100g/partsupp/partsupp0467 partsupp
.import /local/data/tpch100g/partsupp/partsupp0468 partsupp
.import /local/data/tpch100g/partsupp/partsupp0469 partsupp
.import /local/data/tpch100g/partsupp/partsupp0470 partsupp
.import /local/data/tpch100g/partsupp/partsupp0471 partsupp
.import /local/data/tpch100g/partsupp/partsupp0472 partsupp
.import /local/data/tpch100g/partsupp/partsupp0473 partsupp
.import /local/data/tpch100g/partsupp/partsupp0474 partsupp
.import /local/data/tpch100g/partsupp/partsupp0475 partsupp
.import /local/data/tpch100g/partsupp/partsupp0476 partsupp
.import /local/data/tpch100g/partsupp/partsupp0477 partsupp
.import /local/data/tpch100g/partsupp/partsupp0478 partsupp
.import /local/data/tpch100g/partsupp/partsupp0479 partsupp
.import /local/data/tpch100g/partsupp/partsupp0480 partsupp
.import /local/data/tpch100g/partsupp/partsupp0481 partsupp
.import /local/data/tpch100g/partsupp/partsupp0482 partsupp
.import /local/data/tpch100g/partsupp/partsupp0483 partsupp
.import /local/data/tpch100g/partsupp/partsupp0484 partsupp
.import /local/data/tpch100g/partsupp/partsupp0485 partsupp
.import /local/data/tpch100g/partsupp/partsupp0486 partsupp
.import /local/data/tpch100g/partsupp/partsupp0487 partsupp
.import /local/data/tpch100g/partsupp/partsupp0488 partsupp
.import /local/data/tpch100g/partsupp/partsupp0489 partsupp
.import /local/data/tpch100g/partsupp/partsupp0490 partsupp
.import /local/data/tpch100g/partsupp/partsupp0491 partsupp
.import /local/data/tpch100g/partsupp/partsupp0492 partsupp
.import /local/data/tpch100g/partsupp/partsupp0493 partsupp
.import /local/data/tpch100g/partsupp/partsupp0494 partsupp
.import /local/data/tpch100g/partsupp/partsupp0495 partsupp
.import /local/data/tpch100g/partsupp/partsupp0496 partsupp
.import /local/data/tpch100g/partsupp/partsupp0497 partsupp
.import /local/data/tpch100g/partsupp/partsupp0498 partsupp
.import /local/data/tpch100g/partsupp/partsupp0499 partsupp
.import /local/data/tpch100g/partsupp/partsupp0500 partsupp
.import /local/data/tpch100g/partsupp/partsupp0501 partsupp
.import /local/data/tpch100g/partsupp/partsupp0502 partsupp
.import /local/data/tpch100g/partsupp/partsupp0503 partsupp
.import /local/data/tpch100g/partsupp/partsupp0504 partsupp
.import /local/data/tpch100g/partsupp/partsupp0505 partsupp
.import /local/data/tpch100g/partsupp/partsupp0506 partsupp
.import /local/data/tpch100g/partsupp/partsupp0507 partsupp
.import /local/data/tpch100g/partsupp/partsupp0508 partsupp
.import /local/data/tpch100g/partsupp/partsupp0509 partsupp
.import /local/data/tpch100g/partsupp/partsupp0510 partsupp
.import /local/data/tpch100g/partsupp/partsupp0511 partsupp
.import /local/data/tpch100g/partsupp/partsupp0512 partsupp
.import /local/data/tpch100g/partsupp/partsupp0513 partsupp
.import /local/data/tpch100g/partsupp/partsupp0514 partsupp
.import /local/data/tpch100g/partsupp/partsupp0515 partsupp
.import /local/data/tpch100g/partsupp/partsupp0516 partsupp
.import /local/data/tpch100g/partsupp/partsupp0517 partsupp
.import /local/data/tpch100g/partsupp/partsupp0518 partsupp
.import /local/data/tpch100g/partsupp/partsupp0519 partsupp
.import /local/data/tpch100g/partsupp/partsupp0520 partsupp
.import /local/data/tpch100g/partsupp/partsupp0521 partsupp
.import /local/data/tpch100g/partsupp/partsupp0522 partsupp
.import /local/data/tpch100g/partsupp/partsupp0523 partsupp
.import /local/data/tpch100g/partsupp/partsupp0524 partsupp
.import /local/data/tpch100g/partsupp/partsupp0525 partsupp
.import /local/data/tpch100g/partsupp/partsupp0526 partsupp
.import /local/data/tpch100g/partsupp/partsupp0527 partsupp
.import /local/data/tpch100g/partsupp/partsupp0528 partsupp
.import /local/data/tpch100g/partsupp/partsupp0529 partsupp
.import /local/data/tpch100g/partsupp/partsupp0530 partsupp
.import /local/data/tpch100g/partsupp/partsupp0531 partsupp
.import /local/data/tpch100g/partsupp/partsupp0532 partsupp
.import /local/data/tpch100g/partsupp/partsupp0533 partsupp
.import /local/data/tpch100g/partsupp/partsupp0534 partsupp
.import /local/data/tpch100g/partsupp/partsupp0535 partsupp
.import /local/data/tpch100g/partsupp/partsupp0536 partsupp
.import /local/data/tpch100g/partsupp/partsupp0537 partsupp
.import /local/data/tpch100g/partsupp/partsupp0538 partsupp
.import /local/data/tpch100g/partsupp/partsupp0539 partsupp
.import /local/data/tpch100g/partsupp/partsupp0540 partsupp
.import /local/data/tpch100g/partsupp/partsupp0541 partsupp
.import /local/data/tpch100g/partsupp/partsupp0542 partsupp
.import /local/data/tpch100g/partsupp/partsupp0543 partsupp
.import /local/data/tpch100g/partsupp/partsupp0544 partsupp
.import /local/data/tpch100g/partsupp/partsupp0545 partsupp
.import /local/data/tpch100g/partsupp/partsupp0546 partsupp
.import /local/data/tpch100g/partsupp/partsupp0547 partsupp
.import /local/data/tpch100g/partsupp/partsupp0548 partsupp
.import /local/data/tpch100g/partsupp/partsupp0549 partsupp
.import /local/data/tpch100g/partsupp/partsupp0550 partsupp
.import /local/data/tpch100g/partsupp/partsupp0551 partsupp
.import /local/data/tpch100g/partsupp/partsupp0552 partsupp
.import /local/data/tpch100g/partsupp/partsupp0553 partsupp
.import /local/data/tpch100g/partsupp/partsupp0554 partsupp
.import /local/data/tpch100g/partsupp/partsupp0555 partsupp
.import /local/data/tpch100g/partsupp/partsupp0556 partsupp
.import /local/data/tpch100g/partsupp/partsupp0557 partsupp
.import /local/data/tpch100g/partsupp/partsupp0558 partsupp
.import /local/data/tpch100g/partsupp/partsupp0559 partsupp
.import /local/data/tpch100g/partsupp/partsupp0560 partsupp
.import /local/data/tpch100g/partsupp/partsupp0561 partsupp
.import /local/data/tpch100g/partsupp/partsupp0562 partsupp
.import /local/data/tpch100g/partsupp/partsupp0563 partsupp
.import /local/data/tpch100g/partsupp/partsupp0564 partsupp
.import /local/data/tpch100g/partsupp/partsupp0565 partsupp
.import /local/data/tpch100g/partsupp/partsupp0566 partsupp
.import /local/data/tpch100g/partsupp/partsupp0567 partsupp
.import /local/data/tpch100g/partsupp/partsupp0568 partsupp
.import /local/data/tpch100g/partsupp/partsupp0569 partsupp
.import /local/data/tpch100g/partsupp/partsupp0570 partsupp
.import /local/data/tpch100g/partsupp/partsupp0571 partsupp
.import /local/data/tpch100g/partsupp/partsupp0572 partsupp
.import /local/data/tpch100g/partsupp/partsupp0573 partsupp
.import /local/data/tpch100g/partsupp/partsupp0574 partsupp
.import /local/data/tpch100g/partsupp/partsupp0575 partsupp
.import /local/data/tpch100g/partsupp/partsupp0576 partsupp
.import /local/data/tpch100g/partsupp/partsupp0577 partsupp
.import /local/data/tpch100g/partsupp/partsupp0578 partsupp
.import /local/data/tpch100g/partsupp/partsupp0579 partsupp
.import /local/data/tpch100g/partsupp/partsupp0580 partsupp
.import /local/data/tpch100g/partsupp/partsupp0581 partsupp
.import /local/data/tpch100g/partsupp/partsupp0582 partsupp
.import /local/data/tpch100g/partsupp/partsupp0583 partsupp
.import /local/data/tpch100g/partsupp/partsupp0584 partsupp
.import /local/data/tpch100g/partsupp/partsupp0585 partsupp
.import /local/data/tpch100g/partsupp/partsupp0586 partsupp
.import /local/data/tpch100g/partsupp/partsupp0587 partsupp
.import /local/data/tpch100g/partsupp/partsupp0588 partsupp
.import /local/data/tpch100g/partsupp/partsupp0589 partsupp
.import /local/data/tpch100g/partsupp/partsupp0590 partsupp
.import /local/data/tpch100g/partsupp/partsupp0591 partsupp
.import /local/data/tpch100g/partsupp/partsupp0592 partsupp
.import /local/data/tpch100g/partsupp/partsupp0593 partsupp
.import /local/data/tpch100g/partsupp/partsupp0594 partsupp
.import /local/data/tpch100g/partsupp/partsupp0595 partsupp
.import /local/data/tpch100g/partsupp/partsupp0596 partsupp
.import /local/data/tpch100g/partsupp/partsupp0597 partsupp
.import /local/data/tpch100g/partsupp/partsupp0598 partsupp
.import /local/data/tpch100g/partsupp/partsupp0599 partsupp
.import /local/data/tpch100g/partsupp/partsupp0600 partsupp
.import /local/data/tpch100g/partsupp/partsupp0601 partsupp
.import /local/data/tpch100g/partsupp/partsupp0602 partsupp
.import /local/data/tpch100g/partsupp/partsupp0603 partsupp
.import /local/data/tpch100g/partsupp/partsupp0604 partsupp
.import /local/data/tpch100g/partsupp/partsupp0605 partsupp
.import /local/data/tpch100g/partsupp/partsupp0606 partsupp
.import /local/data/tpch100g/partsupp/partsupp0607 partsupp
.import /local/data/tpch100g/partsupp/partsupp0608 partsupp
.import /local/data/tpch100g/partsupp/partsupp0609 partsupp
.import /local/data/tpch100g/partsupp/partsupp0610 partsupp
.import /local/data/tpch100g/partsupp/partsupp0611 partsupp
.import /local/data/tpch100g/partsupp/partsupp0612 partsupp
.import /local/data/tpch100g/partsupp/partsupp0613 partsupp
.import /local/data/tpch100g/partsupp/partsupp0614 partsupp
.import /local/data/tpch100g/partsupp/partsupp0615 partsupp
.import /local/data/tpch100g/partsupp/partsupp0616 partsupp
.import /local/data/tpch100g/partsupp/partsupp0617 partsupp
.import /local/data/tpch100g/partsupp/partsupp0618 partsupp
.import /local/data/tpch100g/partsupp/partsupp0619 partsupp
.import /local/data/tpch100g/partsupp/partsupp0620 partsupp
.import /local/data/tpch100g/partsupp/partsupp0621 partsupp
.import /local/data/tpch100g/partsupp/partsupp0622 partsupp
.import /local/data/tpch100g/partsupp/partsupp0623 partsupp
.import /local/data/tpch100g/partsupp/partsupp0624 partsupp
.import /local/data/tpch100g/partsupp/partsupp0625 partsupp
.import /local/data/tpch100g/partsupp/partsupp0626 partsupp
.import /local/data/tpch100g/partsupp/partsupp0627 partsupp
.import /local/data/tpch100g/partsupp/partsupp0628 partsupp
.import /local/data/tpch100g/partsupp/partsupp0629 partsupp
.import /local/data/tpch100g/partsupp/partsupp0630 partsupp
.import /local/data/tpch100g/partsupp/partsupp0631 partsupp
.import /local/data/tpch100g/partsupp/partsupp0632 partsupp
.import /local/data/tpch100g/partsupp/partsupp0633 partsupp
.import /local/data/tpch100g/partsupp/partsupp0634 partsupp
.import /local/data/tpch100g/partsupp/partsupp0635 partsupp
.import /local/data/tpch100g/partsupp/partsupp0636 partsupp
.import /local/data/tpch100g/partsupp/partsupp0637 partsupp
.import /local/data/tpch100g/partsupp/partsupp0638 partsupp
.import /local/data/tpch100g/partsupp/partsupp0639 partsupp
.import /local/data/tpch100g/partsupp/partsupp0640 partsupp
.import /local/data/tpch100g/partsupp/partsupp0641 partsupp
.import /local/data/tpch100g/partsupp/partsupp0642 partsupp
.import /local/data/tpch100g/partsupp/partsupp0643 partsupp
.import /local/data/tpch100g/partsupp/partsupp0644 partsupp
.import /local/data/tpch100g/partsupp/partsupp0645 partsupp
.import /local/data/tpch100g/partsupp/partsupp0646 partsupp
.import /local/data/tpch100g/partsupp/partsupp0647 partsupp
.import /local/data/tpch100g/partsupp/partsupp0648 partsupp
.import /local/data/tpch100g/partsupp/partsupp0649 partsupp
.import /local/data/tpch100g/partsupp/partsupp0650 partsupp
.import /local/data/tpch100g/partsupp/partsupp0651 partsupp
.import /local/data/tpch100g/partsupp/partsupp0652 partsupp
.import /local/data/tpch100g/partsupp/partsupp0653 partsupp
.import /local/data/tpch100g/partsupp/partsupp0654 partsupp
.import /local/data/tpch100g/partsupp/partsupp0655 partsupp
.import /local/data/tpch100g/partsupp/partsupp0656 partsupp
.import /local/data/tpch100g/partsupp/partsupp0657 partsupp
.import /local/data/tpch100g/partsupp/partsupp0658 partsupp
.import /local/data/tpch100g/partsupp/partsupp0659 partsupp
.import /local/data/tpch100g/partsupp/partsupp0660 partsupp
.import /local/data/tpch100g/partsupp/partsupp0661 partsupp
.import /local/data/tpch100g/partsupp/partsupp0662 partsupp
.import /local/data/tpch100g/partsupp/partsupp0663 partsupp
.import /local/data/tpch100g/partsupp/partsupp0664 partsupp
.import /local/data/tpch100g/partsupp/partsupp0665 partsupp
.import /local/data/tpch100g/partsupp/partsupp0666 partsupp
.import /local/data/tpch100g/partsupp/partsupp0667 partsupp
.import /local/data/tpch100g/partsupp/partsupp0668 partsupp
.import /local/data/tpch100g/partsupp/partsupp0669 partsupp
.import /local/data/tpch100g/partsupp/partsupp0670 partsupp
.import /local/data/tpch100g/partsupp/partsupp0671 partsupp
.import /local/data/tpch100g/partsupp/partsupp0672 partsupp
.import /local/data/tpch100g/partsupp/partsupp0673 partsupp
.import /local/data/tpch100g/partsupp/partsupp0674 partsupp
.import /local/data/tpch100g/partsupp/partsupp0675 partsupp
.import /local/data/tpch100g/partsupp/partsupp0676 partsupp
.import /local/data/tpch100g/partsupp/partsupp0677 partsupp
.import /local/data/tpch100g/partsupp/partsupp0678 partsupp
.import /local/data/tpch100g/partsupp/partsupp0679 partsupp
.import /local/data/tpch100g/partsupp/partsupp0680 partsupp
.import /local/data/tpch100g/partsupp/partsupp0681 partsupp
.import /local/data/tpch100g/partsupp/partsupp0682 partsupp
.import /local/data/tpch100g/partsupp/partsupp0683 partsupp
.import /local/data/tpch100g/partsupp/partsupp0684 partsupp
.import /local/data/tpch100g/partsupp/partsupp0685 partsupp
.import /local/data/tpch100g/partsupp/partsupp0686 partsupp
.import /local/data/tpch100g/partsupp/partsupp0687 partsupp
.import /local/data/tpch100g/partsupp/partsupp0688 partsupp
.import /local/data/tpch100g/partsupp/partsupp0689 partsupp
.import /local/data/tpch100g/partsupp/partsupp0690 partsupp
.import /local/data/tpch100g/partsupp/partsupp0691 partsupp
.import /local/data/tpch100g/partsupp/partsupp0692 partsupp
.import /local/data/tpch100g/partsupp/partsupp0693 partsupp
.import /local/data/tpch100g/partsupp/partsupp0694 partsupp
.import /local/data/tpch100g/partsupp/partsupp0695 partsupp
.import /local/data/tpch100g/partsupp/partsupp0696 partsupp
.import /local/data/tpch100g/partsupp/partsupp0697 partsupp
.import /local/data/tpch100g/partsupp/partsupp0698 partsupp
.import /local/data/tpch100g/partsupp/partsupp0699 partsupp
.import /local/data/tpch100g/partsupp/partsupp0700 partsupp
.import /local/data/tpch100g/partsupp/partsupp0701 partsupp
.import /local/data/tpch100g/partsupp/partsupp0702 partsupp
.import /local/data/tpch100g/partsupp/partsupp0703 partsupp
.import /local/data/tpch100g/partsupp/partsupp0704 partsupp
.import /local/data/tpch100g/partsupp/partsupp0705 partsupp
.import /local/data/tpch100g/partsupp/partsupp0706 partsupp
.import /local/data/tpch100g/partsupp/partsupp0707 partsupp
.import /local/data/tpch100g/partsupp/partsupp0708 partsupp
.import /local/data/tpch100g/partsupp/partsupp0709 partsupp
.import /local/data/tpch100g/partsupp/partsupp0710 partsupp
.import /local/data/tpch100g/partsupp/partsupp0711 partsupp
.import /local/data/tpch100g/partsupp/partsupp0712 partsupp
.import /local/data/tpch100g/partsupp/partsupp0713 partsupp
.import /local/data/tpch100g/partsupp/partsupp0714 partsupp
.import /local/data/tpch100g/partsupp/partsupp0715 partsupp
.import /local/data/tpch100g/partsupp/partsupp0716 partsupp
.import /local/data/tpch100g/partsupp/partsupp0717 partsupp
.import /local/data/tpch100g/partsupp/partsupp0718 partsupp
.import /local/data/tpch100g/partsupp/partsupp0719 partsupp
.import /local/data/tpch100g/partsupp/partsupp0720 partsupp
.import /local/data/tpch100g/partsupp/partsupp0721 partsupp
.import /local/data/tpch100g/partsupp/partsupp0722 partsupp
.import /local/data/tpch100g/partsupp/partsupp0723 partsupp
.import /local/data/tpch100g/partsupp/partsupp0724 partsupp
.import /local/data/tpch100g/partsupp/partsupp0725 partsupp
.import /local/data/tpch100g/partsupp/partsupp0726 partsupp
.import /local/data/tpch100g/partsupp/partsupp0727 partsupp
.import /local/data/tpch100g/partsupp/partsupp0728 partsupp
.import /local/data/tpch100g/partsupp/partsupp0729 partsupp
.import /local/data/tpch100g/partsupp/partsupp0730 partsupp
.import /local/data/tpch100g/partsupp/partsupp0731 partsupp
.import /local/data/tpch100g/partsupp/partsupp0732 partsupp
.import /local/data/tpch100g/partsupp/partsupp0733 partsupp
.import /local/data/tpch100g/partsupp/partsupp0734 partsupp
.import /local/data/tpch100g/partsupp/partsupp0735 partsupp
.import /local/data/tpch100g/partsupp/partsupp0736 partsupp
.import /local/data/tpch100g/partsupp/partsupp0737 partsupp
.import /local/data/tpch100g/partsupp/partsupp0738 partsupp
.import /local/data/tpch100g/partsupp/partsupp0739 partsupp
.import /local/data/tpch100g/partsupp/partsupp0740 partsupp
.import /local/data/tpch100g/partsupp/partsupp0741 partsupp
.import /local/data/tpch100g/partsupp/partsupp0742 partsupp
.import /local/data/tpch100g/partsupp/partsupp0743 partsupp
.import /local/data/tpch100g/partsupp/partsupp0744 partsupp
.import /local/data/tpch100g/partsupp/partsupp0745 partsupp
.import /local/data/tpch100g/partsupp/partsupp0746 partsupp
.import /local/data/tpch100g/partsupp/partsupp0747 partsupp
.import /local/data/tpch100g/partsupp/partsupp0748 partsupp
.import /local/data/tpch100g/partsupp/partsupp0749 partsupp
.import /local/data/tpch100g/partsupp/partsupp0750 partsupp
.import /local/data/tpch100g/partsupp/partsupp0751 partsupp
.import /local/data/tpch100g/partsupp/partsupp0752 partsupp
.import /local/data/tpch100g/partsupp/partsupp0753 partsupp
.import /local/data/tpch100g/partsupp/partsupp0754 partsupp
.import /local/data/tpch100g/partsupp/partsupp0755 partsupp
.import /local/data/tpch100g/partsupp/partsupp0756 partsupp
.import /local/data/tpch100g/partsupp/partsupp0757 partsupp
.import /local/data/tpch100g/partsupp/partsupp0758 partsupp
.import /local/data/tpch100g/partsupp/partsupp0759 partsupp
.import /local/data/tpch100g/partsupp/partsupp0760 partsupp
.import /local/data/tpch100g/partsupp/partsupp0761 partsupp
.import /local/data/tpch100g/partsupp/partsupp0762 partsupp
.import /local/data/tpch100g/partsupp/partsupp0763 partsupp
.import /local/data/tpch100g/partsupp/partsupp0764 partsupp
.import /local/data/tpch100g/partsupp/partsupp0765 partsupp
.import /local/data/tpch100g/partsupp/partsupp0766 partsupp
.import /local/data/tpch100g/partsupp/partsupp0767 partsupp
.import /local/data/tpch100g/partsupp/partsupp0768 partsupp
.import /local/data/tpch100g/partsupp/partsupp0769 partsupp
.import /local/data/tpch100g/partsupp/partsupp0770 partsupp
.import /local/data/tpch100g/partsupp/partsupp0771 partsupp
.import /local/data/tpch100g/partsupp/partsupp0772 partsupp
.import /local/data/tpch100g/partsupp/partsupp0773 partsupp
.import /local/data/tpch100g/partsupp/partsupp0774 partsupp
.import /local/data/tpch100g/partsupp/partsupp0775 partsupp
.import /local/data/tpch100g/partsupp/partsupp0776 partsupp
.import /local/data/tpch100g/partsupp/partsupp0777 partsupp
.import /local/data/tpch100g/partsupp/partsupp0778 partsupp
.import /local/data/tpch100g/partsupp/partsupp0779 partsupp
.import /local/data/tpch100g/partsupp/partsupp0780 partsupp
.import /local/data/tpch100g/partsupp/partsupp0781 partsupp
.import /local/data/tpch100g/partsupp/partsupp0782 partsupp
.import /local/data/tpch100g/partsupp/partsupp0783 partsupp
.import /local/data/tpch100g/partsupp/partsupp0784 partsupp
.import /local/data/tpch100g/partsupp/partsupp0785 partsupp
.import /local/data/tpch100g/partsupp/partsupp0786 partsupp
.import /local/data/tpch100g/partsupp/partsupp0787 partsupp
.import /local/data/tpch100g/partsupp/partsupp0788 partsupp
.import /local/data/tpch100g/partsupp/partsupp0789 partsupp
.import /local/data/tpch100g/partsupp/partsupp0790 partsupp
.import /local/data/tpch100g/partsupp/partsupp0791 partsupp
.import /local/data/tpch100g/partsupp/partsupp0792 partsupp
.import /local/data/tpch100g/partsupp/partsupp0793 partsupp
.import /local/data/tpch100g/partsupp/partsupp0794 partsupp
.import /local/data/tpch100g/partsupp/partsupp0795 partsupp
.import /local/data/tpch100g/partsupp/partsupp0796 partsupp
.import /local/data/tpch100g/partsupp/partsupp0797 partsupp
.import /local/data/tpch100g/partsupp/partsupp0798 partsupp
.import /local/data/tpch100g/partsupp/partsupp0799 partsupp
.import /local/data/tpch100g/partsupp/partsupp0800 partsupp
.import /local/data/tpch100g/partsupp/partsupp0801 partsupp
.import /local/data/tpch100g/partsupp/partsupp0802 partsupp
.import /local/data/tpch100g/partsupp/partsupp0803 partsupp
.import /local/data/tpch100g/partsupp/partsupp0804 partsupp
.import /local/data/tpch100g/partsupp/partsupp0805 partsupp
.import /local/data/tpch100g/partsupp/partsupp0806 partsupp
.import /local/data/tpch100g/partsupp/partsupp0807 partsupp
.import /local/data/tpch100g/partsupp/partsupp0808 partsupp
.import /local/data/tpch100g/partsupp/partsupp0809 partsupp
.import /local/data/tpch100g/partsupp/partsupp0810 partsupp
.import /local/data/tpch100g/partsupp/partsupp0811 partsupp
.import /local/data/tpch100g/partsupp/partsupp0812 partsupp
.import /local/data/tpch100g/partsupp/partsupp0813 partsupp
.import /local/data/tpch100g/partsupp/partsupp0814 partsupp
.import /local/data/tpch100g/partsupp/partsupp0815 partsupp
.import /local/data/tpch100g/partsupp/partsupp0816 partsupp
.import /local/data/tpch100g/partsupp/partsupp0817 partsupp
.import /local/data/tpch100g/partsupp/partsupp0818 partsupp
.import /local/data/tpch100g/partsupp/partsupp0819 partsupp
.import /local/data/tpch100g/partsupp/partsupp0820 partsupp
.import /local/data/tpch100g/partsupp/partsupp0821 partsupp
.import /local/data/tpch100g/partsupp/partsupp0822 partsupp
.import /local/data/tpch100g/partsupp/partsupp0823 partsupp
.import /local/data/tpch100g/partsupp/partsupp0824 partsupp
.import /local/data/tpch100g/partsupp/partsupp0825 partsupp
.import /local/data/tpch100g/partsupp/partsupp0826 partsupp
.import /local/data/tpch100g/partsupp/partsupp0827 partsupp
.import /local/data/tpch100g/partsupp/partsupp0828 partsupp
.import /local/data/tpch100g/partsupp/partsupp0829 partsupp
.import /local/data/tpch100g/partsupp/partsupp0830 partsupp
.import /local/data/tpch100g/partsupp/partsupp0831 partsupp
.import /local/data/tpch100g/partsupp/partsupp0832 partsupp
.import /local/data/tpch100g/partsupp/partsupp0833 partsupp
.import /local/data/tpch100g/partsupp/partsupp0834 partsupp
.import /local/data/tpch100g/partsupp/partsupp0835 partsupp
.import /local/data/tpch100g/partsupp/partsupp0836 partsupp
.import /local/data/tpch100g/partsupp/partsupp0837 partsupp
.import /local/data/tpch100g/partsupp/partsupp0838 partsupp
.import /local/data/tpch100g/partsupp/partsupp0839 partsupp
.import /local/data/tpch100g/partsupp/partsupp0840 partsupp
.import /local/data/tpch100g/partsupp/partsupp0841 partsupp
.import /local/data/tpch100g/partsupp/partsupp0842 partsupp
.import /local/data/tpch100g/partsupp/partsupp0843 partsupp
.import /local/data/tpch100g/partsupp/partsupp0844 partsupp
.import /local/data/tpch100g/partsupp/partsupp0845 partsupp
.import /local/data/tpch100g/partsupp/partsupp0846 partsupp
.import /local/data/tpch100g/partsupp/partsupp0847 partsupp
.import /local/data/tpch100g/partsupp/partsupp0848 partsupp
.import /local/data/tpch100g/partsupp/partsupp0849 partsupp
.import /local/data/tpch100g/partsupp/partsupp0850 partsupp
.import /local/data/tpch100g/partsupp/partsupp0851 partsupp
.import /local/data/tpch100g/partsupp/partsupp0852 partsupp
.import /local/data/tpch100g/partsupp/partsupp0853 partsupp
.import /local/data/tpch100g/partsupp/partsupp0854 partsupp
.import /local/data/tpch100g/partsupp/partsupp0855 partsupp
.import /local/data/tpch100g/partsupp/partsupp0856 partsupp
.import /local/data/tpch100g/partsupp/partsupp0857 partsupp
.import /local/data/tpch100g/partsupp/partsupp0858 partsupp
.import /local/data/tpch100g/partsupp/partsupp0859 partsupp
.import /local/data/tpch100g/partsupp/partsupp0860 partsupp
.import /local/data/tpch100g/partsupp/partsupp0861 partsupp
.import /local/data/tpch100g/partsupp/partsupp0862 partsupp
.import /local/data/tpch100g/partsupp/partsupp0863 partsupp
.import /local/data/tpch100g/partsupp/partsupp0864 partsupp
.import /local/data/tpch100g/partsupp/partsupp0865 partsupp
.import /local/data/tpch100g/partsupp/partsupp0866 partsupp
.import /local/data/tpch100g/partsupp/partsupp0867 partsupp
.import /local/data/tpch100g/partsupp/partsupp0868 partsupp
.import /local/data/tpch100g/partsupp/partsupp0869 partsupp
.import /local/data/tpch100g/partsupp/partsupp0870 partsupp
.import /local/data/tpch100g/partsupp/partsupp0871 partsupp
.import /local/data/tpch100g/partsupp/partsupp0872 partsupp
.import /local/data/tpch100g/partsupp/partsupp0873 partsupp
.import /local/data/tpch100g/partsupp/partsupp0874 partsupp
.import /local/data/tpch100g/partsupp/partsupp0875 partsupp
.import /local/data/tpch100g/partsupp/partsupp0876 partsupp
.import /local/data/tpch100g/partsupp/partsupp0877 partsupp
.import /local/data/tpch100g/partsupp/partsupp0878 partsupp
.import /local/data/tpch100g/partsupp/partsupp0879 partsupp
.import /local/data/tpch100g/partsupp/partsupp0880 partsupp
.import /local/data/tpch100g/partsupp/partsupp0881 partsupp
.import /local/data/tpch100g/partsupp/partsupp0882 partsupp
.import /local/data/tpch100g/partsupp/partsupp0883 partsupp
.import /local/data/tpch100g/partsupp/partsupp0884 partsupp
.import /local/data/tpch100g/partsupp/partsupp0885 partsupp
.import /local/data/tpch100g/partsupp/partsupp0886 partsupp
.import /local/data/tpch100g/partsupp/partsupp0887 partsupp
.import /local/data/tpch100g/partsupp/partsupp0888 partsupp
.import /local/data/tpch100g/partsupp/partsupp0889 partsupp
.import /local/data/tpch100g/partsupp/partsupp0890 partsupp
.import /local/data/tpch100g/partsupp/partsupp0891 partsupp
.import /local/data/tpch100g/partsupp/partsupp0892 partsupp
.import /local/data/tpch100g/partsupp/partsupp0893 partsupp
.import /local/data/tpch100g/partsupp/partsupp0894 partsupp
.import /local/data/tpch100g/partsupp/partsupp0895 partsupp
.import /local/data/tpch100g/partsupp/partsupp0896 partsupp
.import /local/data/tpch100g/partsupp/partsupp0897 partsupp
.import /local/data/tpch100g/partsupp/partsupp0898 partsupp
.import /local/data/tpch100g/partsupp/partsupp0899 partsupp
.import /local/data/tpch100g/partsupp/partsupp0900 partsupp
.import /local/data/tpch100g/partsupp/partsupp0901 partsupp
.import /local/data/tpch100g/partsupp/partsupp0902 partsupp
.import /local/data/tpch100g/partsupp/partsupp0903 partsupp
.import /local/data/tpch100g/partsupp/partsupp0904 partsupp
.import /local/data/tpch100g/partsupp/partsupp0905 partsupp
.import /local/data/tpch100g/partsupp/partsupp0906 partsupp
.import /local/data/tpch100g/partsupp/partsupp0907 partsupp
.import /local/data/tpch100g/partsupp/partsupp0908 partsupp
.import /local/data/tpch100g/partsupp/partsupp0909 partsupp
.import /local/data/tpch100g/partsupp/partsupp0910 partsupp
.import /local/data/tpch100g/partsupp/partsupp0911 partsupp
.import /local/data/tpch100g/partsupp/partsupp0912 partsupp
.import /local/data/tpch100g/partsupp/partsupp0913 partsupp
.import /local/data/tpch100g/partsupp/partsupp0914 partsupp
.import /local/data/tpch100g/partsupp/partsupp0915 partsupp
.import /local/data/tpch100g/partsupp/partsupp0916 partsupp
.import /local/data/tpch100g/partsupp/partsupp0917 partsupp
.import /local/data/tpch100g/partsupp/partsupp0918 partsupp
.import /local/data/tpch100g/partsupp/partsupp0919 partsupp
.import /local/data/tpch100g/partsupp/partsupp0920 partsupp
.import /local/data/tpch100g/partsupp/partsupp0921 partsupp
.import /local/data/tpch100g/partsupp/partsupp0922 partsupp
.import /local/data/tpch100g/partsupp/partsupp0923 partsupp
.import /local/data/tpch100g/partsupp/partsupp0924 partsupp
.import /local/data/tpch100g/partsupp/partsupp0925 partsupp
.import /local/data/tpch100g/partsupp/partsupp0926 partsupp
.import /local/data/tpch100g/partsupp/partsupp0927 partsupp
.import /local/data/tpch100g/partsupp/partsupp0928 partsupp
.import /local/data/tpch100g/partsupp/partsupp0929 partsupp
.import /local/data/tpch100g/partsupp/partsupp0930 partsupp
.import /local/data/tpch100g/partsupp/partsupp0931 partsupp
.import /local/data/tpch100g/partsupp/partsupp0932 partsupp
.import /local/data/tpch100g/partsupp/partsupp0933 partsupp
.import /local/data/tpch100g/partsupp/partsupp0934 partsupp
.import /local/data/tpch100g/partsupp/partsupp0935 partsupp
.import /local/data/tpch100g/partsupp/partsupp0936 partsupp
.import /local/data/tpch100g/partsupp/partsupp0937 partsupp
.import /local/data/tpch100g/partsupp/partsupp0938 partsupp
.import /local/data/tpch100g/partsupp/partsupp0939 partsupp
.import /local/data/tpch100g/partsupp/partsupp0940 partsupp
.import /local/data/tpch100g/partsupp/partsupp0941 partsupp
.import /local/data/tpch100g/partsupp/partsupp0942 partsupp
.import /local/data/tpch100g/partsupp/partsupp0943 partsupp
.import /local/data/tpch100g/partsupp/partsupp0944 partsupp
.import /local/data/tpch100g/partsupp/partsupp0945 partsupp
.import /local/data/tpch100g/partsupp/partsupp0946 partsupp
.import /local/data/tpch100g/partsupp/partsupp0947 partsupp
.import /local/data/tpch100g/partsupp/partsupp0948 partsupp
.import /local/data/tpch100g/partsupp/partsupp0949 partsupp
.import /local/data/tpch100g/partsupp/partsupp0950 partsupp
.import /local/data/tpch100g/partsupp/partsupp0951 partsupp
.import /local/data/tpch100g/partsupp/partsupp0952 partsupp
.import /local/data/tpch100g/partsupp/partsupp0953 partsupp
.import /local/data/tpch100g/partsupp/partsupp0954 partsupp
.import /local/data/tpch100g/partsupp/partsupp0955 partsupp
.import /local/data/tpch100g/partsupp/partsupp0956 partsupp
.import /local/data/tpch100g/partsupp/partsupp0957 partsupp
.import /local/data/tpch100g/partsupp/partsupp0958 partsupp
.import /local/data/tpch100g/partsupp/partsupp0959 partsupp
.import /local/data/tpch100g/partsupp/partsupp0960 partsupp
.import /local/data/tpch100g/partsupp/partsupp0961 partsupp
.import /local/data/tpch100g/partsupp/partsupp0962 partsupp
.import /local/data/tpch100g/partsupp/partsupp0963 partsupp
.import /local/data/tpch100g/partsupp/partsupp0964 partsupp
.import /local/data/tpch100g/partsupp/partsupp0965 partsupp
.import /local/data/tpch100g/partsupp/partsupp0966 partsupp
.import /local/data/tpch100g/partsupp/partsupp0967 partsupp
.import /local/data/tpch100g/partsupp/partsupp0968 partsupp
.import /local/data/tpch100g/partsupp/partsupp0969 partsupp
.import /local/data/tpch100g/partsupp/partsupp0970 partsupp
.import /local/data/tpch100g/partsupp/partsupp0971 partsupp
.import /local/data/tpch100g/partsupp/partsupp0972 partsupp
.import /local/data/tpch100g/partsupp/partsupp0973 partsupp
.import /local/data/tpch100g/partsupp/partsupp0974 partsupp
.import /local/data/tpch100g/partsupp/partsupp0975 partsupp
.import /local/data/tpch100g/partsupp/partsupp0976 partsupp
.import /local/data/tpch100g/partsupp/partsupp0977 partsupp
.import /local/data/tpch100g/partsupp/partsupp0978 partsupp
.import /local/data/tpch100g/partsupp/partsupp0979 partsupp
.import /local/data/tpch100g/partsupp/partsupp0980 partsupp
.import /local/data/tpch100g/partsupp/partsupp0981 partsupp
.import /local/data/tpch100g/partsupp/partsupp0982 partsupp
.import /local/data/tpch100g/partsupp/partsupp0983 partsupp
.import /local/data/tpch100g/partsupp/partsupp0984 partsupp
.import /local/data/tpch100g/partsupp/partsupp0985 partsupp
.import /local/data/tpch100g/partsupp/partsupp0986 partsupp
.import /local/data/tpch100g/partsupp/partsupp0987 partsupp
.import /local/data/tpch100g/partsupp/partsupp0988 partsupp
.import /local/data/tpch100g/partsupp/partsupp0989 partsupp
.import /local/data/tpch100g/partsupp/partsupp0990 partsupp
.import /local/data/tpch100g/partsupp/partsupp0991 partsupp
.import /local/data/tpch100g/partsupp/partsupp0992 partsupp
.import /local/data/tpch100g/partsupp/partsupp0993 partsupp
.import /local/data/tpch100g/partsupp/partsupp0994 partsupp
.import /local/data/tpch100g/partsupp/partsupp0995 partsupp
.import /local/data/tpch100g/partsupp/partsupp0996 partsupp
.import /local/data/tpch100g/partsupp/partsupp0997 partsupp
.import /local/data/tpch100g/partsupp/partsupp0998 partsupp
.import /local/data/tpch100g/partsupp/partsupp0999 partsupp
.import /local/data/tpch100g/partsupp/partsupp1000 partsupp
.import /local/data/tpch100g/partsupp/partsupp1001 partsupp
.import /local/data/tpch100g/partsupp/partsupp1002 partsupp
.import /local/data/tpch100g/partsupp/partsupp1003 partsupp
.import /local/data/tpch100g/partsupp/partsupp1004 partsupp
.import /local/data/tpch100g/partsupp/partsupp1005 partsupp
.import /local/data/tpch100g/partsupp/partsupp1006 partsupp
.import /local/data/tpch100g/partsupp/partsupp1007 partsupp
.import /local/data/tpch100g/partsupp/partsupp1008 partsupp
.import /local/data/tpch100g/partsupp/partsupp1009 partsupp
.import /local/data/tpch100g/partsupp/partsupp1010 partsupp
.import /local/data/tpch100g/partsupp/partsupp1011 partsupp
.import /local/data/tpch100g/partsupp/partsupp1012 partsupp
.import /local/data/tpch100g/partsupp/partsupp1013 partsupp
.import /local/data/tpch100g/partsupp/partsupp1014 partsupp
.import /local/data/tpch100g/partsupp/partsupp1015 partsupp
.import /local/data/tpch100g/partsupp/partsupp1016 partsupp
.import /local/data/tpch100g/partsupp/partsupp1017 partsupp
.import /local/data/tpch100g/partsupp/partsupp1018 partsupp
.import /local/data/tpch100g/partsupp/partsupp1019 partsupp
.import /local/data/tpch100g/partsupp/partsupp1020 partsupp
.import /local/data/tpch100g/partsupp/partsupp1021 partsupp
.import /local/data/tpch100g/partsupp/partsupp1022 partsupp
.import /local/data/tpch100g/partsupp/partsupp1023 partsupp
.import /local/data/tpch100g/nation/nation0000
|
DaMSL/K3
|
tools/ktrace/load_100g.sh
|
Shell
|
apache-2.0
| 243,763 |
#!/usr/bin/env bash
#
# Run tests for s3-bash4 commands
# (c) 2015 Chi Vinh Le <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
set -euo pipefail
readonly PROJECT_PATH=$(dirname $(pwd))
readonly SCRIPT_NAME="$(basename $0)"
# Includes
source ${PROJECT_PATH}/lib/s3-common.sh
##
# Print help and exit
# Arguments:
# $1 int exit code
# Output:
# string help
##
printUsageAndExitWith() {
printf "Usage:\n"
printf " $SCRIPT_NAME [-k key] [-s file] [-r region] resource_path\n"
printf " $SCRIPT_NAME -h\n"
printf "Example:\n"
printf " $SCRIPT_NAME -k key -s secret -r eu-central-1 /bucket/file.ext\n"
printf "Options:\n"
printf " -h,--help\tPrint this help\n"
printf " -k,--key\tAWS Access Key ID. Default to environment variable AWS_ACCESS_KEY_ID\n"
printf " -r,--region\tAWS S3 Region. Default to environment variable AWS_DEFAULT_REGION\n"
printf " -s,--secret\tFile containing AWS Secret Access Key. If not set, secret will be environment variable AWS_SECRET_ACCESS_KEY\n"
printf " --version\tShow version\n"
exit $1
}
##
# Parse command line and set global variables
# Arguments:
# $@ command line
# Globals:
# AWS_ACCESS_KEY_ID string
# AWS_SECRET_ACCESS_KEY string
# AWS_REGION string
# RESOURCE_PATH string
##
parseCommandLine() {
# Init globals
AWS_REGION=${AWS_DEFAULT_REGION:-""}
AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-""}
AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-""}
# Parse options
local remaining=
local secretKeyFile=
while [[ $# -gt 0 ]]; do
local key="$1"
case $key in
-h|--help) printUsageAndExitWith 0;;
-r|--region) assertArgument $@; AWS_REGION=$2; shift;;
-k|--key) assertArgument $@; AWS_ACCESS_KEY_ID=$2; shift;;
-s|--secret) assertArgument $@; secretKeyFile=$2; shift;;
-*) err "Unknown option $1"
printUsageAndExitWith $INVALID_USAGE_EXIT_CODE;;
*) remaining="$remaining \"$key\"";;
esac
shift
done
# Set the non-parameters back into the positional parameters ($1 $2 ..)
eval set -- $remaining
# Read secret file if set
if ! [[ -z "$secretKeyFile" ]]; then
AWS_SECRET_ACCESS_KEY=$(processAWSSecretFile "$secretKeyFile")
fi
# Parse arguments
if [[ $# != 1 ]]; then
err "You need to specify the resource path to download e.g. /bucket/file.ext"
printUsageAndExitWith $INVALID_USAGE_EXIT_CODE
fi
assertResourcePath "$1"
RESOURCE_PATH="$1"
# Freeze globals
readonly AWS_REGION
readonly AWS_ACCESS_KEY_ID
readonly AWS_SECRET_ACCESS_KEY
readonly RESOURCE_PATH
}
##
# Main routine
##
main() {
parseCommandLine "$@"
local get="${PROJECT_PATH}/bin/s3-get"
local put="${PROJECT_PATH}/bin/s3-put"
local delete="${PROJECT_PATH}/bin/s3-delete"
local testfile="${PROJECT_PATH}/test/testfile"
export AWS_DEFAULT_REGION=${AWS_REGION}
export AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY
echo "Upload test file to $RESOURCE_PATH"
"${put}" -T "${testfile}" "${RESOURCE_PATH}"
echo "Download test file $RESOURCE_PATH"
"${get}" "${RESOURCE_PATH}"
echo "Delete test file $RESOURCE_PATH"
"${delete}" "${RESOURCE_PATH}"
}
main "$@"
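# Example invocation (illustrative bucket path; the key, secret file and region
# may also be passed with -k/-s/-r instead of relying on environment variables):
#   AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... AWS_DEFAULT_REGION=us-east-1 \
#     ./run.sh /my-test-bucket/testfile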
|
wingedkiwi/s3-bash4
|
test/run.sh
|
Shell
|
apache-2.0
| 3,741 |
#!/bin/sh
#
# (c) Copyright 2016 Cloudera, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We remove any natively installed JDKs, as both Cloudera Manager and Cloudera Director only support Oracle JDKs
yum remove --assumeyes "*openjdk*"
rpm -ivh "https://archive.cloudera.com/director/redhat/7/x86_64/director/2/RPMS/x86_64/oracle-j2sdk1.8-1.8.0+update121-1.x86_64.rpm"
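# Optional sanity check after the install (the install prefix below is typical
# for this RPM but may differ on your system):
#   /usr/java/jdk1.8*/bin/java -version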
|
TobyHFerguson/director-scripts
|
java8/java8-bootstrap-script.sh
|
Shell
|
apache-2.0
| 872 |
#!/bin/sh
wget -O ../native_extension/ane/FreSwift.ane https://github.com/tuarua/Swift-IOS-ANE/releases/download/5.5.2/FreSwift.ane?raw=true
|
tuarua/Swift-IOS-ANE
|
starter_projects/example-mac/get_mac_dependencies.sh
|
Shell
|
apache-2.0
| 142 |
#!/usr/bin/env sh
# Script run by Perforce server trigger to authenticate SSO logins.
client_username="$1"
read client_auth_code < /dev/stdin
if [ -z "$client_auth_code" ]; then
echo "No authentication."
exit 1
fi
if [ "$client_auth_code" = "sso pass" ]; then
exit 0
fi
echo "Unauthorized"
exit 1
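# Illustrative Perforce triggers table entry that wires this script in as an
# auth-check-sso trigger (trigger name and script path are placeholders):
#   loginsso auth-check-sso auth "/path/to/server-sso-script.sh %user%"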
|
groboclown/p4ic4idea
|
integration-test/src/docker/p4d/server-sso-script.sh
|
Shell
|
apache-2.0
| 311 |
#!/bin/bash
#
# Copyright 2012 Marco Vermeulen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
function __gvmtool_offline {
if [[ "$1" == "enable" ]]; then
GVM_FORCE_OFFLINE="true"
echo "Forced offline mode enabled."
fi
if [[ "$1" == "disable" ]]; then
GVM_FORCE_OFFLINE="false"
GVM_ONLINE="true"
echo "Online mode re-enabled!"
fi
}
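# Example usage once this file has been sourced by the gvm entry point
# (function name taken from above; the quoted strings match the echoes):
#   __gvmtool_offline enable    # => "Forced offline mode enabled."
#   __gvmtool_offline disable   # => "Online mode re-enabled!"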
|
nobeans/gvm-cli
|
src/main/bash/gvm-offline.sh
|
Shell
|
apache-2.0
| 868 |
#!/bin/bash
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# A script for deploying to Cloud Functions, example usage:
# ./deploy.sh --name demo \
# --topic demo-topic \
# --env_vars MODEL=devdaysdemo,VERSION=v1
set -ue
cd $(dirname "$0")
print_usage() {
echo "A script for deploying main.py and its dependencies to Cloud Functions"
echo "Usage:"
echo " ./deploy.sh \ "
echo " --name <FUNCTION_NAME> \ "
echo " --topic <TRIGGER_TOPIC> \ "
echo " --env_vars <ENVIRONMENT_VARIABLES>"
}
NAME=""
TOPIC=""
ENV_VARS=""
while (( "$#" )); do
if [[ $2 == --* ]]; then
echo "Value of $1 starts with '--'. Missing value?"
exit 1
fi
if [[ $1 == "--name" ]]; then
NAME=$2
elif [[ $1 == "--topic" ]]; then
TOPIC=$2
elif [[ $1 == "--env_vars" ]]; then
ENV_VARS=$2
else
echo "Unknown flag $1"
exit 1
fi
shift 2
done
if [[ -z ${NAME} ]] || [[ -z ${TOPIC} ]] || [[ -z ${ENV_VARS} ]]; then
print_usage
exit 1
fi
# Copy the helper file over.
cp -R ../shared .
# Deploy to Cloud Functions.
gcloud beta functions deploy ${NAME} --runtime python37 \
--entry-point main --trigger-topic ${TOPIC} \
--set-env-vars ${ENV_VARS}
# Cleanup.
rm -r shared
|
GoogleCloudPlatform/healthcare
|
fhir/lung-cancer/inference/deploy.sh
|
Shell
|
apache-2.0
| 1,812 |
#!/bin/bash -eux
# Install Java 7
sudo apt-get install -y python-software-properties
sudo add-apt-repository -y ppa:webupd8team/java
sudo apt-get update
echo oracle-java7-installer shared/accepted-oracle-license-v1-1 select true | sudo /usr/bin/debconf-set-selections
sudo apt-get -y install oracle-java7-installer
sudo update-alternatives --display java
sudo apt-get -y install oracle-java7-set-default
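# Optional sanity check after the install (illustrative):
#   java -version   # should report a 1.7.x Oracle runtime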
|
joescii/pac-infra
|
java7.sh
|
Shell
|
apache-2.0
| 408 |
cd ~/src/sahara
source ~/src/sahara/.tox/venv/bin/activate
echo "Using sahara from: `which sahara`"
echo "Creating Sahara Database (mysql)"
mysql -uroot -Bse "CREATE USER 'sahara'@'localhost' IDENTIFIED BY 'sahara'"
mysql -uroot -Bse "GRANT ALL ON sahara.* TO 'sahara'@'localhost'"
mysql -uroot -Bse "drop database sahara"
mysql -uroot -Bse "create database sahara"
mysql -uroot -Bse "SET PASSWORD FOR 'sahara'@'localhost' = PASSWORD('sahara')"
sahara-db-manage --config-file etc/sahara/sahara.conf upgrade head
## needed to run sahara cli
#export OS_AUTH_URL=http://192.168.1.98:5000/v2.0/
export OS_AUTH_URL=http://127.0.0.1:5000/v2.0/
export OS_TENANT_NAME=admin
export OS_USERNAME=admin
export OS_PASSWORD=admin
## end of sahara cli stuff
export TOKEN=$(tools/get_auth_token | grep "Auth token:" | awk '{print $3}')
export TENANT=$(tools/get_auth_token | grep "Tenant \[admin\]" | awk '{print $4}')
sahara node-group-template-create < ~croberts/src/master.ngt
sahara node-group-template-create < ~croberts/src/worker.ngt
#http http://localhost:18080/v1.1/$TENANT/node-group-templates X-Auth-Token:$TOKEN < ~/src/master.ngt
#http http://localhost:18080/v1.1/$TENANT/node-group-templates X-Auth-Token:$TOKEN < ~/src/worker.ngt
#a little json query tool
wget http://stedolan.github.io/jq/download/linux64/jq
chmod +x jq
export MASTER_ID=$(http http://localhost:18080/v1.0/$TENANT/node-group-templates X-Auth-Token:$TOKEN | ./jq ".node_group_templates[1].id")
export WORKER_ID=$(http http://localhost:18080/v1.0/$TENANT/node-group-templates X-Auth-Token:$TOKEN | ./jq ".node_group_templates[0].id")
rm jq
cp ~/src/cluster.tmp ~/src/cluster.tmp.bak
sed -i "s/MASTER_ID/$MASTER_ID/g" ~/src/cluster.tmp
sed -i "s/WORKER_ID/$WORKER_ID/g" ~/src/cluster.tmp
#http http://localhost:18080/v1.0/$TENANT/cluster-templates X-Auth-Token:$TOKEN < ~/src/cluster.tmp
sahara cluster-template-create < ~/src/cluster.tmp
cp ~/src/cluster.tmp.bak ~/src/cluster.tmp
rm ~/src/cluster.tmp.bak
#register our image
#get the image id for our known image
export AUTH_URL="http://127.0.0.1:5000/v2.0"
#echo "Setting up ubuntu oozie image"
export IMAGE_ID=$(nova --os-username=admin --os-tenant-name=admin --os-password=admin --os-auth-url=$AUTH_URL image-list | grep ubuntu-oozie-hdp.image | awk {'print $2'})
http http://localhost:18080/v1.0/$TENANT/images/$IMAGE_ID X-Auth-Token:$TOKEN < ~/src/ubu-image.reg
http http://localhost:18080/v1.0/$TENANT/images/$IMAGE_ID/tag X-Auth-Token:$TOKEN < ~/src/tags.reg
export IMAGE_ID=$(nova --os-username=admin --os-tenant-name=admin --os-password=admin --os-auth-url=$AUTH_URL image-list | grep fedora | awk {'print $2'})
http http://localhost:18080/v1.0/$TENANT/images/$IMAGE_ID X-Auth-Token:$TOKEN < ~/src/fedora-image.reg
http http://localhost:18080/v1.0/$TENANT/images/$IMAGE_ID/tag X-Auth-Token:$TOKEN < ~/src/tags.reg
export IMAGE_ID=$(nova --os-username=admin --os-tenant-name=admin --os-password=admin --os-auth-url=$AUTH_URL image-list | grep icehouse | awk {'print $2'})
http http://localhost:18080/v1.0/$TENANT/images/$IMAGE_ID X-Auth-Token:$TOKEN < ~/src/fedora-image.reg
http http://localhost:18080/v1.0/$TENANT/images/$IMAGE_ID/tag X-Auth-Token:$TOKEN < ~/src/tags.reg
#sample data sources
echo "Setting up sample data sources"
http http://localhost:18080/v1.1/$TENANT/data-sources X-Auth-Token:$TOKEN < ~/src/output.tmp
http http://localhost:18080/v1.1/$TENANT/data-sources X-Auth-Token:$TOKEN < ~/src/output2.tmp
http http://localhost:18080/v1.1/$TENANT/data-sources X-Auth-Token:$TOKEN < ~/src/input.tmp
#sample pig job binary
#http PUT http://localhost:18080/v1.1/$TENANT/job-binary-internals/script.pig X-Auth-Token:$TOKEN < ../jobbinary.txt
#http http://localhost:18080/v1.1/$TENANT/jobs X-Auth-Token:$TOKEN < ../job.tmp
|
crobby/stack-init
|
sahara/sahara-boot.sh
|
Shell
|
apache-2.0
| 3,777 |
#!/bin/bash
#
export SPARK_HOME=$HOME/spark
# no need to change these
export SPARK_JOB_DIR=`pwd`
export SPARK_PREFIX=$SPARK_HOME
# Create/append spark configuration in SPARK_JOB_DIR from PBS environments.
# Specific spark configuration can be put into the SPARK_JOB_DIR/conf/* a priori.
# If an existing configuration is found in SPARK_JOB_DIR/conf/*, spark-on-hpc.sh will
# append its auto-generated section.
# If you rerun the job, delete the auto-generated section first.
$SPARK_HOME/sbin/spark-on-hpc.sh config || { exit 1; }
# Start a spark cluster inside HPC using oar
# First node will be dedicated to the master. The rest nodes are workers.
$SPARK_HOME/sbin/spark-on-hpc.sh start
#User submissions:
$SPARK_HOME/sbin/spark-submit-on-hpc.sh org.apache.spark.examples.SparkPi $SPARK_HOME/examples/jars/spark-examples_2.11-2.0.1.jar 10
#change/add more submissions here...
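# For example, a second submission could look like this (the class and jar are
# placeholders for your own application):
#   $SPARK_HOME/sbin/spark-submit-on-hpc.sh com.example.WordCount /path/to/wordcount.jar hdfs:///input hdfs:///output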
# Stop the spark cluster
$SPARK_HOME/sbin/spark-on-hpc.sh stop
|
ekasitk/spark-on-hpc
|
oar/examples/spark-multi.sh
|
Shell
|
apache-2.0
| 947 |
# [START maps_http_places_autocomplete_amoeba_strict]
curl -L -X GET 'https://maps.googleapis.com/maps/api/place/autocomplete/json?input=amoeba&types=establishment&location=37.76999%2C-122.44696&radius=500&strictbounds=true&key=YOUR_API_KEY'
# [END maps_http_places_autocomplete_amoeba_strict]
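# A possible way to inspect the returned predictions, assuming the usual
# Places Autocomplete response shape, jq installed, and a real key in place of
# YOUR_API_KEY:
#   curl -s -L 'https://maps.googleapis.com/maps/api/place/autocomplete/json?input=amoeba&types=establishment&location=37.76999%2C-122.44696&radius=500&strictbounds=true&key=YOUR_API_KEY' | jq -r '.predictions[].description'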
|
googlemaps/openapi-specification
|
dist/snippets/maps_http_places_autocomplete_amoeba_strict/maps_http_places_autocomplete_amoeba_strict.sh
|
Shell
|
apache-2.0
| 293 |
#!/bin/bash
#
# This script provides the command and control utility for the
# GigaSpaces Technologies gs-memcached script.
# The gs-memcached script starts a memcached agent.
MEMCACHED_URL=$1
if [ "${MEMCACHED_URL}" = "" ] ; then
MEMCACHED_URL=/./memcached
fi
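# Example invocations of this wrapper (the custom space URL is illustrative);
# the actual deployment command follows below:
#   ./gs-memcached.sh                          # uses the default /./memcached space
#   ./gs-memcached.sh /./my-memcached-space    # deploys a custom memcached space URL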
"`dirname $0`/puInstance.sh" -properties space embed://url=${MEMCACHED_URL} "`dirname $0`/../deploy/templates/memcached"
|
Gigaspaces/xap-openspaces
|
bin/gs-memcached.sh
|
Shell
|
apache-2.0
| 388 |
#!/bin/bash
set -euo verbose -o pipefail
IFS=$'\n\t'
BOWTIE_DB=/labcommon/db/bowtie2/grch38
SAMPLE_ID=$1
IN_DIR=$2
OUT_FILE_R1=${SAMPLE_ID}_1.fq.gz
OUT_FILE_R2=${SAMPLE_ID}_2.fq.gz
if [ -s ${OUT_FILE_R1} ] || [ -s ${OUT_FILE_R2} ]; then
echo "File ${OUT_FILE_R1} and/or ${OUT_FILE_R2} already exist. Delete file(s) to regenerate."
exit 0;
fi
#Create named PIPEs
mkfifo R1_PIPE R2_PIPE
mkdir -p log
#Requires cutadapt
cutadapt --cut=3 -g ^GCCGGAGCTCTGCAGATATC -g ^GGAGCTCTGCAGATATC --no-indels --error-rate=0.1 -f 'fastq' \
-o R1_PIPE ${IN_DIR}/*_val_1.fq.gz 2>&1 > log/${SAMPLE_ID}_overhangs_R1.log &
cutadapt --cut=3 -g ^GCCGGAGCTCTGCAGATATC -g ^GGAGCTCTGCAGATATC --no-indels --error-rate=0.1 -f 'fastq' \
-o R2_PIPE ${IN_DIR}/*_val_2.fq.gz 2>&1 > log/${SAMPLE_ID}_overhangs_R2.log &
bowtie2 --local --very-sensitive-local -t -p 12 -x ${BOWTIE_DB} -1 R1_PIPE -2 R2_PIPE | \
tee >(samtools view -hSb - | samtools flagstat - > log/${SAMPLE_ID}_bowtie_pe.flagstat ) | \
samtools view -hSb -f12 -F256 - | samtools fastq -1 ${OUT_FILE_R1} -2 ${OUT_FILE_R2} -
#Remove named PIPEs
rm R1_PIPE R2_PIPE
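# Example invocation (sample id and input directory are illustrative; the input
# directory is expected to contain the *_val_1.fq.gz / *_val_2.fq.gz pairs used above):
#   ./2_filter_pe.sh sampleA /path/to/trimmed_reads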
|
maubarsom/meta_illumina_pipeline
|
batch_processing/preprocessing/2_filter_pe.sh
|
Shell
|
apache-2.0
| 1,110 |
#! /bin/bash
# npm install
grunt clean && grunt concat && grunt uglify
./ichat.sh
|
i5ting/Collection.js
|
build.sh
|
Shell
|
apache-2.0
| 82 |
#!/bin/bash
#
# Copyright (c) 2013 Charles H Martin, PhD
#
# Calculated Content
# http://calculatedcontent.com
# [email protected]
#
#TODO: add switch / select based on location
# re-write in ruby
# http://cloud-images.ubuntu.com/releases/precise/release-20120822/
# see http://cloud-images.ubuntu.com/locator/ec2/
#TODO: redo this Saturday
# damn this
AMI_ID="ami-696e652c"
INSTANCE_TYPE="m1.small"
SECURITY_GROUPS="chefami"
RUNLIST="role[cloud_master]"
# Careful: Server name cannot have a space in it (was fine for the Chef Server, not here)
echo "knife ec2 server create -c .chef/knife.rb -N CCMaster -x ubuntu -I $AMI_ID -f $INSTANCE_TYPE -G $SECURITY_GROUPS -r $RUNLIST"
knife ec2 server create -c .chef/knife.rb -N CCMaster -x ubuntu -I $AMI_ID -f $INSTANCE_TYPE -G $SECURITY_GROUPS -r "$RUNLIST" --hint ec2 -a public_ip_address
#TODO: try creating new security groups
# TODO: try using another region, like US EAST
#
|
CalculatedContent/chef-repo
|
deploy_cloud_master.sh
|
Shell
|
apache-2.0
| 962 |
#!/bin/bash
set -e
# Enable tracing if set.
[ -n "$BASH_XTRACEFD" ] && set -x
function _init() {
## All binaries are static make sure to disable CGO.
export CGO_ENABLED=0
## List of architectures and OSes used to test cross compilation.
SUPPORTED_OSARCH="linux/ppc64le linux/arm64 linux/s390x darwin/amd64 freebsd/amd64 windows/amd64 linux/arm linux/386 netbsd/amd64"
}
function _build() {
local osarch=$1
IFS=/ read -r -a arr <<<"$osarch"
os="${arr[0]}"
arch="${arr[1]}"
package=$(go list -f '{{.ImportPath}}')
printf -- "--> %15s:%s\n" "${osarch}" "${package}"
# go build -trimpath to build the binary.
export GOOS=$os
export GOARCH=$arch
export GO111MODULE=on
go build -trimpath -tags kqueue -o /dev/null
}
function main() {
echo "Testing builds for OS/Arch: ${SUPPORTED_OSARCH}"
for each_osarch in ${SUPPORTED_OSARCH}; do
_build "${each_osarch}"
done
}
_init && main "$@"
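# To reproduce a single target manually (mirrors what _build does for one
# os/arch pair; the values below are illustrative):
#   CGO_ENABLED=0 GOOS=linux GOARCH=arm64 GO111MODULE=on go build -trimpath -tags kqueue -o /dev/null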
|
aead/minio
|
buildscripts/cross-compile.sh
|
Shell
|
apache-2.0
| 955 |
#!/bin/bash
exec > decode-demo.out.txt
find TestData -type f -print | grep -v .DS_Store | grep -v decode-demo |
while read data_file
do
echo $data_file
src/decode-demo < "$data_file"
echo
echo
done
|
libamqp/libamqp
|
decode-demo.sh
|
Shell
|
apache-2.0
| 222 |
#!/bin/bash
# "Tom Jerry" are non-option parameters.
# -ab Book -c Cake -d Dog -e Tom Jerry
# "-d Dog -e" will be non-option parameters because it comes after "Tom".
# -ab Book -c Cake Tom -d Dog -e
# "-d Dog -e" will be non-option parameters because of the "--".
# "--" marks the end of the options part.
# -ab Book -c Cake -- -d Dog -e
set -v -x
# The number of all command line parameters.
echo "$#"
# Option string.
OPTSTRING=ab:c:d:e
# Parse all option parameters.
while getopts $OPTSTRING opt
do
echo $opt "-" $OPTARG
done
# Bypass all options parameters, leave all non-option parameters.
shift $(($OPTIND - 1))
# The number of all non-option command line parameters.
echo "$#"
# All non-option command line parameters.
echo "$@"
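# Example run (matches the first sample command line in the header comments):
#   ./commandline_options.sh -ab Book -c Cake -d Dog -e Tom Jerry
# getopts reports a, b=Book, c=Cake, d=Dog, e; "Tom Jerry" remain as the
# non-option parameters printed by the final echo.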
|
djsilenceboy/LearnTest
|
Shell_Test/commandline_options.sh
|
Shell
|
apache-2.0
| 724 |
#!/bin/bash
set -eu
set -o pipefail
SCRIPT_NAME=$(basename $0)
SCRIPT_HOME=$(dirname $0)
BUILD_ONLY=
DEBUG_LOGGING=
HEAT_ENV=
FLAVOR="baremetal"
function show_options {
echo "Usage: $SCRIPT_NAME [options]"
echo
echo "Deploys a baremetal cloud via heat."
echo
echo "Options:"
echo " -h -- this help"
echo " -c -- re-use existing source/images if they exist."
echo " --build-only -- build the needed images but don't deploy them."
echo " --debug-logging -- Turn on debug logging in the undercloud. Sets"
echo " both OS_DEBUG_LOGGING and the heat Debug parameter."
echo " --heat-env -- path to a JSON heat environment file."
echo " Defaults to \$TRIPLEO_ROOT/undercloud-env.json."
echo " --flavor -- flavor to use for the undercloud. Defaults"
echo " to 'baremetal'."
echo
exit $1
}
TEMP=$(getopt -o c,h -l build-only,debug-logging,heat-env:,flavor:,help -n $SCRIPT_NAME -- "$@")
if [ $? != 0 ]; then
echo "Terminating..." >&2
exit 1
fi
# Note the quotes around `$TEMP': they are essential!
eval set -- "$TEMP"
while true ; do
case "$1" in
-c) USE_CACHE=1; shift 1;;
--build-only) BUILD_ONLY="1"; shift 1;;
--debug-logging)
DEBUG_LOGGING="1"
export OS_DEBUG_LOGGING="1"
shift 1
;;
--heat-env) HEAT_ENV="$2"; shift 2;;
--flavor) FLAVOR="$2"; shift 2;;
-h | --help) show_options 0;;
--) shift ; break ;;
*) echo "Error: unsupported option $1." ; exit 1 ;;
esac
done
set -x
USE_CACHE=${USE_CACHE:-0}
TE_DATAFILE=${1:?"A test environment description is required as \$1."}
UNDERCLOUD_DIB_EXTRA_ARGS=${UNDERCLOUD_DIB_EXTRA_ARGS:-'rabbitmq-server'}
if [ "${USE_MARIADB:-}" = 1 ] ; then
UNDERCLOUD_DIB_EXTRA_ARGS="$UNDERCLOUD_DIB_EXTRA_ARGS mariadb-rpm"
fi
### --include
## devtest_undercloud
## ==================
## #. Specify whether to use the nova-baremetal or nova-ironic drivers
## for provisioning within the undercloud.
## ::
if [ "$USE_IRONIC" -eq 0 ] ; then
UNDERCLOUD_DIB_EXTRA_ARGS="$UNDERCLOUD_DIB_EXTRA_ARGS nova-baremetal"
else
UNDERCLOUD_DIB_EXTRA_ARGS="$UNDERCLOUD_DIB_EXTRA_ARGS nova-ironic"
fi
## #. Add extra elements for Undercloud UI
## ::
if [ "$USE_UNDERCLOUD_UI" -ne 0 ] ; then
UNDERCLOUD_DIB_EXTRA_ARGS="$UNDERCLOUD_DIB_EXTRA_ARGS ceilometer-collector \
ceilometer-api ceilometer-agent-central ceilometer-agent-notification \
ceilometer-undercloud-config horizon"
fi
## #. Specify a client-side timeout in minutes for creating or updating the
## undercloud Heat stack.
## ::
UNDERCLOUD_STACK_TIMEOUT=${UNDERCLOUD_STACK_TIMEOUT:-60}
## #. Create your undercloud image. This is the image that the seed nova
## will deploy to become the baremetal undercloud. $UNDERCLOUD_DIB_EXTRA_ARGS is
## meant to be used to pass additional arguments to disk-image-create.
## ::
NODE_ARCH=$(os-apply-config -m $TE_DATAFILE --key arch --type raw)
if [ ! -e $TRIPLEO_ROOT/undercloud.qcow2 -o "$USE_CACHE" == "0" ] ; then #nodocs
$TRIPLEO_ROOT/diskimage-builder/bin/disk-image-create $NODE_DIST \
-a $NODE_ARCH -o $TRIPLEO_ROOT/undercloud \
ntp baremetal boot-stack os-collect-config dhcp-all-interfaces \
neutron-dhcp-agent $DIB_COMMON_ELEMENTS $UNDERCLOUD_DIB_EXTRA_ARGS 2>&1 | \
tee $TRIPLEO_ROOT/dib-undercloud.log
### --end
fi
if [ -n "$BUILD_ONLY" ]; then
exit 0
fi
### --include
## #. If you wanted to build the image and run it elsewhere, you can stop at
## this point and head onto the overcloud image building.
## #. Load the undercloud image into Glance:
## ::
UNDERCLOUD_ID=$(load-image -d $TRIPLEO_ROOT/undercloud.qcow2)
## #. Set the public interface of the undercloud network node:
## ::
NeutronPublicInterface=${NeutronPublicInterface:-'nic1'}
## #. Set the NTP server for the undercloud::
## ::
UNDERCLOUD_NTP_SERVER=${UNDERCLOUD_NTP_SERVER:-''}
## #. Create secrets for the cloud. The secrets will be written to a file
## ($TRIPLEO_ROOT/tripleo-undercloud-passwords by default)
## that you need to source into your shell environment.
##
## .. note::
##
## You can also make or change these later and
## update the heat stack definition to inject them - as long as you also
## update the keystone recorded password.
##
## .. note::
##
## There will be a window between updating keystone and
## instances where they will disagree and service will be down. Instead
## consider adding a new service account and changing everything across
## to it, then deleting the old account after the cluster is updated.
##
## ::
### --end
# NOTE(tchaypo): We used to write these passwords in $CWD; so check to see if the
# file exists there first. As well as providing backwards compatibility, this
# allows for people to run multiple test environments on the same machine - just
# make sure to have a different directory for running the scripts for each
# different environment you wish to use.
#
if [ -e tripleo-undercloud-passwords ]; then
echo "Re-using existing passwords in $PWD/tripleo-undercloud-passwords"
# Add any new passwords since the file was generated
setup-undercloud-passwords tripleo-undercloud-passwords
source tripleo-undercloud-passwords
else
### --include
setup-undercloud-passwords $TRIPLEO_ROOT/tripleo-undercloud-passwords
source $TRIPLEO_ROOT/tripleo-undercloud-passwords
fi #nodocs
## #. Export UNDERCLOUD_CEILOMETER_SNMPD_PASSWORD to your environment
## so it can be applied to the SNMPd of all Overcloud nodes.
NEW_JSON=$(jq '.undercloud.ceilometer_snmpd_password="'${UNDERCLOUD_CEILOMETER_SNMPD_PASSWORD}'"' $TE_DATAFILE)
echo $NEW_JSON > $TE_DATAFILE
## #. Pull out needed variables from the test environment definition.
## ::
POWER_MANAGER=$(os-apply-config -m $TE_DATAFILE --key power_manager --type raw)
POWER_KEY=$(os-apply-config -m $TE_DATAFILE --key ssh-key --type raw)
POWER_HOST=$(os-apply-config -m $TE_DATAFILE --key host-ip --type raw)
POWER_USER=$(os-apply-config -m $TE_DATAFILE --key ssh-user --type raw)
## #. Wait for the BM cloud to register BM nodes with the scheduler::
wait_for -w 60 --delay 1 -- wait_for_hypervisor_stats
## #. We need an environment file to store the parameters we're going to give
## heat.::
HEAT_ENV=${HEAT_ENV:-"${TRIPLEO_ROOT}/undercloud-env.json"}
## #. Read the heat env in for updating.::
if [ -e "${HEAT_ENV}" ]; then
### --end
if [ "$(stat -c %a ${HEAT_ENV})" != "600" ]; then
echo "Error: Heat environment cache \"${HEAT_ENV}\" not set to permissions of 0600."
# We should exit 1 so all the users from before the permissions
# requirement don't have their HEAT_ENV files ignored in a nearly silent way
exit 1
fi
### --include
ENV_JSON=$(cat "${HEAT_ENV}")
else
ENV_JSON='{"parameters":{}}'
fi
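# For illustration only: a pre-seeded ${HEAT_ENV} could look like the JSON below
# (the parameter names come from the jq calls further down; the values are placeholders):
#   {"parameters": {"NeutronPublicInterface": "eth2", "NtpServer": "pool.ntp.org"}}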
## #. Detect if we are deploying with a VLAN for API endpoints / floating IPs.
## This is done by looking for a 'public' network in Neutron, and if found
## we pull out the VLAN id and pass that into Heat, as well as using a VLAN
## enabled Heat template.
## ::
if (neutron net-list | grep -q public); then
VLAN_ID=$(neutron net-show public | awk '/provider:segmentation_id/ { print $4 }')
else
VLAN_ID=
fi
## #. Nova-baremetal and Ironic require different Heat templates
## and different options.
## ::
if [ "$USE_IRONIC" -eq 0 ] ; then
if [ -n "$VLAN_ID" ]; then
echo "VLANs not supported with Nova-BM" >&2
exit 1
fi
HEAT_UNDERCLOUD_TEMPLATE="undercloud-vm.yaml"
ENV_JSON=$(jq .parameters.PowerSSHHost=\"${POWER_HOST}\" <<< $ENV_JSON)
ENV_JSON=$(jq .parameters.PowerManager=\"${POWER_MANAGER}\" <<< $ENV_JSON)
ENV_JSON=$(jq .parameters.PowerUserName=\"${POWER_USER}\" <<< $ENV_JSON)
REGISTER_SERVICE_OPTS=""
else
if [ -n "$VLAN_ID" ]; then
HEAT_UNDERCLOUD_TEMPLATE="undercloud-vm-ironic-vlan.yaml"
ENV_JSON=$(jq .parameters.NeutronPublicInterfaceTag=\"${VLAN_ID}\" <<< $ENV_JSON)
# This should be in the heat template, but see
# https://bugs.launchpad.net/heat/+bug/1336656
# note that this will break if there is more than one subnet, as if
# more reason to fix the bug were needed :).
PUBLIC_SUBNET_ID=$(neutron net-show public | awk '/subnets/ { print $4 }')
VLAN_GW=$(neutron subnet-show $PUBLIC_SUBNET_ID | awk '/gateway_ip/ { print $4}')
BM_VLAN_CIDR=$(neutron subnet-show $PUBLIC_SUBNET_ID | awk '/cidr/ { print $4}')
ENV_JSON=$(jq .parameters.NeutronPublicInterfaceDefaultRoute=\"${VLAN_GW}\" <<< $ENV_JSON)
else
HEAT_UNDERCLOUD_TEMPLATE="undercloud-vm-ironic.yaml"
fi
ENV_JSON=$(jq .parameters.IronicPassword=\"${UNDERCLOUD_IRONIC_PASSWORD}\" <<< $ENV_JSON)
REGISTER_SERVICE_OPTS="--ironic-password $UNDERCLOUD_IRONIC_PASSWORD"
fi
STACKNAME_UNDERCLOUD=${STACKNAME_UNDERCLOUD:-'undercloud'}
## #. Choose whether to deploy or update. Use stack-update to update::
## HEAT_OP=stack-create
## ::
if heat stack-show $STACKNAME_UNDERCLOUD > /dev/null; then
HEAT_OP=stack-update
if (heat stack-show $STACKNAME_UNDERCLOUD | grep -q FAILED); then
echo "Updating a failed stack. This is a new ability and may cause problems." >&2
fi
else
HEAT_OP=stack-create
fi
## #. Set parameters we need to deploy a baremetal undercloud::
ENV_JSON=$(jq '.parameters = {
"MysqlInnodbBufferPoolSize": 100
} + .parameters + {
"AdminPassword": "'"${UNDERCLOUD_ADMIN_PASSWORD}"'",
"AdminToken": "'"${UNDERCLOUD_ADMIN_TOKEN}"'",
"SnmpdReadonlyUserPassword": "'"${UNDERCLOUD_CEILOMETER_SNMPD_PASSWORD}"'",
"GlancePassword": "'"${UNDERCLOUD_GLANCE_PASSWORD}"'",
"HeatPassword": "'"${UNDERCLOUD_HEAT_PASSWORD}"'",
"NovaPassword": "'"${UNDERCLOUD_NOVA_PASSWORD}"'",
"NeutronPassword": "'"${UNDERCLOUD_NEUTRON_PASSWORD}"'",
"NeutronPublicInterface": "'"${NeutronPublicInterface}"'",
"undercloudImage": "'"${UNDERCLOUD_ID}"'",
"BaremetalArch": "'"${NODE_ARCH}"'",
"PowerSSHPrivateKey": "'"${POWER_KEY}"'",
"NtpServer": "'"${UNDERCLOUD_NTP_SERVER}"'",
"Flavor": "'"${FLAVOR}"'"
}' <<< $ENV_JSON)
### --end
if [ "$DEBUG_LOGGING" = "1" ]; then
ENV_JSON=$(jq '.parameters = .parameters + {
"Debug": "True"
}' <<< $ENV_JSON)
fi
### --include
#Add Ceilometer to env only if USE_UNDERCLOUD_UI is specified
if [ "$USE_UNDERCLOUD_UI" -ne 0 ] ; then
ENV_JSON=$(jq '.parameters = .parameters + {
"CeilometerPassword": "'"${UNDERCLOUD_CEILOMETER_PASSWORD}"'"
}' <<< $ENV_JSON)
fi
## #. Save the finished environment file.::
jq . > "${HEAT_ENV}" <<< $ENV_JSON
chmod 0600 "${HEAT_ENV}"
## #. Add Keystone certs/key into the environment file.::
generate-keystone-pki --heatenv $HEAT_ENV
## #. Deploy an undercloud.
## ::
make -C $TRIPLEO_ROOT/tripleo-heat-templates $HEAT_UNDERCLOUD_TEMPLATE
heat $HEAT_OP -e $HEAT_ENV \
-t 360 \
-f $TRIPLEO_ROOT/tripleo-heat-templates/$HEAT_UNDERCLOUD_TEMPLATE \
$STACKNAME_UNDERCLOUD
## You can watch the console via ``virsh``/``virt-manager`` to observe the PXE
## boot/deploy process. After the deploy is complete, it will reboot into the
## image.
##
## #. Get the undercloud IP from ``nova list``
## ::
echo "Waiting for the undercloud stack to be ready" #nodocs
# Make the timeout 60 minutes, the same as the Heat stack-create default timeout.
wait_for_stack_ready -w $(($UNDERCLOUD_STACK_TIMEOUT * 60 )) 10 undercloud
UNDERCLOUD_CTL_IP=$(nova list | grep ctlplane | sed -e "s/.*=\\([0-9.]*\\).*/\1/")
## #. If we're deploying with a public VLAN we must use it, not the control plane
## network (which we may not even have access to) to ping and configure things.
## ::
if [ -n "$VLAN_ID" ]; then
UNDERCLOUD_IP=$(heat output-show undercloud PublicIP|sed 's/^"\(.*\)"$/\1/')
else
UNDERCLOUD_IP=$UNDERCLOUD_CTL_IP
fi
## #. We don't (yet) preserve ssh keys on rebuilds.
## ::
ssh-keygen -R $UNDERCLOUD_IP
ssh-keygen -R $UNDERCLOUD_CTL_IP
## #. Exclude the undercloud from proxies:
## ::
set +u #nodocs
export no_proxy=$no_proxy,$UNDERCLOUD_IP
set -u #nodocs
## #. Export the undercloud endpoint and credentials to your test environment.
## ::
UNDERCLOUD_ENDPOINT="http://$UNDERCLOUD_IP:5000/v2.0"
NEW_JSON=$(jq '.undercloud.password="'${UNDERCLOUD_ADMIN_PASSWORD}'" | .undercloud.endpoint="'${UNDERCLOUD_ENDPOINT}'" | .undercloud.endpointhost="'${UNDERCLOUD_IP}'"' $TE_DATAFILE)
echo $NEW_JSON > $TE_DATAFILE
## #. Source the undercloud configuration:
## ::
source $TRIPLEO_ROOT/tripleo-incubator/undercloudrc
## #. Perform setup of your undercloud.
## ::
init-keystone -o $UNDERCLOUD_CTL_IP -t $UNDERCLOUD_ADMIN_TOKEN \
-e [email protected] -p $UNDERCLOUD_ADMIN_PASSWORD \
--public $UNDERCLOUD_IP --no-pki-setup
# Creating these roles to be used by tenants using swift
openstack role create swiftoperator
openstack role create ResellerAdmin
# Create service endpoints and optionally include Ceilometer for UI support
ENDPOINT_LIST="--glance-password $UNDERCLOUD_GLANCE_PASSWORD
--heat-password $UNDERCLOUD_HEAT_PASSWORD
--neutron-password $UNDERCLOUD_NEUTRON_PASSWORD
--nova-password $UNDERCLOUD_NOVA_PASSWORD
--tuskar-password $UNDERCLOUD_TUSKAR_PASSWORD"
if [ "$USE_UNDERCLOUD_UI" -ne 0 ] ; then
ENDPOINT_LIST="$ENDPOINT_LIST --ceilometer-password $UNDERCLOUD_CEILOMETER_PASSWORD"
fi
setup-endpoints $UNDERCLOUD_CTL_IP $ENDPOINT_LIST $REGISTER_SERVICE_OPTS \
--public $UNDERCLOUD_IP
openstack role create heat_stack_user
user-config
BM_NETWORK_CIDR=$(os-apply-config -m $TE_DATAFILE --key baremetal-network.cidr --type raw --key-default '192.0.2.0/24')
if [ -n "$VLAN_ID" ]; then
# No ctl plane gateway - public net gateway is needed.
# XXX (lifeless) - Neutron still configures one, first position in the subnet.
BM_NETWORK_GATEWAY=
else
# Use a control plane gateway.
BM_NETWORK_GATEWAY=$(os-apply-config -m $TE_DATAFILE --key baremetal-network.gateway-ip --type raw --key-default '192.0.2.1')
fi
BM_NETWORK_UNDERCLOUD_RANGE_START=$(os-apply-config -m $TE_DATAFILE --key baremetal-network.undercloud.range-start --type raw --key-default '192.0.2.21')
BM_NETWORK_UNDERCLOUD_RANGE_END=$(os-apply-config -m $TE_DATAFILE --key baremetal-network.undercloud.range-end --type raw --key-default '192.0.2.40')
UNDERCLOUD_NAMESERVER=$(os-apply-config -m $TE_DATAFILE --key undercloud.nameserver --type netaddress --key-default "${UNDERCLOUD_NAMESERVER:-}")
NETWORK_JSON=$(mktemp)
jq "." <<EOF > $NETWORK_JSON
{
"physical": {
"gateway": "$BM_NETWORK_GATEWAY",
"metadata_server": "$UNDERCLOUD_CTL_IP",
"cidr": "$BM_NETWORK_CIDR",
"allocation_start": "$BM_NETWORK_UNDERCLOUD_RANGE_START",
"allocation_end": "$BM_NETWORK_UNDERCLOUD_RANGE_END",
"name": "ctlplane",
"nameserver": "$UNDERCLOUD_NAMESERVER"
}
}
EOF
setup-neutron -n $NETWORK_JSON
rm $NETWORK_JSON
if [ -n "$VLAN_ID" ]; then
BM_VLAN_START=$(jq -r '.["baremetal-network"].undercloud.public_vlan.start' $TE_DATAFILE)
BM_VLAN_END=$(jq -r '.["baremetal-network"].undercloud.public_vlan.finish' $TE_DATAFILE)
PUBLIC_NETWORK_JSON=$(mktemp)
jq "." <<EOF > $PUBLIC_NETWORK_JSON
{
"physical": {
"gateway": "$VLAN_GW",
"metadata_server": "$UNDERCLOUD_CTL_IP",
"cidr": "$BM_VLAN_CIDR",
"allocation_start": "$BM_VLAN_START",
"allocation_end": "$BM_VLAN_END",
"name": "public",
"nameserver": "$UNDERCLOUD_NAMESERVER",
"segmentation_id": "$VLAN_ID",
"physical_network": "ctlplane",
"enable_dhcp": false
}
}
EOF
setup-neutron -n $PUBLIC_NETWORK_JSON
fi
## #. Nova quota quickly runs up against the defaults, so override the defaults to
## allow unlimited cores, instances and RAM.
## ::
nova quota-update --cores -1 --instances -1 --ram -1 $(openstack project show admin | awk '$2=="id" {print $4}')
## #. Register two baremetal nodes with your undercloud.
## ::
setup-baremetal --service-host undercloud --nodes <(jq '.nodes - [.nodes[0]]' $TE_DATAFILE)
### --end
|
rdo-management/tripleo
|
scripts/devtest_undercloud.sh
|
Shell
|
apache-2.0
| 16,295 |
#!/bin/bash
# ******************************************************************************
# Copyright 2017-2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
#===================================================================================================
# Provides Bash functions for dealing with clang-format.
#===================================================================================================
declare _intelnervana_clang_format_lib_SCRIPT_NAME="${BASH_SOURCE[${#BASH_SOURCE[@]} - 1]}"
declare _maint_SCRIPT_DIR="$( cd $(dirname "${_intelnervana_clang_format_lib_SCRIPT_NAME}") && pwd )"
source "${_maint_SCRIPT_DIR}/bash_lib.sh"
format_lib_verify_version() {
if (( $# != 3 )); then
bash_lib_print_error "Usage: ${FUNCNAME[0]} <clang-format-prog-pathname> <required-version-number> <CLANG or YAPF>"
return 1
fi
local PROGNAME="${1}"
local REQUIRED_VERSION_X_Y="${2}"
local CLANG_OR_YAPF="${3}"
if ! [[ "${REQUIRED_VERSION_X_Y}" =~ ^[0-9]+.[0-9]+$ ]]; then
bash_lib_print_error "${FUNCNAME[0]}: required-version-number must have the form (number).(number)."
return 1
fi
if ! [[ -f "${PROGNAME}" ]]; then
bash_lib_print_error "Unable to find clang-format program named '${PROGNAME}'"
return 1
fi
local VERSION_LINE
if ! VERSION_LINE=$("${PROGNAME}" --version); then
bash_lib_print_error "Failed invocation of command '${PROGNAME} --version'"
return 1
fi
local SED_FLAGS
if [[ "$(uname)" == 'Darwin' ]]; then
SED_FLAGS='-En'
else
SED_FLAGS='-rn'
fi
local VERSION_X_Y
if [[ "${CLANG_OR_YAPF}" =~ "CLANG" ]]; then
if ! VERSION_X_Y=$(echo "${VERSION_LINE}" | sed ${SED_FLAGS} 's/^clang-format version ([0-9]+.[0-9]+).*$/\1/p')
then
bash_lib_print_error "Failed invocation of sed to find clang verion."
return 1
fi
else
if ! VERSION_X_Y=$(echo "${VERSION_LINE}" | sed ${SED_FLAGS} 's/^yapf ([0-9]+.[0-9]+).*$/\1/p')
then
bash_lib_print_error "Failed invocation of sed to find yapf version."
return 1
fi
fi
if [[ "${REQUIRED_VERSION_X_Y}" != "${VERSION_X_Y}" ]]; then
bash_lib_print_error \
"Program '${PROGNAME}' reports version number '${VERSION_X_Y}'" \
"but we require '${REQUIRED_VERSION_X_Y}'"
return 1
fi
}
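# Illustrative usage only (not part of the library): a maintenance script that
# sources this file might verify its formatters like this. The paths and
# version numbers below are hypothetical placeholders.
#
#   source "maint/clang_format_lib.sh"
#   format_lib_verify_version "$(command -v clang-format-9)" "9.0" "CLANG" || exit 1
#   format_lib_verify_version "$(command -v yapf)" "0.26" "YAPF" || exit 1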
|
tensorflow/ngraph-bridge
|
maint/clang_format_lib.sh
|
Shell
|
apache-2.0
| 3,053 |
#!/bin/sh
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2016 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
#
cleanup()
{
path=$1
size=$2
optSize=`du -k $path | awk '{print $1}'| sed 's/[^0-9]*//g'`
if [ $optSize -gt $size ]; then
while [ $optSize -gt $size ]
do
oldFile=`ls -t $path | tail -1`
echo $oldFile
if [ -f $path/$oldFile ]; then rm -rf $path/$oldFile; fi
optSize=`du -k $path | awk '{print $1}'| sed 's/[^0-9]*//g'`
sleep 1
done
fi
}
# cleaning coredump backup area
cleanup /opt/corefiles_back/ 2097152
# cleaning coredump area
cleanup /opt/corefiles/ 2097152
# cleaning minidump area
cleanup /opt/minidumps/ 512000
exit 0
|
rdkcmf/rdk-sysint
|
lib/rdk/diskCleanup.sh
|
Shell
|
apache-2.0
| 1,500 |
#!/bin/bash
set -e
set -u
# Set defaults
SCRIPT=$(basename $0)
MY_VERSION="2.1"
MY_CITATION="Hello World example in Bash by Lee Katz"
function usage(){
echo "Usage: $0 [options] run/"
echo "Prints all options to run/SneakerNet/properties.txt"
echo "OPTIONS
--help
--numcpus 1
--debug
--tempdir ''
--force
--version
--citation
--check-dependencies
"
}
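# Illustrative invocation (hypothetical run directory name), showing how the
# options parsed below are typically passed by the SneakerNet driver:
#
#   ./sn_helloWorld.sh --numcpus 4 --force M00123-19-001-test/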
OPTIONSUSED=""
# Set expected variables and defaults to avoid "-u errors"
# Booleans are ints in this example
HELP=0
NUMCPUS=1
DEBUG=0
TEMPDIR="" # TODO add mktemp
FORCE=0
VERSION=0
CITATION=0
CHECK_DEPENDENCIES=0
POSITIONAL=()
while [[ $# -gt 0 ]]; do
case "$1" in
--help|-h)
HELP=1
shift 1
;;
--numcpus|-n)
NUMCPUS=$2
shift 2
;;
--debug)
DEBUG=1
shift 1
;;
--tempdir)
TEMPDIR=$2
shift 2
;;
--force)
FORCE=1
shift 1
;;
--version)
VERSION=1
shift 1
;;
--citation)
CITATION=1
shift 1
;;
--check-dependencies)
CHECK_DEPENDENCIES=1
shift 1
;;
*)
POSITIONAL+=("$1")
shift
;;
esac
done
# restore positional parameters if there are any
if [[ "${#POSITIONAL[@]}" -gt 0 ]]; then
set -- "${POSITIONAL[@]}"
fi
# print and exit for certain options
if [[ "$CITATION" == 1 ]]; then
# Print the citation and exit
echo "$MY_CITATION"
exit 0;
fi
if [[ "$VERSION" == 1 ]]; then
# Print the version and exit
echo "$MY_VERSION"
exit 0;
fi
if [[ "$CHECK_DEPENDENCIES" == 1 ]]; then
# Check for dependencies: print the name of each executable checked to stdout and its version to stderr
# Check version of bash
echo "bash"
bash --version 2>&1 | grep -m 1 version 1>&2
# Check version of basename
echo "basename"
basename --version 2>&1 | grep -m 1 basename 1>&2
exit 0;
fi
# exit if there are no arguments
if [[ "$HELP" == 1 ]] || [[ $# -lt 1 ]]; then
usage;
exit 0;
fi
# The run number is the first positional argument
RUN=$1
echo "Run was given as $RUN"
# Write all options to a table
TABLE="$RUN/SneakerNet/forEmail/helloworld.sh.txt"
echo -e "foo\tbar" > $TABLE
echo -e "run\t$RUN" >> $TABLE
echo -e "help\t$HELP" >> $TABLE
echo -e "numcpus\t$NUMCPUS" >> $TABLE
echo -e "debug\t$DEBUG" >> $TABLE
echo -e "tempdir\t$TEMPDIR" >> $TABLE
echo -e "force\t$FORCE" >> $TABLE
echo -e "VERSION\t$VERSION" >> $TABLE
echo -e "CITATION\t$CITATION" >> $TABLE
echo -e "check-dependencies\t$CHECK_DEPENDENCIES" >> $TABLE
echo "Wrote to table $TABLE"
# Record properties from this plugin into properties.txt
echo -e "$SCRIPT\ttable\t$TABLE" >> $RUN/SneakerNet/properties.txt
echo -e "$SCRIPT\tversion\t$MY_VERSION" >> $RUN/SneakerNet/properties.txt
|
lskatz/SneakerNet
|
SneakerNet.plugins/sn_helloWorld.sh
|
Shell
|
apache-2.0
| 3,163 |
#!/bin/bash
# Show command before executing
set -x
# Exit on error
set -e
# We need to disable selinux for now, XXX
/usr/sbin/setenforce 0
# Get all the deps in
yum -y install \
docker \
make \
git
service docker start
# Build builder image
docker build -t fabric8-ui-builder -f Dockerfile.builder .
mkdir -p dist && docker run --detach=true --name=fabric8-ui-builder -t -v $(pwd)/dist:/dist:Z fabric8-ui-builder
# Build almighty-ui
docker exec fabric8-ui-builder npm install
## Exec unit tests
docker exec fabric8-ui-builder ./run_unit_tests.sh
if [ $? -eq 0 ]; then
echo 'CICO: unit tests OK'
else
echo 'CICO: unit tests FAIL'
exit 1
fi
## Exec functional tests
docker exec fabric8-ui-builder ./run_functional_tests.sh
## All ok, build prod version
if [ $? -eq 0 ]; then
echo 'CICO: functional tests OK'
docker exec fabric8-ui-builder npm run build:prod
docker exec -u root fabric8-ui-builder cp -r /home/fabric8/fabric8-ui/dist /
else
echo 'CICO: functional tests FAIL'
exit 1
fi
|
pmuir/fabric8-ui
|
cico_run_tests.sh
|
Shell
|
apache-2.0
| 1,016 |
#!/usr/bin/env bash
# Copyright 2017 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Constants. Enter relevant repo information here.
UPSTREAM_REPO="kubernetes"
CLI="kompose"
GITPATH="$GOPATH/src/github.com/kubernetes/kompose"
usage() {
echo "This will prepare $CLI for release!"
echo ""
echo "Requirements:"
echo " git"
echo " hub"
echo " github-release"
echo " github_changelog_generator"
echo " GITHUB_TOKEN in your env variable"
echo " "
echo "Not only that, but you must have permission for:"
echo " Tagging releases within Github"
echo ""
}
requirements() {
if [ "$PWD" != "$GITPATH" ]; then
echo "ERROR: Must be in the $GITPATH directory"
exit 0
fi
if ! hash git 2>/dev/null; then
echo "ERROR: No git."
exit 0
fi
if ! hash github-release 2>/dev/null; then
echo "ERROR: No $GOPATH/bin/github-release. Please run 'go get -v github.com/aktau/github-release'"
exit 0
fi
if ! hash github_changelog_generator 2>/dev/null; then
echo "ERROR: github_changelog_generator required to generate the change log. Please run 'gem install github_changelog_generator"
exit 0
fi
if ! hash hub 2>/dev/null; then
echo "ERROR: Hub needed in order to create the relevant PR's. Please install hub @ https://github.com/github/hub"
exit 0
fi
if [[ -z "$GITHUB_TOKEN" ]]; then
echo "ERROR: export GITHUB_TOKEN=yourtoken needed for using github-release"
exit 0
fi
}
# Make sure that upstream had been added to the repo
init_sync() {
CURRENT_ORIGIN=`git config --get remote.origin.url`
CURRENT_UPSTREAM=`git config --get remote.upstream.url`
ORIGIN="[email protected]:$ORIGIN_REPO/$CLI.git"
UPSTREAM="[email protected]:$UPSTREAM_REPO/$CLI.git"
if [ $CURRENT_ORIGIN != $ORIGIN ]; then
echo "Origin repo must be set to $ORIGIN"
exit 0
fi
if [ $CURRENT_UPSTREAM != $UPSTREAM ]; then
echo "Upstream repo must be set to $UPSTREAM"
exit 0
fi
git checkout master
git fetch upstream
git merge upstream/master
git checkout -b release-$1
}
replaceversion() {
echo "Replaced version in pkg/version/version.go"
sed -i "s/$1/$2/g" pkg/version/version.go
echo "Replaced version in README.md"
sed -i "s/$1/$2/g" README.md
echo "Replaced version in docs/installation.md"
sed -i "s/$1/$2/g" docs/installation.md
echo "Replaced version in docs/introduction.md"
sed -i "s/$1/$2/g" docs/introduction.md
echo "Replaced version in build/VERSION"
sed -i "s/$1/$2/g" build/VERSION
}
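# For illustration (hypothetical version numbers): 'replaceversion 1.21.0 1.22.0'
# rewrites every occurrence of 1.21.0 to 1.22.0 in the files listed above.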
changelog() {
echo "Generating changelog using github-changelog-generator"
github_changelog_generator $UPSTREAM_REPO/$CLI -t $GITHUB_TOKEN --future-release v$1
}
changelog_github() {
touch changes.txt
echo "Write your GitHub changelog here" >> changes.txt
$EDITOR changes.txt
}
build_binaries() {
make cross
}
create_tarballs() {
# cd into the bin directory so we don't have '/bin' inside the tarball
cd bin
for f in *
do
tar cvzf $f.tar.gz $f
done
cd ..
}
git_commit() {
BRANCH=`git symbolic-ref --short HEAD`
if [ -z "$BRANCH" ]; then
echo "Unable to get branch name, is this even a git repo?"
return 1
fi
echo "Branch: " $BRANCH
git add .
git commit -m "$1 Release"
git push origin $BRANCH
hub pull-request -b $UPSTREAM_REPO/$CLI:master -h $ORIGIN_REPO/$CLI:$BRANCH
echo ""
echo "PR opened against master to update version"
echo "MERGE THIS BEFORE CONTINUING"
echo ""
}
git_pull() {
git pull
}
git_sync() {
git fetch upstream master
git rebase upstream/master
}
git_tag() {
git tag v$1
}
generate_install_guide() {
echo "
# Installation
__Linux and macOS:__
\`\`\`sh
# Linux
curl -L https://github.com/kubernetes/kompose/releases/download/v$1/kompose-linux-amd64 -o kompose
# macOS
curl -L https://github.com/kubernetes/kompose/releases/download/v$1/kompose-darwin-amd64 -o kompose
chmod +x kompose
sudo mv ./kompose /usr/local/bin/kompose
\`\`\`
__Windows:__
Download from [GitHub](https://github.com/kubernetes/kompose/releases/download/v$1/kompose-windows-amd64.exe) and add the binary to your PATH.
__Checksums:__
| Filename | SHA256 Hash |
| ------------- |:-------------:|" > install_guide.txt
for f in bin/*
do
HASH=`sha256sum $f | head -n1 | awk '{print $1;}'`
NAME=`echo $f | sed "s,bin/,,g"`
echo "[$NAME](https://github.com/kubernetes/kompose/releases/download/v$1/$NAME) | $HASH" >> install_guide.txt
done
# Append the install guide to the changelog notes
cat install_guide.txt >> changes.txt
}
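# For illustration (hypothetical version and hash), each loop iteration above
# appends a markdown table row such as:
#   [kompose-linux-amd64](https://github.com/kubernetes/kompose/releases/download/v1.22.0/kompose-linux-amd64) | 3f2a...e9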
push() {
CHANGES=$(cat changes.txt)
# Release it!
echo "Creating GitHub tag"
github-release release \
--user $UPSTREAM_REPO \
--repo $CLI \
--tag v$1 \
--name "v$1" \
--description "$CHANGES"
if [ $? -eq 0 ]; then
echo UPLOAD OK
else
echo UPLOAD FAIL
exit
fi
# Upload all the binaries and tarballs generated in bin/
for f in bin/*
do
echo "Uploading file $f"
NAME=`echo $f | sed "s,bin/,,g"`
github-release upload \
--user $UPSTREAM_REPO \
--repo $CLI \
--tag v$1 \
--file $f \
--name $NAME
if [ $? -eq 0 ]; then
echo UPLOAD OK
else
echo UPLOAD FAIL
exit
fi
done
echo "DONE"
echo "DOUBLE CHECK IT:"
echo "!!!"
echo "https://github.com/$UPSTREAM_REPO/$CLI/releases/edit/$1"
echo "!!!"
}
clean() {
rm changes.txt install_guide.txt
}
main() {
local cmd=$1
usage
requirements
echo "What is your Github username? (location of your $CLI fork)"
read ORIGIN_REPO
echo "You entered: $ORIGIN_REPO"
echo ""
echo ""
echo "First, please enter the version of the NEW release: "
read VERSION
echo "You entered: $VERSION"
echo ""
echo ""
echo "Second, please enter the version of the LAST release: "
read PREV_VERSION
echo "You entered: $PREV_VERSION"
echo ""
clear
echo "Now! It's time to go through each step of releasing $CLI!"
echo "If one of these steps fails / does not work, simply re-run ./release.sh"
echo "Re-enter the information at the beginning and continue on the failed step"
echo ""
PS3='Please enter your choice: '
options=(
"Initial sync with upstream"
"Replace version number"
"Generate changelog"
"Generate GitHub changelog"
"Create PR"
"Sync with upstream"
"Create tag"
"Build binaries"
"Create tarballs"
"Generate install guide"
"Upload the binaries and push to GitHub release page"
"Clean"
"Quit")
select opt in "${options[@]}"
do
echo ""
case $opt in
"Initial sync with upstream")
init_sync $VERSION
;;
"Replace version number")
replaceversion $PREV_VERSION $VERSION
;;
"Generate changelog")
changelog $VERSION
;;
"Generate GitHub changelog")
changelog_github $VERSION
;;
"Create PR")
git_commit $VERSION
;;
"Sync with upstream")
git_sync
;;
"Create tag")
git_tag $VERSION
;;
"Build binaries")
build_binaries
;;
"Create tarballs")
create_tarballs
;;
"Generate install guide")
generate_install_guide $VERSION
;;
"Upload the binaries and push to GitHub release page")
push $VERSION
;;
"Clean")
clean $VERSION
;;
"Quit")
clear
break
;;
*) echo invalid option;;
esac
echo ""
done
}
main "$@"
|
procrypt/kompose
|
script/release.sh
|
Shell
|
apache-2.0
| 8,281 |
#!/bin/bash
#
# Compiles pertinent Closure library files.
# TODO(joeltine): Make strictMissingRequire an error when
# @suppress {missingRequire} works for it.
java -Xmx1G -jar ../closure-compiler-1.0-SNAPSHOT.jar \
-O ADVANCED \
--warning_level VERBOSE \
--jscomp_error='*' \
--jscomp_off=strictMissingRequire \
--jscomp_off=extraRequire \
--jscomp_off=deprecated \
--jscomp_off=lintChecks \
--jscomp_off=analyzerChecks \
--jscomp_warning=unusedLocalVariables \
--js='**.js' \
--js='!**_test.js' \
--js='!**_perf.js' \
--js='!**tester.js' \
--js='!**promise/testsuiteadapter.js' \
--js='!**relativecommontests.js' \
--js='!**osapi/osapi.js' \
--js='!**svgpan/svgpan.js' \
--js='!**alltests.js' \
--js='!**node_modules**.js' \
--js='!**protractor_spec.js' \
--js='!**protractor.conf.js' \
--js='!**browser_capabilities.js' \
--js='!**generate_closure_unit_tests.js' \
--js='!./doc/**.js' \
--js_output_file=$(mktemp);
|
platinumpixs/google-closure-library
|
scripts/ci/compile_closure.sh
|
Shell
|
apache-2.0
| 970 |
#!/bin/sh
# CheckInputOutputFiles.sh
# buildtools
#
# Copyright 2004-present Greg Hurrell. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# A script to work around Xcode Input/Output file bugs (dependency checking broken)
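#
# Illustrative sketch of the environment an Xcode "Run Script" build phase
# provides to this script (file names below are hypothetical):
#
#   SCRIPT_INPUT_FILE_COUNT=2
#   SCRIPT_INPUT_FILE_0=/path/to/source/a.plist
#   SCRIPT_INPUT_FILE_1=/path/to/source/b.plist
#   SCRIPT_OUTPUT_FILE_COUNT=1
#   SCRIPT_OUTPUT_FILE_0=/path/to/build/product.plist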
#
# Main
#
set -e
# process environment variables
if [ ${SCRIPT_INPUT_FILE_COUNT} -lt 1 -o ${SCRIPT_OUTPUT_FILE_COUNT} -lt 1 ]; then
builtin echo "No Input/Output files specified. Exiting."
exit 0
fi
# find out the newest input file
NEWEST_INPUT=$SCRIPT_INPUT_FILE_0
builtin echo "Checking modification date for input file $SCRIPT_INPUT_FILE_0"
for (( FILE_COUNT=1 ; $FILE_COUNT < $SCRIPT_INPUT_FILE_COUNT ; FILE_COUNT++ ))
do
FILE=$(declare -p SCRIPT_INPUT_FILE_$FILE_COUNT | awk -F "=" '{print $2}' | sed -e 's/"$//' -e 's/^"//')
builtin echo "Checking modification date for input file ${FILE}"
if [ "$FILE" -nt "$NEWEST_INPUT" ]; then
NEWEST_INPUT=$FILE
fi
done
# find out the oldest output file
if [ ! -e "$SCRIPT_OUTPUT_FILE_0" ]; then
builtin echo "Output file $SCRIPT_OUTPUT_FILE_0 does not exist; aborting and returning 1"
return 1
fi
OLDEST_OUTPUT=$SCRIPT_OUTPUT_FILE_0
builtin echo "Checking modification date for output file $SCRIPT_OUTPUT_FILE_0"
for (( FILE_COUNT=1 ; $FILE_COUNT < $SCRIPT_OUTPUT_FILE_COUNT ; FILE_COUNT++ ))
do
FILE=$(declare -p SCRIPT_OUTPUT_FILE_$FILE_COUNT | awk -F "=" '{print $2}' | sed -e 's/"$//' -e 's/^"//')
if [ -e "${FILE}" ]; then
builtin echo "Checking modification date for output file ${FILE}"
if [ "$FILE" -ot "$OLDEST_OUTPUT" ]; then
OLDEST_OUTPUT=$FILE
fi
else
builtin echo "Output file $FILE does not exist; aborting and returning 1"
return 1
fi
done
if [ "$NEWEST_INPUT" -nt "$OLDEST_OUTPUT" ]; then
builtin echo "Newest input file is newer than oldest output file: returning 1"
return 1
else
builtin echo "Newest input file is not newer than oldest output file: returning 0"
return 0
fi
|
wincent/buildtools
|
CheckInputOutputFiles.sh
|
Shell
|
bsd-2-clause
| 3,196 |
#!/bin/sh
if [ $# -lt 2 ] ; then echo "usage: $0 <log> <frame_no> [delta|key] [diff]"; exit 1; fi
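# Illustrative invocation (hypothetical log file and frame number):
#   ./trace-frame.sh consumer.log 1345 key diff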
logFile="$1"
frameNo=$2
type="delta"
if [ $# -ge 3 ]; then
if [ "$3" -eq "key" ] ; then
type="key"
else
type="delta"
fi
fi
useDiff=0
if [ $# -eq 4 ]; then
if [ "$4" = "diff" ] ; then
useDiff=1
else
useDiff=0
fi
fi
grepPipeline="cat ${logFile} | grep \"/ndn/edu/ucla/apps/ndnrtc/user/.*/streams/video0/vp8/frames/${type}/${frameNo}\"| grep -v \"vconsumer-buffer\" | grep -v \"new pit entry\" | grep -v \"pit hit\""
echo $logFile
if [ $useDiff -eq 1 ] ; then
eval $grepPipeline | time-diff.sh
else
eval $grepPipeline
fi
|
remap/ndnrtc
|
cpp/resources/trace-frame.sh
|
Shell
|
bsd-2-clause
| 635 |
php vendor/crodas/simple-view-engine/cli.php compile lib/crodas/SitemapGenerator lib/crodas/SitemapGenerator/Templates.php -N "crodas\\SitemapGenerator"
|
crodas/SitemapGenerator
|
compile.sh
|
Shell
|
bsd-3-clause
| 153 |
#!/bin/bash
########################################################################
#
# NOTE: This is a first draft installation script
#
# It is a copy and may not be fully in sync with latest version
#
########################################################################
# RSYNC NOTES
# a = archive, recurse through directories, preserves file permissions, owner [ NOT USED, DON'T WANT TO MESS W/ PERMISSIONS ]
# r = recursive
# v = verbose, what was actually copied
# u = only copy things that have changed
# z = compresses (faster for text, maybe not for binary)
# (--delete, but probably dont want)
# / trailing slash, copies contents into target
# no slash, copies the directory & contents to target
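# For example (hypothetical paths): 'rsync -rvuz src/ dst' copies the contents
# of src into dst, while 'rsync -rvuz src dst' creates dst/src.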
BASEDIR=/fill/this/in
RCOS_REPO=/fill/this/in
PRIVATE_REPO=/fill/this/in
PHPUSER=fillthisin
CRONGROUP=fillthisin
########################################
# COPY THE SUBMISSION SERVER WEBSITE
rsync -rvuz $RCOS_REPO/public $PHPUSER@localhost:$BASEDIR/website
########################################
# COPY THE WEBSITE CUSTOMIZATIONS FOR THE SPECIFIC COURSES
rsync -vuz XXXXXXXXXXXXXX/fall14/csci1200/template_before_https.php $PHPUSER@localhost:$BASEDIR/website/public/view/f14_csci1200_container.php
rsync -vuz XXXXXXXXXXXXXX/fall14/csci1200/f14_csci1200_main.css $PHPUSER@localhost:$BASEDIR/website/public/resources/f14_csci1200_main.css
rsync -vuz $RCOS_REPO/Sample_Files/sample_class/f14_csci1200_upload_message.php $PHPUSER@localhost:$BASEDIR/website/public/view/f14_csci1200_upload_message.php
rsync -vuz XXXXXXXXXXXXXX/fall14/csci1200/template_before_https.php $PHPUSER@localhost:$BASEDIR/website/public/view/f14_csci1200test_container.php
rsync -vuz XXXXXXXXXXXXXX/fall14/csci1200/f14_csci1200_main.css $PHPUSER@localhost:$BASEDIR/website/public/resources/f14_csci1200test_main.css
rsync -vuz XXXXXXXXXXXXXXX/visualization/F14/template_before_https.php $PHPUSER@localhost:$BASEDIR/website/public/view/f14_csci4960_container.php
rsync -vuz XXXXXXXXXXXXXXX/visualization/F14/f14_csci4960_main.css $PHPUSER@localhost:$BASEDIR/website/public/resources/f14_csci4960_main.css
########################################
# COPY THE CORE GRADING CODE
rsync -rvuz $RCOS_REPO/grading $BASEDIR/gradingcode
rsync -rvuz $RCOS_REPO/modules $BASEDIR/gradingcode
####
# DISALLOWED & WARNING KEYWORDS FROM SUBMITTED CODE
#rsync -rvuz $PRIVATE_REPO/disallowed_words.txt $BASEDIR/courses/f14/csci1200/config/disallowed_words.txt
####
########################################
# COPY THE SCRIPT TO GRADE UPLOADED CODE
rsync -vuz $RCOS_REPO/bashScript/grade_students.sh $BASEDIR/bin/grade_students.sh
chgrp $CRONGROUP $BASEDIR/bin/grade_students.sh
chmod u+x $BASEDIR/bin/grade_students.sh
chmod g+x $BASEDIR/bin/grade_students.sh
################################################################################################################
################################################################################################################
function install_homework {
# location of the homework files, including:
# $hw_source/config.h
# $hw_source/test_input/<input files>
# $hw_source/test_output/<output files>
# $hw_source/test_code/<solution/instructor code files>
hw_source=$1
# where it should be installed (what semester, course, and assignment number/name)
semester=$2
course=$3
assignment=$4
hw_code_path=$BASEDIR/courses/$semester/$course/hwconfig/$assignment
hw_bin_path=$BASEDIR/courses/$semester/$course/bin/$assignment
hw_config=$BASEDIR/courses/$semester/$course/config/${assignment}_assignment_config.json
echo "---------------------------------------------------"
echo "install $hw_source $hw_code_path"
# copy the files
rsync -rvuz $hw_source/ $hw_code_path
# grab the universal cmake file
cp $RCOS_REPO/Sample_Files/Sample_CMakeLists.txt $hw_code_path/CMakeLists.txt
# go to the code directory
pushd $hw_code_path
# build the configuration, compilation, runner, and validation executables
# configure cmake, specifying the clang compiler
CXX=/usr/bin/clang++ cmake .
# build in parallel
# FIXME: using -j 8 causes fork errors on the server
make -j 2
# copy the json config file
cp $hw_bin_path/assignment_config.json $hw_config
# set the permissions
chmod o+r $hw_config
chmod o+x $hw_bin_path
chmod o+rx $hw_bin_path/*out
# copy the test input, test output, test solution code files to the appropriate directories
if [ -d $hw_code_path/test_input/ ]; then
rsync -rvuz $hw_code_path/test_input/ $BASEDIR/courses/$semester/$course/test_input/$assignment/
fi
if [ -d $hw_code_path/test_output/ ]; then
rsync -rvuz $hw_code_path/test_output/ $BASEDIR/courses/$semester/$course/test_output/$assignment/
fi
if [ -d $hw_code_path/test_code/ ]; then
rsync -rvuz $hw_code_path/test_code/ $BASEDIR/courses/$semester/$course/test_code/$assignment/
fi
popd
echo "---------------------------------------------------"
}
################################################################################################################
################################################################################################################
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci1200_lab01_getting_started/ f14 csci1200 lab1
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci1200_lab05_memory_debugging/ f14 csci1200 lab5
install_homework $PRIVATE_REPO/csci1200_hw01_moire_strings/ f14 csci1200 hw01
install_homework $PRIVATE_REPO/csci1200_hw02_bowling_classes/ f14 csci1200 hw02
install_homework $PRIVATE_REPO/csci1200_hw03_jagged_array/ f14 csci1200 hw03
install_homework $PRIVATE_REPO/csci1200_hw04_preference_lists/ f14 csci1200 hw04
install_homework $PRIVATE_REPO/csci1200_hw05_unrolled_linked_lists/ f14 csci1200 hw05
install_homework $PRIVATE_REPO/csci1200_hw06_carcassonne_recursion/ f14 csci1200 hw06
install_homework $PRIVATE_REPO/csci1200_hw07_library_maps/ f14 csci1200 hw07
install_homework $PRIVATE_REPO/csci1200_hw08_bidirectional_map/ f14 csci1200 hw08
install_homework $PRIVATE_REPO/csci1200_hw09_perfect_hashing/ f14 csci1200 hw09
install_homework $PRIVATE_REPO/csci1200_hw10_organism_inheritance/ f14 csci1200 hw10
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci1100_hw01part1/ f14 csci1200 pythontest
#install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci1100_hw01part2/ f14 csci1100 hw01part2
#install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci1100_hw01part3/ f14 csci1100 hw01part3
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw01
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw02
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw03
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw04
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw05
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw06
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw07
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw08
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw09
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw10
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw11
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw12
install_homework $RCOS_REPO/Sample_Files/sample_assignment_config/csci4960_any_homework f14 csci4960 hw13
########################################
|
UuqV/HomeworkServerHack
|
bashScript/INSTALL_HWserver.sh
|
Shell
|
bsd-3-clause
| 8,796 |
#!/bin/sh -x
#
# This script converts pdf files to jpeg image files. The jpeg filename is
# created using the pdf filename by replacing the "pdf" to "jpg". The resolution
# is 300x300.
#
# Usage: bash pdf2jpeg.sh FILE...
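# Example (hypothetical filenames): 'sh pdf2jpeg.sh report.pdf slides.pdf'
# produces report.jpg and slides.jpg in the current directory.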
while [ $# -gt 0 ]; do
fullfile=$1
filename=$(basename "$fullfile")
extension="${filename##*.}"
filename="${filename%.*}"
gs -dNOPAUSE \
-q \
-r300x300 \
-sDEVICE=jpeg \
-dBATCH \
-sCompression=lzw \
-sOutputFile="$filename.jpg" \
"$filename.pdf"
shift
done
|
yfpeng/pengyifan-commons
|
src/main/scripts/pdf2jpeg.sh
|
Shell
|
bsd-3-clause
| 520 |
#!/bin/bash
set -e
set -x
if [[ "$(uname -s)" == 'Darwin' ]]; then
if which pyenv > /dev/null; then
eval "$(pyenv init -)"
fi
pyenv activate psutil
fi
python setup.py build
python setup.py develop
coverage run psutil/tests/runner.py --include="psutil/*" --omit="test/*,*setup*"
python psutil/tests/test_memory_leaks.py
flake8
pep8
|
0-wiz-0/psutil
|
.ci/travis/run.sh
|
Shell
|
bsd-3-clause
| 354 |
#!/bin/bash
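# packages.txt is assumed to contain 'pip freeze'-style lines such as
# requests==2.31.0 (hypothetical example); the awk call below keeps only the
# package name before the '=='.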
for plugin in $(cat packages.txt); do
PLUGIN=$(echo "$plugin" | awk -F == '{print $1}')
echo "Uninstalling $PLUGIN..."
expect -c "spawn pip uninstall $PLUGIN
expect {
\"Proceed (y/n)?\" {
send \"y\r\n\"
expect {
exit
}
}
}"
done
|
matthewayne/evernote-sdk-python
|
lib/uninstall.sh
|
Shell
|
bsd-3-clause
| 332 |
#!/bin/bash
FILES="*.go"
dorun(){
echo
echo
echo
echo
go test -test.run="upp01"
}
while true; do
inotifywait -q -e modify $FILES
dorun
done
|
cpmech/gofem
|
ele/porous/xmonitor.bash
|
Shell
|
bsd-3-clause
| 171 |
#!/bin/bash
FN="hugene21sttranscriptcluster.db_8.7.0.tar.gz"
URLS=(
"https://bioconductor.org/packages/3.11/data/annotation/src/contrib/hugene21sttranscriptcluster.db_8.7.0.tar.gz"
"https://bioarchive.galaxyproject.org/hugene21sttranscriptcluster.db_8.7.0.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-hugene21sttranscriptcluster.db/bioconductor-hugene21sttranscriptcluster.db_8.7.0_src_all.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-hugene21sttranscriptcluster.db/bioconductor-hugene21sttranscriptcluster.db_8.7.0_src_all.tar.gz"
)
MD5="7d4d913f3e48f3eab45e036323d223b5"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $STAGING
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in ${URLS[@]}; do
curl $URL > $TARBALL
[[ $? == 0 ]] || continue
# Platform-specific md5sum checks.
if [[ $(uname -s) == "Linux" ]]; then
if md5sum -c <<<"$MD5  $TARBALL"; then
SUCCESS=1
break
fi
else if [[ $(uname -s) == "Darwin" ]]; then
if [[ $(md5 $TARBALL | cut -f4 -d " ") == "$MD5" ]]; then
SUCCESS=1
break
fi
fi
fi
done
if [[ $SUCCESS != 1 ]]; then
echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
printf '%s\n' "${URLS[@]}"
exit 1
fi
# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL
rm $TARBALL
rmdir $STAGING
|
roryk/recipes
|
recipes/bioconductor-hugene21sttranscriptcluster.db/post-link.sh
|
Shell
|
mit
| 1,552 |
#!/bin/bash
/usr/sbin/sshd
su hduser -c "$HADOOP_HOME/sbin/start-dfs.sh"
su hduser -c "$HADOOP_HOME/sbin/start-yarn.sh"
tail -f $HADOOP_HOME/logs/*
|
sticksnleaves/docker-hadoop-single-node
|
services/start-hadoop.sh
|
Shell
|
mit
| 148 |
#!/bin/bash
# Start polipo as a background process. If it is already running, it is killed and restarted.
function start(){
sudo polipo -c /etc/polipo/config
}
function restart(){
pid=$( pgrep -u root polipo )
[ "$pid" ] && sudo kill "$pid" 2>/dev/null && echo "Process $pid has been killed."
start && sleep 1
newpid=$( pgrep -u root polipo )
echo "Restarted with PID $newpid"
}
restart
|
weaming/dotfiles
|
shadowsocks/start-polipo.sh
|
Shell
|
mit
| 390 |
#!/bin/bash
PROFILE=${BEHAT_PROFILE:-"selenium"}
LOGS_PATH=${LOGS_DIR:-"/tmp/sulu"}
if [ -e /tmp/failed.tests ]; then
rm /tmp/failed.tests
fi
touch /tmp/failed.tests
export BEHAT_PARAMS=
php bin/behat --profile $PROFILE
|
wachterjohannes/sulu-standard
|
tests/runtests.sh
|
Shell
|
mit
| 227 |
#!/bin/bash
detect_platform() {
if [ $(uname | grep 'SunOS') ]; then
echo "SmartOS"
elif [ $(uname | grep 'Linux') ]; then
echo "Linux"
fi
}
project='gonano'
platform=$(detect_platform)
depends=(
net/libfetch
)
packages=(
devel/bmake
pkgtools/bootstrap-mk-files
archivers/bzip2
sysutils/coreutils
devel/gettext-lib
archivers/libarchive
security/mozilla-rootcerts
lang/nawk
textproc/nbsed
devel/ncurses
security/openssl
pkgtools/pkg_alternatives
pkgtools/pkg_install
pkgtools/pkg_install-info
pkgtools/pkgin
devel/readline
databases/sqlite3
archivers/xz
devel/zlib
)
extra_packages=(
pkgtools/mksandbox
)
# 0) clean start
sudo rm -rf /opt/${project}
sudo rm -rf /var/tmp/${project}-bootstrap
# sudo rm -rf /content/packages/pkgsrc/gonano/Linux
# 1) install build-essential
sudo apt-get -y update -qq && \
sudo apt-get install -y build-essential
# 2) run bootstrap process
sudo GCCBASE=/usr \
SH=/bin/bash \
/content/pkgsrc/bootstrap/bootstrap \
--compiler=gcc \
--abi 64 \
--full \
--prefer-pkgsrc=yes \
--pkgdbdir=/opt/${project}/pkg \
--prefix=/opt/${project} \
--varbase=/var/${project} \
--workdir=/var/tmp/${project}-bootstrap
# 3) generate mk.conf
sudo bash -c "/bin/cat > /opt/${project}/etc/mk.conf" <<END
.ifdef BSD_PKG_MK # begin pkgsrc settings
ABI= 64
PKGSRC_COMPILER= gcc
GCCBASE= /usr
PKG_DBDIR= /opt/${project}/pkg
LOCALBASE= /opt/${project}
VARBASE= /var/${project}
PKG_TOOLS_BIN= /opt/${project}/sbin
PKGINFODIR= info
PKGMANDIR= man
DEPENDS_TARGET= bin-install
PREFER_PKGSRC= yes
TOOLS_PLATFORM.install?= /opt/${project}/bin/install-sh
TOOLS_PLATFORM.sh?= /opt/${project}/bin/pdksh
TOOLS_PLATFORM.ksh?= /opt/${project}/bin/pdksh
TOOLS_PLATFORM.awk?= /opt/${project}/bin/nawk
TOOLS_PLATFORM.sed?= /opt/${project}/bin/nbsed
TOOLS_PLATFORM.sh?= /bin/bash
DISTDIR= /content/distfiles
PACKAGES= /content/packages/pkgsrc/${project}/Linux
WRKOBJDIR= /var/tmp/pkgsrc-build-${project}
MAKE_JOBS= 6
SU_CMD= sudo sh -c
LDFLAGS+= -L/lib/x86_64-linux-gnu
LDFLAGS+= -L/usr/lib/x86_64-linux-gnu
CFLAGS+= -I/usr/include/x86_64-linux-gnu
ALLOW_VULNERABLE_PACKAGES= yes
SKIP_LICENSE_CHECK= yes
FETCH_USING= curl
.endif # end pkgsrc settings
END
# 4) make a temporary tarball of the minimal bootstrap
sudo tar -czf /var/tmp/bootstrap.minimal.tar.gz -C / opt/${project}
# 5) build and install dependencies
for i in ${depends[@]}; do
/opt/${project}/bin/bmake -C /content/pkgsrc/${i} package
/opt/${project}/bin/bmake -C /content/pkgsrc/${i} install
done
# 6) create extra necessary packages
for i in ${packages[@]}; do
/opt/${project}/bin/bmake -C /content/pkgsrc/${i} package
done
# 7) blast /opt/${project}
sudo rm -rf /opt/${project}
# 8) extract minimal bootstrap
sudo tar -xzf /var/tmp/bootstrap.minimal.tar.gz -C /
# 9) install the packages we just built
for i in ${packages[@]}; do
pkg_name=$(/opt/${project}/bin/bmake -C /content/pkgsrc/${i} show-var VARNAME=PKGNAME)
sudo /opt/${project}/sbin/pkg_add /content/packages/pkgsrc/${project}/${platform}/All/${pkg_name}.tgz
done
# 10) set the source in pkgin
sudo bash -c "/bin/cat > /opt/${project}/etc/pkgin/repositories.conf" <<END
# $Id: repositories.conf,v 1.3 2012/06/13 13:50:17 imilh Exp $
#
# Pkgin repositories list
#
# Simply add repositories URIs one below the other
#
# WARNING: order matters, duplicates will not be added, if two
# repositories hold the same package, it will be fetched from
# the first one listed in this file.
#
# This file format supports the following macros:
# $arch to define the machine hardware platform
# $osrelease to define the release version for the operating system
#
# Remote ftp repository
#
# ftp://ftp.netbsd.org/pub/pkgsrc/packages/NetBSD/$arch/5.1/All
#
# Remote http repository
#
# http://mirror-master.dragonflybsd.org/packages/$arch/DragonFly-$osrelease/stable/All
#
# Local repository (must contain a pkg_summary.gz or bz2)
#
# file:///usr/pkgsrc/packages/All
# Nanobox public repository
http://pkgsrc.nanobox.io/nanobox/${project}/${platform}
END
# 11) tar
sudo tar -czf /var/tmp/bootstrap.tar.gz -C / opt/${project}
# 12) upload
curl \
-k \
-X POST \
-H "Key: ${NANOBOX_SECRET}" \
--data-binary \@/var/tmp/bootstrap.tar.gz \
https://pkgsrc.nanobox.io/${NANOBOX_USER}/${project}/${platform}/bootstrap.tar.gz
# 13) build/install/publish extra packages
for i in ${extra_packages[@]}; do
/opt/${project}/bin/bmake -C /content/pkgsrc/${i} package
/opt/${project}/bin/bmake -C /content/pkgsrc/${i} install
/opt/${project}/bin/bmake -C /content/pkgsrc/${i} publish
done
# 14) mv bootstrap into cache for chroots
cp -f /var/tmp/bootstrap.tar.gz \
/content/packages/pkgsrc/${project}/${platform}/bootstrap.tar.gz
|
nanobox-io/nanobox-pkgsrc-gonano
|
.scripts/bootstrap.sh
|
Shell
|
mit
| 4,863 |
#!/bin/bash
source setup/functions.sh
apt_install python3-flask links duplicity libyaml-dev python3-dnspython python3-dateutil
hide_output pip3 install rtyaml
# Create a backup directory and a random key for encrypting backups.
mkdir -p $STORAGE_ROOT/backup
if [ ! -f $STORAGE_ROOT/backup/secret_key.txt ]; then
$(umask 077; openssl rand -base64 2048 > $STORAGE_ROOT/backup/secret_key.txt)
fi
# Link the management server daemon into a well known location.
rm -f /usr/local/bin/mailinabox-daemon
ln -s `pwd`/management/daemon.py /usr/local/bin/mailinabox-daemon
# Create an init script to start the management daemon and keep it
# running after a reboot.
rm -f /etc/init.d/mailinabox
ln -s $(pwd)/conf/management-initscript /etc/init.d/mailinabox
hide_output update-rc.d mailinabox defaults
# Perform a daily backup.
cat > /etc/cron.daily/mailinabox-backup << EOF;
#!/bin/bash
# Mail-in-a-Box --- Do not edit / will be overwritten on update.
# Perform a backup.
$(pwd)/management/backup.py
EOF
chmod +x /etc/cron.daily/mailinabox-backup
# Start it. Remove the api key file first so that start.sh
# can wait for it to be created to know that the management
# server is ready.
rm -f /var/lib/mailinabox/api.key
restart_service mailinabox
|
Toilal/mailinabox
|
setup/management.sh
|
Shell
|
cc0-1.0
| 1,244 |
#!/bin/sh
wp core install --url="$SITE_NAME.$LANDO_DOMAIN" --title="$SITE_TITLE" --admin_user=admin --admin_password=admin [email protected] --path=/app/wordpress
|
leoloso/PoP
|
webservers/graphql-by-pop/setup/install.sh
|
Shell
|
gpl-2.0
| 178 |
#!/bin/sh
SOURCES_DIR=`pwd`
THIRD_PARTY_DIR=$SOURCES_DIR/third_party
LIBAV_DIR=$THIRD_PARTY_DIR/libav
if [ ! -d $THIRD_PARTY_DIR ]; then
mkdir $THIRD_PARTY_DIR
fi
if [ ! -d $LIBAV_DIR ]; then
git clone git://git.libav.org/libav.git $LIBAV_DIR
else
(cd $LIBAV_DIR && git pull origin master)
fi
# Configure and build libav from its checkout, whether it was freshly cloned
# or just updated.
cd $LIBAV_DIR
./configure
make
cd $SOURCES_DIR
|
drunknbass/yasem
|
update_dependencies.sh
|
Shell
|
gpl-2.0
| 342 |
#!/bin/bash
python pyBicolorMatrix.py
|
noppelmax/boxup_bicolormatrix
|
run.sh
|
Shell
|
gpl-2.0
| 39 |
#!/bin/sh
#
# Copyright (c) 2009 Red Hat, Inc.
#
test_description='Test updating submodules
This test verifies that "git submodule update" detaches the HEAD of the
submodule and "git submodule update --rebase/--merge" does not detach the HEAD.
'
GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME=main
export GIT_TEST_DEFAULT_INITIAL_BRANCH_NAME
. ./test-lib.sh
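# compare_head succeeds when the current HEAD of the repository in the working
# directory points at the same commit as the tip of 'main', i.e. HEAD has not
# been detached onto a different commit.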
compare_head()
{
sha_main=$(git rev-list --max-count=1 main)
sha_head=$(git rev-list --max-count=1 HEAD)
test "$sha_main" = "$sha_head"
}
test_expect_success 'setup a submodule tree' '
echo file > file &&
git add file &&
test_tick &&
git commit -m upstream &&
git clone . super &&
git clone super submodule &&
git clone super rebasing &&
git clone super merging &&
git clone super none &&
(cd super &&
git submodule add ../submodule submodule &&
test_tick &&
git commit -m "submodule" &&
git submodule init submodule
) &&
(cd submodule &&
echo "line2" > file &&
git add file &&
git commit -m "Commit 2"
) &&
(cd super &&
(cd submodule &&
git pull --rebase origin
) &&
git add submodule &&
git commit -m "submodule update"
) &&
(cd super &&
git submodule add ../rebasing rebasing &&
test_tick &&
git commit -m "rebasing"
) &&
(cd super &&
git submodule add ../merging merging &&
test_tick &&
git commit -m "rebasing"
) &&
(cd super &&
git submodule add ../none none &&
test_tick &&
git commit -m "none"
) &&
git clone . recursivesuper &&
( cd recursivesuper &&
git submodule add ../super super
)
'
test_expect_success 'update --remote falls back to using HEAD' '
test_create_repo main-branch-submodule &&
test_commit -C main-branch-submodule initial &&
test_create_repo main-branch &&
git -C main-branch submodule add ../main-branch-submodule &&
git -C main-branch commit -m add-submodule &&
git -C main-branch-submodule switch -c hello &&
test_commit -C main-branch-submodule world &&
git clone --recursive main-branch main-branch-clone &&
git -C main-branch-clone submodule update --remote main-branch-submodule &&
test_path_exists main-branch-clone/main-branch-submodule/world.t
'
test_expect_success 'submodule update detaching the HEAD ' '
(cd super/submodule &&
git reset --hard HEAD~1
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update submodule &&
cd submodule &&
! compare_head
)
'
test_expect_success 'submodule update from subdirectory' '
(cd super/submodule &&
git reset --hard HEAD~1
) &&
mkdir super/sub &&
(cd super/sub &&
(cd ../submodule &&
compare_head
) &&
git submodule update ../submodule &&
cd ../submodule &&
! compare_head
)
'
supersha1=$(git -C super rev-parse HEAD)
mergingsha1=$(git -C super/merging rev-parse HEAD)
nonesha1=$(git -C super/none rev-parse HEAD)
rebasingsha1=$(git -C super/rebasing rev-parse HEAD)
submodulesha1=$(git -C super/submodule rev-parse HEAD)
pwd=$(pwd)
cat <<EOF >expect
Submodule path '../super': checked out '$supersha1'
Submodule path '../super/merging': checked out '$mergingsha1'
Submodule path '../super/none': checked out '$nonesha1'
Submodule path '../super/rebasing': checked out '$rebasingsha1'
Submodule path '../super/submodule': checked out '$submodulesha1'
EOF
cat <<EOF >expect2
Cloning into '$pwd/recursivesuper/super/merging'...
Cloning into '$pwd/recursivesuper/super/none'...
Cloning into '$pwd/recursivesuper/super/rebasing'...
Cloning into '$pwd/recursivesuper/super/submodule'...
Submodule 'merging' ($pwd/merging) registered for path '../super/merging'
Submodule 'none' ($pwd/none) registered for path '../super/none'
Submodule 'rebasing' ($pwd/rebasing) registered for path '../super/rebasing'
Submodule 'submodule' ($pwd/submodule) registered for path '../super/submodule'
done.
done.
done.
done.
EOF
test_expect_success 'submodule update --init --recursive from subdirectory' '
git -C recursivesuper/super reset --hard HEAD^ &&
(cd recursivesuper &&
mkdir tmp &&
cd tmp &&
git submodule update --init --recursive ../super >../../actual 2>../../actual2
) &&
test_i18ncmp expect actual &&
sort actual2 >actual2.sorted &&
test_i18ncmp expect2 actual2.sorted
'
cat <<EOF >expect2
Submodule 'foo/sub' ($pwd/withsubs/../rebasing) registered for path 'sub'
EOF
test_expect_success 'submodule update --init from and of subdirectory' '
git init withsubs &&
(cd withsubs &&
mkdir foo &&
git submodule add "$(pwd)/../rebasing" foo/sub &&
(cd foo &&
git submodule deinit -f sub &&
git submodule update --init sub 2>../../actual2
)
) &&
test_i18ncmp expect2 actual2
'
test_expect_success 'submodule update does not fetch already present commits' '
(cd submodule &&
echo line3 >> file &&
git add file &&
test_tick &&
git commit -m "upstream line3"
) &&
(cd super/submodule &&
head=$(git rev-parse --verify HEAD) &&
echo "Submodule path ${SQ}submodule$SQ: checked out $SQ$head$SQ" > ../../expected &&
git reset --hard HEAD~1
) &&
(cd super &&
git submodule update > ../actual 2> ../actual.err
) &&
test_i18ncmp expected actual &&
test_must_be_empty actual.err
'
test_expect_success 'submodule update should fail due to local changes' '
(cd super/submodule &&
git reset --hard HEAD~1 &&
echo "local change" > file
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
test_must_fail git submodule update submodule
)
'
test_expect_success 'submodule update should throw away changes with --force ' '
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update --force submodule &&
cd submodule &&
! compare_head
)
'
test_expect_success 'submodule update --force forcibly checks out submodules' '
(cd super &&
(cd submodule &&
rm -f file
) &&
git submodule update --force submodule &&
(cd submodule &&
test "$(git status -s file)" = ""
)
)
'
test_expect_success 'submodule update --remote should fetch upstream changes' '
(cd submodule &&
echo line4 >> file &&
git add file &&
test_tick &&
git commit -m "upstream line4"
) &&
(cd super &&
git submodule update --remote --force submodule &&
cd submodule &&
test "$(git log -1 --oneline)" = "$(GIT_DIR=../../submodule/.git git log -1 --oneline)"
)
'
test_expect_success 'submodule update --remote should fetch upstream changes with .' '
(
cd super &&
git config -f .gitmodules submodule."submodule".branch "." &&
git add .gitmodules &&
git commit -m "submodules: update from the respective superproject branch"
) &&
(
cd submodule &&
echo line4a >> file &&
git add file &&
test_tick &&
git commit -m "upstream line4a" &&
git checkout -b test-branch &&
test_commit on-test-branch
) &&
(
cd super &&
git submodule update --remote --force submodule &&
git -C submodule log -1 --oneline >actual &&
git -C ../submodule log -1 --oneline main >expect &&
test_cmp expect actual &&
git checkout -b test-branch &&
git submodule update --remote --force submodule &&
git -C submodule log -1 --oneline >actual &&
git -C ../submodule log -1 --oneline test-branch >expect &&
test_cmp expect actual &&
git checkout main &&
git branch -d test-branch &&
git reset --hard HEAD^
)
'
test_expect_success 'local config should override .gitmodules branch' '
(cd submodule &&
git checkout test-branch &&
echo line5 >> file &&
git add file &&
test_tick &&
git commit -m "upstream line5" &&
git checkout main
) &&
(cd super &&
git config submodule.submodule.branch test-branch &&
git submodule update --remote --force submodule &&
cd submodule &&
test "$(git log -1 --oneline)" = "$(GIT_DIR=../../submodule/.git git log -1 --oneline test-branch)"
)
'
test_expect_success 'submodule update --rebase staying on main' '
(cd super/submodule &&
git checkout main
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update --rebase submodule &&
cd submodule &&
compare_head
)
'
test_expect_success 'submodule update --merge staying on main' '
(cd super/submodule &&
git reset --hard HEAD~1
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update --merge submodule &&
cd submodule &&
compare_head
)
'
test_expect_success 'submodule update - rebase in .git/config' '
(cd super &&
git config submodule.submodule.update rebase
) &&
(cd super/submodule &&
git reset --hard HEAD~1
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update submodule &&
cd submodule &&
compare_head
)
'
test_expect_success 'submodule update - checkout in .git/config but --rebase given' '
(cd super &&
git config submodule.submodule.update checkout
) &&
(cd super/submodule &&
git reset --hard HEAD~1
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update --rebase submodule &&
cd submodule &&
compare_head
)
'
test_expect_success 'submodule update - merge in .git/config' '
(cd super &&
git config submodule.submodule.update merge
) &&
(cd super/submodule &&
git reset --hard HEAD~1
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update submodule &&
cd submodule &&
compare_head
)
'
test_expect_success 'submodule update - checkout in .git/config but --merge given' '
(cd super &&
git config submodule.submodule.update checkout
) &&
(cd super/submodule &&
git reset --hard HEAD~1
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update --merge submodule &&
cd submodule &&
compare_head
)
'
test_expect_success 'submodule update - checkout in .git/config' '
(cd super &&
git config submodule.submodule.update checkout
) &&
(cd super/submodule &&
git reset --hard HEAD^
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update submodule &&
cd submodule &&
! compare_head
)
'
test_expect_success 'submodule update - command in .git/config' '
(cd super &&
git config submodule.submodule.update "!git checkout"
) &&
(cd super/submodule &&
git reset --hard HEAD^
) &&
(cd super &&
(cd submodule &&
compare_head
) &&
git submodule update submodule &&
cd submodule &&
! compare_head
)
'
test_expect_success 'submodule update - command in .gitmodules is rejected' '
test_when_finished "git -C super reset --hard HEAD^" &&
git -C super config -f .gitmodules submodule.submodule.update "!false" &&
git -C super commit -a -m "add command to .gitmodules file" &&
git -C super/submodule reset --hard $submodulesha1^ &&
test_must_fail git -C super submodule update submodule
'
test_expect_success 'fsck detects command in .gitmodules' '
git init command-in-gitmodules &&
(
cd command-in-gitmodules &&
git submodule add ../submodule submodule &&
test_commit adding-submodule &&
git config -f .gitmodules submodule.submodule.update "!false" &&
git add .gitmodules &&
test_commit configuring-update &&
test_must_fail git fsck
)
'
cat << EOF >expect
Execution of 'false $submodulesha1' failed in submodule path 'submodule'
EOF
test_expect_success 'submodule update - command in .git/config catches failure' '
(cd super &&
git config submodule.submodule.update "!false"
) &&
(cd super/submodule &&
git reset --hard $submodulesha1^
) &&
(cd super &&
test_must_fail git submodule update submodule 2>../actual
) &&
test_i18ncmp actual expect
'
cat << EOF >expect
Execution of 'false $submodulesha1' failed in submodule path '../submodule'
EOF
test_expect_success 'submodule update - command in .git/config catches failure -- subdirectory' '
(cd super &&
git config submodule.submodule.update "!false"
) &&
(cd super/submodule &&
git reset --hard $submodulesha1^
) &&
(cd super &&
mkdir tmp && cd tmp &&
test_must_fail git submodule update ../submodule 2>../../actual
) &&
test_i18ncmp actual expect
'
test_expect_success 'submodule update - command run for initial population of submodule' '
cat >expect <<-EOF &&
Execution of '\''false $submodulesha1'\'' failed in submodule path '\''submodule'\''
EOF
rm -rf super/submodule &&
test_must_fail git -C super submodule update 2>actual &&
test_i18ncmp expect actual &&
git -C super submodule update --checkout
'
cat << EOF >expect
Execution of 'false $submodulesha1' failed in submodule path '../super/submodule'
Failed to recurse into submodule path '../super'
EOF
test_expect_success 'recursive submodule update - command in .git/config catches failure -- subdirectory' '
(cd recursivesuper &&
git submodule update --remote super &&
git add super &&
git commit -m "update to latest to have more than one commit in submodules"
) &&
git -C recursivesuper/super config submodule.submodule.update "!false" &&
git -C recursivesuper/super/submodule reset --hard $submodulesha1^ &&
(cd recursivesuper &&
mkdir -p tmp && cd tmp &&
test_must_fail git submodule update --recursive ../super 2>../../actual
) &&
test_i18ncmp actual expect
'
test_expect_success 'submodule init does not copy command into .git/config' '
test_when_finished "git -C super update-index --force-remove submodule1" &&
test_when_finished git config -f super/.gitmodules \
--remove-section submodule.submodule1 &&
(cd super &&
git ls-files -s submodule >out &&
H=$(cut -d" " -f2 out) &&
mkdir submodule1 &&
git update-index --add --cacheinfo 160000 $H submodule1 &&
git config -f .gitmodules submodule.submodule1.path submodule1 &&
git config -f .gitmodules submodule.submodule1.url ../submodule &&
git config -f .gitmodules submodule.submodule1.update !false &&
test_must_fail git submodule init submodule1 &&
test_expect_code 1 git config submodule.submodule1.update >actual &&
test_must_be_empty actual
)
'
test_expect_success 'submodule init picks up rebase' '
(cd super &&
git config -f .gitmodules submodule.rebasing.update rebase &&
git submodule init rebasing &&
test "rebase" = "$(git config submodule.rebasing.update)"
)
'
test_expect_success 'submodule init picks up merge' '
(cd super &&
git config -f .gitmodules submodule.merging.update merge &&
git submodule init merging &&
test "merge" = "$(git config submodule.merging.update)"
)
'
test_expect_success 'submodule update --merge - ignores --merge for new submodules' '
test_config -C super submodule.submodule.update checkout &&
(cd super &&
rm -rf submodule &&
git submodule update submodule &&
git status -s submodule >expect &&
rm -rf submodule &&
git submodule update --merge submodule &&
git status -s submodule >actual &&
test_cmp expect actual
)
'
test_expect_success 'submodule update --rebase - ignores --rebase for new submodules' '
test_config -C super submodule.submodule.update checkout &&
(cd super &&
rm -rf submodule &&
git submodule update submodule &&
git status -s submodule >expect &&
rm -rf submodule &&
git submodule update --rebase submodule &&
git status -s submodule >actual &&
test_cmp expect actual
)
'
test_expect_success 'submodule update ignores update=merge config for new submodules' '
(cd super &&
rm -rf submodule &&
git submodule update submodule &&
git status -s submodule >expect &&
rm -rf submodule &&
git config submodule.submodule.update merge &&
git submodule update submodule &&
git status -s submodule >actual &&
git config --unset submodule.submodule.update &&
test_cmp expect actual
)
'
test_expect_success 'submodule update ignores update=rebase config for new submodules' '
(cd super &&
rm -rf submodule &&
git submodule update submodule &&
git status -s submodule >expect &&
rm -rf submodule &&
git config submodule.submodule.update rebase &&
git submodule update submodule &&
git status -s submodule >actual &&
git config --unset submodule.submodule.update &&
test_cmp expect actual
)
'
test_expect_success 'submodule init picks up update=none' '
(cd super &&
git config -f .gitmodules submodule.none.update none &&
git submodule init none &&
test "none" = "$(git config submodule.none.update)"
)
'
test_expect_success 'submodule update - update=none in .git/config' '
(cd super &&
git config submodule.submodule.update none &&
(cd submodule &&
git checkout main &&
compare_head
) &&
git diff --name-only >out &&
grep ^submodule$ out &&
git submodule update &&
git diff --name-only >out &&
grep ^submodule$ out &&
(cd submodule &&
compare_head
) &&
git config --unset submodule.submodule.update &&
git submodule update submodule
)
'
test_expect_success 'submodule update - update=none in .git/config but --checkout given' '
(cd super &&
git config submodule.submodule.update none &&
(cd submodule &&
git checkout main &&
compare_head
) &&
git diff --name-only >out &&
grep ^submodule$ out &&
git submodule update --checkout &&
git diff --name-only >out &&
! grep ^submodule$ out &&
(cd submodule &&
! compare_head
) &&
git config --unset submodule.submodule.update
)
'
test_expect_success 'submodule update --init skips submodule with update=none' '
(cd super &&
git add .gitmodules &&
git commit -m ".gitmodules"
) &&
git clone super cloned &&
(cd cloned &&
git submodule update --init &&
test_path_exists submodule/.git &&
test_path_is_missing none/.git
)
'
test_expect_success 'submodule update continues after checkout error' '
(cd super &&
git reset --hard HEAD &&
git submodule add ../submodule submodule2 &&
git submodule init &&
git commit -am "new_submodule" &&
(cd submodule2 &&
git rev-parse --verify HEAD >../expect
) &&
(cd submodule &&
test_commit "update_submodule" file
) &&
(cd submodule2 &&
test_commit "update_submodule2" file
) &&
git add submodule &&
git add submodule2 &&
git commit -m "two_new_submodule_commits" &&
(cd submodule &&
echo "" > file
) &&
git checkout HEAD^ &&
test_must_fail git submodule update &&
(cd submodule2 &&
git rev-parse --verify HEAD >../actual
) &&
test_cmp expect actual
)
'
test_expect_success 'submodule update continues after recursive checkout error' '
(cd super &&
git reset --hard HEAD &&
git checkout main &&
git submodule update &&
(cd submodule &&
git submodule add ../submodule subsubmodule &&
git submodule init &&
git commit -m "new_subsubmodule"
) &&
git add submodule &&
git commit -m "update_submodule" &&
(cd submodule &&
(cd subsubmodule &&
test_commit "update_subsubmodule" file
) &&
git add subsubmodule &&
test_commit "update_submodule_again" file &&
(cd subsubmodule &&
test_commit "update_subsubmodule_again" file
) &&
test_commit "update_submodule_again_again" file
) &&
(cd submodule2 &&
git rev-parse --verify HEAD >../expect &&
test_commit "update_submodule2_again" file
) &&
git add submodule &&
git add submodule2 &&
git commit -m "new_commits" &&
git checkout HEAD^ &&
(cd submodule &&
git checkout HEAD^ &&
(cd subsubmodule &&
echo "" > file
)
) &&
test_must_fail git submodule update --recursive &&
(cd submodule2 &&
git rev-parse --verify HEAD >../actual
) &&
test_cmp expect actual
)
'
test_expect_success 'submodule update exit immediately in case of merge conflict' '
(cd super &&
git checkout main &&
git reset --hard HEAD &&
(cd submodule &&
(cd subsubmodule &&
git reset --hard HEAD
)
) &&
git submodule update --recursive &&
(cd submodule &&
test_commit "update_submodule_2" file
) &&
(cd submodule2 &&
test_commit "update_submodule2_2" file
) &&
git add submodule &&
git add submodule2 &&
git commit -m "two_new_submodule_commits" &&
(cd submodule &&
git checkout main &&
test_commit "conflict" file &&
echo "conflict" > file
) &&
git checkout HEAD^ &&
(cd submodule2 &&
git rev-parse --verify HEAD >../expect
) &&
git config submodule.submodule.update merge &&
test_must_fail git submodule update &&
(cd submodule2 &&
git rev-parse --verify HEAD >../actual
) &&
test_cmp expect actual
)
'
test_expect_success 'submodule update exit immediately after recursive rebase error' '
(cd super &&
git checkout main &&
git reset --hard HEAD &&
(cd submodule &&
git reset --hard HEAD &&
git submodule update --recursive
) &&
(cd submodule &&
test_commit "update_submodule_3" file
) &&
(cd submodule2 &&
test_commit "update_submodule2_3" file
) &&
git add submodule &&
git add submodule2 &&
git commit -m "two_new_submodule_commits" &&
(cd submodule &&
git checkout main &&
test_commit "conflict2" file &&
echo "conflict" > file
) &&
git checkout HEAD^ &&
(cd submodule2 &&
git rev-parse --verify HEAD >../expect
) &&
git config submodule.submodule.update rebase &&
test_must_fail git submodule update &&
(cd submodule2 &&
git rev-parse --verify HEAD >../actual
) &&
test_cmp expect actual
)
'
test_expect_success 'add different submodules to the same path' '
(cd super &&
git submodule add ../submodule s1 &&
test_must_fail git submodule add ../merging s1
)
'
test_expect_success 'submodule add places git-dir in superprojects git-dir' '
(cd super &&
mkdir deeper &&
git submodule add ../submodule deeper/submodule &&
(cd deeper/submodule &&
git log > ../../expected
) &&
(cd .git/modules/deeper/submodule &&
git log > ../../../../actual
) &&
test_cmp expected actual
)
'
test_expect_success 'submodule update places git-dir in superprojects git-dir' '
(cd super &&
git commit -m "added submodule"
) &&
git clone super super2 &&
(cd super2 &&
git submodule init deeper/submodule &&
git submodule update &&
(cd deeper/submodule &&
git log > ../../expected
) &&
(cd .git/modules/deeper/submodule &&
git log > ../../../../actual
) &&
test_cmp expected actual
)
'
test_expect_success 'submodule add places git-dir in superprojects git-dir recursive' '
(cd super2 &&
(cd deeper/submodule &&
git submodule add ../submodule subsubmodule &&
(cd subsubmodule &&
git log > ../../../expected
) &&
git commit -m "added subsubmodule" &&
git push origin :
) &&
(cd .git/modules/deeper/submodule/modules/subsubmodule &&
git log > ../../../../../actual
) &&
git add deeper/submodule &&
git commit -m "update submodule" &&
git push origin : &&
test_cmp expected actual
)
'
test_expect_success 'submodule update places git-dir in superprojects git-dir recursive' '
mkdir super_update_r &&
(cd super_update_r &&
git init --bare
) &&
mkdir subsuper_update_r &&
(cd subsuper_update_r &&
git init --bare
) &&
mkdir subsubsuper_update_r &&
(cd subsubsuper_update_r &&
git init --bare
) &&
git clone subsubsuper_update_r subsubsuper_update_r2 &&
(cd subsubsuper_update_r2 &&
test_commit "update_subsubsuper" file &&
git push origin main
) &&
git clone subsuper_update_r subsuper_update_r2 &&
(cd subsuper_update_r2 &&
test_commit "update_subsuper" file &&
git submodule add ../subsubsuper_update_r subsubmodule &&
git commit -am "subsubmodule" &&
git push origin main
) &&
git clone super_update_r super_update_r2 &&
(cd super_update_r2 &&
test_commit "update_super" file &&
git submodule add ../subsuper_update_r submodule &&
git commit -am "submodule" &&
git push origin main
) &&
rm -rf super_update_r2 &&
git clone super_update_r super_update_r2 &&
(cd super_update_r2 &&
git submodule update --init --recursive >actual &&
test_i18ngrep "Submodule path .submodule/subsubmodule.: checked out" actual &&
(cd submodule/subsubmodule &&
git log > ../../expected
) &&
(cd .git/modules/submodule/modules/subsubmodule &&
git log > ../../../../../actual
) &&
test_cmp expected actual
)
'
test_expect_success 'submodule add properly re-creates deeper level submodules' '
(cd super &&
git reset --hard main &&
rm -rf deeper/ &&
git submodule add --force ../submodule deeper/submodule
)
'
test_expect_success 'submodule update properly revives a moved submodule' '
(cd super &&
H=$(git rev-parse --short HEAD) &&
git commit -am "pre move" &&
H2=$(git rev-parse --short HEAD) &&
git status >out &&
sed "s/$H/XXX/" out >expect &&
H=$(cd submodule2 && git rev-parse HEAD) &&
git rm --cached submodule2 &&
rm -rf submodule2 &&
mkdir -p "moved/sub module" &&
git update-index --add --cacheinfo 160000 $H "moved/sub module" &&
git config -f .gitmodules submodule.submodule2.path "moved/sub module" &&
git commit -am "post move" &&
git submodule update &&
git status > out &&
sed "s/$H2/XXX/" out >actual &&
test_cmp expect actual
)
'
test_expect_success SYMLINKS 'submodule update can handle symbolic links in pwd' '
mkdir -p linked/dir &&
ln -s linked/dir linkto &&
(cd linkto &&
git clone "$TRASH_DIRECTORY"/super_update_r2 super &&
(cd super &&
git submodule update --init --recursive
)
)
'
test_expect_success 'submodule update clone shallow submodule' '
test_when_finished "rm -rf super3" &&
first=$(git -C cloned rev-parse HEAD:submodule) &&
second=$(git -C submodule rev-parse HEAD) &&
commit_count=$(git -C submodule rev-list --count $first^..$second) &&
git clone cloned super3 &&
pwd=$(pwd) &&
(
cd super3 &&
sed -e "s#url = ../#url = file://$pwd/#" <.gitmodules >.gitmodules.tmp &&
mv -f .gitmodules.tmp .gitmodules &&
git submodule update --init --depth=$commit_count &&
git -C submodule log --oneline >out &&
test_line_count = 1 out
)
'
test_expect_success 'submodule update clone shallow submodule outside of depth' '
test_when_finished "rm -rf super3" &&
git clone cloned super3 &&
pwd=$(pwd) &&
(
cd super3 &&
sed -e "s#url = ../#url = file://$pwd/#" <.gitmodules >.gitmodules.tmp &&
mv -f .gitmodules.tmp .gitmodules &&
# Some protocol versions (e.g. 2) support fetching
# unadvertised objects, so restrict this test to v0.
test_must_fail env GIT_TEST_PROTOCOL_VERSION=0 \
git submodule update --init --depth=1 2>actual &&
test_i18ngrep "Direct fetching of that commit failed." actual &&
git -C ../submodule config uploadpack.allowReachableSHA1InWant true &&
git submodule update --init --depth=1 >actual &&
git -C submodule log --oneline >out &&
test_line_count = 1 out
)
'
test_expect_success 'submodule update --recursive drops module name before recursing' '
(cd super2 &&
(cd deeper/submodule/subsubmodule &&
git checkout HEAD^
) &&
git submodule update --recursive deeper/submodule >actual &&
test_i18ngrep "Submodule path .deeper/submodule/subsubmodule.: checked out" actual
)
'
test_expect_success 'submodule update can be run in parallel' '
(cd super2 &&
GIT_TRACE=$(pwd)/trace.out git submodule update --jobs 7 &&
grep "7 tasks" trace.out &&
git config submodule.fetchJobs 8 &&
GIT_TRACE=$(pwd)/trace.out git submodule update &&
grep "8 tasks" trace.out &&
GIT_TRACE=$(pwd)/trace.out git submodule update --jobs 9 &&
grep "9 tasks" trace.out
)
'
test_expect_success 'git clone passes the parallel jobs config on to submodules' '
test_when_finished "rm -rf super4" &&
GIT_TRACE=$(pwd)/trace.out git clone --recurse-submodules --jobs 7 . super4 &&
grep "7 tasks" trace.out &&
rm -rf super4 &&
git config --global submodule.fetchJobs 8 &&
GIT_TRACE=$(pwd)/trace.out git clone --recurse-submodules . super4 &&
grep "8 tasks" trace.out &&
rm -rf super4 &&
GIT_TRACE=$(pwd)/trace.out git clone --recurse-submodules --jobs 9 . super4 &&
grep "9 tasks" trace.out &&
rm -rf super4
'
test_expect_success 'submodule update --quiet passes quietness to merge/rebase' '
(cd super &&
test_commit -C rebasing message &&
git submodule update --rebase --quiet >out 2>err &&
test_must_be_empty out &&
test_must_be_empty err &&
git submodule update --rebase -v >out 2>err &&
test_file_not_empty out &&
test_must_be_empty err
)
'
test_done
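# Illustrative usage (assuming the standard Git test harness conventions,
# which are not restated in this file): the script can be run on its own from
# the t/ directory, e.g.
#
#   ./t7406-submodule-update.sh -v -i
#
# where -v shows each test's output and -i stops at the first failure.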
|
tacker66/git
|
t/t7406-submodule-update.sh
|
Shell
|
gpl-2.0
| 27,862 |
#! /bin/sh
pyFoamClearCase.py .
rm -f 0/*.gz
blockMesh
changeDictionary
funkySetFields -time 0
|
Unofficial-Extend-Project-Mirror/openfoam-extend-Breeder1.7-libraries-swak4Foam
|
Examples/tests/mappingChannels/prepareCase.sh
|
Shell
|
gpl-2.0
| 98 |
#!/bin/bash
dir=$GOPATH/src/go.dedis.ch/cothority
cwd=$PWD
cd $dir/conode
go install
cd $cwd
echo "" > public.toml
for (( n=1; n<=7; n++ )) do
printf "127.0.0.1:77%02d\nConode_$n\nco$n\nY\nY\n" $((70 + 2*$n - 2)) | conode setup
cat "co$n/public.toml" >> public.toml
echo "" >> public.toml
done
cp public.toml $dir/external/java/src/test/resources/.
cp public.toml $dir/external/js/cothority/spec/support/.
|
DeDiS/cothority
|
external/docker/generate_conode_toml.sh
|
Shell
|
gpl-2.0
| 423 |
#!/bin/bash
UPLOADDIR=forupload-revised
if [ -d "$UPLOADDIR" ]; then
echo "Directory '$UPLOADDIR' already exists, remove/rename it and rerun this script"
exit 1
fi
if [ -f "$UPLOADDIR.tgz" ]; then
echo "File '$UPLOADDIR.tgz' already exists, remove/rename it and rerun this script"
exit 2
fi
FILES="\
cpnew-revised.sh\
genplot-revised.sh\
iitb-papers-revised.sh\
tabulate-marks-revised.sh\
zrn-revised.sh\
"
if [ ! -f "complaints.txt" ]; then
echo "Expecting to find file complaints.txt explaining your complaints"
exit 3
fi
for file in $FILES
do
if [ -f $file ]; then
echo "Found $file for revised submission"
else
echo "$file not found; perhaps you don't have a complaint in that question, that's great!"
fi
done
mkdir $UPLOADDIR
cp "complaints.txt" $UPLOADDIR/
echo "Copied 'complaints.txt' to '$UPLOADDIR'"
for file in $FILES
do
if [ -f $file ]; then
cp $file $UPLOADDIR/
echo "Copied file '$file' to '$UPLOADDIR'"
fi
done
tar zcvf $UPLOADDIR.tgz $UPLOADDIR
echo "Created file '$UPLOADDIR.tgz' for uploading"
echo "md5sum of $UPLOADDIR.tgz is as below: check on browser after successful upload"
md5sum "$UPLOADDIR.tgz"
|
sm88/cs699
|
midsem/complaint/prepare-upload-revised.sh
|
Shell
|
gpl-2.0
| 1,184 |
#!/bin/sh
# Run this to set up the build system: configure, makefiles, etc.
set -e
srcdir=$(dirname $0)
test -n "$srcdir" && cd "$srcdir"
echo "Updating build configure files for Ultimarc-linux, please wait..."
autoreconf -isf
|
philenotfound/Ultimarc-linux
|
autogen.sh
|
Shell
|
gpl-2.0
| 231 |
#!/bin/sh
# Ensure that cp --parents works properly with a preexisting dest. directory
# Copyright (C) 2008-2017 Free Software Foundation, Inc.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
. "${srcdir=.}/tests/init.sh"; path_prepend_ ./src
print_ver_ cp
working_umask_or_skip_
# cp -p gives ENOTSUP on NFS on Linux 2.6.9 at least
require_local_dir_
mkdir -p a/b/c a/b/d e || framework_failure_
touch a/b/c/foo a/b/d/foo || framework_failure_
cp -p --parent a/b/c/foo e || framework_failure_
# Make permissions of e/a different, so that we exercise the
# code in cp -p --parents that propagates permissions even
# to a destination directory that it doesn't create.
chmod g-rx e/a e/a/b || framework_failure_
cp -p --parent a/b/d/foo e || fail=1
# Ensure that permissions on just-created directory, e/a/,
# are the same as those on original, a/.
# The sed filter maps any 's' from an inherited set-GID bit
# to the usual 'x'. Otherwise, under unusual circumstances, this
# test would fail with e.g., drwxr-sr-x != drwxr-xr-x .
# For reference, the unusual circumstances is: build dir is set-gid,
# so "a/" inherits that. However, when the user does not belong to
# the group of the build directory, chmod ("a/e", 02755) returns 0,
# yet fails to set the S_ISGID bit.
for dir in a a/b a/b/d; do
test $(stat --printf %A $dir|sed s/s/x/g) \
= $(stat --printf %A e/$dir|sed s/s/x/g) ||
fail=1
done
Exit $fail
|
adtools/coreutils
|
tests/cp/parent-perm.sh
|
Shell
|
gpl-3.0
| 2,011 |
_esmith_signal-event ()
{
if [ ! $(which $1 2>/dev/null) ]; then return 0; fi
local cur; cur=${COMP_WORDS[$COMP_CWORD]}
case $COMP_CWORD in
1)
COMPREPLY=( $(find /etc/e-smith/events/ -maxdepth 1 -type d \
\( -name 'actions' -prune -o \
-name "$cur*" -printf "%f\n" \)) )
;;
*) ;;
esac
return 0
}
complete -F _esmith_signal-event signal-event
_esmith_expand-template ()
{
if [ ! $(which $1 2>/dev/null) ]; then return 0; fi
local cur; cur=${COMP_WORDS[$COMP_CWORD]}
case $COMP_CWORD in
1) # need to distinguish between templates and fragments
COMPREPLY=( $(find /etc/e-smith/templates \
/etc/e-smith/templates-custom \
-regex "/etc/e-smith/templates\(-custom\)?$cur.*" \
-printf "[ -f /%P ] && echo /%P\n" \
| sh | uniq) )
;;
*) ;;
esac
return 0
}
complete -F _esmith_expand-template expand-template
_esmith_db ()
{
if [ ! $(which $1 2>/dev/null) ]; then return 0; fi
local cur; cur=${COMP_WORDS[$COMP_CWORD]}
case $COMP_CWORD in
1) # config file
COMPREPLY=( $(find /var/lib/nethserver/db -maxdepth 1 -type f \
\( -name '.*' -prune -o \
-name "$cur*" -printf "%f\n" \)) )
;;
2) # subcommand
COMPREPLY=( $(/sbin/e-smith/db 2>&1 |awk '{print $3}' \
|grep "^$cur" ) )
;;
3) # key
local file; file=${COMP_WORDS[1]}
local cmd; cmd=${COMP_WORDS[2]}
local haskey
haskey=$(/sbin/e-smith/db 2>&1 | grep "dbfile $cmd" | awk '{print $4}')
if [ -n "$haskey" ]; then
COMPREPLY=( $(/sbin/e-smith/db $file keys |grep "^$cur") )
fi
;;
*) # type/prop/val
local file; file=${COMP_WORDS[1]}
local cmd; cmd=${COMP_WORDS[2]}
local key; key=${COMP_WORDS[3]}
local i; i=$COMP_CWORD
local prev
local valtype
while [ "$valtype" == "..." ] || [ "$valtype" == "" ]; do
prev=${COMP_WORDS[$[i-1]]}
PAT='$3'
for j in $(seq 4 $[i+1]); do PAT="$PAT,\$$j"; done
valtype=$(/sbin/e-smith/db 2>&1 | awk "{print $PAT}" \
| grep "^$cmd" | awk "{print \$$[i-1]}")
i=$[i-2]
done
case $(echo "$valtype" |sed -e 's/[][0-9]//g') in
"type") COMPREPLY=( $(/sbin/e-smith/db $file gettype $key \
| grep "^$cur") )
;;
"prop")
COMPREPLY=( $(/sbin/e-smith/db $file printprop $key \
| sed -e 's/=.*//' | grep "^$cur") )
;;
"val") COMPREPLY=( $(/sbin/e-smith/db $file getprop $key $prev \
| grep "^$cur"))
;;
*) ;;
esac
;;
esac
return 0
}
complete -F _esmith_db db
_esmith_config ()
{
cmd=$(echo $1 | sed -e 's/config$/db/')
COMP_WORDS=($cmd ${COMP_WORDS[*]})
COMP_WORDS[1]=configuration
COMP_CWORD=$[ $COMP_CWORD + 1 ]
_esmith_db $*
return $?
}
complete -F _esmith_config config
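# Illustrative usage (interactive shell assumed; not part of this file): once
# sourced, the completions defined above become available, e.g.
#
#   . e-smith-lib_compspec.sh
#   signal-event <TAB>   # completes event names from /etc/e-smith/events/
#   db <TAB>             # completes database files from /var/lib/nethserver/db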
|
NethServer/nethserver-base
|
root/etc/profile.d/e-smith-lib_compspec.sh
|
Shell
|
gpl-3.0
| 2,800 |
#!/bin/sh
#
# srecord - manipulate eprom load files
# Copyright (C) 2001, 2006-2008 Peter Miller
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
TEST_SUBJECT="bitwise-not filter"
. test_prelude
cat > test.in << 'fubar'
S00600004844521B
S111000048656C6C6F2C20576F726C64210A7B
S5030001FB
S9030000FC
fubar
if test $? -ne 0; then no_result; fi
cat > test.ok << 'fubar'
S00600004844521B
S1110000B79A939390D3DFA8908D939BDEF56F
S5030001FB
S9030000FC
fubar
if test $? -ne 0; then no_result; fi
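#
# For reference: the expected data bytes above are the bitwise complement of
# the input data ("Hello, World!\n"), e.g. 0x48 ('H') -> 0xB7 and 0x65 ('e')
# -> 0x9A. Record type, byte count and address are unchanged; the checksum is
# recomputed.
#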
srec_cat test.in -not -o test.out
if test $? -ne 0; then fail; fi
diff test.ok test.out
if test $? -ne 0; then fail; fi
#
# The things tested here, worked.
# No other guarantees are made.
#
pass
|
freyc/SRecord
|
test/00/t0061a.sh
|
Shell
|
gpl-3.0
| 1,356 |
#!/bin/sh
###################################################################################################################################
#
# Copyright 2014-2018 IRD-CIRAD-INRA-ADNid
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/> or
# write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# You should have received a copy of the CeCILL-C license with this program.
#If not see <http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.txt>
#
# Intellectual property belongs to IRD, CIRAD and South Green developpement plateform for all versions also for ADNid for v2 and v3 and INRA for v3
# Version 1 written by Cecile Monat, Ayite Kougbeadjo, Christine Tranchant, Cedric Farcy, Mawusse Agbessi, Maryline Summo, and Francois Sabot
# Version 2 written by Cecile Monat, Christine Tranchant, Cedric Farcy, Enrique Ortega-Abboud, Julie Orjuela-Bouniol, Sebastien Ravel, Souhila Amanzougarene, and Francois Sabot
# Version 3 written by Cecile Monat, Christine Tranchant, Laura Helou, Abdoulaye Diallo, Julie Orjuela-Bouniol, Sebastien Ravel, Gautier Sarah, and Francois Sabot
#
###################################################################################################################################
cd modules
################################ TEST MODULE
for file in *.t;
do
cmd="prove -v $file";
echo "
#########################################################
########### $cmd #################
#########################################################";
$cmd;
echo "
#########################################################
########### $cmd DONE ##############
#########################################################";
done
|
SouthGreenPlatform/TOGGLE-DEV
|
test/allTestModules.sh
|
Shell
|
gpl-3.0
| 2,309 |
#!/usr/bin/env bash
set -o errexit
source /tmp/00-settings.sh || true
[[ $(whoami) == 'root' ]] || exec sudo su -c "$0" root
cd /
umount -l /mnt/gentoo/dev{/shm,/pts,} || true
umount -l /mnt/gentoo{/boot,/proc,} || true
rm -rf /mnt/gentoo || true
|
sikofitt/genstall
|
genstall.d/99-final.sh
|
Shell
|
gpl-3.0
| 246 |
#!/bin/bash
rm -rf svg # -f so the first run does not error when svg/ does not exist yet
mkdir svg
for f in *.dia
do
dia -e "svg/`basename "$f" .dia`.svg" "$f"
done
|
ubruhin/LibrePCB
|
dev/diagrams/convert.sh
|
Shell
|
gpl-3.0
| 102 |
#!/bin/bash
#PBS -l walltime=4:00:00
#PBS -l nodes=1:ppn=2
#PBS -l vmem=32G
#PBS -N Baltic_2_2_4_250_5_no_no_60_78
cd /zhome/fc/e/102910/maritime-vrp/build
LD_LIBRARY_PATH=/zhome/fc/e/102910/gcc/lib64 ./maritime_vrp ../data/old_thesis_data/program_params.json ../data/new/Baltic_2_2_4_250_5_no_no_60_78.json
|
OR-Bologna/maritime-vrp
|
opt/launchers/Baltic_2_2_4_250_5_no_no_60_78.sh
|
Shell
|
gpl-3.0
| 308 |
#!/bin/bash
#Run this in the repo root after compiling
#First arg is path to where you want to deploy
#creates a work tree free of everything except what's necessary to run the game
#second arg is working directory if necessary
if [[ $# -eq 2 ]] ; then
cd $2
fi
mkdir -p \
$1/_maps \
$1/icons \
$1/sound/chatter \
$1/sound/voice/complionator \
$1/sound/instruments \
$1/strings
if [ -d ".git" ]; then
mkdir -p $1/.git/logs
cp -r .git/logs/* $1/.git/logs/
fi
cp yogstation.dmb yogstation.rsc $1/
cp -r _maps/* $1/_maps/
cp icons/default_title.dmi $1/icons/
cp -r sound/chatter/* $1/sound/chatter/
cp -r sound/voice/complionator/* $1/sound/voice/complionator/
cp -r sound/instruments/* $1/sound/instruments/
cp -r strings/* $1/strings/
#remove .dm files from _maps
#this regrettably doesn't work with windows find
#find $1/_maps -name "*.dm" -type f -delete
#dlls on windows
cp rust_g* $1/ || true
cp *byond-extools.* $1/ || true
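# Illustrative invocations (paths are hypothetical):
#
#   tools/deploy.sh /srv/yogstation            # deploy into /srv/yogstation
#   tools/deploy.sh /srv/yogstation ~/yogrepo  # same, but run from ~/yogrepo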
|
ToGWtF/yogstation
|
tools/deploy.sh
|
Shell
|
agpl-3.0
| 965 |
#!/bin/bash
# Script for the M4ATX power supply to ensure proper permissions when executing the ROS node
BUS_NUM=$(lsusb | grep "04d8:d001 Microchip Technology, Inc." | awk '{print $2}')
DEV_NUM=$(lsusb | grep "04d8:d001 Microchip Technology, Inc." | awk '{print $4}' | cut -c 1-3)
LOCATION="/dev/bus/usb/${BUS_NUM}/${DEV_NUM}"
chmod a+rw ${LOCATION}
|
GT-RAIL/m4atx_battery_monitor
|
scripts/m4atx_permissions.bash
|
Shell
|
lgpl-3.0
| 359 |
#!/bin/bash
#
# This is job step0_0
#
#
## Start of header for backend 'local'.
#
set -e
set -u
ENVIRONMENT_DIR='.'
#
# Variables declared in MOLGENIS Compute headers/footers always start with an MC_ prefix.
#
declare MC_jobScript="step0_0.sh"
declare MC_jobScriptSTDERR="step0_0.err"
declare MC_jobScriptSTDOUT="step0_0.out"
declare MC_failedFile="molgenis.pipeline.failed"
declare MC_singleSeperatorLine=$(head -c 120 /dev/zero | tr '\0' '-')
declare MC_doubleSeperatorLine=$(head -c 120 /dev/zero | tr '\0' '=')
declare MC_tmpFolder='tmpFolder'
declare MC_tmpFile='tmpFile'
declare MC_tmpFolderCreated=0
#
##
### Header functions.
##
#
function errorExitAndCleanUp() {
local _signal="${1}"
local _problematicLine="${2}"
local _exitStatus="${3:-$?}"
local _executionHost="$(hostname)"
local _format='INFO: Last 50 lines or less of %s:\n'
local _errorMessage="FATAL: Trapped ${_signal} signal in ${MC_jobScript} running on ${_executionHost}. Exit status code was ${_exitStatus}."
if [ "${_signal}" == 'ERR' ]; then
_errorMessage="FATAL: Trapped ${_signal} signal on line ${_problematicLine} in ${MC_jobScript} running on ${_executionHost}. Exit status code was ${_exitStatus}."
fi
_errorMessage=${4:-"${_errorMessage}"} # Optionally use custom error message as 4th argument.
echo "${_errorMessage}"
echo "${MC_doubleSeperatorLine}" > "${MC_failedFile}"
echo "${_errorMessage}" >> "${MC_failedFile}"
if [ -f "${MC_jobScriptSTDERR}" ]; then
echo "${MC_singleSeperatorLine}" >> "${MC_failedFile}"
printf "${_format}" "${MC_jobScriptSTDERR}" >> "${MC_failedFile}"
echo "${MC_singleSeperatorLine}" >> "${MC_failedFile}"
tail -50 "${MC_jobScriptSTDERR}" >> "${MC_failedFile}"
fi
if [ -f "${MC_jobScriptSTDOUT}" ]; then
echo "${MC_singleSeperatorLine}" >> "${MC_failedFile}"
printf "${_format}" "${MC_jobScriptSTDOUT}" >> "${MC_failedFile}"
echo "${MC_singleSeperatorLine}" >> "${MC_failedFile}"
tail -50 "${MC_jobScriptSTDOUT}" >> "${MC_failedFile}"
fi
echo "${MC_doubleSeperatorLine}" >> "${MC_failedFile}"
}
#
# Create tmp dir per script/job.
# To be called with with either a file or folder as first and only argument.
# Defines two globally set variables:
# 1. MC_tmpFolder: a tmp dir for this job/script. When function is called multiple times MC_tmpFolder will always be the same.
# 2. MC_tmpFile: when the first argument was a folder, MC_tmpFile == MC_tmpFolder
# when the first argument was a file, MC_tmpFile will be a path to a tmp file inside MC_tmpFolder.
#
function makeTmpDir {
#
# Compile paths.
#
local _originalPath="${1}"
local _myMD5="$(md5sum ${MC_jobScript} | cut -d ' ' -f 1)"
local _tmpSubFolder="tmp_${MC_jobScript}_${_myMD5}"
local _dir
local _base
if [[ -d "${_originalPath}" ]]; then
_dir="${_originalPath}"
_base=''
else
_base=$(basename "${_originalPath}")
_dir=$(dirname "${_originalPath}")
fi
MC_tmpFolder="${_dir}/${_tmpSubFolder}/"
MC_tmpFile="${MC_tmpFolder}/${_base}"
echo "DEBUG ${MC_jobScript}::makeTmpDir: dir='${_dir}';base='${_base}';MC_tmpFile='${MC_tmpFile}'"
#
# Cleanup the previously created tmpFolder first if this script was resubmitted.
#
if [[ ${MC_tmpFolderCreated} -eq 0 && -d "${MC_tmpFolder}" ]]; then
rm -rf "${MC_tmpFolder}"
fi
#
# (Re-)create tmpFolder.
#
mkdir -p "${MC_tmpFolder}"
MC_tmpFolderCreated=1
}
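# Illustrative call (hypothetical path; the md5 part depends on this script's
# own contents): makeTmpDir "results/out.txt" would set
#   MC_tmpFolder="results/tmp_step0_0.sh_<md5>/"
#   MC_tmpFile="results/tmp_step0_0.sh_<md5>//out.txt"
# whereas calling it with an existing directory makes MC_tmpFile point at
# MC_tmpFolder itself (empty basename).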
trap 'errorExitAndCleanUp HUP NA $?' HUP
trap 'errorExitAndCleanUp INT NA $?' INT
trap 'errorExitAndCleanUp QUIT NA $?' QUIT
trap 'errorExitAndCleanUp TERM NA $?' TERM
trap 'errorExitAndCleanUp EXIT NA $?' EXIT
trap 'errorExitAndCleanUp ERR $LINENO $?' ERR
touch "${MC_jobScript}.started"
#
## End of header for backend 'local'
#
#
## Generated header
#
# Assign values to the parameters in this script
# Set taskId, which is the job name of this task
taskId="step0_0"
# Make compute.properties available
rundir="TEST_PROPERTY(project.basedir)/target/test/benchmark/run/testExtraVariable"
runid="testExtraVariable"
workflow="src/main/resources/workflows/benchmark.5.1/workflow.extra.variable.csv"
parameters="src/main/resources/workflows/benchmark.5.1/parameters.csv"
user="TEST_PROPERTY(user.name)"
database="none"
backend="localhost"
port="80"
interval="2000"
path="."
# Connect parameters to environment
input="hello"
# Validate that each 'value' parameter has only identical values in its list
# We do that to protect you against parameter values that might not be correctly set at runtime.
if [[ ! $(IFS=$'\n' sort -u <<< "${input[*]}" | wc -l | sed -e 's/^[[:space:]]*//') = 1 ]]; then echo "Error in Step 'step0': input parameter 'input' is an array with different values. Maybe 'input' is a runtime parameter with 'more variable' values than what was folded on generation-time?" >&2; exit 1; fi
#
## Start of your protocol template
#
#string input
# Let's do something with string 'in'
echo "${input}_hasBeenInStep1"
${extra_variable}
#
## End of your protocol template
#
# Save output in environment file: '$ENVIRONMENT_DIR/step0_0.env' with the output vars of this step
echo "" >> $ENVIRONMENT_DIR/step0_0.env
chmod 755 $ENVIRONMENT_DIR/step0_0.env
#
## Start of footer for backend 'local'.
#
if [ -d "${MC_tmpFolder:-}" ]; then
echo -n "INFO: Removing MC_tmpFolder ${MC_tmpFolder} ..."
rm -rf "${MC_tmpFolder}"
echo 'done.'
fi
tS=${SECONDS:-0}
tM=$((SECONDS / 60 ))
tH=$((SECONDS / 3600))
echo "On $(date +"%Y-%m-%d %T") ${MC_jobScript} finished successfully after ${tM} minutes." >> molgenis.bookkeeping.log
printf '%s:\t%d seconds\t%d minutes\t%d hours\n' "${MC_jobScript}" "${tS}" "${tM}" "${tH}" >> molgenis.bookkeeping.walltime
mv "${MC_jobScript}".{started,finished}
trap - EXIT
exit 0
|
molgenis/molgenis-compute
|
molgenis-compute-core/src/test/resources/expected/testExtraVariable/step0_0.sh
|
Shell
|
lgpl-3.0
| 5,813 |
#!/bin/bash
set -e
function logInToPaas() {
local redownloadInfra="${REDOWNLOAD_INFRA}"
local ca="PAAS_${ENVIRONMENT}_CA"
local k8sCa="${!ca}"
local clientCert="PAAS_${ENVIRONMENT}_CLIENT_CERT"
local k8sClientCert="${!clientCert}"
local clientKey="PAAS_${ENVIRONMENT}_CLIENT_KEY"
local k8sClientKey="${!clientKey}"
local tokenPath="PAAS_${ENVIRONMENT}_CLIENT_TOKEN_PATH"
local k8sTokenPath="${!tokenPath}"
local clusterName="PAAS_${ENVIRONMENT}_CLUSTER_NAME"
local k8sClusterName="${!clusterName}"
local clusterUser="PAAS_${ENVIRONMENT}_CLUSTER_USERNAME"
local k8sClusterUser="${!clusterUser}"
local systemName="PAAS_${ENVIRONMENT}_SYSTEM_NAME"
local k8sSystemName="${!systemName}"
local api="PAAS_${ENVIRONMENT}_API_URL"
local apiUrl="${!api:-192.168.99.100:8443}"
local cliInstalled
cliInstalled="$("${KUBECTL_BIN}" version && echo "true" || echo "false")"
local cliDownloaded
cliDownloaded="$(test -r "${KUBECTL_BIN}" && echo "true" || echo "false")"
echo "CLI Installed? [${cliInstalled}], CLI Downloaded? [${cliDownloaded}]"
if [[ ${cliInstalled} == "false" && ( ${cliDownloaded} == "false" || ${cliDownloaded} == "true" && ${redownloadInfra} == "true" ) ]]; then
echo "Downloading CLI"
curl -LO "https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/darwin/amd64/kubectl" --fail
local cliDownloaded="true"
else
echo "CLI is already installed or was already downloaded but the flag to redownload was disabled"
fi
if [[ ${cliDownloaded} == "true" ]]; then
echo "Adding CLI to PATH"
PATH="${PATH}:$(pwd)"
chmod +x "${KUBECTL_BIN}"
fi
echo "Removing current Kubernetes configuration"
rm -rf "${KUBE_CONFIG_PATH}" || echo "Failed to remove Kube config. Continuing with the script"
echo "Logging in to Kubernetes API [${apiUrl}], with cluster name [${k8sClusterName}] and user [${k8sClusterUser}]"
"${KUBECTL_BIN}" config set-cluster "${k8sClusterName}" --server="https://${apiUrl}" --certificate-authority="${k8sCa}" --embed-certs=true
# TOKEN will get injected as a credential if present
if [[ "${TOKEN}" != "" ]]; then
"${KUBECTL_BIN}" config set-credentials "${k8sClusterUser}" --token="${TOKEN}"
elif [[ "${k8sTokenPath}" != "" ]]; then
local tokenContent
tokenContent="$(cat "${k8sTokenPath}")"
"${KUBECTL_BIN}" config set-credentials "${k8sClusterUser}" --token="${tokenContent}"
else
"${KUBECTL_BIN}" config set-credentials "${k8sClusterUser}" --certificate-authority="${k8sCa}" --client-key="${k8sClientKey}" --client-certificate="${k8sClientCert}"
fi
"${KUBECTL_BIN}" config set-context "${k8sSystemName}" --cluster="${k8sClusterName}" --user="${k8sClusterUser}"
"${KUBECTL_BIN}" config use-context "${k8sSystemName}"
echo "CLI version"
"${KUBECTL_BIN}" version
}
function testDeploy() {
local appName
appName=$(retrieveAppName)
# Log in to PaaS to start deployment
logInToPaas
deployServices
# deploy app
deployAndRestartAppWithNameForSmokeTests "${appName}" "${PIPELINE_VERSION}"
}
function testRollbackDeploy() {
rm -rf "${OUTPUT_FOLDER}/test.properties"
local latestProdTag="${1}"
local appName
appName=$(retrieveAppName)
local latestProdVersion
latestProdVersion="${latestProdTag#prod/}"
echo "Last prod version equals ${latestProdVersion}"
logInToPaas
parsePipelineDescriptor
deployAndRestartAppWithNameForSmokeTests "${appName}" "${latestProdVersion}"
# Adding latest prod tag
echo "LATEST_PROD_TAG=${latestProdTag}" >>"${OUTPUT_FOLDER}/test.properties"
}
function deployService() {
local serviceType
serviceType="$(toLowerCase "${1}")"
local serviceName
serviceName="${2}"
local serviceCoordinates
serviceCoordinates="$(if [[ "${3}" == "null" ]]; then
echo "";
else
echo "${3}";
fi)"
local coordinatesSeparator=":"
echo "Will deploy service with type [${serviceType}] name [${serviceName}] and coordinates [${serviceCoordinates}]"
case ${serviceType} in
rabbitmq)
deployRabbitMq "${serviceName}"
;;
mysql)
deployMySql "${serviceName}"
;;
eureka)
local previousIfs
previousIfs="${IFS}"
IFS=${coordinatesSeparator} read -r EUREKA_ARTIFACT_ID EUREKA_VERSION <<<"${serviceCoordinates}"
IFS="${previousIfs}"
deployEureka "${EUREKA_ARTIFACT_ID}:${EUREKA_VERSION}" "${serviceName}"
;;
stubrunner)
local uniqueEurekaName
uniqueEurekaName="$(eurekaName)"
local uniqueRabbitName
uniqueRabbitName="$(rabbitMqName)"
local previousIfs
previousIfs="${IFS}"
IFS=${coordinatesSeparator} read -r STUBRUNNER_ARTIFACT_ID STUBRUNNER_VERSION <<<"${serviceCoordinates}"
IFS="${previousIfs}"
local parsedStubRunnerUseClasspath
parsedStubRunnerUseClasspath="$(echo "${PARSED_YAML}" | jq --arg x "${LOWER_CASE_ENV}" '.[$x].services[] | select(.type == "stubrunner") | .useClasspath' | sed 's/^"\(.*\)"$/\1/')"
local stubRunnerUseClasspath
stubRunnerUseClasspath=$(if [[ "${parsedStubRunnerUseClasspath}" == "null" ]]; then
echo "false";
else
echo "${parsedStubRunnerUseClasspath}";
fi)
deployStubRunnerBoot "${STUBRUNNER_ARTIFACT_ID}:${STUBRUNNER_VERSION}" "${REPO_WITH_BINARIES_FOR_UPLOAD}" "${uniqueRabbitName}" "${uniqueEurekaName}" "${serviceName}"
;;
*)
echo "Unknown service [${serviceType}]"
return 1
;;
esac
}
function eurekaName() {
echo "${PARSED_YAML}" | jq --arg x "${LOWER_CASE_ENV}" '.[$x].services[] | select(.type == "eureka") | .name' | sed 's/^"\(.*\)"$/\1/'
}
function rabbitMqName() {
echo "${PARSED_YAML}" | jq --arg x "${LOWER_CASE_ENV}" '.[$x].services[] | select(.type == "rabbitmq") | .name' | sed 's/^"\(.*\)"$/\1/'
}
function mySqlName() {
echo "${PARSED_YAML}" | jq --arg x "${LOWER_CASE_ENV}" '.[$x].services[] | select(.type == "mysql") | .name' | sed 's/^"\(.*\)"$/\1/'
}
function mySqlDatabase() {
echo "${PARSED_YAML}" | jq --arg x "${LOWER_CASE_ENV}" '.[$x].services[] | select(.type == "mysql") | .database' | sed 's/^"\(.*\)"$/\1/'
}
function appSystemProps() {
local systemProps
systemProps=""
# TODO: Not every system needs Eureka or Rabbit. But we need to bind this somehow...
local eurekaName
eurekaName="$(eurekaName)"
local rabbitMqName
rabbitMqName="$(rabbitMqName)"
local mySqlName
mySqlName="$(mySqlName)"
local mySqlDatabase
mySqlDatabase="$(mySqlDatabase)"
if [[ "${eurekaName}" != "" && "${eurekaName}" != "null" ]]; then
systemProps="${systemProps} -Deureka.client.serviceUrl.defaultZone=http://${eurekaName}:8761/eureka"
fi
if [[ "${rabbitMqName}" != "" && "${rabbitMqName}" != "null" ]]; then
systemProps="${systemProps} -DSPRING_RABBITMQ_ADDRESSES=${rabbitMqName}:5672"
fi
if [[ "${mySqlName}" != "" && "${mySqlName}" != "null" ]]; then
systemProps="${systemProps} -Dspring.datasource.url=jdbc:mysql://${mySqlName}/${mySqlDatabase}"
fi
echo "${systemProps}"
}
function deleteService() {
local serviceType="${1}"
local serviceName="${2}"
echo "Deleting all possible entries with name [${serviceName}]"
deleteAppByName "${serviceName}"
}
function deployRabbitMq() {
local serviceName="${1:-rabbitmq-github}"
local objectDeployed
objectDeployed="$(objectDeployed "service" "${serviceName}")"
if [[ "${ENVIRONMENT}" == "STAGE" && "${objectDeployed}" == "true" ]]; then
echo "Service [${serviceName}] already deployed. Won't redeploy for stage"
return
fi
echo "Waiting for RabbitMQ to start"
local originalDeploymentFile="${__ROOT}/k8s/rabbitmq.yml"
local originalServiceFile="${__ROOT}/k8s/rabbitmq-service.yml"
local outputDirectory
outputDirectory="$(outputFolder)/k8s"
mkdir -p "${outputDirectory}"
cp "${originalDeploymentFile}" "${outputDirectory}"
cp "${originalServiceFile}" "${outputDirectory}"
local deploymentFile="${outputDirectory}/rabbitmq.yml"
local serviceFile="${outputDirectory}/rabbitmq-service.yml"
substituteVariables "appName" "${serviceName}" "${deploymentFile}"
substituteVariables "appName" "${serviceName}" "${serviceFile}"
if [[ "${ENVIRONMENT}" == "TEST" ]]; then
deleteAppByFile "${deploymentFile}"
deleteAppByFile "${serviceFile}"
fi
replaceApp "${deploymentFile}"
replaceApp "${serviceFile}"
}
function deployApp() {
local fileName="${1}"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" create -f "${fileName}"
}
function replaceApp() {
local fileName="${1}"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" replace --force -f "${fileName}"
}
function deleteAppByName() {
local serviceName="${1}"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" delete secret "${serviceName}" || result=""
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" delete persistentvolumeclaim "${serviceName}" || result=""
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" delete pod "${serviceName}" || result=""
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" delete deployment "${serviceName}" || result=""
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" delete service "${serviceName}" || result=""
}
function deleteAppByFile() {
local file="${1}"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" delete -f "${file}" || echo "Failed to delete app by [${file}] file. Continuing with the script"
}
function system {
local unameOut
unameOut="$(uname -s)"
case "${unameOut}" in
Linux*) machine=linux ;;
Darwin*) machine=darwin ;;
*) echo "Unsupported system" && exit 1
esac
echo "${machine}"
}
function substituteVariables() {
local variableName="${1}"
local substitution="${2}"
local fileName="${3}"
local escapedSubstitution
escapedSubstitution=$(escapeValueForSed "${substitution}")
#echo "Changing [${variableName}] -> [${escapedSubstitution}] for file [${fileName}]"
if [[ "${SYSTEM}" == "darwin" ]]; then
sed -i "" "s/{{${variableName}}}/${escapedSubstitution}/" "${fileName}"
else
sed -i "s/{{${variableName}}}/${escapedSubstitution}/" "${fileName}"
fi
}
function deployMySql() {
local serviceName="${1:-mysql-github}"
local objectDeployed
objectDeployed="$(objectDeployed "service" "${serviceName}")"
if [[ "${ENVIRONMENT}" == "STAGE" && "${objectDeployed}" == "true" ]]; then
echo "Service [${serviceName}] already deployed. Won't redeploy for stage"
return
fi
local secretName
secretName="mysql-$(retrieveAppName)"
echo "Waiting for MySQL to start"
local originalDeploymentFile="${__ROOT}/k8s/mysql.yml"
local originalServiceFile="${__ROOT}/k8s/mysql-service.yml"
local outputDirectory
outputDirectory="$(outputFolder)/k8s"
mkdir -p "${outputDirectory}"
cp "${originalDeploymentFile}" "${outputDirectory}"
cp "${originalServiceFile}" "${outputDirectory}"
local deploymentFile="${outputDirectory}/mysql.yml"
local serviceFile="${outputDirectory}/mysql-service.yml"
local mySqlDatabase
mySqlDatabase="$(mySqlDatabase)"
echo "Generating secret with name [${secretName}]"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" delete secret "${secretName}" || echo "Failed to delete secret [${serviceName}]. Continuing with the script"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" create secret generic "${secretName}" --from-literal=username="${MYSQL_USER}" --from-literal=password="${MYSQL_PASSWORD}" --from-literal=rootpassword="${MYSQL_ROOT_PASSWORD}"
substituteVariables "appName" "${serviceName}" "${deploymentFile}"
substituteVariables "secretName" "${secretName}" "${deploymentFile}"
substituteVariables "mysqlDatabase" "${mySqlDatabase}" "${deploymentFile}"
substituteVariables "appName" "${serviceName}" "${serviceFile}"
if [[ "${ENVIRONMENT}" == "TEST" ]]; then
deleteAppByFile "${deploymentFile}"
deleteAppByFile "${serviceFile}"
fi
replaceApp "${deploymentFile}"
replaceApp "${serviceFile}"
}
function findAppByName() {
local serviceName
serviceName="${1}"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" get pods -o wide -l app="${serviceName}" | awk -v "app=${serviceName}" '$1 ~ app {print($0)}'
}
function deployAndRestartAppWithName() {
local appName="${1}"
local jarName="${2}"
local env="${LOWER_CASE_ENV}"
echo "Deploying and restarting app with name [${appName}] and jar name [${jarName}]"
deployAppWithName "${appName}" "${jarName}" "${env}" 'true'
restartApp "${appName}"
}
function deployAndRestartAppWithNameForSmokeTests() {
local appName="${1}"
local version="${2}"
local profiles="smoke,kubernetes"
local lowerCaseAppName
lowerCaseAppName=$(toLowerCase "${appName}")
local originalDeploymentFile="deployment.yml"
local originalServiceFile="service.yml"
local outputDirectory
outputDirectory="$(outputFolder)/k8s"
mkdir -p "${outputDirectory}"
cp "${originalDeploymentFile}" "${outputDirectory}"
cp "${originalServiceFile}" "${outputDirectory}"
local deploymentFile="${outputDirectory}/deployment.yml"
local serviceFile="${outputDirectory}/service.yml"
local systemProps
systemProps="-Dspring.profiles.active=${profiles} $(appSystemProps)"
substituteVariables "dockerOrg" "${DOCKER_REGISTRY_ORGANIZATION}" "${deploymentFile}"
substituteVariables "version" "${version}" "${deploymentFile}"
substituteVariables "appName" "${appName}" "${deploymentFile}"
substituteVariables "labelAppName" "${appName}" "${deploymentFile}"
substituteVariables "containerName" "${appName}" "${deploymentFile}"
substituteVariables "systemProps" "${systemProps}" "${deploymentFile}"
substituteVariables "appName" "${appName}" "${serviceFile}"
deleteAppByFile "${deploymentFile}"
deleteAppByFile "${serviceFile}"
deployApp "${deploymentFile}"
deployApp "${serviceFile}"
waitForAppToStart "${appName}"
}
function deployAndRestartAppWithNameForE2ETests() {
local appName="${1}"
local profiles="e2e,kubernetes"
local lowerCaseAppName
lowerCaseAppName=$(toLowerCase "${appName}")
local originalDeploymentFile="deployment.yml"
local originalServiceFile="service.yml"
local outputDirectory
outputDirectory="$(outputFolder)/k8s"
mkdir -p "${outputDirectory}"
cp "${originalDeploymentFile}" "${outputDirectory}"
cp "${originalServiceFile}" "${outputDirectory}"
local deploymentFile="${outputDirectory}/deployment.yml"
local serviceFile="${outputDirectory}/service.yml"
local systemProps="-Dspring.profiles.active=${profiles}"
substituteVariables "dockerOrg" "${DOCKER_REGISTRY_ORGANIZATION}" "${deploymentFile}"
substituteVariables "version" "${PIPELINE_VERSION}" "${deploymentFile}"
substituteVariables "appName" "${appName}" "${deploymentFile}"
substituteVariables "labelAppName" "${appName}" "${deploymentFile}"
substituteVariables "containerName" "${appName}" "${deploymentFile}"
substituteVariables "systemProps" "${systemProps}" "${deploymentFile}"
substituteVariables "appName" "${appName}" "${serviceFile}"
deleteAppByFile "${deploymentFile}"
deleteAppByFile "${serviceFile}"
deployApp "${deploymentFile}"
deployApp "${serviceFile}"
waitForAppToStart "${appName}"
}
function toLowerCase() {
local string=${1}
echo "${string}" | tr '[:upper:]' '[:lower:]'
}
function lowerCaseEnv() {
echo "${ENVIRONMENT}" | tr '[:upper:]' '[:lower:]'
}
function deleteAppInstance() {
local serviceName="${1}"
local lowerCaseAppName
lowerCaseAppName=$(toLowerCase "${serviceName}")
echo "Deleting application [${lowerCaseAppName}]"
deleteAppByName "${lowerCaseAppName}"
}
function deployEureka() {
local imageName="${1}"
local appName="${2}"
local objectDeployed
objectDeployed="$(objectDeployed "service" "${appName}")"
if [[ "${ENVIRONMENT}" == "STAGE" && "${objectDeployed}" == "true" ]]; then
echo "Service [${appName}] already deployed. Won't redeploy for stage"
return
fi
echo "Deploying Eureka. Options - image name [${imageName}], app name [${appName}], env [${ENVIRONMENT}]"
local originalDeploymentFile="${__ROOT}/k8s/eureka.yml"
local originalServiceFile="${__ROOT}/k8s/eureka-service.yml"
local outputDirectory
outputDirectory="$(outputFolder)/k8s"
mkdir -p "${outputDirectory}"
cp "${originalDeploymentFile}" "${outputDirectory}"
cp "${originalServiceFile}" "${outputDirectory}"
local deploymentFile="${outputDirectory}/eureka.yml"
local serviceFile="${outputDirectory}/eureka-service.yml"
substituteVariables "appName" "${appName}" "${deploymentFile}"
substituteVariables "appUrl" "${appName}.${PAAS_NAMESPACE}" "${deploymentFile}"
substituteVariables "eurekaImg" "${imageName}" "${deploymentFile}"
substituteVariables "appName" "${appName}" "${serviceFile}"
if [[ "${ENVIRONMENT}" == "TEST" ]]; then
deleteAppByFile "${deploymentFile}"
deleteAppByFile "${serviceFile}"
fi
replaceApp "${deploymentFile}"
replaceApp "${serviceFile}"
waitForAppToStart "${appName}"
}
function escapeValueForSed() {
echo "${1//\//\\/}"
}
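# Example (hypothetical value): escapeValueForSed "http://repo/jars" prints
# "http:\/\/repo\/jars", so the result is safe to use on the right-hand side
# of the sed expressions in substituteVariables above.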
function deployStubRunnerBoot() {
local imageName="${1}"
local repoWithJars="${2}"
local rabbitName="${3}.${PAAS_NAMESPACE}"
local eurekaName="${4}.${PAAS_NAMESPACE}"
local stubRunnerName="${5:-stubrunner}"
local stubRunnerUseClasspath="${stubRunnerUseClasspath:-false}"
echo "Deploying Stub Runner. Options - image name [${imageName}], app name [${stubRunnerName}]"
local stubrunnerIds
stubrunnerIds="$(retrieveStubRunnerIds)"
echo "Found following stub runner ids [${stubrunnerIds}]"
local originalDeploymentFile="${__ROOT}/k8s/stubrunner.yml"
local originalServiceFile="${__ROOT}/k8s/stubrunner-service.yml"
local outputDirectory
outputDirectory="$(outputFolder)/k8s"
local systemProps=""
mkdir -p "${outputDirectory}"
cp "${originalDeploymentFile}" "${outputDirectory}"
cp "${originalServiceFile}" "${outputDirectory}"
local deploymentFile="${outputDirectory}/stubrunner.yml"
local serviceFile="${outputDirectory}/stubrunner-service.yml"
if [[ "${stubRunnerUseClasspath}" == "false" ]]; then
systemProps="${systemProps} -Dstubrunner.repositoryRoot=${repoWithJars}"
fi
substituteVariables "appName" "${stubRunnerName}" "${deploymentFile}"
substituteVariables "stubrunnerImg" "${imageName}" "${deploymentFile}"
substituteVariables "systemProps" "${systemProps}" "${deploymentFile}"
substituteVariables "rabbitAppName" "${rabbitName}" "${deploymentFile}"
substituteVariables "eurekaAppName" "${eurekaName}" "${deploymentFile}"
if [[ "${stubrunnerIds}" != "" ]]; then
substituteVariables "stubrunnerIds" "${stubrunnerIds}" "${deploymentFile}"
else
substituteVariables "stubrunnerIds" "" "${deploymentFile}"
fi
substituteVariables "appName" "${stubRunnerName}" "${serviceFile}"
if [[ "${ENVIRONMENT}" == "TEST" ]]; then
deleteAppByFile "${deploymentFile}"
deleteAppByFile "${serviceFile}"
fi
replaceApp "${deploymentFile}"
replaceApp "${serviceFile}"
waitForAppToStart "${stubRunnerName}"
}
function prepareForSmokeTests() {
echo "Retrieving group and artifact id - it can take a while..."
local appName
appName="$(retrieveAppName)"
mkdir -p "${OUTPUT_FOLDER}"
logInToPaas
local applicationPort
applicationPort="$(portFromKubernetes "${appName}")"
local stubrunnerAppName
stubrunnerAppName="stubrunner-${appName}"
local stubrunnerPort
stubrunnerPort="$(portFromKubernetes "${stubrunnerAppName}")"
local applicationHost
applicationHost="$(applicationHost "${appName}")"
local stubRunnerUrl
stubRunnerUrl="$(applicationHost "${stubrunnerAppName}")"
export APPLICATION_URL="${applicationHost}:${applicationPort}"
export STUBRUNNER_URL="${stubRunnerUrl}:${stubrunnerPort}"
}
function prepareForE2eTests() {
echo "Retrieving group and artifact id - it can take a while..."
local appName
appName="$(retrieveAppName)"
mkdir -p "${OUTPUT_FOLDER}"
logInToPaas
local applicationPort
applicationPort="$(portFromKubernetes "${appName}")"
local applicationHost
applicationHost="$(applicationHost "${appName}")"
export APPLICATION_URL="${applicationHost}:${applicationPort}"
}
function applicationHost() {
local appName="${1}"
if [[ "${KUBERNETES_MINIKUBE}" == "true" ]]; then
local apiUrlProp="PAAS_${ENVIRONMENT}_API_URL"
# host:port -> host
echo "${!apiUrlProp}" | awk -F: '{print $1}'
else
echo "${appName}.${PAAS_NAMESPACE}"
fi
}
function portFromKubernetes() {
local appName="${1}"
local jsonPath
{ if [[ "${KUBERNETES_MINIKUBE}" == "true" ]]; then
jsonPath="{.spec.ports[0].nodePort}"
else
jsonPath="{.spec.ports[0].port}"
fi
}
# '8080' -> 8080
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" get svc "${appName}" -o jsonpath="${jsonPath}"
}
function waitForAppToStart() {
local appName="${1}"
local port
port="$(portFromKubernetes "${appName}")"
local applicationHost
applicationHost="$(applicationHost "${appName}")"
isAppRunning "${applicationHost}" "${port}"
}
function retrieveApplicationUrl() {
local appName
appName="$(retrieveAppName)"
local port
port="$(portFromKubernetes "${appName}")"
local kubHost
kubHost="$(applicationHost "${appName}")"
echo "${kubHost}:${port}"
}
function isAppRunning() {
local host="${1}"
local port="${2}"
local waitTime=5
local retries=50
local running=1
local healthEndpoint="health"
echo "Checking if app [${host}:${port}] is running at [/${healthEndpoint}] endpoint"
for i in $(seq 1 "${retries}"); do
curl -m 5 "${host}:${port}/${healthEndpoint}" && running=0 && break
echo "Fail #$i/${retries}... will try again in [${waitTime}] seconds"
sleep "${waitTime}"
done
if [[ "${running}" == 1 ]]; then
echo "App failed to start"
exit 1
fi
echo ""
echo "App started successfully!"
}
function readTestPropertiesFromFile() {
local fileLocation="${1:-${OUTPUT_FOLDER}/test.properties}"
local key
local value
if [ -f "${fileLocation}" ]
then
echo "${fileLocation} found."
while IFS='=' read -r key value
do
key="$(echo "${key}" | tr '.' '_')"
eval "${key}='${value}'"
done <"${fileLocation}"
else
echo "${fileLocation} not found."
fi
}
function label() {
local appName="${1}"
local key="${2}"
local value="${3}"
local type="deployment"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" label "${type}" "${appName}" "${key}"="${value}"
}
function objectDeployed() {
local appType="${1}"
local appName="${2}"
local result
result="$("${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" get "${appType}" "${appName}" --ignore-not-found=true)"
if [[ "${result}" != "" ]]; then
echo "true"
else
echo "false"
fi
}
function stageDeploy() {
local appName
appName="$(retrieveAppName)"
# Log in to PaaS to start deployment
logInToPaas
deployServices
# deploy app
deployAndRestartAppWithNameForE2ETests "${appName}"
}
function prodDeploy() {
# TODO: Consider making it less JVM specific
local appName
appName="$(retrieveAppName)"
# Log in to PaaS to start deployment
logInToPaas
# deploy app
performProductionDeploymentOfTestedApplication "${appName}"
}
function performProductionDeploymentOfTestedApplication() {
local appName="${1}"
local lowerCaseAppName
lowerCaseAppName=$(toLowerCase "${appName}")
local profiles="kubernetes"
local originalDeploymentFile="deployment.yml"
local originalServiceFile="service.yml"
local outputDirectory
outputDirectory="$(outputFolder)/k8s"
mkdir -p "${outputDirectory}"
cp "${originalDeploymentFile}" "${outputDirectory}"
cp "${originalServiceFile}" "${outputDirectory}"
local deploymentFile="${outputDirectory}/deployment.yml"
local serviceFile="${outputDirectory}/service.yml"
local changedAppName
changedAppName="$(escapeValueForDns "${appName}-${PIPELINE_VERSION}")"
echo "Will name the application [${changedAppName}]"
local systemProps="-Dspring.profiles.active=${profiles}"
substituteVariables "dockerOrg" "${DOCKER_REGISTRY_ORGANIZATION}" "${deploymentFile}"
substituteVariables "version" "${PIPELINE_VERSION}" "${deploymentFile}"
# The name will contain also the version
substituteVariables "labelAppName" "${changedAppName}" "${deploymentFile}"
substituteVariables "appName" "${appName}" "${deploymentFile}"
substituteVariables "containerName" "${appName}" "${deploymentFile}"
substituteVariables "systemProps" "${systemProps}" "${deploymentFile}"
substituteVariables "appName" "${appName}" "${serviceFile}"
deployApp "${deploymentFile}"
local serviceDeployed
serviceDeployed="$(objectDeployed "service" "${appName}")"
echo "Service already deployed? [${serviceDeployed}]"
if [[ "${serviceDeployed}" == "false" ]]; then
deployApp "${serviceFile}"
fi
waitForAppToStart "${appName}"
}
function escapeValueForDns() {
local sed
sed="$(sed -e 's/\./-/g;s/_/-/g' <<<"$1")"
local lowerCaseSed
lowerCaseSed="$(toLowerCase "${sed}")"
echo "${lowerCaseSed}"
}
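# Example (hypothetical value): escapeValueForDns "My_App-1.0.0" prints
# "my-app-1-0-0"; dots and underscores become dashes and the result is
# lower-cased so it can be used in Kubernetes object names.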
function completeSwitchOver() {
local appName
appName="$(retrieveAppName)"
# Log in to CF to start deployment
logInToPaas
# find the oldest version and remove it
local oldestDeployment
oldestDeployment="$(oldestDeployment "${appName}")"
if [[ "${oldestDeployment}" != "" ]]; then
echo "Deleting deployment with name [${oldestDeployment}]"
"${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" delete deployment "${oldestDeployment}"
else
echo "There's no blue instance to remove, skipping this step"
fi
}
function oldestDeployment() {
local appName="${1}"
local changedAppName
changedAppName="$(escapeValueForDns "${appName}-${PIPELINE_VERSION}")"
local deployedApps
deployedApps="$("${KUBECTL_BIN}" --context="${K8S_CONTEXT}" --namespace="${PAAS_NAMESPACE}" get deployments -lname="${appName}" --no-headers | awk '{print $1}' | grep -v "${changedAppName}")"
local oldestDeployment
oldestDeployment="$(echo "${deployedApps}" | sort | head -n 1)"
echo "${oldestDeployment}"
}
__ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
export LOWER_CASE_ENV
LOWER_CASE_ENV="$(lowerCaseEnv)"
export PAAS_NAMESPACE_VAR="PAAS_${ENVIRONMENT}_NAMESPACE"
[[ -z "${PAAS_NAMESPACE}" ]] && PAAS_NAMESPACE="${!PAAS_NAMESPACE_VAR}"
export KUBERNETES_NAMESPACE="${PAAS_NAMESPACE}"
export SYSTEM
SYSTEM="$(system)"
export KUBE_CONFIG_PATH
KUBE_CONFIG_PATH="${KUBE_CONFIG_PATH:-${HOME}/.kube/config}"
export KUBECTL_BIN
KUBECTL_BIN="${KUBECTL_BIN:-kubectl}"
# CURRENTLY WE ONLY SUPPORT JVM BASED PROJECTS OUT OF THE BOX
# shellcheck source=/dev/null
[[ -f "${__ROOT}/projectType/pipeline-jvm.sh" ]] && source "${__ROOT}/projectType/pipeline-jvm.sh" || \
echo "No projectType/pipeline-jvm.sh found"
|
k0chan/spring-cloud-pipelines
|
buildSrc/src/test/resources/project_customizer/common/src/main/bash/pipeline-k8s.sh
|
Shell
|
apache-2.0
| 26,388 |
#!/usr/bin/env bash
if [ ! -z "$*" ]; then
"$@" 2>&1 | while read -r line;do
echo "$(date +"%T") $line"
done
exit ${PIPESTATUS[0]}
else
while read -r line;do
echo "$(date +"%T") $line"
done
echo ret $?
fi
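# Illustrative usage (command is hypothetical): either wrap a command,
#
#   ./showtime.sh docker ps
#
# which prefixes every output line with an HH:MM:SS timestamp and exits with
# the wrapped command's status, or feed it via stdin:
#
#   make test 2>&1 | ./showtime.sh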
|
reasonerjt/harbor
|
tests/showtime.sh
|
Shell
|
apache-2.0
| 201 |
sudo rpm -Uvh thingsboard-2.5.4pe.rpm
|
volodymyr-babak/thingsboard.github.io
|
docs/user-guide/install/pe/resources/2.5.4pe/thingsboard-centos-installation.sh
|
Shell
|
apache-2.0
| 38 |
# Make sure you create a directory where you want to keep your log files in this case (~/log)
cd ~/log && mv score.log score`date +"%Y_%m_%d"`.log && touch score.log
|
Sage-Bionetworks/SynapseChallengeTemplates
|
python/scorelog_update.sh
|
Shell
|
apache-2.0
| 165 |
#!/bin/bash
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Destroy Deployment
cd terraform/ || exit
terraform destroy -auto-approve
cd .. || exit
# Tear Down Dataflow
DF_JOBS=$(gcloud dataflow jobs list --status=active --region="${REGION}" --project="${PROJECT_ID}" | grep 'webhook-job-' | awk '{print $1;}')
gcloud dataflow jobs cancel "${DF_JOBS}" --region="${REGION}" --project="${PROJECT_ID}"
|
CloudVLab/professional-services
|
tools/webhook-ingestion-pipeline/tear_down.sh
|
Shell
|
apache-2.0
| 931 |
#!/usr/bin/env bash
set -e
cd "$(dirname "$BASH_SOURCE")/.."
rm -rf vendor/
source 'hack/.vendor-helpers.sh'
# the following lines are in sorted order, FYI
clone git github.com/Azure/go-ansiterm 70b2c90b260171e829f1ebd7c17f600c11858dbe
clone git github.com/Sirupsen/logrus v0.8.2 # logrus is a common dependency among multiple deps
clone git github.com/docker/libtrust 9cbd2a1374f46905c68a4eb3694a130610adc62a
clone git github.com/go-check/check 11d3bc7aa68e238947792f30573146a3231fc0f1
clone git github.com/gorilla/context 14f550f51a
clone git github.com/gorilla/mux e444e69cbd
clone git github.com/kr/pty 5cf931ef8f
clone git github.com/mattn/go-shellwords v1.0.0
clone git github.com/mattn/go-sqlite3 v1.1.0
clone git github.com/microsoft/hcsshim de43b42b5ce14dfdcbeedb0628b0032174d89caa
clone git github.com/mistifyio/go-zfs v2.1.1
clone git github.com/tchap/go-patricia v2.1.0
clone git github.com/vdemeester/shakers 3c10293ce22b900c27acad7b28656196fcc2f73b
clone git golang.org/x/net 47990a1ba55743e6ef1affd3a14e5bac8553615d https://github.com/golang/net.git
clone git github.com/docker/go-units 651fc226e7441360384da338d0fd37f2440ffbe3
clone git github.com/docker/go-connections v0.1.2
clone git github.com/docker/engine-api v0.2.2
clone git github.com/RackSec/srslog 6eb773f331e46fbba8eecb8e794e635e75fc04de
clone git github.com/imdario/mergo 0.2.1
#get libnetwork packages
clone git github.com/docker/libnetwork v0.5.6
clone git github.com/armon/go-metrics eb0af217e5e9747e41dd5303755356b62d28e3ec
clone git github.com/hashicorp/go-msgpack 71c2886f5a673a35f909803f38ece5810165097b
clone git github.com/hashicorp/memberlist 9a1e242e454d2443df330bdd51a436d5a9058fc4
clone git github.com/hashicorp/serf 7151adcef72687bf95f451a2e0ba15cb19412bf2
clone git github.com/docker/libkv c2aac5dbbaa5c872211edea7c0f32b3bd67e7410
clone git github.com/vishvananda/netns 604eaf189ee867d8c147fafc28def2394e878d25
clone git github.com/vishvananda/netlink bfd70f556483c008636b920dda142fdaa0d59ef9
clone git github.com/BurntSushi/toml f706d00e3de6abe700c994cdd545a1a4915af060
clone git github.com/samuel/go-zookeeper d0e0d8e11f318e000a8cc434616d69e329edc374
clone git github.com/deckarep/golang-set ef32fa3046d9f249d399f98ebaf9be944430fd1d
clone git github.com/coreos/etcd v2.2.0
fix_rewritten_imports github.com/coreos/etcd
clone git github.com/ugorji/go 5abd4e96a45c386928ed2ca2a7ef63e2533e18ec
clone git github.com/hashicorp/consul v0.5.2
clone git github.com/boltdb/bolt v1.1.0
clone git github.com/miekg/dns d27455715200c7d3e321a1e5cadb27c9ee0b0f02
# get graph and distribution packages
clone git github.com/docker/distribution cb08de17d74bef86ce6c5abe8b240e282f5750be
clone git github.com/vbatts/tar-split v0.9.11
# get desired notary commit, might also need to be updated in Dockerfile
clone git github.com/docker/notary docker-v1.10-3
clone git google.golang.org/grpc 174192fc93efcb188fc8f46ca447f0da606b6885 https://github.com/grpc/grpc-go.git
clone git github.com/miekg/pkcs11 80f102b5cac759de406949c47f0928b99bd64cdf
clone git github.com/jfrazelle/go v1.5.1-1
clone git github.com/agl/ed25519 d2b94fd789ea21d12fac1a4443dd3a3f79cda72c
clone git github.com/opencontainers/runc d97d5e8b007e4657316eed76ea30bc0f690230cf # libcontainer
clone git github.com/seccomp/libseccomp-golang 1b506fc7c24eec5a3693cdcbed40d9c226cfc6a1
# libcontainer deps (see src/github.com/opencontainers/runc/Godeps/Godeps.json)
clone git github.com/coreos/go-systemd v4
clone git github.com/godbus/dbus v3
clone git github.com/syndtr/gocapability 2c00daeb6c3b45114c80ac44119e7b8801fdd852
clone git github.com/golang/protobuf f7137ae6b19afbfd61a94b746fda3b3fe0491874
# gelf logging driver deps
clone git github.com/Graylog2/go-gelf 6c62a85f1d47a67f2a5144c0e745b325889a8120
clone git github.com/fluent/fluent-logger-golang v1.0.0
# fluent-logger-golang deps
clone git github.com/philhofer/fwd 899e4efba8eaa1fea74175308f3fae18ff3319fa
clone git github.com/tinylib/msgp 75ee40d2601edf122ef667e2a07d600d4c44490c
# fsnotify
clone git gopkg.in/fsnotify.v1 v1.2.0
# awslogs deps
clone git github.com/aws/aws-sdk-go v0.9.9
clone git github.com/vaughan0/go-ini a98ad7ee00ec53921f08832bc06ecf7fd600e6a1
clean
|
Collinux/docker
|
hack/vendor.sh
|
Shell
|
apache-2.0
| 4,181 |
#! /bin/bash
set -eo pipefail
PUBDIR="$SAFEDIR"
KEYDIR="$SAFEDIR"
PATH="$PATH:$SCION_ROOT/bin"
. "$PLAYGROUND/crypto_lib.sh"
cd $SAFEDIR
stop_docker || true
start_docker
for loc in {bern,geneva}
do
echo "Preparation: $loc"
if [ "$loc" = "bern" ]; then
IA="1-ff00:0:110"
else
IA="1-ff00:0:120"
fi
mkdir -p $SAFEDIR/$loc && cd $SAFEDIR/$loc
set_dirs
# Generate configuration files
navigate_pubdir
basic_conf && root_conf && ca_conf && as_conf
prepare_ca
sed -i \
-e 's/{{.Country}}/CH/g' \
-e "s/{{.State}}/$loc/g" \
-e "s/{{.Location}}/$loc/g" \
-e "s/{{.Organization}}/$loc/g" \
-e "s/{{.OrganizationalUnit}}/$loc InfoSec Squad/g" \
-e "s/{{.ISDAS}}/$IA/g" \
basic.cnf
for cnf in *.cnf
do
sed -i \
-e "s/{{.ShortOrg}}/$loc/g" \
$cnf
done
# Generate certificates
#
# The default start and end date are set by TestUpdateCrypto.
# For AS certificates we want smaller periods, because we want to check that
# the database correctly fetches when given a specific point in time.
KEYDIR=/workdir/$loc/keys PUBDIR=/workdir/$loc/public docker_exec "navigate_pubdir && gen_root && gen_ca \
&& STARTDATE=20200624120000Z ENDDATE=20200627120000Z gen_as && mv cp-as.crt cp-as1.crt \
&& STARTDATE=20200626120000Z ENDDATE=20200629120000Z gen_as && mv cp-as.crt cp-as2.crt \
&& STARTDATE=20200628120000Z ENDDATE=20200701120000Z gen_as_ca_steps && mv cp-as.crt cp-as3.crt"
scion-pki certs validate --type cp-root $PUBDIR/cp-root.crt
scion-pki certs validate --type cp-ca $PUBDIR/cp-ca.crt
scion-pki certs validate --type cp-as $PUBDIR/cp-as1.crt
scion-pki certs validate --type cp-as $PUBDIR/cp-as2.crt
scion-pki certs validate --type cp-as $PUBDIR/cp-as3.crt
mkdir -p "$TESTDATA/$loc"
cp $PUBDIR/*.crt "$TESTDATA/$loc"
done
stop_docker
|
netsec-ethz/scion
|
go/pkg/trust/dbtest/testdata/update_certs.sh
|
Shell
|
apache-2.0
| 1,980 |
#!/bin/bash
PASSWORD_CLASS=org.eclipse.jetty.util.security.Password
if [[ $(ls /opt/apigee/edge-gateway/lib/thirdparty/jetty-util-8*.jar 2> /dev/null) ]] ; then
PASSWORD_CLASS=org.eclipse.jetty.http.security.Password
fi
java -cp "${APIGEE_ROOT:-/opt/apigee}/edge-gateway/lib/thirdparty/*" $PASSWORD_CLASS "$1" 2>&1 | egrep '^OBF:'
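# Hedged usage sketch (not part of the original script): the password below
# is made up. The script prints only the Jetty-obfuscated form (the line
# beginning with "OBF:"), suitable for pasting into TLS keystore configs.
#   ./generate_obf.sh 'MySecretPass'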
|
apigee/ansible-install
|
roles/apigee-tls-ms/scripts/generate_obf.sh
|
Shell
|
apache-2.0
| 337 |
#!/bin/bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Common utilites for kube-up/kube-down
set -o errexit
set -o nounset
set -o pipefail
KUBE_ROOT=$(cd $(dirname "${BASH_SOURCE}")/.. && pwd)
DEFAULT_KUBECONFIG="${HOME:-.}/.kube/config"
source "${KUBE_ROOT}/hack/lib/util.sh"
source "${KUBE_ROOT}/cluster/lib/logging.sh"
# KUBE_RELEASE_VERSION_REGEX matches things like "v1.2.3" or "v1.2.3-alpha.4"
#
# NOTE This must match the version_regex in build/common.sh
# kube::release::parse_and_validate_release_version()
KUBE_RELEASE_VERSION_REGEX="^v(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)(-([a-zA-Z0-9]+)\\.(0|[1-9][0-9]*))?$"
KUBE_RELEASE_VERSION_DASHED_REGEX="v(0|[1-9][0-9]*)-(0|[1-9][0-9]*)-(0|[1-9][0-9]*)(-([a-zA-Z0-9]+)-(0|[1-9][0-9]*))?"
# KUBE_CI_VERSION_REGEX matches things like "v1.2.3-alpha.4.56+abcdefg"
#
# NOTE This must match the version_regex in build/common.sh
# kube::release::parse_and_validate_ci_version()
KUBE_CI_VERSION_REGEX="^v(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)-([a-zA-Z0-9]+)\\.(0|[1-9][0-9]*)(\\.(0|[1-9][0-9]*)\\+[-0-9a-z]*)?$"
KUBE_CI_VERSION_DASHED_REGEX="^v(0|[1-9][0-9]*)-(0|[1-9][0-9]*)-(0|[1-9][0-9]*)-([a-zA-Z0-9]+)-(0|[1-9][0-9]*)(-(0|[1-9][0-9]*)\\+[-0-9a-z]*)?"
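# Hedged self-check (not part of the original file): the sample version
# strings below are invented; they illustrate what each regex accepts.
#   [[ "v1.6.2" =~ $KUBE_RELEASE_VERSION_REGEX ]]                        # matches
#   [[ "v1.7.0-alpha.1" =~ $KUBE_RELEASE_VERSION_REGEX ]]                # matches
#   [[ "v1.7.0-alpha.1.881+376438b69c7612" =~ $KUBE_CI_VERSION_REGEX ]]  # matches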
# Generate kubeconfig data for the created cluster.
# Assumed vars:
# KUBE_USER
# KUBE_PASSWORD
# KUBE_MASTER_IP
# KUBECONFIG
# CONTEXT
#
# If the apiserver supports bearer auth, also provide:
# KUBE_BEARER_TOKEN
#
# If the kubeconfig context being created should NOT be set as the current context
# SECONDARY_KUBECONFIG=true
#
# To explicitly name the context being created, use OVERRIDE_CONTEXT
#
# The following can be omitted for --insecure-skip-tls-verify
# KUBE_CERT
# KUBE_KEY
# CA_CERT
function create-kubeconfig() {
KUBECONFIG=${KUBECONFIG:-$DEFAULT_KUBECONFIG}
local kubectl="${KUBE_ROOT}/cluster/kubectl.sh"
SECONDARY_KUBECONFIG=${SECONDARY_KUBECONFIG:-}
OVERRIDE_CONTEXT=${OVERRIDE_CONTEXT:-}
if [[ "$OVERRIDE_CONTEXT" != "" ]];then
CONTEXT=$OVERRIDE_CONTEXT
fi
# KUBECONFIG determines the file we write to, but it may not exist yet
OLD_IFS=$IFS
IFS=':'
for cfg in ${KUBECONFIG} ; do
if [[ ! -e "${cfg}" ]]; then
mkdir -p "$(dirname "${cfg}")"
touch "${cfg}"
fi
done
IFS=$OLD_IFS
local cluster_args=(
"--server=${KUBE_SERVER:-https://${KUBE_MASTER_IP}}"
)
if [[ -z "${CA_CERT:-}" ]]; then
cluster_args+=("--insecure-skip-tls-verify=true")
else
cluster_args+=(
"--certificate-authority=${CA_CERT}"
"--embed-certs=true"
)
fi
local user_args=()
if [[ ! -z "${KUBE_BEARER_TOKEN:-}" ]]; then
user_args+=(
"--token=${KUBE_BEARER_TOKEN}"
)
elif [[ ! -z "${KUBE_USER:-}" && ! -z "${KUBE_PASSWORD:-}" ]]; then
user_args+=(
"--username=${KUBE_USER}"
"--password=${KUBE_PASSWORD}"
)
fi
if [[ ! -z "${KUBE_CERT:-}" && ! -z "${KUBE_KEY:-}" ]]; then
user_args+=(
"--client-certificate=${KUBE_CERT}"
"--client-key=${KUBE_KEY}"
"--embed-certs=true"
)
fi
KUBECONFIG="${KUBECONFIG}" "${kubectl}" config set-cluster "${CONTEXT}" "${cluster_args[@]}"
if [[ -n "${user_args[@]:-}" ]]; then
KUBECONFIG="${KUBECONFIG}" "${kubectl}" config set-credentials "${CONTEXT}" "${user_args[@]}"
fi
KUBECONFIG="${KUBECONFIG}" "${kubectl}" config set-context "${CONTEXT}" --cluster="${CONTEXT}" --user="${CONTEXT}"
if [[ "${SECONDARY_KUBECONFIG}" != "true" ]];then
KUBECONFIG="${KUBECONFIG}" "${kubectl}" config use-context "${CONTEXT}" --cluster="${CONTEXT}"
fi
# If we have a bearer token, also create a credential entry with basic auth
# so that it is easy to discover the basic auth password for your cluster
# to use in a web browser.
if [[ ! -z "${KUBE_BEARER_TOKEN:-}" && ! -z "${KUBE_USER:-}" && ! -z "${KUBE_PASSWORD:-}" ]]; then
KUBECONFIG="${KUBECONFIG}" "${kubectl}" config set-credentials "${CONTEXT}-basic-auth" "--username=${KUBE_USER}" "--password=${KUBE_PASSWORD}"
fi
echo "Wrote config for ${CONTEXT} to ${KUBECONFIG}"
}
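# Hedged usage sketch (not part of the original file): the IP, credentials and
# context name below are invented. A caller exports the assumed vars listed
# above and then invokes the function, e.g.:
#   KUBE_MASTER_IP=203.0.113.10 KUBE_USER=admin KUBE_PASSWORD=secret \
#   CONTEXT=my-test-cluster create-kubeconfig
# which writes (or updates) the cluster/user/context entries in ${KUBECONFIG}.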
# Clear kubeconfig data for a context
# Assumed vars:
# KUBECONFIG
# CONTEXT
#
# To explicitly name the context being removed, use OVERRIDE_CONTEXT
function clear-kubeconfig() {
export KUBECONFIG=${KUBECONFIG:-$DEFAULT_KUBECONFIG}
OVERRIDE_CONTEXT=${OVERRIDE_CONTEXT:-}
if [[ "$OVERRIDE_CONTEXT" != "" ]];then
CONTEXT=$OVERRIDE_CONTEXT
fi
local kubectl="${KUBE_ROOT}/cluster/kubectl.sh"
# Unset the current-context before we delete it, as otherwise kubectl errors.
local cc=$("${kubectl}" config view -o jsonpath='{.current-context}')
if [[ "${cc}" == "${CONTEXT}" ]]; then
"${kubectl}" config unset current-context
fi
"${kubectl}" config unset "clusters.${CONTEXT}"
"${kubectl}" config unset "users.${CONTEXT}"
"${kubectl}" config unset "users.${CONTEXT}-basic-auth"
"${kubectl}" config unset "contexts.${CONTEXT}"
echo "Cleared config for ${CONTEXT} from ${KUBECONFIG}"
}
# Creates a kubeconfig file with the credentials for only the current-context
# cluster. This is used by federation to create secrets in test setup.
function create-kubeconfig-for-federation() {
if [[ "${FEDERATION:-}" == "true" ]]; then
echo "creating kubeconfig for federation secret"
local kubectl="${KUBE_ROOT}/cluster/kubectl.sh"
local cc=$("${kubectl}" config view -o jsonpath='{.current-context}')
KUBECONFIG_DIR=$(dirname ${KUBECONFIG:-$DEFAULT_KUBECONFIG})
KUBECONFIG_PATH="${KUBECONFIG_DIR}/federation/kubernetes-apiserver/${cc}"
mkdir -p "${KUBECONFIG_PATH}"
"${kubectl}" config view --minify --flatten > "${KUBECONFIG_PATH}/kubeconfig"
fi
}
function tear_down_alive_resources() {
local kubectl="${KUBE_ROOT}/cluster/kubectl.sh"
"${kubectl}" delete deployments --all || true
"${kubectl}" delete rc --all || true
"${kubectl}" delete pods --all || true
"${kubectl}" delete svc --all || true
"${kubectl}" delete pvc --all || true
}
# Gets username, password for the current-context in kubeconfig, if they exist.
# Assumed vars:
# KUBECONFIG # if unset, defaults to global
# KUBE_CONTEXT # if unset, defaults to current-context
#
# Vars set:
# KUBE_USER
# KUBE_PASSWORD
#
# KUBE_USER,KUBE_PASSWORD will be empty if no current-context is set, or
# the current-context user does not exist or contain basicauth entries.
function get-kubeconfig-basicauth() {
export KUBECONFIG=${KUBECONFIG:-$DEFAULT_KUBECONFIG}
local cc=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.current-context}")
if [[ ! -z "${KUBE_CONTEXT:-}" ]]; then
cc="${KUBE_CONTEXT}"
fi
local user=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.contexts[?(@.name == \"${cc}\")].context.user}")
get-kubeconfig-user-basicauth "${user}"
if [[ -z "${KUBE_USER:-}" || -z "${KUBE_PASSWORD:-}" ]]; then
# kube-up stores username/password in an additional kubeconfig section
# suffixed with "-basic-auth". Cloud providers like GKE store it directly
# in the top-level section along with the other credential information.
# TODO: Handle this uniformly, either get rid of "basic-auth" or
# consolidate its usage into a function across scripts in cluster/
get-kubeconfig-user-basicauth "${user}-basic-auth"
fi
}
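# Hedged example (not part of the original file): the context name is invented.
#   KUBE_CONTEXT=my-test-cluster get-kubeconfig-basicauth
#   echo "user=${KUBE_USER} password=${KUBE_PASSWORD}"  # both empty if no basicauth entry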
# Sets KUBE_USER and KUBE_PASSWORD to the username and password specified in
# the kubeconfig section corresponding to $1.
#
# Args:
# $1 kubeconfig section to look for basic auth (eg: user or user-basic-auth).
# Assumed vars:
# KUBE_ROOT
# Vars set:
# KUBE_USER
# KUBE_PASSWORD
function get-kubeconfig-user-basicauth() {
KUBE_USER=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.users[?(@.name == \"$1\")].user.username}")
KUBE_PASSWORD=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.users[?(@.name == \"$1\")].user.password}")
}
# Generate basic auth user and password.
# Vars set:
# KUBE_USER
# KUBE_PASSWORD
function gen-kube-basicauth() {
KUBE_USER=admin
KUBE_PASSWORD=$(python -c 'import string,random; print("".join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(16)))')
}
# Get the bearer token for the current-context in kubeconfig if one exists.
# Assumed vars:
# KUBECONFIG # if unset, defaults to global
# KUBE_CONTEXT # if unset, defaults to current-context
#
# Vars set:
# KUBE_BEARER_TOKEN
#
# KUBE_BEARER_TOKEN will be empty if no current-context is set, or the
# current-context user does not exist or contain a bearer token entry.
function get-kubeconfig-bearertoken() {
export KUBECONFIG=${KUBECONFIG:-$DEFAULT_KUBECONFIG}
local cc=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.current-context}")
if [[ ! -z "${KUBE_CONTEXT:-}" ]]; then
cc="${KUBE_CONTEXT}"
fi
local user=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.contexts[?(@.name == \"${cc}\")].context.user}")
KUBE_BEARER_TOKEN=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.users[?(@.name == \"${user}\")].user.token}")
}
# Generate bearer token.
#
# Vars set:
# KUBE_BEARER_TOKEN
function gen-kube-bearertoken() {
KUBE_BEARER_TOKEN=$(dd if=/dev/urandom bs=128 count=1 2>/dev/null | base64 | tr -d "=+/" | dd bs=32 count=1 2>/dev/null)
}
# Generate uid
# This function only works on systems with python. It generates a time based
# UID instead of a UUID because GCE has a name length limit.
#
# Vars set:
# KUBE_UID
function gen-uid {
KUBE_UID=$(python -c 'import uuid; print(uuid.uuid1().fields[0])')
}
function load-or-gen-kube-basicauth() {
if [[ ! -z "${KUBE_CONTEXT:-}" ]]; then
get-kubeconfig-basicauth
fi
if [[ -z "${KUBE_USER:-}" || -z "${KUBE_PASSWORD:-}" ]]; then
gen-kube-basicauth
fi
# Make sure they don't contain any funny characters.
if ! [[ "${KUBE_USER}" =~ ^[-._@a-zA-Z0-9]+$ ]]; then
echo "Bad KUBE_USER string."
exit 1
fi
if ! [[ "${KUBE_PASSWORD}" =~ ^[-._@#%/a-zA-Z0-9]+$ ]]; then
echo "Bad KUBE_PASSWORD string."
exit 1
fi
}
function load-or-gen-kube-bearertoken() {
if [[ ! -z "${KUBE_CONTEXT:-}" ]]; then
get-kubeconfig-bearertoken
fi
if [[ -z "${KUBE_BEARER_TOKEN:-}" ]]; then
gen-kube-bearertoken
fi
}
# Get the master IP for the current-context in kubeconfig if one exists.
#
# Assumed vars:
# KUBECONFIG # if unset, defaults to global
# KUBE_CONTEXT # if unset, defaults to current-context
#
# Vars set:
# KUBE_MASTER_URL
#
# KUBE_MASTER_URL will be empty if no current-context is set, or the
# current-context user does not exist or contain a server entry.
function detect-master-from-kubeconfig() {
export KUBECONFIG=${KUBECONFIG:-$DEFAULT_KUBECONFIG}
local cc=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.current-context}")
if [[ ! -z "${KUBE_CONTEXT:-}" ]]; then
cc="${KUBE_CONTEXT}"
fi
local cluster=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.contexts[?(@.name == \"${cc}\")].context.cluster}")
KUBE_MASTER_URL=$("${KUBE_ROOT}/cluster/kubectl.sh" config view -o jsonpath="{.clusters[?(@.name == \"${cluster}\")].cluster.server}")
}
# Sets KUBE_VERSION variable to the proper version number (e.g. "v1.0.6",
# "v1.2.0-alpha.1.881+376438b69c7612") or a version publication of the form
# <path>/<version> (e.g. "release/stable", "ci/latest-1").
#
# See the docs on getting builds for more information about version
# publication.
#
# Args:
# $1 version string from command line
# Vars set:
# KUBE_VERSION
function set_binary_version() {
if [[ "${1}" =~ "/" ]]; then
IFS='/' read -a path <<< "${1}"
if [[ "${path[0]}" == "release" ]]; then
KUBE_VERSION=$(gsutil cat "gs://kubernetes-release/${1}.txt")
else
KUBE_VERSION=$(gsutil cat "gs://kubernetes-release-dev/${1}.txt")
fi
else
KUBE_VERSION=${1}
fi
}
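# Hedged examples (not part of the original file) of the two accepted forms;
# the literal version is invented for illustration:
#   set_binary_version "v1.7.0"          # literal version, used as-is
#   set_binary_version "release/stable"  # publication path, resolved via gsutil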
# Figure out which binaries to use on the server and ensure they are available.
# If KUBE_VERSION is specified use binaries specified by it, otherwise
# use local dev binaries.
#
# Assumed vars:
# KUBE_VERSION
# KUBE_RELEASE_VERSION_REGEX
# KUBE_CI_VERSION_REGEX
# Vars set:
# KUBE_TAR_HASH
# SERVER_BINARY_TAR_URL
# SERVER_BINARY_TAR_HASH
# SALT_TAR_URL
# SALT_TAR_HASH
function tars_from_version() {
local sha1sum=""
if which sha1sum >/dev/null 2>&1; then
sha1sum="sha1sum"
else
sha1sum="shasum -a1"
fi
if [[ -z "${KUBE_VERSION-}" ]]; then
find-release-tars
upload-server-tars
elif [[ ${KUBE_VERSION} =~ ${KUBE_RELEASE_VERSION_REGEX} ]]; then
SERVER_BINARY_TAR_URL="https://storage.googleapis.com/kubernetes-release/release/${KUBE_VERSION}/kubernetes-server-linux-amd64.tar.gz"
SALT_TAR_URL="https://storage.googleapis.com/kubernetes-release/release/${KUBE_VERSION}/kubernetes-salt.tar.gz"
# TODO: Clean this up.
KUBE_MANIFESTS_TAR_URL="${SERVER_BINARY_TAR_URL/server-linux-amd64/manifests}"
KUBE_MANIFESTS_TAR_HASH=$(curl ${KUBE_MANIFESTS_TAR_URL} --silent --show-error | ${sha1sum} | awk '{print $1}')
elif [[ ${KUBE_VERSION} =~ ${KUBE_CI_VERSION_REGEX} ]]; then
SERVER_BINARY_TAR_URL="https://storage.googleapis.com/kubernetes-release-dev/ci/${KUBE_VERSION}/kubernetes-server-linux-amd64.tar.gz"
SALT_TAR_URL="https://storage.googleapis.com/kubernetes-release-dev/ci/${KUBE_VERSION}/kubernetes-salt.tar.gz"
# TODO: Clean this up.
KUBE_MANIFESTS_TAR_URL="${SERVER_BINARY_TAR_URL/server-linux-amd64/manifests}"
KUBE_MANIFESTS_TAR_HASH=$(curl ${KUBE_MANIFESTS_TAR_URL} --silent --show-error | ${sha1sum} | awk '{print $1}')
else
echo "Version doesn't match regexp" >&2
exit 1
fi
if ! SERVER_BINARY_TAR_HASH=$(curl -Ss --fail "${SERVER_BINARY_TAR_URL}.sha1"); then
echo "Failure trying to curl release .sha1"
fi
if ! SALT_TAR_HASH=$(curl -Ss --fail "${SALT_TAR_URL}.sha1"); then
echo "Failure trying to curl Salt tar .sha1"
fi
if ! curl -Ss --head "${SERVER_BINARY_TAR_URL}" >&/dev/null; then
echo "Can't find release at ${SERVER_BINARY_TAR_URL}" >&2
exit 1
fi
if ! curl -Ss --head "${SALT_TAR_URL}" >&/dev/null; then
echo "Can't find Salt tar at ${SALT_TAR_URL}" >&2
exit 1
fi
}
# Search for the specified tarball in the various known output locations,
# echoing the location if found.
#
# Assumed vars:
# KUBE_ROOT
#
# Args:
# $1 name of tarball to search for
function find-tar() {
local -r tarball=$1
locations=(
"${KUBE_ROOT}/server/${tarball}"
"${KUBE_ROOT}/_output/release-tars/${tarball}"
"${KUBE_ROOT}/bazel-bin/build/release-tars/${tarball}"
)
location=$( (ls -t "${locations[@]}" 2>/dev/null || true) | head -1 )
if [[ ! -f "${location}" ]]; then
echo "!!! Cannot find ${tarball}" >&2
exit 1
fi
echo "${location}"
}
# Verify and find the various tar files that we are going to use on the server.
#
# Assumed vars:
# KUBE_ROOT
# Vars set:
# SERVER_BINARY_TAR
# SALT_TAR
# KUBE_MANIFESTS_TAR
function find-release-tars() {
SERVER_BINARY_TAR=$(find-tar kubernetes-server-linux-amd64.tar.gz)
SALT_TAR=$(find-tar kubernetes-salt.tar.gz)
# This tarball is used by GCI, Ubuntu Trusty, and Container Linux.
KUBE_MANIFESTS_TAR=
if [[ "${MASTER_OS_DISTRIBUTION:-}" == "trusty" || "${MASTER_OS_DISTRIBUTION:-}" == "gci" || "${MASTER_OS_DISTRIBUTION:-}" == "container-linux" || "${MASTER_OS_DISTRIBUTION:-}" == "ubuntu" ]] || \
[[ "${NODE_OS_DISTRIBUTION:-}" == "trusty" || "${NODE_OS_DISTRIBUTION:-}" == "gci" || "${NODE_OS_DISTRIBUTION:-}" == "container-linux" || "${NODE_OS_DISTRIBUTION:-}" == "ubuntu" ]] ; then
KUBE_MANIFESTS_TAR=$(find-tar kubernetes-manifests.tar.gz)
fi
}
# Discover the git version of the current build package
#
# Assumed vars:
# KUBE_ROOT
# Vars set:
# KUBE_GIT_VERSION
function find-release-version() {
KUBE_GIT_VERSION=""
if [[ -f "${KUBE_ROOT}/version" ]]; then
KUBE_GIT_VERSION="$(cat ${KUBE_ROOT}/version)"
fi
if [[ -f "${KUBE_ROOT}/_output/release-stage/full/kubernetes/version" ]]; then
KUBE_GIT_VERSION="$(cat ${KUBE_ROOT}/_output/release-stage/full/kubernetes/version)"
fi
if [[ -z "${KUBE_GIT_VERSION}" ]]; then
echo "!!! Cannot find release version"
exit 1
fi
}
function stage-images() {
find-release-version
find-release-tars
KUBE_IMAGE_TAG="$(echo """${KUBE_GIT_VERSION}""" | sed 's/+/-/g')"
local docker_wrapped_binaries=(
"kube-apiserver"
"kube-controller-manager"
"kube-scheduler"
"kube-proxy"
)
local docker_cmd=("docker")
if [[ "${KUBE_DOCKER_REGISTRY}" == "gcr.io/"* ]]; then
local docker_push_cmd=("gcloud" "docker")
else
local docker_push_cmd=("${docker_cmd[@]}")
fi
local temp_dir="$(mktemp -d -t 'kube-server-XXXX')"
tar xzfv "${SERVER_BINARY_TAR}" -C "${temp_dir}" &> /dev/null
for binary in "${docker_wrapped_binaries[@]}"; do
local docker_tag="$(cat ${temp_dir}/kubernetes/server/bin/${binary}.docker_tag)"
(
"${docker_cmd[@]}" load -i "${temp_dir}/kubernetes/server/bin/${binary}.tar"
"${docker_cmd[@]}" rmi "${KUBE_DOCKER_REGISTRY}/${binary}:${KUBE_IMAGE_TAG}" 2>/dev/null || true
"${docker_cmd[@]}" tag "gcr.io/google_containers/${binary}:${docker_tag}" "${KUBE_DOCKER_REGISTRY}/${binary}:${KUBE_IMAGE_TAG}"
"${docker_push_cmd[@]}" push "${KUBE_DOCKER_REGISTRY}/${binary}:${KUBE_IMAGE_TAG}"
) &> "${temp_dir}/${binary}-push.log" &
done
kube::util::wait-for-jobs || {
kube::log::error "unable to push images. See ${temp_dir}/*.log for more info."
return 1
}
rm -rf "${temp_dir}"
return 0
}
# Quote something appropriate for a yaml string.
#
# TODO(zmerlynn): Note that this function doesn't so much "quote" as
# "strip out quotes", and we really should be using a YAML library for
# this, but PyYAML isn't shipped by default, and *rant rant rant ... SIGH*
function yaml-quote {
echo "'$(echo "${@:-}" | sed -e "s/'/''/g")'"
}
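# Hedged example (not part of the original file) of what yaml-quote emits:
#   yaml-quote "it's a value"   # prints: 'it''s a value'
#   yaml-quote ""               # prints: ''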
# Builds the RUNTIME_CONFIG var from other feature enable options (such as
# features in alpha)
function build-runtime-config() {
# There is nothing to do here for now. Just using this function as a placeholder.
:
}
# Writes the cluster name into a temporary file.
# Assumed vars
# CLUSTER_NAME
function write-cluster-name {
cat >"${KUBE_TEMP}/cluster-name.txt" << EOF
${CLUSTER_NAME}
EOF
}
function write-master-env {
# If the user requested that the master be part of the cluster, set the
# environment variable to program the master kubelet to register itself.
if [[ "${REGISTER_MASTER_KUBELET:-}" == "true" && -z "${KUBELET_APISERVER:-}" ]]; then
KUBELET_APISERVER="${MASTER_NAME}"
fi
if [[ -z "${KUBERNETES_MASTER_NAME:-}" ]]; then
KUBERNETES_MASTER_NAME="${MASTER_NAME}"
fi
build-kube-env true "${KUBE_TEMP}/master-kube-env.yaml"
build-kube-master-certs "${KUBE_TEMP}/kube-master-certs.yaml"
}
function write-node-env {
if [[ -z "${KUBERNETES_MASTER_NAME:-}" ]]; then
KUBERNETES_MASTER_NAME="${MASTER_NAME}"
fi
build-kube-env false "${KUBE_TEMP}/node-kube-env.yaml"
}
function build-kube-master-certs {
local file=$1
rm -f ${file}
cat >$file <<EOF
KUBEAPISERVER_CERT: $(yaml-quote ${KUBEAPISERVER_CERT_BASE64:-})
KUBEAPISERVER_KEY: $(yaml-quote ${KUBEAPISERVER_KEY_BASE64:-})
CA_KEY: $(yaml-quote ${CA_KEY_BASE64:-})
AGGREGATOR_CA_KEY: $(yaml-quote ${AGGREGATOR_CA_KEY_BASE64:-})
REQUESTHEADER_CA_CERT: $(yaml-quote ${REQUESTHEADER_CA_CERT_BASE64:-})
PROXY_CLIENT_CERT: $(yaml-quote ${PROXY_CLIENT_CERT_BASE64:-})
PROXY_CLIENT_KEY: $(yaml-quote ${PROXY_CLIENT_KEY_BASE64:-})
EOF
}
# $1: if 'true', we're building a master yaml, else a node
function build-kube-env {
local master=$1
local file=$2
local server_binary_tar_url=$SERVER_BINARY_TAR_URL
local salt_tar_url=$SALT_TAR_URL
local kube_manifests_tar_url="${KUBE_MANIFESTS_TAR_URL:-}"
if [[ "${master}" == "true" && "${MASTER_OS_DISTRIBUTION}" == "container-linux" ]] || \
[[ "${master}" == "false" && "${NODE_OS_DISTRIBUTION}" == "container-linux" ]] || \
[[ "${master}" == "true" && "${MASTER_OS_DISTRIBUTION}" == "ubuntu" ]] || \
[[ "${master}" == "false" && "${NODE_OS_DISTRIBUTION}" == "ubuntu" ]] ; then
# TODO: Support fallback .tar.gz settings on Container Linux
server_binary_tar_url=$(split_csv "${SERVER_BINARY_TAR_URL}")
salt_tar_url=$(split_csv "${SALT_TAR_URL}")
kube_manifests_tar_url=$(split_csv "${KUBE_MANIFESTS_TAR_URL}")
fi
build-runtime-config
gen-uid
rm -f ${file}
cat >$file <<EOF
CLUSTER_NAME: $(yaml-quote ${CLUSTER_NAME})
ENV_TIMESTAMP: $(yaml-quote $(date -u +%Y-%m-%dT%T%z))
INSTANCE_PREFIX: $(yaml-quote ${INSTANCE_PREFIX})
NODE_INSTANCE_PREFIX: $(yaml-quote ${NODE_INSTANCE_PREFIX})
NODE_TAGS: $(yaml-quote ${NODE_TAGS:-})
NODE_NETWORK: $(yaml-quote ${NETWORK:-})
NODE_SUBNETWORK: $(yaml-quote ${SUBNETWORK:-})
CLUSTER_IP_RANGE: $(yaml-quote ${CLUSTER_IP_RANGE:-10.244.0.0/16})
SERVER_BINARY_TAR_URL: $(yaml-quote ${server_binary_tar_url})
SERVER_BINARY_TAR_HASH: $(yaml-quote ${SERVER_BINARY_TAR_HASH})
PROJECT_ID: $(yaml-quote ${PROJECT})
NETWORK_PROJECT_ID: $(yaml-quote ${NETWORK_PROJECT})
SALT_TAR_URL: $(yaml-quote ${salt_tar_url})
SALT_TAR_HASH: $(yaml-quote ${SALT_TAR_HASH})
SERVICE_CLUSTER_IP_RANGE: $(yaml-quote ${SERVICE_CLUSTER_IP_RANGE})
KUBERNETES_MASTER_NAME: $(yaml-quote ${KUBERNETES_MASTER_NAME})
ALLOCATE_NODE_CIDRS: $(yaml-quote ${ALLOCATE_NODE_CIDRS:-false})
ENABLE_CLUSTER_MONITORING: $(yaml-quote ${ENABLE_CLUSTER_MONITORING:-none})
ENABLE_METRICS_SERVER: $(yaml-quote ${ENABLE_METRICS_SERVER:-false})
DOCKER_REGISTRY_MIRROR_URL: $(yaml-quote ${DOCKER_REGISTRY_MIRROR_URL:-})
ENABLE_L7_LOADBALANCING: $(yaml-quote ${ENABLE_L7_LOADBALANCING:-none})
ENABLE_CLUSTER_LOGGING: $(yaml-quote ${ENABLE_CLUSTER_LOGGING:-false})
ENABLE_CLUSTER_UI: $(yaml-quote ${ENABLE_CLUSTER_UI:-false})
ENABLE_NODE_PROBLEM_DETECTOR: $(yaml-quote ${ENABLE_NODE_PROBLEM_DETECTOR:-none})
NODE_PROBLEM_DETECTOR_VERSION: $(yaml-quote ${NODE_PROBLEM_DETECTOR_VERSION:-})
NODE_PROBLEM_DETECTOR_TAR_HASH: $(yaml-quote ${NODE_PROBLEM_DETECTOR_TAR_HASH:-})
ENABLE_NODE_LOGGING: $(yaml-quote ${ENABLE_NODE_LOGGING:-false})
ENABLE_RESCHEDULER: $(yaml-quote ${ENABLE_RESCHEDULER:-false})
LOGGING_DESTINATION: $(yaml-quote ${LOGGING_DESTINATION:-})
ELASTICSEARCH_LOGGING_REPLICAS: $(yaml-quote ${ELASTICSEARCH_LOGGING_REPLICAS:-})
ENABLE_CLUSTER_DNS: $(yaml-quote ${ENABLE_CLUSTER_DNS:-false})
ENABLE_CLUSTER_REGISTRY: $(yaml-quote ${ENABLE_CLUSTER_REGISTRY:-false})
CLUSTER_REGISTRY_DISK: $(yaml-quote ${CLUSTER_REGISTRY_DISK:-})
CLUSTER_REGISTRY_DISK_SIZE: $(yaml-quote ${CLUSTER_REGISTRY_DISK_SIZE:-})
DNS_SERVER_IP: $(yaml-quote ${DNS_SERVER_IP:-})
DNS_DOMAIN: $(yaml-quote ${DNS_DOMAIN:-})
ENABLE_DNS_HORIZONTAL_AUTOSCALER: $(yaml-quote ${ENABLE_DNS_HORIZONTAL_AUTOSCALER:-false})
KUBELET_TOKEN: $(yaml-quote ${KUBELET_TOKEN:-})
KUBE_PROXY_DAEMONSET: $(yaml-quote ${KUBE_PROXY_DAEMONSET:-false})
KUBE_PROXY_TOKEN: $(yaml-quote ${KUBE_PROXY_TOKEN:-})
NODE_PROBLEM_DETECTOR_TOKEN: $(yaml-quote ${NODE_PROBLEM_DETECTOR_TOKEN:-})
ADMISSION_CONTROL: $(yaml-quote ${ADMISSION_CONTROL:-})
MASTER_IP_RANGE: $(yaml-quote ${MASTER_IP_RANGE})
RUNTIME_CONFIG: $(yaml-quote ${RUNTIME_CONFIG})
CA_CERT: $(yaml-quote ${CA_CERT_BASE64:-})
KUBELET_CERT: $(yaml-quote ${KUBELET_CERT_BASE64:-})
KUBELET_KEY: $(yaml-quote ${KUBELET_KEY_BASE64:-})
NETWORK_PROVIDER: $(yaml-quote ${NETWORK_PROVIDER:-})
NETWORK_POLICY_PROVIDER: $(yaml-quote ${NETWORK_POLICY_PROVIDER:-})
PREPULL_E2E_IMAGES: $(yaml-quote ${PREPULL_E2E_IMAGES:-})
HAIRPIN_MODE: $(yaml-quote ${HAIRPIN_MODE:-})
SOFTLOCKUP_PANIC: $(yaml-quote ${SOFTLOCKUP_PANIC:-})
OPENCONTRAIL_TAG: $(yaml-quote ${OPENCONTRAIL_TAG:-})
OPENCONTRAIL_KUBERNETES_TAG: $(yaml-quote ${OPENCONTRAIL_KUBERNETES_TAG:-})
OPENCONTRAIL_PUBLIC_SUBNET: $(yaml-quote ${OPENCONTRAIL_PUBLIC_SUBNET:-})
E2E_STORAGE_TEST_ENVIRONMENT: $(yaml-quote ${E2E_STORAGE_TEST_ENVIRONMENT:-})
KUBE_IMAGE_TAG: $(yaml-quote ${KUBE_IMAGE_TAG:-})
KUBE_DOCKER_REGISTRY: $(yaml-quote ${KUBE_DOCKER_REGISTRY:-})
KUBE_ADDON_REGISTRY: $(yaml-quote ${KUBE_ADDON_REGISTRY:-})
MULTIZONE: $(yaml-quote ${MULTIZONE:-})
NON_MASQUERADE_CIDR: $(yaml-quote ${NON_MASQUERADE_CIDR:-})
KUBE_UID: $(yaml-quote ${KUBE_UID:-})
ENABLE_DEFAULT_STORAGE_CLASS: $(yaml-quote ${ENABLE_DEFAULT_STORAGE_CLASS:-})
ENABLE_APISERVER_BASIC_AUDIT: $(yaml-quote ${ENABLE_APISERVER_BASIC_AUDIT:-})
ENABLE_APISERVER_ADVANCED_AUDIT: $(yaml-quote ${ENABLE_APISERVER_ADVANCED_AUDIT:-})
ENABLE_CACHE_MUTATION_DETECTOR: $(yaml-quote ${ENABLE_CACHE_MUTATION_DETECTOR:-false})
ENABLE_PATCH_CONVERSION_DETECTOR: $(yaml-quote ${ENABLE_PATCH_CONVERSION_DETECTOR:-false})
ADVANCED_AUDIT_BACKEND: $(yaml-quote ${ADVANCED_AUDIT_BACKEND:-log})
GCE_API_ENDPOINT: $(yaml-quote ${GCE_API_ENDPOINT:-})
PROMETHEUS_TO_SD_ENDPOINT: $(yaml-quote ${PROMETHEUS_TO_SD_ENDPOINT:-})
PROMETHEUS_TO_SD_PREFIX: $(yaml-quote ${PROMETHEUS_TO_SD_PREFIX:-})
ENABLE_PROMETHEUS_TO_SD: $(yaml-quote ${ENABLE_PROMETHEUS_TO_SD:-false})
ENABLE_POD_PRIORITY: $(yaml-quote ${ENABLE_POD_PRIORITY:-})
EOF
if [ -n "${KUBELET_PORT:-}" ]; then
cat >>$file <<EOF
KUBELET_PORT: $(yaml-quote ${KUBELET_PORT})
EOF
fi
if [ -n "${KUBE_APISERVER_REQUEST_TIMEOUT:-}" ]; then
cat >>$file <<EOF
KUBE_APISERVER_REQUEST_TIMEOUT: $(yaml-quote ${KUBE_APISERVER_REQUEST_TIMEOUT})
EOF
fi
if [ -n "${TERMINATED_POD_GC_THRESHOLD:-}" ]; then
cat >>$file <<EOF
TERMINATED_POD_GC_THRESHOLD: $(yaml-quote ${TERMINATED_POD_GC_THRESHOLD})
EOF
fi
if [[ "${master}" == "true" && ("${MASTER_OS_DISTRIBUTION}" == "trusty" || "${MASTER_OS_DISTRIBUTION}" == "gci" || "${MASTER_OS_DISTRIBUTION}" == "container-linux") || "${MASTER_OS_DISTRIBUTION}" == "ubuntu" ]] || \
[[ "${master}" == "false" && ("${NODE_OS_DISTRIBUTION}" == "trusty" || "${NODE_OS_DISTRIBUTION}" == "gci" || "${NODE_OS_DISTRIBUTION}" == "container-linux") || "${NODE_OS_DISTRIBUTION}" = "ubuntu" ]] ; then
cat >>$file <<EOF
KUBE_MANIFESTS_TAR_URL: $(yaml-quote ${kube_manifests_tar_url})
KUBE_MANIFESTS_TAR_HASH: $(yaml-quote ${KUBE_MANIFESTS_TAR_HASH})
EOF
fi
if [ -n "${TEST_CLUSTER:-}" ]; then
cat >>$file <<EOF
TEST_CLUSTER: $(yaml-quote ${TEST_CLUSTER})
EOF
fi
if [ -n "${KUBELET_TEST_ARGS:-}" ]; then
cat >>$file <<EOF
KUBELET_TEST_ARGS: $(yaml-quote ${KUBELET_TEST_ARGS})
EOF
fi
if [ -n "${NODE_KUBELET_TEST_ARGS:-}" ]; then
cat >>$file <<EOF
NODE_KUBELET_TEST_ARGS: $(yaml-quote ${NODE_KUBELET_TEST_ARGS})
EOF
fi
if [ -n "${MASTER_KUBELET_TEST_ARGS:-}" ]; then
cat >>$file <<EOF
MASTER_KUBELET_TEST_ARGS: $(yaml-quote ${MASTER_KUBELET_TEST_ARGS})
EOF
fi
if [ -n "${KUBELET_TEST_LOG_LEVEL:-}" ]; then
cat >>$file <<EOF
KUBELET_TEST_LOG_LEVEL: $(yaml-quote ${KUBELET_TEST_LOG_LEVEL})
EOF
fi
if [ -n "${DOCKER_TEST_LOG_LEVEL:-}" ]; then
cat >>$file <<EOF
DOCKER_TEST_LOG_LEVEL: $(yaml-quote ${DOCKER_TEST_LOG_LEVEL})
EOF
fi
if [ -n "${DOCKER_LOG_DRIVER:-}" ]; then
cat >>$file <<EOF
DOCKER_LOG_DRIVER: $(yaml-quote ${DOCKER_LOG_DRIVER})
EOF
fi
if [ -n "${DOCKER_LOG_MAX_SIZE:-}" ]; then
cat >>$file <<EOF
DOCKER_LOG_MAX_SIZE: $(yaml-quote ${DOCKER_LOG_MAX_SIZE})
EOF
fi
if [ -n "${DOCKER_LOG_MAX_FILE:-}" ]; then
cat >>$file <<EOF
DOCKER_LOG_MAX_FILE: $(yaml-quote ${DOCKER_LOG_MAX_FILE})
EOF
fi
if [ -n "${ENABLE_CUSTOM_METRICS:-}" ]; then
cat >>$file <<EOF
ENABLE_CUSTOM_METRICS: $(yaml-quote ${ENABLE_CUSTOM_METRICS})
EOF
fi
if [ -n "${FEATURE_GATES:-}" ]; then
cat >>$file <<EOF
FEATURE_GATES: $(yaml-quote ${FEATURE_GATES})
EOF
fi
if [[ "${master}" == "true" && "${MASTER_OS_DISTRIBUTION}" == "gci" ]] ||
[[ "${master}" == "false" && "${NODE_OS_DISTRIBUTION}" == "gci" ]]; then
cat >>$file <<EOF
VOLUME_PLUGIN_DIR: $(yaml-quote ${VOLUME_PLUGIN_DIR:-/etc/srv/kubernetes/kubelet-plugins/volume/exec})
EOF
fi
if [ -n "${PROVIDER_VARS:-}" ]; then
local var_name
local var_value
for var_name in ${PROVIDER_VARS}; do
eval "local var_value=\$(yaml-quote \${${var_name}})"
cat >>$file <<EOF
${var_name}: ${var_value}
EOF
done
fi
if [[ "${master}" == "true" ]]; then
# Master-only env vars.
cat >>$file <<EOF
KUBERNETES_MASTER: $(yaml-quote "true")
KUBE_USER: $(yaml-quote ${KUBE_USER})
KUBE_PASSWORD: $(yaml-quote ${KUBE_PASSWORD})
KUBE_BEARER_TOKEN: $(yaml-quote ${KUBE_BEARER_TOKEN})
MASTER_CERT: $(yaml-quote ${MASTER_CERT_BASE64:-})
MASTER_KEY: $(yaml-quote ${MASTER_KEY_BASE64:-})
KUBECFG_CERT: $(yaml-quote ${KUBECFG_CERT_BASE64:-})
KUBECFG_KEY: $(yaml-quote ${KUBECFG_KEY_BASE64:-})
KUBELET_APISERVER: $(yaml-quote ${KUBELET_APISERVER:-})
ENABLE_MANIFEST_URL: $(yaml-quote ${ENABLE_MANIFEST_URL:-false})
MANIFEST_URL: $(yaml-quote ${MANIFEST_URL:-})
MANIFEST_URL_HEADER: $(yaml-quote ${MANIFEST_URL_HEADER:-})
NUM_NODES: $(yaml-quote ${NUM_NODES})
STORAGE_BACKEND: $(yaml-quote ${STORAGE_BACKEND:-etcd3})
STORAGE_MEDIA_TYPE: $(yaml-quote ${STORAGE_MEDIA_TYPE:-})
ENABLE_GARBAGE_COLLECTOR: $(yaml-quote ${ENABLE_GARBAGE_COLLECTOR:-})
ENABLE_LEGACY_ABAC: $(yaml-quote ${ENABLE_LEGACY_ABAC:-})
MASTER_ADVERTISE_ADDRESS: $(yaml-quote ${MASTER_ADVERTISE_ADDRESS:-})
ETCD_CA_KEY: $(yaml-quote ${ETCD_CA_KEY_BASE64:-})
ETCD_CA_CERT: $(yaml-quote ${ETCD_CA_CERT_BASE64:-})
ETCD_PEER_KEY: $(yaml-quote ${ETCD_PEER_KEY_BASE64:-})
ETCD_PEER_CERT: $(yaml-quote ${ETCD_PEER_CERT_BASE64:-})
EOF
# KUBE_APISERVER_REQUEST_TIMEOUT_SEC (if set) controls the --request-timeout
# flag
if [ -n "${KUBE_APISERVER_REQUEST_TIMEOUT_SEC:-}" ]; then
cat >>$file <<EOF
KUBE_APISERVER_REQUEST_TIMEOUT_SEC: $(yaml-quote ${KUBE_APISERVER_REQUEST_TIMEOUT_SEC})
EOF
fi
# ETCD_IMAGE (if set) allows to use a custom etcd image.
if [ -n "${ETCD_IMAGE:-}" ]; then
cat >>$file <<EOF
ETCD_IMAGE: $(yaml-quote ${ETCD_IMAGE})
EOF
fi
# ETCD_VERSION (if set) allows you to use custom version of etcd.
# The main purpose of using it may be rollback of etcd v3 API,
# where we need 3.0.* image, but are rolling back to 2.3.7.
if [ -n "${ETCD_VERSION:-}" ]; then
cat >>$file <<EOF
ETCD_VERSION: $(yaml-quote ${ETCD_VERSION})
EOF
fi
if [ -n "${APISERVER_TEST_ARGS:-}" ]; then
cat >>$file <<EOF
APISERVER_TEST_ARGS: $(yaml-quote ${APISERVER_TEST_ARGS})
EOF
fi
if [ -n "${APISERVER_TEST_LOG_LEVEL:-}" ]; then
cat >>$file <<EOF
APISERVER_TEST_LOG_LEVEL: $(yaml-quote ${APISERVER_TEST_LOG_LEVEL})
EOF
fi
if [ -n "${CONTROLLER_MANAGER_TEST_ARGS:-}" ]; then
cat >>$file <<EOF
CONTROLLER_MANAGER_TEST_ARGS: $(yaml-quote ${CONTROLLER_MANAGER_TEST_ARGS})
EOF
fi
if [ -n "${CONTROLLER_MANAGER_TEST_LOG_LEVEL:-}" ]; then
cat >>$file <<EOF
CONTROLLER_MANAGER_TEST_LOG_LEVEL: $(yaml-quote ${CONTROLLER_MANAGER_TEST_LOG_LEVEL})
EOF
fi
if [ -n "${SCHEDULER_TEST_ARGS:-}" ]; then
cat >>$file <<EOF
SCHEDULER_TEST_ARGS: $(yaml-quote ${SCHEDULER_TEST_ARGS})
EOF
fi
if [ -n "${SCHEDULER_TEST_LOG_LEVEL:-}" ]; then
cat >>$file <<EOF
SCHEDULER_TEST_LOG_LEVEL: $(yaml-quote ${SCHEDULER_TEST_LOG_LEVEL})
EOF
fi
if [ -n "${INITIAL_ETCD_CLUSTER:-}" ]; then
cat >>$file <<EOF
INITIAL_ETCD_CLUSTER: $(yaml-quote ${INITIAL_ETCD_CLUSTER})
EOF
fi
if [ -n "${INITIAL_ETCD_CLUSTER_STATE:-}" ]; then
cat >>$file <<EOF
INITIAL_ETCD_CLUSTER_STATE: $(yaml-quote ${INITIAL_ETCD_CLUSTER_STATE})
EOF
fi
if [ -n "${ETCD_QUORUM_READ:-}" ]; then
cat >>$file <<EOF
ETCD_QUORUM_READ: $(yaml-quote ${ETCD_QUORUM_READ})
EOF
fi
else
# Node-only env vars.
cat >>$file <<EOF
KUBERNETES_MASTER: $(yaml-quote "false")
ZONE: $(yaml-quote ${ZONE})
EXTRA_DOCKER_OPTS: $(yaml-quote ${EXTRA_DOCKER_OPTS:-})
EOF
if [ -n "${KUBEPROXY_TEST_ARGS:-}" ]; then
cat >>$file <<EOF
KUBEPROXY_TEST_ARGS: $(yaml-quote ${KUBEPROXY_TEST_ARGS})
EOF
fi
if [ -n "${KUBEPROXY_TEST_LOG_LEVEL:-}" ]; then
cat >>$file <<EOF
KUBEPROXY_TEST_LOG_LEVEL: $(yaml-quote ${KUBEPROXY_TEST_LOG_LEVEL})
EOF
fi
fi
if [ -n "${NODE_LABELS:-}" ]; then
cat >>$file <<EOF
NODE_LABELS: $(yaml-quote ${NODE_LABELS})
EOF
fi
if [ -n "${EVICTION_HARD:-}" ]; then
cat >>$file <<EOF
EVICTION_HARD: $(yaml-quote ${EVICTION_HARD})
EOF
fi
if [[ "${master}" == "true" && "${MASTER_OS_DISTRIBUTION}" == "container-linux" ]] || \
[[ "${master}" == "false" && "${NODE_OS_DISTRIBUTION}" == "container-linux" ]]; then
# Container-Linux-only env vars. TODO(yifan): Make them available on other distros.
cat >>$file <<EOF
KUBERNETES_CONTAINER_RUNTIME: $(yaml-quote ${CONTAINER_RUNTIME:-rkt})
RKT_VERSION: $(yaml-quote ${RKT_VERSION:-})
RKT_PATH: $(yaml-quote ${RKT_PATH:-})
RKT_STAGE1_IMAGE: $(yaml-quote ${RKT_STAGE1_IMAGE:-})
EOF
fi
if [[ "${ENABLE_CLUSTER_AUTOSCALER}" == "true" ]]; then
cat >>$file <<EOF
ENABLE_CLUSTER_AUTOSCALER: $(yaml-quote ${ENABLE_CLUSTER_AUTOSCALER})
AUTOSCALER_MIG_CONFIG: $(yaml-quote ${AUTOSCALER_MIG_CONFIG})
AUTOSCALER_EXPANDER_CONFIG: $(yaml-quote ${AUTOSCALER_EXPANDER_CONFIG})
EOF
fi
# Federation specific environment variables.
if [[ -n "${FEDERATION:-}" ]]; then
cat >>$file <<EOF
FEDERATION: $(yaml-quote ${FEDERATION})
EOF
fi
if [ -n "${FEDERATION_NAME:-}" ]; then
cat >>$file <<EOF
FEDERATION_NAME: $(yaml-quote ${FEDERATION_NAME})
EOF
fi
if [ -n "${DNS_ZONE_NAME:-}" ]; then
cat >>$file <<EOF
DNS_ZONE_NAME: $(yaml-quote ${DNS_ZONE_NAME})
EOF
fi
if [ -n "${SCHEDULING_ALGORITHM_PROVIDER:-}" ]; then
cat >>$file <<EOF
SCHEDULING_ALGORITHM_PROVIDER: $(yaml-quote ${SCHEDULING_ALGORITHM_PROVIDER})
EOF
fi
}
function sha1sum-file() {
if which sha1sum >/dev/null 2>&1; then
sha1sum "$1" | awk '{ print $1 }'
else
shasum -a1 "$1" | awk '{ print $1 }'
fi
}
# Create certificate pairs for the cluster.
# $1: The public IP for the master.
#
# These are used for static cert distribution (e.g. static clustering) at
# cluster creation time. This will be obsoleted once we implement dynamic
# clustering.
#
# The following certificate pairs are created:
#
# - ca (the cluster's certificate authority)
# - server
# - kubelet
# - kubecfg (for kubectl)
#
# TODO(roberthbailey): Replace easyrsa with a simple Go program to generate
# the certs that we need.
#
# Assumed vars
# KUBE_TEMP
# MASTER_NAME
#
# Vars set:
# CERT_DIR
# CA_CERT_BASE64
# MASTER_CERT_BASE64
# MASTER_KEY_BASE64
# KUBELET_CERT_BASE64
# KUBELET_KEY_BASE64
# KUBECFG_CERT_BASE64
# KUBECFG_KEY_BASE64
function create-certs {
local -r primary_cn="${1}"
# Determine extra certificate names for master
local octets=($(echo "${SERVICE_CLUSTER_IP_RANGE}" | sed -e 's|/.*||' -e 's/\./ /g'))
((octets[3]+=1))
local -r service_ip=$(echo "${octets[*]}" | sed 's/ /./g')
local sans=""
for extra in $@; do
if [[ -n "${extra}" ]]; then
sans="${sans}IP:${extra},"
fi
done
sans="${sans}IP:${service_ip},DNS:kubernetes,DNS:kubernetes.default,DNS:kubernetes.default.svc,DNS:kubernetes.default.svc.${DNS_DOMAIN},DNS:${MASTER_NAME}"
echo "Generating certs for alternate-names: ${sans}"
setup-easyrsa
PRIMARY_CN="${primary_cn}" SANS="${sans}" generate-certs
AGGREGATOR_PRIMARY_CN="${primary_cn}" AGGREGATOR_SANS="${sans}" generate-aggregator-certs
CERT_DIR="${KUBE_TEMP}/easy-rsa-master/easyrsa3"
# By default, linux wraps base64 output every 76 cols, so we use 'tr -d' to remove whitespaces.
# Note 'base64 -w0' doesn't work on Mac OS X, which has different flags.
CA_KEY_BASE64=$(cat "${CERT_DIR}/pki/private/ca.key" | base64 | tr -d '\r\n')
CA_CERT_BASE64=$(cat "${CERT_DIR}/pki/ca.crt" | base64 | tr -d '\r\n')
MASTER_CERT_BASE64=$(cat "${CERT_DIR}/pki/issued/${MASTER_NAME}.crt" | base64 | tr -d '\r\n')
MASTER_KEY_BASE64=$(cat "${CERT_DIR}/pki/private/${MASTER_NAME}.key" | base64 | tr -d '\r\n')
KUBELET_CERT_BASE64=$(cat "${CERT_DIR}/pki/issued/kubelet.crt" | base64 | tr -d '\r\n')
KUBELET_KEY_BASE64=$(cat "${CERT_DIR}/pki/private/kubelet.key" | base64 | tr -d '\r\n')
KUBECFG_CERT_BASE64=$(cat "${CERT_DIR}/pki/issued/kubecfg.crt" | base64 | tr -d '\r\n')
KUBECFG_KEY_BASE64=$(cat "${CERT_DIR}/pki/private/kubecfg.key" | base64 | tr -d '\r\n')
KUBEAPISERVER_CERT_BASE64=$(cat "${CERT_DIR}/pki/issued/kube-apiserver.crt" | base64 | tr -d '\r\n')
KUBEAPISERVER_KEY_BASE64=$(cat "${CERT_DIR}/pki/private/kube-apiserver.key" | base64 | tr -d '\r\n')
# Setting up an additional directory (beyond pki) as it is the simplest way to
# ensure we get a different CA pair to sign the proxy-client certs, and whose
# CA public key we can send to the user-apiserver to validate communication.
AGGREGATOR_CERT_DIR="${KUBE_TEMP}/easy-rsa-master/aggregator"
AGGREGATOR_CA_KEY_BASE64=$(cat "${AGGREGATOR_CERT_DIR}/pki/private/ca.key" | base64 | tr -d '\r\n')
REQUESTHEADER_CA_CERT_BASE64=$(cat "${AGGREGATOR_CERT_DIR}/pki/ca.crt" | base64 | tr -d '\r\n')
PROXY_CLIENT_CERT_BASE64=$(cat "${AGGREGATOR_CERT_DIR}/pki/issued/proxy-client.crt" | base64 | tr -d '\r\n')
PROXY_CLIENT_KEY_BASE64=$(cat "${AGGREGATOR_CERT_DIR}/pki/private/proxy-client.key" | base64 | tr -d '\r\n')
}
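# Hedged usage sketch (not part of the original file): the IPs below are
# invented. The first argument becomes the primary CN; every argument is also
# added as an IP SAN on the master certificate:
#   create-certs "203.0.113.10" "10.0.0.1"
# after which CERT_DIR and the *_BASE64 variables listed above are populated.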
function setup-easyrsa {
local -r cert_create_debug_output=$(mktemp "${KUBE_TEMP}/cert_create_debug_output.XXX")
# Note: This was heavily cribbed from make-ca-cert.sh
(set -x
cd "${KUBE_TEMP}"
curl -L -O --connect-timeout 20 --retry 6 --retry-delay 2 https://storage.googleapis.com/kubernetes-release/easy-rsa/easy-rsa.tar.gz
tar xzf easy-rsa.tar.gz
mkdir easy-rsa-master/kubelet
cp -r easy-rsa-master/easyrsa3/* easy-rsa-master/kubelet
mkdir easy-rsa-master/aggregator
cp -r easy-rsa-master/easyrsa3/* easy-rsa-master/aggregator) &>${cert_create_debug_output} || {
# If there was an error in the subshell, just die.
# TODO(roberthbailey): add better error handling here
cat "${cert_create_debug_output}" >&2
echo "=== Failed to setup easy-rsa: Aborting ===" >&2
exit 2
}
}
# Runs the easy RSA commands to generate certificate files.
# The generated files are at ${KUBE_TEMP}/easy-rsa-master/easyrsa3
#
# Assumed vars
# KUBE_TEMP
# MASTER_NAME
# PRIMARY_CN: Primary canonical name
# SANS: Subject alternate names
#
#
function generate-certs {
local -r cert_create_debug_output=$(mktemp "${KUBE_TEMP}/cert_create_debug_output.XXX")
# Note: This was heavily cribbed from make-ca-cert.sh
(set -x
cd "${KUBE_TEMP}/easy-rsa-master/easyrsa3"
./easyrsa init-pki
# this puts the cert into pki/ca.crt and the key into pki/private/ca.key
./easyrsa --batch "--req-cn=${PRIMARY_CN}@$(date +%s)" build-ca nopass
./easyrsa --subject-alt-name="${SANS}" build-server-full "${MASTER_NAME}" nopass
./easyrsa build-client-full kube-apiserver nopass
kube::util::ensure-cfssl "${KUBE_TEMP}/cfssl"
# make the config for the signer
echo '{"signing":{"default":{"expiry":"43800h","usages":["signing","key encipherment","client auth"]}}}' > "ca-config.json"
# create the kubelet client cert with the correct groups
echo '{"CN":"kubelet","names":[{"O":"system:nodes"}],"hosts":[""],"key":{"algo":"rsa","size":2048}}' | "${CFSSL_BIN}" gencert -ca=pki/ca.crt -ca-key=pki/private/ca.key -config=ca-config.json - | "${CFSSLJSON_BIN}" -bare kubelet
mv "kubelet-key.pem" "pki/private/kubelet.key"
mv "kubelet.pem" "pki/issued/kubelet.crt"
rm -f "kubelet.csr"
# Make a superuser client cert with subject "O=system:masters, CN=kubecfg"
./easyrsa --dn-mode=org \
--req-cn=kubecfg --req-org=system:masters \
--req-c= --req-st= --req-city= --req-email= --req-ou= \
build-client-full kubecfg nopass) &>${cert_create_debug_output} || {
# If there was an error in the subshell, just die.
# TODO(roberthbailey): add better error handling here
cat "${cert_create_debug_output}" >&2
echo "=== Failed to generate master certificates: Aborting ===" >&2
exit 2
}
}
# Runs the easy RSA commands to generate aggregator certificate files.
# The generated files are at ${KUBE_TEMP}/easy-rsa-master/aggregator
#
# Assumed vars
# KUBE_TEMP
# AGGREGATOR_MASTER_NAME
# AGGREGATOR_PRIMARY_CN: Primary canonical name
# AGGREGATOR_SANS: Subject alternate names
#
#
function generate-aggregator-certs {
local -r cert_create_debug_output=$(mktemp "${KUBE_TEMP}/cert_create_debug_output.XXX")
# Note: This was heavily cribbed from make-ca-cert.sh
(set -x
cd "${KUBE_TEMP}/easy-rsa-master/aggregator"
./easyrsa init-pki
# this puts the cert into pki/ca.crt and the key into pki/private/ca.key
./easyrsa --batch "--req-cn=${AGGREGATOR_PRIMARY_CN}@$(date +%s)" build-ca nopass
./easyrsa --subject-alt-name="${AGGREGATOR_SANS}" build-server-full "${AGGREGATOR_MASTER_NAME}" nopass
./easyrsa build-client-full aggregator-apiserver nopass
kube::util::ensure-cfssl "${KUBE_TEMP}/cfssl"
# make the config for the signer
echo '{"signing":{"default":{"expiry":"43800h","usages":["signing","key encipherment","client auth"]}}}' > "ca-config.json"
# create the aggregator client cert with the correct groups
echo '{"CN":"aggregator","hosts":[""],"key":{"algo":"rsa","size":2048}}' | "${CFSSL_BIN}" gencert -ca=pki/ca.crt -ca-key=pki/private/ca.key -config=ca-config.json - | "${CFSSLJSON_BIN}" -bare proxy-client
mv "proxy-client-key.pem" "pki/private/proxy-client.key"
mv "proxy-client.pem" "pki/issued/proxy-client.crt"
rm -f "proxy-client.csr"
# Make a superuser client cert with subject "O=system:masters, CN=kubecfg"
./easyrsa --dn-mode=org \
--req-cn=proxy-clientcfg --req-org=system:aggregator \
--req-c= --req-st= --req-city= --req-email= --req-ou= \
build-client-full proxy-clientcfg nopass) &>${cert_create_debug_output} || {
# If there was an error in the subshell, just die.
# TODO(roberthbailey): add better error handling here
cat "${cert_create_debug_output}" >&2
echo "=== Failed to generate aggregator certificates: Aborting ===" >&2
exit 2
}
}
# Run the cfssl command to generate certificate files for the etcd service;
# the certificate files will be saved in the $1 directory.
#
# Optional vars:
# GEN_ETCD_CA_CERT (CA cert encoded with base64 and gzip compression)
# GEN_ETCD_CA_KEY (CA key encoded with base64)
#
# If GEN_ETCD_CA_CERT or GEN_ETCD_CA_KEY is not specified, certs for the CA will be generated.
#
# Args:
# $1 (the directory that certificate files to save)
# $2 (the ip of etcd member)
# $3 (the type of etcd certificates, must be one of client, server, peer)
# $4 (the prefix of the certificate filename, default is $3)
function generate-etcd-cert() {
local cert_dir=${1}
local member_ip=${2}
local type_cert=${3}
local prefix=${4:-"${type_cert}"}
local GEN_ETCD_CA_CERT=${GEN_ETCD_CA_CERT:-}
local GEN_ETCD_CA_KEY=${GEN_ETCD_CA_KEY:-}
mkdir -p "${cert_dir}"
pushd "${cert_dir}"
kube::util::ensure-cfssl .
if [ ! -r "ca-config.json" ]; then
cat >ca-config.json <<EOF
{
"signing": {
"default": {
"expiry": "43800h"
},
"profiles": {
"server": {
"expiry": "43800h",
"usages": [
"signing",
"key encipherment",
"server auth"
]
},
"client": {
"expiry": "43800h",
"usages": [
"signing",
"key encipherment",
"client auth"
]
},
"peer": {
"expiry": "43800h",
"usages": [
"signing",
"key encipherment",
"server auth",
"client auth"
]
}
}
}
}
EOF
fi
if [ ! -r "ca-csr.json" ]; then
cat >ca-csr.json <<EOF
{
"CN": "Kubernetes",
"key": {
"algo": "ecdsa",
"size": 256
},
"names": [
{
"C": "US",
"L": "CA",
"O": "kubernetes.io"
}
]
}
EOF
fi
if [[ -n "${GEN_ETCD_CA_CERT}" && -n "${GEN_ETCD_CA_KEY}" ]]; then
echo "${ca_cert}" | base64 --decode | gunzip > ca.pem
echo "${ca_key}" | base64 --decode > ca-key.pem
fi
if [[ ! -r "ca.pem" || ! -r "ca-key.pem" ]]; then
${CFSSL_BIN} gencert -initca ca-csr.json | ${CFSSLJSON_BIN} -bare ca -
fi
case "${type_cert}" in
client)
echo "Generate client certificates..."
echo '{"CN":"client","hosts":["*"],"key":{"algo":"ecdsa","size":256}}' \
| ${CFSSL_BIN} gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json -profile=client - \
| ${CFSSLJSON_BIN} -bare "${prefix}"
;;
server)
echo "Generate server certificates..."
echo '{"CN":"'${member_ip}'","hosts":[""],"key":{"algo":"ecdsa","size":256}}' \
| ${CFSSL_BIN} gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json -profile=server -hostname="${member_ip},127.0.0.1" - \
| ${CFSSLJSON_BIN} -bare "${prefix}"
;;
peer)
echo "Generate peer certificates..."
echo '{"CN":"'${member_ip}'","hosts":[""],"key":{"algo":"ecdsa","size":256}}' \
| ${CFSSL_BIN} gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json -profile=peer -hostname="${member_ip},127.0.0.1" - \
| ${CFSSLJSON_BIN} -bare "${prefix}"
;;
*)
echo "Unknow, unsupported etcd certs type: ${type_cert}" >&2
echo "Supported type: client, server, peer" >&2
exit 2
esac
popd
}
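# Hedged usage sketch (not part of the original file): the directory, member
# IP and prefix below are invented. Generates ca.pem/ca-key.pem (if absent)
# plus a server cert/key pair named after the optional prefix:
#   generate-etcd-cert "${KUBE_TEMP}/etcd-certs" "10.0.0.5" "server" "etcd-server"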
#
# Using provided master env, extracts value from provided key.
#
# Args:
# $1 master env (kube-env of master; result of calling get-master-env)
# $2 env key to use
function get-env-val() {
local match=`(echo "${1}" | grep -E "^${2}:") || echo ""`
if [[ -z ${match} ]]; then
echo ""
fi
echo ${match} | cut -d : -f 2 | cut -d \' -f 2
}
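# Hedged example (not part of the original file): the kube-env line is made up.
#   env="KUBELET_TOKEN: 'abc123'"
#   get-env-val "${env}" "KUBELET_TOKEN"   # prints: abc123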
# Load the master env by calling get-master-env, and extract important values
function parse-master-env() {
# Get required master env vars
local master_env=$(get-master-env)
KUBELET_TOKEN=$(get-env-val "${master_env}" "KUBELET_TOKEN")
KUBE_PROXY_TOKEN=$(get-env-val "${master_env}" "KUBE_PROXY_TOKEN")
NODE_PROBLEM_DETECTOR_TOKEN=$(get-env-val "${master_env}" "NODE_PROBLEM_DETECTOR_TOKEN")
CA_CERT_BASE64=$(get-env-val "${master_env}" "CA_CERT")
CA_KEY_BASE64=$(get-env-val "${master_env}" "CA_KEY")
KUBEAPISERVER_CERT_BASE64=$(get-env-val "${master_env}" "KUBEAPISERVER_CERT")
KUBEAPISERVER_KEY_BASE64=$(get-env-val "${master_env}" "KUBEAPISERVER_KEY")
EXTRA_DOCKER_OPTS=$(get-env-val "${master_env}" "EXTRA_DOCKER_OPTS")
KUBELET_CERT_BASE64=$(get-env-val "${master_env}" "KUBELET_CERT")
KUBELET_KEY_BASE64=$(get-env-val "${master_env}" "KUBELET_KEY")
MASTER_CERT_BASE64=$(get-env-val "${master_env}" "MASTER_CERT")
MASTER_KEY_BASE64=$(get-env-val "${master_env}" "MASTER_KEY")
AGGREGATOR_CA_KEY_BASE64=$(get-env-val "${master_env}" "AGGREGATOR_CA_KEY")
REQUESTHEADER_CA_CERT_BASE64=$(get-env-val "${master_env}" "REQUESTHEADER_CA_CERT")
PROXY_CLIENT_CERT_BASE64=$(get-env-val "${master_env}" "PROXY_CLIENT_CERT")
PROXY_CLIENT_KEY_BASE64=$(get-env-val "${master_env}" "PROXY_CLIENT_KEY")
}
# Update or verify required gcloud components are installed
# at minimum required version.
# Assumed vars
# KUBE_PROMPT_FOR_UPDATE
function update-or-verify-gcloud() {
local sudo_prefix=""
if [ ! -w $(dirname `which gcloud`) ]; then
sudo_prefix="sudo"
fi
# update and install components as needed
if [[ "${KUBE_PROMPT_FOR_UPDATE}" == "y" ]]; then
${sudo_prefix} gcloud ${gcloud_prompt:-} components install alpha
${sudo_prefix} gcloud ${gcloud_prompt:-} components install beta
${sudo_prefix} gcloud ${gcloud_prompt:-} components update
else
local version=$(gcloud version --format=json)
python -c'
import json,sys
from distutils import version
minVersion = version.LooseVersion("1.3.0")
required = [ "alpha", "beta", "core" ]
data = json.loads(sys.argv[1])
rel = data.get("Google Cloud SDK")
if rel != "HEAD" and version.LooseVersion(rel) < minVersion:
print("gcloud version out of date ( < %s )" % minVersion)
exit(1)
missing = []
for c in required:
if not data.get(c):
missing += [c]
if missing:
for c in missing:
print ("missing required gcloud component \"{0}\"".format(c))
exit(1)
' """${version}"""
fi
}
# Check whether required client and server binaries exist, prompting to download
# if missing.
# If KUBERNETES_SKIP_CONFIRM is set to y, we'll automatically download binaries
# without prompting.
function verify-kube-binaries() {
local missing_binaries=false
if ! "${KUBE_ROOT}/cluster/kubectl.sh" version --client >&/dev/null; then
echo "!!! kubectl appears to be broken or missing"
missing_binaries=true
fi
if ! $(find-release-tars); then
missing_binaries=true
fi
if ! "${missing_binaries}"; then
return
fi
get_binaries_script="${KUBE_ROOT}/cluster/get-kube-binaries.sh"
local resp="y"
if [[ ! "${KUBERNETES_SKIP_CONFIRM:-n}" =~ ^[yY]$ ]]; then
echo "Required binaries appear to be missing. Do you wish to download them? [Y/n]"
read resp
fi
if [[ "${resp}" =~ ^[nN]$ ]]; then
echo "You must download binaries to continue. You can use "
echo " ${get_binaries_script}"
echo "to do this for your automatically."
exit 1
fi
"${get_binaries_script}"
}
# Run pushd without stack output
function pushd() {
command pushd "$@" > /dev/null
}
# Run popd without stack output
function popd() {
command popd "$@" > /dev/null
}
|
shiywang/kubernetes
|
cluster/common.sh
|
Shell
|
apache-2.0
| 50,108 |
#!/bin/bash
VERSION=$(python setup.py --version)
PREFIX=holland-${VERSION}
TARBALL=${PREFIX}.tar.gz
git archive --prefix=${PREFIX}/ HEAD | gzip -n -9 > ${TARBALL}
|
m00dawg/holland
|
scripts/make_release.sh
|
Shell
|
bsd-3-clause
| 164 |
#!/bin/sh
make TARGET=debug clean
make TARGET=debug all
|
dunarel/dunphd-thesis
|
Chapter5/Main/hgt-qfunc.v.0.5.2/make.sh
|
Shell
|
bsd-3-clause
| 58 |
#!/bin/sh
arch=x86_64
pkg=emboss
version=6.6.0
build_deps="libc6-dev zlib1g-dev libncurses5-dev"
urls="
ftp://emboss.open-bio.org/pub/EMBOSS/EMBOSS-${version}.tar.gz
"
apt-get -qq update &&
apt-get install --no-install-recommends -y $build_deps &&
mkdir /build &&
cd /build &&
( for url in $urls; do
wget "$url" || false || exit
done ) &&
mkdir -p $HOME/bin/${arch}/ /build/dest/bin /build/dest/lib &&
tar xfvz EMBOSS-${version}.tar.gz &&
cd EMBOSS-${version} &&
ORIGIN='$ORIGIN' &&
export ORIGIN &&
LDFLAGS='-Wl,-rpath,$${ORIGIN}/../lib' ./configure --prefix /build/dest --without-x && make && make install &&
tar zcf /host/${pkg}-${version}-Linux-${arch}.tar.gz -C /build/dest .
|
natefoo/starforge
|
emboss/build.sh
|
Shell
|
mit
| 740 |
#!/usr/bin/env bash
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test marking of spent outputs
# Create a transaction graph with four transactions,
# A/B/C/D
# C spends A
# D spends B and C
# Then simulate C being mutated, to create C'
# that is mined.
# A is still (correctly) considered spent.
# B should be treated as unspent
if [ $# -lt 1 ]; then
echo "Usage: $0 path_to_binaries"
echo "e.g. $0 ../../src"
echo "Env vars MONETAD and MONETACLI may be used to specify the exact binaries used"
exit 1
fi
set -f
MONETAD=${MONETAD:-${1}/monetad}
CLI=${MONETACLI:-${1}/moneta-cli}
DIR="${BASH_SOURCE%/*}"
SENDANDWAIT="${DIR}/send.sh"
if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi
. "$DIR/util.sh"
D=$(mktemp -d test.XXXXX)
# Two nodes; one will play the part of merchant, the
# other an evil transaction-mutating miner.
D1=${D}/node1
CreateDataDir $D1 port=11000 rpcport=11001
B1ARGS="-datadir=$D1 -debug=mempool"
$MONETAD $B1ARGS &
B1PID=$!
D2=${D}/node2
CreateDataDir $D2 port=11010 rpcport=11011
B2ARGS="-datadir=$D2 -debug=mempool"
$MONETAD $B2ARGS &
B2PID=$!
# Wait until all four nodes are at the same block number
function WaitBlocks {
while :
do
sleep 1
declare -i BLOCKS1=$( GetBlocks $B1ARGS )
declare -i BLOCKS2=$( GetBlocks $B2ARGS )
if (( BLOCKS1 == BLOCKS2 ))
then
break
fi
done
}
# Wait until node has $N peers
function WaitPeers {
while :
do
declare -i PEERS=$( $CLI $1 getconnectioncount )
if (( PEERS == "$2" ))
then
break
fi
sleep 1
done
}
echo "Generating test blockchain..."
# Start with B2 connected to B1:
$CLI $B2ARGS addnode 127.0.0.1:11000 onetry
WaitPeers "$B1ARGS" 1
# 2 blocks, 50 XBT each == 100 XBT
# These will be transactions "A" and "B"
$CLI $B1ARGS setgenerate true 2
WaitBlocks
# 100 blocks, 0 mature == 0 XBT
$CLI $B2ARGS setgenerate true 100
WaitBlocks
CheckBalance "$B1ARGS" 100
CheckBalance "$B2ARGS" 0
# restart B2 with no connection
$CLI $B2ARGS stop > /dev/null 2>&1
wait $B2PID
$MONETAD $B2ARGS &
B2PID=$!
B1ADDRESS=$( $CLI $B1ARGS getnewaddress )
B2ADDRESS=$( $CLI $B2ARGS getnewaddress )
# Transaction C: send-to-self, spend A
TXID_C=$( $CLI $B1ARGS sendtoaddress $B1ADDRESS 50.0)
# Transaction D: spends B and C
TXID_D=$( $CLI $B1ARGS sendtoaddress $B2ADDRESS 100.0)
CheckBalance "$B1ARGS" 0
# Mutate TXID_C and add it to B2's memory pool:
RAWTX_C=$( $CLI $B1ARGS getrawtransaction $TXID_C )
# ... mutate C to create C'
L=${RAWTX_C:82:2}
NEWLEN=$( printf "%x" $(( 16#$L + 1 )) )
MUTATEDTX_C=${RAWTX_C:0:82}${NEWLEN}4c${RAWTX_C:84}
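# Added note (hedged, not in the original test): the splice above reads the
# scriptSig length byte at hex-string offset 82, writes it back incremented by
# one, and inserts an extra "4c" (OP_PUSHDATA1) byte in front of the original
# signature push, so C' carries the same signature in a different (malleated)
# serialization and therefore has a different txid.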
# ... give the mutated C' to B2:
MUTATEDTXID=$( $CLI $B2ARGS sendrawtransaction $MUTATEDTX_C )
echo "TXID_C: " $TXID_C
echo "Mutated: " $MUTATEDTXID
# Re-connect nodes, and have both nodes mine some blocks:
$CLI $B2ARGS addnode 127.0.0.1:11000 onetry
WaitPeers "$B1ARGS" 1
# Having B2 mine the next block puts the mutated
# transaction C in the chain:
$CLI $B2ARGS setgenerate true 1
WaitBlocks
# B1 should still be able to spend 100, because D is conflicted
# so does not count as a spend of B
CheckBalance "$B1ARGS" 100
$CLI $B2ARGS stop > /dev/null 2>&1
wait $B2PID
$CLI $B1ARGS stop > /dev/null 2>&1
wait $B1PID
echo "Tests successful, cleaning up"
rm -rf $D
exit 0
|
biton-project/moneta-0.10.0
|
qa/rpc-tests/conflictedbalance.sh
|
Shell
|
mit
| 3,481 |
#!/bin/bash
CURR_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
EXTERNAL_DIR=${CURR_DIR}/../external
INSTALL_DIR=${CURR_DIR}/../external/install
if [ -d ${EXTERNAL_DIR}/RapMap ] ; then
rm -fr ${EXTERNAL_DIR}/RapMap
fi
if [ -d ${INSTALL_DIR}/include/rapmap ] ; then
rm -fr ${INSTALL_DIR}/include/rapmap
fi
if [ -d ${INSTALL_DIR}/src/rapmap ] ; then
rm -fr ${INSTALL_DIR}/src/rapmap
fi
mkdir -p ${EXTERNAL_DIR}
curl -k -L https://github.com/COMBINE-lab/RapMap/archive/sf-v0.10.1.zip -o ${EXTERNAL_DIR}/rapmap.zip
rm -fr ${EXTERNAL_DIR}/RapMap
unzip ${EXTERNAL_DIR}/rapmap.zip -d ${EXTERNAL_DIR}
mv ${EXTERNAL_DIR}/RapMap-sf-v0.10.1 ${EXTERNAL_DIR}/RapMap
mkdir -p ${INSTALL_DIR}/include/rapmap
mkdir -p ${INSTALL_DIR}/src/rapmap
rm ${EXTERNAL_DIR}/RapMap/src/xxhash.c
rm ${EXTERNAL_DIR}/RapMap/include/xxhash.h
cp -r ${EXTERNAL_DIR}/RapMap/external/libdivsufsort.zip ${EXTERNAL_DIR}
cp -r ${EXTERNAL_DIR}/RapMap/src/*.c ${INSTALL_DIR}/src/rapmap
cp -r ${EXTERNAL_DIR}/RapMap/src/*.cpp ${INSTALL_DIR}/src/rapmap
cp -r ${EXTERNAL_DIR}/RapMap/include/tclap ${INSTALL_DIR}/include/rapmap
cp -r ${EXTERNAL_DIR}/RapMap/include/*.h ${INSTALL_DIR}/include/rapmap
cp -r ${EXTERNAL_DIR}/RapMap/include/*.hpp ${INSTALL_DIR}/include/rapmap
|
COMBINE-lab/quark
|
scripts/fetchRapMap.sh
|
Shell
|
gpl-3.0
| 1,262 |
#!/usr/bin/env bash
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Jeff Bryner [email protected]
source /home/mozdef/envs/mozdef/bin/activate
/home/mozdef/envs/mozdef/cron/rotateIndexes.py -c /home/mozdef/envs/mozdef/cron/rotateIndexes.conf
|
DaneTheory/MozDef
|
cron/esMaint.sh
|
Shell
|
mpl-2.0
| 460 |
#!/bin/bash
###################### COPYRIGHT/COPYLEFT ######################
# (C) 2016 Intel Deutschland GmbH
# Author: Michael Soegtrop
#
# Released to the public by Intel under the
# GNU Lesser General Public License Version 2.1 or later
# See https://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
###################### DIFF A TAR FILE AND A FOLDER ######################
set -o nounset
# Print usage
if [ "$#" -lt 2 ] ; then
echo 'Diff a tar (or compressed tar) file with a folder'
echo 'difftar-folder.sh <tarfile> <folder> [strip]'
echo '<tarfile> is the name of the tar file to diff with (required)'
echo '<folder> is the name of the folder to diff with (required)'
echo '<strip> is the number of path components to strip from tar file (default is 0)'
echo 'All files in the tar file must have at least <strip> path components.'
echo 'This also adds new files from folder.new, if folder.new exists'
exit 1
fi
# Parse parameters
tarfile=$1
folder=$2
if [ "$#" -ge 3 ] ; then
strip=$3
else
strip=0
fi
# Get path prefix if --strip is used
if [ "$strip" -gt 0 ] ; then
  # Get the path/name of the first file from the tar and extract the first $strip path components
# This assumes that the first file in the tar file has at least $strip many path components
prefix=$(tar -t -f "$tarfile" | head -1 | cut -d / -f -$strip)/
else
prefix=
fi
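# Example (hypothetical values): with strip=1 and a first tar entry of
# "mypkg-1.0/README", prefix becomes "mypkg-1.0/".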
# Original folder
orig=$folder.orig
mkdir -p "$orig"
# New and empty folders
new=$folder.new
empty=$folder.empty
mkdir -p "$empty"
# Print information (this is ignored by patch)
echo diff/patch file created on "$(date)" with:
echo difftar-folder.sh "$@"
echo TARFILE= "$tarfile"
echo FOLDER= "$folder"
echo TARSTRIP= "$strip"
echo TARPREFIX= "$prefix"
echo ORIGFOLDER= "$orig"
# Make sure tar uses english output (for Mod time differs)
export LC_ALL=C
# Search all files with a deviating modification time using tar --diff
tar --diff -a -f "$tarfile" --strip $strip --directory "$folder" | grep "Mod time differs" | while read -r file ; do
# Substitute ': Mod time differs' with nothing
file=${file/: Mod time differs/}
# Check if file exists
if [ -f "$folder/$file" ] ; then
# Extract original file
tar -x -a -f "$tarfile" --strip $strip --directory "$orig" "$prefix$file"
# Compute diff
diff -u "$orig/$file" "$folder/$file"
fi
done
if [ -d "$new" ] ; then
diff -u -r --unidirectional-new-file "$empty" "$new"
fi
|
letouzey/coq-wip
|
dev/build/windows/difftar-folder.sh
|
Shell
|
lgpl-2.1
| 2,447 |
#!/bin/sh
set -e
echo "@testing http://dl-cdn.alpinelinux.org/alpine/edge/testing" >> /etc/apk/repositories
apk add --update \
linux-headers \
liberasurecode@testing \
liberasurecode-dev@testing \
gnupg \
git \
curl \
rsync \
memcached \
openssl \
openssl-dev \
sqlite \
sqlite-libs \
sqlite-dev \
xfsprogs \
zlib-dev \
g++ \
libffi \
libffi-dev \
libxslt \
libxslt-dev \
libxml2 \
libxml2-dev \
|
openstack/swift
|
docker/install_scripts/10_apk_install_prereqs.sh
|
Shell
|
apache-2.0
| 441 |
#!/bin/bash
script/jruby -S rake db:reset db:seed
|
varshavaradarajan/functional-tests
|
gadget_renderer/db_reset.sh
|
Shell
|
apache-2.0
| 50 |
# install various tools that are useful for development
yum -y install gcc make gcc-c++ kernel-devel-`uname -r` zlib-devel openssl-devel readline-devel sqlite-devel perl wget dkms nfs-utils
|
scheuk/packer-rhel
|
scripts/development.sh
|
Shell
|
apache-2.0
| 190 |
#!/bin/bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e
set -x
source tensorflow/tools/ci_build/release/common.sh
install_bazelisk
# Pick a more recent version of xcode
export DEVELOPER_DIR=/Applications/Xcode_10.3.app/Contents/Developer
sudo xcode-select -s "${DEVELOPER_DIR}"
# Install macos pip dependencies
install_macos_pip_deps sudo pip3.5
# Export required variables for running pip_new.sh
export OS_TYPE="MACOS"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python3.5'
export TF_BUILD_BOTH_CPU_PACKAGES=1
# Run configure.
export TF_NEED_CUDA=0
export CC_OPT_FLAGS='-mavx'
export PYTHON_BIN_PATH=$(which ${TF_PYTHON_VERSION})
yes "" | "$PYTHON_BIN_PATH" configure.py
# Export optional variables for running pip.sh
export TF_BUILD_FLAGS="--config=opt --config=v2"
export TF_TEST_FLAGS="--define=no_tensorflow_py_deps=true --test_lang_filters=py --test_output=errors --verbose_failures=true --keep_going --test_env=TF2_BEHAVIOR=1"
export TF_TEST_TARGETS="//tensorflow/python/..."
export TF_PIP_TESTS="test_pip_virtualenv_non_clean test_pip_virtualenv_clean"
export TF_TEST_FILTER_TAGS='-nomac,-no_mac,-no_oss,-oss_serial,-no_oss_py35,-gpu,-tpu,-benchmark-test'
export IS_NIGHTLY=0 # Not nightly
export TF_PROJECT_NAME="tensorflow"
export TF_PIP_TEST_ROOT="pip_test"
./tensorflow/tools/ci_build/builds/pip_new.sh
|
gunan/tensorflow
|
tensorflow/tools/ci_build/release/macos/cpu_py35_full/pip.sh
|
Shell
|
apache-2.0
| 1,984 |
#!/bin/bash
STREAM=../stream_power
BANKS=16
CAPACITY=8
LINKS=8
BSIZE=64
QDEPTH=64
XDEPTH=128
VAULTS=64
#NRQSTS=134217728
NRQSTS=33554432
#NRQSTS=4096
DRAMS=20
THREADS=16
SIMD=8
echo "Executing : $STREAM -b $BANKS -c $CAPACITY -l $LINKS -m $BSIZE -n 1 -q $QDEPTH -x $XDEPTH\
-d $DRAMS -v $VAULTS -N $NRQSTS -T $THREADS -s $SIMD"
$STREAM -b $BANKS -c $CAPACITY -l $LINKS -m $BSIZE -n 1 -q $QDEPTH -x $XDEPTH\
-d $DRAMS -v $VAULTS -N $NRQSTS -T $THREADS -s $SIMD
|
tactcomplabs/gc64-hmcsim
|
test/stream_power_tecplot/scripts/8link_8GB_stream.sh
|
Shell
|
bsd-2-clause
| 469 |
#!/usr/bin/env bash
# Copyright 2014 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
TAG="${CIRRUS_TAG:-latest}"
# Starts an interactive docker container with a bash shell running in it, and
# attaches the user's shell to it.
sudo docker run --interactive --tty \
"gcr.io/flutter-cirrus/build-flutter-image:$TAG" \
/bin/bash
|
Hixie/flutter
|
dev/ci/docker_linux/docker_attach.sh
|
Shell
|
bsd-3-clause
| 427 |
yuidoc js/ -o doc/ -t yuidoc-theme/yuidoc-theme-blue/
|
sitepoint-editors/MootorFrameworkApp_Part1
|
source/build-doc.sh
|
Shell
|
mit
| 54 |
mysqldump -u ${Username} --password=${Password} --all-databases ${Options} > ${File Path}
|
CA-ReleaseAutomation/ca-ra-mysql-pack
|
src/mysql/MySQLBackupAllDatabases1415599639582.sh
|
Shell
|
epl-1.0
| 89 |
#!/bin/sh
# Run this to generate all the initial makefiles, etc.
# cp configures/configure.in.new ./configure.in
srcdir=`dirname $0`
DIE=0
(autoconf --version) < /dev/null > /dev/null 2>&1 || {
echo
echo "**Error**: You must have \`autoconf' installed to."
echo "Download the appropriate package for your distribution,"
echo "or get the source tarball at ftp://ftp.gnu.org/pub/gnu/"
DIE=1
}
(grep "^AM_PROG_LIBTOOL" $srcdir/configure.in >/dev/null) && {
(libtool --version) < /dev/null > /dev/null 2>&1 || {
echo
echo "**Error**: You must have \`libtool' installed."
echo "Get ftp://ftp.gnu.org/pub/gnu/libtool-1.2d.tar.gz"
echo "(or a newer version if it is available)"
DIE=1
}
}
grep "^AM_GLIB_GNU_GETTEXT" $srcdir/configure.in >/dev/null && {
grep "sed.*POTFILES" $srcdir/configure.in >/dev/null || \
(glib-gettextize --version) < /dev/null > /dev/null 2>&1 || {
echo
echo "**Error**: You must have \`gettext' installed."
echo "Get ftp://alpha.gnu.org/gnu/gettext-0.10.35.tar.gz"
echo "(or a newer version if it is available)"
DIE=1
}
}
grep "^AM_GNOME_GETTEXT" $srcdir/configure.in >/dev/null && {
grep "sed.*POTFILES" $srcdir/configure.in >/dev/null || \
(gettext --version) < /dev/null > /dev/null 2>&1 || {
echo
echo "**Error**: You must have \`gettext' installed."
echo "Get ftp://alpha.gnu.org/gnu/gettext-0.10.35.tar.gz"
echo "(or a newer version if it is available)"
DIE=1
}
}
(automake --version) < /dev/null > /dev/null 2>&1 || {
echo
echo "**Error**: You must have \`automake' installed."
echo "Get ftp://ftp.gnu.org/pub/gnu/automake-1.3.tar.gz"
echo "(or a newer version if it is available)"
DIE=1
NO_AUTOMAKE=yes
}
# if no automake, don't bother testing for aclocal
test -n "$NO_AUTOMAKE" || (aclocal --version) < /dev/null > /dev/null 2>&1 || {
echo
echo "**Error**: Missing \`aclocal'. The version of \`automake'"
echo "installed doesn't appear recent enough."
echo "Get ftp://ftp.gnu.org/pub/gnu/automake-1.3.tar.gz"
echo "(or a newer version if it is available)"
DIE=1
}
if test "$DIE" -eq 1; then
exit 1
fi
if test -z "$*"; then
echo "**Warning**: I am going to run \`configure' with no arguments."
echo "If you wish to pass any to it, please specify them on the"
echo \`$0\'" command line."
echo
fi
case $CC in
xlc )
am_opt=--include-deps;;
esac
for coin in `find $srcdir -name configure.in -print`
do
dr=`dirname $coin`
if test -f $dr/NO-AUTO-GEN; then
echo skipping $dr -- flagged as no auto-gen
else
echo processing $dr
macrodirs=`sed -n -e 's,AM_ACLOCAL_INCLUDE(\(.*\)),\1,gp' < $coin`
( cd $dr
aclocalinclude="$ACLOCAL_FLAGS"
for k in $macrodirs; do
if test -d $k; then
aclocalinclude="$aclocalinclude -I $k"
##else
## echo "**Warning**: No such directory \`$k'. Ignored."
fi
done
if grep "^AM_GLIB_GNU_GETTEXT" configure.in >/dev/null; then
if grep "sed.*POTFILES" configure.in >/dev/null; then
: do nothing -- we still have an old unmodified configure.in
else
echo "Creating $dr/aclocal.m4 ..."
test -r $dr/aclocal.m4 || touch $dr/aclocal.m4
echo "Running glib-gettextize... Ignore non-fatal messages."
echo "no" | glib-gettextize --force --copy
echo "Making $dr/aclocal.m4 writable ..."
test -r $dr/aclocal.m4 && chmod u+w $dr/aclocal.m4
fi
fi
if grep "^AM_GNOME_GETTEXT" configure.in >/dev/null; then
echo "Creating $dr/aclocal.m4 ..."
test -r $dr/aclocal.m4 || touch $dr/aclocal.m4
echo "Running gettextize... Ignore non-fatal messages."
echo "no" | gettextize --force --copy
echo "Making $dr/aclocal.m4 writable ..."
test -r $dr/aclocal.m4 && chmod u+w $dr/aclocal.m4
fi
if grep "^AM_PROG_LIBTOOL" configure.in >/dev/null; then
echo "Running libtoolize..."
libtoolize --force --copy
fi
echo "Running aclocal $aclocalinclude ..."
aclocal $aclocalinclude
if grep "^AM_CONFIG_HEADER" configure.in >/dev/null; then
echo "Running autoheader..."
autoheader
fi
echo "Running automake --gnu $am_opt ..."
automake --add-missing --gnu $am_opt
echo "Running autoconf ..."
autoconf
)
fi
done
#conf_flags="--enable-maintainer-mode --enable-compile-warnings" #--enable-iso-c
if test x$NOCONFIGURE = x; then
echo Running $srcdir/configure $conf_flags "$@" ...
$srcdir/configure $conf_flags "$@" \
&& echo Now type \`make\' to compile
else
echo Skipping configure process.
fi
|
vicamo/cndrvcups-lb
|
ppd/autogen.sh
|
Shell
|
gpl-2.0
| 4,543 |
#!/bin/sh
# Copyright (C) 2015 Curt Brune <[email protected]>
#
# SPDX-License-Identifier: GPL-2.0
#
# Script to create a tarball of "ONIE tools", which are made available
# to the NOS.
#
arch=$1
tools_dir=$2
output_file=$3
sysroot=$4
shift 4
# The tools originate from two locations:
#
# 1. Some CPU architecture independent tools are from the $sysroot of
# the ONIE installer image directly. These tools are unmodified
# copies of what is in the ONIE runtime image.
#
# 2. CPU dependent tools are from an architecture specific directory
# located within the ONIE repo $tools_dir. These tools are *not*
# present in the ONIE runtime image.
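# Example invocation (hypothetical paths and tool list, for illustration only):
#   onie-mk-tools.sh x86_64 tools/arch-bin onie-tools.tar.xz build/sysroot \
#       bin/some-tool.sh lib/some-fragment.sh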
[ -d "${tools_dir}/${arch}" ] || {
echo "ERROR: arch tools directory '${tools_dir}/${arch}' does not exist."
exit 1
}
touch $output_file || {
echo "ERROR: unable to create output file: $output_file"
exit 1
}
rm -f $output_file
[ -d "$sysroot" ] || {
echo "ERROR: sysroot directory '$sysroot' does not exist."
exit 1
}
[ $# -gt 0 ] || {
echo "Error: No ONIE sysroot tool files found"
exit 1
}
tmp_dir=
clean_up()
{
rm -rf $tmp_dir
}
trap clean_up EXIT
# make the tools tarball
# contents:
# - /bin -- shell scripts
# - /lib -- shell script fragments
echo -n "Building ONIE tools archive ."
tmp_dir=$(mktemp --directory)
cp -a "${tools_dir}/${arch}"/* $tmp_dir
echo -n "."
for f in $* ; do
tdir="${tmp_dir}/$(dirname $f)"
mkdir -p $tdir || exit 1
cp -a "${sysroot}/$f" $tdir || exit 1
echo -n "."
done
# Bundle data into a tar file
tar -C $tmp_dir -cJf $output_file $(ls $tmp_dir) || exit 1
echo -n "."
rm -rf $tmp_dir
echo " Done."
echo "Success: ONIE tools tar archive is ready: ${output_file}"
|
InterfaceMasters/onie
|
build-config/scripts/onie-mk-tools.sh
|
Shell
|
gpl-2.0
| 1,738 |
#! /bin/sh
# Copyright (C) 2009-2013 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Test 'make -n' for various targets, to ensure that:
#
# 1. no files or directories are created or removed, and
#
# 2. if using GNU make or a make implementation supporting the .MAKE
# special target, the output from make is sufficiently complete.
#
# This test exercises the GCS-mandated targets (except for dist)
# as well as tags.
# For gen-testsuite-part: ==> try-with-serial-tests <==
. test-init.sh
# Does $MAKE support the '.MAKE' special target?
have_dotmake=false
if using_gmake; then
have_dotmake=: # GNU make must support it.
else
unindent > mk.tmp << 'END'
targ.tmp:
: > $@
.MAKE: targ.tmp
END
if $MAKE -n -f mk.tmp targ.tmp && test -f targ.tmp; then
have_dotmake=:
fi
fi
mkdir sub sub2
cat >> configure.ac << 'END'
AC_CONFIG_FILES([sub/Makefile sub2/Makefile])
AC_OUTPUT
END
cat > Makefile.am <<'END'
TESTS = foo.test
SUBDIRS = sub sub2
##BUILT_SOURCES = foo
foo:
foo.test:
all-local:
@: > stamp-all
install-data-local:
@: > stamp-install
uninstall-local:
@: > stamp-uninstall
clean-local:
@: > stamp-clean
distclean-local:
@: > stamp-distclean
info-local:
@: > stamp-info
html-local:
@: > stamp-html
dvi-local:
@: > stamp-dvi
ps-local:
@: > stamp-ps
pdf-local:
@: > stamp-pdf
check-local:
@: > stamp-check
installcheck-local:
@: > stamp-installcheck
mostlyclean-local:
@: > stamp-mostlyclean
maintainer-clean-local:
@: > stamp-maintainer-clean
END
cat >sub/Makefile.am <<'END'
all-local:
@: > stamp-all-sub
install-data-local:
@: > stamp-install-sub
uninstall-local:
@: > stamp-uninstall-sub
clean-local:
@: > stamp-clean-sub
distclean-local:
@: > stamp-distclean-sub
info-local:
@: > stamp-info-sub
html-local:
@: > stamp-html-sub
dvi-local:
@: > stamp-dvi-sub
ps-local:
@: > stamp-ps-sub
pdf-local:
@: > stamp-pdf-sub
check-local:
@: > stamp-check-sub
installcheck-local:
@: > stamp-installcheck-sub
tags:
@: > stamp-tags-sub
mostlyclean-local:
@: > stamp-mostlyclean-sub
maintainer-clean-local:
@: > stamp-maintainer-clean-sub
END
cat >sub2/Makefile.am <<'END'
all install uninstall clean check:
@: > sub2-$@-should-not-be-executed
info dvi pdf ps:
@: > sub2-$@-should-not-be-executed
html:
@: > sub2-$@-should-not-be-executed
install-info install-html install-dvi install-pdf install-ps:
@: > sub2-$@-should-not-be-executed
installcheck installdirs tags mostlyclean:
@: > sub2-$@-should-not-be-executed
## These targets cannot be overridden like this:
## install-strip distclean maintainer-clean
END
$ACLOCAL
$AUTOCONF
check_targets ()
{
for target in \
all install install-strip uninstall clean distclean check \
info html dvi pdf ps \
install-info install-html install-dvi install-pdf install-ps \
installcheck installdirs tags mostlyclean maintainer-clean
do
run_make -O -- -n $target
case $target in
install-* | installdirs | tags ) ;;
*)
if $have_dotmake; then
grep "stamp-$target$" stdout || exit 1
fi
test ! -e "stamp-$target" || exit 1
;;
esac
case $target in
install-* | installdirs ) ;;
*)
if $have_dotmake; then
grep "stamp-$target-sub" stdout || exit 1
fi
test ! -e "sub/stamp-$target-sub" || exit 1
;;
esac
case $target in
distclean | maintainer-clean ) ;;
*)
if $have_dotmake; then
grep "should-not-be-executed" stdout || exit 1
fi
test ! -e "sub2/sub2-$target-should-not-be-executed" || exit 1
;;
esac
done
}
$AUTOMAKE -a -Wno-override
./configure
check_targets || exit 1
# Now, introduce BUILT_SOURCES into the toplevel Makefile
# TODO: add BUILT_SOURCES to sub2, fix fallout.
sed 's/##//' < Makefile.am > t
mv -f t Makefile.am
$AUTOMAKE -Wno-override --force Makefile
./configure
check_targets || exit 1
exit 0
|
DDTChen/CookieVLC
|
vlc/extras/tools/automake/t/maken3.sh
|
Shell
|
gpl-2.0
| 4,478 |
#!/bin/sh
#
# test program for fiwalk
IMG=/corp/drives/nps/nps-2009-canon2/nps-2009-canon2-gen5.raw
if [ ! -r $IMG ] ; then
echo ERROR: $IMG not on this system
echo Cannot perform this test.
exit 0
fi
/bin/rm -f gen5.xml
ficonfig=../plugins/ficonfig.txt
if [ ! -r $ficonfig ] ; then
ficonfig=$srcdir/../plugins/ficonfig.txt
fi
if ! ./fiwalk -c $ficonfig -X gen5.xml $IMG ; then exit 1 ; fi
if ! xmllint gen5.xml > /dev/null ; then
echo *** BAD XML in gen5.xml ***
exit 1 ;
fi
if ! grep 6c9e27f9911f37488ef0d6e878c68f2a61100b2c gen5.xml >/dev/null ; then echo sha1 extract not working; exit 1 ; fi
if ! grep 'One-chip color area sensor' gen5.xml >/dev/null ; then echo EXPAT plugin not working ; exit 1; fi
/bin/rm -f gen5.xml
|
Cisco-Talos/pyrebox
|
sleuthkit/tools/fiwalk/src/test_fiwalk.sh
|
Shell
|
gpl-2.0
| 748 |
#!/bin/sh
#
# Copyright (C) 2001, 2004, 2007, 2012, 2016 Internet Systems Consortium, Inc. ("ISC")
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# $Id: setup.sh,v 1.6 2007/06/19 23:47:03 tbox Exp $
cd ns1 && cp -f cache.in cache
|
pecharmin/bind9
|
bin/tests/system/glue/setup.sh
|
Shell
|
mpl-2.0
| 387 |
# Aliases
alias g='git'
compdef g=git
alias gst='git status'
compdef _git gst=git-status
alias gd='git diff'
compdef _git gd=git-diff
alias gdc='git diff --cached'
compdef _git gdc=git-diff
alias gl='git pull'
compdef _git gl=git-pull
alias gup='git pull --rebase'
compdef _git gup=git-fetch
alias gp='git push'
compdef _git gp=git-push
alias gd='git diff'
gdv() { git diff -w "$@" | view - }
compdef _git gdv=git-diff
alias gc='git commit -v'
compdef _git gc=git-commit
alias gc!='git commit -v --amend'
compdef _git gc!=git-commit
alias gca='git commit -v -a'
compdef _git gca=git-commit
alias gca!='git commit -v -a --amend'
compdef _git gca!=git-commit
alias gcmsg='git commit -m'
compdef _git gcmsg=git-commit
alias gco='git checkout'
compdef _git gco=git-checkout
alias gcm='git checkout master'
alias gr='git remote'
compdef _git gr=git-remote
alias grv='git remote -v'
compdef _git grv=git-remote
alias grmv='git remote rename'
compdef _git grmv=git-remote
alias grrm='git remote remove'
compdef _git grrm=git-remote
alias grset='git remote set-url'
compdef _git grset=git-remote
alias grup='git remote update'
compdef _git grup=git-remote
alias grbi='git rebase -i'
compdef _git grbi=git-rebase
alias grbc='git rebase --continue'
compdef _git grbc=git-rebase
alias grba='git rebase --abort'
compdef _git grba=git-rebase
alias gb='git branch'
compdef _git gb=git-branch
alias gba='git branch -a'
compdef _git gba=git-branch
alias gcount='git shortlog -sn'
compdef gcount=git
alias gcl='git config --list'
alias gcp='git cherry-pick'
compdef _git gcp=git-cherry-pick
alias glg='git log --stat --max-count=10'
compdef _git glg=git-log
alias glgg='git log --graph --max-count=10'
compdef _git glgg=git-log
alias glgga='git log --graph --decorate --all'
compdef _git glgga=git-log
alias glo='git log --oneline --decorate --color'
compdef _git glo=git-log
alias glog='git log --oneline --decorate --color --graph'
compdef _git glog=git-log
alias gss='git status -s'
compdef _git gss=git-status
alias ga='git add'
compdef _git ga=git-add
alias gm='git merge'
compdef _git gm=git-merge
alias grh='git reset HEAD'
alias grhh='git reset HEAD --hard'
alias gclean='git reset --hard && git clean -dfx'
alias gwc='git whatchanged -p --abbrev-commit --pretty=medium'
#remove the gf alias
#alias gf='git ls-files | grep'
alias gpoat='git push origin --all && git push origin --tags'
alias gmt='git mergetool --no-prompt'
compdef _git gmt=git-mergetool
alias gg='git gui citool'
alias gga='git gui citool --amend'
alias gk='gitk --all --branches'
alias gsts='git stash show --text'
alias gsta='git stash'
alias gstp='git stash pop'
alias gstd='git stash drop'
# Will cd into the top of the current repository
# or submodule.
alias grt='cd $(git rev-parse --show-toplevel || echo ".")'
# Git and svn mix
alias git-svn-dcommit-push='git svn dcommit && git push github master:svntrunk'
compdef git-svn-dcommit-push=git
alias gsr='git svn rebase'
alias gsd='git svn dcommit'
#
# Will return the current branch name
# Usage example: git pull origin $(current_branch)
#
function current_branch() {
ref=$(git symbolic-ref HEAD 2> /dev/null) || \
ref=$(git rev-parse --short HEAD 2> /dev/null) || return
echo ${ref#refs/heads/}
}
function current_repository() {
ref=$(git symbolic-ref HEAD 2> /dev/null) || \
ref=$(git rev-parse --short HEAD 2> /dev/null) || return
echo $(git remote -v | cut -d':' -f 2)
}
# these aliases take advantage of the previous function
alias ggpull='git pull origin $(current_branch)'
compdef ggpull=git
alias ggpur='git pull --rebase origin $(current_branch)'
compdef ggpur=git
alias ggpush='git push origin $(current_branch)'
compdef ggpush=git
alias ggpnp='git pull origin $(current_branch) && git push origin $(current_branch)'
compdef ggpnp=git
# Pretty log messages
function _git_log_prettily(){
if ! [ -z $1 ]; then
git log --pretty=$1
fi
}
alias glp="_git_log_prettily"
compdef _git glp=git-log
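# Example (illustrative): abbreviated hashes plus subjects
#   glp "%h %s"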
# Work In Progress (wip)
# These features let you pause development on one branch and switch to another (wip)
# When you want to go back to work, just unwip it
#
# This function prints a warning if the current branch is a wip
function work_in_progress() {
if $(git log -n 1 2>/dev/null | grep -q -c "\-\-wip\-\-"); then
echo "WIP!!"
fi
}
# these aliases commit and uncommit wip branches
alias gwip='git add -A; git ls-files --deleted -z | xargs -0 git rm; git commit -m "--wip--"'
alias gunwip='git log -n 1 | grep -q -c "\-\-wip\-\-" && git reset HEAD~1'
# these aliases ignore changes to a file
alias gignore='git update-index --assume-unchanged'
alias gunignore='git update-index --no-assume-unchanged'
# list temporarily ignored files
alias gignored='git ls-files -v | grep "^[[:lower:]]"'
|
timsuchanek/oh-my-zsh
|
plugins/git/git.plugin.zsh
|
Shell
|
mit
| 4,744 |
#!/bin/sh
echo "dn: ou=eng,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
for i in {1..10}
do
echo "dn: cn=John-$i,ou=eng,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
done
echo "dn: ou=eng,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
echo "dn: cn=administrator,cn=users,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
echo "dn: cn=password and lockout policy,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
echo "dn: cn=ForeignSecurityPrincipals,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
echo "dn: cn=users,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
echo "dn: cn=Administrators,cn=Builtin,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
echo "dn: cn=Users,cn=BuiltIn,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
echo "dn: cn=BuiltIn,dc=tenant2,dc=com"
echo "changetype: delete"
echo ""
|
schatt/lightwave
|
vmdir/testing/acl_tests/generate_data_tenant2_del.sh
|
Shell
|
apache-2.0
| 873 |
#!/bin/bash
set -e -x
cd /app
composer install --prefer-dist --no-interaction --optimize-autoloader
./init --env=${APP_ENV:-Production} --overwrite=y
./yii migrate --interactive=0
function setEnvironmentVariable() {
if [ -z "$2" ]; then
echo "Environment variable '$1' not set."
return
fi
echo "env[$1] = \"$2\" ; automatically add env" >> /usr/local/etc/php-fpm.conf
}
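# For example (illustrative), APP_ENV=Production would append the line:
#   env[APP_ENV] = "Production" ; automatically add env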
sed -i '/automatically add env/d' /usr/local/etc/php-fpm.conf
# Grep all ENV variables
for _curVar in `env | awk -F = '{print $1}'`;do
# awk has split them by the equals sign
# Pass the name and value to our function
setEnvironmentVariable ${_curVar} ${!_curVar}
done
supervisord -n
# service supervisord start
|
okiter/getyii
|
docker-files/run.sh
|
Shell
|
bsd-3-clause
| 736 |
#!/usr/bin/env bash
#
# NOTE: The working directory should be the main capp directory when this script is run
#
# $1 Cappuccino documentation directory
# Do this if you want to use the utility functions
source "$1"/support/processor_setup.sh
markdown=`which markdown`
if [ -n "$markdown" ]; then
processor_msg "Markdown main page..."
"$markdown" README.markdown > "$1"/README.html
else
processor_msg "markdown binary is not installed, documentation cannot be generated." "red"
echo "On Mac OS X, install brew with the following command line:"
echo ' ruby -e "$(curl -fsSL https://gist.github.com/raw/323731/install_homebrew.rb)"'
echo "Then use 'brew install markdown' from the command line to install markdown."
exit 1
fi
|
i5ting/mdpreview
|
vendor/cappuccino/Tools/Documentation/preprocess/001.markdown_readme.sh
|
Shell
|
mit
| 755 |
#!/bin/bash
#/*
# Copyright 2009-2013 by The Regents of the University of California
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# you may obtain a copy of the License from
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#*/
hostname
#Import cluster properties
. conf/cluster.properties
#Get the IP address of the cc
CCHOST_NAME=`cat conf/master`
CCHOST=`bin/getip.sh`
#Remove the temp dir
rm -rf $CCTMP_DIR
mkdir $CCTMP_DIR
#Remove the logs dir
rm -rf $CCLOGS_DIR
mkdir $CCLOGS_DIR
#Export JAVA_HOME and JAVA_OPTS
export JAVA_HOME=$JAVA_HOME
export JAVA_OPTS=$CCJAVA_OPTS
#Launch hyracks cc script
chmod -R 755 $HYRACKS_HOME
if [ -f "conf/topology.xml" ]; then
#Launch hyracks cc script with topology
$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
else
#Launch hyracks cc script without topology
$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
fi
|
sjaco002/incubator-asterixdb-hyracks
|
hyracks/hyracks-dist/src/main/resources/bin/startcc.sh
|
Shell
|
apache-2.0
| 1,809 |
#!/bin/sh
#
# Copyright 2005-2010 Intel Corporation. All Rights Reserved.
#
# This file is part of Threading Building Blocks.
#
# Threading Building Blocks is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License
# version 2 as published by the Free Software Foundation.
#
# Threading Building Blocks is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Threading Building Blocks; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# As a special exception, you may use this file as part of a free software
# library without restriction. Specifically, if other files instantiate
# templates or use macros or inline functions from this file, or you compile
# this file and link it with other files to produce an executable, this
# file does not by itself cause the resulting executable to be covered by
# the GNU General Public License. This exception does not however
# invalidate any other reasons why the executable file might be covered by
# the GNU General Public License.
while getopts "l:" flag #
do #
if [ `uname` != 'Linux' ] ; then #
echo 'skip' #
exit #
fi #
LD_PRELOAD=$OPTARG #
shift `expr $OPTIND - 1` #
done #
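# Example (hypothetical library and test names, for illustration):
#   sh test_launcher.sh -l libtbbmalloc_proxy.so.2 test_some_feature.exe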
# Set stack limit
ulimit -s 10240 #
# Run the command line passed via parameters
export LD_PRELOAD #
./$* #
|
bamos/parsec-benchmark
|
pkgs/libs/tbblib/src/build/test_launcher.sh
|
Shell
|
bsd-3-clause
| 1,645 |