shell
#!/bin/bash
flytectl sandbox teardown
shell
#!/bin/sh
mkdir -p autotools
aclocal
libtoolize
autoheader
automake -a
autoconf
automake -a
./configure $@
shell
install -d "${ROOTFS_DIR}/etc/systemd/system/rc-local.service.d"
install -m 640 files/ttyoutput.conf "${ROOTFS_DIR}/etc/systemd/system/rc-local.service.d/"
install -m 640 files/50raspi "${ROOTFS_DIR}/etc/apt/apt.conf.d/"
install -m 640 files/console-setup "${ROOTFS_DIR}/etc/default/"
shell
fi
if [[ $(basename $0) != "cgo" ]]; then
    export CGO_ENABLED=0
fi
BIN="${GOROOT}/bin/go"
if [[ $(basename $0) == "gobin" ]]; then
shell
echo 'Removing intel only mode setup......'
rm -rf /etc/X11/xorg.conf.d/99-intel.conf
rm -rf /etc/modprobe.d/99-intel.conf
rm -rf /etc/modules-load.d/99-intel.conf
rm -rf /usr/local/bin/optimus.sh
sleep 1
echo 'Setting nvidia prime mode.......'
shell
#!/bin/sh
cd /app/mydjangoapp/static
npm install --allow-root --save-dev
bower install --allow-root
shell
cat telemetry-data-as-object.json | coap post coap://coap.thingsboard.cloud/api/v1/$ACCESS_TOKEN/telemetry
# For example, $ACCESS_TOKEN is ABC<PASSWORD>:
cat telemetry-data-as-object.json | coap post coap://coap.thingsboard.cloud/api/v1/ABC123/telemetry
# Publish data as an array of objects without timestamp (server-side timestamp will be used). Replace $ACCESS_TOKEN with corresponding value.
cat telemetry-data-as-array.json | coap post coap://coap.thingsboard.cloud/api/v1/$ACCESS_TOKEN/telemetry
# For example, $ACCESS_TOKEN is ABC<PASSWORD>:
cat telemetry-data-as-array.json | coap post coap://coap.thingsboard.cloud/api/v1/ABC123/telemetry
shell
echo "##############" echo "<NAME>" echo "##############" python /repos/mrnet/scripts/train.py $1 -d meniscus -s axial --gpu --epochs 40 echo "################" echo "<NAME>" echo "################" python /repos/mrnet/scripts/train.py $1 -d meniscus -s coronal --gpu --epochs 40
shell
#!/bin/sh
cd /root/.ssh
if [ -d keys ]
then
    cat keys/*.pub > authorized_keys
fi
ssh-keygen -A
exec /usr/sbin/sshd -D "$@"
shell
echo "./vendor/bin/php-cs-fixer fix app/ --level=psr2 --verbose --dry-run"; echo -e "\nAlternatively, you can launch PHPCS in interactive mode, with the '-a' parameter.\n\n"; read -p "Press enter to continue" CONTINUE; ### Mess detector #sh -c "vendor/bin/phpmd ./app html cleancode,codesize,controversial,design,naming > MessDetector.html" #echo -e "\n";
shell
#!/bin/sh
INPUT=$(</dev/stdin)
VALID=false
IFS=$'\n'
for LINE in $(echo "$INPUT" | gpg --trust-model always "$@" 2>/dev/null); do
shell
shift 4
echo 1>&2 "Running for $name"
run_twice $input /usr/bin/time -p -o $name $PERFDIR/../dgsh -p $PERFDIR/.. $flags \
    $PERFDIR/../example/$script "$@"
}
shell
echo "Using ${dev_version} as new development version." echo # Make sure dev is fully up to date. echo "Pulling all from remote." git pull --all # Create release path. prepare_dev_branch release "${release_version}" merge_dev_into_master create_git_tag "${git_tag}" # Create dev path. prepare_dev_branch development "${dev_version}"
shell
alias ipy='ipython'
if command-exists poetry; then
    alias pr='poetry run'
fi
if command-exists pyenv; then
    alias pvenvs='pyenv virtualenvs --bare --skip-aliases'
    alias pyvenvs='pvenvs'
    alias pver='pyenv version'
    alias pyver='pver'
    alias pvers='pyenv versions --skip-aliases'
    alias pyvers='pvers'
shell
ln -sFf ~/dotfiles/.path ~/.path
ln -sFf ~/dotfiles/.screenrc ~/.screenrc
ln -sFf ~/dotfiles/.tmux.conf ~/.tmux.conf
ln -sFf ~/dotfiles/.tmux.line.conf ~/.tmux.line.conf
ln -sFf ~/dotfiles/.vimrc ~/.vimrc
ln -sFf ~/dotfiles/.ctags ~/.ctags
ln -sFf ~/dotfiles/.ackrc ~/.ackrc
shell
--proto_path="$1" \ verrpc/verrpc.proto # TODO(guggero): Fix import problem with auctioneerrpc package. # When uncommenting the following lines, there will be errors in the generated # code. You'll need to manually add the auctioneerrpc package import and change # the batch snapshot messages from poolrpc to auctioneerrpc. # This will be fixed by generating the stubs directly in the repo where the # proto lives. # #target_pkg="github.com/lightninglabs/pool/poolrpc"
shell
# Clone Deformable-DETR library from source.
# Since it is not an installable package, we will have to add this to the system path to import functions from it.
git clone https://github.com/fundamentalvision/Deformable-DETR ddetr
cd ddetr && git reset --hard <PASSWORD>
shell
var1="'payment_method' => 'PaymentMethodSnapshot'," rep1="'payment_method' => '\\\Gr4vy\\\model\\\PaymentMethodSnapshot'," sed -i '' "s/$var1/$rep1/g" lib/model/*.php var1="'payment_service' => 'PaymentServiceSnapshot'" rep1="'payment_service' => '\\\Gr4vy\\\model\\\PaymentService'" sed -i '' "s/$var1/$rep1/g" lib/model/*.php
shell
zip -r archive.zip ./* -x **/.*
popd || exit
mkdir -p "dist/${VERSION}"
mv build/tmp/archive.zip "$OUTPUT"
rm -rf build/tmp/
shell
mkdir -p db/data db/init_sql
}
echo "Type 'mysqlstart' to create data folder and start ${CONTAINER_NAME}"
mysqlstart() {
    echo "docker run --rm --name ${CONTAINER_NAME} -p 3306:3306 -v $PWD/data:/var/lib/mysql -e \"MYSQL_ROOT_PASSWORD=<PASSWORD>\" -e \"MYSQL_DATABASE=db\" -d ${IMAGE_NAME}"
    docker run --rm --name ${CONTAINER_NAME} \
        -p 3306:3306 \
shell
[ -z "$LMG_TASK_MESSAGE_FILENAME" ] && LMG_TASK_MESSAGE_FILENAME="task-message" # Cร“DIGOS DE ERRO DO SCRIPT (90-119) LMG_ERR_UNEXPECTED=90 ## INVALID (9X) LMG_ERR_INVALID_ARG=91 LMG_ERR_INVALID_TASK_TYPE=92 LMG_ERR_INVALID_TASK_ARG=93
shell
sudo make
sudo make install
sudo depmod --quick
sudo modprobe dm-imrsim
if [ "$1" = "loop" ]; then
    dd if=/dev/zero of=/tmp/imrsim1 bs=4096 seek=$(((256*4+64)*1024*1024/4096-1)) count=1
    losetup /dev/loop1 /tmp/imrsim1
    echo "0 `imrsim_util/imr_format.sh -d /dev/loop1` imrsim /dev/loop1 0" | dmsetup create imrsim
else
    echo "0 2097152 imrsim /dev/sdb2 0" | dmsetup create imrsim
    dd if=/dev/zero of=/dev/mapper/imrsim bs=4096 seek=$((2097152)) count=$((32*1024)) 2> /dev/null 1> /dev/null
shell
#!/bin/bash
# https://hub.kubeapps.com/charts/stable/redis
helm install --name echoserver gcr.io/google_containers/echoserver:1.4 | tee echoserver.out
shell
## CREATE LIST OF SORTED BAM FILES
ls -1 Intermediate_files/2.bam_alignments/*.sorted_bam > Intermediate_files/2.bam_alignments/samples_list.txt
# start GNU parallel to run each region (e.g. chromosome, scaffold) on a separate CPU core
parallel --gnu --max-procs $NUM_CORES --keep-order "\
shell
mkdir -p logs
cd logs
wget https://people.eecs.berkeley.edu/~bmild/nerf/fern_example_weights.zip
unzip fern_example_weights.zip
shell
#!/bin/sh
docker run --rm -v $PWD:/data \
    -e VERSION=1.0.0-RC1 \
    -e CGO_LDFLAGS="-lstdc++ -lm -lsodium" \
    -e CGO_ENABLED=1 \
    -e GOOS=linux \
    golang:1.13-alpine sh -c "
        apk add --no-cache git build-base zeromq-dev musl-dev pkgconfig alpine-sdk libsodium-dev libsodium-static
        go get -d -u github.com/gopherdata/gophernotes
        cd /go/src/github.com/gopherdata/gophernotes
        go build -a --ldflags '-extldflags \"-static\" -v' -o /data/gophernotes
    "
shell
sudo sysctl vm.overcommit_memory=1
sudo sh -c 'echo never > /sys/kernel/mm/transparent_hugepage/enabled'
/bin/bash /var/prawnalith/cloud_images/docker-compose.sh up
shell
SCRIPT_FILE="genericsect_bow_elmo.py" python ${SCRIPT_FILE} \ --exp_name "genericsect_bow_elmo" \ --exp_dir_path "./genericsect_bow_elmo" \ --model_save_dir "./genericsect_bow_elmo" \ --device cuda:7 \ --layer_aggregation last \ --word_aggregation sum \ --bs 10 \ --lr 1e-4 \ --epochs 50 \ --save_every 5 \
shell
EOF
tests:put sync <<EOF
echo 1: \$1
echo 2: \$2
shell
if [ -n "$1" ] then VERSION="$1" sudo rm -rf /usr/local/go ~/.cache/go-build/* /home/yohann/go/pkg/mod/* /home/yohann/go/src/* /tmp/go${VERSION}.linux-amd64.tar.gz
shell
;;
9[1-8])
    Message="Better hurry with that new disk... One partition is $space % full."
    ;;
99)
    Message="I'm drowning here! There's a partition at $space %!"
    ;;
shell
#!/bin/bash
sudo docker build -t vs_frontend /home/ubuntu/frontEnd
sudo docker run --name vs_frontend -p 80:80 -p 443:443 -d vs_frontend
shell
        exit 1
        ;;
    esac
    shift
done
}
init_venv_python() {
    debugging "Virtualenv: ${venv} doesn't exist, Configuring."
    for ver in {3,2,''}; do # ensure python3 is first
        debugging "Checking Python${ver}: $(which python${ver})"
        if (which python${ver} > /dev/null 2>&1); then
            python_version="$(python${ver} <<< 'import sys; print(sys.version_info[0])')"
            stdout_log "Python Version Selected: python${python_version}"
            break
shell
wget http://archive.cloudera.com/cdh5/one-click-install/trusty/amd64/$DEB_INSTALLER
dpkg -i $DEB_INSTALLER
cat > /etc/apt/preferences.d/cloudera.pref <<EOF
Package: *
Pin: release o=Cloudera, l=Cloudera
Pin-Priority: 501
EOF
apt-get -y install hadoop-0.20-mapreduce-jobtracker
apt-get -y install hadoop-hdfs-namenode
apt-get -y install hadoop-hdfs-datanode
apt-get -y install hadoop-client
shell
#!/bin/bash
# Pre-requisite: run `git submodule init; git submodule update`
kubectl apply -f namespace.yaml
helm install code-server ./code-server/ci/helm-chart -n code-server -f values.yaml
shell
#!/bin/bash
. ../init.sh
BOOTSTRAP_VERSION=3.0.2
PREFIX=/opt/wandbox/fpc-head
shell
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. source common.sh [ $# -ne 1 ] && { usage; } SERVICE=$1 JOBDESC_TPL=${WORKDIR}/${SERVICE}/jobdesc.template JOBDESC=${WORKDIR}/${SERVICE}/jobdesc.json WORKING_DIR=`pwd` JAR=`ls ${ROOTDIR}/s2jobs/target/scala-2.11/s2jobs-assembly*.jar` LIB=`cd ${ROOTDIR}/target/apache-s2graph-*-incubating-bin/lib; pwd`
shell
# tests will be removed from the binary and would not count as uncovered code.
export RUSTFLAGS='-C link-dead-code'
# TODO Maybe in the future there will be a better way. See https://github.com/rust-lang/cargo/issues/1924.
build=$(cargo test --no-run --message-format=json 2>/dev/null \
    | jq -r "select(.profile.test == true) | .filenames[]" \
    | rev | cut -d'/' -f 1 | rev)
shell
#kiwi.context = http://$MICO_HOST:8080/marmotta/
#kiwi.setup.host = true
marmotta.home = $MARMOTTA_HOME
EOF
chown tomcat7:tomcat7 -R $MARMOTTA_HOME
# Config Tomcat
shell
docker build -t jakkaj/odbcbase .
shell
p6_return_aws_account_id "$account_id"
}
######################################################################
shell
mvn dependency:copy-dependencies compile
docker build -t softsecgroup6/tcms .
docker push softsecgroup6/tcms
kubectl apply -f tcms.yml
kubectl patch -f tcms.yml -p "{\"spec\":{\"template\":{\"metadata\":{\"labels\":{\"date\":\"`date +'%s'`\"}}}}}"
kubectl expose deployment lights-tcms --type=NodePort
shell
echo "PLEASE PROVIE A path AND A FILE extension." ;; esac done #RegEx URL Patter HTTPulr="(https|HTTPS).*[a-zA-Z]" sed_HTTPurl="\(https\|HTTPS\).*[a-zA-Z]" #Files In Directory FID=`ls $uPath`
shell
sudo apt-get install jenkins -y
# add mysql objects so that integration tests will succeed
echo "Adding mysql objects..."
mysql --execute="create database searchengine_test;"
mysql --execute="create user 'se_test_user'@'localhost' identified by 'se_test_user';"
mysql --execute="grant all on searchengine_test.* to 'se_test_user'@'localhost';"
shell
# Only execute if this is an uninstall, not an upgrade.
if [ "$1" = "0" ]; then
    CONF_DIR=$(pkg-config --variable=completionsdir bash-completion 2> /dev/null) || true
    if [ "${CONF_DIR}" != "" ]; then
        BASH_COMPLETION_DIR=$CONF_DIR
    elif [ -d /usr/share/bash-completion/completions/ ]; then
        BASH_COMPLETION_DIR=/usr/share/bash-completion/completions/
    fi
shell
config_file=config.cmake
echo "try build ${working_path}"
echo "*******************************************************************************"
echo "start generate cmake project..."
if [ ! -d "build" ]; then
    mkdir -p build
fi
shell
# Install Python2 Tools
~/.pyenv/versions/tools27/bin/pip install $PY2TOOLS
# Protect lib dir for global interpreters
chmod -R -w ~/.pyenv/versions/$PY2/lib/
chmod -R -w ~/.pyenv/versions/$PY3/lib/
shell
set -ex
# Clone git repository
git clone --branch popl-22 https://github.com/stg-tud/pi4
cd pi4/
# Install dependencies
shell
for test_script in *.sh; do
    if [ "$test_script" == "all.sh" ]; then
        continue
    fi
    ./"$test_script"
shell
set -o pipefail
export SHORT_SHA=`git rev-parse --short HEAD`
export BRANCH=`git rev-parse --abbrev-ref HEAD`
echo ">> Building: ${BRANCH}#${SHORT_SHA}"
gcloud builds submit \
    --config cloudbuild.yaml \
    --substitutions SHORT_SHA=$SHORT_SHA \
    --machine-type=n1-highcpu-8 \
    ../
shell
rm -rf $tempfile1
rm -rf $tempfile2
temp1=0
temp2=0
total=0
free=0
shell
curl -sLO https://github.com/shyiko/jabba/raw/master/install.sh
chmod +x install.sh
./install.sh
source ~/.jabba/jabba.sh
jabba install [email protected]
shell
if [ $# -eq 0 ]; then
    echo "USAGE: $0 <classname>"
    echo "# Lists all nodes that use a class"
    exit 64;
fi;
normalize_classname() {
    IFS="::" local class=( $1 )
    echo "${class[@]^}" | sed 's@\s\+@::@g'
shell
#!/usr/bin/env bash
# Install dependencies for building
$GITHUB_WORKSPACE/dev/install-build-deps-ubuntu.sh
# Setup password-less & python3
$GITHUB_WORKSPACE/dev/test-cluster/config-ssh.sh
shell
# Tmux
ln -sfn ~/.dotfiles/tmux/tmux.conf ~/.tmux.conf
# Terminfo
cp -r ~/.dotfiles/terminfo ~/.terminfo
tic -o ~/.terminfo ~/.terminfo/tmux.terminfo
tic -o ~/.terminfo ~/.terminfo/tmux-256color.terminfo
tic -o ~/.terminfo ~/.terminfo/xterm-256color.terminfo
shell
docker volume create --name $OVPN_DATA
docker run -v $OVPN_DATA:/etc/openvpn --log-driver=none --rm winq/winq-openvpn ovpn_genconfig -u udp://$DOMAIN:18888
docker run -v $OVPN_DATA:/etc/openvpn --log-driver=none --rm -it winq/winq-openvpn ovpn_initpki nopass
docker run -v $OVPN_DATA:/etc/openvpn -d --name $name_openvpn --restart always -p 18888:1194/udp --cap-add=NET_ADMIN winq/winq-openvpn
shell
## Contents of "gfarm2rc_hpciNNNNNN": ## |auth disable gsi * ## |auth disable gsi_auth * ## |auth enable sharedsecret * ## |local_user_map gf_mapfile_hpciNNNNNN
shell
for ep in 25 50; do
    # e.g., GA = EPSCN_A2C_x2_0005.pth, GB = RDDBNet_C2B_x2_0005.pth
    GA=$m\@G2LAB_A2C_x$up\_00$ep.pth;
    GB=ResDeconv@G2LAB_C2B_x$up\_00$ep.pth;
    CUDA_VISIBLE_DEVICES=$device python ./src/testCasConstLAB.py \
        --netGA ./checkpoints/$GA \
        --netGB ./checkpoints/$GB;
done
shell
#!/bin/bash -e
DIR=$(dirname ${BASH_SOURCE[0]})
# Create namespace before anything else
kubectl apply -f ${DIR}/pre-deploy/namespace.yml
for SECTION in manifests
do
    echo "## run kubectl apply for ${SECTION}"
    kubectl apply -f ${DIR}/${SECTION}/ | column -t
done
shell
#!/bin/bash
EXPECTED_DBG_AGENT="-agentpath:/opt/cdbg/cdbg_java_agent.so=--log_dir=/var/log/app_engine,--alsologtostderr=true,--cdbg_extra_class_path=${JETTY_BASE}/webapps/root/WEB-INF/classes:${JETTY_BASE}/webapps/root/WEB-INF/lib"
ACTUAL_DBG_AGENT="$(export GAE_INSTANCE=instance; /docker-entrypoint.bash env | grep DBG_AGENT | cut -d '=' -f 1 --complement)"
if [ "$ACTUAL_DBG_AGENT" != "$EXPECTED_DBG_AGENT" ]; then
    echo "DBG_AGENT='$(echo ${ACTUAL_DBG_AGENT})'"
    exit 1
else
    echo OK
fi
shell
make build-docker
docker login --username "$DOCKER_USERNAME" --password "$<PASSWORD>"
export REPO=mmichaelb/gosharexserver
docker build -f ./build/package/Dockerfile -t ${REPO}:$1 .
docker tag ${REPO}:$1 ${REPO}:latest
docker push ${REPO}
shell
local prefix
local description
if [ $# -eq 1 ]; then
    prefix=$(mvn_get_plugin_prefix "$1")
    description="$1"
else
    prefix=$1
    description="$2"
fi
echo "$description" | sed -n "/^${prefix}:/s/^${prefix}:\(.*\)/\1/p"
}
# mvn-gen-pom [-t <template>] [groupId] [artifactId] [version]
shell
<gh_stars>0
#!/usr/bin/env sh
DIRNAME=$(cd "$(dirname "${BASH_SOURCE}")" ; pwd -P)
shell
pass proto tcp from any to any port { www https } \\
    flags S/SA keep state \\
    (max-src-conn 100, max-src-conn-rate 100/1, \\
    overload <bruteforce> flush global)
"
PF_CONF=/etc/pf.conf
echo "$pf_conf" | doas tee -a $PF_CONF >/dev/null
shell
#!/bin/bash
# To simplify things, we don't want to install mariadb-server until we've done
# our initial sync of filesystems from blue. Blue will ssh in and call this
# when it's done zfs send.
apt-get -qq install -y mariadb-server mariadb-client
shell
#!/bin/bash
set -eo pipefail
DB_PASSWORD=$(dd if=/dev/random bs=8 count=1 2>/dev/null | od -An -tx1 | tr -d ' \t\n')
aws secretsmanager create-secret --name list-manager --description "List-manager database password" --secret-string "{\"password\":\"<PASSWORD>\"}"
shell
echo "[$APP_ENV] Applying application config..." cp ./docker/$APP_ENV/app/app.ini ./data/app/gogs/conf/app.ini cat ./data/app/gogs/conf/app.ini
shell
./SegParser train train-file:../../data/$args/$args.train.$runid model-name:../../data/$args/$args.model.$runid decode-type:non-proj test test-file:../../data/$args/$args.test.$runid seed:${runid} $@
rm ../../data/$args/$args.train.$runid
rm ../../data/$args/$args.test.$runid
shell
editorconfig \
    file-icons \
    language-diff \
    language-docker \
    language-puppet \
    language-terraform \
    linter \
    linter-jshint \
    linter-ruby \
    native-ui \
    one-dark-syntax \
shell
#!/bin/bash
xclip -sel clip -o >in.in
shell
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# This software is provided by the copyright holders and contributors "as is"
# and any express or implied warranties, including, but not limited to, the
# implied warranties of merchantability and fitness for a particular purpose
shell
#!/usr/bin/env bash
a=`cut -f 5 $1 | awk '{ if ($1 >= 10) print;}' | stats.py | tr '\n' ' ' | cut -f 6,7,9,10 -d ' '`
echo "STR-all $a"
a=`cut -f 5 $1 | awk '{ if ($1 >= 10 && $1 < 50) print;}' | stats.py | tr '\n' ' ' | cut -f 6,7,9,10 -d ' '`
echo "STR-10 $a"
a=`cut -f 5 $1 | awk '{ if ($1 >= 50) print;}' | stats.py | tr '\n' ' ' | cut -f 6,7,9,10 -d ' '`
echo "STR-50 $a"
shell
#!/usr/bin/env bash
set -eu
# $1 should be project name
SERVICE_ACCOUNT_JSON_PATH="$REMOCON_PREFIX/$REMOCON_FIREBASE_PROJECT_ID/service-account.json"
shell
set -e echo "Building git-deps docker file for repository $1..." docker build -t pwb/git-deps . docker run --rm -t -i -p 5000:5000 -v $1:/src --name git-deps pwb/git-deps /bin/bash
shell
ts-node rpc-cache-connection/test/test_connexion_proxy.ts
shell
set -e
docker build -t bithavoc/buildpacks-cloud-builder .
shell
#!/bin/bash
read proc < .pid_splunge
kill -9 $proc
shell
cp 15MB $mountpoint/$projName/$writeable_dir &
$dxfuse -sync &
SYNC_PID=$!
sleep 1
# Additional sync commands fail until the above sync completes
sync_output=$($dxfuse -sync 2>&1)
if echo "$sync_output" | grep -q "another sync operation is already running"; then
    wait $SYNC_PID
shell
rms='rsync --partial --progress -av -AX -e ssh'
while true; do
    $rds cent-vm:sync2git-logs ../
    $rds ../sync2git-logs composer01.rdu2.centos.org:
    $rms ../sync2git-logs/. composer01.rdu2.centos.org:centos-sync-packages/logs/
    $rds ../sync2git-logs dashboard:
    $rms ../sync2git-logs/. dashboard:centos-sync-packages/logs/
    date --iso=minutes; sleep 10m
done
shell
#!/bin/bash
docker-compose -f /commons/infrastructure/prod/elasticsearch/docker-compose.yml up -d
shell
load test_helper
setup() {
    dokku "$PLUGIN_COMMAND_PREFIX:create" l
    dokku apps:create my_app
    dokku "$PLUGIN_COMMAND_PREFIX:link" l my_app
}
teardown() {
    dokku "$PLUGIN_COMMAND_PREFIX:unlink" l my_app
shell
mkdir $PROJ_DIR
echo "#########################"
echo $PID_FIELD
echo "A" $A
shell
#!/bin/bash
/usr/sbin/alternatives --install /usr/bin/java java /usr/java/latest/jre/bin/java 20000
/usr/sbin/alternatives --install /usr/bin/javaws javaws /usr/java/latest/jre/bin/javaws 20000
/usr/sbin/alternatives --install /usr/bin/javac javac /usr/java/latest/bin/javac 20000
/usr/sbin/alternatives --install /usr/bin/jar jar /usr/java/latest/bin/jar 20000
shell
#!/bin/bash
sudo apt-get -y update
sudo apt-get -y install libffi-dev gnupg2 libgpgme-dev swig
shell
#!/bin/bash
OS=`uname -s`
DOCINDEX=${PROJECT_ROOT}/docs/index.html
if [ "${OS}" = "Darwin" ]
then
    open ${DOCINDEX}
elif [ "${OS}" = "Linux" ]
then
    xdg-open ${DOCINDEX}
shell
fi
MCU=atmega328p
F_CPU=16000000
ARDUINO_VARIANT=standard
EXT_FLAGS="-DBAUD=$1 -DPC_SERIAL_SOFT -DBUS_SERIAL0 -D__AVR_ATmega328P__"
make
# upload using arduino bootloader on the chip's board
#avrdude -p atmega328p -b19200 -c arduino -P /dev/ttyUSB$2 -cwiring -D -Uflash:w:uartbus_connector.cpp.hex:i
# upload directly using arduino as an isp
if [ $2 -gt -1 ]; then
    avrdude -p atmega328p -b19200 -c avrisp -P /dev/ttyUSB$2 -Uflash:w:uartbus_connector.cpp.hex:i
shell
set -a
_user=$(oc whoami)
usage() { echo "Usage: $0 [-c <string> (cluster template to apply -- sailplane or observability)] [-w (wait for Apps to turn healthy)]" 1>&2; exit 1; }
GIT_ROOT=$(git rev-parse --show-toplevel)
source $GIT_ROOT/scripts/common.sh
while getopts c:w flag
do
shell
done
for x in "one two three" # this is just for one string
do
    echo number $x
done
for myfile in /etc/r*
shell
python /data/project/rw/dummy.py
shell
# Wait until the processes have been shut down
while pgrep -x polybar >/dev/null; do sleep 1; done
# Launch bar1 and bar2
polybar --reload top &
insync start &
#polybar bottom &
shell
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
shell
cp /var/www/swamp-web-server/env.swampinabox /var/www/swamp-web-server/.env
fi
sed -i -e "s/SED_HOSTNAME/$HOSTNAME/" /var/www/swamp-web-server/.env
sed -i -e "s/SED_ENVIRONMENT/SWAMP-in-a-Box/" /var/www/swamp-web-server/.env
chown apache:apache /var/www/swamp-web-server/.env
chmod 400 /var/www/swamp-web-server/.env
echo "Setting Laravel application key"
shell
echo "deb https://apt.dockerproject.org/repo ubuntu-trusty main" | tee /etc/apt/sources.list.d/docker.list apt-get update apt-get install -y docker-engine docker ps -a
shell
#!/bin/bash
cur_dir=`dirname $(readlink -f $BASH_SOURCE)`
APP_TITLE='flask-app-seed' # change this to a name of your choice
DIRS_TO_DEPLOY="flask_app common"
DEPLOY_ROOT="$cur_dir/../../deploydir-$APP_TITLE"
DEPLOY_FILES_LOCATION=$cur_dir
shell
    exit 1
fi
# before starting current job, remove evidence of failed job.
if [ -e "$lockfile_failed_fp" ] ; then
    rm "$lockfile_failed_fp"
fi
# automatically remove the lockfile when finished, whether fail or success
function remove_lockfile() {
    rm $lockfile_fp
}
function trap_success() {
    if [ ! -e "$lockfile_failed_fp" ] ; then
shell
<gh_stars>1-10
#!/bin/bash
# ysoftman
# Print only the subdirectories (excluding files)
# ls -lR ./ysoftman : in the ./ysoftman directory, -l : list, -R : recurse into subdirectories
# | grep ":$" : take the previous output through the pipe and keep only the lines ending with ":"
# | sed "s/^/[subdir]/" : take the previous output through the pipe and prepend [subdir] to each line
ls -lR .. | grep ":$" | sed "s/^/[subdir]/"
shell
# (001) 345-0000 does not match -- there are multiple spaces in between
re=[0-9]{3}-[0-9]{3}-[0-9]{4}
re1=[0-9]{3}-[0-9]{4}
re2=^\([0-9]{3}\)$
temp=''
for line in `cat ./notes/leetcode/simple/file.txt`
do
    if [[ "$line" =~ ^[0-9]{3}-[0-9]{3}-[0-9]{4}$ ]]; then
        echo $line
    elif [[ "$line" =~ ^[0-9]{3}-[0-9]{4}$ && $temp ]]; then
        echo $temp $line
        temp=''
    elif [[ "$line" =~ ^\([0-9]{3}\)$ ]]; then
        temp=$line
shell
--from=markdown \
    -c $HOME/.pandoc/styles/buttondown.css \
    --filter=pandoc-expandpaths.py \
    --standalone \
    --mathjax \
    --email-obfuscation=javascript \
    ${options[@]:-} \
    -o "$destfile" -- "$basefilename.$ext"
    done=1
    break
fi
done
(( "$done" )) || { echo "Error: no source file was found!"; exit 1; }
shell
# <NAME>, 2019/12/16
#
# Usage:
# This script is called by aci_restart_instances.sh n-times, once per region.
# $ ./aci-restart-instance.sh cjoakim-atw-1-eastus
RESOURCE=$1
shell
java -cp dist/xmlpg.jar edu.nps.moves.xmlpg.Xmlpg DIS7.xml schema
echo "Generating csharp"
spatial schema generate --cachePath=.spatialos/schema_codegen_cache --output=build/GeneratedCode/csharp --language=csharp
echo "Generating java"
spatial schema generate --cachePath=.spatialos/schema_codegen_cache --output=build/GeneratedCode/java --language=java
echo "Generating cpp"
spatial schema generate --cachePath=.spatialos/schema_codegen_cache --output=build/GeneratedCode/cpp --language=cpp