code (string, 2–1.05M) | repo_name (string, 5–110) | path (string, 3–922) | language (1 class) | license (15 classes) | size (int64, 2–1.05M)
---|---|---|---|---|---
config() {
NEW="$1"
OLD="$(dirname "$NEW")/$(basename "$NEW" .new)"
# If there's no config file by that name, mv it over:
if [ ! -r "$OLD" ]; then
mv "$NEW" "$OLD"
elif [ "$(cat "$OLD" | md5sum)" = "$(cat "$NEW" | md5sum)" ]; then
# toss the redundant copy
rm "$NEW"
fi
# Otherwise, we leave the .new copy for the admin to consider...
}
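# Typical Slackware doinst.sh idiom: each freshly installed .new config file
# is promoted when no config exists yet, discarded when identical to the
# current one, and otherwise left in place for the admin to merge by hand.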
config etc/typespeedrc.new
| panosmdma/SlackOnly-SlackBuilds | games/typespeed/doinst.sh | Shell | mit | 375 |
#!/bin/bash
set -x
set -e
set -u
if [ "$UID" != "0" ]; then
echo "This script must be run as root" >&2
exit 1
fi
packages=`grep ^Package: /var/lib/apt/lists/developer.download.nvidia.com_compute_cuda_repos_ubuntu1404_*_Packages | cut -f 2 -d ' '`
apt-get -y remove $packages cuda-repo-ubuntu1404
rm -f /etc/apt/sources.list.d/cuda.list
apt-get -y update
apt-get --fix-broken -y install
## The nvidia driver sucks for desktop use :-(
#if ! [ -e /etc/apt/sources.list.d/cuda.list ]; then
#wget -O /tmp/cuda.deb http://developer.download.nvidia.com/compute/cuda/repos/ubuntu1404/x86_64/cuda-repo-ubuntu1404_6.5-14_amd64.deb
#dpkg -i /tmp/cuda.deb
#apt-get -y update
#fi
#apt-get -y install --install-recommends --fix-broken --ignore-hold --auto-remove \
#cuda \
#nvidia-prime
| dnuffer/setup | remove_cuda.sh | Shell | mit | 781 |
#!/bin/sh
# complementary script for Telegram Desktop build:
# upgrade GCC to version 6.x
# (c) 2017 Dmitry Veltishchev <[email protected]> (github:vdmit)
# See LICENSE.md for legal info
# WARNING: this script changes your default GCC compiler.
set -e
inst()
{
sudo apt install -y "$@"
}
log()
{
echo "`date +%Y.%m.%d-%H-%M:%S`" "$@"
}
log "adding test toolchain repo"
sudo add-apt-repository "ppa:ubuntu-toolchain-r/test"
sudo apt update
inst gcc-6 g++-6
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-6 60
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-6 60
log "Checking for GCC version"
gcc -dumpversion
| vdmit/autogram | install-gcc-6.x.sh | Shell | mit | 659 |
# Sets reasonable OS X defaults.
#
# Or, in other words, set shit how I like in OS X.
#
# The original idea (and a couple settings) were grabbed from:
# https://github.com/mathiasbynens/dotfiles/blob/master/.osx
#
COMPUTER_NAME="rafaelrozon"
osascript -e 'tell application "System Preferences" to quit'
# Ask for the administrator password upfront
sudo -v
# Keep-alive: update existing `sudo` time stamp until `.macos` has finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
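# (`kill -0 "$$"` succeeds only while this script's process still exists, so
# the background sudo-refresh loop exits on its own once the script finishes)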
###############################################################################
# General UI/UX #
###############################################################################
# Set computer name (as done via System Preferences → Sharing)
sudo scutil --set ComputerName "$COMPUTER_NAME"
sudo scutil --set HostName "$COMPUTER_NAME"
sudo scutil --set LocalHostName "$COMPUTER_NAME"
sudo defaults write /Library/Preferences/SystemConfiguration/com.apple.smb.server NetBIOSName -string "$COMPUTER_NAME"
# Set standby delay to 24 hours (default is 1 hour)
sudo pmset -a standbydelay 86400
# Disable audio feedback when volume is changed
defaults write com.apple.sound.beep.feedback -bool false
# Disable the sound effects on boot
sudo nvram SystemAudioVolume=" "
# Menu bar: disable transparency
defaults write com.apple.universalaccess reduceTransparency -bool true
# Disable opening and closing window animations
defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false
# Increase window resize speed for Cocoa applications
defaults write NSGlobalDomain NSWindowResizeTime -float 0.001
# Expand save panel by default
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode -bool true
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode2 -bool true
# Expand print panel by default
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint -bool true
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint2 -bool true
# Save to disk (not to iCloud) by default
defaults write NSGlobalDomain NSDocumentSaveNewDocumentsToCloud -bool false
# Automatically quit printer app once the print jobs complete
defaults write com.apple.print.PrintingPrefs "Quit When Finished" -bool true
# Disable the “Are you sure you want to open this application?” dialog
defaults write com.apple.LaunchServices LSQuarantine -bool false
# Disable Resume system-wide
defaults write com.apple.systempreferences NSQuitAlwaysKeepsWindows -bool false
# Disable the crash reporter
#defaults write com.apple.CrashReporter DialogType -string "none"
# Restart automatically if the computer freezes
sudo systemsetup -setrestartfreeze on
# Disable Notification Center and remove the menu bar icon
launchctl unload -w /System/Library/LaunchAgents/com.apple.notificationcenterui.plist 2> /dev/null
# Disable smart quotes as they’re annoying when typing code
defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false
# Disable smart dashes as they’re annoying when typing code
defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false
###############################################################################
# Trackpad, mouse, keyboard, Bluetooth accessories, and input #
###############################################################################
# Trackpad: enable tap to click for this user and for the login screen
# defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -bool true
# defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
# defaults write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
# Trackpad: map bottom right corner to right-click
# defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad TrackpadCornerSecondaryClick -int 2
# defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad TrackpadRightClick -bool true
# defaults -currentHost write NSGlobalDomain com.apple.trackpad.trackpadCornerClickBehavior -int 1
# defaults -currentHost write NSGlobalDomain com.apple.trackpad.enableSecondaryClick -bool true
# Trackpad: swipe between pages with three fingers
# defaults write NSGlobalDomain AppleEnableSwipeNavigateWithScrolls -bool true
# defaults -currentHost write NSGlobalDomain com.apple.trackpad.threeFingerHorizSwipeGesture -int 1
# defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad TrackpadThreeFingerHorizSwipeGesture -int 1
# Increase sound quality for Bluetooth headphones/headsets
# defaults write com.apple.BluetoothAudioAgent "Apple Bitpool Min (editable)" -int 40
# Enable full keyboard access for all controls
# (e.g. enable Tab in modal dialogs)
defaults write NSGlobalDomain AppleKeyboardUIMode -int 3
# Enable access for assistive devices
# echo -n 'a' | sudo tee /private/var/db/.AccessibilityAPIEnabled > /dev/null 2>&1
# sudo chmod 444 /private/var/db/.AccessibilityAPIEnabled
# TODO: avoid GUI password prompt somehow (http://apple.stackexchange.com/q/60476/4408)
#sudo osascript -e 'tell application "System Events" to set UI elements enabled to true'
# Use scroll gesture with the Ctrl (^) modifier key to zoom
# defaults write com.apple.universalaccess closeViewScrollWheelToggle -bool true
# defaults write com.apple.universalaccess HIDScrollZoomModifierMask -int 262144
# Follow the keyboard focus while zoomed in
# defaults write com.apple.universalaccess closeViewZoomFollowsFocus -bool true
# Disable press-and-hold for keys in favor of key repeat
# defaults write NSGlobalDomain ApplePressAndHoldEnabled -bool false
# Set a blazingly fast keyboard repeat rate
defaults write NSGlobalDomain KeyRepeat -int 1
defaults write NSGlobalDomain InitialKeyRepeat -int 15
# Automatically illuminate built-in MacBook keyboard in low light
defaults write com.apple.BezelServices kDim -bool true
# Turn off keyboard illumination when computer is not used for 5 minutes
defaults write com.apple.BezelServices kDimTime -int 300
# Set language and text formats
# Note: if you’re in the US, replace `EUR` with `USD`, `Centimeters` with
# `Inches`, `en_GB` with `en_US`, and `true` with `false`.
defaults write NSGlobalDomain AppleLanguages -array "en"
defaults write NSGlobalDomain AppleLocale -string "en_CA@currency=CAD"
defaults write NSGlobalDomain AppleMeasurementUnits -string "Centimeters"
defaults write NSGlobalDomain AppleMetricUnits -bool true
# Set the timezone; see `sudo systemsetup -listtimezones` for other values
sudo systemsetup -settimezone "America/Vancouver" > /dev/null
# Disable auto-correct
defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false
# Stop iTunes from responding to the keyboard media keys
#launchctl unload -w /System/Library/LaunchAgents/com.apple.rcd.plist 2> /dev/null
###############################################################################
# Screen #
###############################################################################
# Require password immediately after sleep or screen saver begins
defaults write com.apple.screensaver askForPassword -int 1
defaults write com.apple.screensaver askForPasswordDelay -int 0
# Save screenshots to a "screenshots" folder on the desktop (the folder must exist)
defaults write com.apple.screencapture location -string "${HOME}/Desktop/screenshots"
# Save screenshots in PNG format (other options: BMP, GIF, JPG, PDF, TIFF)
defaults write com.apple.screencapture type -string "png"
# Disable shadow in screenshots
defaults write com.apple.screencapture disable-shadow -bool true
# Enable subpixel font rendering on non-Apple LCDs
defaults write NSGlobalDomain AppleFontSmoothing -int 2
###############################################################################
# Finder #
###############################################################################
# Finder: allow quitting via ⌘ + Q; doing so will also hide desktop icons
defaults write com.apple.finder QuitMenuItem -bool true
# Finder: disable window animations and Get Info animations
defaults write com.apple.finder DisableAllAnimations -bool true
# Finder: show hidden files by default
defaults write com.apple.finder AppleShowAllFiles -bool true
# Finder: show all filename extensions
defaults write NSGlobalDomain AppleShowAllExtensions -bool true
# Finder: show status bar
defaults write com.apple.finder ShowStatusBar -bool true
# Finder: show path bar
defaults write com.apple.finder ShowPathbar -bool true
# Finder: allow text selection in Quick Look
defaults write com.apple.finder QLEnableTextSelection -bool true
# Display full POSIX path as Finder window title
defaults write com.apple.finder _FXShowPosixPathInTitle -bool true
# Keep folders on top when sorting by name
defaults write com.apple.finder _FXSortFoldersFirst -bool true
# When performing a search, search the current folder by default
defaults write com.apple.finder FXDefaultSearchScope -string "SCcf"
# Disable the warning when changing a file extension
defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false
# Avoid creating .DS_Store files on network or USB volumes
defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true
defaults write com.apple.desktopservices DSDontWriteUSBStores -bool true
# Disable disk image verification
defaults write com.apple.frameworks.diskimages skip-verify -bool true
defaults write com.apple.frameworks.diskimages skip-verify-locked -bool true
defaults write com.apple.frameworks.diskimages skip-verify-remote -bool true
# Use AirDrop over every interface.
defaults write com.apple.NetworkBrowser BrowseAllInterfaces -bool true
# Use column view in all Finder windows by default
# Four-letter codes for the other view modes: `icnv` (icon), `Nlsv` (list), `Flwv` (cover flow)
defaults write com.apple.finder FXPreferredViewStyle -string "clmv"
# Disable the warning before emptying the Trash
defaults write com.apple.finder WarnOnEmptyTrash -bool false
# Enable the MacBook Air SuperDrive on any Mac
# sudo nvram boot-args="mbasd=1"
# Expand the following File Info panes:
# “General”, “Open with”, and “Sharing & Permissions”
defaults write com.apple.finder FXInfoPanesExpanded -dict General -bool true OpenWith -bool true Privileges -bool true
###############################################################################
# Dock #
###############################################################################
# Show indicator lights for open applications in the Dock
defaults write com.apple.dock show-process-indicators -bool true
# Don’t animate opening applications from the Dock
defaults write com.apple.dock launchanim -bool false
# Automatically hide and show the Dock
defaults write com.apple.dock autohide -bool true
# Make Dock icons of hidden applications translucent
defaults write com.apple.dock showhidden -bool true
# No bouncing icons
defaults write com.apple.dock no-bouncing -bool true
###############################################################################
# Dashboard #
###############################################################################
# Speed up Mission Control animations
defaults write com.apple.dock expose-animation-duration -float 0.1
# Disable Dashboard
defaults write com.apple.dashboard mcx-disabled -bool true
# Don’t show Dashboard as a Space
defaults write com.apple.dock dashboard-in-overlay -bool true
###############################################################################
# Hot corners #
###############################################################################
# Possible values:
# 0: no-op
# 2: Mission Control
# 3: Show application windows
# 4: Desktop
# 5: Start screen saver
# 6: Disable screen saver
# 7: Dashboard
# 10: Put display to sleep
# 11: Launchpad
# 12: Notification Center
# Top left screen corner
defaults write com.apple.dock wvous-tl-corner -int 0
defaults write com.apple.dock wvous-tl-modifier -int 0
# Top right screen corner
defaults write com.apple.dock wvous-tr-corner -int 0
defaults write com.apple.dock wvous-tr-modifier -int 0
# Bottom left screen corner → Display to sleep
defaults write com.apple.dock wvous-bl-corner -int 10
defaults write com.apple.dock wvous-bl-modifier -int 0
# Bottom right screen corner
defaults write com.apple.dock wvous-br-corner -int 0
defaults write com.apple.dock wvous-br-modifier -int 0
###############################################################################
# Safari & WebKit #
###############################################################################
# Privacy: don’t send search queries to Apple
defaults write com.apple.Safari UniversalSearchEnabled -bool false
defaults write com.apple.Safari SuppressSearchSuggestions -bool true
# Press Tab to highlight each item on a web page
defaults write com.apple.Safari WebKitTabToLinksPreferenceKey -bool true
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2TabsToLinks -bool true
# Show the full URL in the address bar (note: this still hides the scheme)
defaults write com.apple.Safari ShowFullURLInSmartSearchField -bool true
# Set Safari’s home page to `about:blank` for faster loading
defaults write com.apple.Safari HomePage -string "about:blank"
# Prevent Safari from opening ‘safe’ files automatically after downloading
defaults write com.apple.Safari AutoOpenSafeDownloads -bool false
# Allow hitting the Backspace key to go to the previous page in history
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2BackspaceKeyNavigationEnabled -bool true
# Hide Safari’s bookmarks bar by default
defaults write com.apple.Safari ShowFavoritesBar -bool false
# Disable Safari’s thumbnail cache for History and Top Sites
defaults write com.apple.Safari DebugSnapshotsUpdatePolicy -int 2
# Hide Safari’s sidebar in Top Sites
defaults write com.apple.Safari ShowSidebarInTopSites -bool false
# Remove useless icons from Safari’s bookmarks bar
defaults write com.apple.Safari ProxiesInBookmarksBar "()"
# Enable the Develop menu, the Web Inspector, and the debug menu in Safari
defaults write com.apple.Safari IncludeInternalDebugMenu -bool true
defaults write com.apple.Safari IncludeDevelopMenu -bool true
defaults write com.apple.Safari WebKitDeveloperExtrasEnabledPreferenceKey -bool true
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2DeveloperExtrasEnabled -bool true
# Add a context menu item for showing the Web Inspector in web views
defaults write NSGlobalDomain WebKitDeveloperExtras -bool true
###############################################################################
# Mail #
###############################################################################
# Display emails in threaded mode
defaults write com.apple.mail DraftsViewerAttributes -dict-add "DisplayInThreadedMode" -string "yes"
# Disable send and reply animations in Mail.app
defaults write com.apple.mail DisableReplyAnimations -bool true
defaults write com.apple.mail DisableSendAnimations -bool true
# Copy email addresses as `[email protected]` instead of `Foo Bar <[email protected]>` in Mail.app
defaults write com.apple.mail AddressesIncludeNameOnPasteboard -bool false
# Disable inline attachments (just show the icons)
defaults write com.apple.mail DisableInlineAttachmentViewing -bool true
# Disable automatic spell checking
# defaults write com.apple.mail SpellCheckingBehavior -string "NoSpellCheckingEnabled"
# Disable sound for incoming mail
defaults write com.apple.mail MailSound -string ""
# Disable sound for other mail actions
defaults write com.apple.mail PlayMailSounds -bool false
# Mark all messages as read when opening a conversation
defaults write com.apple.mail ConversationViewMarkAllAsRead -bool true
###############################################################################
# Spotlight #
###############################################################################
# Hide Spotlight tray-icon (and subsequent helper)
#sudo chmod 600 /System/Library/CoreServices/Search.bundle/Contents/MacOS/Search
# Disable Spotlight indexing for any volume that gets mounted and has not yet
# been indexed before.
# Use `sudo mdutil -i off "/Volumes/foo"` to stop indexing any volume.
sudo defaults write /.Spotlight-V100/VolumeConfiguration Exclusions -array "/Volumes"
# Change indexing order and disable some file types
defaults write com.apple.spotlight orderedItems -array \
'{"enabled" = 1;"name" = "APPLICATIONS";}' \
'{"enabled" = 1;"name" = "SYSTEM_PREFS";}' \
'{"enabled" = 1;"name" = "DIRECTORIES";}' \
'{"enabled" = 1;"name" = "CONTACT";}' \
'{"enabled" = 1;"name" = "DOCUMENTS";}' \
'{"enabled" = 1;"name" = "PDF";}' \
'{"enabled" = 0;"name" = "FONTS";}' \
'{"enabled" = 0;"name" = "MESSAGES";}' \
'{"enabled" = 0;"name" = "EVENT_TODO";}' \
'{"enabled" = 0;"name" = "IMAGES";}' \
'{"enabled" = 0;"name" = "BOOKMARKS";}' \
'{"enabled" = 0;"name" = "MUSIC";}' \
'{"enabled" = 0;"name" = "MOVIES";}' \
'{"enabled" = 0;"name" = "PRESENTATIONS";}' \
'{"enabled" = 0;"name" = "SPREADSHEETS";}' \
'{"enabled" = 0;"name" = "SOURCE";}'
# Load new settings before rebuilding the index
killall mds > /dev/null 2>&1
# Make sure indexing is enabled for the main volume
sudo mdutil -i on / > /dev/null
# Rebuild the index from scratch
sudo mdutil -E / > /dev/null
###############################################################################
# Terminal #
###############################################################################
# Only use UTF-8 in Terminal.app
defaults write com.apple.terminal StringEncodings -array 4
# Use "Basic" theme
defaults write com.apple.terminal "Default Window Settings" -string "Basic"
defaults write com.apple.terminal "Startup Window Settings" -string "Basic"
# Disable audible and visual bells
defaults write com.apple.terminal "Bell" -bool false
defaults write com.apple.terminal "VisualBell" -bool false
# Disable the annoying line marks
defaults write com.apple.Terminal ShowLineMarks -int 0
###############################################################################
# Time Machine #
###############################################################################
# Disable local Time Machine backups
# hash tmutil &> /dev/null && sudo tmutil disablelocal
# Disable local Time Machine snapshots
# sudo tmutil disablelocal
###############################################################################
# Activity Monitor #
###############################################################################
# Show the main window when launching Activity Monitor
defaults write com.apple.ActivityMonitor OpenMainWindow -bool true
# Visualize CPU usage in the Activity Monitor Dock icon
defaults write com.apple.ActivityMonitor IconType -int 5
# Show all processes in Activity Monitor
defaults write com.apple.ActivityMonitor ShowCategory -int 0
# Sort Activity Monitor results by CPU usage
defaults write com.apple.ActivityMonitor SortColumn -string "CPUUsage"
defaults write com.apple.ActivityMonitor SortDirection -int 0
###############################################################################
# Mac App Store #
###############################################################################
# Enable the WebKit Developer Tools in the Mac App Store
defaults write com.apple.appstore WebKitDeveloperExtras -bool true
# Enable Debug Menu in the Mac App Store
defaults write com.apple.appstore ShowDebugMenu -bool true
# Enable the automatic update check
defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool true
# Check for software updates daily, not just once per week
defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 1
# Download newly available updates in background
defaults write com.apple.SoftwareUpdate AutomaticDownload -int 1
# Install System data files & security updates
defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 1
# Turn on app auto-update
defaults write com.apple.commerce AutoUpdate -bool true
# Allow the App Store to reboot machine on macOS updates
# defaults write com.apple.commerce AutoUpdateRestartRequired -bool true
###############################################################################
# Kill affected applications #
###############################################################################
for app in "Address Book" "Calendar" "Contacts" "Dock" "Finder" "Mail" "Safari" "SystemUIServer" "iCal"; do
killall "${app}" &> /dev/null
done
exit $?
| rafaelrozon/dotfiles | roles/osx/files/set-defaults.sh | Shell | mit | 21,591 |
#!/bin/sh
########################################
### ###
### DON'T EDIT THIS FILE AFTER BUILD ###
### ###
### USE ENVIRONMENT VARIABLES ###
### INSTEAD ###
### ###
########################################
# When not specified, try to be smart and predict an allowmask
if [ "$INSP_SERVICES_ALLOWMASK" = "" ]; then
INSP_SERVICES_ALLOWMASK=$(ip route show dev eth0 | grep -v default | cut -d" " -f1 | head -1)
fi
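# Illustrative example (assumed values): if "ip route show dev eth0" printed
# "172.17.0.0/16 proto kernel scope link src 172.17.0.5", the predicted
# allowmask above would be 172.17.0.0/16.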
# Set sendpass to password if it's not further specified
if [ "$INSP_SERVICES_SENDPASS" = "" ] && [ "$INSP_SERVICES_PASSWORD" != "" ]; then
INSP_SERVICES_SENDPASS="$INSP_SERVICES_PASSWORD"
fi
# Set recvpass to password if it's not further specified
if [ "$INSP_SERVICES_RECVPASS" = "" ] && [ "$INSP_SERVICES_PASSWORD" != "" ]; then
INSP_SERVICES_RECVPASS="$INSP_SERVICES_PASSWORD"
fi
# Set TLS support by extending the generation config extension
if [ "${INSP_SERVICES_TLS_ON}" = "yes" ]; then
INSP_SERVICES_OPTIONS="$INSP_SERVICES_OPTIONS ssl=\"gnutls\""
fi
# Set default services name
INSP_SERVICES_NAME="${INSP_SERVICES_NAME:-services&netsuffix;}"
if [ "${INSP_SERVICES_SENDPASS}" != "" ] && [ "${INSP_SERVICES_RECVPASS}" != "" ]; then
cat <<EOF
<link name="${INSP_SERVICES_NAME}"
ipaddr="${INSP_SERVICES_IPADDR:-services}"
port="7000"
allowmask="${INSP_SERVICES_ALLOWMASK}"
hidden="${INSP_SERVICES_HIDDEN:-no}"
sendpass="${INSP_SERVICES_SENDPASS}"
recvpass="${INSP_SERVICES_RECVPASS}"
${INSP_SERVICES_OPTIONS}>
<uline server="$INSP_SERVICES_NAME" silent="yes">
<module name="m_sasl.so">
<sasl target="$INSP_SERVICES_NAME">
EOF
fi
| Adam-/inspircd-docker | conf/services.sh | Shell | mit | 1,745 |
#!/bin/sh -e
# Edit the following to change the name of the database user that will be created:
APP_DB_USER=admin
APP_APIW_USER=clerk
# Edit the following to change the name of the database that is created (defaults to the user name)
APP_DB_NAME=airline
# Edit the following to change the version of PostgreSQL that is installed
PG_VERSION=9.3
# TODO Generate a password
APP_DB_PASS='6aefc51303e28f3d82d2945fa35add12'
APP_APIW_PASS='fcb0412fa508923ff05793ace6570309'
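# One way to address the TODO above and generate the passwords at provision
# time instead of hardcoding them (a sketch; assumes openssl is available):
#APP_DB_PASS=$(openssl rand -hex 16)
#APP_APIW_PASS=$(openssl rand -hex 16)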
###########################################################
# Changes below this line are probably not necessary
###########################################################
print_db_usage () {
echo "Your PostgreSQL database has been setup and can be accessed on your local machine on the forwarded port (default: 15432)"
echo " Host: localhost"
echo " Port: 15432"
echo " Database: $APP_DB_NAME"
echo " Username: $APP_DB_USER"
echo " Password: $APP_DB_PASS"
echo ""
echo "Admin access to postgres user via VM:"
echo " vagrant ssh"
echo " sudo su - postgres"
echo ""
echo "psql access to app database user via VM:"
echo " vagrant ssh"
echo " sudo su - postgres"
echo " PGUSER=$APP_DB_USER PGPASSWORD=$APP_DB_PASS psql -h localhost $APP_DB_NAME"
echo ""
echo "Env variable for application development:"
echo " DATABASE_URL=postgresql://$APP_DB_USER:$APP_DB_PASS@localhost:15432/$APP_DB_NAME"
echo " DATABASE_URL=postgresql://$APP_APIW_USER:$APP_APIW_PASS@localhost:15432/$APP_DB_NAME"
echo ""
echo "Local command to access the database via psql:"
echo " PGUSER=$APP_APIW_USER PGPASSWORD=$APP_APIW_PASS psql -h localhost -p 15432 $APP_DB_NAME"
}
export DEBIAN_FRONTEND=noninteractive
PROVISIONED_ON=/etc/vm_provision_on_timestamp
if [ -f "$PROVISIONED_ON" ]
then
echo "VM was already provisioned at: $(cat $PROVISIONED_ON)"
echo "To run system updates manually login via 'vagrant ssh' and run 'apt-get update && apt-get upgrade'"
echo ""
print_db_usage
exit
fi
PG_REPO_APT_SOURCE=/etc/apt/sources.list.d/pgdg.list
if [ ! -f "$PG_REPO_APT_SOURCE" ]
then
# Add PG apt repo:
echo "deb http://apt.postgresql.org/pub/repos/apt/ precise-pgdg main" > "$PG_REPO_APT_SOURCE"
# Add PGDG repo key:
wget --quiet -O - http://apt.postgresql.org/pub/repos/apt/ACCC4CF8.asc | apt-key add -
fi
# Update package list and upgrade all packages
apt-get update
apt-get -y upgrade
apt-get -y install "postgresql-$PG_VERSION" "postgresql-contrib-$PG_VERSION"
PG_CONF="/etc/postgresql/$PG_VERSION/main/postgresql.conf"
PG_HBA="/etc/postgresql/$PG_VERSION/main/pg_hba.conf"
PG_DIR="/var/lib/postgresql/$PG_VERSION/main"
# Edit postgresql.conf to change listen address to '*':
sed -i "s/#listen_addresses = 'localhost'/listen_addresses = '*'/" "$PG_CONF"
# Append to pg_hba.conf to add password auth:
echo "host all all all md5" >> "$PG_HBA"
# Restart so that all new config is loaded:
service postgresql restart
cat << EOF | su - postgres -c psql
-- Create the database user:
CREATE USER $APP_DB_USER WITH PASSWORD '$APP_DB_PASS';
CREATE USER $APP_APIW_USER WITH PASSWORD '$APP_APIW_PASS';
-- Create the database:
CREATE DATABASE $APP_DB_NAME WITH OWNER $APP_DB_USER;
EOF
# Quote the command string so psql (not su's shell) receives the db name and user:
cat create_tables.sql | su - postgres -c "psql $APP_DB_NAME $APP_DB_USER"
# Tag the provision time:
date > "$PROVISIONED_ON"
echo "Successfully created PostgreSQL dev virtual machine."
echo ""
print_db_usage
| finessed/airline | VirtualMachines/pg-airline/Vagrant-setup/bootstrap.sh | Shell | epl-1.0 | 3,459 |
#!/bin/bash
yum install -y vim wget lrzsz
groupadd -rf serv
echo -e "Setting path ... \c"
touch "/usr/local/etc/path"
echo '
#import path
for path in `cat /usr/local/etc/path`; do
export PATH=$PATH:$path
done
' > /etc/profile.d/path.sh
echo "[down]";
cp tools.sh /etc/profile.d/
echo -e "Setting vimrc ... \c"
echo '
hi Comment ctermfg=darkgrey
set tabstop=4
set nu
' > ~/.vimrc
echo "[done]"
echo -e "Setting .bashrc ... \c"
echo '
alias du="du -h --max-depth=1"
' >> /etc/bashrc
echo "[done]"
| LyonWong/CentUp | base.sh | Shell | gpl-2.0 | 504 |
#!/bin/bash
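# Swap the emoeats and emoeatpts result directories (and the matching .cfg
# and .out files inside them) for every Gar<h>-2fl-<j><i> experiment,
# using tmp as scratch space for the three-way rename.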
for h in {30,}
do
for i in {uni,rl}
do
for j in {1,2,3}
do
cd Gar${h}-2fl-${j}${i}
mv emoeats/emoeats.cfg emoeats/emoeatpts.cfg
mv emoeats/emoeats.out emoeats/emoeatpts.out
mv emoeatpts/emoeatpts.cfg emoeatpts/emoeats.cfg
mv emoeatpts/emoeatpts.out emoeatpts/emoeats.out
mv emoeats tmp
mv emoeatpts emoeats
mv tmp emoeatpts
cd ..
done
done
done
| vira-lytvyn/labsAndOthersNiceThings | DataMining/I_A_D/GA_C++/deong-sls-f104071/deong-sls-f104071/src/scripts/fixnames.sh | Shell | gpl-2.0 | 502 |
#! /bin/sh
# Copyright (C) 2002-2018 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# Test that Automake suggests using AC_PROG_F77/FC if Fortran sources
# are used.
. test-init.sh
cat >Makefile.am <<END
bin_PROGRAMS = hello
hello_SOURCES = hello.f foo.f95
END
$ACLOCAL
AUTOMAKE_fails
grep AC_PROG_F77 stderr
grep AC_PROG_FC stderr
| komh/automake-os2 | t/fort1.sh | Shell | gpl-2.0 | 942 |
#!/bin/bash
echo "****Start building****"
echo "****Cleaning****"
rm output/marvelc/boot.img-kernel
rm output/marvelc/out/ramdisk-new.gz
rm output/marvelcfiles/system/lib/modules/bcm4329.ko
rm output/marvelcfiles/system/lib/modules/kineto_gan.ko
rm output/marvelcfiles/boot.img
rm output/marvelc/boot_new.img
make clean mrproper
START=$(date +%s)
echo "****Building****"
make ARCH=arm marvelc_defconfig
make -j16
echo "****Creating boot image****"
cp arch/arm/boot/zImage output/marvelc/boot.img-kernel
cp drivers/net/wireless/bcm4329_204/bcm4329.ko output/marvelcfiles/system/lib/modules/bcm4329.ko
cp drivers/net/kineto_gan.ko output/marvelcfiles/system/lib/modules/kineto_gan.ko
cd output/marvelc
./packboot
cd ..
cd ..
cp output/marvelc/boot_new.img output/marvelcfiles/boot.img
cd output/marvelcfiles
zip -q -r Jmz-Kernel-marvelc-$(date +%m%d%y).zip .
cd ..
cd ..
mv output/marvelcfiles/Jmz-Kernel-marvelc-$(date +%m%d%y).zip output/Jmz-Kernel-marvelc-$(date +%m%d%y).zip
echo "****Compile done****"
echo "****Kernel and modules are in output/****"
END=$(date +%s)
ELAPSED=$((END - START))
E_MIN=$((ELAPSED / 60))
E_SEC=$((ELAPSED - E_MIN * 60))
printf "Elapsed: "
[ $E_MIN != 0 ] && printf "%d min(s) " $E_MIN
printf "%d sec(s)\n" $E_SEC
read -n 1 -p "Press any key to continue"
| jmztaylor/kernel_gb_marvelc | build.sh | Shell | gpl-2.0 | 1,291 |
#!/bin/sh
system="$1"
start="$2"
end="$3"
field="$4"
echo "SELECT UNIX_TIMESTAMP(creation) AS time, data_$field FROM memory WHERE system='$system' AND UNIX_TIMESTAMP(creation) > '$start' AND UNIX_TIMESTAMP(creation) <= '$end' ORDER BY row_id;" \
|mysql --batch --delimiter=: --skip-column-names sdc \
|awk '{print $1 ":" $2}'
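# Example invocation (hypothetical system name and UNIX timestamps):
#   ./db-query-memory.sh sdc01 1500000000 1500086400 used
# emits "time:value" lines, one per sampled row, ordered by row_id.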
| jheusala/sdc | rrdtool/db-query-memory.sh | Shell | gpl-2.0 | 329 |
#!/bin/bash
java -cp de.bitctrl.dav.toolset.archivcheck-runtime.jar \
-Xmx768m \
de.bitctrl.dav.toolset.archivcheck.ArchivSizer \
-datenverteiler=192.168.1.219:8083 \
-benutzer=vrz \
-authentifizierung=../../bin/dos/passwd \
-baseDir=Hier_muss_das_Basedir_her \
-outputFile=archivsize.txt \
-debugLevelStdErrText=INFO \
-debugLevelFileText=CONFIG
| bitctrl/dav-toolset | de.bitctrl.dav.toolset.archivcheck/src/main/dist/archivsize.sh | Shell | gpl-2.0 | 359 |
#!/bin/bash
. cmd.sh
stage=1
train_stage=-10
use_gpu=true
dir=exp/nnet2_online/nnet_a
. cmd.sh
. ./path.sh
. ./utils/parse_options.sh
if $use_gpu; then
if ! cuda-compiled; then
cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed. Otherwise, call this script with --use-gpu false
EOF
fi
parallel_opts="--gpu 1"
num_threads=1
minibatch_size=512
else
# Use 4 nnet jobs just like run_4d_gpu.sh so the results should be
# almost the same, but this may be a little bit slow.
num_threads=16
minibatch_size=128
parallel_opts="--num-threads $num_threads"
fi
# stages 1 through 3 run in run_nnet2_common.sh.
local/online/run_nnet2_common.sh --stage $stage || exit 1;
if [ $stage -le 4 ]; then
steps/nnet2/train_pnorm_simple2.sh --stage $train_stage \
--splice-width 7 \
--feat-type raw \
--online-ivector-dir exp/nnet2_online/ivectors \
--cmvn-opts "--norm-means=false --norm-vars=false" \
--num-threads "$num_threads" \
--minibatch-size "$minibatch_size" \
--parallel-opts "$parallel_opts" \
--num-jobs-nnet 4 \
--num-epochs 25 \
--add-layers-period 1 \
--num-hidden-layers 2 \
--mix-up 4000 \
--initial-learning-rate 0.02 --final-learning-rate 0.004 \
--cmd "$decode_cmd" \
--pnorm-input-dim 1000 \
--pnorm-output-dim 200 \
data/train data/lang exp/tri3b_ali $dir || exit 1;
fi
if [ $stage -le 5 ]; then
steps/online/nnet2/extract_ivectors_online.sh --cmd "$train_cmd" --nj 4 \
data/test exp/nnet2_online/extractor exp/nnet2_online/ivectors_test || exit 1;
fi
if [ $stage -le 6 ]; then
# Note: comparing the results of this with run_online_decoding_nnet2_baseline.sh,
# it's a bit worse, meaning the iVectors seem to hurt at this amount of data.
# However, experiments by Haihua Xu (not checked in yet) on WSJ, show it helping
# nicely. This setup seems to have too little data for it to work, but it suffices
# to demonstrate the scripts. We will likely modify it to add noise to the
# iVectors in training, which will tend to mitigate the over-training.
steps/nnet2/decode.sh --config conf/decode.config --cmd "$decode_cmd" --nj 20 \
--online-ivector-dir exp/nnet2_online/ivectors_test \
exp/tri3b/graph data/test $dir/decode &
steps/nnet2/decode.sh --config conf/decode.config --cmd "$decode_cmd" --nj 20 \
--online-ivector-dir exp/nnet2_online/ivectors_test \
exp/tri3b/graph_ug data/test $dir/decode_ug || exit 1;
wait
fi
if [ $stage -le 7 ]; then
# If this setup used PLP features, we'd have to give the option --feature-type plp
# to the script below.
steps/online/nnet2/prepare_online_decoding.sh data/lang exp/nnet2_online/extractor \
"$dir" ${dir}_online || exit 1;
fi
if [ $stage -le 8 ]; then
# do the actual online decoding with iVectors.
steps/online/nnet2/decode.sh --config conf/decode.config --cmd "$decode_cmd" --nj 20 \
exp/tri3b/graph data/test ${dir}_online/decode &
steps/online/nnet2/decode.sh --config conf/decode.config --cmd "$decode_cmd" --nj 20 \
exp/tri3b/graph_ug data/test ${dir}_online/decode_ug || exit 1;
wait
fi
if [ $stage -le 9 ]; then
# this version of the decoding treats each utterance separately
# without carrying forward speaker information.
steps/online/nnet2/decode.sh --config conf/decode.config --cmd "$decode_cmd" --nj 20 \
--per-utt true \
exp/tri3b/graph data/test ${dir}_online/decode_per_utt &
steps/online/nnet2/decode.sh --config conf/decode.config --cmd "$decode_cmd" --nj 20 \
--per-utt true \
exp/tri3b/graph_ug data/test ${dir}_online/decode_ug_per_utt || exit 1;
wait
fi
exit 0;
# the experiment (with GPU)
#for x in exp/nnet2_online/nnet_a/decode*; do grep WER $x/wer_* | utils/best_wer.sh; done
%WER 2.20 [ 276 / 12533, 37 ins, 61 del, 178 sub ] exp/nnet2_online/nnet_a/decode/wer_5
%WER 10.22 [ 1281 / 12533, 143 ins, 193 del, 945 sub ] exp/nnet2_online/nnet_a/decode_ug/wer_10
# This is the baseline with spliced non-CMVN cepstra and no iVector input.
# The difference is pretty small on RM; I expect it to be more clear-cut on larger corpora.
%WER 2.30 [ 288 / 12533, 35 ins, 57 del, 196 sub ] exp/nnet2_online/nnet_gpu_baseline/decode/wer_5
%WER 10.98 [ 1376 / 12533, 121 ins, 227 del, 1028 sub ] exp/nnet2_online/nnet_gpu_baseline/decode_ug/wer_10
# and this is the same (baseline) using truly-online decoding; it probably only differs because
# of slight decoding-parameter differences.
%WER 2.31 [ 290 / 12533, 34 ins, 57 del, 199 sub ] exp/nnet2_online/nnet_gpu_baseline_online/decode/wer_5
%WER 10.93 [ 1370 / 12533, 142 ins, 202 del, 1026 sub ] exp/nnet2_online/nnet_gpu_baseline_online/decode_ug/wer_9
# This is the online decoding.
# This truly-online per-utterance decoding gives essentially the same WER as the offline decoding, which is
# as we expect as the features and decoding parameters are the same.
# for x in exp/nnet2_online/nnet_gpu_online/decode*utt; do grep WER $x/wer_* | utils/best_wer.sh; done
%WER 2.28 [ 286 / 12533, 66 ins, 39 del, 181 sub ] exp/nnet2_online/nnet_a_online/decode_per_utt/wer_2
%WER 10.45 [ 1310 / 12533, 106 ins, 241 del, 963 sub ] exp/nnet2_online/nnet_a_online/decode_ug_per_utt/wer_12
# The following are online decoding, as above, but using previous utterances of
# the same speaker to refine the adaptation state. It doesn't make much difference.
# for x in exp/nnet2_online/nnet_gpu_online/decode*; do grep WER $x/wer_* | utils/best_wer.sh; done | grep -v utt
%WER 2.27 [ 285 / 12533, 42 ins, 62 del, 181 sub ] exp/nnet2_online/nnet_a_online/decode/wer_5
%WER 10.26 [ 1286 / 12533, 140 ins, 188 del, 958 sub ] exp/nnet2_online/nnet_a_online/decode_ug/wer_10
| michellemorales/OpenMM | kaldi/egs/rm/s5/local/online/run_nnet2.sh | Shell | gpl-2.0 | 5,863 |
# Script to process files with HAN data (HAN-MARC) through an XSLT transformation to get MARC21 records
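# Usage: ./transform.han2sbmarc.sh <basedir>
# <basedir> is expected to contain raw.hanmarc/ (input XML), out.swissbib-MARC/
# (output), xslt/HAN.Bestand.xslt and libs/saxon9.jar.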
basedir=$1
inputdir=$1/raw.hanmarc
outputdir=$1/out.swissbib-MARC
xslt=$basedir/xslt/HAN.Bestand.xslt
output=HAN.marc21.nr
cp=$1/libs/saxon9.jar
#institutioncode=$2
nr=1
echo "start HAN-Marc -> Marc21 transformation"
for datei in $inputdir/*.xml
do
echo "file: "$datei
java -Xms16024m -Xmx16024m -cp $cp net.sf.saxon.Transform -s:$datei -xsl:$xslt -o:$outputdir/`basename "$datei" .xml`_marcxml.xml
nr=$(($nr+1))
done
| swissbib/HANTransformations | transform.han2sbmarc.sh | Shell | gpl-2.0 | 531 |
#!/bin/bash
# **************************************************
# capture IO and MEMORY/CPU information
# **************************************************
RUN_TIME_SECONDS=1000000
IOSTAT_INTERVAL=10
IOSTAT_ROUNDS=$[RUN_TIME_SECONDS/IOSTAT_INTERVAL+1]
CAPTURE_MEMORY_INTERVAL=5
LOG_NAME_IOSTAT=mongodb.iostat
LOG_NAME_MEMORY=mongodb.memory
rm -f $LOG_NAME_IOSTAT
rm -f $LOG_NAME_MEMORY
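# capture-memory.bash is an external helper (not shown here); judging by the
# arguments, it samples memory/CPU for the mongod process every
# CAPTURE_MEMORY_INTERVAL seconds for up to RUN_TIME_SECONDS.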
iostat -dxm $IOSTAT_INTERVAL $IOSTAT_ROUNDS > $LOG_NAME_IOSTAT &
capture-memory.bash ${RUN_TIME_SECONDS} ${CAPTURE_MEMORY_INTERVAL} ${LOG_NAME_MEMORY} mongod &
| Percona-QA/toku-qa | tokudb/software/mongodb/sysbench-mongodb/doit-monitors.bash | Shell | gpl-2.0 | 555 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
TOP=`pwd`
CND_PLATFORM=GNU-Linux-x86
CND_CONF=Release
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=so
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/exercici_5_-_modular
OUTPUT_BASENAME=exercici_5_-_modular
PACKAGE_TOP_DIR=exercici5-modular/
# Functions
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/exercici5-modular/bin"
copyFileToTmpDir "${OUTPUT_PATH}" "${NBTMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/exercici5-modular.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/exercici5-modular.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
| pdavila13/Programacion_C | Modular/Exercici 5 - Modular/nbproject/Package-Release.bash | Shell | gpl-2.0 | 1,511 |
#!/usr/bin/env bash
QUERY=${1:-"\"term2\""}
INDEX_NAME=${2:-index_getjenny_english_0}
PORT=${3:-8888}
curl -v -H "Authorization: Basic $(echo -n 'admin:adminp4ssw0rd' | base64)" \
-H "Content-Type: application/json" -X POST http://localhost:${PORT}/${INDEX_NAME}/term/get -d "{
\"ids\": [${QUERY}]
}"
| GetJenny/starchat | scripts/api_test/getTerms.sh | Shell | gpl-2.0 | 306 |
#!/bin/bash
# This script prepares the CI build for running
echo "Configuring backend"
sed -i -e "s|my \$hostname = .*$|my \$hostname = 'localhost';|" \
-e "s|our \$bsuser = 'obsrun';|our \$bsuser = 'jenkins';|" \
-e "s|our \$bsgroup = 'obsrun';|our \$bsgroup = 'jenkins';|" src/backend/BSConfig.pm.template
cp src/backend/BSConfig.pm.template src/backend/BSConfig.pm
chmod a+x src/api/script/start_test_backend
pushd src/api
echo "Creating database"
mysql -e 'create database ci_api_test;'
echo "Configuring database"
cp config/database.yml.example config/database.yml
sed -e 's,password:.*,password:,' -i config/database.yml
sed -i "s|database: api|database: ci_api|" config/database.yml
echo "Configuring frontend"
cp config/options.yml.example config/options.yml
cp config/thinking_sphinx.yml.example config/thinking_sphinx.yml
echo "Initialize database"
bundle exec rails db:drop db:create db:setup --trace
# Stuff
# Clear temp data
rm -rf log/* tmp/cache tmp/sessions tmp/sockets
popd
# travis rvm can not deal with our extended executable names
sed -i 1,1s,\.ruby2\.4,, src/api/{script,bin}/* docs/api/restility/bin/*
| leemgs/open-build-service | dist/ci/travis_before_script.sh | Shell | gpl-2.0 | 1,146 |
#!/bin/sh
#
# Copyright (C) 2004, 2007, 2012 Internet Systems Consortium, Inc. ("ISC")
# Copyright (C) 2001 Internet Software Consortium.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# $Id$
rm -f ns1/named.conf ns1/myftp.db
rm -f */named.memstats
| phra/802_21 | myODTONE/app/dhcp_usr/libs/bind/bind-9.8.4-P1/bin/tests/system/ixfr/clean.sh | Shell | gpl-2.0 | 913 |
##
# SCRIPT COMMANDS
##
# system-install
#
# This is meant to setup the server on Travis-CI so that it can run the tests.
#
system_install() {
# Add the Google Chrome packages.
header Setting up APT
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
sudo sh -c 'echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list'
sudo apt-get update > /dev/null
# Create a database for our Drupal site.
mysql -e 'create database drupal;'
# Install the latest Drush 6.
header Installing Drush
composer global require --prefer-source --no-interaction drush/drush:6.*
drush dl -y drupalorg_drush-7.x-1.x-dev --destination=$HOME/.drush
drush cc drush
# Build Codebase
mkdir profiles
mv ecdpanopoly profiles/
mkdir drupal
mv profiles drupal/
# Build the current branch.
header Building ecdpanopoly from current branch
cd drupal
drush make --yes profiles/ecdpanopoly/drupal-org-core.make --prepare-install
drush make --yes profiles/ecdpanopoly/drupal-org.make --no-core --contrib-destination=profiles/ecdpanopoly
if [[ "$INSTALL_ecdpanopoly_DEMO_FROM_APPS" != 1 ]]; then
#drush dl ecdpanopoly_demo-1.x-dev ##Disabled to test
fi
drush dl diff
mkdir sites/default/private
mkdir sites/default/private/files
mkdir sites/default/private/temp
# Build Behat dependencies
header Installing Behat
cd profiles/ecdpanopoly/modules/ecdpanopoly/ecdpanopoly_test/tests
composer install --prefer-source --no-interaction
cd ../../../../../../../
# Verify that all the .make files will work on Drupal.org.
header Verifying .make file
drush verify-makefile drupal/profiles/ecdpanopoly/drupal-org.make
find drupal/profiles/ecdpanopoly/modules -name \*.make -print0 | xargs -0 -n1 drush verify-makefile
# Download an old version to test upgrading from.
if [[ "$UPGRADE" != none ]]; then
header Downloading ecdpanopoly $UPGRADE
drush dl ecdpanopoly-$UPGRADE
fi
# Setup files
sudo chmod -R 777 drupal/sites/all
# Setup display for Selenium
header Starting X
sh -e /etc/init.d/xvfb start
sleep 5
# Get Chrome and ChromeDriver
header Installing Google Chrome
sudo apt-get install -y --force-yes google-chrome-stable
wget http://chromedriver.storage.googleapis.com/2.9/chromedriver_linux64.zip
unzip -a chromedriver_linux64.zip
# Insane hack from jsdevel:
# https://github.com/jsdevel/travis-debugging/blob/master/shim.bash
# This allows chrome-sandbox to work in side of OpenVZ, because I can't
# figure out how to start chrome with --no-sandbox.
sudo rm -f $CHROME_SANDBOX
sudo wget https://googledrive.com/host/0B5VlNZ_Rvdw6NTJoZDBSVy1ZdkE -O $CHROME_SANDBOX
sudo chown root:root $CHROME_SANDBOX
sudo chmod 4755 $CHROME_SANDBOX
sudo md5sum $CHROME_SANDBOX
# Get Selenium
header Downloading Selenium
wget http://selenium-release.storage.googleapis.com/2.41/selenium-server-standalone-2.41.0.jar
# Disable sendmail
echo sendmail_path=`which true` >> ~/.phpenv/versions/$(phpenv version-name)/etc/php.ini
# Enable APC
echo "extension=apc.so" >> ~/.phpenv/versions/$(phpenv version-name)/etc/php.ini
echo "apc.shm_size=256M" >> ~/.phpenv/versions/$(phpenv version-name)/etc/php.ini
# Increase the MySQL connection timeout on the PHP end.
echo "mysql.connect_timeout=3000" >> ~/.phpenv/versions/$(phpenv version-name)/etc/php.ini
echo "default_socket_timeout=3000" >> ~/.phpenv/versions/$(phpenv version-name)/etc/php.ini
# Increase the MySQL server timetout and packet size.
mysql -e "SET GLOBAL wait_timeout = 36000;"
mysql -e "SET GLOBAL max_allowed_packet = 33554432;"
}
# before_tests
#
# Setup Drupal to run the tests.
#
before_tests() {
# Hack to get the correct version of ecdpanopoly Demo (there was no 1.0-rc4 or 1.0-rc5)
UPGRADE_DEMO_VERSION=`echo $UPGRADE | sed -e s/^7.x-//`
case $UPGRADE_DEMO_VERSION in
1.0-rc[45])
UPGRADE_DEMO_VERSION=1.0-rc3
;;
esac
# Do the site install (either the current revision or old for the upgrade).
header Installing Drupal
if [[ "$UPGRADE" == none ]]; then
cd drupal
else
cd ecdpanopoly-$UPGRADE
if [[ "$INSTALL_ecdpanopoly_DEMO_FROM_APPS" != 1 ]]; then
drush dl ecdpanopoly_demo-$UPGRADE_DEMO_VERSION
fi
fi
drush si ecdpanopoly --db-url=mysql://root:@127.0.0.1/drupal --account-name=admin --account-pass=admin --account-mail=[email protected] --site-name="ecdpanopoly" --yes
drush vset -y file_private_path "sites/default/private/files"
drush vset -y file_temporary_path "sites/default/private/temp"
# Switch to the ecdpanopoly platform built from Git (if we aren't there already).
cd ../drupal
# If we're an upgrade test, run the upgrade process.
if [[ "$UPGRADE" != none ]]; then
header Upgrading to latest version
cp -a ../ecdpanopoly-$UPGRADE/sites/default/* sites/default/
run_test drush updb --yes
drush cc all
fi
# Our tests depend on ecdpanopoly_test.
#drush en -y ecdpanopoly_test
# Run the webserver
header Starting webserver
drush runserver --server=builtin 8888 > /dev/null 2>&1 &
echo $! > /tmp/web-server-pid
wait_for_port 8888
cd ..
# Run the selenium server
header Starting selenium
java -jar selenium-server-standalone-2.41.0.jar -Dwebdriver.chrome.driver=`pwd`/chromedriver > /dev/null 2>&1 &
echo $! > /tmp/selenium-server-pid
wait_for_port 4444
}
# before_tests
#
# Run the tests.
#
run_tests() {
header Running tests
# Make the Travis tests repos agnostic by injecting drupal_root with BEHAT_PARAMS
export BEHAT_PARAMS="extensions[Drupal\\DrupalExtension\\Extension][drupal][drupal_root]=$BUILD_TOP/drupal"
cd drupal/profiles/ecdpanopoly/modules/ecdpanopoly/ecdpanopoly_test/tests
# If this isn't an upgrade, we test if any features are overridden.
if [[ "$UPGRADE" == none ]]; then
run_test ../../../../scripts/check-overridden.sh
fi
# First, run all the tests in Firefox.
run_test ./bin/behat --config behat.travis.yml
# Then run some Chrome-only tests.
run_test ./bin/behat --config behat.travis.yml -p chrome
}
# after_tests
#
# Clean up after the tests.
#
after_tests() {
header Cleaning up after tests
WEB_SERVER_PID=`cat /tmp/web-server-pid`
SELENIUM_SERVER_PID=`cat /tmp/selenium-server-pid`
# Stop the servers we started
kill $WEB_SERVER_PID
kill $SELENIUM_SERVER_PID
}
##
# UTILITY FUNCTIONS:
##
# Prints a message about the section of the script.
header() {
set +xv
echo
echo "** $@"
echo
set -xv
}
# Sets the exit level to error.
set_error() {
EXIT_VALUE=1
}
# Runs a command and sets an error if it fails.
run_test() {
if ! $@; then
set_error
fi
}
# Runs a command showing all the lines executed
run_command() {
set -xv
$@
set +xv
}
# Wait for a specific port to respond to connections.
wait_for_port() {
local port=$1
while echo | telnet localhost $port 2>&1 | grep -qe 'Connection refused'; do
echo "Connection refused on port $port. Waiting 5 seconds..."
sleep 5
done
}
##
# SCRIPT MAIN:
##
# Capture all errors and set our overall exit value.
trap 'set_error' ERR
# We want to always start from the same directory:
cd $BUILD_TOP
case $COMMAND in
system-install)
run_command system_install
;;
drupal-install)
run_command drupal_install
;;
before-tests)
run_command before_tests
;;
run-tests)
run_command run_tests
;;
after-tests)
run_command after_tests
;;
esac
exit $EXIT_VALUE
| energycircle/ecdpanopoly-drops-7 | profiles/ecdpanopoly/scripts/travis-ci copy.sh | Shell | gpl-2.0 | 7,562 |
#!/bin/bash
readonly GATEWAY="192.168.1.1"
readonly SECONDS_INTERVAL=300
readonly MAX_FAILS=3;
fail_counter=0;
logger "${0} started {GATEWAY=${GATEWAY}, SECONDS_INTERVAL=${SECONDS_INTERVAL}, MAX_FAILS=${MAX_FAILS}}";
while true
do
sleep $SECONDS_INTERVAL;
ping -c 1 $GATEWAY > /dev/null 2>&1;
if [ $? = 0 ]; then
fail_counter=0;
else
(( fail_counter++ ));
logger "Cannot ping gateway (${GATEWAY}). Failure #${fail_counter}";
fi;
if (( fail_counter >= MAX_FAILS )); then
logger "Cannot ping gateway (${GATEWAY}). I assume something is wrong with my network card. Going for reboot.";
reboot;
fi;
done
| yu55/scripts | pi/net-watch/pi_network_watchdog.bash | Shell | gpl-2.0 | 674 |
#!/bin/bash
export KERNELDIR=`readlink -f .`
export RAMFS_SOURCE=`readlink -f $KERNELDIR/recovery`
export USE_SEC_FIPS_MODE=true
echo "kerneldir = $KERNELDIR"
echo "ramfs_source = $RAMFS_SOURCE"
RAMFS_TMP="/tmp/arter97-ks01lte-recovery"
echo "ramfs_tmp = $RAMFS_TMP"
cd $KERNELDIR
if [ "${1}" = "skip" ] ; then
echo "Skipping Compilation"
else
echo "Compiling kernel"
cp defconfig .config
scripts/configcleaner "
CONFIG_SEC_LOCALE_KOR
CONFIG_MACH_KS01EUR
CONFIG_EXTRA_FIRMWARE
CONFIG_EXTRA_FIRMWARE_DIR
CONFIG_TDMB
CONFIG_SEC_DEVIDE_RINGTONE_GAIN
CONFIG_WLAN_REGION_CODE
"
echo '
# CONFIG_SEC_LOCALE_KOR is not set
CONFIG_MACH_KS01EUR=y
CONFIG_EXTRA_FIRMWARE="audience-es325-fw-KS01-eur.bin"
CONFIG_EXTRA_FIRMWARE_DIR="firmware"
# CONFIG_TDMB is not set
# CONFIG_SEC_DEVIDE_RINGTONE_GAIN is not set
CONFIG_WLAN_REGION_CODE=100
' >> .config
make oldconfig
sed -i -e 's/config->fsg.luns\[0\].cdrom = 1;/config->fsg.luns\[0\].cdrom = 0;/g' drivers/usb/gadget/android.c
make "$@" || exit 1
git checkout drivers/usb/gadget/android.c
fi
echo "Building new ramdisk"
#remove previous ramfs files
rm -rf "$RAMFS_TMP"*
rm -rf $RAMFS_TMP
rm -rf $RAMFS_TMP.cpio
#copy ramfs files to tmp directory
cp -ax $RAMFS_SOURCE $RAMFS_TMP
cd $RAMFS_TMP
find . -name '*.sh' -exec chmod 755 {} \;
$KERNELDIR/ramdisk_fix_permissions.sh 2>/dev/null
#clear git repositories in ramfs
find . -name .git -exec rm -rf {} \;
find . -name EMPTY_DIRECTORY -exec rm -rf {} \;
cd $KERNELDIR
rm -rf $RAMFS_TMP/tmp/*
cd $RAMFS_TMP
find . | fakeroot cpio -H newc -o | lzop -9 > $RAMFS_TMP.cpio.lzo
ls -lh $RAMFS_TMP.cpio.lzo
cd $KERNELDIR
echo "Making new boot image"
gcc -w -s -pipe -O2 -o tools/dtbtool/dtbtool tools/dtbtool/dtbtool.c
tools/dtbtool/dtbtool -s 2048 -o arch/arm/boot/dt.img -p scripts/dtc/ arch/arm/boot/
gcc -w -s -pipe -O2 -Itools/libmincrypt -o tools/mkbootimg/mkbootimg tools/libmincrypt/*.c tools/mkbootimg/mkbootimg.c
tools/mkbootimg/mkbootimg --kernel $KERNELDIR/arch/arm/boot/zImage --dt $KERNELDIR/arch/arm/boot/dt.img --ramdisk $RAMFS_TMP.cpio.lzo --cmdline 'console=null androidboot.hardware=qcom user_debug=23 msm_rtb.filter=0x37 ehci-hcd.park=3 enforcing=0' --base 0x00000000 --pagesize 2048 --kernel_offset 0x00008000 --ramdisk_offset 0x02000000 --tags_offset 0x01e00000 --second_offset 0x00f00000 -o $KERNELDIR/recovery.img
echo -n "SEANDROIDENFORCE" >> recovery.img
if [ "${1}" = "CC=\$(CROSS_COMPILE)gcc" ] ; then
dd if=/dev/zero bs=$((20971520-$(stat -c %s recovery.img))) count=1 >> recovery.img
fi
echo "done"
ls -al recovery.img
echo ""
| 1N4148/android_kernel_samsung_msm8974 | build_recovery.sh | Shell | gpl-2.0 | 2,556 |
#!/bin/sh
# Test definition of the newly introduced nonstandard 'B 1 21/m 1'
# space group setting:
#BEGIN DEPEND------------------------------------------------------------------
INPUT_SCRIPT=scripts/symop_build_spacegroup
#END DEPEND--------------------------------------------------------------------
set -ue
${INPUT_SCRIPT} <<EOF
-x,y+1/2,-z
-x,-y,-z
x+1/2,y,z+1/2
EOF
| sauliusg/cod-tools | tests/shtests/check_spacegroups_005.sh | Shell | gpl-2.0 | 379 |
#!/bin/bash
git pull
git init
git add .
git commit -a -m 'data version'
git remote add origin [email protected]:username/reponame.git
git push -u origin master
| zigfrid2356/world_of_sand | run.sh | Shell | gpl-2.0 | 159 |
#!/bin/bash
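# Usage: ./push_file_to_avd.sh [dbFileName]
# Pushes the given database file into the app's databases directory on the
# attached device/emulator; dbFileName defaults to jinchuhuobook.bdb.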
debug=true
pkg=com.maimairen.app.jinchuhuo
avdDbDir=`pwd | sed "s/.*\/databases/\/data\/data\/$pkg\/databases/g"`
if [ "$debug" = true ]; then
echo "package name: $pkg";
echo "target dir: $avdDbDir";
fi
if [ -n "$1" ]; then
dbFileName=$1;
else
dbFileName=jinchuhuobook.bdb
fi
adb push ./$dbFileName $avdDbDir/$dbFileName
owner=`adb shell ls -l /data/data/$pkg/databases/hmdb | awk '{print $2}'`
echo $owner
adb shell chown -R $owner:$owner $avdDbDir/$dbFileName
adb shell chmod -R 777 $avdDbDir/$dbFileName
echo "result:"
adb shell ls -l $avdDbDir
| JeremyHe-cn/work-every-where | .bin/push_file_to_avd.sh | Shell | gpl-2.0 | 552 |
#! /bin/sh
### BEGIN INIT INFO
# Provides: LCDinit
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start PiLCD on boot
# Description: Start PiLCD IP display for WebMSO28
### END INIT INFO
#. /lib/lsb/init-functions
#/root/run_js.sh
#set -e
case "$1" in
start|force-reload|restart|reload)
/home/pi/PiLCD/lcd_ip2
;;
stop|status)
;;
*)
echo "Usage: $N {start|stop|restart|force-reload|status}" >&2
exit 1
;;
esac
exit 0
| tkrmnz/PiLCD | LCDinit.sh | Shell | gpl-2.0 | 560 |
#!/bin/bash
###########################################################################################
## Copyright 2003, 2015 IBM Corp ##
## ##
## Redistribution and use in source and binary forms, with or without modification, ##
## are permitted provided that the following conditions are met: ##
## 1.Redistributions of source code must retain the above copyright notice, ##
## this list of conditions and the following disclaimer. ##
## 2.Redistributions in binary form must reproduce the above copyright notice, this ##
## list of conditions and the following disclaimer in the documentation and/or ##
## other materials provided with the distribution. ##
## ##
## THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS AND ANY EXPRESS ##
## OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF ##
## MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ##
## THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ##
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF ##
## SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ##
## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, ##
## OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ##
## SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ##
############################################################################################
### File : perl-Font-AFM.sh ##
##
### Description: This testcase tests perl-Font-AFM package ##
##
### Author: Basavaraju.G [email protected] ##
###########################################################################################
#cd $(dirname $0)
#LTPBIN=${LTPBIN%/shared}/perl_Font_AFM
source $LTPBIN/tc_utils.source
TESTS_DIR="${LTPBIN%/shared}/perl_Font_AFM/t"
required="perl rpm"
function tc_local_setup()
{
# check installation and environment
tc_exec_or_break $required
# install check
rpm -q "perl-Font-AFM" >$stdout 2>$stderr
tc_break_if_bad $? "perl-Font-AFM not installed"
}
################################################################################
# testcase functions #
################################################################################
#
# Function: runtests
#
# Description: - test perl-Font-AFM
#
# Parameters: - none
#
# Return - zero on success
# - return value from commands on failure
################################################################################
function run_test()
{
pushd $TESTS_DIR &>/dev/null
TESTS=`ls *.t`
TST_TOTAL=`echo $TESTS | wc -w`
for test in $TESTS; do
tc_register "Test $test"
perl $test >$stdout 2>$stderr
rc=`grep "not ok" $stdout`
[ -z "$rc" ]
tc_pass_or_fail $? "Test $test fail"
done
popd &>/dev/null
}
##############################################
#MAIN #
##############################################
TST_TOTAL=1
tc_setup && \
run_test
| PoornimaNayak/autotest-client-tests | linux-tools/perl_Font_AFM/perl-Font-AFM.sh | Shell | gpl-2.0 | 3,806 |
#! /bin/sh -e
# tup - A file-based build system
#
# Copyright (C) 2009-2018 Mike Shal <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Test that a node-variable cannot refer to a generated file.
. ./tup.sh
cat > Tupfile << HERE
: |> touch %o |> a.txt
&node_var = a.txt
HERE
tup touch Tupfile
update_fail_msg "Node-variables can only refer to normal files and directories, not a 'generated file'."
eotup
| jonatanolofsson/tup | test/t2114-node-var-generated-file.sh | Shell | gpl-2.0 | 1,023 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
TOP=`pwd`
CND_PLATFORM=GNU-Linux-x86
CND_CONF=Debug
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=so
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/word_count_-_posix
OUTPUT_BASENAME=word_count_-_posix
PACKAGE_TOP_DIR=wordcount-posix/
# Functions
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/wordcount-posix/bin"
copyFileToTmpDir "${OUTPUT_PATH}" "${NBTMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/wordcount-posix.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/wordcount-posix.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
| akibis/school | 415/415 - Homework 4 - Word Count/Word Count - POSIX/nbproject/Package-Debug.bash | Shell | gpl-2.0 | 1,497 |
#!/bin/bash
DIR=$(cd $(dirname $0); pwd)
SENSORD_DIR=$(cd $DIR/..; pwd)
. $SENSORD_DIR/../scripts/common.sh
NODE_PATH=$(which node)
(
cd $SENSORD_DIR
npm install --unsafe-perm --production
if [ -f /lib/lsb/init-functions ]
then
echo generating init.d script /etc/init.d/sensord
$DIR/gen_init_d.sh
fi
) | tee install.log
| premisense/premisense | sensord/scripts/install.sh | Shell | gpl-2.0 | 339 |
#!/bin/bash
echo "FluxGeo Installation"
read -p "Would you like to install this script? [y/n]" response
case $response in [yY])
echo "Stopping xflux daemon"
killall xflux
echo "Installing xflux"
if [ "$(uname -m | grep '64')" != "" ]; then
echo "Detected 64 bit arch"
sudo cp xflux64 /usr/local/bin/xflux
else
echo "Detected 32 bit arch"
sudo cp xflux32 /usr/local/bin/xflux
fi
echo "Installing GeoIP database"
    sudo mkdir -p /usr/share/GeoIP
sudo cp GeoLiteCity.dat /usr/share/GeoIP/GeoLiteCity.dat
echo "Installing fluxgeo"
sudo cp fluxgeo.py /usr/local/bin/fluxgeo
sudo chmod +x /usr/local/bin/fluxgeo
echo "Setting up fluxgeo"
mkdir ~/.fluxgeo
chmod 777 ~/.fluxgeo
touch ~/.fluxgeo/lat
echo "0" >> ~/.fluxgeo/lat
chmod 777 ~/.fluxgeo/lat
touch ~/.fluxgeo/long
echo "0" >> ~/.fluxgeo/long
chmod 777 ~/.fluxgeo/long
echo "Installation Finished"
echo "Starting fluxgeo"
;;
*)
echo "Installation Aborted"
;;
esac
exit 0
| isaaclo123/fluxgeo | setup.sh | Shell | gpl-2.0 | 1,130 |
#!/usr/bin/env bash
# Installation Script for Local Development
if [ -e /etc/os-release ]; then
. /etc/os-release
else
. /usr/lib/os-release
fi
if [ "$ID" = "opensuse-leap" ]; then
echo "Add wiki repository for openSUSE Leap $VERSION"
sudo zypper addrepo https://download.opensuse.org/repositories/openSUSE:infrastructure:wiki/openSUSE_Leap_$VERSION/openSUSE:infrastructure:wiki.repo
elif [ "$ID" = "opensuse-tumbleweed" ]; then
echo "Add wiki repository for openSUSE Tumbleweed"
sudo zypper addrepo https://download.opensuse.org/repositories/openSUSE:infrastructure:wiki/openSUSE_Tumbleweed/openSUSE:infrastructure:wiki.repo
fi
sudo zypper refresh
# Install RPM packages
echo "Install RPM packages"
sudo zypper install mediawiki_1_27-openSUSE
# Link folders and files
echo "Link MediaWiki files and folders"
function link() {
rm ./$1 -rf
ln -s /usr/share/mediawiki_1_27/$1 ./$1
}
link api.php
link autoload.php
link img_auth.php
link index.php
link load.php
link opensearch_desc.php
link thumb_handler.php
link thumb.php
link extensions/AbuseFilter
link extensions/Auth_remoteuser
link extensions/CategoryTree
link extensions/CirrusSearch
link extensions/Cite
link extensions/CiteThisPage
link extensions/ConfirmEdit
link extensions/Elastica
link extensions/Gadgets
link extensions/GitHub
link extensions/HitCounters
link extensions/ImageMap
link extensions/InputBox
link extensions/intersection
link extensions/Interwiki
link extensions/LocalisationUpdate
link extensions/Maps
link extensions/maps-vendor
link extensions/MultiBoilerplate
link extensions/Nuke
link extensions/ParamProcessor
link extensions/ParserFunctions
link extensions/PdfHandler
link extensions/Poem
link extensions/Renameuser
link extensions/ReplaceText
link extensions/RSS
link extensions/SpamBlacklist
link extensions/SyntaxHighlight_GeSHi
link extensions/TitleBlacklist
link extensions/UserMerge
link extensions/UserPageEditProtection
link extensions/Validator
link extensions/WikiEditor
link includes
link languages
link maintenance
link resources
link serialized
link vendor
# Copy development settings
echo "Copy development settings"
cp wiki_settings.example.php wiki_settings.php
# Make directories
echo "Make directories"
rm -r cache
mkdir cache
rm -r data
mkdir data # Save SQLite files
rm -r images
mkdir images
# Run installation script
echo "Run installation script"
# Install without extensions
mv LocalSettings.php _LocalSettings.php
php maintenance/install.php --dbuser="" --dbpass="" --dbname=wiki --dbpath=./data \
--dbtype=sqlite --confpath=./ --scriptpath=/ --pass=evergreen openSUSE Geeko
# Update with extensions
rm LocalSettings.php
mv _LocalSettings.php LocalSettings.php
php maintenance/update.php --conf LocalSettings.php
| openSUSE/wiki | install_devel.sh | Shell | gpl-2.0 | 2,770 |
#!/usr/bin/env bash
export LC_ALL="C"
function set_path_vars()
{
if [ -n "$OSXCROSS_VERSION" ]; then
export VERSION=$OSXCROSS_VERSION
export OSX_VERSION_MIN=$OSXCROSS_OSX_VERSION_MIN
export TARGET=$OSXCROSS_TARGET
export BASE_DIR=$OSXCROSS_BASE_DIR
export TARBALL_DIR=$OSXCROSS_TARBALL_DIR
export BUILD_DIR=$OSXCROSS_BUILD_DIR
export TARGET_DIR=$OSXCROSS_TARGET_DIR
export TARGET_DIR_SDK_TOOLS=$OSXCROSS_TARGET_DIR/SDK/tools
export PATCH_DIR=$OSXCROSS_PATCH_DIR
export SDK_DIR=$OSXCROSS_SDK_DIR
export SDK_VERSION=$OSXCROSS_SDK_VERSION
export SDK=$OSXCROSS_SDK
export LIBLTO_PATH=$OSXCROSS_LIBLTO_PATH
export LINKER_VERSION=$OSXCROSS_LINKER_VERSION
# Do not use these
unset OSXCROSS_VERSION OSXCROSS_OSX_VERSION_MIN
unset OSXCROSS_TARGET OSXCROSS_BASE_DIR
unset OSXCROSS_SDK_VERSION OSXCROSS_SDK
unset OSXCROSS_SDK_DIR OSXCROSS_TARBALL_DIR
unset OSXCROSS_PATCH_DIR OSXCROSS_TARGET_DIR
unset OSXCROSS_BUILD_DIR OSXCROSS_CCTOOLS_PATH
unset OSXCROSS_LIBLTO_PATH OSXCROSS_LINKER_VERSION
else
export BASE_DIR=$PWD
export TARBALL_DIR=$PWD/tarballs
export BUILD_DIR=$PWD/build
export TARGET_DIR=${TARGET_DIR:-$BASE_DIR/target}
export TARGET_DIR_SDK_TOOLS=$TARGET_DIR/SDK/tools
export PATCH_DIR=$PWD/patches
export SDK_DIR=$TARGET_DIR/SDK
fi
}
set_path_vars
PLATFORM=$(uname -s)
ARCH=$(uname -m)
OPERATING_SYSTEM=$(uname -o 2>/dev/null || echo "-")
SCRIPT=$(basename $0)
if [[ $PLATFORM == CYGWIN* ]]; then
echo "Cygwin is no longer supported." 1>&2
exit 1
fi
if [[ $PLATFORM == Darwin ]]; then
echo $PATH
CFLAGS_OPENSSL="$(pkg-config --cflags openssl)"
LDFLAGS_OPENSSL="$(pkg-config --libs-only-L openssl)"
export C_INCLUDE_PATH=${CFLAGS_OPENSSL:2}
export CPLUS_INCLUDE_PATH=${CFLAGS_OPENSSL:2}
export LIBRARY_PATH=${LDFLAGS_OPENSSL:2}
fi
function require()
{
if ! command -v $1 &>/dev/null; then
echo "Required dependency '$1' is not installed" 1>&2
exit 1
fi
}
if [[ $PLATFORM == *BSD ]] || [ $PLATFORM == "DragonFly" ]; then
MAKE=gmake
SED=gsed
else
MAKE=make
SED=sed
fi
if [ -z "$USESYSTEMCOMPILER" ]; then
if [ -z "$CC" ]; then
export CC="clang"
fi
if [ -z "$CXX" ]; then
export CXX="clang++"
fi
fi
if [ -z "$CMAKE" ]; then
CMAKE="cmake"
fi
if [ -n "$CC" ]; then
require $CC
fi
if [ -n "$CXX" ]; then
require $CXX
fi
require $SED
require $MAKE
require $CMAKE
require patch
require gunzip
# enable debug messages
[ -n "$OCDEBUG" ] && set -x
# how many concurrent jobs should be used for compiling?
if [ -z "$JOBS" ]; then
JOBS=$(tools/get_cpu_count.sh || echo 1)
fi
# Don't run osxcross-conf for the top build.sh script
if [ $SCRIPT != "build.sh" ]; then
res=$(tools/osxcross_conf.sh || echo "")
if [ -z "$res" ] &&
[[ $SCRIPT != gen_sdk_package*.sh ]] &&
[ $SCRIPT != "build_wrapper.sh" ] &&
[[ $SCRIPT != build*_clang.sh ]] &&
[ $SCRIPT != "mount_xcode_image.sh" ]; then
echo "you must run ./build.sh first before you can start building $DESC"
exit 1
fi
if [ -z "$TOP_BUILD_SCRIPT" ]; then
eval "$res"
set_path_vars
fi
fi
# find sdk version to use
function guess_sdk_version()
{
tmp1=
tmp2=
tmp3=
file=
sdk=
guess_sdk_version_result=
sdkcount=$(find -L tarballs/ -type f | grep MacOSX | wc -l)
if [ $sdkcount -eq 0 ]; then
echo no SDK found in 'tarballs/'. please see README.md
exit 1
elif [ $sdkcount -gt 1 ]; then
sdks=$(find -L tarballs/ -type f | grep MacOSX)
for sdk in $sdks; do echo $sdk; done
echo 'more than one MacOSX SDK tarball found. please set'
echo 'SDK_VERSION environment variable for the one you want'
echo '(for example: SDK_VERSION=10.x [OSX_VERSION_MIN=10.x] [TARGET_DIR=...] ./build.sh)'
exit 1
else
sdk=$(find -L tarballs/ -type f | grep MacOSX)
tmp2=$(echo ${sdk/bz2/} | $SED s/[^0-9.]//g)
tmp3=$(echo $tmp2 | $SED s/\\\.*$//g)
guess_sdk_version_result=$tmp3
echo 'found SDK version' $guess_sdk_version_result 'at tarballs/'$(basename $sdk)
fi
if [ $guess_sdk_version_result ]; then
if [ $guess_sdk_version_result = 10.4 ]; then
guess_sdk_version_result=10.4u
fi
fi
export guess_sdk_version_result
}
# make sure there is actually a file with the given SDK_VERSION
function verify_sdk_version()
{
sdkv=$1
for file in tarballs/*; do
if [ -f "$file" ] && [ $(echo $file | grep OSX.*$sdkv) ]; then
echo "verified at "$file
sdk=$file
fi
done
if [ ! $sdk ] ; then
echo cant find SDK for OSX $sdkv in tarballs. exiting
exit 1
fi
}
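# extract <archive>: unpack a .tar.xz / .tar.gz / .tar.bz2 tarball
# into the current directory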
function extract()
{
echo "extracting $(basename $1) ..."
local tarflags
tarflags="xf"
test -n "$OCDEBUG" && tarflags+="v"
case $1 in
*.tar.xz)
xz -dc $1 | tar $tarflags -
;;
*.tar.gz)
gunzip -dc $1 | tar $tarflags -
;;
*.tar.bz2)
bzip2 -dc $1 | tar $tarflags -
;;
*)
echo "Unhandled archive type" 2>&1
exit 1
;;
esac
}
function get_exec_dir()
{
local dirs=$(dirs)
echo ${dirs##* }
}
function make_absolute_path()
{
local current_path
if [ $# -eq 1 ]; then
current_path=$PWD
else
current_path=$2
fi
case $1 in
/*) echo "$1" ;;
*) echo "${current_path}/$1" ;;
esac
}
function cleanup_tmp_dir()
{
if [ -n "$OC_KEEP_TMP_DIR" ]; then
echo "Not removing $TMP_DIR ..."
return
fi
echo "Removing $TMP_DIR ..."
rm -rf $TMP_DIR
}
function create_tmp_dir()
{
mkdir -p $BUILD_DIR
pushd $BUILD_DIR &>/dev/null
local tmp
for i in {1..100}; do
tmp="tmp_$RANDOM"
[ -e $tmp ] && continue
mkdir $tmp && break
done
if [ ! -d $tmp ]; then
echo "cannot create $BUILD_DIR/$tmp directory" 1>&2
exit 1
fi
TMP_DIR=$BUILD_DIR/$tmp
trap cleanup_tmp_dir EXIT
popd &>/dev/null
}
# f_res=1 = something has changed upstream
# f_res=0 = nothing has changed
function git_clone_repository
{
local url=$1
local branch=$2
local project_name=$3
if [ -n "$TP_OSXCROSS_DEV" ]; then
# copy files from local working directory
rm -rf $project_name
cp -r $TP_OSXCROSS_DEV/$project_name .
if [ -e ${project_name}/.git ]; then
pushd $project_name &>/dev/null
git clean -fdx &>/dev/null
popd &>/dev/null
fi
f_res=1
return
fi
local git_extra_opts=""
if [ -z "$FULL_CLONE" ]; then
git_extra_opts="--depth 1 "
fi
if [ ! -d $project_name ]; then
git clone $url $project_name $git_extra_opts
fi
pushd $project_name &>/dev/null
git reset --hard &>/dev/null
git clean -fdx &>/dev/null
if git show-ref refs/heads/$branch &>/dev/null; then
git fetch origin $branch
else
git fetch origin $branch:$branch $git_extra_opts
fi
git checkout $branch
git pull origin $branch
local new_hash=$(git rev-parse HEAD)
local old_hash=""
local hash_file="$BUILD_DIR/.${project_name}_git_hash"
if [ -f $hash_file ]; then
old_hash=$(cat $hash_file)
fi
echo -n $new_hash > $hash_file
if [ "$old_hash" != "$new_hash" ]; then
f_res=1
else
f_res=0
fi
popd &>/dev/null
}
function get_project_name_from_url()
{
local url=$1
local project_name
project_name=$(basename $url)
project_name=${project_name/\.git/}
echo -n $project_name
}
function build_success()
{
local project_name=$1
touch "$BUILD_DIR/.${CURRENT_BUILD_PROJECT_NAME}_build_complete"
unset CURRENT_BUILD_PROJECT_NAME
}
function build_msg()
{
echo ""
if [ $# -eq 2 ]; then
echo "## Building $1 ($2) ##"
else
echo "## Building $1 ##"
fi
echo ""
}
# f_res=1 = build the project
# f_res=0 = nothing to do
function get_sources()
{
local url="$1"
local branch="$2"
local project_name="$3"
local build_complete_file
if [[ -z "${project_name}" ]]; then
project_name=$(get_project_name_from_url "${url}")
fi
build_complete_file="${BUILD_DIR}/.${project_name}_build_complete"
CURRENT_BUILD_PROJECT_NAME="${project_name}"
build_msg "${project_name}" "${branch}"
if [[ "${SKIP_BUILD}" == *${project_name}* ]]; then
f_res=0
return
fi
git_clone_repository "${url}" "${branch}" "${project_name}"
if [[ $f_res -eq 1 ]]; then
rm -f "${build_complete_file}"
f_res=1
else
# nothing has changed upstream
if [[ -f "${build_complete_file}" ]]; then
echo ""
echo "## Nothing to do ##"
echo ""
f_res=0
else
rm -f "${build_complete_file}"
f_res=1
fi
fi
}
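# download <uri>: fetch <uri> into the current directory, preferring
# curl and falling back to wget (resume enabled in both cases)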
function download()
{
local uri=$1
local filename=$(basename $1)
if command -v curl &>/dev/null; then
## cURL ##
local curl_opts="-L -C - "
curl $curl_opts -o $filename $uri
elif command -v wget &>/dev/null; then
## wget ##
local wget_opts="-c "
local output=$(wget --no-config 2>&1)
if [[ $output != *--no-config* ]]; then
wget_opts+="--no-config "
fi
wget $wget_opts -O $filename $uri
else
echo "Required dependency 'curl or wget' not installed" 1>&2
exit 1
fi
}
function create_symlink()
{
ln -sf $1 $2
}
function verbose_cmd()
{
echo "$@"
eval "$@"
}
function test_compiler()
{
if [ "$3" != "required" ]; then
set +e
fi
echo -ne "testing $1 ... "
$1 $2 -O2 -Wall -o test
if [ $? -eq 0 ]; then
rm test
echo "works"
else
echo "failed (ignored)"
fi
if [ "$3" != "required" ]; then
set -e
fi
}
function test_compiler_cxx11()
{
set +e
echo -ne "testing $1 -stdlib=libc++ -std=c++11 ... "
$1 $2 -O2 -stdlib=libc++ -std=c++11 -Wall -o test &>/dev/null
if [ $? -eq 0 ]; then
rm test
echo "works"
else
echo "failed (ignored)"
fi
set -e
}
## Also used in gen_sdk_package_pbzx.sh ##
function build_xar()
{
pushd $BUILD_DIR &>/dev/null
get_sources https://github.com/tpoechtrager/xar.git master
if [ $f_res -eq 1 ]; then
pushd $CURRENT_BUILD_PROJECT_NAME/xar &>/dev/null
CFLAGS+=" -w" \
./configure --prefix=$TARGET_DIR
$MAKE -j$JOBS
$MAKE install -j$JOBS
popd &>/dev/null
build_success
fi
popd &>/dev/null
}
# exit on error
set -e
| tpoechtrager/osxcross | tools/tools.sh | Shell | gpl-2.0 | 10,152 |
#!/bin/sh
# SPDX-License-Identifier: LGPL-2.1-or-later
set -eu
git shortlog -s `git describe --abbrev=0 --match 'v[0-9][0-9][0-9]'`.. | \
awk '{ $1=""; print $0 "," }' | \
sort -u
| phomes/systemd | tools/git-contrib.sh | Shell | gpl-2.0 | 189 |
#!/bin/sh
export KMP_AFFINITY=compact
for SEED in `seq 0 15`
do
export CONFIG_OPTS="--with-cuda --seed $SEED"
export OUTPUT_SUFFIX="gpu-$SEED"
./run.sh
done
| lawmurray/Resampling | run_gpu.sh | Shell | gpl-2.0 | 166 |
#!/bin/bash
# I need to rewrite the code later
echo "Hello World"
parent=./Writing
a=24
#newfolder=/newfolder
#mkdir "$newfolder"
for folder in "$parent"/*; do
if [[ -d $folder ]]; then
foldername="${folder##*/}"
for file in "$parent"/"$foldername"/*; do
filename="${file##*/}"
newfilename="$a"_"$foldername"_"$filename"
cp "$file" /home/tanzim/Developer/Videos/Processed/"$newfilename"
echo "$file"
done
fi
a=$((a+1))
done
echo "End"
| tahmid-tanzim/bash-scripting | file-process.sh | Shell | gpl-2.0 | 435 |
# Purpose: Verify existing BWBASIC behavior
# Author: Howard Wulf
# Date: 2014-03-28
# Usage: implementation defined
# Example:
# cd /sdcard/Download/BASIC/bwbasic3/bwskytel
# ash ./00test.sh
#
rm *.80
rm *.OUT
rm *.dif
# ----------------------------------------------
# Regression Tests
# ----------------------------------------------
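# testcase <name>: run <name> through bwbasic, pad the captured output
# to 80 columns via PAD80.BAS, and diff it against the expected
# <name>.run reference file.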
testcase()
{
TESTCASE=${1}
echo "TESTCASE=${TESTCASE}"
~/bwbasic ${TESTCASE} > ${TESTCASE}.OUT
echo "500 DATA ${TESTCASE}.OUT, ${TESTCASE}.80" > PAD80.INP
~/bwbasic PAD80.BAS
diff ${TESTCASE}.run ${TESTCASE}.80 > ${TESTCASE}.dif
if test -s ${TESTCASE}.dif
then
echo less ${TESTCASE}.dif
fi
}
# ---------------------------------------------
echo "OPTION VERSION BYWATER" > profile.bas
echo "OPTION LABELS OFF" >> profile.bas
testcase addmag
testcase altaz
testcase altaz2
testcase angsep
testcase asteroid
testcase binary
testcase blkhole1
testcase blkhole2
testcase blkhole3
testcase calendar
testcase caljd
testcase capture
testcase ccdlimi2
testcase chance
testcase chart
testcase circle
testcase comet
testcase crater
testcase daysold
testcase dial
testcase difpat
testcase easter
testcase extinc
testcase facecirc
testcase fireball
testcase fracts
testcase glob1
testcase gmst
testcase gwmonth
testcase interp
testcase jdcal
testcase jmerid
testcase kepler
testcase lens
testcase lheight
testcase limmag
testcase lookback
testcase lookbak2
testcase lunar
testcase mallam
testcase mars
testcase meteor
testcase moonfx
testcase moons
testcase moonup
testcase msachart
testcase obscur
testcase occvis
testcase orbits
testcase path
testcase period
testcase photom
testcase precess
testcase refr1
testcase rocket
testcase rotate
testcase saros
testcase satrings
testcase scales
testcase shadow
testcase shower
testcase shuttr
testcase solarecl
testcase space
testcase sphe
testcase stay
testcase steppr
testcase stereo
testcase sunshine
testcase suntan
testcase sunup
testcase supernum
testcase surf
testcase track
testcase vislimit
testcase wavel
testcase xyz
cat *.dif > dif.OUT
less dif.OUT
# EOF
| yantrabuddhi/bwbasic3 | SKYTEL/00test.sh | Shell | gpl-2.0 | 2,223 |
#!/bin/sh
# Use revcontrol.sh <text file with list of files/directories to check>
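# Example (hypothetical list file): ./revcontrol.sh /root/watchlist.txt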
# Change here
mailreports="[email protected]"
backupdir="/my/config/backup/archives"
startprocess=`date +%Y%m%d%H%M`
if [! -f "$1" ]; then
echo "missing/invalid parameter"
exit 1
else
param="$1"
fi
filename=`basename "$param"`
filelist=$(/bin/cat "$param" | tr '\n' ' ');
#echo "File list: $filelist"
if [ -f "$backupdir/$filename.tar.gz" ]; then
newfiles=`find $filelist -newer $backupdir/$filename.tar.gz`
if [ -n "$newfiles" ]; then
mv $backupdir/$filename.tar.gz $backupdir/$filename-$startprocess.tar.gz
tar cfz $backupdir/$filename.tar.gz --dereference -T $1
echo $newfiles | mail -s "New files on $startprocess of $filename" $mailreports
fi
else
tar cfz $backupdir/$filename.tar.gz --dereference -T $1
fi
| gmuslera/revcontrol | revcontrol.sh | Shell | gpl-2.0 | 817 |
# for centos:
#   yum install sysstat bcc-tools bpftrace perf htop iproute msr-tools
# for ubuntu:
sudo apt-get install sysstat bcc-tools bpftrace linux-tools-common \
	linux-tools-$(uname -r) iproute2 msr-tools
git clone https://github.com/brendangregg/msr-cloud-tools
git clone https://github.com/brendangregg/bpf-perf-tools-book
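# Example spot checks with the tools above (a sketch; assumes the packages
# installed cleanly and the bcc tools are on $PATH):
#   mpstat -P ALL 1   # per-CPU utilization (sysstat)
#   iostat -xz 1      # block-device latency and utilization (sysstat)
#   execsnoop         # trace new process execs (bcc-tools)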
| heromsl/Memomis | obs_tool.sh | Shell | gpl-3.0 | 329 |
#!/usr/bin/env bash
#
# Utility script for running the Vagrantfile.remotetest against a Streisand host
#
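# Usage: ./remote_test.sh (no arguments); prompts for the Streisand
# server IP and the gateway password before provisioning.
#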
# Set errexit option to exit immediately on any non-zero status return
set -e
echo -e "\n\033[38;5;255m\033[48;5;234m\033[1m S T R E I S A N D R E M O T E T E S T\033[0m\n"
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd -P)"
VAGRANT_FILENAME="Vagrantfile.remotetest"
GENERATED_DOCS_DIR="$SCRIPT_DIR/../generated-docs"
mkdir -p $GENERATED_DOCS_DIR
VAGRANTFILE="$SCRIPT_DIR/../$VAGRANT_FILENAME"
function backup_vagrantfile() {
cp $VAGRANTFILE $VAGRANTFILE.dist
}
function restore_vagrantfile() {
cp $VAGRANTFILE.dist $VAGRANTFILE
rm $VAGRANTFILE.dist
}
trap restore_vagrantfile EXIT
# set_remote_ip replaces the "REMOTE_IP_HERE" token from the
# Vagrantfile.remotetest with the response from a user prompt
function set_remote_ip() {
read -r -p "What is the Streisand server IP? " SERVER_IP
sed "s/\"REMOTE_IP_HERE\",/\"$SERVER_IP\",/" "$VAGRANTFILE" > "$VAGRANTFILE.new"
mv "$VAGRANTFILE.new" "$VAGRANTFILE"
}
function set_gateway_pass() {
local GATEWAY_PASS_FILE="$GENERATED_DOCS_DIR/gateway-password.txt"
read -r -p "What is the Streisand gateway password? " GATEWAY_PASS
echo "$GATEWAY_PASS" > $GATEWAY_PASS_FILE
}
backup_vagrantfile
set_remote_ip
set_gateway_pass
pushd "$SCRIPT_DIR/.."
VAGRANT_VAGRANTFILE=$VAGRANT_FILENAME vagrant up --provision
popd
| ipat8/Streisand-VPN-GUI | tests/remote_test.sh | Shell | gpl-3.0 | 1,392 |
#!/usr/bin/env sh
# REQUIRES: shell
# RUN: cd %T; %{bear} --verbose --output %t.json -- %{shell} %s
# RUN: assert_compilation %t.json count -eq 4
# RUN: assert_compilation %t.json contains -file %T/parallel_build_1.c -directory %T -arguments %{c_compiler} -c -o parallel_build_1.o parallel_build_1.c
# RUN: assert_compilation %t.json contains -file %T/parallel_build_2.c -directory %T -arguments %{c_compiler} -c -o parallel_build_2.o parallel_build_2.c
# RUN: assert_compilation %t.json contains -file %T/parallel_build_3.c -directory %T -arguments %{c_compiler} -c -o parallel_build_3.o parallel_build_3.c
# RUN: assert_compilation %t.json contains -file %T/parallel_build_4.c -directory %T -arguments %{c_compiler} -c -o parallel_build_4.o parallel_build_4.c
touch parallel_build_1.c parallel_build_2.c parallel_build_3.c parallel_build_4.c
$CC -c -o parallel_build_1.o parallel_build_1.c &
$CC -c -o parallel_build_2.o parallel_build_2.c &
$CC -c -o parallel_build_3.o parallel_build_3.c &
$CC -c -o parallel_build_4.o parallel_build_4.c &
wait
true;
| rizsotto/Bear | test/cases/compilation/output/parallel_build.sh | Shell | gpl-3.0 | 1,060 |
#!/bin/sh
# this script is to be run as root on the remote server machine, usually via ssh
# this assumes all of the necessary files such as distrib.conf, setupssl2.sh, etc. have already
# been copied to the machine
. ./libperftest.sh
DSLOGDIR=${DSLOGDIR:-$PREFIX/var/log/dirsrv/slapd-$INST}
LOGCONV=${LOGCONV:-logconv.pl}
# top
killall top
# mon-tcp-backlog
killall /usr/bin/perl
# epoll pid
if [ -f epoll.pid ]; then
kill `cat epoll.pid`
fi
# logmon
if [ -f logmon.pid ]; then
kill `cat logmon.pid`
fi
# list slapd sockets in use
ls -al /proc/`pidof ns-slapd`/fd|grep socket
# inotifywait
killall inotifywait
if [ -n "$LOGCONV_ON_SERVER" ] ; then
killall ns-slapd
mkdir -p /dev/shm/logconv
$LOGCONV -D /dev/shm/logconv -m access.out $DSLOGDIR/access.20*.bz2 $DSLOGDIR/access > access.stats
rm -rf /dev/shm/logconv
fi
| richm/389-perf-test | servercleanup.sh | Shell | gpl-3.0 | 848 |
# This file must be sourced inside (ba)sh using: .
#
# filesystem.sh. Generated from filesystem.sh.in by configure.
#
# Sets up the GNUstep filesystem paths for shell scripts
#
# Copyright (C) 2007 Free Software Foundation, Inc.
#
# Author: Nicola Pero <[email protected]>,
#
# Date: February 2007
#
# This file is part of the GNUstep Makefile Package.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# You should have received a copy of the GNU General Public
# License along with this library; see the file COPYING.
# If not, write to the Free Software Foundation,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# This does the same as filesystem.make, but for GNUstep.sh.
# Include this file after reading your config to make sure that all
# the paths are available.
#
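# Example (a sketch; the makefiles path is an assumption for a typical
# GNUstep install):
#   . "$GNUSTEP_MAKEFILES/filesystem.sh"
#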
# PS: If you change this list, make sure to update the list of
# paths used in all other filesystem.*, and in common.make when
# GNUSTEP_INSTALLATION_DIR is set.
#
#
# Location of Users directories ... never used by gnustep-make.
#
if [ -z "$GNUSTEP_SYSTEM_USERS_DIR" ];
then GNUSTEP_SYSTEM_USERS_DIR="/home"
fi
if [ -z "$GNUSTEP_NETWORK_USERS_DIR" ];
then GNUSTEP_NETWORK_USERS_DIR="/home"
fi
if [ -z "$GNUSTEP_LOCAL_USERS_DIR" ];
then GNUSTEP_LOCAL_USERS_DIR="/home"
fi
# PS: We don't want to define these variables to avoid extreme
# environment pollution. :-) They are fixed subdirs of LIBRARY if you
# need them.
#GNUSTEP_SYSTEM_APPLICATION_SUPPORT = $GNUSTEP_SYSTEM_LIBRARY/ApplicationSupport
#GNUSTEP_SYSTEM_BUNDLES = $GNUSTEP_SYSTEM_LIBRARY/Bundles
#GNUSTEP_SYSTEM_FRAMEWORKS = $GNUSTEP_SYSTEM_LIBRARY/Frameworks
#GNUSTEP_SYSTEM_PALETTES = $GNUSTEP_SYSTEM_LIBRARY/ApplicationSupport/Palettes
#GNUSTEP_SYSTEM_SERVICES = $GNUSTEP_SYSTEM_LIBRARY/Services
#GNUSTEP_SYSTEM_RESOURCES = $GNUSTEP_SYSTEM_LIBRARY/Libraries/Resources
#GNUSTEP_SYSTEM_JAVA = $GNUSTEP_SYSTEM_LIBRARY/Libraries/Java
#
# SYSTEM domain
#
if [ -z "$GNUSTEP_SYSTEM_APPS" ];
then GNUSTEP_SYSTEM_APPS="/usr/lib/GNUstep/Applications"
fi
if [ -z "$GNUSTEP_SYSTEM_ADMIN_APPS" ];
then GNUSTEP_SYSTEM_ADMIN_APPS="/usr/lib/GNUstep/Applications"
fi
if [ -z "$GNUSTEP_SYSTEM_WEB_APPS" ];
then GNUSTEP_SYSTEM_WEB_APPS="/usr/lib/GNUstep/WebApplications"
fi
if [ -z "$GNUSTEP_SYSTEM_TOOLS" ];
then GNUSTEP_SYSTEM_TOOLS="/usr/bin"
fi
if [ -z "$GNUSTEP_SYSTEM_ADMIN_TOOLS" ];
then GNUSTEP_SYSTEM_ADMIN_TOOLS="/usr/sbin"
fi
if [ -z "$GNUSTEP_SYSTEM_LIBRARY" ];
then GNUSTEP_SYSTEM_LIBRARY="/usr/lib/GNUstep"
fi
if [ -z "$GNUSTEP_SYSTEM_HEADERS" ];
then GNUSTEP_SYSTEM_HEADERS="/usr/include"
fi
if [ -z "$GNUSTEP_SYSTEM_LIBRARIES" ];
then GNUSTEP_SYSTEM_LIBRARIES="/usr/lib"
fi
if [ -z "$GNUSTEP_SYSTEM_DOC" ];
then GNUSTEP_SYSTEM_DOC="/usr/share/GNUstep/Documentation"
fi
if [ -z "$GNUSTEP_SYSTEM_DOC_MAN" ];
then GNUSTEP_SYSTEM_DOC_MAN="/usr/share/man"
fi
if [ -z "$GNUSTEP_SYSTEM_DOC_INFO" ];
then GNUSTEP_SYSTEM_DOC_INFO="/usr/share/info"
fi
#
# NETWORK domain
#
if [ -z "$GNUSTEP_NETWORK_APPS" ];
then GNUSTEP_NETWORK_APPS="/usr/local/lib/GNUstep/Applications"
fi
if [ -z "$GNUSTEP_NETWORK_ADMIN_APPS" ];
then GNUSTEP_NETWORK_ADMIN_APPS="/usr/local/lib/GNUstep/Applications"
fi
if [ -z "$GNUSTEP_NETWORK_WEB_APPS" ];
then GNUSTEP_NETWORK_WEB_APPS="/usr/lib/GNUstep/WebApplications"
fi
if [ -z "$GNUSTEP_NETWORK_TOOLS" ];
then GNUSTEP_NETWORK_TOOLS="/usr/local/bin"
fi
if [ -z "$GNUSTEP_NETWORK_ADMIN_TOOLS" ];
then GNUSTEP_NETWORK_ADMIN_TOOLS="/usr/local/sbin"
fi
if [ -z "$GNUSTEP_NETWORK_LIBRARY" ];
then GNUSTEP_NETWORK_LIBRARY="/usr/local/lib/GNUstep"
fi
if [ -z "$GNUSTEP_NETWORK_HEADERS" ];
then GNUSTEP_NETWORK_HEADERS="/usr/local/include"
fi
if [ -z "$GNUSTEP_NETWORK_LIBRARIES" ];
then GNUSTEP_NETWORK_LIBRARIES="/usr/local/lib"
fi
if [ -z "$GNUSTEP_NETWORK_DOC" ];
then GNUSTEP_NETWORK_DOC="/usr/local/share/GNUstep/Documentation"
fi
if [ -z "$GNUSTEP_NETWORK_DOC_MAN" ];
then GNUSTEP_NETWORK_DOC_MAN="/usr/local/share/man"
fi
if [ -z "$GNUSTEP_NETWORK_DOC_INFO" ];
then GNUSTEP_NETWORK_DOC_INFO="/usr/local/share/info"
fi
#
# LOCAL domain
#
if [ -z "$GNUSTEP_LOCAL_APPS" ];
then GNUSTEP_LOCAL_APPS="/usr/local/lib/GNUstep/Applications"
fi
if [ -z "$GNUSTEP_LOCAL_ADMIN_APPS" ];
then GNUSTEP_LOCAL_ADMIN_APPS="/usr/local/lib/GNUstep/Applications"
fi
if [ -z "$GNUSTEP_LOCAL_WEB_APPS" ];
then GNUSTEP_LOCAL_WEB_APPS="/usr/local/lib/GNUstep/WebApplications"
fi
if [ -z "$GNUSTEP_LOCAL_TOOLS" ];
then GNUSTEP_LOCAL_TOOLS="/usr/local/bin"
fi
if [ -z "$GNUSTEP_LOCAL_ADMIN_TOOLS" ];
then GNUSTEP_LOCAL_ADMIN_TOOLS="/usr/local/sbin"
fi
if [ -z "$GNUSTEP_LOCAL_LIBRARY" ];
then GNUSTEP_LOCAL_LIBRARY="/usr/local/lib/GNUstep"
fi
if [ -z "$GNUSTEP_LOCAL_HEADERS" ];
then GNUSTEP_LOCAL_HEADERS="/usr/local/include"
fi
if [ -z "$GNUSTEP_LOCAL_LIBRARIES" ];
then GNUSTEP_LOCAL_LIBRARIES="/usr/local/lib"
fi
if [ -z "$GNUSTEP_LOCAL_DOC" ];
then GNUSTEP_LOCAL_DOC="/usr/local/share/GNUstep/Documentation"
fi
if [ -z "$GNUSTEP_LOCAL_DOC_MAN" ];
then GNUSTEP_LOCAL_DOC_MAN="/usr/local/share/man"
fi
if [ -z "$GNUSTEP_LOCAL_DOC_INFO" ];
then GNUSTEP_LOCAL_DOC_INFO="/usr/local/share/info"
fi
#
# USER domain
#
if [ -z "$GNUSTEP_USER_DIR_APPS" ];
then GNUSTEP_USER_DIR_APPS="GNUstep/Applications"
fi
if [ -z "$GNUSTEP_USER_DIR_ADMIN_APPS" ];
then GNUSTEP_USER_DIR_ADMIN_APPS="GNUstep/Applications/Admin"
fi
if [ -z "$GNUSTEP_USER_DIR_WEB_APPS" ];
then GNUSTEP_USER_DIR_WEB_APPS="GNUstep/WebApplications"
fi
if [ -z "$GNUSTEP_USER_DIR_TOOLS" ];
then GNUSTEP_USER_DIR_TOOLS="GNUstep/Tools"
fi
if [ -z "$GNUSTEP_USER_DIR_ADMIN_TOOLS" ];
then GNUSTEP_USER_DIR_ADMIN_TOOLS="GNUstep/Tools/Admin"
fi
if [ -z "$GNUSTEP_USER_DIR_LIBRARY" ];
then GNUSTEP_USER_DIR_LIBRARY="GNUstep/Library"
fi
if [ -z "$GNUSTEP_USER_DIR_HEADERS" ];
then GNUSTEP_USER_DIR_HEADERS="GNUstep/Library/Headers"
fi
if [ -z "$GNUSTEP_USER_DIR_LIBRARIES" ];
then GNUSTEP_USER_DIR_LIBRARIES="GNUstep/Library/Libraries"
fi
if [ -z "$GNUSTEP_USER_DIR_DOC" ];
then GNUSTEP_USER_DIR_DOC="GNUstep/Library/Documentation"
fi
if [ -z "$GNUSTEP_USER_DIR_DOC_MAN" ];
then GNUSTEP_USER_DIR_DOC_MAN="GNUstep/Library/Documentation/man"
fi
if [ -z "$GNUSTEP_USER_DIR_DOC_INFO" ];
then GNUSTEP_USER_DIR_DOC_INFO="GNUstep/Library/Documentation/info"
fi
#
# Now for all directories in the USER domain, check if they are
# relative; if so, consider them as subdirs of GNUSTEP_HOME.
#
case "$GNUSTEP_USER_DIR_APPS" in
/*) # An absolute path
GNUSTEP_USER_APPS="$GNUSTEP_USER_DIR_APPS";;
*) # Something else
GNUSTEP_USER_APPS="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_APPS";;
esac
unset GNUSTEP_USER_DIR_APPS
case "$GNUSTEP_USER_DIR_ADMIN_APPS" in
/*) # An absolute path
GNUSTEP_USER_ADMIN_APPS="$GNUSTEP_USER_DIR_ADMIN_APPS";;
*) # Something else
GNUSTEP_USER_ADMIN_APPS="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_ADMIN_APPS";;
esac
unset GNUSTEP_USER_DIR_ADMIN_APPS
case "$GNUSTEP_USER_DIR_WEB_APPS" in
/*) # An absolute path
GNUSTEP_USER_WEB_APPS="$GNUSTEP_USER_DIR_WEB_APPS";;
*) # Something else
GNUSTEP_USER_WEB_APPS="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_WEB_APPS";;
esac
unset GNUSTEP_USER_DIR_WEB_APPS
case "$GNUSTEP_USER_DIR_TOOLS" in
/*) GNUSTEP_USER_TOOLS="$GNUSTEP_USER_DIR_TOOLS";;
*) GNUSTEP_USER_TOOLS="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_TOOLS";;
esac
unset GNUSTEP_USER_DIR_TOOLS
case "$GNUSTEP_USER_DIR_ADMIN_TOOLS" in
/*) GNUSTEP_USER_ADMIN_TOOLS="$GNUSTEP_USER_DIR_ADMIN_TOOLS";;
*) GNUSTEP_USER_ADMIN_TOOLS="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_ADMIN_TOOLS";;
esac
unset GNUSTEP_USER_DIR_ADMIN_TOOLS
case "$GNUSTEP_USER_DIR_LIBRARY" in
/*) GNUSTEP_USER_LIBRARY="$GNUSTEP_USER_DIR_LIBRARY";;
*) GNUSTEP_USER_LIBRARY="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_LIBRARY";;
esac
unset GNUSTEP_USER_DIR_LIBRARY
case "$GNUSTEP_USER_DIR_HEADERS" in
/*) GNUSTEP_USER_HEADERS="$GNUSTEP_USER_DIR_HEADERS";;
*) GNUSTEP_USER_HEADERS="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_HEADERS";;
esac
unset GNUSTEP_USER_DIR_HEADERS
case "$GNUSTEP_USER_DIR_LIBRARIES" in
/*) GNUSTEP_USER_LIBRARIES="$GNUSTEP_USER_DIR_LIBRARIES";;
*) GNUSTEP_USER_LIBRARIES="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_LIBRARIES";;
esac
unset GNUSTEP_USER_DIR_LIBRARIES
case "$GNUSTEP_USER_DIR_DOC" in
/*) GNUSTEP_USER_DOC="$GNUSTEP_USER_DIR_DOC";;
*) GNUSTEP_USER_DOC="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_DOC";;
esac
unset GNUSTEP_USER_DIR_DOC
case "$GNUSTEP_USER_DIR_DOC_MAN" in
/*) GNUSTEP_USER_DOC_MAN="$GNUSTEP_USER_DIR_DOC_MAN";;
*) GNUSTEP_USER_DOC_MAN="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_DOC_MAN";;
esac
unset GNUSTEP_USER_DIR_DOC_MAN
case "$GNUSTEP_USER_DIR_DOC_INFO" in
/*) GNUSTEP_USER_DOC_INFO="$GNUSTEP_USER_DIR_DOC_INFO";;
*) GNUSTEP_USER_DOC_INFO="$GNUSTEP_HOME/$GNUSTEP_USER_DIR_DOC_INFO";;
esac
unset GNUSTEP_USER_DIR_DOC_INFO
| dwestfall/gnustep-make | filesystem.sh | Shell | gpl-3.0 | 9,107 |
#!/bin/bash
# This program fixes the ugly tooltip in Eclipse using GTK3
# running on Ubuntu with Ambiance Theme.
#
# Copyright (C) 2016 Kiran Mohan
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>
#
# TODO: fix for Radiance theme
OS_NAME="Ubuntu"
OS_VERSION="16.04"
OS_FULLNAME="$OS_NAME $OS_VERSION"
# tested only ubuntu for now
lsb_release -a 2>&1 |grep -q "$OS_FULLNAME"
if [ $? -ne 0 ]; then
echo "This scripts is only tested on $OS_FULLNAME."
echo "Bye"
exit 0;
fi
# check arguments
if [ $# -eq 0 ]; then
echo "Usage: $0 <path/to/eclipse>"
exit 0;
fi
# check ECLIPSE_HOME is valid or not
ECLIPSE_HOME=`readlink -m $1`
if [ -f $ECLIPSE_HOME ]; then
ECLIPSE_HOME=`dirname $ECLIPSE_HOME`
fi
if [ ! -f $ECLIPSE_HOME/eclipse ]; then
echo "Eclipse not found in $ECLIPSE_HOME"
exit 0;
fi
ECLIPSE_VERS=`grep version $ECLIPSE_HOME/.eclipseproduct | sed 's/version=//'`
DATE=`date|tr ' ' '_'`
# Fetch location of desktop directory
DESKTOP_DIR=$(xdg-user-dir DESKTOP)
# Directory where the modified Ambiance theme is stored.
ECLIPSE_THEME_ROOT_DIR=$HOME/.eclipse/share/themes
ECLIPSE_THEME_DIR=$ECLIPSE_THEME_ROOT_DIR/Ambiance
if [ -d $ECLIPSE_THEME_DIR ];then
mv $ECLIPSE_THEME_DIR $ECLIPSE_THEME_DIR".b4_"$DATE
fi
mkdir -p $ECLIPSE_THEME_DIR
# copy Ambiance theme and modify tooltip color
cp -rf /usr/share/themes/Ambiance/* $ECLIPSE_THEME_DIR
sed -i -r -e 's/@define-color tooltip_bg_color.*/@define-color tooltip_bg_color #f5f5c5;/'\
-e 's/@define-color tooltip_fg_color.*/@define-color tooltip_fg_color #000000;/'\
$ECLIPSE_THEME_DIR/gtk-3.0/gtk-main.css
sed -i -r -e 's/color: @tooltip_fg_color;/color: #000000;/'\
$ECLIPSE_THEME_DIR/gtk-3.0/gtk-widgets.css
# also copy Default theme (somehow needed to keep button border
# animations the same)
cp -rf /usr/share/themes/Default $ECLIPSE_THEME_DIR/../
# create Eclipse launch menu
LAUNCH_FILENAME="eclipse_$ECLIPSE_VERS.desktop"
if [ -f $DESKTOP_DIR/$LAUNCH_FILENAME ]; then
mv $DESKTOP_DIR/$LAUNCH_FILENAME $DESKTOP_DIR/$LAUNCH_FILENAME.b4_$DATE
fi
touch $DESKTOP_DIR/$LAUNCH_FILENAME
cat > $DESKTOP_DIR/$LAUNCH_FILENAME << endtext
[Desktop Entry]
Version=$ECLIPSE_VERS
Name=Eclipse $ECLIPSE_VERS
Comment=Eclipse IDE $ECLIPSE_VERS
Exec=env GTK_DATA_PREFIX=$HOME/.eclipse GTK_THEME=Ambiance $ECLIPSE_HOME/eclipse
Icon=$ECLIPSE_HOME/icon.xpm
Terminal=false
Type=Application
Categories=Utility;Application;
endtext
# Add the launch menu $HOME/.local/share/applications
if [ -f $HOME/.local/share/applications/$LAUNCH_FILENAME ]; then
mv $HOME/.local/share/applications/$LAUNCH_FILENAME $HOME/.local/share/applications/$LAUNCH_FILENAME.b4_$DATE
fi
cp $DESKTOP_DIR/$LAUNCH_FILENAME $HOME/.local/share/applications/$LAUNCH_FILENAME
# set the permission to launch files
chmod 700 $DESKTOP_DIR/$LAUNCH_FILENAME $HOME/.local/share/applications/$LAUNCH_FILENAME
# say bye
echo "Completed fixing Eclipse UI for $OS_FULLNAME"
echo "You can launch Eclipse from launcher on the Desktop or by search for Eclipse in Unity Dash"
echo "Bye"
# END OF SCRIPT
| KiranMohan/eclipse-gtk3-ubuntu | fix-eclipse-ubuntu-16.04-laf.sh | Shell | gpl-3.0 | 3,606 |
export lang=uk_UA;
| a1ive/grub2-filemanager | lang/uk_UA/lang.sh | Shell | gpl-3.0 | 19 |
#!/bin/bash
set -e
pegasus_lite_version_major="4"
pegasus_lite_version_minor="7"
pegasus_lite_version_patch="0"
pegasus_lite_enforce_strict_wp_check="true"
pegasus_lite_version_allow_wp_auto_download="true"
. pegasus-lite-common.sh
pegasus_lite_init
# cleanup in case of failures
trap pegasus_lite_signal_int INT
trap pegasus_lite_signal_term TERM
trap pegasus_lite_exit EXIT
echo -e "\n################################ Setting up workdir ################################" 1>&2
# work dir
export pegasus_lite_work_dir=$PWD
pegasus_lite_setup_work_dir
echo -e "\n###################### figuring out the worker package to use ######################" 1>&2
# figure out the worker package to use
pegasus_lite_worker_package
echo -e "\n##################### setting the xbit for executables staged #####################" 1>&2
# set the xbit for any executables staged
/bin/chmod +x example_workflow-taskevent_8-1.0
echo -e "\n############################# executing the user tasks #############################" 1>&2
# execute the tasks
set +e
pegasus-kickstart -n example_workflow::taskevent_8:1.0 -N ID0000014 -R condorpool -L example_workflow -T 2016-11-07T19:10:01+00:00 ./example_workflow-taskevent_8-1.0
job_ec=$?
set -e
| elainenaomi/sciwonc-dataflow-examples | dissertation2017/Experiment 1A/instances/11_0_workflow_full_10files_primary_3sh_3rs_noannot_with_proj_3s_hash/dags/ubuntu/pegasus/example_workflow/20161107T191002+0000/00/00/taskevent_8_ID0000014.sh | Shell | gpl-3.0 | 1,237 |
#!/bin/bash
#Script to push current repo on the repository gh-pages branch.
# we should be in /home/travis/build/ivmartel/dcmbench
echo -e "Starting to update gh-pages\n"
# go to home and setup git
cd $HOME
git config --global user.email "[email protected]"
git config --global user.name "Travis"
# using token, clone gh-pages branch
git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/ivmartel/dcmbench.git gh-pages
# clean up demo
rm -Rf $HOME/gh-pages/*
# copy current repo in gh-pages
cp -Rf $HOME/build/ivmartel/dcmbench/* $HOME/gh-pages/
# add nojekyll file
touch $HOME/gh-pages/.nojekyll
# move back to root of repo
cd $HOME/gh-pages
# add, commit and push files
git add -Af .
git commit -m "Travis build $TRAVIS_BUILD_NUMBER pushed to gh-pages"
git push -fq origin gh-pages
echo -e "Done updating.\n"
| ivmartel/dcmbench | resources/scripts/update-gh-pages.sh | Shell | gpl-3.0 | 830 |
#!/bin/bash
#git clone
git clone https://github.com/ysycloud/paraGSEA.git
cd paraGSEA
#make
make all
#install
make install
#Test
quick_search_serial
| ysycloud/paraGSEA | install.sh | Shell | gpl-3.0 | 149
#!/bin/bash
python sender.py&
LD_PRELOAD=$HOME/opt/MUSIC.install/lib/libmusic.so.1 env PATH=$HOME/opt/music-adapters.build/:$PATH mpirun -np 2 --oversubscribe ~/opt/MUSIC.install/bin/music test.music&
python receiver.py
| incf-music/music-adapters | test/test_zmq_adapter/test.sh | Shell | gpl-3.0 | 220 |
#!/bin/bash
# Install Node.js v10.10.0 under /srv and point the /srv/node symlink at it
cd /usr/local/src
wget https://nodejs.org/dist/v10.10.0/node-v10.10.0-linux-x64.tar.xz
tar xf node-v10.10.0-linux-x64.tar.xz
mv node-v10.10.0-linux-x64 /srv/node-v10.10.0
rm -f /srv/node
ln -s /srv/node-v10.10.0 /srv/node
| oscm/shell | lang/node.js/binrary/node-v10.10.0.sh | Shell | gpl-3.0 | 221
#!/bin/sh
cd "$(dirname "$0")"
must() {
status=$1
shift
"$@"
if [ $? = $status ]
then
echo "Test OK!"
else
echo "Failed test!"
fi
}
echo "Regression tests for issue #2"
must 0 ../codegrep "print" 2.0.lua
must 1 ../codegrep "print" 2.1.lua
must 0 ../codegrep "print" 2.2.lua
must 1 ../codegrep "print" 2.3.lua
must 1 ../codegrep "print" 2.4.lua
must 0 ../codegrep "print" 2.5.lua
must 0 ../codegrep "print" 2.6.lua
| hishamhm/codegrep | tests/run_tests.sh | Shell | gpl-3.0 | 450 |
#!/bin/sh
do_tests() {
rm *.$2 || true
( cd .. && echo '(compile-file "asdf")' |$1 )
for i in *.script;
do
rm *.$2 || true
if $1 < $i ;then
echo "Using $1, $i passed" >&2
else
echo "Using $1, $i failed" >&2
exit 1
fi
done
echo "Using $1, all tests apparently successful" >&2
}
# do_tests {lisp invocation} {fasl extension}
# - read lisp forms one at a time from standard input
# - quit with exit status 0 on getting eof
# - quit with exit status >0 if an unhandled error occurs
set -e
if type sbcl
then
do_tests "sbcl --userinit /dev/null --sysinit /dev/null --noprogrammer" fasl
fi
if [ -x /usr/bin/lisp ]
then
do_tests "/usr/bin/lisp -batch -noinit" x86f
fi
if [ -x /usr/bin/clisp ]
then
do_tests "/usr/bin/clisp -norc -ansi -I " fas
fi
| bjc/moxie | Lisp/asdf/test/run-tests.sh | Shell | gpl-3.0 | 779 |
#!/bin/bash
SCRIPT_ROOT=$(readlink -f $(dirname $0))
SRC_ROOT=$(readlink -f $(dirname $0)/..)
ID=$(id -u)
ARG=$1
. $SCRIPT_ROOT/config
[ -e "$SCRIPT_ROOT/config_local" ] && . $SCRIPT_ROOT/config_local
if [ $ID -eq 0 -a -d $CHROOT ]; then
# chroot and su to the specific normal user
grep $CHROOT/dev /proc/mounts > /dev/null || mount --bind /dev $CHROOT/dev
grep $CHROOT/proc /proc/mounts > /dev/null || chroot $CHROOT mount -t proc proc /proc
grep $CHROOT/sys /proc/mounts > /dev/null || chroot $CHROOT mount -t sysfs sysfs /sys
grep $CHROOT/dev/pts /proc/mounts > /dev/null || chroot $CHROOT mount -t devpts devpts /dev/pts
BASENAME=$(basename $SRC_ROOT)
DEB_KERNEL_GIT=${DEB_KERNEL_DIR}.git
LINUX_GIT=${LINUX_DIR}.git
[ -d "$CHROOT/$BASENAME" ] || mkdir $CHROOT/$BASENAME
[ -d "$CHROOT/$DEB_KERNEL_GIT" ] || mkdir $CHROOT/$DEB_KERNEL_GIT
[ -d "$CHROOT/$LINUX_GIT" ] || mkdir $CHROOT/$LINUX_GIT
grep "$CHROOT/$BASENAME" /proc/mounts > /dev/null || mount --bind $SRC_ROOT $CHROOT/$BASENAME
if ! grep "$CHROOT/$DEB_KERNEL_GIT" /proc/mounts > /dev/null; then
[ -n $DEB_KERNEL_DIR ] && [ -d "$LOCAL_HOME/${DEB_KERNEL_DIR}/.git" ] && mount --bind $LOCAL_HOME/${DEB_KERNEL_DIR}/.git $CHROOT/$DEB_KERNEL_GIT
fi
grep "$CHROOT/$LINUX_GIT" /proc/mounts > /dev/null || mount --bind $LOCAL_HOME/${LINUX_DIR}/.git $CHROOT/$LINUX_GIT
[ "x$ARG" = "xprepare" ] && exit 0
echo Start to work under chroot shell
echo chroot $CHROOT su -l $NORMALUSER
chroot $CHROOT su -l $NORMALUSER
	echo you need to run \"./umount_chroot_device.sh\" to release the chrooted device mounts after finishing all chroot shells.
fi
| rogers0/debian-kernel-cross | archived/jessie/chroot_shell.sh | Shell | gpl-3.0 | 1,621 |
#!/bin/sh
#
# monthly mean
#
echo "########## $0 start ##########"
set -x
CNFID=$1 # CNFID (e.g. "def")
START_YMD=$2 # YYYYMMDD (start day of analysis period)
ENDPP_YMD=$3 # YYYYMMDD (end+1 day of analysis period)
INPUT_DIR=$4 # input dir
OUTPUT_DIR=$5 # output dir
OVERWRITE=$6 # overwrite option (optional)
INC_SUBVARS=$7 # SUBVARS option (optional)
TARGET_VAR=$8 # variable name (optional)
set +x
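# Example invocation (paths and the variable name are illustrative):
#   ./monthly_mean.sh def 20040101 20050101 /data/daily /data/monthly yes yes ms_tem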
echo "##########"
. ./common.sh ${CNFID} || exit 1
create_temp || exit 1
TEMP_DIR=${BASH_COMMON_TEMP_DIR}
trap "finish" 0
if [ "${OVERWRITE}" != "" \
-a "${OVERWRITE}" != "yes" -a "${OVERWRITE}" != "no" \
-a "${OVERWRITE}" != "dry-rm" -a "${OVERWRITE}" != "rm" ] ; then
echo "error: OVERWRITE = ${OVERWRITE} is not supported yet." >&2
exit 1
fi
if [ "${TARGET_VAR}" = "" ] ; then
VAR_LIST=( $( ls ${INPUT_DIR}/ ) ) || exit 1
else
VAR_LIST=( ${TARGET_VAR} )
fi
NOTHING=1
#============================================================#
#
# variable loop
#
#============================================================#
for VAR in ${VAR_LIST[@]} ; do
#
#----- check whether output dir is write-protected
#
if [ -f "${OUTPUT_DIR}/${VAR}/_locked" ] ; then
echo "info: ${OUTPUT_DIR} is locked."
continue
fi
#
#----- check existence of output data
#
OUTPUT_CTL=${OUTPUT_DIR}/${VAR}/${VAR}.ctl
YM_STARTMM=$( date -u --date "${START_YMD} 1 second ago" +%Y%m ) || exit 1
YM_END=$( date -u --date "${ENDPP_YMD} 1 month ago" +%Y%m ) || exit 1
# if [ -f "${OUTPUT_CTL}" ] ; then
if [ -f "${OUTPUT_CTL}" -a "${OVERWRITE}" != "rm" -a "${OVERWRITE}" != "dry-rm" ] ; then
YM_TMP=$( date -u --date "${YM_STARTMM}01 1 month" +%Y%m ) || exit 1
FLAG=( $( grads_exist_data.sh ${OUTPUT_CTL} -ymd "[${YM_TMP}15:${YM_END}15]" ) ) || exit 1
if [ "${FLAG[0]}" = "ok" ] ; then
echo "info: Output data already exist."
continue
fi
fi
#
#----- get number of grids for input/output
#
INPUT_CTL=${INPUT_DIR}/${VAR}/${VAR}.ctl
if [ ! -f "${INPUT_CTL}" ] ; then
echo "warning: ${INPUT_CTL} does not exist."
continue
fi
DIMS=( $( grads_ctl.pl ${INPUT_CTL} DIMS NUM ) ) || exit 1
XDEF=${DIMS[0]} ; YDEF=${DIMS[1]} ; ZDEF=${DIMS[2]}
TDEF=${DIMS[3]} ; EDEF=${DIMS[4]}
TDEF_START=$( grads_ctl.pl ${INPUT_CTL} TDEF 1 ) || exit 1
TDEF_INCRE_SEC=$( grads_ctl.pl ${INPUT_CTL} TDEF INC --unit SEC | sed -e "s/SEC//" ) || exit 1
SUBVARS=( ${VAR} )
if [ "${INC_SUBVARS}" = "yes" ] ; then
SUBVARS=( $( grads_ctl.pl ${INPUT_CTL} VARS ALL ) ) || exit 1
fi
VDEF=${#SUBVARS[@]}
#
START_HMS=$( date -u --date "${TDEF_START}" +%H%M%S )
TMP_H=${START_HMS:0:2}
TMP_M=${START_HMS:2:2}
let TMP_MN=TMP_H*60+TMP_M
#
#----- check existence of input data
#
if [ "${START_HMS}" != "000000" ] ; then
FLAG=( $( grads_exist_data.sh ${INPUT_CTL} -ymd "(${START_YMD}:${ENDPP_YMD}]" ) ) || exit 1
else
FLAG=( $( grads_exist_data.sh ${INPUT_CTL} -ymd "[${START_YMD}:${ENDPP_YMD})" ) ) || exit 1
fi
if [ "${FLAG[0]}" != "ok" ] ; then
echo "warning: All or part of data does not exist (CTL=${INPUT_CTL})."
continue
fi
#
#---- generate control file (unified)
#
mkdir -p ${OUTPUT_DIR}/${VAR}/log || exit 1
if [ "${OVERWRITE}" != "rm" -a "${OVERWRITE}" != "dry-rm" ] ; then
if [ "${INC_SUBVARS}" = "yes" ] ; then
grads_ctl.pl ${INPUT_CTL} > ${OUTPUT_CTL}.tmp1 || exit 1
else
TMP=$( grads_ctl.pl ${INPUT_CTL} VARS | grep ^${VAR} )
grads_ctl.pl ${INPUT_CTL} --set "VARS 1" --set "${TMP}" > ${OUTPUT_CTL}.tmp1 || exit 1
fi
#
STR_ENS=""
[ ${EDEF} -gt 1 ] && STR_ENS="_bin%e"
#
YM=$( date -u --date "${TDEF_START}" +%Y%m ) || exit 1
let TDEF_SEC=TDEF_INCRE_SEC*${TDEF}
# OUTPUT_YM_END=$( date -u --date "${TDEF_START} ${TDEF_SEC} seconds 1 month ago" +%Y%m ) || exit 1
OUTPUT_YM_END=$( date -u --date "${TDEF_START} ${TDEF_SEC} seconds" +%Y%m ) || exit 1
OUTPUT_YM_END=$( date -u --date "${OUTPUT_YM_END}01 1 month ago" +%Y%m ) || exit 1
OUTPUT_TDEF=0
while [ ${YM} -le ${OUTPUT_YM_END} ] ; do
let OUTPUT_TDEF=OUTPUT_TDEF+1
YM=$( date -u --date "${YM}01 1 month" +%Y%m )
done
OUTPUT_TDEF_START=15$( date -u --date "${TDEF_START}" +%b%Y ) || exit 1
sed ${OUTPUT_CTL}.tmp1 \
-e "s|^DSET .*$|DSET ^%y4/${VAR}_%y4%m2${STR_ENS}.grd|" \
-e "s/TEMPLATE//ig" \
-e "s/^OPTIONS .*$/OPTIONS TEMPLATE BIG_ENDIAN/i" \
-e "s/ yrev//i" \
-e "s/^UNDEF .*$/UNDEF -0.99900E+35/i" \
-e "s/^TDEF .*$/TDEF ${OUTPUT_TDEF} LINEAR ${OUTPUT_TDEF_START} 1mo/" \
-e "s/^ -1,40,1 / 99 /" \
-e "/^CHSUB .*$/d" \
> ${OUTPUT_CTL} || exit 1
rm ${OUTPUT_CTL}.tmp1
fi
#
#========================================#
# month loop (for each file)
#========================================#
YM=${YM_STARTMM}
while [ ${YM} -lt ${YM_END} ] ; do
#
#----- set/proceed date -----#
#
YM=$( date -u --date "${YM}01 1 month" +%Y%m ) || exit 1
YMPP=$( date -u --date "${YM}01 1 month" +%Y%m ) || exit 1
YEAR=${YM:0:4} ; MONTH=${YM:4:2}
#
#----- output data
#
# File name convention
# 2004/ms_tem_20040601.grd (center of the date if incre > 1dy)
#
mkdir -p ${OUTPUT_DIR}/${VAR}/${YEAR} || exit 1
#
# output file exist?
for(( e=1; ${e}<=${EDEF}; e=${e}+1 )) ; do
STR_ENS=""
if [ ${EDEF} -gt 1 ] ; then
STR_ENS=$( printf "%03d" ${e} ) || exit 1
STR_ENS="_bin${STR_ENS}"
fi
#
OUTPUT_DATA=${OUTPUT_DIR}/${VAR}/${YEAR}/${VAR}_${YEAR}${MONTH}${STR_ENS}.grd
#
# [ ! -d ${OUTPUT_DIR}/${VAR} ] && mkdir -p ${OUTPUT_DIR}/${VAR}
if [ -f ${OUTPUT_DATA} ] ; then
SIZE_OUT=$( ls -lL ${OUTPUT_DATA} | awk '{ print $5 }' ) || exit 1
SIZE_OUT_EXACT=$( echo "4*${XDEF}*${YDEF}*${ZDEF}*${VDEF}" | bc ) || exit 1
if [ ${SIZE_OUT} -eq ${SIZE_OUT_EXACT} -a "${OVERWRITE}" != "yes" \
-a "${OVERWRITE}" != "dry-rm" -a "${OVERWRITE}" != "rm" ] ; then
continue 2
fi
echo "Removing ${OUTPUT_DATA}."
echo ""
[ "${OVERWRITE}" = "dry-rm" ] && continue 1
rm -f ${OUTPUT_DATA}
fi
done
[ "${OVERWRITE}" = "rm" -o "${OVERWRITE}" = "dry-rm" ] && continue 1
#
# average
#
NOTHING=0
if [ "${START_HMS}" != "000000" ] ; then
TMIN=$( grads_time2t.sh ${INPUT_CTL} ${YM}01 -gt ) || exit 1
TMAX=$( grads_time2t.sh ${INPUT_CTL} ${YMPP}01 -le ) || exit 1
else
TMIN=$( grads_time2t.sh ${INPUT_CTL} ${YM}01 -ge ) || exit 1
TMAX=$( grads_time2t.sh ${INPUT_CTL} ${YMPP}01 -lt ) || exit 1
fi
echo "YM=${YM} (TMIN=${TMIN}, TMAX=${TMAX})"
#
cd ${TEMP_DIR}
for(( e=1; ${e}<=${EDEF}; e=${e}+1 )) ; do
STR_ENS=""
TEMPLATE_ENS=""
if [ ${EDEF} -gt 1 ] ; then
STR_ENS=$( printf "%03d" ${e} ) || exit 1
STR_ENS="_bin${STR_ENS}"
TEMPLATE_ENS="_bin%e"
fi
OUTPUT_DATA=${OUTPUT_DIR}/${VAR}/${YEAR}/${VAR}_${YEAR}${MONTH}${STR_ENS}.grd
#
rm -f temp.grd temp2.grd
for SUBVAR in ${SUBVARS[@]} ; do
cat > temp.gs <<EOF
'reinit'
rc = gsfallow('on')
'xopen ../${INPUT_CTL}'
'set gxout fwrite'
'set fwrite -be temp2.grd'
'set undef -0.99900E+35'
'set x 1 ${XDEF}'
'set y 1 ${YDEF}'
'set e ${e}'
z = 1
while( z <= ${ZDEF} )
prex( 'set z 'z )
prex( 'd ave(${SUBVAR},t=${TMIN},t=${TMAX})' )
z = z + 1
endwhile
'disable fwrite'
'quit'
EOF
if [ ${VERBOSE} -ge 1 ] ; then
[ ${VERBOSE} -ge 2 ] && cat temp.gs
grads -blc temp.gs || exit 1
else
grads -blc temp.gs > temp.log || { cat temp.log ; exit 1 ; }
fi
#
cat temp2.grd >> temp.grd || exit 1
rm temp2.grd temp.gs
done
mv temp.grd ../${OUTPUT_DATA} || exit 1
done
cd - > /dev/null || exit 1
done # year/month loop
done # variable loop
[ ${NOTHING} -eq 1 ] && echo "info: Nothing to do."
echo "$0 normally finished."
echo
| kodamail/data_conv | monthly_mean.sh | Shell | gpl-3.0 | 8,071 |
#!/usr/bin/env bash
sudo apt-get update
sudo apt-get install -y python-software-properties software-properties-common
sudo add-apt-repository -y ppa:pi-rho/dev
sudo apt-get update
sudo apt-get install -y tmux=2.0-1~ppa1~t
sudo apt-get -y install libreadline6 libreadline-dev libreadline-dev libreadline6-dev libconfig-dev libssl-dev tmux lua5.2 liblua5.2-dev lua-socket lua-sec lua-expat libevent-dev make unzip redis-server autoconf git g++ libjansson-dev libpython-dev expat libexpat1-dev ppa-purge python3-pip python3-dev software-properties-common python-software-properties gcc-6
sudo add-apt-repository ppa:ubuntu-toolchain-r/test
sudo apt-get update
sudo apt-get -y upgrade
sudo apt-get -y dist-upgrade
sudo ppa-purge
sudo apt-get install python3-pip
pip3 install redis
chmod 777 telegram-cli-1222
chmod 777 anticrash.sh
chmod 777 tabchi-*.sh
RED='\033[0;31m'
NC='\033[0m'
CYAN='\033[0;36m'
echo -e "${CYAN}Installation Completed! Create a bot with creator.lua (python3 creator.py)${NC}"
exit
| SayedSadat/Tab | install.sh | Shell | gpl-3.0 | 994 |
#!/bin/bash
set -ev
cmake --build build
(cd build ; cpack --verbose -G DragNDrop)
| mariokonrad/vhf-sim | .travis/osx/script.sh | Shell | gpl-3.0 | 84 |
#!/bin/bash
#------------------
# File: run-tests.sh
# Author: Wolfger Schramm <[email protected]>
# Created: 19.04.2011 15:23:56 CEST
./utils/build_all.rb
mm-build
phantomjs test/testrunner.js file://`pwd`/test/offline.html
| spearwolf/kiwoticum | old-stuff/App/run-tests-offline.sh | Shell | gpl-3.0 | 229 |
#!/usr/bin/env bash
#
# This script assumes a linux environment
echo "*** AdNauseam.firefox: Creating web store package"
BLDIR=dist/build
DES="$BLDIR"/adnauseam.firefox
rm -rf $DES
mkdir -p $DES/webextension
VERSION=`jq .version manifest.json` # top-level adnauseam manifest
echo "*** AdNauseam.firefox: Copying common files"
bash ./tools/copy-common-files.sh $DES
cp platform/firefox/manifest.json $DES/
cp platform/firefox/webext.js $DES/js/
cp platform/firefox/vapi-webrequest.js $DES/js/
# Webext-specific
rm $DES/img/icon_128.png
sed -i '' "s/\"{version}\"/${VERSION}/" $DES/manifest.json
echo "*** AdNauseam.firefox: Generating meta..."
python tools/make-firefox-meta.py $DES/
if [ "$1" = all ]; then
echo "*** AdNauseam.firefox: Creating package..."
pushd $(dirname $DES/) > /dev/null
zip adnauseam.firefox.zip -qr $(basename $DES/)/*
popd > /dev/null
elif [ -n "$1" ]; then
echo "*** AdNauseam.firefox: Creating versioned package..."
pushd $DES > /dev/null
zip ../$(basename $DES).xpi -qr *
popd > /dev/null
mv "$BLDIR"/uBlock0.firefox.xpi "$BLDIR"/uBlock0_"$1".firefox.xpi
fi
echo "*** AdNauseam.firefox: Package done."
echo
| cqx931/AdNauseam | tools/make-firefox.sh | Shell | gpl-3.0 | 1,194 |
#!/bin/sh
#
# See the openvpn man page for details on `--up`
#
/sbin/iptables -A FORWARD -o "${dev}" -i eth0 -s 192.168.2.0/24 -m conntrack --ctstate NEW -j ACCEPT
| nharward/openvpn-gateway | on-up.sh | Shell | gpl-3.0 | 165 |
# download toolchain from:
# https://sourcery.mentor.com/GNUToolchain/subscription3053?lite=arm&lite=ARM&signature=4-1330864567-0-e3ad3089427f58b2a4a8bdf30f5fb0fb4ae5e79f
# Sourcery CodeBench Lite 2013.11-24
# https://sourcery.mentor.com/GNUToolchain/release2635
# https://sourcery.mentor.com/GNUToolchain/package12190/public/arm-none-eabi/arm-2013.11-24-arm-none-eabi-i686-pc-linux-gnu.tar.bz2
#
# make -C target/firmware CROSS_COMPILE=arm-none-eabi-
# make CROSS_COMPILE=arm-none-eabi- -f Makefile.test
# make CROSS_COMPILE=arm-none-eabi- -f Makefile.test.mtk
# make mtk-loader
# make -d CROSS_COMPILE=arm-none-eabi- -f Makefile.test.mtk > a
# ./osmocon -p /dev/ttyUSB0 -m mtk ../../target/firmware/board/mt62xx/loader.mtkram.bin
# ./osmocon -p /dev/ttyUSB0 -m mtk ./loader_mtk.mtkram.bin
CROSS_COMPILE=arm-none-eabi-
PATH=/home/nouser/mobile/arm-2013.11/bin:$PATH
#PATH=/mnt/sda2/mobile/arm-2013.11/bin:$PATH
| Ma3X/boot-talker | docs/toolchains/armenv.sh | Shell | gpl-3.0 | 920 |
#!/bin/bash
mkdir deps
cd deps
#----------------------------------------------------
echo "mustache.go ..."
#git clone git://github.com/hoisie/mustache.go.git
git clone git://github.com/jsz/mustache.go.git
cd mustache.go && gomake && gomake install && cd ..
#----------------------------------------------------
echo "web.go ..."
#git clone git://github.com/hoisie/web.go.git
git clone git://github.com/jsz/web.go.git
cd web.go && gomake && gomake install && cd ..
#----------------------------------------------------
echo "mgo ..."
goinstall launchpad.net/mgo
#----------------------------------------------------
echo "done ..."
| kybernetyk/fettemama | getdeps.sh | Shell | gpl-3.0 | 635 |
#!/bin/bash
# modules/kerberos/run.sh
# Copyright Vince Mulhollon 2014
# GPLv3 license
pkg install --no-repo-update --yes krb5 pam_krb5-rh | $NIHLOG
cp $NIH/krb5.conf /etc/krb5.conf
exit 0
| vincemulhollon/nihconfig | modules/kerberos/run.sh | Shell | gpl-3.0 | 193 |
#!/bin/bash
# pwd is the git repo.
set -e
echo "Install peep"
pip install bin/peep-2.5.0.tar.gz
echo "Install Python dependencies"
peep install -r requirements.txt
# less important requirements
pip install -r dev-requirements.txt
echo "Creating a test database"
mysql -e 'create database peekaboo;'
# psql -c 'create database airmozilla;' -U postgres
| mozilla/peekaboo | bin/travis/install.sh | Shell | mpl-2.0 | 354 |
#!/bin/bash
#
# This file is part of SerialPundit.
#
# Copyright (C) 2014-2020, Rishi Gupta. All rights reserved.
#
# The SerialPundit is DUAL LICENSED. It is made available under the terms of the GNU Affero
# General Public License (AGPL) v3.0 for non-commercial use and under the terms of a commercial
# license for commercial use of this software.
#
# The SerialPundit is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#################################################################################################
# Script to be executed by a udev rule. It plays the given sound file whenever a USB-serial
# device is added or removed. Two different sounds can be played for connect and disconnect events.
# When installing your software, copy event.wav audio file in /usr/share/sp directory. This script
# should be installed in /usr/bin directory.
# This script can also be executed from within Java code, as shown below, to indicate events to the user.
# Asynchronous :
# ProcessBuilder pb = new ProcessBuilder("/usr/bin/play-sound.sh");
# Process p = pb.start();
# Synchronous :
# ProcessBuilder pb = new ProcessBuilder("/usr/bin/play-sound.sh");
# Process p = pb.start();
# p.waitFor();
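# An illustrative udev rule that could trigger this script (the exact match
# keys depend on your adapter; treat this as a sketch, not the shipped rule):
# ACTION=="add", SUBSYSTEM=="tty", SUBSYSTEMS=="usb", RUN+="/usr/bin/play-sound.sh"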
aplay /usr/share/sp/event.wav
| RishiGupta12/serial-communication-manager | tools-and-utilities/play-sound.sh | Shell | agpl-3.0 | 1,358 |
#!/bin/sh
# local config section
CATALINA_HOME=/path/to/tomcat
JAVA_HOME=/path/to/java
TOMCAT_USER=tomcat
# end of local config section
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# resolve links - $0 may be a softlink
ARG0="$0"
while [ -h "$ARG0" ]; do
ls=`ls -ld "$ARG0"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
ARG0="$link"
else
ARG0="`dirname $ARG0`/$link"
fi
done
DIRNAME="`dirname $ARG0`"
PROGRAM="`basename $ARG0`"
# use a while loop so the shifts below actually take effect ("for o" iterates
# over a snapshot of the arguments and ignores shift)
while [ $# -gt 0 ]; do
  case "$1" in
--java-home )
JAVA_HOME="$2"
shift; shift;
continue
;;
--catalina-home )
CATALINA_HOME="$2"
shift; shift;
continue
;;
--catalina-base )
CATALINA_BASE="$2"
shift; shift;
continue
;;
--catalina-pid )
CATALINA_PID="$2"
shift; shift;
continue
;;
--tomcat-user )
TOMCAT_USER="$2"
shift; shift;
continue
;;
* )
break
;;
esac
done
# OS specific support (must be 'true' or 'false').
cygwin=false;
darwin=false;
case "`uname`" in
CYGWIN*)
cygwin=true
;;
Darwin*)
darwin=true
;;
esac
# Use the maximum available, or set MAX_FD != -1 to use that
test ".$MAX_FD" = . && MAX_FD="maximum"
# Setup parameters for running the jsvc
#
test ".$TOMCAT_USER" = . && TOMCAT_USER=tomcat
# Set JAVA_HOME to working JDK or JRE
# JAVA_HOME=/opt/jdk-1.6.0.22
# If not set we'll try to guess the JAVA_HOME
# from java binary if on the PATH
#
if [ -z "$JAVA_HOME" ]; then
JAVA_BIN="`which java 2>/dev/null || type java 2>&1`"
test -x "$JAVA_BIN" && JAVA_HOME="`dirname $JAVA_BIN`"
test ".$JAVA_HOME" != . && JAVA_HOME=`cd "$JAVA_HOME/.." >/dev/null; pwd`
else
JAVA_BIN="$JAVA_HOME/bin/java"
fi
# Only set CATALINA_HOME if not already set
test ".$CATALINA_HOME" = . && CATALINA_HOME=`cd "$DIRNAME/.." >/dev/null; pwd`
test ".$CATALINA_BASE" = . && CATALINA_BASE="$CATALINA_HOME"
test ".$CATALINA_MAIN" = . && CATALINA_MAIN=org.apache.catalina.startup.Bootstrap
test ".$JSVC" = . && JSVC="$CATALINA_BASE/bin/jsvc"
# Ensure that any user defined CLASSPATH variables are not used on startup,
# but allow them to be specified in setenv.sh, in rare case when it is needed.
CLASSPATH=
JAVA_OPTS=
if [ -r "$CATALINA_BASE/bin/setenv.sh" ]; then
. "$CATALINA_BASE/bin/setenv.sh"
elif [ -r "$CATALINA_HOME/bin/setenv.sh" ]; then
. "$CATALINA_HOME/bin/setenv.sh"
fi
# Add on extra jar files to CLASSPATH
test ".$CLASSPATH" != . && CLASSPATH="${CLASSPATH}:"
CLASSPATH="$CLASSPATH$CATALINA_HOME/bin/bootstrap.jar:$CATALINA_HOME/bin/commons-daemon.jar"
test ".$CATALINA_OUT" = . && CATALINA_OUT="$CATALINA_BASE/logs/catalinadaemon.out"
test ".$CATALINA_TMP" = . && CATALINA_TMP="$CATALINA_BASE/temp"
# Add tomcat-juli.jar to classpath
# tomcat-juli.jar can be over-ridden per instance
if [ -r "$CATALINA_BASE/bin/tomcat-juli.jar" ] ; then
CLASSPATH="$CLASSPATH:$CATALINA_BASE/bin/tomcat-juli.jar"
else
CLASSPATH="$CLASSPATH:$CATALINA_HOME/bin/tomcat-juli.jar"
fi
# Set juli LogManager config file if it is present and an override has not been issued
if [ -z "$LOGGING_CONFIG" ]; then
if [ -r "$CATALINA_BASE/conf/logging.properties" ]; then
LOGGING_CONFIG="-Djava.util.logging.config.file=$CATALINA_BASE/conf/logging.properties"
else
# Bugzilla 45585
LOGGING_CONFIG="-Dnop"
fi
fi
test ".$LOGGING_MANAGER" = . && LOGGING_MANAGER="-Djava.util.logging.manager=org.apache.juli.ClassLoaderLogManager"
JAVA_OPTS="$JAVA_OPTS $LOGGING_MANAGER"
# Set -pidfile
test ".$CATALINA_PID" = . && CATALINA_PID="$CATALINA_BASE/logs/catalina-daemon.pid"
# Increase the maximum file descriptors if we can
if [ "$cygwin" = "false" ]; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ "$?" -eq 0 ]; then
# Darwin does not allow RLIMIT_INFINITY on file soft limit
if [ "$darwin" = "true" -a "$MAX_FD_LIMIT" = "unlimited" ]; then
MAX_FD_LIMIT=`/usr/sbin/sysctl -n kern.maxfilesperproc`
fi
test ".$MAX_FD" = ".maximum" && MAX_FD="$MAX_FD_LIMIT"
ulimit -n $MAX_FD
if [ "$?" -ne 0 ]; then
echo "$PROGRAM: Could not set maximum file descriptor limit: $MAX_FD"
fi
else
echo "$PROGRAM: Could not query system maximum file descriptor limit:$MAX_FD_LIMIT"
fi
fi
# ----- Execute The Requested Command -----------------------------------------
case "$1" in
run )
shift
"$JSVC" $* \
$JSVC_OPTS \
-java-home "$JAVA_HOME" \
-pidfile "$CATALINA_PID" \
-wait 10 \
-nodetach \
-outfile "&1" \
-errfile "&2" \
-classpath "$CLASSPATH" \
"$LOGGING_CONFIG" $JAVA_OPTS $CATALINA_OPTS \
-Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" \
-Dcatalina.base="$CATALINA_BASE" \
-Dcatalina.home="$CATALINA_HOME" \
-Djava.io.tmpdir="$CATALINA_TMP" \
$CATALINA_MAIN
exit $?
;;
start )
"$JSVC" $JSVC_OPTS \
-java-home "$JAVA_HOME" \
-user $TOMCAT_USER \
-pidfile "$CATALINA_PID" \
-wait 10 \
-outfile "$CATALINA_OUT" \
-errfile "&1" \
-classpath "$CLASSPATH" \
"$LOGGING_CONFIG" $JAVA_OPTS $CATALINA_OPTS \
-Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" \
-Dcatalina.base="$CATALINA_BASE" \
-Dcatalina.home="$CATALINA_HOME" \
-Djava.io.tmpdir="$CATALINA_TMP" \
$CATALINA_MAIN
exit $?
;;
stop )
"$JSVC" $JSVC_OPTS \
-stop \
-pidfile "$CATALINA_PID" \
-classpath "$CLASSPATH" \
-Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" \
-Dcatalina.base="$CATALINA_BASE" \
-Dcatalina.home="$CATALINA_HOME" \
-Djava.io.tmpdir="$CATALINA_TMP" \
$CATALINA_MAIN
exit $?
;;
version )
"$JSVC" \
-java-home "$JAVA_HOME" \
-pidfile "$CATALINA_PID" \
-classpath "$CLASSPATH" \
-errfile "&2" \
-version \
-check \
$CATALINA_MAIN
if [ "$?" = 0 ]; then
"$JAVA_BIN" \
-classpath "$CATALINA_HOME/lib/catalina.jar" \
org.apache.catalina.util.ServerInfo
fi
exit $?
;;
* )
echo "Unkown command: \`$1'"
echo "Usage: $PROGRAM ( commands ... )"
echo "commands:"
echo " run Start Catalina without detaching from console"
echo " start Start Catalina"
echo " stop Stop Catalina"
echo " version What version of commons daemon and Tomcat"
echo " are you running?"
exit 1
;;
esac
| WASP-System/central | wasp-doc/src/scripts/tomcat7-init.d.sh | Shell | agpl-3.0 | 6,767 |
#!/usr/bin/env bash
set -e
. ../build_config.sh
rm -rf tmp
mkdir tmp
cd tmp
echo "Building zlib $ZLIB_VERSION"
curl -SLO https://zlib.net/$ZLIB_VERSION.tar.gz
tar -xf $ZLIB_VERSION.tar.gz
cd $ZLIB_VERSION/
CC=$HOST-gcc AR="$HOST-ar" RANLIB=$HOST-ranlib ./configure \
--prefix=$PREFIX \
--static
make
make install
cd ../..
rm -r tmp
| MTG/essentia | packaging/win32_3rdparty/build_zlib.sh | Shell | agpl-3.0 | 348 |
#!/bin/sh
set -e
GEOIPDB=http://geolite.maxmind.com/download/geoip/database/GeoIPCountryCSV.zip
. ${0%/*}/db_auth.sh
usage() {
echo Usage: ${0##*/} [options]
echo
echo Options:
echo " " -h display this help message
echo " " -s be more silent \(show only warnings\)
}
# getopt
SILENT=""
args=`getopt -o sh -l silent,help -- "$@"`
eval set -- "$args"
while true; do
case "$1" in
-h|--help) usage; exit 0 ;;
-s|--silent) SILENT="1"; shift ;;
--) shift; break ;;
*) echo "Invalid option: $1"; exit 1 ;;
esac
done
if [ ${#SILENT} -ne 0 ]; then
# we won't see all the index creation notices when creating tables
export PGOPTIONS='--client_min_messages=warning'
fi
psql --set "ON_ERROR_STOP=1" -f - <<EOF
BEGIN;
DROP TABLE IF EXISTS geoip CASCADE;
CREATE TABLE geoip (
id SERIAL PRIMARY KEY,
begin_ip BIGINT,
end_ip BIGINT,
country CHAR(2)
);
DROP FUNCTION IF EXISTS inet_to_bigint(INET);
CREATE OR REPLACE FUNCTION inet_to_bigint(ip INET)
RETURNS BIGINT AS
\$\$
DECLARE
w TEXT;
x TEXT;
y TEXT;
z TEXT;
sp TEXT[];
BEGIN
sp := regexp_split_to_array(ip::text, E'\\\\.');
w := sp[1];
x := sp[2];
y := sp[3];
z := substring(sp[4], 0, strpos(sp[4], '/'));
return 16777216*w::bigint + 65536*x::bigint + 256*y::bigint + z::bigint;
END;
\$\$ LANGUAGE plpgsql IMMUTABLE;
COMMIT;
EOF
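# Optional sanity check for inet_to_bigint (works for CIDR input, whose text
# form keeps the "/"): 10.0.0.0/8 should map to 16777216*10 = 167772160
# psql -c "SELECT inet_to_bigint('10.0.0.0/8');"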
DIR=`mktemp -d`
trap "rm -rf $DIR;" EXIT
cd $DIR
wget $GEOIPDB
GEOIPCVS=$(zipinfo -1 ${GEOIPDB##*/} | grep '\.csv$')
if [ $(echo $GEOIPCVS | wc -w) -lt "1" ]; then
    echo "There is no csv file in the archive. Canceling"
    exit 1
elif [ $(echo $GEOIPCVS | wc -w) -gt "1" ]; then
    echo "There is more than one csv file in the archive. Which one should I pick ?"
    exit 1
fi
unzip ${GEOIPDB##*/} $GEOIPCVS
# insert all values from csv to database
sed -e 's/"\([^"]\+\)","\([^"]\+\)","\([^"]\+\)","\([^"]\+\)","\([^"]\+\)","\([^"]\+\)"/INSERT INTO geoip (begin_ip, end_ip, country) VALUES ('\''\3'\'','\''\4'\'','\''\5'\'');/' $GEOIPCVS | psql --set "ON_ERROR_STOP=1" -f -
psql --set "ON_ERROR_STOP=1" -c "VACUUM ANALYZE geoip;"
| niavok/syj | scripts/updategeoip.sh | Shell | agpl-3.0 | 2,121 |
#!/usr/bin/env bash
: ${OS?}
: ${ARCH?}
: ${TAG:=}
: ${BRANCH?}
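# example invocation (values are illustrative):
# OS=linux ARCH=x86_64 BRANCH=master TAG=v1.4.3 ./package/build.sh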
set -eux
BASE_DIR=$(cd $(dirname $0)/.. && pwd)
TMP_BUILD_DIR=$BASE_DIR/$(mktemp -d build.XXXXXXXX)
trap 'rm -rf $TMP_BUILD_DIR' EXIT
cd $TMP_BUILD_DIR
curl -s -L "https://github.com/acoustid/ffmpeg-build/releases/download/v4.2.2-3/ffmpeg-4.2.2-audio-$OS-$ARCH.tar.gz" | tar xz
export FFMPEG_DIR=$TMP_BUILD_DIR/$(ls -d ffmpeg-* | tail -n 1)
CMAKE_ARGS=(
-DCMAKE_INSTALL_PREFIX=$BASE_DIR/chromaprint-$OS-$ARCH
-DCMAKE_BUILD_TYPE=Release
-DBUILD_TOOLS=ON
-DBUILD_TESTS=OFF
-DBUILD_SHARED_LIBS=OFF
)
STRIP=strip
case $OS in
windows)
perl -pe "s!{EXTRA_PATHS}!$FFMPEG_DIR!g" $BASE_DIR/package/toolchain-mingw.cmake.in | perl -pe "s!{ARCH}!$ARCH!g" >toolchain.cmake
CMAKE_ARGS+=(
-DCMAKE_TOOLCHAIN_FILE=$TMP_BUILD_DIR/toolchain.cmake
-DCMAKE_C_FLAGS='-static -static-libgcc -static-libstdc++'
-DCMAKE_CXX_FLAGS='-static -static-libgcc -static-libstdc++'
-DHAVE_AV_PACKET_UNREF=1
-DHAVE_AV_FRAME_ALLOC=1
-DHAVE_AV_FRAME_FREE=1
)
STRIP=$ARCH-w64-mingw32-strip
;;
macos)
CMAKE_ARGS+=(
-DCMAKE_CXX_FLAGS='-stdlib=libc++'
)
;;
linux)
case $ARCH in
i686)
CMAKE_ARGS+=(
-DCMAKE_C_FLAGS='-m32 -static -static-libgcc -static-libstdc++'
-DCMAKE_CXX_FLAGS='-m32 -static -static-libgcc -static-libstdc++'
)
;;
x86_64)
CMAKE_ARGS+=(
-DCMAKE_C_FLAGS='-static -static-libgcc -static-libstdc++'
-DCMAKE_CXX_FLAGS='-static -static-libgcc -static-libstdc++'
)
;;
arm*)
perl -pe "s!{EXTRA_PATHS}!$FFMPEG_DIR!g" $BASE_DIR/package/toolchain-armhf.cmake.in | perl -pe "s!{ARCH}!$ARCH!g" >toolchain.cmake
CMAKE_ARGS+=(
-DCMAKE_TOOLCHAIN_FILE=$TMP_BUILD_DIR/toolchain.cmake
-DCMAKE_C_FLAGS='-static -static-libgcc -static-libstdc++'
-DCMAKE_CXX_FLAGS='-static -static-libgcc -static-libstdc++'
)
STRIP=arm-linux-gnueabihf-strip
;;
*)
echo "unsupported architecture ($ARCH)"
exit 1
esac
;;
*)
echo "unsupported OS ($OS)"
exit 1
;;
esac
cmake "${CMAKE_ARGS[@]}" $BASE_DIR
make VERBOSE=1
make install VERBOSE=1
$STRIP $BASE_DIR/chromaprint-$OS-$ARCH/bin/fpcalc*
case $TAG in
v*)
VERSION=$(echo $TAG | sed 's/^v//')
;;
*)
VERSION=$BRANCH-$(date +%Y%m%d%H%M)
;;
esac
FPCALC_DIR=chromaprint-fpcalc-$VERSION-$OS-$ARCH
rm -rf $FPCALC_DIR
mkdir $FPCALC_DIR
cp $BASE_DIR/chromaprint-$OS-$ARCH/bin/fpcalc* $FPCALC_DIR
case $OS in
windows)
zip -r $BASE_DIR/$FPCALC_DIR.zip $FPCALC_DIR
;;
*)
tar -zcvf $BASE_DIR/$FPCALC_DIR.tar.gz $FPCALC_DIR
;;
esac
| lalinsky/chromaprint | package/build.sh | Shell | lgpl-2.1 | 2,751 |
#!/bin/sh
main_file=/home/reemahs/EclipseProjects/Final_Year_Project/NESTCameraSimulator/json
file_name=NEST_Camera_One.json
hub_url=http://localhost:8081
java -cp "nestcamerasimulator.jar:lib/jackson-annotations-2.8.7.jar:lib/jackson-core-2.8.7.jar:lib/jackson-databind-2.8.7.jar:../../IoTCommon/deploy/iotcommon.jar:../../IoTCommon/deploy/lib/jackson-annotations-2.8.7.jar:../../IoTCommon/deploy/lib/jackson-core-2.8.7.jar:../../IoTCommon/deploy/lib/jackson-databind-2.8.7.jar:../../IoTCommon/deploy/lib/log4j-api-2.3.jar:../../IoTCommon/deploy/lib/log4j-core-2.3.jar" \
    iot.interop.nestcamsim.sim.NestCamSimulator "$main_file" "$file_name" "$hub_url"
| Reemahs/Cloud-Based-Internet-of-Things-Data-Interoperability-Repository | NESTCameraSimulator/deploy/run.sh | Shell | lgpl-2.1 | 634 |
# Run with ./run-tests.sh
test_that_stbt_lint_passes_existing_images() {
cat > test.py <<-EOF &&
import stbt
stbt.wait_for_match('$testdir/videotestsrc-redblue.png')
EOF
stbt lint --errors-only test.py
}
test_that_stbt_lint_fails_nonexistent_image() {
cat > test.py <<-EOF &&
import stbt
stbt.wait_for_match('idontexist.png')
EOF
! stbt lint --errors-only test.py
}
test_that_stbt_lint_ignores_generated_image_names() {
cat > test.py <<-EOF &&
import os
import stbt
from os.path import join
var = 'idontexist'
stbt.wait_for_match(var + '.png')
stbt.wait_for_match('%s.png' % var)
stbt.wait_for_match(os.path.join('directory', 'idontexist.png'))
stbt.wait_for_match(join('directory', 'idontexist.png'))
EOF
stbt lint --errors-only test.py
}
test_that_stbt_lint_ignores_regular_expressions() {
cat > test.py <<-EOF &&
import re
re.match(r'.*/(.*)\.png', '')
EOF
stbt lint --errors-only test.py
}
test_that_stbt_lint_ignores_images_created_by_the_stbt_script() {
cat > test.py <<-EOF &&
import cv2, stbt
stbt.save_frame(stbt.get_frame(), 'i-dont-exist-yet.png')
cv2.imwrite('neither-do-i.png', stbt.get_frame())
from cv2 import imwrite
from stbt import save_frame
save_frame(stbt.get_frame(), 'i-dont-exist-yet.png')
imwrite('neither-do-i.png', stbt.get_frame())
EOF
stbt lint --errors-only --extension-pkg-whitelist=cv2 test.py
}
test_that_stbt_lint_ignores_multiline_image_name() {
cat > test.py <<-EOF &&
import subprocess
subprocess.check_call("""set -e
tvservice -e "CEA 16" # 1080p60
sudo fbi -T 1 -noverbose original.png
sudo fbi -T 2 -noverbose original.png""")
EOF
stbt lint --errors-only test.py
}
test_that_stbt_lint_ignores_image_urls() {
cat > test.py <<-EOF &&
import urllib2
urllib2.urlopen("http://example.com/image.png")
EOF
stbt lint --errors-only test.py
}
test_pylint_plugin_on_itself() {
# It should work on arbitrary python files, so that you can just enable it
# as a pylint plugin across your entire project, not just for stbt scripts.
stbt lint --errors-only "$srcdir"/_stbt/pylint_plugin.py
}
test_that_stbt_lint_checks_uses_of_stbt_return_values() {
cat > test.py <<-EOF &&
import re, stbt
from stbt import is_screen_black, match, match_text, ocr, press, wait_until
def test_something():
assert wait_until(lambda: True)
some_var = wait_until(lambda: True)
if wait_until(lambda: True): pass
wait_until(lambda: True)
stbt.wait_until(lambda: True)
something_else_that_ends_in_wait_until() # pylint:disable=E0602
assert match('$testdir/videotestsrc-redblue.png')
match('$testdir/videotestsrc-redblue.png')
re.match('foo', 'bah')
press('KEY_OK')
is_screen_black()
match_text('hello')
ocr()
EOF
stbt lint --errors-only test.py > lint.log
cat > lint.expected <<-'EOF'
************* Module test
E: 8, 4: "wait_until" return value not used (missing "assert"?) (stbt-unused-return-value)
E: 9, 4: "stbt.wait_until" return value not used (missing "assert"?) (stbt-unused-return-value)
E: 12, 4: "match" return value not used (missing "assert"?) (stbt-unused-return-value)
E: 15, 4: "is_screen_black" return value not used (missing "assert"?) (stbt-unused-return-value)
E: 16, 4: "match_text" return value not used (missing "assert"?) (stbt-unused-return-value)
E: 17, 4: "ocr" return value not used (missing "assert"?) (stbt-unused-return-value)
EOF
diff -u lint.expected lint.log
}
test_that_stbt_lint_checks_that_wait_until_argument_is_callable() {
cat > test.py <<-EOF &&
import functools
from functools import partial
from stbt import is_screen_black, press, wait_until
def return_a_function():
return lambda: True
def test_something():
press('KEY_POWER')
assert wait_until(is_screen_black)
assert wait_until(is_screen_black())
assert wait_until(return_a_function())
assert wait_until(return_a_function()())
assert wait_until(lambda: True)
assert wait_until((lambda: True)())
assert wait_until(functools.partial(lambda x: True, x=3))
assert wait_until(functools.partial(lambda x: True, x=3)())
assert wait_until(partial(lambda x: True, x=3)) # Pylint can't infer functools.partial. pylint:disable=stbt-wait-until-callable
assert wait_until(partial(lambda x: True, x=3)())
EOF
stbt lint --errors-only test.py > lint.log
cat > lint.expected <<-'EOF'
************* Module test
E: 11,11: "wait_until" argument "is_screen_black()" isn't callable (stbt-wait-until-callable)
E: 13,11: "wait_until" argument "return_a_function()()" isn't callable (stbt-wait-until-callable)
E: 15,11: "wait_until" argument "lambda : True()" isn't callable (stbt-wait-until-callable)
E: 17,11: "wait_until" argument "functools.partial(lambda x: True, x=3)()" isn't callable (stbt-wait-until-callable)
E: 19,11: "wait_until" argument "partial(lambda x: True, x=3)()" isn't callable (stbt-wait-until-callable)
EOF
diff -u lint.expected lint.log
}
test_that_stbt_lint_checks_frame_parameter_in_frameobject_methods() {
cat > test.py <<-EOF
from stbt import FrameObject, match, match_text, ocr, is_screen_black
def find_boxes(frame=None):
pass
class ModalDialog(FrameObject):
@property
def is_visible(self):
return bool(find_boxes())
class ErrorDialog(ModalDialog):
@property
def is_visible(self):
return bool(
match("videotestsrc-redblue.png") and
match_text("Error") and
not is_screen_black())
@property
def text(self):
return ocr()
class Good(FrameObject):
@property
def is_visible(self):
return bool(find_boxes(self._frame))
@property
def property1(self):
return bool(match("videotestsrc-redblue.png", self._frame))
@property
def property2(self):
return bool(match("videotestsrc-redblue.png", frame=self._frame))
def not_a_property(self):
return bool(match("videotestsrc-redblue.png"))
def normal_test():
assert match("videotestsrc-redblue.png")
EOF
cp "$testdir/videotestsrc-redblue.png" .
stbt lint --errors-only test.py > lint.log
cat > lint.expected <<-'EOF'
************* Module test
E: 9,20: "find_boxes()" missing "frame" argument (stbt-frame-object-missing-frame)
E: 15,12: "match('videotestsrc-redblue.png')" missing "frame" argument (stbt-frame-object-missing-frame)
E: 16,12: "match_text('Error')" missing "frame" argument (stbt-frame-object-missing-frame)
E: 17,16: "is_screen_black()" missing "frame" argument (stbt-frame-object-missing-frame)
E: 21,15: "ocr()" missing "frame" argument (stbt-frame-object-missing-frame)
EOF
diff -u lint.expected lint.log
}
test_that_stbt_lint_ignores_astroid_inference_exceptions() {
cat > test.py <<-EOF
import stbt
assert stbt.wait_until(InfoPage)
EOF
stbt lint --errors-only test.py > lint.log
cat > lint.expected <<-'EOF'
************* Module test
E: 2, 7: "wait_until" argument "InfoPage" isn't callable (stbt-wait-until-callable)
E: 2,23: Undefined variable 'InfoPage' (undefined-variable)
EOF
diff -u lint.expected lint.log
}
| LewisHaley/stb-tester | tests/test-stbt-lint.sh | Shell | lgpl-2.1 | 7,252 |
#!/bin/sh
# Copyright (C) 2007-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
factor="50%"
listfiles="\
medop-gui-aliasfield.png \
medop-gui-result.png \
medop-gui-selectfield.png \
medop-gui-visufield.png"
for file in $listfiles; do
echo "Processing file $file ..."
bn=$(basename $file .png)
outfile=$bn"_scale.png"
convert -scale $factor $file $outfile
done
| FedoraScientific/salome-med | src/MEDOP/doc/sphinx/images/convert.sh | Shell | lgpl-2.1 | 1,182 |
#!/bin/sh
VER=0.40.4-0
cd data
rm -f ../AndroidData/openttd-data-*.zip.xz ../AndroidData/openttd-data-*.zip
zip -0 -r ../AndroidData/openttd-data-$VER.zip . && xz -8 ../AndroidData/openttd-data-$VER.zip
| pelya/commandergenius | project/jni/application/openttd-jgrpp/pack-data.sh | Shell | lgpl-2.1 | 205 |
#!/bin/sh
echo 0.3.0
| berndhs/qxmpp | version.sh | Shell | lgpl-2.1 | 21 |
# Make.sh = update Makefile.lib, Makefile.shlib, Makefile.list
# or style_*.h files
# Syntax: sh Make.sh style
# sh Make.sh Makefile.lib
# sh Make.sh Makefile.shlib
# sh Make.sh Makefile.list
# function to create one style_*.h file
# must whack *.d files that depend on style_*.h file,
# else Make will not recreate them
style () {
  # modified by Stefan Radl to generate version info
builddate=`date +%Y-%m-%d-%H:%M:%S`
wai=`whoami`
vers=`cat version.txt`
bra=`cat version_branch.txt`
echo "#define C3PO_VERSION \"$bra-$vers, compiled $builddate by $wai\"" > version.h
list=`grep -sl $1 $2*.h`
if (test -e style_$3.tmp) then
rm -f style_$3.tmp
fi
for file in $list; do
qfile="\"$file\""
echo "#include $qfile" >> style_$3.tmp
done
if (test ! -e style_$3.tmp) then
if (test ! -e style_$3.h) then
touch style_$3.h
elif (test "`cat style_$3.h`" != "") then
rm -f style_$3.h
touch style_$3.h
rm -f Obj_*/$4.d
if (test $5) then
rm -f Obj_*/$5.d
fi
rm -f Obj_*/lammps.d
fi
elif (test ! -e style_$3.h) then
mv style_$3.tmp style_$3.h
rm -f Obj_*/$4.d
if (test $5) then
rm -f Obj_*/$5.d
fi
rm -f Obj_*/lammps.d
elif (test "`diff --brief style_$3.h style_$3.tmp`" != "") then
mv style_$3.tmp style_$3.h
rm -f Obj_*/$4.d
if (test $5) then
rm -f Obj_*/$5.d
fi
rm -f Obj_*/lammps.d
else
rm -f style_$3.tmp
fi
}
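# After "sh Make.sh style", each generated style_*.h is just a list of includes,
# e.g. (names are illustrative; actual entries depend on the *_*.h files found):
#   #include "operation_average.h"
#   #include "operation_sum.h"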
# create individual style files
# called by "make machine"
# col 1 = string to search for
# col 2 = search in *.h files starting with this name
# col 3 = prefix of style file
# col 4 (and optional col 5) = sources whose Obj_*/<name>.d dependency files
#         are removed, so that Make regenerates them against the new style_*.h
if (test "$1" = "style") then
#NOTIMPLEMENTED
style OPERATION_CLASS operation_ operation void
style SELECTOR_CLASS selector_ selector void
elif (test $1 = "Makefile.lib") then
echo "Building Makefile.lib..."
# edit Makefile.lib, for creating non-shared lib
# called by "make makelib"
# use current list of *.cpp and *.h files in src dir w/out main.cpp
list=`ls -1 *.cpp | sed s/^main\.cpp// | tr "[:cntrl:]" " "`
sed -i -e "s/SRC = .*/SRC = $list/" Makefile.lib
list=`ls -1 *.h | tr "[:cntrl:]" " "`
sed -i -e "s/INC = .*/INC = $list/" Makefile.lib
elif (test $1 = "Makefile.shlib") then
# edit Makefile.shlib, for creating shared lib
# called by "make makeshlib"
# use current list of *.cpp and *.h files in src dir w/out main.cpp
list=`ls -1 *.cpp | sed s/^main\.cpp// | tr "[:cntrl:]" " "`
sed -i -e "s/SRC = .*/SRC = $list/" Makefile.shlib
list=`ls -1 *.h | tr "[:cntrl:]" " "`
sed -i -e "s/INC = .*/INC = $list/" Makefile.shlib
fi
| CFDEMproject/C3PO-PUBLIC | core/Make.sh | Shell | lgpl-3.0 | 2,661 |
#!/bin/bash
#Your task is to use for loops to display only odd natural numbers from 1
#to 99
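# {1..99..2} is bash brace expansion: start at 1, stop at 99, step by 2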
for i in {1..99..2}
do
echo ${i}
done
| heticor915/ProgrammingContest | HackerRank/LinuxShell/Bash/Looping_Skipping.sh | Shell | lgpl-3.0 | 142 |
#!/bin/bash
free=`free -mt | grep Total | awk '{print $4}'`
echo $free
if [ $free > 256 ]; then
ps -eo %mem,pid,user,args >/tmp/processes.txt
echo 'Warning, free memory is '$free'mb' | notify-send
fi
| SenchoPens/dotfiles | bin/check_ram.sh | Shell | unlicense | 217 |
eval $(ssh-agent -s)
ssh-add gitkey
| TheDenys/Scripts | sh/setkey.sh | Shell | unlicense | 36 |
#!/bin/bash
group="$1"
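# fail fast if no group was supplied (otherwise chgrp would get an empty argument)
if [ -z "$group" ]; then
    echo "usage: $0 <group>" >&2
    exit 1
fi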
sudo chgrp --recursive "$group" /var/lib/jenkins && sudo chmod --recursive g+w /var/lib/jenkins
| gnawhleinad/wendy | test/util/permission.sh | Shell | unlicense | 118 |
#!/bin/bash
#this shellscript combines your images into a gif
#in order to get the software you need to run this script on Raspbian, run:
#1.) sudo apt-get install imagemagick
#2.) make sure this script is in the same folder as capture.sh and the "timelapse" folder
#you can run this script manually or do it as a cronjob if you want
convert ./timelapse/*.jpeg -set delay 10 output.gif
#for an explanation of how to modify this to suit your own needs, go to http://gotofritz.net/blog/geekery/combining-images-imagemagick/
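#example crontab entry (path and schedule are illustrative - rebuild the gif nightly):
#0 0 * * * /home/pi/combine.sh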
| TroyCitizenScience/timelapse | combine.sh | Shell | unlicense | 512 |
#!/bin/bash
# this script copies the required dynamic lib (libtensorflow_cc.so) to desired folder
if [[ $(/usr/bin/id -u) -ne 0 ]]; then
echo "*** You need to run this script as root (try with sudo) ***"
echo
echo "This script":
echo " 1. copies libtensorflow_cc.so"
echo " from src path (ofxMSATensorFlow/libs/tensorflow/lib/linux64 by default)"
echo " to destination folder (~/lib by defaut)"
echo " 2. adds this path to /etc/ld.so.conf.d/libtensorflow_cc.so.conf"
echo " 3. runs ldconfig"
echo
echo
echo "Usage (parameters optional):"
echo
echo "$ update_lib.sh [dst_folder] [src_folder]"
echo
exit
fi
# DEFAULTS
LIB_DST=$HOME'/lib' # override with arg 1
LIB_SRC='../../libs/tensorflow/lib/linux64/libtensorflow_cc.so' # override with arg2
DST_LD_CONF='/etc/ld.so.conf.d/libtensorflow_cc.so.conf'
if ! ([ -z "$1" ]); then LIB_DST=$1; fi
if ! ([ -z "$2" ]); then LIB_SRC=$2; fi
echo 'Copying' $LIB_SRC 'to' $LIB_DST
mkdir -p $LIB_DST
cp $LIB_SRC $LIB_DST
echo 'Writing path to' $DST_LD_CONF
echo $LIB_DST > $DST_LD_CONF
echo 'Running ldconfig...'
ldconfig
| memo/ofxMSATensorFlow | scripts/ubuntu/update_lib.sh | Shell | apache-2.0 | 1,101 |
#!/usr/bin/env bash
# from extended_B.1.sh
ID=$(basename "$0" | sed "s/.sh$//g")
ABS_PATH=$(readlink -f $0)
cd $(dirname $(dirname $(dirname ${ABS_PATH})))
MYDIR=logs/${ID}
mkdir -p ${MYDIR}
cp ${ABS_PATH} ${MYDIR}
CUDA_VISIBLE_DEVICES=5 \
python -u main.py \
--experiment_id ${ID} \
--data_name scan \
--train_file SCAN/length_split/tasks_train_length.txt \
--test_file SCAN/length_split/tasks_test_length.txt \
--model_name rand_reg \
--random_seed 9 \
--batch_size 64 \
--switch_temperature 0.1 \
--attention_temperature 1 \
--num_units 32 \
--epochs 10000 \
--learning_rate 0.01 \
--max_gradient_norm 1.0 \
--use_input_length \
--use_embedding \
--embedding_size 8 \
--bidirectional_encoder \
--random_batch \
--decay_steps 100 \
--remove_switch \
--content_noise \
--function_noise \
--content_noise_coe 0.01 \
--noise_weight 1 \
--sample_wise_content_noise \
--masked_attention \
--random_random \
| tee ${MYDIR}/log.txt
python attention_visualization.py \
--hide_switch \
  --experiment_id ${ID}
| yli1/CGPS | experiments/length_main/length_main_B.sh | Shell | apache-2.0 | 1,004 |
#!/bin/bash
if [ -z "$ROOT" ]
then
ROOT=$(while ! test -e env.sh.sample; do cd ..; done; pwd)
export ROOT
fi
. $ROOT/env.sh
START_FOLDER=$PWD
mkdir -p $IDS_FOLDER
mkdir -p $TMPDIR
echo
echo "*********************************"
echo servIoTicy final demo tester
echo Listing SOs and DPPs
echo "*********************************"
echo
$SCRIPTS/get_access_token.sh $START_FOLDER
$SCRIPTS/get_random_access_token.sh $START_FOLDER
$SCRIPTS/delete_sos.sh $START_FOLDER
echo Done.
echo
| servioticy/servioticy-demo | delete_all_so.sh | Shell | apache-2.0 | 494 |
####################
# functions
####################
# print available colors and their numbers
function colours() {
for i in {0..255}; do
printf "\x1b[38;5;${i}m colour${i}"
if (( $i % 5 == 0 )); then
printf "\n"
else
printf "\t"
fi
done
}
# Create a new directory and enter it
function md() {
mkdir -p "$@" && cd "$@"
}
function hist() {
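    # tally command names (2nd field of `history` output) and print the 10 most used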
history | awk '{a[$2]++}END{for(i in a){print a[i] " " i}}' | sort -rn | head
}
# find shorthand
function f() {
find . -name "$1"
}
# take this repo and copy it to somewhere else minus the .git stuff.
function gitexport(){
mkdir -p "$1"
git archive master | tar -x -C "$1"
}
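# usage: gitexport ../myproject-export    (target directory is created if missing)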
# get gzipped size
function gz() {
echo "orig size (bytes): "
cat "$1" | wc -c
echo "gzipped size (bytes): "
gzip -c "$1" | wc -c
}
# Extract archives - use: extract <file>
# Credits to http://dotfiles.org/~pseup/.bashrc
function extract() {
    if [ -f "$1" ] ; then
        case "$1" in
            *.tar.bz2) tar xjf "$1" ;;
            *.tar.gz) tar xzf "$1" ;;
            *.bz2) bunzip2 "$1" ;;
            *.rar) rar x "$1" ;;
            *.gz) gunzip "$1" ;;
            *.tar) tar xf "$1" ;;
            *.tbz2) tar xjf "$1" ;;
            *.tgz) tar xzf "$1" ;;
            *.zip) unzip "$1" ;;
            *.Z) uncompress "$1" ;;
            *.7z) 7z x "$1" ;;
            *) echo "'$1' cannot be extracted via extract()" ;;
        esac
    else
        echo "'$1' is not a valid file"
}
# set the background color to light
function light() {
export BACKGROUND="light" && reload!
}
function dark() {
export BACKGROUND="dark" && reload!
}
| omalsa04/dotfiles | zsh/functions.zsh | Shell | apache-2.0 | 1,666 |
#!/bin/sh
# logit "\n"
# info "4 - Container Images and Build Files"
# 4.1
check_4_1="4.1 - Create a user for the container"
check=$(echo $check_4_1 | cut -d "-" -f 1)
check_description=$(echo $check_4_1 | cut -d "-" -f 2,3 | sed -e 's/^[ \t]*//')
# If container_users is empty, there are no running containers
if [ -z "$containers" ]; then
# info "$check_4_1"
# info " * No containers running"
./docker-bench-security-logging.sh "$check" "$check_description" "info" "No containers running"
else
# We have some containers running, set failure flag to 0. Check for Users.
fail=0
# Make the loop separator be a new-line in POSIX compliant fashion
set -f; IFS=$'
'
for c in $containers; do
user=$(docker inspect --format 'User={{.Config.User}}' "$c")
if [ "$user" = "User=" -o "$user" = "User=[]" -o "$user" = "User=<no value>" ]; then
# If it's the first container, fail the test
if [ $fail -eq 0 ]; then
# warn "$check_4_1"
# warn " * Running as root: $c"
./docker-bench-security-logging.sh "$check" "$check_description" "warn" "Running as root: $c"
fail=1
else
# warn " * Running as root: $c"
./docker-bench-security-logging.sh "$check" "$check_description" "warn" "Running as root: $c"
fi
fi
done
# We went through all the containers and found none running as root
if [ $fail -eq 0 ]; then
# pass "$check_4_1"
./docker-bench-security-logging.sh "$check" "$check_description" "pass" "No container runs as root"
fi
fi
# Make the loop separator go back to space
set +f; unset IFS
| ehsanmostajeran/docker-bench-security-logging | tests/4_container_images.sh | Shell | apache-2.0 | 1,587 |
#!/bin/bash
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eo pipefail
if [[ -z "${PROJECT_ROOT:-}" ]]; then
PROJECT_ROOT="github/google-resumable-media-python"
fi
cd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
# Debug: show build environment
env | grep KOKORO
# Setup service account credentials.
export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
# Setup project id.
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
# Remove old nox
python3 -m pip uninstall --yes --quiet nox-automation
# Install nox
python3 -m pip install --upgrade --quiet nox
python3 -m nox --version
# If this is a continuous build, send the test log to the FlakyBot.
# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
cleanup() {
chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
$KOKORO_GFILE_DIR/linux_amd64/flakybot
}
trap cleanup EXIT HUP
fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
python3 -m nox -s ${NOX_SESSION:-}
else
python3 -m nox
fi
| googleapis/google-resumable-media-python | .kokoro/build.sh | Shell | apache-2.0 | 1,772 |
## \cond
#HEADSTART##############################################################
#
#PROJECT: UnifiedTraceAndLogManager
#AUTHOR: Arno-Can Uestuensoez - [email protected]
#MAINTAINER: Arno-Can Uestuensoez - [email protected]
#SHORT: utalm-bash
#LICENSE: Apache-2.0 + CCL-BY-SA-3.0
#
#
########################################################################
#
# Copyright [2007,2008,2010,2013] Arno-Can Uestuensoez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
########################################################################
#
# refer to source-package for unstripped sources
#
#HEADEND################################################################
###
## \endcond
VERSION=03_03_005
RELEASE=03_03_005
VARIANT=VARIANT
DATE="2011.12.07"
TIME="06:13:14"
###
LOC="0"
LOCNET="0"
LOD="16"
| ArnoCan/utalm | src/utalm-bash/src/conf/versinfo.gen.sh | Shell | apache-2.0 | 1,354 |
#!/bin/bash -e
OPTS=`getopt -o o:p --long os:,publish,help -n 'parse-options' -- "$@"`
eval set -- "$OPTS"
BASE_DIR=$(dirname $(dirname $(dirname $(realpath ${0}))))
echo "Base ${BASE_DIR}"
OS=""
TASK="build"
while true; do
case "$1" in
-o | --os) OS="${2}"; shift;shift ;;
-p | --publish) TASK="publish"; shift;;
-- ) shift; break ;;
* ) break ;;
esac
done
if [[ "${OS}" = "" ]]; then
echo "No target specified"
exit -1
fi
if [[ "${FONT_AWESOME_TOKEN}" = "" ]]; then
echo "No font awesome token provided"
exit -1
fi
TASK="${TASK}:${OS}"
WORK_DIR=${BASE_DIR}/dist/${OS}
echo "Executing '${TASK}' with work dir ${WORK_DIR}"
mkdir -p ${WORK_DIR}/node-modules
mkdir -p ${WORK_DIR}/dist
mkdir -p ${WORK_DIR}/electron-cache
mkdir -p ${WORK_DIR}/electron-builder-cache
#mkdir -p ~/.cache/electron/${OS}
#mkdir -p ~/.cache/electron-builder/${OS}
docker run --rm -ti \
--env-file <(env | grep -iE 'DEBUG|NODE_|ELECTRON_|YARN_|NPM_|CI|CIRCLE|TRAVIS_TAG|TRAVIS|TRAVIS_REPO_|TRAVIS_BUILD_|TRAVIS_BRANCH|TRAVIS_PULL_REQUEST_|APPVEYOR_|CSC_|GH_|GITHUB_|BT_|AWS_|STRIP|BUILD_') \
--env ELECTRON_CACHE="/root/.cache/electron" \
--env ELECTRON_BUILDER_CACHE="/root/.cache/electron-builder" \
-v ${BASE_DIR}:/project \
-v ${HOME}/.npmrc:/root/.npmrc \
-v ${WORK_DIR}/dist:/project/dist \
-v ${WORK_DIR}/node-modules:/project/node_modules \
-v ${WORK_DIR}/electron-cache:/root/.cache/electron \
-v ${WORK_DIR}/electron-builder-cache:/root/.cache/electron-builder \
electronuserland/builder:wine-chrome \
/bin/bash -c "yarn && yarn run ${TASK}"
# npm config set \"@fortawesome:registry\" https://npm.fontawesome.com/ && npm config set \"//npm.fontawesome.com/:_authToken\" ${FONT_AWESOME_TOKEN}
| densebrain/epictask | etc/scripts/build-other-os.sh | Shell | apache-2.0 | 1,703 |
#!/bin/sh
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
mkdir -p Sources
ROOT=../../../.build/debug
$ROOT/google-api-swift-generator disco-compute-v1.json > Sources/Compute.swift
$ROOT/google-cli-swift-generator disco-compute-v1.json > Sources/main.swift
| googleapis/google-api-swift-client | Examples/Google/Compute/GENERATE.sh | Shell | apache-2.0 | 809 |
tests_dir="$(athena.arg 1)"
php_exit_code=0
pushd "$tests_dir" 1>/dev/null
if ! athena.argument.argument_exists '--exclude'; then
find . -name "*.php" | SHELL=$SHELL parallel php -l >/dev/null
else
list_arg=$(athena.argument.arg --exclude)
list=$(echo $list_arg | sed "s#,#\|#g")
# the 'SHELL=$SHELL' is necessary to avoid a 'parallel' warning
# but it should not happen and might be a bug in the shell or
# in perl or in parallel itself
find . -name "*.php" | egrep -v -e "$list" | SHELL=$SHELL parallel php -l {} >/dev/null
php_exit_code=$?
fi
popd 1>/dev/null
if [[ $php_exit_code -ne 0 ]]; then
athena.os.exit $php_exit_code
fi
athena.color.print_ok "No errors were found..."
| athena-oss/plugin-php | bin/cmd/lint.sh | Shell | apache-2.0 | 692 |
#! /bin/bash
### ---- parameters expected ----
# see readme for additional details
#
# 1. service identifier for the process being monitored (optional)
### -----------------------------
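# Example (the service id below is illustrative): ./stream-jmx.sh MyAppServer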
if command -v realpath >/dev/null 2>&1; then
SCRIPTPATH=`dirname $(realpath $0)`
else
SCRIPTPATH=$( cd "$(dirname "$0")" ; pwd -P )
fi
LIBPATH="$SCRIPTPATH/../*:$SCRIPTPATH/../lib/*"
TNT4JOPTS="-Dtnt4j.dump.on.vm.shutdown=true -Dtnt4j.dump.on.exception=true -Dtnt4j.dump.provider.default=true"
### --- tnt4j file ----
if [[ -z "$TNT4J_PROPERTIES" ]]; then
TNT4J_PROPERTIES="$SCRIPTPATH/../config/tnt4j.properties"
fi
TNT4JOPTS="$TNT4JOPTS -Dtnt4j.config=$TNT4J_PROPERTIES"
### -------------------
### --- log4j file ----
if [[ -z "$LOG4J_PROPERTIES" ]]; then
LOG4J_PROPERTIES="$SCRIPTPATH/../config/log4j2.xml"
fi
TNT4JOPTS="$TNT4JOPTS -Dlog4j2.configurationFile=file:$LOG4J_PROPERTIES"
### --- stream log file name ---
#TNT4JOPTS="$TNT4JOPTS -Dtnt4j.stream.log.filename=$SCRIPTPATH/../logs/tnt4j-stream-jmx.log"
### --- streamed activities log file name ---
#TNT4JOPTS="$TNT4JOPTS -Dtnt4j.activities.log.filename=$SCRIPTPATH/../logs/tnt4j-stream-jmx_samples.log"
### -------------------
### ---- AppServer identifies source ----
if [[ -z "$TNT4J_APPSERVER" ]]; then
TNT4J_APPSERVER="Default"
fi
if [[ "x$1" != "x" ]] && [[ "x$1" != "x." ]]; then
TNT4J_APPSERVER="$1"
fi
TNT4JOPTS="$TNT4JOPTS -Dfile.encoding=UTF-8 -Dsjmx.serviceId=$TNT4J_APPSERVER"
### -------------------------------------
JAVA_EXEC="java"
if [[ "$JAVA_HOME" == "" ]]; then
echo '"JAVA_HOME" env. variable is not defined!..'
else
echo 'Will use java from:' "$JAVA_HOME"
JAVA_EXEC="$JAVA_HOME/bin/java"
fi
$JAVA_EXEC $TNT4JOPTS -classpath "$LIBPATH" com.jkoolcloud.tnt4j.stream.jmx.SamplingAgent "*:*" "" 10000 60000
| Nastel/tnt4j-stream-jmx | bin/stream-jmx.sh | Shell | apache-2.0 | 1,803 |
#!/bin/bash
for ffmpegdir in `find ffmpeg* -maxdepth 0 -type d | sort -rV`
do
rm -rf $ffmpegdir
done
for tarfile in `find releases/ffmpeg* | sort -rV`
do
tar -xf $tarfile
done
RELEASE_TIMESTAMP=`date +%Y%m%d%H%M`
for ffmpegdir in `find ffmpeg* -maxdepth 0 -type d | sort -rV`
do
mv $ffmpegdir $ffmpegdir.android
touch -t $RELEASE_TIMESTAMP ./ffmpeg4android.readme
cp ./ffmpeg4android.readme $ffmpegdir.android/
for makefile in `find makefiles/* | grep .mk`
do
touch -t $RELEASE_TIMESTAMP $makefile
cp $makefile ${makefile/#makefiles/$ffmpegdir.android}
done
done
rm -rf releases.android
mkdir releases.android
for ffmpegdir in `find ffmpeg*.android -maxdepth 0 -type d | sort -rV`
do
tar czf releases.android/$ffmpegdir.tar.gz $ffmpegdir &
done
# wait for the backgrounded tar jobs to finish before timestamping the archives
wait
for ffmpegdir in `find ffmpeg*.android -maxdepth 0 -type d | sort -rV`
do
touch -t $RELEASE_TIMESTAMP releases.android/$ffmpegdir.tar.gz
done
| zhangkom/ffmpeg4android-code | create-ffmpeg4android.sh | Shell | apache-2.0 | 952 |
#!/usr/bin/env bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Runs prow/pj-on-kind.sh with config arguments specific to the prow.istio.io instance.
# Requries go, docker, and kubectl.
# Example usage:
# ./pj-on-kind.sh pull-test-infra-prow-checkconfig
set -o errexit
set -o nounset
set -o pipefail
export CONFIG_PATH="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/oss/config.yaml)"
export JOB_CONFIG_PATH="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/prowjobs)"
bash <(curl -sSfL https://raw.githubusercontent.com/kubernetes/test-infra/master/prow/pj-on-kind.sh) "$@"
| GoogleCloudPlatform/oss-test-infra | prow/pj-on-kind.sh | Shell | apache-2.0 | 1,107 |
#!/bin/bash
# Copyright 2012-2014 Brno University of Technology (Author: Karel Vesely)
# Apache 2.0
# This example script creates a multilingual nnet. The training is done in 3 stages:
# 1. FMLLR features: It generates fmllr features from the multilingual training data.
# 2. DBN Pre-training: To initialize the nnet, it can
# a) train a dbn using the multilingual fmllr features or
# b) use an existing pre-trained dbn or dnn from the user
# 3. DNN cross-entropy training: It fine-tunes the initialized nnet using
# the multilingual training data (deterministic transcripts).
#
# Usage: $0 --precomp-dnn "exp/dnn4e-fmllr_multisoftmax/final.nnet" "AR CA HG MD UR" "SW"
# Usage: $0 --precomp-dbn "exp/dnn4_pretrain-dbn/6.dbn" "AR CA HG MD UR" "SW"
. ./cmd.sh ## You'll want to change cmd.sh to something that will work on your system.
## This relates to the queue.
. ./path.sh ## Source the tools/utils (import the queue.pl)
stage=0 # resume training with --stage=N
feats_nj=4
train_nj=8
decode_nj=4
precomp_dbn=
precomp_dnn=
hid_layers=6
hid_dim=1024
splice=5 # temporal splicing
splice_step=1 # stepsize of the splicing (1 == no gap between frames)
bn_dim=
train_iters=20
use_delta=false
skip_decode=false
train_dbn=true # by default, train a dbn
# End of config.
. utils/parse_options.sh || exit 1;
echo "$0 $@" # Print the command line for logging
if [ $# != 7 ]; then
  echo "usage: $0 <train lang> <test lang> <gmmdir> <alidir> <data_fmllr> <nnetinitdir> <nnetoutdir>"
  exit 1;
fi
TRAIN_LANG=$1
TEST_LANG=$2
gmmdir=$3 # exp/tri3c/${TEST_LANG}
alidir=$4 # exp/tri3c_ali/${TEST_LANG}
data_fmllr=$5 # data-fmllr-tri3c/${TEST_LANG}
nnetinitdir=$6
nnetoutdir=$7
UNILANG_CODE=$(echo $TRAIN_LANG |sed 's/ /_/g')
[[ ! -z ${precomp_dnn} ]] && train_dbn=false
[[ ! -z ${precomp_dbn} ]] && train_dbn=true
$train_dbn && echo "Will use a DBN to init target DNN" || echo "Will either use - a) randomly initialized DNN or b) supplied DNN - to init target DNN"
#echo ==========================
#if [ $stage -le 0 ]; then
#steps/align_fmllr.sh --nj "$train_nj" --cmd "$train_cmd" \
#data/${UNILANG_CODE}/train data/${UNILANG_CODE}/lang $gmmdir ${alidir} 2>&1 | tee ${alidir}/align.log
#fi
#echo ==========================
if [ $stage -le 1 ]; then
# Store fMLLR features, so we can train on them easily
# eval
for lang in ${TRAIN_LANG} ${TEST_LANG}; do
dir=$data_fmllr/$lang/eval
steps/nnet/make_fmllr_feats.sh --nj $feats_nj --cmd "$train_cmd" \
--transform-dir $gmmdir/decode_eval_$lang \
$dir data/$lang/eval $gmmdir $dir/log $dir/data || exit 1
steps/compute_cmvn_stats.sh $dir $dir/log $dir/data || exit 1;
utils/validate_data_dir.sh --no-text $dir
done
# dev
for lang in ${TRAIN_LANG} ${TEST_LANG}; do
dir=$data_fmllr/$lang/dev
steps/nnet/make_fmllr_feats.sh --nj $feats_nj --cmd "$train_cmd" \
--transform-dir $gmmdir/decode_dev_$lang \
$dir data/$lang/dev $gmmdir $dir/log $dir/data || exit 1
steps/compute_cmvn_stats.sh $dir $dir/log $dir/data || exit 1;
utils/validate_data_dir.sh --no-text $dir
done
# train
for lang in ${UNILANG_CODE} ; do
dir=$data_fmllr/$lang/train
steps/nnet/make_fmllr_feats.sh --nj $feats_nj --cmd "$train_cmd" \
--transform-dir $alidir \
$dir data/$lang/train $alidir $dir/log $dir/data || exit 1
steps/compute_cmvn_stats.sh $dir $dir/log $dir/data || exit 1;
utils/validate_data_dir.sh --no-text $dir
# split the data : 90% train 10% cross-validation (held-out)
utils/subset_data_dir_tr_cv.sh $dir ${dir}_tr90 ${dir}_cv10 || exit 1
done
fi
if [ $stage -le 2 ]; then
if $train_dbn ; then
# First check for pre-computed DBN dir. Then try pre-computed DNN dir. If both fail, generate DBN now.
mkdir -p $nnetinitdir
if [[ ! -z ${precomp_dbn} ]]; then
echo "using pre-computed dbn ${precomp_dbn}"
# copy the dbn and feat xform from dbn dir
cp -r ${precomp_dbn} $nnetinitdir
    # Keep the lines below commented out if you want the feature transform estimation to be computed from the adaptation data (SBS)
#cp $(dirname ${precomp_dbn})/final.feature_transform $dir
#feature_transform=$dir/final.feature_transform
#feature_transform_opt=$(echo "--feature-transform $feature_transform")
else
echo "train with a randomly initialized DBN"
# Pre-train DBN, i.e. a stack of RBMs (small database, smaller DNN)
(tail --pid=$$ -F $nnetinitdir/log/pretrain_dbn.log 2>/dev/null)& # forward log
if [[ ! -z $bn_layer ]]; then
$cuda_cmd $nnetinitdir/log/pretrain_dbn.log \
local/nnet/pretrain_dbn.sh --nn-depth $hid_layers --hid-dim $hid_dim \
--bn-layer $bn_layer --bn-dim $bn_dim --splice $splice --splice-step $splice_step \
--cmvn-opts "--norm-means=true --norm-vars=true" \
--delta-opts "--delta-order=2" --splice 5 \
--rbm-iter 20 $data_fmllr/${UNILANG_CODE}/train $nnetinitdir || exit 1;
else
$cuda_cmd $nnetinitdir/log/pretrain_dbn.log \
steps/nnet/pretrain_dbn.sh --nn-depth $hid_layers --hid-dim $hid_dim \
--splice $splice --splice-step $splice_step \
--cmvn-opts "--norm-means=true --norm-vars=true" \
--delta-opts "--delta-order=2" --splice 5 \
--rbm-iter 20 $data_fmllr/${UNILANG_CODE}/train $nnetinitdir || exit 1;
fi
fi
fi
fi
dir=$nnetoutdir
if [ $stage -le 3 ]; then
# Train the DNN optimizing per-frame cross-entropy.
ali=$alidir
feature_transform=
(tail --pid=$$ -F $dir/log/train_nnet.log 2>/dev/null)& # forward log
# Train
if $train_dbn; then
# Initialize NN training with a DBN
echo "using DBN to start DNN training"
dbn=${nnetinitdir}/${hid_layers}.dbn
$cuda_cmd $dir/log/train_nnet.log \
steps/nnet/train.sh --dbn $dbn --hid-layers 0 \
--cmvn-opts "--norm-means=true --norm-vars=true" \
--delta-opts "--delta-order=2" --splice $splice --splice-step $splice_step \
--learn-rate 0.008 \
$data_fmllr/${UNILANG_CODE}/train_tr90 $data_fmllr/${UNILANG_CODE}/train_cv10 data/${UNILANG_CODE}/lang $ali $ali $dir || exit 1;
# Train nnet from scratch if BN-DBN does not train properly.
#steps/nnet/train.sh --hid-layers $hid_layers --hid-dim 1024 --bn-dim $bn_dim \
#--cmvn-opts "--norm-means=true --norm-vars=true" \
#--delta-opts "--delta-order=2" --splice 5 \
#--learn-rate 0.008 \
#$data_fmllr/${UNILANG_CODE}/train_tr90 $data_fmllr/${UNILANG_CODE}/train_cv10 data/${UNILANG_CODE}/lang $ali $ali $dir || exit 1;
else
if [[ -f ${precomp_dnn} ]]; then
echo "using pre-computed dnn ${precomp_dnn} to start DNN training"
# replace the softmax layer of the precomp dnn with a random init layer
[[ ! -d $nnetinitdir ]] && mkdir -p $nnetinitdir
nnet_init=$nnetinitdir/nnet.init
perl local/utils/nnet/renew_nnet_softmax.sh $gmmdir/final.mdl ${precomp_dnn} ${nnet_init}
    # Keep the lines below commented out if you want the feature transform estimation to be computed from the adaptation data (SBS)
#cp $(dirname ${precomp_dnn})/final.feature_transform $dir
#feature_transform=$dir/final.feature_transform
#feature_transform_opt=$(echo "--feature-transform $feature_transform")
$cuda_cmd $dir/log/train_nnet.log \
steps/nnet/train.sh --nnet-init ${nnet_init} --hid-layers 0 \
--cmvn-opts "--norm-means=true --norm-vars=true" \
--delta-opts "--delta-order=2" --splice $splice --splice-step $splice_step \
--learn-rate 0.008 \
$data_fmllr/${UNILANG_CODE}/train_tr90 $data_fmllr/${UNILANG_CODE}/train_cv10 data/${UNILANG_CODE}/lang $ali $ali $dir || exit 1;
else
echo "using a randomly initialized DNN to start DNN training"
$cuda_cmd $dir/log/train_nnet.log \
steps/nnet/train.sh --hid-layers $hid_layers --hid-dim $hid_dim \
${bn_dim:+ --bn-dim $bn_dim} \
--cmvn-opts "--norm-means=true --norm-vars=true" \
--delta-opts "--delta-order=2" --splice $splice --splice-step $splice_step \
--learn-rate 0.008 \
$data_fmllr/${UNILANG_CODE}/train_tr90 $data_fmllr/${UNILANG_CODE}/train_cv10 data/${UNILANG_CODE}/lang $ali $ali $dir || exit 1;
fi
fi
fi
if [ $stage -le 4 ]; then
if ! $skip_decode; then
# Nnet decode:
exp_dir=$gmmdir
#for L in ${TRAIN_LANG} ${TEST_LANG}; do
for L in ${TEST_LANG}; do
echo "Decoding $L"
graph_dir=${exp_dir}/graph_text_G_$L
[[ -d $graph_dir ]] || { mkdir -p $graph_dir; utils/mkgraph.sh data/$L/lang_test_text_G $exp_dir $graph_dir || exit 1; }
(steps/nnet/decode.sh --nj 4 --cmd "$decode_cmd" --config conf/decode_dnn.config --acwt 0.2 \
$graph_dir $data_fmllr/$L/dev $dir/decode_dev_text_G_$L || exit 1;) &
(steps/nnet/decode.sh --nj 4 --cmd "$decode_cmd" --config conf/decode_dnn.config --acwt 0.2 \
$graph_dir $data_fmllr/$L/eval $dir/decode_eval_text_G_$L || exit 1;) &
(cd $dir; ln -s decode_dev_text_G_$L decode_dev_$L; ln -s decode_eval_text_G_$L decode_eval_$L)
done
fi
fi
echo "Done: `date`"
exit 0;
# Getting results [see RESULTS file]
# for x in exp/*/decode*; do [ -d $x ] && grep WER $x/wer_* | utils/best_wer.sh; done
| irrawaddy28/SBS-mul | run_dnn_adapt_to_multi_dt.sh | Shell | apache-2.0 | 9,024 |
#!/bin/bash
#
# -- Run as: ./run_target_range_test_evenness_scaling.sh 1> test_output/stdout.txt 2> test_output/stderr.txt
#
errecho(){ >&2 echo $@; }
# Declare test ranges:
declare -a range_targets=("../models/ico_sphere/ico_sphere.obj" "../models/dome/dome_c.obj" "../models/dome/ico_2.obj") #("1.25") #$(seq 3 90) #e.g. ("44" "45")
# Declare test output location:
result_set="test_output/Target_Results_RangeTest_"$(date -d "today" +"%Y-%m-%d_%H")"-00"
relative_result_path_from_run="exp/Evenness_Scaling/"
# Specify properties file location(s).
default_properties_filename="../../properties/default.properties"
pre_default_properties_filename=$default_properties_filename".pre-"${result_set//\//-}
post_default_properties_filename=$default_properties_filename".post-"${result_set//\//-}
function pre_test_file_organisation(){
# Prep File Organisation & properties file backup:
mkdir -p $result_set
cp -n $default_properties_filename $pre_default_properties_filename
}
function post_test_file_organisation(){
# Backup the finished properties file into $post_default_properties_filename. (ensure there's a backup)
cp -b $default_properties_filename $post_default_properties_filename
# Restore the original properties file: (force)
cp -f $pre_default_properties_filename $default_properties_filename
}
function update_properties_file_output_directory(){
# Update the run.py evaluation results directory, by modifying the default.properties file:
python helpers/update_default-properties_file.py "DO_OVERWRITE" $default_properties_filename "results_file.results_output_file_path_prefix=" "$relative_result_path_from_run""$result_set""/"
}
function run_range_test(){
for n in "${range_targets[@]}"
do
./run_target_evenness_scaling_trial.sh $n
if [ $? -ne 0 ]; then
errecho ""; errecho " -------------- "
errecho "run_target_evenness_scaling_trial.sh failed."
errecho "Exiting."
exit 1;
fi
done
}
function main(){
pre_test_file_organisation
update_properties_file_output_directory
run_range_test
post_test_file_organisation
}
main
| LightStage-Aber/LightStage-Repo | exp/Evenness_Scaling/run_target_range_test_evenness_scaling.sh | Shell | apache-2.0 | 2,178 |
#!/usr/bin/env bash
DIR=$(cd "$(dirname "${BASH_SOURCE[0]}" )" && pwd )
PATH=$DIR/../../dist/bin:$DIR/../../bin:$PATH
if [ x$TARGET == x ]; then
case "$OSTYPE" in
darwin*) TARGET=macbook ;;
linux*) TARGET=linux ;;
*) echo "unknown: $OSTYPE" && exit 1;;
esac
fi
var=CFLAGS_${TARGET}
CFLAGS=${!var}
var=COMPILER_ARGS_${TARGET}
COMPILER_ARGS=${!var} # add -opt for an optimized build.
mkdir -p $DIR/build/c_interop/
mkdir -p $DIR/build/bin/
cinterop -def $DIR/src/main/c_interop/sockets.def -copt "$CFLAGS" -target $TARGET \
-o $DIR/build/c_interop/sockets || exit 1
konanc $COMPILER_ARGS -target $TARGET $DIR/src/main/kotlin/EchoServer.kt \
-library $DIR/build/c_interop/sockets \
-o $DIR/build/bin/EchoServer || exit 1
echo "Artifact path is $DIR/build/bin/EchoServer.kexe"
| jiaminglu/kotlin-native | samples/socket/build.sh | Shell | apache-2.0 | 812 |
#!/bin/bash
# A container based OpenWRT image builder.
#
# https://github.com/jandelgado/lede-dockerbuilder
#
# (c) Jan Delgado 2017-2021
set -euo pipefail
# base Tag to use for docker imag
IMAGE_TAG=openwrt-imagebuilder
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# may be overridden in the config file
OUTPUT_DIR=$SCRIPT_DIR/output
ROOTFS_OVERLAY=$SCRIPT_DIR/rootfs-overlay
LEDE_DISABLED_SERVICES=
REPOSITORIES_CONF=
PROG=$0
function usage {
cat<<EOT
Dockerized LEDE/OpenWRT image builder.
Usage: $1 COMMAND CONFIGFILE [OPTIONS]
COMMAND is one of:
build-docker-image - build the docker image (run once first)
profiles - start container and show avail profiles for current configuration
build - start container and build the LEDE/OpenWRT image
shell - start shell in docker container
CONFIGFILE - configuraton file to use
OPTIONS:
-o OUTPUT_DIR - output directory (default $OUTPUT_DIR)
--docker-opts OPTS - additional options to pass to docker run
(can occur multiple times)
-f ROOTFS_OVERLAY - rootfs-overlay directory (default $ROOTFS_OVERLAY)
--skip-sudo - call docker directly, without sudo
--dockerless - use podman and buildah instead of docker daemon
command line options -o, -f override config file settings.
Example:
# build the builder docker image first
$PROG build-docker-image example.conf
# now build the OpenWrt image
$PROG build example.conf -o output -f myrootfs
# show available profiles
$PROG profiles example.conf
# mount downloads to host directory during build
$PROG build example-nexx-wt3020.conf --docker-opts "-v=\$(pwd)/dl:/lede/imagebuilder/dl:z"
EOT
}
# return given file path as absolute path. path to file must exist.
function abspath {
echo "$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
}
# build container and pass in the actual builder to use
function build_docker_image {
echo "building docker image $IMAGE_TAG ..."
# shellcheck disable=2086
$SUDO $DOCKER_BUILD\
--build-arg BUILDER_URL="$LEDE_BUILDER_URL" -t "$IMAGE_TAG" docker
}
function run_cmd_in_container {
local docker_term_opts="-ti"
[ ! -t 0 ] && docker_term_opts="-i"
if [ -n "$REPOSITORIES_CONF" ]; then
conf="$(abspath "$REPOSITORIES_CONF")"
repositories_volume=(-v "$conf":/lede/imagebuilder/repositories.conf:z)
else
repositories_volume=()
fi
# shellcheck disable=SC2068 disable=SC2086
$SUDO $DOCKER_RUN\
--rm\
$docker_term_opts \
-v "$(abspath "$ROOTFS_OVERLAY")":/lede/rootfs-overlay:z \
-v "$(abspath "$OUTPUT_DIR")":/lede/output:z \
"${repositories_volume[@]}" \
${DOCKER_OPTS[@]} \
--rm "$IMAGE_TAG" "$@"
}
# run the builder in the container.
function build_lede_image {
echo "building image for $LEDE_PROFILE ..."
run_cmd_in_container make image PROFILE="$LEDE_PROFILE" \
PACKAGES="$LEDE_PACKAGES" \
DISABLED_SERVICES="$LEDE_DISABLED_SERVICES" \
FILES="/lede/rootfs-overlay" \
BIN_DIR="/lede/output"
}
# show available profiles
function show_profiles {
run_cmd_in_container make info
}
# run a shell in the container, useful for debugging.
function run_shell {
run_cmd_in_container bash
}
# print message and exit
function fail {
echo "ERROR: $*" >&2
exit 1
}
if [ $# -lt 2 ]; then
usage "$0"
exit 1
fi
COMMAND=$1; shift
CONFIG_FILE=$1; shift
# default: use docker
SUDO=sudo
DOCKER_BUILD="docker build"
DOCKER_RUN="docker run -e GOSU_UID=$(id -ur) -e GOSU_GID=$(id -g)"
DOCKER_OPTS=()
# pull in the config file, making $BASEDIR_CONFIG_FILE available inside it
[ ! -f "$CONFIG_FILE" ] && fail "can not open $CONFIG_FILE"
# shellcheck disable=SC2034
BASEDIR_CONFIG_FILE=$( cd "$( dirname "$CONFIG_FILE" )" && pwd )
eval "$(cat "$CONFIG_FILE")"
# on macOS, skip sudo
if [ "$(uname)" == "Darwin" ]; then
SUDO=""
fi
# parse cli args, can override config file params
while [[ $# -ge 1 ]]; do
key="$1"
case $key in
-f)
ROOTFS_OVERLAY="$2"; shift ;;
-o)
OUTPUT_DIR="$2"; shift ;;
--skip-sudo)
SUDO="" ;;
--docker-opts)
DOCKER_OPTS+=("$2"); shift ;;
--dockerless)
SUDO=""
DOCKER_BUILD="buildah bud --layers=true"
DOCKER_RUN="podman run" ;;
*)
fail "invalid option: $key";;
esac
shift
done
mkdir -p "$OUTPUT_DIR"
[ ! -d "$OUTPUT_DIR" ] && fail "output-dir: no such directory $OUTPUT_DIR"
[ ! -d "$ROOTFS_OVERLAY" ] && fail "rootfs-overlay: no such directory $ROOTFS_OVERLAY"
# set default LEDE_BUILDER_URL if not overridden in the configuration file
if [ -z "${LEDE_BUILDER_URL+x}" ]; then
LEDE_BUILDER_URL="https://downloads.openwrt.org/releases/$LEDE_RELEASE/targets/$LEDE_TARGET/$LEDE_SUBTARGET/openwrt-imagebuilder-$LEDE_RELEASE-$LEDE_TARGET-$LEDE_SUBTARGET.Linux-x86_64.tar.xz"
fi
IMAGE_TAG=$IMAGE_TAG:$LEDE_RELEASE-$LEDE_TARGET-$LEDE_SUBTARGET
function print_config {
cat<<EOT
--- configuration ------------------------------
RELEASE...........: $LEDE_RELEASE
TARGET............: $LEDE_TARGET
SUBTARGET.........: $LEDE_SUBTARGET
PROFILE...........: $LEDE_PROFILE
BUILDER_URL.......: $LEDE_BUILDER_URL
DOCKER_IMAGE_TAG..: $IMAGE_TAG
OUTPUT_DIR........: $OUTPUT_DIR
ROOTFS_OVERLAY....: $ROOTFS_OVERLAY
DISABLED_SERVICES.: $LEDE_DISABLED_SERVICES
REPOSITORIES_CONF.: $REPOSITORIES_CONF
CONTAINER ENGINE..: $(echo "$DOCKER_RUN" | cut -d " " -f1)
------------------------------------------------
EOT
}
case $COMMAND in
build)
print_config
build_lede_image ;;
build-docker-image)
print_config
build_docker_image ;;
profiles)
show_profiles ;;
shell)
print_config
run_shell ;;
*)
usage "$0"
exit 0 ;;
esac
| jandelgado/lede-dockerbuilder | builder.sh | Shell | apache-2.0 | 5,964 |
#!/bin/sh
# Build the C100 compiler with Moscow ML; stop on the first error.
set -e
mosmlc -c Mips.sml
mosmlc -c RegAlloc.sig RegAlloc.sml
mosmlc -c S100.sml
# Generate and compile the parser from the grammar, then the lexer.
mosmlyac -v Parser.grm
mosmlc -c Parser.sig Parser.sml
mosmllex Lexer.lex
mosmlc -c Lexer.sml
mosmlc -c Type.sig Type.sml
mosmlc -c Compiler.sig Compiler.sml
# Link everything into the C100 executable.
mosmlc -o C100 C100.sml
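# Hypothetical invocation of the resulting compiler binary (the exact CLI is
# not shown in this snippet):
#   ./C100 program.s100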
| martinnj/100Compiler | CODE/compile.sh | Shell | apache-2.0 | 256 |
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# shellcheck source=scripts/ci/libraries/_script_init.sh
. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
# Builds or waits for the CI image in the CI environment,
# depending on the "GITHUB_REGISTRY_WAIT_FOR_IMAGE" setting
function build_ci_image_on_ci() {
build_images::prepare_ci_build
start_end::group_start "Prepare CI image ${AIRFLOW_CI_IMAGE}"
rm -rf "${BUILD_CACHE_DIR}"
mkdir -pv "${BUILD_CACHE_DIR}"
if [[ ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then
        # Pretend that the image was built. We already have an image with the right sources baked in,
        # so all the checksums are assumed to be correct.
md5sum::calculate_md5sum_for_all_files
        # Remove me on 15th of August 2021, after all users have had a chance to rebase
legacy_ci_image="ghcr.io/${GITHUB_REPOSITORY}-${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci-v2:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
build_images::wait_for_image_tag "${AIRFLOW_CI_IMAGE}" ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${legacy_ci_image}"
md5sum::update_all_md5_with_group
else
build_images::rebuild_ci_image_if_needed
fi
    # Disable the force pulling forced above. This is needed for the subsequent scripts so that
    # they do not try to pull/build images again.
unset FORCE_PULL_IMAGES
unset FORCE_BUILD
# Skip the image check entirely for the rest of the script
export CHECK_IMAGE_FOR_REBUILD="false"
start_end::group_end
}
build_ci_image_on_ci
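# Hypothetical CI invocation (variable values are illustrative only):
#   GITHUB_REGISTRY_WAIT_FOR_IMAGE="true" \
#   GITHUB_REGISTRY_PULL_IMAGE_TAG="${GITHUB_RUN_ID}" \
#       ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh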
| dhuang/incubator-airflow | scripts/ci/images/ci_prepare_ci_image_on_ci.sh | Shell | apache-2.0 | 2,315 |
#!/bin/bash
# Copyright 2015 Backstop Solutions Group, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# =======================================================================
#
# This stashes local unstaged changes so the later checks don't look at
# uncommitted code
if [ -e .git/MERGE_HEAD ]; then
echo "Not stashing during a merge"
exit 0
fi
RED='\033[0;31m'
ORANGE='\033[0;33m'
NC='\033[0m'
GIT_UNSTAGED_FILES=$(git diff --name-only)
GIT_STAGED_FILES=$(git diff --cached --name-only)
ERROR=0
# if there are both staged and unstaged files, flag any file that appears in both lists
if [ -n "$GIT_STAGED_FILES" ] && [ -n "$GIT_UNSTAGED_FILES" ];
then
for staged_file in $GIT_STAGED_FILES
do
for unstaged_file in $GIT_UNSTAGED_FILES
do
if [ "$staged_file" = "$unstaged_file" ];
then
echo -e "${RED}You should probably add ${ORANGE}${staged_file}${NC}${RED}, you have staged and unstaged changes.${NC}"
ERROR=1
fi
done
done
    if [ "$ERROR" -eq 1 ]; # short-circuit before stashing
then
exit 1
fi
fi
GIT_DIFF_COUNT=$(git diff --cached --name-only | wc -l)
if [ "$GIT_DIFF_COUNT" -eq 0 ] ; then
echo "No changes, not stashing."
exit 0
fi
git stash -q --keep-index
ret=$?
if [ "$ret" -ne 0 ] ; then
echo "Stash failed... Uh oh. Returned ${ret}"
ERROR=$ret
fi
exit $ERROR
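# Note: the stash created above is left in place for later hook stages; a
# matching cleanup step (hypothetical sketch, not part of this file) could
# restore the working tree once all checks have run:
#
#   #!/bin/bash
#   # zz_unstash-things.sh - run as the final pre-commit step
#   git stash pop -q 2>/dev/null || true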
| BillWeiss/2015PuppetCampMaterials | precommit/00_stash-things.sh | Shell | apache-2.0 | 1,848 |
#!/usr/bin/env bash
"$SPARK_HOME/bin/spark-submit" \
--class gloving.Evaluate \
--name "gloving-evaluate" \
--master "local[*]" \
--driver-memory ${DRIVER_MEMORY:-8G} \
./target/scala-2.10/gloving-assembly-0.1.0-SNAPSHOT.jar \
"$@"
| anelson/gloving | bin/evaluate.sh | Shell | apache-2.0 | 237 |