code (stringlengths 3–1.05M) | repo_name (stringlengths 4–116) | path (stringlengths 3–942) | language (stringclasses, 30 values) | license (stringclasses, 15 values) | size (int32, 3–1.05M)
---|---|---|---|---|---|
/* ######################## Shared ######################## */
* { box-sizing: border-box; }
body {
background-color: #D4D4D4;
padding: 0px;
margin: 0px;
font-family: 'Rubik', serif;
}
section {
float: left;
width: 100%;
min-height: 100vh;
padding: 40px 80px;
}
section div {
max-width: 800px;
margin: auto;
}
p, #essay p, #classes p {
font-family: 'Roboto Mono', serif;
font-size: 1.3em;
}
li {
color: #D4D4D4;
line-height: 1.4em;
font-style: italic;
padding-right: 30px;
}
h1, h2, h3, h4, h5, h6 { /*reset for mobile browsers */
font-weight: normal;
}
h2 {
color: #ff431d;
font-size: 3em;
}
a {
color: indianred;
text-decoration: none;
font-weight: bold;
}
a:hover {
color: #ff431d;
}
/*######################## hero ########################*/
#hero h1 {
color: #ff431d;
font-weight: 800;
font-size: 5em;
font-family: "Rubik", sans-serif;
margin-bottom: 0px;
font-style: italic;
}
h3 {
font-size: 2em;
font-family: "Roboto Mono", sans-serif;
}
#hero a {
text-transform: uppercase;
}
/*######################## footer ########################*/
#colophon {
background-color: #7b868e;
padding-top: 10vh;
text-align: center;
border-top: 1px solid #333;
}
#colophon div p {
font-size: .9em;
color: #333;
margin: 20px 40px;
}
#colophon a {
text-decoration: none;
color: #1d6496;
}
#colophon h2 {
color: #D4D4D4;
font-size: 1.3em;
border-bottom: 1px solid #D4D4D4;
line-height: 200%;
margin:5px 20px;
}
/* ######################## Mobile ######################## */
@media screen and (max-width: 800px) {
* {
box-sizing: border-box;
}
body {
margin: 0px;
padding: 0px;
font-size: 20px;
}
#hero h1 {
font-size: 2.5em;
}
#hero h3, h3 {
font-size: 1em;
}
p {
font-size: .9em;
}
}
| nimwunnan/asphorisms | style.css | CSS | artistic-2.0 | 2,189 |
/*******************************************************************************
* Copyright 2011, 2012 Chris Banes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package uk.co.senab.photoview;
import uk.co.senab.photoview.PhotoViewAttacher.OnMatrixChangedListener;
import uk.co.senab.photoview.PhotoViewAttacher.OnPhotoTapListener;
import uk.co.senab.photoview.PhotoViewAttacher.OnViewTapListener;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.util.AttributeSet;
import android.widget.ImageView;
public class PhotoView extends ImageView implements IPhotoView {
private final PhotoViewAttacher mAttacher;
private ScaleType mPendingScaleType;
public PhotoView(Context context) {
this(context, null);
}
public PhotoView(Context context, AttributeSet attr) {
this(context, attr, 0);
}
public PhotoView(Context context, AttributeSet attr, int defStyle) {
super(context, attr, defStyle);
super.setScaleType(ScaleType.MATRIX);
mAttacher = new PhotoViewAttacher(this);
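// An XML-defined scaleType may already have been routed through our setScaleType()
// override by the ImageView super constructor, before mAttacher existed; apply any
// value that was stored in mPendingScaleType now.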
if (null != mPendingScaleType) {
setScaleType(mPendingScaleType);
mPendingScaleType = null;
}
}
@Override
public void setPhotoViewRotation(float rotationDegree) {
mAttacher.setPhotoViewRotation(rotationDegree);
}
@Override
public boolean canZoom() {
return mAttacher.canZoom();
}
@Override
public RectF getDisplayRect() {
return mAttacher.getDisplayRect();
}
@Override
public Matrix getDisplayMatrix() {
return mAttacher.getDrawMatrix();
}
@Override
public boolean setDisplayMatrix(Matrix finalRectangle) {
return mAttacher.setDisplayMatrix(finalRectangle);
}
@Override
@Deprecated
public float getMinScale() {
return getMinimumScale();
}
@Override
public float getMinimumScale() {
return mAttacher.getMinimumScale();
}
@Override
@Deprecated
public float getMidScale() {
return getMediumScale();
}
@Override
public float getMediumScale() {
return mAttacher.getMediumScale();
}
@Override
@Deprecated
public float getMaxScale() {
return getMaximumScale();
}
@Override
public float getMaximumScale() {
return mAttacher.getMaximumScale();
}
@Override
public float getScale() {
return mAttacher.getScale();
}
@Override
public ScaleType getScaleType() {
return mAttacher.getScaleType();
}
@Override
public void setAllowParentInterceptOnEdge(boolean allow) {
mAttacher.setAllowParentInterceptOnEdge(allow);
}
@Override
@Deprecated
public void setMinScale(float minScale) {
setMinimumScale(minScale);
}
@Override
public void setMinimumScale(float minimumScale) {
mAttacher.setMinimumScale(minimumScale);
}
@Override
@Deprecated
public void setMidScale(float midScale) {
setMediumScale(midScale);
}
@Override
public void setMediumScale(float mediumScale) {
mAttacher.setMediumScale(mediumScale);
}
@Override
@Deprecated
public void setMaxScale(float maxScale) {
setMaximumScale(maxScale);
}
@Override
public void setMaximumScale(float maximumScale) {
mAttacher.setMaximumScale(maximumScale);
}
@Override
// setImageBitmap calls through to this method
public void setImageDrawable(Drawable drawable) {
super.setImageDrawable(drawable);
if (null != mAttacher) {
mAttacher.update();
}
}
@Override
public void setImageResource(int resId) {
super.setImageResource(resId);
if (null != mAttacher) {
mAttacher.update();
}
}
@Override
public void setImageURI(Uri uri) {
super.setImageURI(uri);
if (null != mAttacher) {
mAttacher.update();
}
}
@Override
public void setOnMatrixChangeListener(OnMatrixChangedListener listener) {
mAttacher.setOnMatrixChangeListener(listener);
}
@Override
public void setOnLongClickListener(OnLongClickListener l) {
mAttacher.setOnLongClickListener(l);
}
@Override
public void setOnPhotoTapListener(OnPhotoTapListener listener) {
mAttacher.setOnPhotoTapListener(listener);
}
@Override
public OnPhotoTapListener getOnPhotoTapListener() {
return mAttacher.getOnPhotoTapListener();
}
@Override
public void setOnViewTapListener(OnViewTapListener listener) {
mAttacher.setOnViewTapListener(listener);
}
@Override
public OnViewTapListener getOnViewTapListener() {
return mAttacher.getOnViewTapListener();
}
@Override
public void setScale(float scale) {
mAttacher.setScale(scale);
}
@Override
public void setScale(float scale, boolean animate) {
mAttacher.setScale(scale, animate);
}
@Override
public void setScale(float scale, float focalX, float focalY, boolean animate) {
mAttacher.setScale(scale, focalX, focalY, animate);
}
@Override
public void setScaleType(ScaleType scaleType) {
if (null != mAttacher) {
mAttacher.setScaleType(scaleType);
} else {
mPendingScaleType = scaleType;
}
}
@Override
public void setZoomable(boolean zoomable) {
mAttacher.setZoomable(zoomable);
}
@Override
public Bitmap getVisibleRectangleBitmap() {
return mAttacher.getVisibleRectangleBitmap();
}
@Override
public void setZoomTransitionDuration(int milliseconds) {
mAttacher.setZoomTransitionDuration(milliseconds);
}
@Override
protected void onDetachedFromWindow() {
mAttacher.cleanup();
super.onDetachedFromWindow();
}
} | cocolove2/LISDemo | library-lis/src/main/java/uk/co/senab/photoview/PhotoView.java | Java | artistic-2.0 | 6,686 |
package Mojolicious::Plugins;
use Mojo::Base 'Mojo::EventEmitter';
use Mojo::Loader;
use Mojo::Util 'camelize';
has namespaces => sub { ['Mojolicious::Plugin'] };
sub emit_hook {
my $self = shift;
for my $cb (@{$self->subscribers(shift)}) { $cb->(@_) }
return $self;
}
sub emit_chain {
my ($self, $name, @args) = @_;
my $wrapper;
for my $cb (reverse @{$self->subscribers($name)}) {
my $next = $wrapper;
$wrapper = sub { $cb->($next, @args) };
}
!$wrapper ? return : return $wrapper->();
}
sub emit_hook_reverse {
my $self = shift;
for my $cb (reverse @{$self->subscribers(shift)}) { $cb->(@_) }
return $self;
}
sub load_plugin {
my ($self, $name) = @_;
# Try all namespaces and full module name
my $suffix = $name =~ /^[a-z]/ ? camelize($name) : $name;
my @classes = map {"${_}::$suffix"} @{$self->namespaces};
for my $class (@classes, $name) { return $class->new if _load($class) }
# Not found
die qq{Plugin "$name" missing, maybe you need to install it?\n};
}
sub register_plugin {
shift->load_plugin(shift)->register(shift, ref $_[0] ? $_[0] : {@_});
}
sub _load {
my $module = shift;
return $module->isa('Mojolicious::Plugin')
unless my $e = Mojo::Loader->new->load($module);
ref $e ? die $e : return undef;
}
1;
=encoding utf8
=head1 NAME
Mojolicious::Plugins - Plugin manager
=head1 SYNOPSIS
use Mojolicious::Plugins;
my $plugins = Mojolicious::Plugins->new;
push @{$plugins->namespaces}, 'MyApp::Plugin';
=head1 DESCRIPTION
L<Mojolicious::Plugins> is the plugin manager of L<Mojolicious>.
=head1 PLUGINS
The following plugins are included in the L<Mojolicious> distribution as
examples.
=over 2
=item L<Mojolicious::Plugin::Charset>
Change the application charset.
=item L<Mojolicious::Plugin::Config>
Perl-ish configuration files.
=item L<Mojolicious::Plugin::DefaultHelpers>
General purpose helper collection, loaded automatically.
=item L<Mojolicious::Plugin::EPLRenderer>
Renderer for plain embedded Perl templates, loaded automatically.
=item L<Mojolicious::Plugin::EPRenderer>
Renderer for more sophisticated embedded Perl templates, loaded automatically.
=item L<Mojolicious::Plugin::HeaderCondition>
Route condition for all kinds of headers, loaded automatically.
=item L<Mojolicious::Plugin::JSONConfig>
JSON configuration files.
=item L<Mojolicious::Plugin::Mount>
Mount whole L<Mojolicious> applications.
=item L<Mojolicious::Plugin::PODRenderer>
Renderer for turning POD into HTML and documentation browser for
L<Mojolicious::Guides>.
=item L<Mojolicious::Plugin::TagHelpers>
Template specific helper collection, loaded automatically.
=back
=head1 EVENTS
L<Mojolicious::Plugins> inherits all events from L<Mojo::EventEmitter>.
=head1 ATTRIBUTES
L<Mojolicious::Plugins> implements the following attributes.
=head2 namespaces
my $namespaces = $plugins->namespaces;
$plugins = $plugins->namespaces(['Mojolicious::Plugin']);
Namespaces to load plugins from, defaults to L<Mojolicious::Plugin>.
# Add another namespace to load plugins from
push @{$plugins->namespaces}, 'MyApp::Plugin';
=head1 METHODS
L<Mojolicious::Plugins> inherits all methods from L<Mojo::EventEmitter> and
implements the following new ones.
=head2 emit_chain
$plugins->emit_chain('foo');
$plugins->emit_chain(foo => 123);
Emit events as chained hooks.
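Each subscriber is passed a code reference that invokes the next subscriber in
the chain, followed by the hook arguments, so a chained subscriber might look
like this (an illustrative sketch, not part of the original documentation):
  $plugins->on(foo => sub {
    my ($next, @args) = @_;
    # work that should happen before the rest of the chain
    $next->() if $next;
  });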
=head2 emit_hook
$plugins = $plugins->emit_hook('foo');
$plugins = $plugins->emit_hook(foo => 123);
Emit events as hooks.
=head2 emit_hook_reverse
$plugins = $plugins->emit_hook_reverse('foo');
$plugins = $plugins->emit_hook_reverse(foo => 123);
Emit events as hooks in reverse order.
=head2 load_plugin
my $plugin = $plugins->load_plugin('some_thing');
my $plugin = $plugins->load_plugin('SomeThing');
my $plugin = $plugins->load_plugin('MyApp::Plugin::SomeThing');
Load a plugin from the configured namespaces or by full module name.
=head2 register_plugin
$plugins->register_plugin('some_thing', Mojolicious->new);
$plugins->register_plugin('some_thing', Mojolicious->new, foo => 23);
$plugins->register_plugin('some_thing', Mojolicious->new, {foo => 23});
$plugins->register_plugin('SomeThing', Mojolicious->new);
$plugins->register_plugin('SomeThing', Mojolicious->new, foo => 23);
$plugins->register_plugin('SomeThing', Mojolicious->new, {foo => 23});
$plugins->register_plugin('MyApp::Plugin::SomeThing', Mojolicious->new);
$plugins->register_plugin(
'MyApp::Plugin::SomeThing', Mojolicious->new, foo => 23);
$plugins->register_plugin(
'MyApp::Plugin::SomeThing', Mojolicious->new, {foo => 23});
Load a plugin from the configured namespaces or by full module name and run
C<register>, optional arguments are passed through.
=head1 SEE ALSO
L<Mojolicious>, L<Mojolicious::Guides>, L<http://mojolicio.us>.
=cut
| gitpan/Mojolicious | lib/Mojolicious/Plugins.pm | Perl | artistic-2.0 | 4,829 |
# Crafting
- Rethink of Crafting Ideas....
- It takes a village to craft an item.
- Crafting will no longer be something every player has several types of on each character – making alts to max it out on each.
- Crafting will be done at a guild / tribe level.
- Players provide mats to NPCs who can craft items for them.
- Crafted items take a non-trivial amount of time to produce.
- Making 20 of an item and vending it is the opposite of what we want.
- Recipes will be very hard to come by.
- Crafted items will rely on 'tech level' and factors like which materials are available and who can 'remember' how to 'card wool' / 'spin yarn' etc.
## Resources
- Basic resource nodes should be plentiful and respawn at frequent intervals.
- It should be enjoyable to harvest them.
- A freshly made char should be able to access all areas and harvest all nodes.
- Your harvesting skill should in part determine what you harvest e.g. lvl 1 gets only white resources, lvl 2 some white occasional greens, lvl 3 greens mostly some blues, etc
- Short-term buffs ala Tera
- Harvest time of day, cycle of moon and other factors affect what you are harvesting to some degree
## Crafting
- Only the most base recipes are learnt at first
- You don't need to craft things to lvl up i.e. 10 x robe of vending to prevent vast waste of mats and time / money
- Recipes are learnt from a trainer or another player...you trade money and potentially mats / etc. in order to learn the recipe. You can teach that recipe to others once you've made it x number of times. The further down the chain of learning you are, the more times you need to make it to perfect the recipe. You cannot charge any more than half of what you paid to learn the recipe when teaching it to others – making a shiny pyramid (a worked example follows this list).
- A recipe can be taught no end of times – the pyramid can be very wide, but limited in depth eventually by value recouped on teaching it – and selling items from it
- Recipes are viral – and very very rarely ever drop making them one of the most important resources on a server to control.
- City armour, and armour specific to regions / bosses
- New recipes can be discovered or possibly formulated e.g. new armour created based on a piece of armour, a bunch of mats, gems, etc. This might be the same model as the base armour or one of many specialisations e.g. more ornate detail on the front, a fur collar, a special effect (maybe)
- Craft mats can be used from any storage you have, starting with your bags, then your bank
- Consider crafting failure, creating a lesser item
- You can learn all the crafts and all harvesting
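A worked example of the teaching pyramid (illustrative numbers only): if you paid 100 gold to learn a recipe, you can charge each of your students at most 50 gold, they can charge at most 25 gold, and so on. Each layer of the pyramid earns less per student, but the pyramid can grow wider at every level.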
## Harvesting
Nodes will be of a few basic types.
- Crystals
- Ore
- Plants
- Leather drops from animals / teeth / claws / sinew / etc
- Wood – or buy this from lumber mills?
- Highly rare mats like oil, sulfur, guano, unguents...bark, mushrooms, moss, etc? Weird spawn times, midnight for some items, blood moon, waxing moon...
What you harvest will be limited by both the quality of the node and your resource skill. Spread nodes of all quality over all the regions...I want regions to feel free, so you can herb in your favorite zone...you might not get mushrooms in a desert, but you shouldn't get only lvl 15 stuff because you don't like the lvl 60 regions.
## Mob Drops
Bosses and mobs don't drop armour, at least not anything worth wearing, though some might be good for prototyping other armour on...white lvl armour with a good look.
Drops will usually be rare mats for crafting, gold, potions / consumables, and very, very rarely a recipe.
## Customisation
Armour can be re-worked to look like any other armour in its class, i.e. leather made to look like other leather.
Armour can be customised with colour
- Enchants. Just another layer of bullshit on top? Every mage ended up with basically the same enchants, same with DPS warriors and prot warriors.
- Gems. Same shit as enchants or worth thinking over?
- Reforge. Same shit as enchants?
- 3 different systems to tweak your armour – seems better to handle via one system that is far more flexible – but give them a small ability to tweak it. These seem more like cheap ways to drain cash out of the economy...think of a better way.
| ivanhawkes/Chrysalis | _wiki/design/crafting.md | Markdown | bsd-2-clause | 4,292 |
"""
WSGI config for brp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from dj_static import Cling
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "brp.settings")
os.environ.setdefault('DJANGO_CONFIGURATION', 'Dev')
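# Cling (from dj-static) wraps the WSGI application so that it can serve its
# own static files without a separate web server in front of it.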
application = Cling(get_wsgi_application())
| chop-dbhi/biorepo-portal | brp/wsgi.py | Python | bsd-2-clause | 471 |
class Imagemagick < Formula
desc "Tools and libraries to manipulate images in many formats"
homepage "https://www.imagemagick.org/"
# Please always keep the Homebrew mirror as the primary URL as the
# ImageMagick site removes tarballs regularly which means we get issues
# unnecessarily and older versions of the formula are broken.
url "https://dl.bintray.com/homebrew/mirror/imagemagick-7.0.7-7.tar.xz"
mirror "https://www.imagemagick.org/download/ImageMagick-7.0.7-7.tar.xz"
sha256 "01b7171b784fdc689697fa35fbfa70e716a9e1608d45f81015ba78f6583f1dcf"
head "https://github.com/ImageMagick/ImageMagick.git"
bottle do
sha256 "7ba7870bae8a8b3236240cbbb6f9fde5598d78dd567c936857dd1470f7e3c7af" => :high_sierra
sha256 "7fe3ef5dc80215fb9050f441d1d00191233b90da646b3eb60f0295f00cf38169" => :sierra
sha256 "3da2ca31353e9139acd51037696a4b0bd5e2ebecdcd9f06cef1d889836e99277" => :el_capitan
end
option "with-fftw", "Compile with FFTW support"
option "with-hdri", "Compile with HDRI support"
option "with-opencl", "Compile with OpenCL support"
option "with-openmp", "Compile with OpenMP support"
option "with-perl", "Compile with PerlMagick"
option "without-magick-plus-plus", "disable build/install of Magick++"
option "without-modules", "Disable support for dynamically loadable modules"
option "without-threads", "Disable threads support"
option "with-zero-configuration", "Disables depending on XML configuration files"
deprecated_option "enable-hdri" => "with-hdri"
deprecated_option "with-jp2" => "with-openjpeg"
depends_on "pkg-config" => :build
depends_on "libtool" => :run
depends_on "xz"
depends_on "jpeg" => :recommended
depends_on "libpng" => :recommended
depends_on "libtiff" => :recommended
depends_on "freetype" => :recommended
depends_on :x11 => :optional
depends_on "fontconfig" => :optional
depends_on "little-cms" => :optional
depends_on "little-cms2" => :optional
depends_on "libwmf" => :optional
depends_on "librsvg" => :optional
depends_on "liblqr" => :optional
depends_on "openexr" => :optional
depends_on "ghostscript" => :optional
depends_on "webp" => :optional
depends_on "openjpeg" => :optional
depends_on "fftw" => :optional
depends_on "pango" => :optional
depends_on :perl => ["5.5", :optional]
needs :openmp if build.with? "openmp"
skip_clean :la
def install
args = %W[
--disable-osx-universal-binary
--prefix=#{prefix}
--disable-dependency-tracking
--disable-silent-rules
--enable-shared
--enable-static
]
if build.without? "modules"
args << "--without-modules"
else
args << "--with-modules"
end
if build.with? "opencl"
args << "--enable-opencl"
else
args << "--disable-opencl"
end
if build.with? "openmp"
args << "--enable-openmp"
else
args << "--disable-openmp"
end
if build.with? "webp"
args << "--with-webp=yes"
else
args << "--without-webp"
end
if build.with? "openjpeg"
args << "--with-openjp2"
else
args << "--without-openjp2"
end
args << "--without-gslib" if build.without? "ghostscript"
args << "--with-perl" << "--with-perl-options='PREFIX=#{prefix}'" if build.with? "perl"
args << "--with-gs-font-dir=#{HOMEBREW_PREFIX}/share/ghostscript/fonts" if build.without? "ghostscript"
args << "--without-magick-plus-plus" if build.without? "magick-plus-plus"
args << "--enable-hdri=yes" if build.with? "hdri"
args << "--without-fftw" if build.without? "fftw"
args << "--without-pango" if build.without? "pango"
args << "--without-threads" if build.without? "threads"
args << "--with-rsvg" if build.with? "librsvg"
args << "--without-x" if build.without? "x11"
args << "--with-fontconfig=yes" if build.with? "fontconfig"
args << "--with-freetype=yes" if build.with? "freetype"
args << "--enable-zero-configuration" if build.with? "zero-configuration"
args << "--without-wmf" if build.without? "libwmf"
# versioned stuff in main tree is pointless for us
inreplace "configure", "${PACKAGE_NAME}-${PACKAGE_VERSION}", "${PACKAGE_NAME}"
system "./configure", *args
system "make", "install"
end
def caveats
s = <<-EOS.undent
For full Perl support you may need to adjust your PERL5LIB variable:
export PERL5LIB="#{HOMEBREW_PREFIX}/lib/perl5/site_perl":$PERL5LIB
EOS
s if build.with? "perl"
end
test do
assert_match "PNG", shell_output("#{bin}/identify #{test_fixtures("test.png")}")
# Check support for recommended features and delegates.
features = shell_output("#{bin}/convert -version")
%w[Modules freetype jpeg png tiff].each do |feature|
assert_match feature, features
end
end
end
| bfontaine/homebrew-core | Formula/imagemagick.rb | Ruby | bsd-2-clause | 4,820 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
using System.Reflection;
using CSScriptLibrary;
using System.Windows.Controls;
using Mono.CSharp;
namespace RobinhoodDesktop.Script
{
/// <summary>
/// Specifies the options for keeping processed data in memory. Ideally all processed data would be kept,
/// but if there is not enough memory for that then some may be discarded once processing has completed.
/// </summary>
public enum MemoryScheme
{
MEM_KEEP_NONE,
MEM_KEEP_SOURCE,
MEM_KEEP_DERIVED
}
[Serializable]
public class StockSession
{
#region Constants
/// <summary>
/// The designator for the stock data source (the data read from the file/live interface)
/// </summary>
public const string SOURCE_CLASS = "StockDataSource";
/// <summary>
/// The designator for the stock data sink (the analyzed data)
/// </summary>
public const string SINK_CLASS = "StockDataSink";
#endregion
#region Variables
/// <summary>
/// Stores a reference to the most recent session that was created (probably the only session)
/// </summary>
public static StockSession Instance;
/// <summary>
/// The path to the source data file
/// </summary>
public string SourceFilePath;
/// <summary>
/// The list of scripts that should be loaded to process the stock data
/// </summary>
public List<string> DataScriptPaths = new List<string>();
/// <summary>
/// The data file the source stock data is being pulled from
/// </summary>
[NonSerialized]
public StockDataFile SourceFile;
/// <summary>
/// The data file representing the analyzed stock data
/// </summary>
[NonSerialized]
public StockDataFile SinkFile;
/// <summary>
/// The stock data associated with the session
/// </summary>
[NonSerialized]
public Dictionary<string, List<StockDataInterface>> Data;
/// <summary>
/// List of action scripts that have been executed
/// </summary>
public Dictionary<object, Assembly> Scripts = new Dictionary<object, Assembly>();
/// <summary>
/// Callback that can be used to add an element to the GUI
/// </summary>
[NonSerialized]
public static AddGuiFunc AddToGui = null;
public delegate void AddGuiFunc(System.Windows.Forms.Control c);
/// <summary>
/// The container object that other GUI elements should be added to
/// </summary>
[NonSerialized]
public static System.Windows.Forms.Control GuiContainer = null;
/// <summary>
/// Callback that can be executed when the session is reloaded
/// </summary>
/// [NonSerialized]
public Action OnReload;
/// <summary>
/// A list of charts associated with this session
/// </summary>
public List<DataChartGui> Charts = new List<DataChartGui>();
#endregion
/// <summary>
/// Creates a session based on the specified source data an analysis scripts
/// </summary>
/// <param name="sources">The source data files</param>
/// <param name="sinkScripts">The data analysis scripts</param>
/// <returns>The session instance</returns>
public static StockSession LoadData(List<string> sources, List<string> sinkScripts)
{
StockSession session = new StockSession();
session.DataScriptPaths.Clear();
Directory.CreateDirectory("tmp");
// Convert any legacy files before further processing
var legacyFiles = sources.Where((s) => { return s.EndsWith(".csv"); }).ToList();
if(legacyFiles.Count() > 0)
{
System.Windows.Forms.SaveFileDialog saveDiag = new System.Windows.Forms.SaveFileDialog();
saveDiag.Title = "Save converted data file as...";
saveDiag.CheckFileExists = false;
if (saveDiag.ShowDialog() == System.Windows.Forms.DialogResult.OK)
{
List<string> convertedFileNames;
var convertedFiles = StockDataFile.ConvertByMonth(legacyFiles, Path.GetDirectoryName(saveDiag.FileName), out convertedFileNames);
foreach (var cf in convertedFileNames) sources.Add(cf);
}
else
{
// Cancel running the script
return null;
}
foreach(var l in legacyFiles) sources.Remove(l);
}
session.SourceFile = StockDataFile.Open(sources.ConvertAll<Stream>((s) => { return System.IO.Stream.Synchronized(new FileStream(s, FileMode.Open)); }));
session.DataScriptPaths.Add("tmp/" + SOURCE_CLASS + ".cs");
using(var file = new StreamWriter(new FileStream(session.DataScriptPaths.Last(), FileMode.Create))) file.Write(session.SourceFile.GetSourceCode(SOURCE_CLASS));
// Put the data set reference script first
List<string> totalSinkScripts = sinkScripts.ToList();
totalSinkScripts.Insert(0, "Script\\Data\\DataSetReference.cs");
session.SinkFile = new StockDataFile(totalSinkScripts.ConvertAll<string>((f) => { return Path.GetFileNameWithoutExtension(f); }), totalSinkScripts.ConvertAll<string>((f) => { return File.ReadAllText(f); }));
session.SinkFile.Interval = session.SourceFile.Interval;
session.DataScriptPaths.Add("tmp/" + SINK_CLASS + ".cs");
using(var file = new StreamWriter(new FileStream(session.DataScriptPaths.Last(), FileMode.Create))) file.Write(session.SinkFile.GenStockDataSink());
session.DataScriptPaths.AddRange(totalSinkScripts);
// Create the evaluator file (needs to be compiled in the script since it references StockDataSource)
string[] embeddedFiles = new string[]
{
"RobinhoodDesktop.Script.StockEvaluator.cs",
"RobinhoodDesktop.Script.StockProcessor.cs"
};
foreach(var f in embeddedFiles)
{
session.DataScriptPaths.Add(string.Format("tmp/{0}.cs", f.Substring(24, f.Length - 27)));
StringBuilder analyzerCode = new StringBuilder();
analyzerCode.Append(new StreamReader(Assembly.GetExecutingAssembly().GetManifestResourceStream(f)).ReadToEnd());
using(var file = new StreamWriter(new FileStream(session.DataScriptPaths.Last(), FileMode.Create))) file.Write(StockDataFile.FormatSource(analyzerCode.ToString()));
}
// Add the user defined analyzers
foreach(string path in Directory.GetFiles(@"Script/Decision", "*.cs", SearchOption.AllDirectories)) session.DataScriptPaths.Add(path);
foreach(string path in Directory.GetFiles(@"Script/Action", "*.cs", SearchOption.AllDirectories)) session.DataScriptPaths.Add(path);
// Build the data
session.Reload();
if(session.Data != null)
{
StockSession.Instance = session;
}
else
{
session.SourceFile.Close();
}
return StockSession.Instance;
}
/// <summary>
/// Creates a chart instance within a data script
/// </summary>
/// <param name="sources">The data sources to load</param>
/// <param name="sinkScripts">The data processors to apply</param>
public static DataChartGui AddChart(List<string> sources, List<string> sinkScripts)
{
var session = (Instance != null) ? Instance : LoadData(sources, sinkScripts);
DataChartGui chart = null;
if(session != null) chart = session.AddChart();
return chart;
}
/// <summary>
/// Creates a new chart and adds it to the session
/// </summary>
/// <returns>The created chart</returns>
public DataChartGui AddChart()
{
DataChartGui chart = null;
if(this.Data != null)
{
try
{
chart = new DataChartGui(this.Data, this);
this.Charts.Add(chart);
var ctrl = (System.Windows.Forms.Control)(chart.GuiPanel);
if((ctrl != null) && (AddToGui != null))
{
AddToGui(ctrl);
}
}
catch(Exception ex)
{
System.Windows.Forms.MessageBox.Show(ex.ToString());
}
}
return chart;
}
/// <summary>
/// Reloads the scripts and executes them
/// </summary>
public void Reload()
{
Data = null;
SourceFile.Reload();
SinkFile.Reload();
// Re-load the data scripts, pulling in any recent changes
Run(this, DataScriptPaths);
// Create and get the StockProcessor instance, which also populates the Data field in the session
Assembly dataScript;
if(Scripts.TryGetValue(this, out dataScript))
{
var getProcessor = dataScript.GetStaticMethod("RobinhoodDesktop.Script.StockProcessor.GetInstance", this);
var processor = getProcessor(this);
// Execute the reload callback
if(OnReload != null) OnReload();
}
}
/// <summary>
/// Loads a script instance
/// </summary>
public void Run(object owner, List<string> scripts)
{
#if DEBUG
var isDebug = true;
#else
var isDebug = false;
#endif
Assembly oldScript;
if(Scripts.TryGetValue(owner, out oldScript))
{
oldScript.UnloadOwnerDomain();
Scripts.Remove(owner);
}
try
{
CSScript.EvaluatorConfig.Engine = EvaluatorEngine.Mono;
CSScript.MonoEvaluator.CompilerSettings.Platform = Mono.CSharp.Platform.X64;
List<string> references = new List<string>()
{
"TensorFlow.NET.dll",
"Google.Protobuf.dll",
"Newtonsoft.Json",
"NumSharp.Lite",
"netstandard",
"System.Memory",
"System.Numerics"
};
foreach(var s in Scripts.Values) references.Add(s.Location);
Scripts[owner] = CSScript.LoadFiles(scripts.ToArray(), null, isDebug, references.ToArray());
// Check if a "Run" method should be executed
MethodDelegate runFunc = null;
try { runFunc = Scripts[owner].GetStaticMethod("*.Run", this); } catch(Exception ex) { };
if(runFunc != null)
{
System.Threading.Tasks.Task.Run(() => { runFunc(this); });
}
}
catch(Exception ex)
{
string err = ex.ToString();
System.Text.RegularExpressions.Regex.Replace(err, "\r\n.*?warning.*?\r\n", "\r\n");
Console.WriteLine(err);
System.Windows.Forms.MessageBox.Show(err);
}
}
}
}
| Terohnon/RobinhoodDesktop | RobinhoodDesktop/RobinhoodDesktop/Script/StockSession.cs | C# | bsd-2-clause | 12,086 |
/*_ generic.hpp Tue Jul 5 1988 Modified by: Walter Bright */
#ifndef __GENERIC_H
#define __GENERIC_H 1
/* Name concatenator functions */
#define name2(n1,n2) n1 ## n2
#define name3(n1,n2,n3) n1 ## n2 ## n3
#define name4(n1,n2,n3,n4) n1 ## n2 ## n3 ## n4
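/* For example, name2(my,Func) expands to the single token myFunc,
   and name3(get,Max,Int) expands to getMaxInt. */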
typedef int (*GPT) (int,char *);
extern int genericerror(int,char *);
#define set_handler(generic,type,x) set_##type##generic##_handler(x)
#define errorhandler(generic,type) type##generic##handler
#define callerror(generic,type,a,b) (*errorhandler(generic,type))(a,b)
#define declare(a,type) a##declare(type)
#define implement(a,type) a##implement(type)
#define declare2(a,type1,type2) a##declare2(type1,type2)
#define implement2(a,type1,type2) a##implement2(type1,type2)
#endif /* __GENERIC_H */
| johnrsibert/tagest | 21mt/src/generic.h | C | bsd-2-clause | 769 |
<?php
$bbcode["code"] = array(
'callback' => 'bbcodeCodeHighlight',
'pre' => TRUE,
);
$bbcode["source"] = array(
'callback' => 'bbcodeCodeHighlight',
'pre' => TRUE,
);
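// Illustrative usage (the exact tag syntax depends on the board's BBCode parser):
// a [code]...[/code] block with no argument becomes a plain <div class="codeblock">,
// while a language argument such as "php" routes the contents through GeSHi for
// syntax highlighting, as implemented below.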
function bbcodeCodeHighlight($dom, $contents, $arg)
{
// in <pre> style
$contents = preg_replace('/^\n|\n$/', "", $contents);
include_once("geshi.php");
if(!$arg)
{
$div = $dom->createElement('div');
$div->setAttribute('class', 'codeblock');
$div->appendChild($dom->createTextNode($contents));
return $div;
}
else
{
$language = $arg;
$geshi = new GeSHi($contents, $language);
$geshi->set_header_type(GESHI_HEADER_NONE);
$geshi->enable_classes();
$geshi->enable_keyword_links(false);
$code = str_replace("\n", "", $geshi->parse_code());
return markupToMarkup($dom, "<div class=\"codeblock geshi\">$code</div>");
}
}
| RoseyDreamy/ABXD-Omega | plugins/sourcetag/bbcode.php | PHP | bsd-2-clause | 834 |
@font-face {
font-family: 'Source Sans Pro';
font-style: normal;
font-weight: 400;
src: local('Source Sans Pro'), local('SourceSansPro-Regular'), url(fonts/SourceSansPro-Regular.otf) format('opentype');
}
@font-face {
font-family: 'Source Sans Pro';
font-style: normal;
font-weight: 600;
src: local('Source Sans Pro Semibold'), local('SourceSansPro-Semibold'), url(fonts/SourceSansPro-Semibold.otf) format('opentype');
}
@font-face {
font-family: 'Source Sans Pro';
font-style: normal;
font-weight: 700;
src: local('Source Sans Pro Bold'), local('SourceSansPro-Bold'), url(fonts/SourceSansPro-Bold.otf) format('opentype');
}
@font-face {
font-family: 'Source Sans Pro';
font-style: normal;
font-weight: 900;
src: local('Source Sans Pro Black'), local('SourceSansPro-Black'), url(fonts/SourceSansPro-Black.otf) format('opentype');
}
body {
font-family: 'Source Sans Pro', 'Helvetica Neue', 'Arial', sans-serif;
line-height: 1.8em;
color: #333935;
background-color: #f5f5f5;
font-size: 15px;
margin: 0;
}
div, header, select, a {
box-sizing: border-box;
}
.full-width {
width: 100%;
}
.u-floatRight {
float: right;
}
.u-floatLeft {
float: left;
}
.u-textCenter {
text-align: center;
}
.u-textRight {
text-align: right;
}
.u-posFixed {
position: fixed;
}
li {
padding-bottom: 4px;
}
.nav {
width: 100%;
background-color: #fafafa;
top: 0;
}
.wrap-width {
width: 600px;
margin: auto;
padding: 0 20px;
}
.wrapper {
min-width: 800px;
margin-top: 60px;
}
.page-wrapper {
max-width: 600px;
margin: 60px auto 0;
padding: 15px;
}
.sidebar {
width: 25%;
position: fixed;
height: 100%;
background-color: #f5f5f5;
}
.content {
padding: 15px;
margin-left: 25%;
width: 75%;
height: 100%;
background-color: #fff;
}
.site-header {
height: 60px;
position: fixed;
top: 0;
width: 100%;
font-size: 20px;
z-index: 9;
text-align: center;
background: #fafafa;
padding: 10px 0;
border-bottom: 1px solid #e0e0e0;
}
.challenge-title {
text-transform: capitalize;
font-size: 19px;
font-weight: 800;
color: #4BABFF;
line-height: 1;
display: inline-block;
}
.pagination {
position: absolute;
top: 12px;
right: 15px;
}
.list-of-challenges {
list-style-type: none;
margin: 0;
padding: 0;
}
.list-of-challenges li {
padding: 0;
}
.challenge-item {
padding: 3px 10px;
display: block;
text-transform: capitalize;
border-top: 3px solid transparent;
border-left: 3px solid transparent;
border-bottom: 3px solid transparent;
}
/* :after instead of :before because it messes with text-transform */
.challenge-item:after {
content: " ";
float: left;
width: 20px;
display: inline-block;
}
.challenge-item.completed:after {
content: "✔️ ";
position: absolute;
left: 10px;
top: 4px;
color: #fff;
}
.challenge-item.current .current-arrow {
display: block;
}
.current-arrow {
float: right;
display: none;
}
.challenge-item.completed:hover {
background-color: #1EA577;
}
.challenge-item.current {
background-color: #ECFCE0;
color: #456;
margin-right: -3px;
border-color: white;
}
.challenge-item.completed {
position: relative;
background-color: #22B784;
color: #fff;
padding-left: 30px;
}
.hand,
.lang-select {
padding: 4px 13px;
border-radius: 3px;
color: #0087ff;
display: inline-block;
}
.hand {
border: 1px solid #8cf;
font-size: 26px;
margin-left: 10px;
}
.lang-select {
border-color: #8cf;
position: absolute;
height: 36px;
width: 100px;
margin: 5px 8px;
top: 0;
left: 0;
appearance: none;
-webkit-appearance: none;
}
.lang-select:after {
content: "";
border: 5px solid transparent;
border-top-color: #8cf;
position: relative;
left: 40px;
top: 5px;
pointer-events: none;
display: inline-block;
z-index: 10;
}
.hand:hover {
border-color: transparent;
background-color: #08f;
color: #fff;
}
.challenge-name {
text-transform: capitalize;
font-weight: 900;
color: #0087ff;
}
.filledblock {
display: inline-block;
padding: 4px 10px;
font-weight: 900;
}
.filledblock:hover {
background: #4BABFF;
color: #fff;
}
.all-caps {
font-size: 12px;
text-transform: uppercase;
}
.site-header h2 {
margin-top: 12px;
}
h1, h2, h3, h4, .toc {
font-family: 'Source Sans Pro', 'Helvetica Neue', 'Arial', sans-serif;
}
.content h1, .content h2, .content h3, .content h4 {
margin-top: 50px;
}
a {
color: #0087ff;
text-decoration: none;
}
a:hover {
color: #4BABFF;
}
.toc li {
margin-bottom: 10px;
}
.toc li a {
font-weight: 700;
}
.toc {
padding-left: 0;
text-align: left;
list-style-position: inside;
}
.toc .done:before {
content: 'done!';
float: right;
color: #aaa;
text-decoration: none !important;
}
.toc .done a {
text-decoration: line-through;
}
code, .outof {
font-size: 0.9em;
border: 1px solid #9DA6B3;
padding: 4px 6px;
border-radius: 2px;
font-family: Liberation Mono, Monaco, Courier New, monospace;
white-space: nowrap;
}
footer {
border-top: 3px solid #0087ff;
margin: 20px 0 0;
padding: 12px 0;
}
footer ul {
margin: 0;
padding: 0;
text-align: center;
}
footer ul li {
display: inline-block;
padding: 0 6px;
}
.prenext {
overflow: auto;
}
.challenge-desc {
color: #777;
font-style: italic;
}
#git-tips,
.didnt-pass {
border: 1px solid #BADFFF;
padding: 18px;
margin: 18px 0 36px;
}
.didnt-pass {
border-color: #999;
}
#git-tips h2 {
color: #08f;
}
#git-tips h2,
.didnt-pass h2 {
margin: -32px -8px 0;
float: left;
background: #fff;
padding: 0 8px;
}
#git-tips h2 + p {
margin-top: 0;
}
#git-tips p:last-child,
.didnt-pass p:last-child {
margin-bottom: 0;
}
#git-tips ul {
list-style: none;
margin: 0;
padding: 0;
}
#git-tips code, .didnt-pass code {
border-color: #0087ff;
}
#git-tips li {
margin-top: 0px;
}
.challenge, .verify {
background-color: #0087ff;
color: #fff;
padding: 18px 18px 5px;
}
.verify code {
border-color: #fff;
display: inline-block;
}
.verify {
margin-top: 30px;
margin-bottom: 30px;
}
#git-tips h4,
.challenge h2,
.verify h3 {
margin-top: 0;
}
.didnt-pass h4 {
margin: 10px 0 0;
}
.didnt-pass h4:before {
content: '⚠️ ';
font-weight: normal;
}
.didnt-pass p + h4 {
padding-top: 30px;
}
.verify h3 {
display: inline-block;
padding-right: 12px;
}
blockquote {
border-left: 2px solid #acacac;
padding-left: 30px;
margin: 30px 0;
color: #acacac;
}
.weehero {
background-color: #0087ff;
padding: 30px;
text-align: center;
color: #fff;
font-size: 50px;
font-weight: 900;
}
img {
margin: 30px 0 18px;
}
n, v, adj {
position: relative;
}
.superscript {
font-size: 0.6em;
position: absolute;
top: -20%;
line-height: 0.6em;
font-weight: normal;
white-space: nowrap;
opacity: 0.8;
}
.u-inlineBlock {
display: inline-block;
}
/* new stuff */
#directory-path:empty {
display: none;
}
#path-required-warning,
#directory-path {
font-family: Liberation Mono, Monaco, Courier New, monospace;
padding: 6px 12px 6px 10px;
vertical-align: middle;
margin-bottom: 0;
font-size: 13px;
font-weight: 400;
text-align: center;
white-space: nowrap;
border-right: 1px solid #fff;
border-top: 1px solid #fff;
border-bottom: 1px solid #fff;
margin-left: -3px;
}
#path-required-warning {
display: none;
color: #ff0;
}
#path-required-warning.show {
display: inline;
}
#path-required-warning.show + #directory-path {
display: none;
}
.verify-fail:before {
content: '✗ ';
}
.verify-pass:before {
content: '✔︎ ';
}
#verify-list {
list-style: none;
padding-left: 0;
}
#challenge-completed {
color: #2BDA9E;
font-size: 24px;
font-family: "NothingYouCouldDo";
}
#challenge-completed h2 {
padding: 0; margin: 0;
}
.completed-challenge-list {
font-family: "NothingYouCouldDo";
/*background-color: #BADFFF;*/
color: #2BDA9E;
padding: 2px 4px;
text-transform: uppercase;
font-size: 12px;
}
button {
display: inline-block;
background-color: #CBCBCB;
color: #818181;
padding: 6px 12px;
margin-bottom: 0;
font-size: 12px;
font-weight: 400;
text-align: center;
white-space: nowrap;
vertical-align: middle;
cursor: pointer;
-webkit-user-select: none;
border: 1px solid transparent;
font-family: 'Source Sans Pro', 'Helvetica Neue', 'Arial', sans-serif;
text-transform: uppercase;
}
button:focus {
outline: 0;
}
button:hover {
background-color: #e7e7e7;
}
button.white {
background-color: #fff;
color: #0087ff;
}
button.white:hover {
background-color: #eeeeee;
}
button.white:disabled {
background-color: #F5F5F5;
color: #C8C8C8;
}
button.light-blue {
background-color: #D2EAFF;
color: #86B6E0;
}
button.light-blue:hover {
background-color: #e2effa;
}
#clear-completed-challenge {
margin-bottom: 11px;
}
#clear-all-challenges:disabled,
#get-started:disabled {
display: none;
}
.teal {
background: #2BDA9E;
color: #fff;
}
.teal:hover {
background: #6EE7BD;
color: #fff;
}
.grey-border {
border: 1px solid #818181
}
| rets5s/git-it-electron | assets/css/style.css | CSS | bsd-2-clause | 9,127 |
/*
+------------------------------------------------------------------------+
| Phalcon Framework |
+------------------------------------------------------------------------+
| Copyright (c) 2011-2014 Phalcon Team (http://www.phalconphp.com) |
+------------------------------------------------------------------------+
| This source file is subject to the New BSD License that is bundled |
| with this package in the file docs/LICENSE.txt. |
| |
| If you did not receive a copy of the license and are unable to |
| obtain it through the world-wide-web, please send an email |
| to [email protected] so we can send you a copy immediately. |
+------------------------------------------------------------------------+
| Authors: Andres Gutierrez <[email protected]> |
| Eduar Carvajal <[email protected]> |
+------------------------------------------------------------------------+
*/
#include "mvc/view/engine.h"
#include "mvc/view/engineinterface.h"
#include "di/injectable.h"
#include "kernel/main.h"
#include "kernel/memory.h"
#include "kernel/object.h"
#include "kernel/fcall.h"
/**
* Phalcon\Mvc\View\Engine
*
* All the template engine adapters must inherit this class. This provides
* basic interfacing between the engine and the Phalcon\Mvc\View component.
*/
zend_class_entry *phalcon_mvc_view_engine_ce;
PHP_METHOD(Phalcon_Mvc_View_Engine, __construct);
PHP_METHOD(Phalcon_Mvc_View_Engine, getContent);
PHP_METHOD(Phalcon_Mvc_View_Engine, partial);
PHP_METHOD(Phalcon_Mvc_View_Engine, getView);
ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_view_engine___construct, 0, 0, 1)
ZEND_ARG_INFO(0, view)
ZEND_ARG_INFO(0, dependencyInjector)
ZEND_END_ARG_INFO()
static const zend_function_entry phalcon_mvc_view_engine_method_entry[] = {
PHP_ME(Phalcon_Mvc_View_Engine, __construct, arginfo_phalcon_mvc_view_engine___construct, ZEND_ACC_PUBLIC|ZEND_ACC_CTOR)
PHP_ME(Phalcon_Mvc_View_Engine, getContent, NULL, ZEND_ACC_PUBLIC)
PHP_ME(Phalcon_Mvc_View_Engine, partial, arginfo_phalcon_mvc_view_engineinterface_partial, ZEND_ACC_PUBLIC)
PHP_ME(Phalcon_Mvc_View_Engine, getView, NULL, ZEND_ACC_PUBLIC)
PHP_FE_END
};
/**
* Phalcon\Mvc\View\Engine initializer
*/
PHALCON_INIT_CLASS(Phalcon_Mvc_View_Engine){
PHALCON_REGISTER_CLASS_EX(Phalcon\\Mvc\\View, Engine, mvc_view_engine, phalcon_di_injectable_ce, phalcon_mvc_view_engine_method_entry, ZEND_ACC_EXPLICIT_ABSTRACT_CLASS);
zend_declare_property_null(phalcon_mvc_view_engine_ce, SL("_view"), ZEND_ACC_PROTECTED TSRMLS_CC);
zend_class_implements(phalcon_mvc_view_engine_ce TSRMLS_CC, 1, phalcon_mvc_view_engineinterface_ce);
return SUCCESS;
}
/**
* Phalcon\Mvc\View\Engine constructor
*
* @param Phalcon\Mvc\ViewInterface $view
* @param Phalcon\DiInterface $dependencyInjector
*/
PHP_METHOD(Phalcon_Mvc_View_Engine, __construct){
zval *view, *dependency_injector = NULL;
phalcon_fetch_params(0, 1, 1, &view, &dependency_injector);
if (!dependency_injector) {
dependency_injector = PHALCON_GLOBAL(z_null);
}
phalcon_update_property_this(this_ptr, SL("_view"), view TSRMLS_CC);
phalcon_update_property_this(this_ptr, SL("_dependencyInjector"), dependency_injector TSRMLS_CC);
}
/**
* Returns cached output on another view stage
*
* @return array
*/
PHP_METHOD(Phalcon_Mvc_View_Engine, getContent)
{
zval *view = phalcon_fetch_nproperty_this(this_ptr, SL("_view"), PH_NOISY TSRMLS_CC);
PHALCON_RETURN_CALL_METHODW(view, "getcontent");
}
/**
* Renders a partial inside another view
*
* @param string $partialPath
* @param array $params
* @return string
*/
PHP_METHOD(Phalcon_Mvc_View_Engine, partial){
zval *partial_path, *params = NULL, *view;
phalcon_fetch_params(0, 1, 1, &partial_path, ¶ms);
if (!params) {
params = PHALCON_GLOBAL(z_null);
}
view = phalcon_fetch_nproperty_this(this_ptr, SL("_view"), PH_NOISY TSRMLS_CC);
PHALCON_RETURN_CALL_METHODW(view, "partial", partial_path, params);
}
/**
* Returns the view component related to the adapter
*
* @return Phalcon\Mvc\ViewInterface
*/
PHP_METHOD(Phalcon_Mvc_View_Engine, getView){
RETURN_MEMBER(this_ptr, "_view");
}
| unisys12/phalcon-hhvm | ext/mvc/view/engine.cpp | C++ | bsd-2-clause | 4,384 |
FROM microsoft/aspnetcore:2.0
ARG source
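# The 'source' build arg selects the directory copied into the image; when it
# is not supplied, the COPY below falls back to obj/Docker/publish.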
WORKDIR /app
EXPOSE 80
COPY ${source:-obj/Docker/publish} .
ENTRYPOINT ["dotnet", "WebApi.dll"]
| mrsalty/ncontract | src/Examples/WebApi/Dockerfile | Dockerfile | bsd-2-clause | 137 |
"""
recursely
"""
__version__ = "0.1"
__description__ = "Recursive importer for Python submodules"
__author__ = "Karol Kuczmarski"
__license__ = "Simplified BSD"
import sys
from recursely._compat import IS_PY3
from recursely.importer import RecursiveImporter
from recursely.utils import SentinelList
__all__ = ['install']
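# Typical usage (an illustrative sketch based on the docstring below): call
# ``recursely.install()`` once at startup, then put a ``__recursive__ = True``
# directive in a package's __init__.py so that importing the package also
# imports all of its submodules.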
def install(retroactive=True):
"""Install the recursive import hook in ``sys.meta_path``,
enabling the use of ``__recursive__`` directive.
:param retroactive: Whether the hook should be retroactively applied
to module's that have been imported before
it was installed.
"""
if RecursiveImporter.is_installed():
return
importer = RecursiveImporter()
# because the hook is a catch-all one, we ensure that it's always
# at the very end of ``sys.meta_path``, so that it's tried only if
# no other (more specific) hook has been chosen by Python
if IS_PY3:
for i in reversed(range(len(sys.meta_path))):
ih_module = getattr(sys.meta_path[i], '__module__', '')
is_builtin = ih_module == '_frozen_importlib'
if not is_builtin:
break
sys.meta_path = SentinelList(
sys.meta_path[:i],
sentinels=[importer] + sys.meta_path[i:])
else:
sys.meta_path = SentinelList(sys.meta_path, sentinel=importer)
# look through already imported packages and recursively import
# their submodules, if they contain the ``__recursive__`` directive
if retroactive:
for module in list(sys.modules.values()):
importer.recurse(module)
| Xion/recursely | recursely/__init__.py | Python | bsd-2-clause | 1,663 |
// This file was procedurally generated from the following sources:
// - src/dstr-binding/obj-ptrn-id-init-unresolvable.case
// - src/dstr-binding/error/for-of-let.template
/*---
description: Destructuring initializer is an unresolvable reference (for-of statement)
esid: sec-for-in-and-for-of-statements-runtime-semantics-labelledevaluation
es6id: 13.7.5.11
features: [destructuring-binding]
flags: [generated]
info: |
IterationStatement :
for ( ForDeclaration of AssignmentExpression ) Statement
[...]
3. Return ForIn/OfBodyEvaluation(ForDeclaration, Statement, keyResult,
lexicalBinding, labelSet).
13.7.5.13 Runtime Semantics: ForIn/OfBodyEvaluation
[...]
3. Let destructuring be IsDestructuring of lhs.
[...]
5. Repeat
[...]
h. If destructuring is false, then
[...]
i. Else
i. If lhsKind is assignment, then
[...]
ii. Else if lhsKind is varBinding, then
[...]
iii. Else,
1. Assert: lhsKind is lexicalBinding.
2. Assert: lhs is a ForDeclaration.
3. Let status be the result of performing BindingInitialization
for lhs passing nextValue and iterationEnv as arguments.
[...]
13.3.3.7 Runtime Semantics: KeyedBindingInitialization
SingleNameBinding : BindingIdentifier Initializeropt
[...]
6. If Initializer is present and v is undefined, then
a. Let defaultValue be the result of evaluating Initializer.
b. Let v be GetValue(defaultValue).
c. ReturnIfAbrupt(v).
6.2.3.1 GetValue (V)
1. ReturnIfAbrupt(V).
2. If Type(V) is not Reference, return V.
3. Let base be GetBase(V).
4. If IsUnresolvableReference(V), throw a ReferenceError exception.
---*/
assert.throws(ReferenceError, function() {
for (let { x = unresolvableReference } of [{}]) {
return;
}
});
| sebastienros/jint | Jint.Tests.Test262/test/language/statements/for-of/dstr-let-obj-ptrn-id-init-unresolvable.js | JavaScript | bsd-2-clause | 1,939 |
import {Component, OnInit} from '@angular/core';
import {MarketCard} from '../market-card';
import {MarketCardType} from '../market-card-type';
import {MarketService} from '../market.service';
import {Expansion} from '../expansion';
import { GameModeService } from '../game-mode.service';
import { GameMode } from '../game-mode';
@Component({
selector: 'app-market-selection',
templateUrl: './market-selection.component.html',
styleUrls: ['./market-selection.component.css']
})
export class MarketSelectionComponent implements OnInit {
constructor(private marketService: MarketService, private gameModeService: GameModeService) { }
cards: MarketCard[];
expeditionMode: boolean;
ngOnInit() {
this.marketService.marketCards$.subscribe((cards: MarketCard[]) => {
this.cards = cards;
});
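// Seed with the service's current value so the view has data before the first
// observable emission arrives.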
this.cards = this.marketService.marketCards;
this.gameModeService.selectedGameMode$.subscribe((newGameMode: GameMode) => {
this.updateExpeditionMode(newGameMode);
});
this.updateExpeditionMode(this.gameModeService.selectedGameMode);
}
getCardCssClass(type: MarketCardType): string {
switch (type) {
case MarketCardType.Gem:
return 'gem-card';
case MarketCardType.Relic:
return 'relic-card';
case MarketCardType.Spell:
return 'spell-card';
}
}
getCardTypeLabel(type: MarketCardType): string {
switch (type) {
case MarketCardType.Gem:
return 'Gem';
case MarketCardType.Relic:
return 'Relic';
case MarketCardType.Spell:
return 'Spell';
}
}
private updateExpeditionMode(gameMode: GameMode): void {
this.expeditionMode = (gameMode !== GameMode.SingleGame);
}
}
| kbarnes3/AeonsEnd | aeons-end/src/app/market-selection/market-selection.component.ts | TypeScript | bsd-2-clause | 1,716 |
/**
* Copyright (c) 2013, Jens Hohmuth
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.lessvoid.coregl;
import com.lessvoid.coregl.spi.CoreGL;
/**
* Simple helper methods to render vertex arrays.
*
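* A minimal usage sketch (illustrative; it assumes a VAO and a shader are
* already bound and that {@code gl} is a valid CoreGL instance):
* <pre>
* CoreRender render = CoreRender.createCoreRender(gl);
* render.clearColor(0.0f, 0.0f, 0.0f, 1.0f);
* render.clearColorBuffer();
* render.renderTriangles(vertexCount);
* </pre>
*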
* @author void
*/
public class CoreRender {
private final CoreGL gl;
CoreRender(final CoreGL gl) {
this.gl = gl;
}
public static CoreRender createCoreRender(final CoreGL gl) {
return new CoreRender(gl);
}
// Lines
/**
* Render lines.
*
* @param count
* number of vertices
*/
public void renderLines(final int count) {
gl.glDrawArrays(gl.GL_LINE_STRIP(), 0, count);
gl.checkGLError("glDrawArrays");
}
/**
* Render adjacent lines.
*
* @param count
* number of vertices
*/
public void renderLinesAdjacent(final int count) {
gl.glDrawArrays(gl.GL_LINE_STRIP_ADJACENCY(), 0, count);
gl.checkGLError("glDrawArrays");
}
// Triangle Strip
/**
* Render the currently active VAO using triangle strips with the given number
* of vertices.
*
* @param count
* number of vertices to render as triangle strips
*/
public void renderTriangleStrip(final int count) {
gl.glDrawArrays(gl.GL_TRIANGLE_STRIP(), 0, count);
gl.checkGLError("glDrawArrays");
}
/**
* Render the currently active VAO using triangle strips, sending the given
* number of indices.
*
* @param count
* number of indices to render as triangle strips
*/
public void renderTriangleStripIndexed(final int count) {
gl.glDrawElements(gl.GL_TRIANGLE_STRIP(), count, gl.GL_UNSIGNED_INT(), 0);
gl.checkGLError("glDrawElements(GL_TRIANGLE_STRIP)");
}
/**
* Render the currently active VAO using triangle strips with the given number
* of vertices AND do that primCount times.
*
* @param count
* number of vertices to render as triangle strips per primitive
* @param primCount
* number of primitives to render
*/
public void renderTriangleStripInstances(final int count, final int primCount) {
gl.glDrawArraysInstanced(gl.GL_TRIANGLE_STRIP(), 0, count, primCount);
gl.checkGLError("glDrawArraysInstanced(GL_TRIANGLE_STRIP)");
}
// Triangle Fan
/**
* Render the currently active VAO using triangle fan with the given number of
* vertices.
*
* @param count
* number of vertices to render as triangle fan
*/
public void renderTriangleFan(final int count) {
gl.glDrawArrays(gl.GL_TRIANGLE_FAN(), 0, count);
gl.checkGLError("glDrawArrays");
}
/**
* Render the currently active VAO using triangle fans, sending the given
* number of indices.
*
* @param count
* number of indices to render as triangle fans.
*/
public void renderTriangleFanIndexed(final int count) {
gl.glDrawElements(gl.GL_TRIANGLE_FAN(), count, gl.GL_UNSIGNED_INT(), 0);
gl.checkGLError("glDrawElements(GL_TRIANGLE_FAN)");
}
// Individual Triangles
/**
* Render the currently active VAO using triangles with the given number of
* vertices.
*
* @param vertexCount
* number of vertices to render as triangle strips
*/
public void renderTriangles(final int vertexCount) {
gl.glDrawArrays(gl.GL_TRIANGLES(), 0, vertexCount);
gl.checkGLError("glDrawArrays");
}
/**
* Render the currently active VAO using triangles with the given number of
* vertices starting at the given offset.
*
* @param offset
* offset to start sending vertices
* @param vertexCount
* number of vertices to render as triangle strips
*/
public void renderTrianglesOffset(final int offset, final int vertexCount) {
gl.glDrawArrays(gl.GL_TRIANGLES(), offset, vertexCount);
gl.checkGLError("glDrawArrays");
}
/**
* Render the currently active VAO using triangles with the given number of
* vertices.
*
* @param count
* number of vertices to render as triangles
*/
public void renderTrianglesIndexed(final int count) {
gl.glDrawElements(gl.GL_TRIANGLES(), count, gl.GL_UNSIGNED_INT(), 0);
gl.checkGLError("glDrawElements");
}
/**
* Render the currently active VAO using triangles with the given number of
* vertices AND do that primCount times.
*
* @param count
* number of vertices to render as triangles per primitive
* @param primCount
* number of primitives to render
*/
public void renderTrianglesInstances(final int count, final int primCount) {
gl.glDrawArraysInstanced(gl.GL_TRIANGLES(), 0, count, primCount);
gl.checkGLError("glDrawArraysInstanced(GL_TRIANGLES)");
}
// Points
/**
* Render the currently active VAO using points with the given number of
* vertices.
*
* @param count
* number of vertices to render as points
*/
public void renderPoints(final int count) {
gl.glDrawArrays(gl.GL_POINTS(), 0, count);
gl.checkGLError("glDrawArrays(GL_POINTS)");
}
/**
* Render the currently active VAO using points with the given number of
* vertices AND do that primCount times.
*
* @param count
* number of vertices to render as points per primitive
* @param primCount
* number of primitives to render
*/
public void renderPointsInstances(final int count, final int primCount) {
gl.glDrawArraysInstanced(gl.GL_POINTS(), 0, count, primCount);
gl.checkGLError("glDrawArraysInstanced(GL_POINTS)");
}
// Utils
/**
* Set the clear color.
*
* @param r
* red
* @param g
* green
* @param b
* blue
* @param a
* alpha
*/
public void clearColor(final float r, final float g, final float b, final float a) {
gl.glClearColor(r, g, b, a);
}
/**
* Clear the color buffer.
*/
public void clearColorBuffer() {
gl.glClear(gl.GL_COLOR_BUFFER_BIT());
}
}
| bgroenks96/coregl | coregl-utils/src/main/java/com/lessvoid/coregl/CoreRender.java | Java | bsd-2-clause | 7,233 |
/***************************************************************************
* _ _ ____ _
* Project ___| | | | _ \| |
* / __| | | | |_) | |
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2020, Daniel Stenberg, <[email protected]>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
* furnished to do so, under the terms of the COPYING file.
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
* KIND, either express or implied.
*
***************************************************************************/
#include "curl_setup.h"
#include <curl/curl.h>
#include "urldata.h"
#include "getinfo.h"
#include "vtls/vtls.h"
#include "connect.h" /* Curl_getconnectinfo() */
#include "progress.h"
/* The last #include files should be: */
#include "curl_memory.h"
#include "memdebug.h"
/*
* Initialize statistical and informational data.
*
* This function is called in curl_easy_reset, curl_easy_duphandle and at the
* beginning of a perform session. It must reset the session-info variables,
* in particular all variables in struct PureInfo.
*/
CURLcode Curl_initinfo(struct Curl_easy *data)
{
struct Progress *pro = &data->progress;
struct PureInfo *info = &data->info;
pro->t_nslookup = 0;
pro->t_connect = 0;
pro->t_appconnect = 0;
pro->t_pretransfer = 0;
pro->t_starttransfer = 0;
pro->timespent = 0;
pro->t_redirect = 0;
pro->is_t_startransfer_set = false;
info->httpcode = 0;
info->httpproxycode = 0;
info->httpversion = 0;
info->filetime = -1; /* -1 is an illegal time and thus means unknown */
info->timecond = FALSE;
info->header_size = 0;
info->request_size = 0;
info->proxyauthavail = 0;
info->httpauthavail = 0;
info->numconnects = 0;
free(info->contenttype);
info->contenttype = NULL;
free(info->wouldredirect);
info->wouldredirect = NULL;
info->conn_primary_ip[0] = '\0';
info->conn_local_ip[0] = '\0';
info->conn_primary_port = 0;
info->conn_local_port = 0;
info->conn_scheme = 0;
info->conn_protocol = 0;
#ifdef USE_SSL
Curl_ssl_free_certinfo(data);
#endif
return CURLE_OK;
}
static CURLcode getinfo_char(struct Curl_easy *data, CURLINFO info,
const char **param_charp)
{
switch(info) {
case CURLINFO_EFFECTIVE_URL:
*param_charp = data->change.url?data->change.url:(char *)"";
break;
case CURLINFO_CONTENT_TYPE:
*param_charp = data->info.contenttype;
break;
case CURLINFO_PRIVATE:
*param_charp = (char *) data->set.private_data;
break;
case CURLINFO_FTP_ENTRY_PATH:
/* Return the entrypath string from the most recent connection.
This pointer was copied from the connectdata structure by FTP.
The actual string may be free()ed by subsequent libcurl calls so
it must be copied to a safer area before the next libcurl call.
Callers must never free it themselves. */
*param_charp = data->state.most_recent_ftp_entrypath;
break;
case CURLINFO_REDIRECT_URL:
/* Return the URL this request would have been redirected to if that
option had been enabled! */
*param_charp = data->info.wouldredirect;
break;
case CURLINFO_PRIMARY_IP:
/* Return the ip address of the most recent (primary) connection */
*param_charp = data->info.conn_primary_ip;
break;
case CURLINFO_LOCAL_IP:
/* Return the source/local ip address of the most recent (primary)
connection */
*param_charp = data->info.conn_local_ip;
break;
case CURLINFO_RTSP_SESSION_ID:
*param_charp = data->set.str[STRING_RTSP_SESSION_ID];
break;
case CURLINFO_SCHEME:
*param_charp = data->info.conn_scheme;
break;
default:
return CURLE_UNKNOWN_OPTION;
}
return CURLE_OK;
}
static CURLcode getinfo_long(struct Curl_easy *data, CURLINFO info,
long *param_longp)
{
curl_socket_t sockfd;
union {
unsigned long *to_ulong;
long *to_long;
} lptr;
#ifdef DEBUGBUILD
char *timestr = getenv("CURL_TIME");
if(timestr) {
unsigned long val = strtol(timestr, NULL, 10);
switch(info) {
case CURLINFO_LOCAL_PORT:
*param_longp = (long)val;
return CURLE_OK;
default:
break;
}
}
/* use another variable for this to allow different values */
timestr = getenv("CURL_DEBUG_SIZE");
if(timestr) {
unsigned long val = strtol(timestr, NULL, 10);
switch(info) {
case CURLINFO_HEADER_SIZE:
case CURLINFO_REQUEST_SIZE:
*param_longp = (long)val;
return CURLE_OK;
default:
break;
}
}
#endif
switch(info) {
case CURLINFO_RESPONSE_CODE:
*param_longp = data->info.httpcode;
break;
case CURLINFO_HTTP_CONNECTCODE:
*param_longp = data->info.httpproxycode;
break;
case CURLINFO_FILETIME:
if(data->info.filetime > LONG_MAX)
*param_longp = LONG_MAX;
else if(data->info.filetime < LONG_MIN)
*param_longp = LONG_MIN;
else
*param_longp = (long)data->info.filetime;
break;
case CURLINFO_HEADER_SIZE:
*param_longp = (long)data->info.header_size;
break;
case CURLINFO_REQUEST_SIZE:
*param_longp = (long)data->info.request_size;
break;
case CURLINFO_SSL_VERIFYRESULT:
*param_longp = data->set.ssl.certverifyresult;
break;
case CURLINFO_PROXY_SSL_VERIFYRESULT:
*param_longp = data->set.proxy_ssl.certverifyresult;
break;
case CURLINFO_REDIRECT_COUNT:
*param_longp = data->set.followlocation;
break;
case CURLINFO_HTTPAUTH_AVAIL:
lptr.to_long = param_longp;
*lptr.to_ulong = data->info.httpauthavail;
break;
case CURLINFO_PROXYAUTH_AVAIL:
lptr.to_long = param_longp;
*lptr.to_ulong = data->info.proxyauthavail;
break;
case CURLINFO_OS_ERRNO:
*param_longp = data->state.os_errno;
break;
case CURLINFO_NUM_CONNECTS:
*param_longp = data->info.numconnects;
break;
case CURLINFO_LASTSOCKET:
sockfd = Curl_getconnectinfo(data, NULL);
/* note: this is not a good conversion for systems with 64 bit sockets and
32 bit longs */
if(sockfd != CURL_SOCKET_BAD)
*param_longp = (long)sockfd;
else
/* this interface is documented to return -1 in case of badness, which
may not be the same as the CURL_SOCKET_BAD value */
*param_longp = -1;
break;
case CURLINFO_PRIMARY_PORT:
/* Return the (remote) port of the most recent (primary) connection */
*param_longp = data->info.conn_primary_port;
break;
case CURLINFO_LOCAL_PORT:
/* Return the local port of the most recent (primary) connection */
*param_longp = data->info.conn_local_port;
break;
case CURLINFO_CONDITION_UNMET:
if(data->info.httpcode == 304)
*param_longp = 1L;
else
/* return if the condition prevented the document to get transferred */
*param_longp = data->info.timecond ? 1L : 0L;
break;
case CURLINFO_RTSP_CLIENT_CSEQ:
*param_longp = data->state.rtsp_next_client_CSeq;
break;
case CURLINFO_RTSP_SERVER_CSEQ:
*param_longp = data->state.rtsp_next_server_CSeq;
break;
case CURLINFO_RTSP_CSEQ_RECV:
*param_longp = data->state.rtsp_CSeq_recv;
break;
case CURLINFO_HTTP_VERSION:
switch(data->info.httpversion) {
case 10:
*param_longp = CURL_HTTP_VERSION_1_0;
break;
case 11:
*param_longp = CURL_HTTP_VERSION_1_1;
break;
case 20:
*param_longp = CURL_HTTP_VERSION_2_0;
break;
case 30:
*param_longp = CURL_HTTP_VERSION_3;
break;
default:
*param_longp = CURL_HTTP_VERSION_NONE;
break;
}
break;
case CURLINFO_PROTOCOL:
*param_longp = data->info.conn_protocol;
break;
default:
return CURLE_UNKNOWN_OPTION;
}
return CURLE_OK;
}
#define DOUBLE_SECS(x) (double)(x)/1000000
static CURLcode getinfo_offt(struct Curl_easy *data, CURLINFO info,
curl_off_t *param_offt)
{
#ifdef DEBUGBUILD
char *timestr = getenv("CURL_TIME");
if(timestr) {
unsigned long val = strtol(timestr, NULL, 10);
switch(info) {
case CURLINFO_TOTAL_TIME_T:
case CURLINFO_NAMELOOKUP_TIME_T:
case CURLINFO_CONNECT_TIME_T:
case CURLINFO_APPCONNECT_TIME_T:
case CURLINFO_PRETRANSFER_TIME_T:
case CURLINFO_STARTTRANSFER_TIME_T:
case CURLINFO_REDIRECT_TIME_T:
case CURLINFO_SPEED_DOWNLOAD_T:
case CURLINFO_SPEED_UPLOAD_T:
*param_offt = (curl_off_t)val;
return CURLE_OK;
default:
break;
}
}
#endif
switch(info) {
case CURLINFO_FILETIME_T:
*param_offt = (curl_off_t)data->info.filetime;
break;
case CURLINFO_SIZE_UPLOAD_T:
*param_offt = data->progress.uploaded;
break;
case CURLINFO_SIZE_DOWNLOAD_T:
*param_offt = data->progress.downloaded;
break;
case CURLINFO_SPEED_DOWNLOAD_T:
*param_offt = data->progress.dlspeed;
break;
case CURLINFO_SPEED_UPLOAD_T:
*param_offt = data->progress.ulspeed;
break;
case CURLINFO_CONTENT_LENGTH_DOWNLOAD_T:
*param_offt = (data->progress.flags & PGRS_DL_SIZE_KNOWN)?
data->progress.size_dl:-1;
break;
case CURLINFO_CONTENT_LENGTH_UPLOAD_T:
*param_offt = (data->progress.flags & PGRS_UL_SIZE_KNOWN)?
data->progress.size_ul:-1;
break;
case CURLINFO_TOTAL_TIME_T:
*param_offt = data->progress.timespent;
break;
case CURLINFO_NAMELOOKUP_TIME_T:
*param_offt = data->progress.t_nslookup;
break;
case CURLINFO_CONNECT_TIME_T:
*param_offt = data->progress.t_connect;
break;
case CURLINFO_APPCONNECT_TIME_T:
*param_offt = data->progress.t_appconnect;
break;
case CURLINFO_PRETRANSFER_TIME_T:
*param_offt = data->progress.t_pretransfer;
break;
case CURLINFO_STARTTRANSFER_TIME_T:
*param_offt = data->progress.t_starttransfer;
break;
case CURLINFO_REDIRECT_TIME_T:
*param_offt = data->progress.t_redirect;
break;
case CURLINFO_RETRY_AFTER:
*param_offt = data->info.retry_after;
break;
default:
return CURLE_UNKNOWN_OPTION;
}
return CURLE_OK;
}
static CURLcode getinfo_double(struct Curl_easy *data, CURLINFO info,
double *param_doublep)
{
#ifdef DEBUGBUILD
char *timestr = getenv("CURL_TIME");
if(timestr) {
unsigned long val = strtol(timestr, NULL, 10);
switch(info) {
case CURLINFO_TOTAL_TIME:
case CURLINFO_NAMELOOKUP_TIME:
case CURLINFO_CONNECT_TIME:
case CURLINFO_APPCONNECT_TIME:
case CURLINFO_PRETRANSFER_TIME:
case CURLINFO_STARTTRANSFER_TIME:
case CURLINFO_REDIRECT_TIME:
case CURLINFO_SPEED_DOWNLOAD:
case CURLINFO_SPEED_UPLOAD:
*param_doublep = (double)val;
return CURLE_OK;
default:
break;
}
}
#endif
switch(info) {
case CURLINFO_TOTAL_TIME:
*param_doublep = DOUBLE_SECS(data->progress.timespent);
break;
case CURLINFO_NAMELOOKUP_TIME:
*param_doublep = DOUBLE_SECS(data->progress.t_nslookup);
break;
case CURLINFO_CONNECT_TIME:
*param_doublep = DOUBLE_SECS(data->progress.t_connect);
break;
case CURLINFO_APPCONNECT_TIME:
*param_doublep = DOUBLE_SECS(data->progress.t_appconnect);
break;
case CURLINFO_PRETRANSFER_TIME:
*param_doublep = DOUBLE_SECS(data->progress.t_pretransfer);
break;
case CURLINFO_STARTTRANSFER_TIME:
*param_doublep = DOUBLE_SECS(data->progress.t_starttransfer);
break;
case CURLINFO_SIZE_UPLOAD:
*param_doublep = (double)data->progress.uploaded;
break;
case CURLINFO_SIZE_DOWNLOAD:
*param_doublep = (double)data->progress.downloaded;
break;
case CURLINFO_SPEED_DOWNLOAD:
*param_doublep = (double)data->progress.dlspeed;
break;
case CURLINFO_SPEED_UPLOAD:
*param_doublep = (double)data->progress.ulspeed;
break;
case CURLINFO_CONTENT_LENGTH_DOWNLOAD:
*param_doublep = (data->progress.flags & PGRS_DL_SIZE_KNOWN)?
(double)data->progress.size_dl:-1;
break;
case CURLINFO_CONTENT_LENGTH_UPLOAD:
*param_doublep = (data->progress.flags & PGRS_UL_SIZE_KNOWN)?
(double)data->progress.size_ul:-1;
break;
case CURLINFO_REDIRECT_TIME:
*param_doublep = DOUBLE_SECS(data->progress.t_redirect);
break;
default:
return CURLE_UNKNOWN_OPTION;
}
return CURLE_OK;
}
static CURLcode getinfo_slist(struct Curl_easy *data, CURLINFO info,
struct curl_slist **param_slistp)
{
union {
struct curl_certinfo *to_certinfo;
struct curl_slist *to_slist;
} ptr;
switch(info) {
case CURLINFO_SSL_ENGINES:
*param_slistp = Curl_ssl_engines_list(data);
break;
case CURLINFO_COOKIELIST:
*param_slistp = Curl_cookie_list(data);
break;
case CURLINFO_CERTINFO:
    /* Return a pointer to the certinfo struct. Not really an slist
pointer but we can pretend it is here */
ptr.to_certinfo = &data->info.certs;
*param_slistp = ptr.to_slist;
break;
case CURLINFO_TLS_SESSION:
case CURLINFO_TLS_SSL_PTR:
{
struct curl_tlssessioninfo **tsip = (struct curl_tlssessioninfo **)
param_slistp;
struct curl_tlssessioninfo *tsi = &data->tsi;
#ifdef USE_SSL
struct connectdata *conn = data->conn;
#endif
*tsip = tsi;
tsi->backend = Curl_ssl_backend();
tsi->internals = NULL;
#ifdef USE_SSL
if(conn && tsi->backend != CURLSSLBACKEND_NONE) {
unsigned int i;
for(i = 0; i < (sizeof(conn->ssl) / sizeof(conn->ssl[0])); ++i) {
if(conn->ssl[i].use) {
tsi->internals = Curl_ssl->get_internals(&conn->ssl[i], info);
break;
}
}
}
#endif
}
break;
default:
return CURLE_UNKNOWN_OPTION;
}
return CURLE_OK;
}
static CURLcode getinfo_socket(struct Curl_easy *data, CURLINFO info,
curl_socket_t *param_socketp)
{
switch(info) {
case CURLINFO_ACTIVESOCKET:
*param_socketp = Curl_getconnectinfo(data, NULL);
break;
default:
return CURLE_UNKNOWN_OPTION;
}
return CURLE_OK;
}
CURLcode Curl_getinfo(struct Curl_easy *data, CURLINFO info, ...)
{
va_list arg;
long *param_longp = NULL;
double *param_doublep = NULL;
curl_off_t *param_offt = NULL;
const char **param_charp = NULL;
struct curl_slist **param_slistp = NULL;
curl_socket_t *param_socketp = NULL;
int type;
CURLcode result = CURLE_UNKNOWN_OPTION;
if(!data)
return result;
va_start(arg, info);
type = CURLINFO_TYPEMASK & (int)info;
switch(type) {
case CURLINFO_STRING:
param_charp = va_arg(arg, const char **);
if(param_charp)
result = getinfo_char(data, info, param_charp);
break;
case CURLINFO_LONG:
param_longp = va_arg(arg, long *);
if(param_longp)
result = getinfo_long(data, info, param_longp);
break;
case CURLINFO_DOUBLE:
param_doublep = va_arg(arg, double *);
if(param_doublep)
result = getinfo_double(data, info, param_doublep);
break;
case CURLINFO_OFF_T:
param_offt = va_arg(arg, curl_off_t *);
if(param_offt)
result = getinfo_offt(data, info, param_offt);
break;
case CURLINFO_SLIST:
param_slistp = va_arg(arg, struct curl_slist **);
if(param_slistp)
result = getinfo_slist(data, info, param_slistp);
break;
case CURLINFO_SOCKET:
param_socketp = va_arg(arg, curl_socket_t *);
if(param_socketp)
result = getinfo_socket(data, info, param_socketp);
break;
default:
break;
}
va_end(arg);
return result;
}
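/*
 * Illustrative usage sketch (not part of this file): applications reach the
 * code above through the public curl_easy_getinfo() entry point, e.g.
 *
 *   long code = 0;
 *   if(curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &code) == CURLE_OK)
 *     printf("HTTP status: %ld\n", code);
 */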
| nyaki-HUN/DESIRE | DESIRE-Modules/Network-curl/Externals/curl/src/getinfo.c | C | bsd-2-clause | 15,969 |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Simulation — OSIM 0.1 documentation</title>
<link rel="stylesheet" href="_static/alabaster.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: './',
VERSION: '0.1',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true,
SOURCELINK_SUFFIX: '.txt'
};
</script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="search.html" />
<link rel="next" title="Optimization" href="Optimization.html" />
<link rel="prev" title="Components" href="Components.html" />
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
<meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
</head>
<body role="document">
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<div class="section" id="simulation">
<h1>Simulation<a class="headerlink" href="#simulation" title="Permalink to this headline">¶</a></h1>
<div class="section" id="module-Simulation.NetToComp">
<span id="simulation-nettocomp"></span><h2>Simulation.NetToComp<a class="headerlink" href="#module-Simulation.NetToComp" title="Permalink to this headline">¶</a></h2>
<p>Use the triangle class to represent triangles.</p>
<dl class="class">
<dt id="Simulation.NetToComp.NetToComp">
<em class="property">class </em><code class="descclassname">Simulation.NetToComp.</code><code class="descname">NetToComp</code><span class="sig-paren">(</span><em>filename</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/Simulation/NetToComp.html#NetToComp"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#Simulation.NetToComp.NetToComp" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <code class="xref py py-class docutils literal"><span class="pre">object</span></code></p>
<p>Beispielkommentar</p>
<dl class="method">
<dt id="Simulation.NetToComp.NetToComp.getCommentsFromNetlist">
<code class="descname">getCommentsFromNetlist</code><span class="sig-paren">(</span><em>netListFile</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/Simulation/NetToComp.html#NetToComp.getCommentsFromNetlist"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#Simulation.NetToComp.NetToComp.getCommentsFromNetlist" title="Permalink to this definition">¶</a></dt>
<dd><p>Create a triangle with sides of lengths <cite>a</cite>, <cite>b</cite>, and <cite>c</cite>.</p>
<p>Raises <cite>ValueError</cite> if the three length values provided cannot
actually form a triangle.</p>
</dd></dl>
<dl class="method">
<dt id="Simulation.NetToComp.NetToComp.getComponents">
<code class="descname">getComponents</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/Simulation/NetToComp.html#NetToComp.getComponents"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#Simulation.NetToComp.NetToComp.getComponents" title="Permalink to this definition">¶</a></dt>
<dd><p>Create a triangle with sides of lengths <cite>a</cite>, <cite>b</cite>, and <cite>c</cite>.</p>
<p>Raises <cite>ValueError</cite> if the three length values provided cannot
actually form a triangle.</p>
</dd></dl>
<dl class="method">
<dt id="Simulation.NetToComp.NetToComp.parseCommentsToArgs">
<code class="descname">parseCommentsToArgs</code><span class="sig-paren">(</span><em>args</em>, <em>commentList</em>, <em>name</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/Simulation/NetToComp.html#NetToComp.parseCommentsToArgs"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#Simulation.NetToComp.NetToComp.parseCommentsToArgs" title="Permalink to this definition">¶</a></dt>
<dd></dd></dl>
<dl class="method">
<dt id="Simulation.NetToComp.NetToComp.stringArrToDict">
<code class="descname">stringArrToDict</code><span class="sig-paren">(</span><em>strArr</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/Simulation/NetToComp.html#NetToComp.stringArrToDict"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#Simulation.NetToComp.NetToComp.stringArrToDict" title="Permalink to this definition">¶</a></dt>
<dd><p>Create a triangle with sides of lengths <cite>a</cite>, <cite>b</cite>, and <cite>c</cite>.</p>
<p>Raises <cite>ValueError</cite> if the three length values provided cannot
actually form a triangle.</p>
</dd></dl>
</dd></dl>
</div>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<h3><a href="index.html">Table Of Contents</a></h3>
<ul>
<li><a class="reference internal" href="#">Simulation</a><ul>
<li><a class="reference internal" href="#module-Simulation.NetToComp">Simulation.NetToComp</a></li>
</ul>
</li>
</ul>
<div class="relations">
<h3>Related Topics</h3>
<ul>
<li><a href="index.html">Documentation overview</a><ul>
<li>Previous: <a href="Components.html" title="previous chapter">Components</a></li>
<li>Next: <a href="Optimization.html" title="next chapter">Optimization</a></li>
</ul></li>
</ul>
</div>
<div role="note" aria-label="source link">
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="_sources/Simulation.rst.txt"
rel="nofollow">Show Source</a></li>
</ul>
</div>
<div id="searchbox" style="display: none" role="search">
<h3>Quick search</h3>
<form class="search" action="search.html" method="get">
<div><input type="text" name="q" /></div>
<div><input type="submit" value="Go" /></div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="footer">
©2017, Tim Maiwald.
|
Powered by <a href="http://sphinx-doc.org/">Sphinx 1.5.4</a>
& <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.9</a>
|
<a href="_sources/Simulation.rst.txt"
rel="nofollow">Page source</a>
</div>
</body>
</html>
| tmaiwald/OSIM | doc/_build/html/Simulation.html | HTML | bsd-2-clause | 7,250
# coding: utf-8
import sys
from setuptools import setup, find_packages
NAME = "pollster"
VERSION = "2.0.2"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil", "pandas >= 0.19.1"]
setup(
name=NAME,
version=VERSION,
description="Pollster API",
author_email="Adam Hooper <[email protected]>",
url="https://github.com/huffpostdata/python-pollster",
keywords=["Pollster API"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""Download election-related polling data from Pollster."""
)
| huffpostdata/python-pollster | setup.py | Python | bsd-2-clause | 756 |
import requests
import logging
import redis
from requests.packages.urllib3.exceptions import ConnectionError
from core.serialisers import json
from dss import localsettings
# TODO([email protected]): refactor these out to
# classes to avoid duplicating constants below
HEADERS = {
'content-type': 'application/json'
}
logger = logging.getLogger('spa')
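# Illustrative usage sketch (hypothetical values, not part of the original
# module). post_notification() returns an empty string on success and the
# realtime server's error text otherwise:
#
#   err = post_notification(session_id, '/static/img/track.png', 'Now playing')
#   if err:
#       logger.warning('notification failed: %s', err)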
def post_notification(session_id, image, message):
try:
payload = {
'sessionid': session_id,
'image': image,
'message': message
}
data = json.dumps(payload)
r = requests.post(
localsettings.REALTIME_HOST + 'notification',
data=data,
headers=HEADERS
)
if r.status_code == 200:
return ""
else:
return r.text
except ConnectionError:
#should probably implement some sort of retry in here
        pass
| fergalmoran/dss | core/realtime/notification.py | Python | bsd-2-clause | 928
unless ENV["HOMEBREW_BREW_FILE"]
raise "HOMEBREW_BREW_FILE was not exported! Please call bin/brew directly!"
end
require "constants"
require "tmpdir"
require "pathname"
HOMEBREW_BREW_FILE = Pathname.new(ENV["HOMEBREW_BREW_FILE"])
TEST_TMPDIR = ENV.fetch("HOMEBREW_TEST_TMPDIR") do |k|
dir = Dir.mktmpdir("homebrew-tests-", ENV["HOMEBREW_TEMP"] || "/tmp")
at_exit { FileUtils.remove_entry(dir) }
ENV[k] = dir
end
# Paths pointing into the Homebrew code base that persist across test runs
HOMEBREW_LIBRARY_PATH = Pathname.new(File.expand_path("../../../..", __FILE__))
HOMEBREW_SHIMS_PATH = HOMEBREW_LIBRARY_PATH.parent+"Homebrew/shims"
HOMEBREW_LOAD_PATH = [File.expand_path("..", __FILE__), HOMEBREW_LIBRARY_PATH].join(":")
# Paths redirected to a temporary directory and wiped at the end of the test run
HOMEBREW_PREFIX = Pathname.new(TEST_TMPDIR).join("prefix")
HOMEBREW_REPOSITORY = HOMEBREW_PREFIX
HOMEBREW_LIBRARY = HOMEBREW_REPOSITORY+"Library"
HOMEBREW_CACHE = HOMEBREW_PREFIX.parent+"cache"
HOMEBREW_CACHE_FORMULA = HOMEBREW_PREFIX.parent+"formula_cache"
HOMEBREW_LINKED_KEGS = HOMEBREW_PREFIX.parent+"linked"
HOMEBREW_PINNED_KEGS = HOMEBREW_PREFIX.parent+"pinned"
HOMEBREW_LOCK_DIR = HOMEBREW_PREFIX.parent+"locks"
HOMEBREW_CELLAR = HOMEBREW_PREFIX.parent+"cellar"
HOMEBREW_LOGS = HOMEBREW_PREFIX.parent+"logs"
HOMEBREW_TEMP = HOMEBREW_PREFIX.parent+"temp"
TEST_FIXTURE_DIR = HOMEBREW_LIBRARY_PATH.join("test", "support", "fixtures")
TESTBALL_SHA1 = "be478fd8a80fe7f29196d6400326ac91dad68c37".freeze
TESTBALL_SHA256 = "91e3f7930c98d7ccfb288e115ed52d06b0e5bc16fec7dce8bdda86530027067b".freeze
TESTBALL_PATCHES_SHA256 = "799c2d551ac5c3a5759bea7796631a7906a6a24435b52261a317133a0bfb34d9".freeze
PATCH_A_SHA256 = "83404f4936d3257e65f176c4ffb5a5b8d6edd644a21c8d8dcc73e22a6d28fcfa".freeze
PATCH_B_SHA256 = "57958271bb802a59452d0816e0670d16c8b70bdf6530bcf6f78726489ad89b90".freeze
LINUX_TESTBALL_SHA256 = "fa7fac451a7c37e74f02e2a425a76aff89106098a55707832a02be5af5071cf8".freeze
TEST_SHA1 = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef".freeze
TEST_SHA256 = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef".freeze
| rwhogg/brew | Library/Homebrew/test/support/lib/config.rb | Ruby | bsd-2-clause | 2,216 |
# Copyright (c) 2015-2016, Ruslan Baratov
# All rights reserved.
include(hunter_apply_copy_rules)
include(hunter_apply_gate_settings)
include(hunter_calculate_self)
include(hunter_create_cache_file)
include(hunter_fatal_error)
include(hunter_internal_error)
include(hunter_sanity_checks)
include(hunter_status_debug)
include(hunter_status_print)
include(hunter_test_string_not_empty)
# Continue initialization of key variables (also see 'hunter_initialize')
# * calculate toolchain-id
# * calculate config-id
macro(hunter_finalize)
# Check preconditions
hunter_sanity_checks()
list(APPEND HUNTER_CACHE_SERVERS "https://github.com/ingenue/hunter-cache")
list(REMOVE_DUPLICATES HUNTER_CACHE_SERVERS)
hunter_status_debug("List of cache servers:")
foreach(_server ${HUNTER_CACHE_SERVERS})
hunter_status_debug(" * ${_server}")
endforeach()
get_property(_enabled_languages GLOBAL PROPERTY ENABLED_LANGUAGES)
list(FIND _enabled_languages "C" _c_enabled_result)
if(_c_enabled_result EQUAL -1)
set(_c_enabled FALSE)
else()
set(_c_enabled TRUE)
endif()
list(FIND _enabled_languages "CXX" _cxx_enabled_result)
if(_cxx_enabled_result EQUAL -1)
set(_cxx_enabled FALSE)
else()
set(_cxx_enabled TRUE)
endif()
if(_c_enabled AND NOT CMAKE_C_ABI_COMPILED)
hunter_fatal_error(
"ABI not detected for C compiler" WIKI "error.abi.detection.failure"
)
endif()
if(_cxx_enabled AND NOT CMAKE_CXX_ABI_COMPILED)
hunter_fatal_error(
"ABI not detected for CXX compiler" WIKI "error.abi.detection.failure"
)
endif()
string(COMPARE NOTEQUAL "$ENV{HUNTER_BINARY_DIR}" "" _env_not_empty)
if(_env_not_empty)
get_filename_component(HUNTER_BINARY_DIR "$ENV{HUNTER_BINARY_DIR}" ABSOLUTE)
hunter_status_debug("HUNTER_BINARY_DIR: ${HUNTER_BINARY_DIR}")
endif()
# * Read HUNTER_GATE_* variables
# * Check cache HUNTER_* variables is up-to-date
# * Update cache if needed
hunter_apply_gate_settings()
string(SUBSTRING "${HUNTER_SHA1}" 0 7 HUNTER_ID)
string(SUBSTRING "${HUNTER_CONFIG_SHA1}" 0 7 HUNTER_CONFIG_ID)
string(SUBSTRING "${HUNTER_TOOLCHAIN_SHA1}" 0 7 HUNTER_TOOLCHAIN_ID)
set(HUNTER_ID_PATH "${HUNTER_CACHED_ROOT}/_Base/${HUNTER_ID}")
set(HUNTER_CONFIG_ID_PATH "${HUNTER_ID_PATH}/${HUNTER_CONFIG_ID}")
set(
HUNTER_TOOLCHAIN_ID_PATH
"${HUNTER_CONFIG_ID_PATH}/${HUNTER_TOOLCHAIN_ID}"
)
set(HUNTER_INSTALL_PREFIX "${HUNTER_TOOLCHAIN_ID_PATH}/Install")
list(APPEND CMAKE_PREFIX_PATH "${HUNTER_INSTALL_PREFIX}")
if(ANDROID)
# OpenCV support: https://github.com/ruslo/hunter/issues/153
list(APPEND CMAKE_PREFIX_PATH "${HUNTER_INSTALL_PREFIX}/sdk/native/jni")
endif()
list(APPEND CMAKE_FIND_ROOT_PATH "${HUNTER_INSTALL_PREFIX}")
hunter_status_print("HUNTER_ROOT: ${HUNTER_CACHED_ROOT}")
hunter_status_debug("HUNTER_TOOLCHAIN_ID_PATH: ${HUNTER_TOOLCHAIN_ID_PATH}")
hunter_status_debug(
"HUNTER_CONFIGURATION_TYPES: ${HUNTER_CACHED_CONFIGURATION_TYPES}"
)
set(_id_info "[ Hunter-ID: ${HUNTER_ID} |")
set(_id_info "${_id_info} Config-ID: ${HUNTER_CONFIG_ID} |")
set(_id_info "${_id_info} Toolchain-ID: ${HUNTER_TOOLCHAIN_ID} ]")
hunter_status_print("${_id_info}")
set(HUNTER_CACHE_FILE "${HUNTER_TOOLCHAIN_ID_PATH}/cache.cmake")
hunter_create_cache_file("${HUNTER_CACHE_FILE}")
if(MSVC)
include(hunter_setup_msvc)
hunter_setup_msvc()
endif()
### Disable package registry
### http://www.cmake.org/cmake/help/v3.1/manual/cmake-packages.7.html#disabling-the-package-registry
set(CMAKE_EXPORT_NO_PACKAGE_REGISTRY ON)
set(CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY ON)
set(CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY ON)
### -- end
### Disable environment variables
### http://www.cmake.org/cmake/help/v3.3/command/find_package.html
set(ENV{CMAKE_PREFIX_PATH} "")
set(ENV{CMAKE_FRAMEWORK_PATH} "")
set(ENV{CMAKE_APPBUNDLE_PATH} "")
### -- end
### 1. Clear all '<NAME>_ROOT' variables (cache, environment, ...)
### 2. Set '<NAME>_ROOT' or 'HUNTER_<name>_VERSION' variables
set(HUNTER_ALLOW_CONFIG_LOADING YES)
include("${HUNTER_CONFIG_ID_PATH}/config.cmake")
set(HUNTER_ALLOW_CONFIG_LOADING NO)
hunter_test_string_not_empty("${HUNTER_INSTALL_PREFIX}")
hunter_test_string_not_empty("${CMAKE_BINARY_DIR}")
file(
WRITE
"${CMAKE_BINARY_DIR}/_3rdParty/Hunter/install-root-dir"
"${HUNTER_INSTALL_PREFIX}"
)
hunter_apply_copy_rules()
if(ANDROID AND CMAKE_VERSION VERSION_LESS "3.7.1")
hunter_user_error(
"CMake version 3.7.1+ required for Android platforms, see"
" https://docs.hunter.sh/en/latest/quick-start/cmake.html"
)
endif()
# Android GDBSERVER moved to
# https://github.com/hunter-packages/android-apk/commit/32531adeb287d3e3b20498ff1a0f76336cbe0551
# Fix backslashed provided by user:
# * https://github.com/ruslo/hunter/issues/693
# Note: we can't use 'get_filename_component(... ABSOLUTE)' because sometimes
# original path expected. E.g. NMake build:
# * https://ci.appveyor.com/project/ingenue/hunter/build/1.0.1412/job/o8a21ue85ivt5d0p
string(REPLACE "\\" "\\\\" CMAKE_MAKE_PROGRAM "${CMAKE_MAKE_PROGRAM}")
endmacro()
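# Illustrative call site (sketch; the exact wiring is an assumption about
# Hunter's own modules): the gate/init code is expected to run roughly
#
#   hunter_initialize()   # read the HUNTER_GATE_* variables
#   hunter_finalize()     # this macro: compute IDs, set HUNTER_INSTALL_PREFIX
#
# after which find_package(<pkg> CONFIG) can resolve packages installed under
# ${HUNTER_INSTALL_PREFIX}.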
| x10mind/hunter | cmake/modules/hunter_finalize.cmake | CMake | bsd-2-clause | 5,210 |
# Copyright (c) 2021, DjaoDjin Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__version__ = '0.6.4-dev'
| djaodjin/djaodjin-deployutils | deployutils/__init__.py | Python | bsd-2-clause | 1,370 |
/*
* Copyright (C) 2005 by egnite Software GmbH. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* For additional information see http://www.ethernut.de/
*/
/*
* $Log$
* Revision 0.01 2009/09/20 ulrichprinz
* First checkin of using DBGU as limited standard USART.
*
*/
//#define NUT_DEPRECATED
#include <cfg/os.h>
#include <cfg/clock.h>
#include <cfg/arch.h>
#include <cfg/uart.h>
#include <string.h>
#include <sys/atom.h>
#include <sys/event.h>
#include <sys/timer.h>
#include <dev/irqreg.h>
#include <dev/debug.h>
#include <arch/cm3.h>
#include <arch/arm/at91_dbgu.h>
#include <dev/usartat91.h>
/*
* Local function prototypes.
*
* Commented functions are not supported by DBGU
*
*/
static uint32_t At91UsartGetSpeed(void);
static int At91UsartSetSpeed(uint32_t rate);
static uint8_t At91UsartGetDataBits(void);
static int At91UsartSetDataBits( uint8_t bits);
static uint8_t At91UsartGetParity(void);
static int At91UsartSetParity(uint8_t mode);
static uint8_t At91UsartGetStopBits(void);
static int At91UsartSetStopBits(uint8_t bits);
static uint32_t At91UsartGetStatus(void);
static int At91UsartSetStatus(uint32_t flags);
static uint8_t At91UsartGetClockMode(void);
static int At91UsartSetClockMode(uint8_t mode);
static void At91UsartTxStart(void);
static void At91UsartRxStart(void);
static int At91UsartInit(void);
static int At91UsartDeinit(void);
static uint32_t At91UsartGetFlowControl(void);
static int At91UsartSetFlowControl(uint32_t flags);
#define sig_DBGU sig_UART
#define DBGU_BASE AT91C_BASE_DBGU
#define US_ID AT91C_ID_DBGU
#define PMC_PCER AT91C_PMC_PCER
#define PMC_PCDR AT91C_PMC_PCDR
extern IRQ_HANDLER sig_DBGU;
/*!
* \addtogroup xgNutArchArmAt91Us
*/
/*@{*/
/*!
* \brief DBGU device control block structure.
*/
static USARTDCB dcb_dbgu = {
0, /* dcb_modeflags */
0, /* dcb_statusflags */
0, /* dcb_rtimeout */
0, /* dcb_wtimeout */
{0, 0, 0, 0, 0, 0, 0, 0}, /* dcb_tx_rbf */
{0, 0, 0, 0, 0, 0, 0, 0}, /* dcb_rx_rbf */
0, /* dbc_last_eol */
At91UsartInit, /* dcb_init */
At91UsartDeinit, /* dcb_deinit */
At91UsartTxStart, /* dcb_tx_start */
At91UsartRxStart, /* dcb_rx_start */
At91UsartSetFlowControl, /* dcb_set_flow_control */
At91UsartGetFlowControl, /* dcb_get_flow_control */
At91UsartSetSpeed, /* dcb_set_speed */
At91UsartGetSpeed, /* dcb_get_speed */
At91UsartSetDataBits, /* dcb_set_data_bits */
At91UsartGetDataBits, /* dcb_get_data_bits */
At91UsartSetParity, /* dcb_set_parity */
At91UsartGetParity, /* dcb_get_parity */
At91UsartSetStopBits, /* dcb_set_stop_bits */
At91UsartGetStopBits, /* dcb_get_stop_bits */
At91UsartSetStatus, /* dcb_set_status */
At91UsartGetStatus, /* dcb_get_status */
At91UsartSetClockMode, /* dcb_set_clock_mode */
At91UsartGetClockMode, /* dcb_get_clock_mode */
};
/*!
* \name AT91 DBGU Device
*/
/*@{*/
/*!
 * \brief DBGU device information structure.
 *
 * An application must pass a pointer to this structure to
 * NutRegisterDevice() before using the serial communication
 * driver of the AT91's on-chip DBGU.
 *
 * The device is named \b dbgu.
*
* \showinitializer
*/
NUTDEVICE devDebug = {
0, /* Pointer to next device, dev_next. */
{'d', 'b', 'g', 'u', 0, 0, 0, 0, 0}, /* Unique device name, dev_name. */
IFTYP_CHAR, /* Type of device, dev_type. */
AT91C_BASE_DBGU, /* Base address, dev_base (not used). */
0, /* First interrupt number, dev_irq (not used). */
0, /* Interface control block, dev_icb (not used). */
&dcb_dbgu, /* Driver control block, dev_dcb. */
UsartInit, /* Driver initialization routine, dev_init. */
UsartIOCtl, /* Driver specific control function, dev_ioctl. */
UsartRead, /* Read from device, dev_read. */
UsartWrite, /* Write to device, dev_write. */
UsartOpen, /* Open a device or file, dev_open. */
UsartClose, /* Close a device or file, dev_close. */
UsartSize /* Request file size, dev_size. */
};
/*@}*/
/*@}*/
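/*
 * Illustrative registration sketch (assumption, not part of this file):
 *
 *   NutRegisterDevice(&devDebug, 0, 0);
 *   freopen("dbgu", "w", stdout);
 *   printf("Hello via DBGU\n");
 */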
/* Modem control includes hardware handshake. */
/*
* Hardware driven control signals are not available
* with the DBUG unit of most chips.
*/
#undef UART_MODEM_CONTROL
#undef UART_HARDWARE_HANDSHAKE
#define UART_RXTX_PINS (AT91C_PA11_DRXD|AT91C_PA12_DTXD)
#undef UART_HDX_PIN
#undef UART_RTS_PIN
#undef UART_CTS_PIN
#undef UART_MODEM_PINS
#define UART_RXTX_PINS_ENABLE() outr(AT91C_PIOA_PDR, UART_RXTX_PINS)
#define UART_INIT_BAUDRATE 115200
#define USARTn_BASE AT91C_BASE_DBGU
#define US_ID AT91C_ID_DBGU
#define SIG_UART sig_UART
#define dcb_usart dcb_dbgu
#include "usartat91.c"
| Astralix/ethernut32 | nut/arch/cm3/dev/atmel/debug_sam3u.c | C | bsd-2-clause | 6,628 |
global _check_for_key
_check_for_key:
push bp
mov bp, sp
mov ax, word [bp+4]
call os_check_for_key
pop bp
ret
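; Illustrative usage sketch (assumption): with the stack frame above, a
; C-style caller pushes one word argument (loaded into AX before the call to
; os_check_for_key) and reads the result back from AX, e.g.
;
;   extern int check_for_key(int);
;   int key = check_for_key(0);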
| I8087/mlib | src/check_for_key.asm | Assembly | bsd-2-clause | 138 |
// --------------------------------------------------------------------------------
// SharpDisasm (File: SharpDisasm\ud_operand_code.cs)
// Copyright (c) 2014-2015 Justin Stenning
// http://spazzarama.com
// https://github.com/spazzarama/SharpDisasm
// https://sharpdisasm.codeplex.com/
//
// SharpDisasm is distributed under the 2-clause "Simplified BSD License".
//
// Portions of SharpDisasm are ported to C# from udis86 a C disassembler project
// also distributed under the terms of the 2-clause "Simplified BSD License" and
// Copyright (c) 2002-2012, Vivek Thampi <[email protected]>
// All rights reserved.
// UDIS86: https://github.com/vmt/udis86
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// --------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace SharpDisasm.Udis86
{
#pragma warning disable 1591
/* operand type constants -- order is important! */
/// <summary>
/// Operand codes
/// </summary>
public enum ud_operand_code {
OP_NONE,
OP_A, OP_E, OP_M, OP_G,
OP_I, OP_F,
OP_R0, OP_R1, OP_R2, OP_R3,
OP_R4, OP_R5, OP_R6, OP_R7,
OP_AL, OP_CL, OP_DL,
OP_AX, OP_CX, OP_DX,
OP_eAX, OP_eCX, OP_eDX,
OP_rAX, OP_rCX, OP_rDX,
OP_ES, OP_CS, OP_SS, OP_DS,
OP_FS, OP_GS,
OP_ST0, OP_ST1, OP_ST2, OP_ST3,
OP_ST4, OP_ST5, OP_ST6, OP_ST7,
OP_J, OP_S, OP_O,
OP_I1, OP_I3, OP_sI,
OP_V, OP_W, OP_Q, OP_P,
OP_U, OP_N, OP_MU, OP_H,
OP_L,
OP_R, OP_C, OP_D,
OP_MR
}
#pragma warning restore 1591
}
| Raptur/SharpDisasmGUI | SharpDisasm/Udis86/ud_operand_code.cs | C# | bsd-2-clause | 3,187 |
# Contacts and Contributors
This section lists important contacts for the project in the different know-how areas. It is also our know-how management list.
## Active Contacts
Topic | Contacts
:---|:---
Product Management | Joakim Bucher, Patrick Schweizer
Business Analysis | Markus Flückiger, Stephan Koch
Interaction Design and UX | Markus Flückiger
Requirements Engineering | Stephan Koch, Patrick Schweizer, Markus Flückiger
Business Consulting | Christoph Hauert
Software Architecture | Joakim Bucher, Rolf Bruderer, Patrick Schweizer
Development | Joakim Bucher, Martin Leimer, Patrick Schweizer, Rolf Bruderer, ...
Build System | ???
BDD Testing with Spec Flow in C# | Joakim Bucher
BDD Testing with JBehave in Java | Martin Leimer
Markdown Tooling | Rolf Bruderer
Scenarioo | Rolf Bruderer, Patrick Schweizer
This list should help you quickly find a contact for a specific know-how area. It also helps to quickly identify in which areas we have enough contributors with know-how, and where we should try to do more know-how transfer during our work.
## Alumni Contributors
These are the former contributors who have left the team, with their most important know-how areas. It serves to remember all contributors who gave valuable input, in case we need to contact them with later questions, and also to know who to invite for any "Project Success Event" ;-)
Name | Contributions | Year of Leave
:---|:---|:---
Joram Zimmermann | Conducting first Spec-by-Ex Workshops, Focus Group Kickoff | 2015
| scenarioo/scenarioo-spec-by-example | heatclinic/docs/contacts.md | Markdown | bsd-2-clause | 1,498 |
# Copyright 2014 Dev in Cachu authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.views.decorators import csrf
from django.views.generic import base
from django.contrib import admin
admin.autodiscover()
from devincachu.destaques import views as dviews
from devincachu.inscricao import views as iviews
from devincachu.palestras import views as pviews
p = patterns
urlpatterns = p("",
url(r"^admin/", include(admin.site.urls)),
url(r"^palestrantes/$",
pviews.PalestrantesView.as_view(),
name="palestrantes"),
url(r"^programacao/$",
pviews.ProgramacaoView.as_view(),
name="programacao"),
url(r"^programacao/(?P<palestrantes>.*)/(?P<slug>[\w-]+)/$",
pviews.PalestraView.as_view(),
name="palestra"),
url(r"^inscricao/$",
iviews.Inscricao.as_view(),
name="inscricao"),
url(r"^notificacao/$",
csrf.csrf_exempt(iviews.Notificacao.as_view()),
name="notificacao"),
url(r"^certificado/validar/$",
iviews.ValidacaoCertificado.as_view(),
name="validacao_certificado"),
url(r"^certificado/$",
iviews.BuscarCertificado.as_view(),
name="busca_certificado"),
url(r"^certificado/(?P<slug>[0-9a-f]+)/$",
iviews.Certificado.as_view(),
name="certificado"),
url(r"^sobre/$",
base.TemplateView.as_view(
template_name="sobre.html",
),
name="sobre-o-evento"),
url(r"^quando-e-onde/$",
base.TemplateView.as_view(
template_name="quando-e-onde.html",
),
name="quando-e-onde"),
url(r"^$", dviews.IndexView.as_view(), name="index"),
)
if settings.DEBUG:
urlpatterns += patterns("",
url(r"^media/(?P<path>.*)$",
"django.views.static.serve",
{"document_root": settings.MEDIA_ROOT}),
)
| devincachu/devincachu-2014 | devincachu/urls.py | Python | bsd-2-clause | 2,555 |
{-# LANGUAGE TupleSections #-}
module Arbitrary.TestModule where
import Data.Integrated.TestModule
import Test.QuickCheck
import Data.ModulePath
import Control.Applicative
import Arbitrary.Properties
import Test.Util
import Filesystem.Path.CurrentOS
import Prelude hiding (FilePath)
import qualified Arbitrary.ModulePath as MP
import qualified Data.Set as S
testModulePath :: Gen Char -> S.Set ModulePath -> Gen ModulePath
testModulePath subpath avoided =
suchThat
(fromModPath <$> MP.toModulePath subpath)
(not . flip S.member avoided)
where
fromModPath :: ModulePath -> ModulePath
fromModPath (ModulePath pth) =
ModulePath $ take (length pth - 1) pth ++ [testFormat $ last pth]
toTestModule :: ModulePath -> Gen TestModule
toTestModule mp = do
props <- arbitrary :: Gen Properties
return $ TestModule mp (list props)
-- Generate a random test file, care must be taken to avoid generating
-- the same path twice
toGenerated :: Gen Char -> S.Set ModulePath -> Gen (FilePath, TestModule)
toGenerated subpath avoided = do
mp <- testModulePath subpath avoided
(relPath mp,) <$> toTestModule mp
| jfeltz/tasty-integrate | tests/Arbitrary/TestModule.hs | Haskell | bsd-2-clause | 1,129 |
# -*- coding: utf-8 -*-
class Pkg
@pkgDir
@jailDir
def self.init()
@pkgDir = System.getConf("pkgDir")
@jailDir = System.getConf("jailDir")
end
def self.main(data)
if (data["control"] == "search") then
search(data["name"]).each_line do |pname|
pname = pname.chomp
SendMsg.machine("pkg","search",pname)
end
elsif(data["control"] == "list") then
pkg = list("all")
pkg.each do |pname|
SendMsg.machine("pkg","list",pname[0])
end
elsif(data["control"] == "install") then
cmdLog,cause = install(data["name"])
if(cmdLog == false)
SendMsg.status(MACHINE,"failed",cause)
return
else
SendMsg.status(MACHINE,"success","完了しました。")
end
end
end
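	# Illustrative usage sketch (hypothetical package name; SendMsg, SQL and
	# System are assumed to be provided elsewhere in this project):
	#
	#   Pkg.init
	#   Pkg.main({"control" => "install", "name" => "nginx"})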
def self.add(jname,pname)
puts "pkg-static -j #{jname} add /pkg/#{pname}.txz"
s,e = Open3.capture3("pkg-static -j #{jname} add /pkg/#{pname}.txz")
end
	def self.search(pname) # run this on the host side
s,e = Open3.capture3("pkg-static search #{pname}")
return s
end
def self.download(pname)
flag = false
pkgVal = 0
now = 1
IO.popen("echo y|pkg-static fetch -d #{pname}") do |pipe|
pipe.each do | line |
# puts line
if(line.include?("New packages to be FETCHED:")) then #ダウンロードするパッケージの数を計算(NEw packages〜からThe process〜までの行数)
flag = true
end
if(line.include?("The process will require")) then
pkgVal -= 2
flag = false
end
if(flag == true) then
pkgVal += 1
end
if(line.include?("Fetching")) then
if(line.include?("Proceed with fetching packages? [y/N]: ")) then
line.gsub!("Proceed with fetching packages? [y/N]: ","")
end
SendMsg.status(MACHINE,"log","#{line}(#{now}/#{pkgVal})")
now += 1
end
end
end
cmdLog,e = Open3.capture3("ls #{@jailDir}/sharedfs/pkg")
s,e = Open3.capture3("cp -pn #{@pkgDir}/* #{@jailDir}/sharedfs/pkg/") #sharedfsにコピー(qjail)
cmdLog2,e = Open3.capture3("ls #{@jailDir}/sharedfs/pkg")
=begin
		if(cmdLog == cmdLog2) # ls output is captured before and after the download; identical listings mean the download failed (no new files)
puts ("pkgcopyerror")
return false,"pkgcopy"
end
=end
end
	def self.install(pname) # run this on the host side
cmdLog, cause = download(pname)
SendMsg.status(MACHINE,"report","pkgdownload")
cmdLog,e = Open3.capture3("ls #{@jailDir}/sharedfs/pkg/#{pname}.txz")
		cmdLog = cmdLog.chomp # strip the trailing newline
if(cmdLog != "#{@jailDir}/sharedfs/pkg/#{pname}.txz")
return false,"copy"
end
SendMsg.status(MACHINE,"report","pkgcopy")
nextid = SQL.select("pkg","maxid") + 1
SQL.insert("pkg",pname)
		sqlid = SQL.select("pkg",nextid)[0] # get the ID of the pkg row just created
		if (sqlid != nextid ) then # if sqlid is not nextid (probably nextid-1), the database row was not created properly
return false,"database"
end
return true
end
def self.list(mode)
if (mode == "all") then
return SQL.sql("select name from pkg")
end
end
end
| shutingrz/gvitocha | bin/pkg.rb | Ruby | bsd-2-clause | 3,178
#!/usr/bin/env python
#*********************************************************************
# Software License Agreement (BSD License)
#
# Copyright (c) 2011 andrewtron3000
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Willow Garage nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#********************************************************************/
import roslib; roslib.load_manifest('face_detection')
import rospy
import sys
import cv
from cv_bridge import CvBridge
from sensor_msgs.msg import Image
from geometry_msgs.msg import Point
from geometry_msgs.msg import PointStamped
#
# Instantiate a new opencv to ROS bridge adaptor
#
cv_bridge = CvBridge()
#
# Define the callback that will be called when a new image is received.
#
def callback(publisher, coord_publisher, cascade, imagemsg):
#
# Convert the ROS imagemsg to an opencv image.
#
image = cv_bridge.imgmsg_to_cv(imagemsg, 'mono8')
#
# Blur the image.
#
cv.Smooth(image, image, cv.CV_GAUSSIAN)
#
# Allocate some storage for the haar detect operation.
#
storage = cv.CreateMemStorage(0)
#
# Call the face detector function.
#
faces = cv.HaarDetectObjects(image, cascade, storage, 1.2, 2,
cv.CV_HAAR_DO_CANNY_PRUNING, (100,100))
#
# If faces are detected, compute the centroid of all the faces
# combined.
#
face_centroid_x = 0.0
face_centroid_y = 0.0
if len(faces) > 0:
#
# For each face, draw a rectangle around it in the image,
# and also add the position of the face to the centroid
# of all faces combined.
#
for (i, n) in faces:
x = int(i[0])
y = int(i[1])
width = int(i[2])
height = int(i[3])
cv.Rectangle(image,
(x, y),
(x + width, y + height),
cv.CV_RGB(0,255,0), 3, 8, 0)
face_centroid_x += float(x) + (float(width) / 2.0)
face_centroid_y += float(y) + (float(height) / 2.0)
#
# Finish computing the face_centroid by dividing by the
# number of faces found above.
#
face_centroid_x /= float(len(faces))
face_centroid_y /= float(len(faces))
#
# Lastly, if faces were detected, publish a PointStamped
# message that contains the centroid values.
#
pt = Point(x = face_centroid_x, y = face_centroid_y, z = 0.0)
pt_stamped = PointStamped(point = pt)
coord_publisher.publish(pt_stamped)
#
# Convert the opencv image back to a ROS image using the
# cv_bridge.
#
newmsg = cv_bridge.cv_to_imgmsg(image, 'mono8')
#
# Republish the image. Note this image has boxes around
# faces if faces were found.
#
publisher.publish(newmsg)
def listener(publisher, coord_publisher):
rospy.init_node('face_detector', anonymous=True)
#
# Load the haar cascade. Note we get the
# filename from the "classifier" parameter
# that is configured in the launch script.
#
cascadeFileName = rospy.get_param("~classifier")
cascade = cv.Load(cascadeFileName)
rospy.Subscriber("/stereo/left/image_rect",
Image,
lambda image: callback(publisher, coord_publisher, cascade, image))
rospy.spin()
# This is called first.
if __name__ == '__main__':
publisher = rospy.Publisher('face_view', Image)
coord_publisher = rospy.Publisher('face_coords', PointStamped)
listener(publisher, coord_publisher)
| andrewtron3000/hacdc-ros-pkg | face_detection/src/detector.py | Python | bsd-2-clause | 5,077 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import urllib
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
from jp.ac.kyoto_su.aokilab.dragon.mvc.model import OpenGLModel
from jp.ac.kyoto_su.aokilab.dragon.mvc.view import *
from jp.ac.kyoto_su.aokilab.dragon.opengl.triangle import OpenGLTriangle
from jp.ac.kyoto_su.aokilab.dragon.opengl.polygon import OpenGLPolygon
TRACE = True
DEBUG = False
class DragonModel(OpenGLModel):
"""ドラゴンのモデル。"""
def __init__(self):
"""ドラゴンのモデルのコンストラクタ。"""
if TRACE: print(__name__), self.__init__.__doc__
super(DragonModel, self).__init__()
self._eye_point = [-5.5852450791872 , 3.07847342734 , 15.794105252496]
self._sight_point = [0.27455347776413 , 0.20096999406815 , -0.11261999607086]
self._up_vector = [0.1018574904194 , 0.98480906061847 , -0.14062775604137]
self._fovy = self._default_fovy = 12.642721790235
filename = os.path.join(os.getcwd(), 'dragon.txt')
if os.path.exists(filename) and os.path.isfile(filename):
pass
else:
url = 'http://www.cc.kyoto-su.ac.jp/~atsushi/Programs/Dragon/dragon.txt'
urllib.urlretrieve(url, filename)
with open(filename, "rU") as a_file:
while True:
a_string = a_file.readline()
if len(a_string) == 0: break
a_list = a_string.split()
if len(a_list) == 0: continue
first_string = a_list[0]
if first_string == "number_of_vertexes":
number_of_vertexes = int(a_list[1])
if first_string == "number_of_triangles":
number_of_triangles = int(a_list[1])
if first_string == "end_header":
get_tokens = (lambda file: file.readline().split())
collection_of_vertexes = []
for n_th in range(number_of_vertexes):
a_list = get_tokens(a_file)
a_vertex = map(float, a_list[0:3])
collection_of_vertexes.append(a_vertex)
index_to_vertex = (lambda index: collection_of_vertexes[index-1])
for n_th in range(number_of_triangles):
a_list = get_tokens(a_file)
indexes = map(int, a_list[0:3])
vertexes = map(index_to_vertex, indexes)
a_tringle = OpenGLTriangle(*vertexes)
self._display_object.append(a_tringle)
return
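    # Sketch of the dragon.txt layout that the parsing loop above assumes
    # (inferred from the code, not from a separate format specification):
    #
    #   number_of_vertexes <N>
    #   number_of_triangles <M>
    #   end_header
    #   <x> <y> <z>        (one line per vertex, N lines)
    #   <i1> <i2> <i3>     (one line per triangle, M lines, 1-based indexes)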
def default_window_title(self):
"""ドラゴンのウィンドウのタイトル(ラベル)を応答する。"""
if TRACE: print(__name__), self.default_window_title.__doc__
return "Dragon"
class WaspModel(OpenGLModel):
"""スズメバチのモデル。"""
def __init__(self):
"""スズメバチのモデルのコンストラクタ。"""
if TRACE: print(__name__), self.__init__.__doc__
super(WaspModel, self).__init__()
self._eye_point = [-5.5852450791872 , 3.07847342734 , 15.794105252496]
self._sight_point = [0.19825005531311 , 1.8530999422073 , -0.63795006275177]
self._up_vector = [0.070077999093727 , 0.99630606032682 , -0.049631725731267]
self._fovy = self._default_fovy = 41.480099231656
filename = os.path.join(os.getcwd(), 'wasp.txt')
if os.path.exists(filename) and os.path.isfile(filename):
pass
else:
url = 'http://www.cc.kyoto-su.ac.jp/~atsushi/Programs/Wasp/wasp.txt'
urllib.urlretrieve(url, filename)
with open(filename, "rU") as a_file:
while True:
a_string = a_file.readline()
if len(a_string) == 0: break
a_list = a_string.split()
if len(a_list) == 0: continue
first_string = a_list[0]
if first_string == "number_of_vertexes":
number_of_vertexes = int(a_list[1])
if first_string == "number_of_polygons":
number_of_polygons = int(a_list[1])
if first_string == "end_header":
get_tokens = (lambda file: file.readline().split())
collection_of_vertexes = []
for n_th in range(number_of_vertexes):
a_list = get_tokens(a_file)
a_vertex = map(float, a_list[0:3])
collection_of_vertexes.append(a_vertex)
index_to_vertex = (lambda index: collection_of_vertexes[index-1])
for n_th in range(number_of_polygons):
a_list = get_tokens(a_file)
number_of_indexes = int(a_list[0])
index = number_of_indexes + 1
indexes = map(int, a_list[1:index])
vertexes = map(index_to_vertex, indexes)
rgb_color = map(float, a_list[index:index+3])
a_polygon = OpenGLPolygon(vertexes, rgb_color)
self._display_object.append(a_polygon)
return
def default_view_class(self):
"""スズメバチのモデルを表示するデフォルトのビューのクラスを応答する。"""
if TRACE: print(__name__), self.default_view_class.__doc__
return WaspView
def default_window_title(self):
"""スズメバチのウィンドウのタイトル(ラベル)を応答する。"""
if TRACE: print(__name__), self.default_window_title.__doc__
return "Wasp"
class BunnyModel(OpenGLModel):
"""うさぎのモデル。"""
def __init__(self):
"""うさぎのモデルのコンストラクタ。"""
if TRACE: print(__name__), self.__init__.__doc__
super(BunnyModel, self).__init__()
filename = os.path.join(os.getcwd(), 'bunny.ply')
if os.path.exists(filename) and os.path.isfile(filename):
pass
else:
url = 'http://www.cc.kyoto-su.ac.jp/~atsushi/Programs/Bunny/bunny.ply'
urllib.urlretrieve(url, filename)
with open(filename, "rU") as a_file:
while True:
a_string = a_file.readline()
if len(a_string) == 0: break
a_list = a_string.split()
if len(a_list) == 0: continue
first_string = a_list[0]
if first_string == "element":
second_string = a_list[1]
if second_string == "vertex":
number_of_vertexes = int(a_list[2])
if second_string == "face":
number_of_faces = int(a_list[2])
if first_string == "end_header":
get_tokens = (lambda file: file.readline().split())
collection_of_vertexes = []
for n_th in range(number_of_vertexes):
a_list = get_tokens(a_file)
a_vertex = map(float, a_list[0:3])
collection_of_vertexes.append(a_vertex)
index_to_vertex = (lambda index: collection_of_vertexes[index])
for n_th in range(number_of_faces):
a_list = get_tokens(a_file)
indexes = map(int, a_list[1:4])
vertexes = map(index_to_vertex, indexes)
						a_triangle = OpenGLTriangle(*vertexes)
						self._display_object.append(a_triangle)
if first_string == "comment":
second_string = a_list[1]
if second_string == "eye_point_xyz":
self._eye_point = map(float, a_list[2:5])
if second_string == "sight_point_xyz":
self._sight_point = map(float, a_list[2:5])
if second_string == "up_vector_xyz":
self._up_vector = map(float, a_list[2:5])
if second_string == "zoom_height" and a_list[3] == "fovy":
self._fovy = self._default_fovy = float(a_list[4])
return
def default_view_class(self):
"""うさぎのモデルを表示するデフォルトのビューのクラスを応答する。"""
if TRACE: print(__name__), self.default_view_class.__doc__
return BunnyView
def default_window_title(self):
"""うさぎのウィンドウのタイトル(ラベル)を応答する。"""
if TRACE: print(__name__), self.default_window_title.__doc__
return "Stanford Bunny"
# end of file | frederica07/Dragon_Programming_Process | jp/ac/kyoto_su/aokilab/dragon/example_model/example.py | Python | bsd-2-clause | 7,196 |
# Copyright 2015-2017 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
from lino.core.roles import UserRole
class SimpleContactsUser(UserRole):
pass
class ContactsUser(SimpleContactsUser):
pass
class ContactsStaff(ContactsUser):
pass
| khchine5/xl | lino_xl/lib/contacts/roles.py | Python | bsd-2-clause | 269 |
/*
* block.h -- block transfer
*
* Copyright (C) 2010-2012,2014-2015 Olaf Bergmann <[email protected]>
*
* SPDX-License-Identifier: BSD-2-Clause
*
* This file is part of the CoAP library libcoap. Please see README for terms
* of use.
*/
#ifndef COAP_BLOCK_H_
#define COAP_BLOCK_H_
#include "encode.h"
#include "option.h"
#include "pdu.h"
/**
* @defgroup block Block Transfer
* API functions for handling PDUs using CoAP BLOCK options
* @{
*/
#ifndef COAP_MAX_BLOCK_SZX
/**
* The largest value for the SZX component in a Block option.
*/
#define COAP_MAX_BLOCK_SZX 6
#endif /* COAP_MAX_BLOCK_SZX */
/**
* Structure of Block options.
*/
typedef struct {
unsigned int num; /**< block number */
unsigned int m:1; /**< 1 if more blocks follow, 0 otherwise */
unsigned int szx:3; /**< block size */
} coap_block_t;
#define COAP_BLOCK_USE_LIBCOAP 0x01 /* Use libcoap to do block requests */
#define COAP_BLOCK_SINGLE_BODY 0x02 /* Deliver the data as a single body */
/**
* Returns the value of the least significant byte of a Block option @p opt.
* For zero-length options (i.e. num == m == szx == 0), COAP_OPT_BLOCK_LAST
* returns @c NULL.
*/
#define COAP_OPT_BLOCK_LAST(opt) \
(coap_opt_length(opt) ? (coap_opt_value(opt) + (coap_opt_length(opt)-1)) : 0)
/** Returns the value of the More-bit of a Block option @p opt. */
#define COAP_OPT_BLOCK_MORE(opt) \
(coap_opt_length(opt) ? (*COAP_OPT_BLOCK_LAST(opt) & 0x08) : 0)
/** Returns the value of the SZX-field of a Block option @p opt. */
#define COAP_OPT_BLOCK_SZX(opt) \
(coap_opt_length(opt) ? (*COAP_OPT_BLOCK_LAST(opt) & 0x07) : 0)
/**
* Returns the value of field @c num in the given block option @p block_opt.
*/
unsigned int coap_opt_block_num(const coap_opt_t *block_opt);
/**
* Checks if more than @p num blocks are required to deliver @p data_len
* bytes of data for a block size of 1 << (@p szx + 4).
*/
COAP_STATIC_INLINE int
coap_more_blocks(size_t data_len, unsigned int num, uint16_t szx) {
return ((num+1) << (szx + 4)) < data_len;
}
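/*
 * Worked example (illustrative, not part of the API): with szx = 6 the block
 * size is 1 << (6 + 4) = 1024 bytes. For a 4000-byte body, blocks 0..2 are
 * followed by more data (coap_more_blocks(4000, num, 6) is non-zero for
 * num < 3), while block 3 carries the final 928 bytes, so
 * coap_more_blocks(4000, 3, 6) evaluates to 0.
 */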
#if 0
/** Sets the More-bit in @p block_opt */
COAP_STATIC_INLINE void
coap_opt_block_set_m(coap_opt_t *block_opt, int m) {
if (m)
*(coap_opt_value(block_opt) + (coap_opt_length(block_opt) - 1)) |= 0x08;
else
*(coap_opt_value(block_opt) + (coap_opt_length(block_opt) - 1)) &= ~0x08;
}
#endif
/**
* Initializes @p block from @p pdu. @p number must be either COAP_OPTION_BLOCK1
* or COAP_OPTION_BLOCK2. When option @p number was found in @p pdu, @p block is
* initialized with values from this option and the function returns the value
* @c 1. Otherwise, @c 0 is returned.
*
* @param pdu The pdu to search for option @p number.
* @param number The option number to search for (must be COAP_OPTION_BLOCK1 or
* COAP_OPTION_BLOCK2).
 * @param block  The block structure to initialize.
*
* @return @c 1 on success, @c 0 otherwise.
*/
int coap_get_block(const coap_pdu_t *pdu, coap_option_num_t number,
coap_block_t *block);
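/*
 * Minimal usage sketch (hypothetical handler code, not part of this header):
 * check whether the peer asked for a specific block of a large response.
 * COAP_OPTION_BLOCK2 is assumed to be provided by pdu.h.
 *
 *   coap_block_t block;
 *   if (coap_get_block(request, COAP_OPTION_BLOCK2, &block)) {
 *     // peer wants block 'block.num', block size is 1 << (block.szx + 4)
 *   }
 */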
/**
* Writes a block option of type @p number to message @p pdu. If the requested
* block size is too large to fit in @p pdu, it is reduced accordingly. An
* exception is made for the final block when less space is required. The actual
* length of the resource is specified in @p data_length.
*
* This function may change *block to reflect the values written to @p pdu. As
 * the function takes into consideration the remaining space in @p pdu, no more
* options should be added after coap_write_block_opt() has returned.
*
* @param block The block structure to use. On return, this object is
* updated according to the values that have been written to
* @p pdu.
* @param number COAP_OPTION_BLOCK1 or COAP_OPTION_BLOCK2.
* @param pdu The message where the block option should be written.
* @param data_length The length of the actual data that will be added the @p
* pdu by calling coap_add_block().
*
* @return @c 1 on success, or a negative value on error.
*/
int coap_write_block_opt(coap_block_t *block,
coap_option_num_t number,
coap_pdu_t *pdu,
size_t data_length);
/**
* Adds the @p block_num block of size 1 << (@p block_szx + 4) from source @p
* data to @p pdu.
*
* @param pdu The message to add the block.
* @param len The length of @p data.
* @param data The source data to fill the block with.
* @param block_num The actual block number.
* @param block_szx Encoded size of block @p block_number.
*
* @return @c 1 on success, @c 0 otherwise.
*/
int coap_add_block(coap_pdu_t *pdu,
size_t len,
const uint8_t *data,
unsigned int block_num,
unsigned char block_szx);
/**
* Re-assemble payloads into a body
*
* @param body_data The pointer to the data for the body holding the
* representation so far or NULL if the first time.
* @param length The length of @p data.
* @param data The payload data to update the body with.
* @param offset The offset of the @p data into the body.
* @param total The estimated total size of the body.
*
* @return The current representation of the body or @c NULL if error.
* If NULL, @p body_data will have been de-allocated.
*/
coap_binary_t *
coap_block_build_body(coap_binary_t *body_data, size_t length,
const uint8_t *data, size_t offset, size_t total);
/**
* Adds the appropriate part of @p data to the @p response pdu. If blocks are
* required, then the appropriate block will be added to the PDU and sent.
* Adds a ETAG option that is the hash of the entire data if the data is to be
* split into blocks
* Used by a request handler.
*
* Note: The application will get called for every packet of a large body to
* process. Consider using coap_add_data_response_large() instead.
*
* @param request The requesting pdu.
* @param response The response pdu.
* @param media_type The format of the data.
 * @param maxage      The maximum life of the data. If @c -1, then there
* is no maxage.
* @param length The total length of the data.
* @param data The entire data block to transmit.
*
*/
void
coap_add_data_blocked_response(const coap_pdu_t *request,
coap_pdu_t *response,
uint16_t media_type,
int maxage,
size_t length,
const uint8_t* data);
/**
* Callback handler for de-allocating the data based on @p app_ptr provided to
* coap_add_data_large_*() functions following transmission of the supplied
* data.
*
* @param session The session that this data is associated with
* @param app_ptr The application provided pointer provided to the
* coap_add_data_large_* functions.
*/
typedef void (*coap_release_large_data_t)(coap_session_t *session,
void *app_ptr);
/**
* Associates given data with the @p pdu that is passed as second parameter.
*
* If all the data can be transmitted in a single PDU, this is functionally
* the same as coap_add_data() except @p release_func (if not NULL) will get
* invoked after data transmission.
*
* Used for a client request.
*
* If the data spans multiple PDUs, then the data will get transmitted using
* BLOCK1 option with the addition of the SIZE1 option.
* The underlying library will handle the transmission of the individual blocks.
* Once the body of data has been transmitted (or a failure occurred), then
* @p release_func (if not NULL) will get called so the application can
* de-allocate the @p data based on @p app_data. It is the responsibility of
* the application not to change the contents of @p data until the data
* transfer has completed.
*
* There is no need for the application to include the BLOCK1 option in the
* @p pdu.
*
* coap_add_data_large_request() (or the alternative coap_add_data_large_*()
* functions) must be called only once per PDU and must be the last PDU update
* before the PDU is transmitted. The (potentially) initial data will get
* transmitted when coap_send() is invoked.
*
* Note: COAP_BLOCK_USE_LIBCOAP must be set by coap_context_set_block_mode()
* for libcoap to work correctly when using this function.
*
* @param session The session to associate the data with.
* @param pdu The PDU to associate the data with.
* @param length The length of data to transmit.
* @param data The data to transmit.
* @param release_func The function to call to de-allocate @p data or @c NULL
* if the function is not required.
* @param app_ptr A Pointer that the application can provide for when
* release_func() is called.
*
* @return @c 1 if addition is successful, else @c 0.
*/
int coap_add_data_large_request(coap_session_t *session,
coap_pdu_t *pdu,
size_t length,
const uint8_t *data,
coap_release_large_data_t release_func,
void *app_ptr);
/**
* Associates given data with the @p response pdu that is passed as fourth
* parameter.
*
* If all the data can be transmitted in a single PDU, this is functionally
* the same as coap_add_data() except @p release_func (if not NULL) will get
* invoked after data transmission. The MEDIA_TYPE, MAXAGE and ETAG options may
* be added in as appropriate.
*
* Used by a server request handler to create the response.
*
* If the data spans multiple PDUs, then the data will get transmitted using
* BLOCK2 (response) option with the addition of the SIZE2 and ETAG
* options. The underlying library will handle the transmission of the
* individual blocks. Once the body of data has been transmitted (or a
* failure occurred), then @p release_func (if not NULL) will get called so the
* application can de-allocate the @p data based on @p app_data. It is the
* responsibility of the application not to change the contents of @p data
* until the data transfer has completed.
*
* There is no need for the application to include the BLOCK2 option in the
* @p pdu.
*
* coap_add_data_large_response() (or the alternative coap_add_data_large*()
* functions) must be called only once per PDU and must be the last PDU update
* before returning from the request handler function.
*
* Note: COAP_BLOCK_USE_LIBCOAP must be set by coap_context_set_block_mode()
* for libcoap to work correctly when using this function.
*
* @param resource The resource the data is associated with.
* @param session The coap session.
* @param request The requesting pdu.
* @param response The response pdu.
* @param query The query taken from the (original) requesting pdu.
* @param media_type The format of the data.
 * @param maxage      The maximum life of the data. If @c -1, then there
* is no maxage.
* @param etag ETag to use if not 0.
* @param length The total length of the data.
* @param data The entire data block to transmit.
* @param release_func The function to call to de-allocate @p data or NULL if
* the function is not required.
* @param app_ptr A Pointer that the application can provide for when
* release_func() is called.
*
* @return @c 1 if addition is successful, else @c 0.
*/
int
coap_add_data_large_response(coap_resource_t *resource,
coap_session_t *session,
const coap_pdu_t *request,
coap_pdu_t *response,
const coap_string_t *query,
uint16_t media_type,
int maxage,
uint64_t etag,
size_t length,
const uint8_t *data,
coap_release_large_data_t release_func,
void *app_ptr);
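/*
 * Minimal usage sketch (illustrative only): a GET handler that lets libcoap
 * split a large body into BLOCK2 responses. The handler name, 'my_body' /
 * 'my_body_len' and the use of COAP_MEDIATYPE_TEXT_PLAIN are assumptions for
 * the example, not definitions from this header.
 *
 *   static void
 *   hnd_get_large(coap_resource_t *resource, coap_session_t *session,
 *                 const coap_pdu_t *request, const coap_string_t *query,
 *                 coap_pdu_t *response) {
 *     coap_pdu_set_code(response, COAP_RESPONSE_CODE_CONTENT);
 *     coap_add_data_large_response(resource, session, request, response,
 *                                  query, COAP_MEDIATYPE_TEXT_PLAIN, -1, 0,
 *                                  my_body_len, my_body, NULL, NULL);
 *   }
 */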
/**
* Set the context level CoAP block handling bits for handling RFC7959.
* These bits flow down to a session when a session is created and if the peer
* does not support something, an appropriate bit may get disabled in the
* session block_mode.
* The session block_mode then flows down into coap_crcv_t or coap_srcv_t where
* again an appropriate bit may get disabled.
*
* Note: This function must be called before the session is set up.
*
* Note: COAP_BLOCK_USE_LIBCOAP must be set if libcoap is to do all the
* block tracking and requesting, otherwise the application will have to do
* all of this work (the default if coap_context_set_block_mode() is not
* called).
*
* @param context The coap_context_t object.
* @param block_mode Zero or more COAP_BLOCK_ or'd options
*/
void coap_context_set_block_mode(coap_context_t *context,
uint8_t block_mode);
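/*
 * Typical setup sketch (illustrative only): enable libcoap's internal block
 * handling and single-body delivery right after creating the context and
 * before any session exists. 'ctx' is assumed to be a coap_context_t *
 * obtained from coap_new_context().
 *
 *   coap_context_set_block_mode(ctx,
 *                               COAP_BLOCK_USE_LIBCOAP | COAP_BLOCK_SINGLE_BODY);
 */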
/**
* Cancel an observe that is being tracked by the client large receive logic.
* (coap_context_set_block_mode() has to be called)
* This will trigger the sending of an observe cancel pdu to the server.
*
* @param session The session that is being used for the observe.
* @param token The original token used to initiate the observation.
* @param message_type The COAP_MESSAGE_ type (NON or CON) to send the observe
* cancel pdu as.
*
* @return @c 1 if observe cancel transmission initiation is successful,
* else @c 0.
*/
int coap_cancel_observe(coap_session_t *session, coap_binary_t *token,
coap_pdu_type_t message_type);
/**@}*/
#endif /* COAP_BLOCK_H_ */
| authmillenon/libcoap | include/coap3/block.h | C | bsd-2-clause | 13,978 |
from django.db.models import Transform
from django.db.models import DateTimeField, TimeField
from django.utils.functional import cached_property
class TimeValue(Transform):
lookup_name = 'time'
function = 'time'
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return 'TIME({})'.format(lhs), params
@cached_property
def output_field(self):
return TimeField()
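# Usage sketch (illustrative only; the model and field names are made up):
# the registration below exposes a "time" transform on DateTimeField, so the
# time-of-day part of a datetime column can be filtered in ORM lookups:
#
#   import datetime
#   Event.objects.filter(created__time__gte=datetime.time(9, 0))
#
# The left-hand side is rendered as TIME(<column>) by as_sql() above.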
DateTimeField.register_lookup(TimeValue) | mivanov-utwente/t4proj | t4proj/apps/stats/models.py | Python | bsd-2-clause | 481 |
/*
Copyright (c) 2012, William Magato
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ''AS IS''
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the copyright holder(s) or contributors.
*/
#include <errno.h>
#include <unistd.h>
#include <sys/types.h>
// define function pointer
typedef off64_t (*llamaos_lseek64_t) (int, off64_t, int);
// function pointer variable
static llamaos_lseek64_t llamaos_lseek64 = 0;
// function called by llamaOS to register pointer
void register_llamaos_lseek64 (llamaos_lseek64_t func)
{
llamaos_lseek64 = func;
}
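/*
 * Registration sketch (illustrative only): llamaOS is expected to install its
 * own implementation during startup. 'llamaos_impl_lseek64' is a made-up name
 * standing in for the real llamaOS file-system entry point.
 *
 *   static off64_t llamaos_impl_lseek64 (int fd, off64_t offset, int whence);
 *
 *   register_llamaos_lseek64 (llamaos_impl_lseek64);
 */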
/* Seek to OFFSET on FD, starting from WHENCE. */
off64_t __libc_lseek64 (int fd, off64_t offset, int whence)
{
if (0 != llamaos_lseek64)
{
if (fd < 0)
{
__set_errno (EBADF);
return -1;
}
switch (whence)
{
case SEEK_SET:
case SEEK_CUR:
case SEEK_END:
break;
default:
__set_errno (EINVAL);
return -1;
}
return llamaos_lseek64 (fd, offset, whence);
}
__set_errno (ENOSYS);
return -1;
}
weak_alias (__libc_lseek64, __lseek64)
weak_alias (__libc_lseek64, lseek64)
| wilseypa/llamaOS | src/sys/glibc-2.17/sysdeps/llamaos/export/lseek64.c | C | bsd-2-clause | 2,517 |
<?php
header('Content-Type: text/html; charset=utf-8');
require_once 'Akna.php';
$user = '';
$pass = '';
$akna = new Akna( $user, $pass );
$contacts = $akna->emailMarketing->contacts;
$messages = $akna->emailMarketing->messages;
$campaigns = $akna->emailMarketing->campaigns;
try {
// $result_1 = $contacts->get('[email protected]', 'Apiki 1');
// var_dump($result_1);
// $result_2 = $contacts->getLists();
// var_dump($result_2);
// $result_3 = $messages->create( array(
// 'nome' => 'Teste',
// 'html' => htmlspecialchars( '<h1>Curso à distância</h1>' )
// ) );
// var_dump($result_3);
// $result_4 = $messages->test( array(
// 'titulo' => 'Teste',
// 'email_remetente' => '[email protected]',
// 'assunto' => 'Teste de envio 15.07',
// 'email' => '[email protected]'
// ) );
// var_dump($result_4);
// $result_5 = $campaigns->addAction( array(
// 'nome' => 'Itaú 6',
// 'mensagem' => 'Teste',
// 'data_encerramento' => '2013-07-30',
// 'nome_remetente' => 'CESP - Relações com Investidores',
// 'email_remetente' => '[email protected]',
// 'email_retorno' => '[email protected]',
// 'assunto' => 'Assunto 1',
// 'lista' => array( 'Apiki 1', 'Apiki 2' ),
// 'agendar' => array( 'datahora' => date( 'Y-m-d H:i:s' ) )
// ) );
// var_dump($result_5);
} catch( Akna_Exception $e ){
echo $e->getmessage();
} | danielantunes/php-akna | index.php | PHP | bsd-2-clause | 1,452 |
/*
* Copyright 2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.initialization;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.gradle.api.internal.project.ProjectIdentifier;
import org.gradle.api.internal.project.ProjectRegistry;
import org.gradle.api.InvalidUserDataException;
import java.io.File;
import java.io.Serializable;
public class ProjectDirectoryProjectSpec extends AbstractProjectSpec implements Serializable {
private final File dir;
public ProjectDirectoryProjectSpec(File dir) {
this.dir = dir;
}
public String getDisplayName() {
return String.format("with project directory '%s'", dir);
}
public boolean isCorresponding(File file) {
return dir.equals(file);
}
protected String formatNoMatchesMessage() {
return String.format("No projects in this build have project directory '%s'.", dir);
}
protected String formatMultipleMatchesMessage(Iterable<? extends ProjectIdentifier> matches) {
return String.format("Multiple projects in this build have project directory '%s': %s", dir, matches);
}
protected boolean select(ProjectIdentifier project) {
return project.getProjectDir().equals(dir);
}
@Override
protected void checkPreconditions(ProjectRegistry<?> registry) {
if (!dir.exists()) {
throw new InvalidUserDataException(String.format("Project directory '%s' does not exist.", dir));
}
if (!dir.isDirectory()) {
throw new InvalidUserDataException(String.format("Project directory '%s' is not a directory.", dir));
}
}
public boolean equals(Object obj) {
return EqualsBuilder.reflectionEquals(this, obj);
}
public int hashCode() {
return HashCodeBuilder.reflectionHashCode(this);
}
}
| tkmnet/RCRS-ADF | gradle/gradle-2.1/src/core/org/gradle/initialization/ProjectDirectoryProjectSpec.java | Java | bsd-2-clause | 2,457 |
// Copyright 2014 David Miller. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package sequtil
import (
"errors"
"github.com/dmiller/go-seq/iseq"
"github.com/dmiller/go-seq/murmur3"
//"fmt"
"math"
"reflect"
)
// Hash returns a hash code for an object.
// Uses iseq.Hashable.Hash if the interface is implemented.
// Otherwise, special cases Go numbers and strings.
// Returns a default value if not covered by these cases.
// Returning a default value is really not good for hashing performance.
// But it is one way not to have an error code and to avoid a panic.
// Use IsHashable to determine if Hash is supported.
// Or call HashE which has an error return.
func Hash(v interface{}) uint32 {
h, err := HashE(v)
if err != nil {
return 0
}
return h
}
// HashE returns a hash code for an object.
// Uses iseq.Hashable.Hash if the interface is implemented.
// It special cases Go numbers and strings.
// Returns an error if the object is not covered by these cases.
func HashE(v interface{}) (uint32, error) {
if h, ok := v.(iseq.Hashable); ok {
return h.Hash(), nil
}
switch v := v.(type) {
case bool, int, int8, int32, int64:
return murmur3.HashUInt64(uint64(reflect.ValueOf(v).Int())), nil
case uint, uint8, uint32, uint64:
return murmur3.HashUInt64(uint64(reflect.ValueOf(v).Uint())), nil
case float32, float64:
return murmur3.HashUInt64(math.Float64bits(reflect.ValueOf(v).Float())), nil
case nil:
return murmur3.HashUInt64(0), nil
case string:
return murmur3.HashString(v), nil
case complex64, complex128:
return HashComplex128(v.(complex128)), nil
}
return 0, errors.New("don't know how to hash")
}
// IsHashable returns true if Hash/HashE can compute a hash for this object.
func IsHashable(v interface{}) bool {
if _, ok := v.(iseq.Hashable); ok {
return true
}
switch v.(type) {
case bool, int, int8, int32, int64,
uint, uint8, uint32, uint64,
float32, float64,
nil,
string,
complex64, complex128:
return true
}
return false
}
// HashSeq computes a hash for an iseq.Seq
func HashSeq(s iseq.Seq) uint32 {
return HashOrdered(s)
}
// HashMap computes a hash for an iseq.PMap
func HashMap(m iseq.PMap) uint32 {
return HashUnordered(m.Seq())
}
// HashOrdered computes a hash for an iseq.Seq, where order is important
func HashOrdered(s iseq.Seq) uint32 {
n := int32(0)
hash := uint32(1)
for ; s != nil; s = s.Next() {
		hash = 31*hash + Hash(s.First())
n++
}
return murmur3.FinalizeCollHash(hash, n)
}
// HashUnordered computes a hash for an iseq.Seq, independent of order of elements
func HashUnordered(s iseq.Seq) uint32 {
n := int32(0)
hash := uint32(0)
for ; s != nil; s = s.Next() {
		hash += Hash(s.First())
n++
}
return murmur3.FinalizeCollHash(hash, n)
}
// HashComplex128 computes a hash for a complex128
func HashComplex128(c complex128) uint32 {
hash := murmur3.MixHash(
murmur3.HashUInt64(math.Float64bits(real(c))),
murmur3.HashUInt64(math.Float64bits(imag(c))))
return murmur3.Finalize(hash, 2)
}
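// Usage sketch (illustrative only): Hash special-cases Go primitives and
// strings, while HashE reports an error for unsupported types, e.g. a struct
// that does not implement iseq.Hashable.
//
//	h1 := Hash("hello")         // same as murmur3.HashString("hello")
//	h2 := Hash(int64(42))       // same as murmur3.HashUInt64(42)
//	_, err := HashE(struct{}{}) // err != nil: "don't know how to hash"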
| dmiller/go-seq | sequtil/hash.go | GO | bsd-2-clause | 3,082 |
// Copyright (C) 2016 André Bargull. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
esid: sec-white-space
description: >
Mongolian Vowel Separator is not recognized as white space.
info: |
11.2 White Space
WhiteSpace ::
<TAB>
<VT>
<FF>
<SP>
<NBSP>
<ZWNBSP>
<USP>
<USP> ::
Other category “Zs” code points
General Category of U+180E is “Cf” (Format).
negative:
phase: parse
type: SyntaxError
features: [u180e]
---*/
throw "Test262: This statement should not be evaluated.";
// U+180E between "var" and "foo"; UTF8(0x180E) = 0xE1 0xA0 0x8E
varfoo;
| sebastienros/jint | Jint.Tests.Test262/test/language/white-space/mongolian-vowel-separator.js | JavaScript | bsd-2-clause | 659 |
/*
* Copyright (C) 2006, 2007, 2009, 2010, 2011, 2012 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include "AffineTransform.h"
#include "CanvasPathMethods.h"
#include "CanvasRenderingContext.h"
#include "CanvasStyle.h"
#include "Color.h"
#include "FloatSize.h"
#include "FontCascade.h"
#include "FontSelectorClient.h"
#include "GraphicsContext.h"
#include "GraphicsTypes.h"
#include "ImageBuffer.h"
#include "Path.h"
#include "PlatformLayer.h"
#include "TextFlags.h"
#include <wtf/Vector.h>
#include <wtf/text/WTFString.h>
namespace WebCore {
class CanvasGradient;
class CanvasPattern;
class DOMPath;
class FloatRect;
class GraphicsContext;
class HTMLCanvasElement;
class HTMLImageElement;
class HTMLVideoElement;
class ImageData;
class TextMetrics;
typedef int ExceptionCode;
class CanvasRenderingContext2D final : public CanvasRenderingContext, public CanvasPathMethods {
public:
CanvasRenderingContext2D(HTMLCanvasElement*, bool usesCSSCompatibilityParseMode, bool usesDashboardCompatibilityMode);
virtual ~CanvasRenderingContext2D();
const CanvasStyle& strokeStyle() const { return state().strokeStyle; }
void setStrokeStyle(CanvasStyle);
const CanvasStyle& fillStyle() const { return state().fillStyle; }
void setFillStyle(CanvasStyle);
float lineWidth() const;
void setLineWidth(float);
String lineCap() const;
void setLineCap(const String&);
String lineJoin() const;
void setLineJoin(const String&);
float miterLimit() const;
void setMiterLimit(float);
const Vector<float>& getLineDash() const;
void setLineDash(const Vector<float>&);
void setWebkitLineDash(const Vector<float>&);
float lineDashOffset() const;
void setLineDashOffset(float);
float webkitLineDashOffset() const;
void setWebkitLineDashOffset(float);
float shadowOffsetX() const;
void setShadowOffsetX(float);
float shadowOffsetY() const;
void setShadowOffsetY(float);
float shadowBlur() const;
void setShadowBlur(float);
String shadowColor() const;
void setShadowColor(const String&);
float globalAlpha() const;
void setGlobalAlpha(float);
String globalCompositeOperation() const;
void setGlobalCompositeOperation(const String&);
void save() { ++m_unrealizedSaveCount; }
void restore();
void scale(float sx, float sy);
void rotate(float angleInRadians);
void translate(float tx, float ty);
void transform(float m11, float m12, float m21, float m22, float dx, float dy);
void setTransform(float m11, float m12, float m21, float m22, float dx, float dy);
void setStrokeColor(const String& color, Optional<float> alpha = Nullopt);
void setStrokeColor(float grayLevel, float alpha = 1.0);
void setStrokeColor(float r, float g, float b, float a);
void setStrokeColor(float c, float m, float y, float k, float a);
void setFillColor(const String& color, Optional<float> alpha = Nullopt);
void setFillColor(float grayLevel, float alpha = 1.0f);
void setFillColor(float r, float g, float b, float a);
void setFillColor(float c, float m, float y, float k, float a);
void beginPath();
enum class WindingRule { Nonzero, Evenodd };
void fill(WindingRule = WindingRule::Nonzero);
void stroke();
void clip(WindingRule = WindingRule::Nonzero);
void fill(DOMPath&, WindingRule = WindingRule::Nonzero);
void stroke(DOMPath&);
void clip(DOMPath&, WindingRule = WindingRule::Nonzero);
bool isPointInPath(float x, float y, WindingRule = WindingRule::Nonzero);
bool isPointInStroke(float x, float y);
bool isPointInPath(DOMPath&, float x, float y, WindingRule = WindingRule::Nonzero);
bool isPointInStroke(DOMPath&, float x, float y);
void clearRect(float x, float y, float width, float height);
void fillRect(float x, float y, float width, float height);
void strokeRect(float x, float y, float width, float height);
void setShadow(float width, float height, float blur, const String& color = String(), Optional<float> alpha = Nullopt);
void setShadow(float width, float height, float blur, float grayLevel, float alpha = 1.0);
void setShadow(float width, float height, float blur, float r, float g, float b, float a);
void setShadow(float width, float height, float blur, float c, float m, float y, float k, float a);
void clearShadow();
void drawImage(HTMLImageElement&, float x, float y, ExceptionCode&);
void drawImage(HTMLImageElement&, float x, float y, float width, float height, ExceptionCode&);
void drawImage(HTMLImageElement&, float sx, float sy, float sw, float sh, float dx, float dy, float dw, float dh, ExceptionCode&);
void drawImage(HTMLImageElement&, const FloatRect& srcRect, const FloatRect& dstRect, ExceptionCode&);
void drawImage(HTMLCanvasElement&, float x, float y, ExceptionCode&);
void drawImage(HTMLCanvasElement&, float x, float y, float width, float height, ExceptionCode&);
void drawImage(HTMLCanvasElement&, float sx, float sy, float sw, float sh, float dx, float dy, float dw, float dh, ExceptionCode&);
void drawImage(HTMLCanvasElement&, const FloatRect& srcRect, const FloatRect& dstRect, ExceptionCode&);
void drawImage(HTMLImageElement&, const FloatRect& srcRect, const FloatRect& dstRect, const CompositeOperator&, const BlendMode&, ExceptionCode&);
#if ENABLE(VIDEO)
void drawImage(HTMLVideoElement&, float x, float y, ExceptionCode&);
void drawImage(HTMLVideoElement&, float x, float y, float width, float height, ExceptionCode&);
void drawImage(HTMLVideoElement&, float sx, float sy, float sw, float sh, float dx, float dy, float dw, float dh, ExceptionCode&);
void drawImage(HTMLVideoElement&, const FloatRect& srcRect, const FloatRect& dstRect, ExceptionCode&);
#endif
void drawImageFromRect(HTMLImageElement&, float sx = 0, float sy = 0, float sw = 0, float sh = 0,
float dx = 0, float dy = 0, float dw = 0, float dh = 0, const String& compositeOperation = emptyString());
void setAlpha(float);
void setCompositeOperation(const String&);
RefPtr<CanvasGradient> createLinearGradient(float x0, float y0, float x1, float y1, ExceptionCode&);
RefPtr<CanvasGradient> createRadialGradient(float x0, float y0, float r0, float x1, float y1, float r1, ExceptionCode&);
RefPtr<CanvasPattern> createPattern(HTMLImageElement&, const String& repetitionType, ExceptionCode&);
RefPtr<CanvasPattern> createPattern(HTMLCanvasElement&, const String& repetitionType, ExceptionCode&);
#if ENABLE(VIDEO)
RefPtr<CanvasPattern> createPattern(HTMLVideoElement&, const String& repetitionType, ExceptionCode&);
#endif
RefPtr<ImageData> createImageData(RefPtr<ImageData>&&, ExceptionCode&) const;
RefPtr<ImageData> createImageData(float width, float height, ExceptionCode&) const;
RefPtr<ImageData> getImageData(float sx, float sy, float sw, float sh, ExceptionCode&) const;
RefPtr<ImageData> webkitGetImageDataHD(float sx, float sy, float sw, float sh, ExceptionCode&) const;
void putImageData(ImageData&, float dx, float dy, ExceptionCode&);
void putImageData(ImageData&, float dx, float dy, float dirtyX, float dirtyY, float dirtyWidth, float dirtyHeight, ExceptionCode&);
void webkitPutImageDataHD(ImageData&, float dx, float dy, ExceptionCode&);
void webkitPutImageDataHD(ImageData&, float dx, float dy, float dirtyX, float dirtyY, float dirtyWidth, float dirtyHeight, ExceptionCode&);
void drawFocusIfNeeded(Element*);
void drawFocusIfNeeded(DOMPath&, Element*);
float webkitBackingStorePixelRatio() const { return 1; }
void reset();
String font() const;
void setFont(const String&);
String textAlign() const;
void setTextAlign(const String&);
String textBaseline() const;
void setTextBaseline(const String&);
String direction() const;
void setDirection(const String&);
void fillText(const String& text, float x, float y, Optional<float> maxWidth = Nullopt);
void strokeText(const String& text, float x, float y, Optional<float> maxWidth = Nullopt);
Ref<TextMetrics> measureText(const String& text);
LineCap getLineCap() const { return state().lineCap; }
LineJoin getLineJoin() const { return state().lineJoin; }
bool imageSmoothingEnabled() const;
void setImageSmoothingEnabled(bool);
enum class ImageSmoothingQuality { Low, Medium, High };
ImageSmoothingQuality imageSmoothingQuality() const;
void setImageSmoothingQuality(ImageSmoothingQuality);
bool usesDisplayListDrawing() const { return m_usesDisplayListDrawing; };
void setUsesDisplayListDrawing(bool flag) { m_usesDisplayListDrawing = flag; };
bool tracksDisplayListReplay() const { return m_tracksDisplayListReplay; }
void setTracksDisplayListReplay(bool);
String displayListAsText(DisplayList::AsTextFlags) const;
String replayDisplayListAsText(DisplayList::AsTextFlags) const;
private:
enum class Direction {
Inherit,
RTL,
LTR
};
class FontProxy : public FontSelectorClient {
public:
FontProxy() = default;
virtual ~FontProxy();
FontProxy(const FontProxy&);
FontProxy& operator=(const FontProxy&);
bool realized() const { return m_font.fontSelector(); }
void initialize(FontSelector&, const RenderStyle&);
FontMetrics fontMetrics() const;
const FontCascadeDescription& fontDescription() const;
float width(const TextRun&) const;
void drawBidiText(GraphicsContext&, const TextRun&, const FloatPoint&, FontCascade::CustomFontNotReadyAction) const;
private:
void update(FontSelector&);
void fontsNeedUpdate(FontSelector&) override;
FontCascade m_font;
};
struct State final {
State();
State(const State&);
State& operator=(const State&);
String unparsedStrokeColor;
String unparsedFillColor;
CanvasStyle strokeStyle;
CanvasStyle fillStyle;
float lineWidth;
LineCap lineCap;
LineJoin lineJoin;
float miterLimit;
FloatSize shadowOffset;
float shadowBlur;
RGBA32 shadowColor;
float globalAlpha;
CompositeOperator globalComposite;
BlendMode globalBlend;
AffineTransform transform;
bool hasInvertibleTransform;
Vector<float> lineDash;
float lineDashOffset;
bool imageSmoothingEnabled;
ImageSmoothingQuality imageSmoothingQuality;
// Text state.
TextAlign textAlign;
TextBaseline textBaseline;
Direction direction;
String unparsedFont;
FontProxy font;
};
enum CanvasDidDrawOption {
CanvasDidDrawApplyNone = 0,
CanvasDidDrawApplyTransform = 1,
CanvasDidDrawApplyShadow = 1 << 1,
CanvasDidDrawApplyClip = 1 << 2,
CanvasDidDrawApplyAll = 0xffffffff
};
State& modifiableState() { ASSERT(!m_unrealizedSaveCount); return m_stateStack.last(); }
const State& state() const { return m_stateStack.last(); }
void applyLineDash() const;
void setShadow(const FloatSize& offset, float blur, RGBA32 color);
void applyShadow();
bool shouldDrawShadows() const;
void didDraw(const FloatRect&, unsigned options = CanvasDidDrawApplyAll);
void didDrawEntireCanvas();
void paintRenderingResultsToCanvas() override;
GraphicsContext* drawingContext() const;
void unwindStateStack();
void realizeSaves()
{
if (m_unrealizedSaveCount)
realizeSavesLoop();
}
void realizeSavesLoop();
void applyStrokePattern();
void applyFillPattern();
void drawTextInternal(const String& text, float x, float y, bool fill, Optional<float> maxWidth = Nullopt);
// The relationship between FontCascade and CanvasRenderingContext2D::FontProxy must hold certain invariants.
// Therefore, all font operations must pass through the State.
const FontProxy& fontProxy();
#if ENABLE(DASHBOARD_SUPPORT)
void clearPathForDashboardBackwardCompatibilityMode();
#endif
void beginCompositeLayer();
void endCompositeLayer();
void fillInternal(const Path&, WindingRule);
void strokeInternal(const Path&);
void clipInternal(const Path&, WindingRule);
bool isPointInPathInternal(const Path&, float x, float y, WindingRule);
bool isPointInStrokeInternal(const Path&, float x, float y);
void drawFocusIfNeededInternal(const Path&, Element*);
void clearCanvas();
Path transformAreaToDevice(const Path&) const;
Path transformAreaToDevice(const FloatRect&) const;
bool rectContainsCanvas(const FloatRect&) const;
template<class T> IntRect calculateCompositingBufferRect(const T&, IntSize*);
std::unique_ptr<ImageBuffer> createCompositingBuffer(const IntRect&);
void compositeBuffer(ImageBuffer&, const IntRect&, CompositeOperator);
void inflateStrokeRect(FloatRect&) const;
template<class T> void fullCanvasCompositedDrawImage(T&, const FloatRect&, const FloatRect&, CompositeOperator);
void prepareGradientForDashboard(CanvasGradient& gradient) const;
RefPtr<ImageData> getImageData(ImageBuffer::CoordinateSystem, float sx, float sy, float sw, float sh, ExceptionCode&) const;
void putImageData(ImageData&, ImageBuffer::CoordinateSystem, float dx, float dy, float dirtyX, float dirtyY, float dirtyWidth, float dirtyHeight, ExceptionCode&);
bool is2d() const override { return true; }
bool isAccelerated() const override;
bool hasInvertibleTransform() const override { return state().hasInvertibleTransform; }
TextDirection toTextDirection(Direction, const RenderStyle** computedStyle = nullptr) const;
#if ENABLE(ACCELERATED_2D_CANVAS)
PlatformLayer* platformLayer() const override;
#endif
Vector<State, 1> m_stateStack;
unsigned m_unrealizedSaveCount { 0 };
bool m_usesCSSCompatibilityParseMode;
#if ENABLE(DASHBOARD_SUPPORT)
bool m_usesDashboardCompatibilityMode;
#endif
bool m_usesDisplayListDrawing { false };
bool m_tracksDisplayListReplay { false };
mutable std::unique_ptr<struct DisplayListDrawingContext> m_recordingContext;
};
} // namespace WebCore
SPECIALIZE_TYPE_TRAITS_CANVASRENDERINGCONTEXT(WebCore::CanvasRenderingContext2D, is2d())
| applesrc/WebCore | html/canvas/CanvasRenderingContext2D.h | C | bsd-2-clause | 15,678 |
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include "vm_strings.h"
#include <assert.h>
String *String_alloc(size_t length) {
String *p = (String *)calloc(1, sizeof(String) + (length+1) * sizeof(char));
p->length = length;
return p;
}
String *String_new(char *orig) {
String *s = String_alloc(strlen(orig));
strcpy(s->str, orig);
return s;
}
String *String_dup(String *orig) {
String *s = String_alloc(orig->length);
strcpy(s->str, orig->str);
return s;
}
String *String_from_char(char c) {
char buf[2] = {c, '\0'};
return String_new(buf);
}
String *String_from_int(int value) {
char buf[50];
sprintf(buf,"%d",value);
return String_new(buf);
}
int String_len(String *s) {
if (s == NULL) {
fprintf(stderr, "len() cannot be applied to NULL string object\n");
return -1;
}
return (int)s->length;
}
String *String_add(String *s, String *t) {
if ( s == NULL && t == NULL) {
fprintf(stderr, "Addition Operator cannot be applied to two NULL string objects\n");
return NIL_STRING;
}
if ( s == NULL ) return t; // don't REF/DEREF as we might free our return value
if ( t == NULL ) return s;
size_t n = strlen(s->str) + strlen(t->str);
String *u = String_alloc(n);
strcpy(u->str, s->str);
strcat(u->str, t->str);
return u;
}
bool String_eq(String *s, String *t) {
assert(s);
assert(t);
return strcmp(s->str, t->str) == 0;
}
bool String_neq(String *s, String *t) {
return !String_eq(s,t);
}
bool String_gt(String *s, String *t) {
assert(s);
assert(t);
return strcmp(s->str, t->str) > 0;
}
bool String_ge(String *s, String *t) {
assert(s);
assert(t);
return strcmp(s->str, t->str) >= 0;
}
bool String_lt(String *s, String *t) {
assert(s);
assert(t);
return strcmp(s->str, t->str) < 0;
}
bool String_le(String *s, String *t) {
assert(s);
assert(t);
return strcmp(s->str, t->str) <= 0;
}
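/*
 * Usage sketch (illustrative only, not called anywhere in the VM): building
 * and comparing strings with the helpers above.
 *
 *   String *a = String_new("hello ");
 *   String *b = String_from_int(42);
 *   String *c = String_add(a, b);                      // "hello 42"
 *   int n = String_len(c);                             // 8
 *   bool same = String_eq(c, String_new("hello 42"));  // true
 */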
| USF-CS345-starterkits/parrt-bytecode | src/vm_strings.c | C | bsd-2-clause | 1,852 |
#!/usr/bin/env python
__author__ = 'Adam R. Smith, Michael Meisinger, Dave Foster <[email protected]>'
import threading
import traceback
import gevent
from gevent import greenlet, Timeout
from gevent.event import Event, AsyncResult
from gevent.queue import Queue
from pyon.core import MSG_HEADER_ACTOR
from pyon.core.bootstrap import CFG
from pyon.core.exception import IonException, ContainerError
from pyon.core.exception import Timeout as IonTimeout
from pyon.core.thread import PyonThreadManager, PyonThread, ThreadManager, PyonThreadTraceback, PyonHeartbeatError
from pyon.datastore.postgresql.pg_util import init_db_stats, get_db_stats, clear_db_stats
from pyon.ion.service import BaseService
from pyon.util.containers import get_ion_ts, get_ion_ts_millis
from pyon.util.log import log
STAT_INTERVAL_LENGTH = 60000 # Interval time for process saturation stats collection
stats_callback = None
class OperationInterruptedException(BaseException):
"""
Interrupted exception. Used by external items timing out execution in the
IonProcessThread's control thread.
Derived from BaseException to specifically avoid try/except Exception blocks,
such as in Publisher's publish_event.
"""
pass
class IonProcessError(StandardError):
pass
class IonProcessThread(PyonThread):
"""
The control part of an ION process.
"""
def __init__(self, target=None, listeners=None, name=None, service=None, cleanup_method=None,
heartbeat_secs=10, **kwargs):
"""
Constructs the control part of an ION process.
Used by the container's IonProcessThreadManager, as part of spawn_process.
@param target A callable to run in the PyonThread. If None (typical), will use the target method
defined in this class.
@param listeners A list of listening endpoints attached to this thread.
@param name The name of this ION process.
@param service An instance of the BaseService derived class which contains the business logic for
the ION process.
@param cleanup_method An optional callable to run when the process is stopping. Runs after all other
notify_stop calls have run. Should take one param, this instance.
@param heartbeat_secs Number of seconds to wait in between heartbeats.
"""
self._startup_listeners = listeners or []
self.listeners = []
self._listener_map = {}
self.name = name
self.service = service
self._cleanup_method = cleanup_method
self.thread_manager = ThreadManager(failure_notify_callback=self._child_failed) # bubbles up to main thread manager
self._dead_children = [] # save any dead children for forensics
self._ctrl_thread = None
self._ctrl_queue = Queue()
self._ready_control = Event()
self._errors = []
self._ctrl_current = None # set to the AR generated by _routing_call when in the context of a call
# processing vs idle time (ms)
self._start_time = None
self._proc_time = 0 # busy time since start
self._proc_time_prior = 0 # busy time at the beginning of the prior interval
self._proc_time_prior2 = 0 # busy time at the beginning of 2 interval's ago
self._proc_interval_num = 0 # interval num of last record
# for heartbeats, used to detect stuck processes
self._heartbeat_secs = heartbeat_secs # amount of time to wait between heartbeats
self._heartbeat_stack = None # stacktrace of last heartbeat
self._heartbeat_time = None # timestamp of heart beat last matching the current op
self._heartbeat_op = None # last operation (by AR)
self._heartbeat_count = 0 # number of times this operation has been seen consecutively
self._log_call_exception = CFG.get_safe("container.process.log_exceptions", False)
self._log_call_dbstats = CFG.get_safe("container.process.log_dbstats", False)
self._warn_call_dbstmt_threshold = CFG.get_safe("container.process.warn_dbstmt_threshold", 0)
PyonThread.__init__(self, target=target, **kwargs)
def heartbeat(self):
"""
Returns a 3-tuple indicating everything is ok.
Should only be called after the process has been started.
Checks the following:
- All attached endpoints are alive + listening (this means ready)
- The control flow greenlet is alive + listening or processing
@return 3-tuple indicating (listeners ok, ctrl thread ok, heartbeat status). Use all on it for a
boolean indication of success.
"""
listeners_ok = True
for l in self.listeners:
if not (l in self._listener_map and not self._listener_map[l].proc.dead and l.get_ready_event().is_set()):
listeners_ok = False
ctrl_thread_ok = self._ctrl_thread.running
# are we currently processing something?
heartbeat_ok = True
if self._ctrl_current is not None:
st = traceback.extract_stack(self._ctrl_thread.proc.gr_frame)
if self._ctrl_current == self._heartbeat_op:
if st == self._heartbeat_stack:
self._heartbeat_count += 1 # we've seen this before! increment count
# we've been in this for the last X ticks, or it's been X seconds, fail this part of the heartbeat
if self._heartbeat_count > CFG.get_safe('container.timeout.heartbeat_proc_count_threshold', 30) or \
get_ion_ts_millis() - int(self._heartbeat_time) >= CFG.get_safe('container.timeout.heartbeat_proc_time_threshold', 30) * 1000:
heartbeat_ok = False
else:
# it's made some progress
self._heartbeat_count = 1
self._heartbeat_stack = st
self._heartbeat_time = get_ion_ts()
else:
self._heartbeat_op = self._ctrl_current
self._heartbeat_count = 1
self._heartbeat_time = get_ion_ts()
self._heartbeat_stack = st
else:
self._heartbeat_op = None
self._heartbeat_count = 0
#log.debug("%s %s %s", listeners_ok, ctrl_thread_ok, heartbeat_ok)
return (listeners_ok, ctrl_thread_ok, heartbeat_ok)
@property
def time_stats(self):
"""
Returns a 5-tuple of (total time, idle time, processing time, time since prior interval start,
busy since prior interval start), all in ms (int).
"""
now = get_ion_ts_millis()
running_time = now - self._start_time
idle_time = running_time - self._proc_time
cur_interval = now / STAT_INTERVAL_LENGTH
now_since_prior = now - (cur_interval - 1) * STAT_INTERVAL_LENGTH
if cur_interval == self._proc_interval_num:
proc_time_since_prior = self._proc_time-self._proc_time_prior2
elif cur_interval-1 == self._proc_interval_num:
proc_time_since_prior = self._proc_time-self._proc_time_prior
else:
proc_time_since_prior = 0
return (running_time, idle_time, self._proc_time, now_since_prior, proc_time_since_prior)
def _child_failed(self, child):
"""
        Callback from gevent as set in the ThreadManager, when a child greenlet fails.
Kills the ION process main greenlet. This propagates the error up to the process supervisor.
"""
# remove the child from the list of children (so we can shut down cleanly)
for x in self.thread_manager.children:
if x.proc == child:
self.thread_manager.children.remove(x)
break
self._dead_children.append(child)
# kill this process's main greenlet. This should be noticed by the container's proc manager
self.proc.kill(child.exception)
def add_endpoint(self, listener, activate=True):
"""
Adds a listening endpoint to be managed by this ION process.
Spawns the listen loop and sets the routing call to synchronize incoming messages
here. If this process hasn't been started yet, adds it to the list of listeners
to start on startup.
@param activate If True (default), start consuming from listener
"""
if self.proc:
listener.routing_call = self._routing_call
if self.name:
svc_name = "unnamed-service"
if self.service is not None and hasattr(self.service, 'name'):
svc_name = self.service.name
listen_thread_name = "%s-%s-listen-%s" % (svc_name, self.name, len(self.listeners)+1)
else:
listen_thread_name = "unknown-listener-%s" % (len(self.listeners)+1)
listen_thread = self.thread_manager.spawn(listener.listen, thread_name=listen_thread_name, activate=activate)
listen_thread.proc._glname = "ION Proc listener %s" % listen_thread_name
self._listener_map[listener] = listen_thread
self.listeners.append(listener)
else:
self._startup_listeners.append(listener)
def remove_endpoint(self, listener):
"""
Removes a listening endpoint from management by this ION process.
If the endpoint is unknown to this ION process, raises an error.
@return The PyonThread running the listen loop, if it exists. You are
responsible for closing it when appropriate.
"""
if listener in self.listeners:
self.listeners.remove(listener)
return self._listener_map.pop(listener)
elif listener in self._startup_listeners:
self._startup_listeners.remove(listener)
return None
else:
raise IonProcessError("Cannot remove unrecognized listener: %s" % listener)
def target(self, *args, **kwargs):
"""
Entry point for the main process greenlet.
Setup the base properties for this process (mainly the control thread).
"""
if self.name:
threading.current_thread().name = "%s-target" % self.name
# start time
self._start_time = get_ion_ts_millis()
self._proc_interval_num = self._start_time / STAT_INTERVAL_LENGTH
# spawn control flow loop
self._ctrl_thread = self.thread_manager.spawn(self._control_flow)
self._ctrl_thread.proc._glname = "ION Proc CL %s" % self.name
# wait on control flow loop, heartbeating as appropriate
while not self._ctrl_thread.ev_exit.wait(timeout=self._heartbeat_secs):
hbst = self.heartbeat()
if not all(hbst):
log.warn("Heartbeat status for process %s returned %s", self, hbst)
if self._heartbeat_stack is not None:
stack_out = "".join(traceback.format_list(self._heartbeat_stack))
else:
stack_out = "N/A"
#raise PyonHeartbeatError("Heartbeat failed: %s, stacktrace:\n%s" % (hbst, stack_out))
log.warn("Heartbeat failed: %s, stacktrace:\n%s", hbst, stack_out)
# this is almost a no-op as we don't fall out of the above loop without
# exiting the ctrl_thread, but having this line here makes testing much easier.
self._ctrl_thread.join()
def _routing_call(self, call, context, *callargs, **callkwargs):
"""
Endpoints call into here to synchronize across the entire IonProcess.
Returns immediately with an AsyncResult that can be waited on. Calls
are made by the loop in _control_flow. We pass in the calling greenlet so
exceptions are raised in the correct context.
@param call The call to be made within this ION processes' calling greenlet.
@param callargs The keyword args to pass to the call.
@param context Optional process-context (usually the headers of the incoming call) to be
set. Process-context is greenlet-local, and since we're crossing greenlet
boundaries, we must set it again in the ION process' calling greenlet.
"""
ar = AsyncResult()
if len(callargs) == 0 and len(callkwargs) == 0:
log.trace("_routing_call got no arguments for the call %s, check your call's parameters", call)
self._ctrl_queue.put((greenlet.getcurrent(), ar, call, callargs, callkwargs, context))
return ar
def has_pending_call(self, ar):
"""
Returns true if the call (keyed by the AsyncResult returned by _routing_call) is still pending.
"""
for _, qar, _, _, _, _ in self._ctrl_queue.queue:
if qar == ar:
return True
return False
def _cancel_pending_call(self, ar):
"""
        Cancels a pending call (keyed by the AsyncResult returned by _routing_call).
@return True if the call was truly pending.
"""
if self.has_pending_call(ar):
ar.set(False)
return True
return False
def _interrupt_control_thread(self):
"""
Signal the control flow thread that it needs to abort processing, likely due to a timeout.
"""
self._ctrl_thread.proc.kill(exception=OperationInterruptedException, block=False)
def cancel_or_abort_call(self, ar):
"""
Either cancels a future pending call, or aborts the current processing if the given AR is unset.
The pending call is keyed by the AsyncResult returned by _routing_call.
"""
if not self._cancel_pending_call(ar) and not ar.ready():
self._interrupt_control_thread()
def _control_flow(self):
"""
Entry point for process control thread of execution.
This method is run by the control greenlet for each ION process. Listeners attached
to the process, either RPC Servers or Subscribers, synchronize calls to the process
by placing call requests into the queue by calling _routing_call.
This method blocks until there are calls to be made in the synchronized queue, and
then calls from within this greenlet. Any exception raised is caught and re-raised
in the greenlet that originally scheduled the call. If successful, the AsyncResult
created at scheduling time is set with the result of the call.
"""
svc_name = getattr(self.service, "name", "unnamed-service") if self.service else "unnamed-service"
proc_id = getattr(self.service, "id", "unknown-pid") if self.service else "unknown-pid"
if self.name:
threading.current_thread().name = "%s-%s" % (svc_name, self.name)
thread_base_name = threading.current_thread().name
self._ready_control.set()
for calltuple in self._ctrl_queue:
calling_gl, ar, call, callargs, callkwargs, context = calltuple
request_id = (context or {}).get("request-id", None)
if request_id:
threading.current_thread().name = thread_base_name + "-" + str(request_id)
#log.debug("control_flow making call: %s %s %s (has context: %s)", call, callargs, callkwargs, context is not None)
res = None
start_proc_time = get_ion_ts_millis()
self._record_proc_time(start_proc_time)
# check context for expiration
if context is not None and 'reply-by' in context:
if start_proc_time >= int(context['reply-by']):
log.info("control_flow: attempting to process message already exceeding reply-by, ignore")
# raise a timeout in the calling thread to allow endpoints to continue processing
e = IonTimeout("Reply-by time has already occurred (reply-by: %s, op start time: %s)" % (context['reply-by'], start_proc_time))
calling_gl.kill(exception=e, block=False)
continue
# If ar is set, means it is cancelled
if ar.ready():
log.info("control_flow: attempting to process message that has been cancelled, ignore")
continue
init_db_stats()
try:
# ******************************************************************
# ****** THIS IS WHERE THE RPC OPERATION/SERVICE CALL IS MADE ******
with self.service.push_context(context), \
self.service.container.context.push_context(context):
self._ctrl_current = ar
res = call(*callargs, **callkwargs)
# ****** END CALL, EXCEPTION HANDLING FOLLOWS ******
# ******************************************************************
except OperationInterruptedException:
# endpoint layer takes care of response as it's the one that caused this
log.debug("Operation interrupted")
pass
except Exception as e:
if self._log_call_exception:
log.exception("PROCESS exception: %s" % e.message)
# Raise the exception in the calling greenlet.
# Try decorating the args of the exception with the true traceback -
# this should be reported by ThreadManager._child_failed
exc = PyonThreadTraceback("IonProcessThread _control_flow caught an exception "
"(call: %s, *args %s, **kwargs %s, context %s)\n"
"True traceback captured by IonProcessThread' _control_flow:\n\n%s" % (
call, callargs, callkwargs, context, traceback.format_exc()))
e.args = e.args + (exc,)
if isinstance(e, (TypeError, IonException)):
# Pass through known process exceptions, in particular IonException
calling_gl.kill(exception=e, block=False)
else:
# Otherwise, wrap unknown, forward and hopefully we can continue on our way
self._errors.append((call, callargs, callkwargs, context, e, exc))
log.warn(exc)
log.warn("Attempting to continue...")
# Note: Too large exception string will crash the container (when passed on as msg header).
exception_str = str(exc)
if len(exception_str) > 10000:
exception_str = (
"Exception string representation too large. "
"Begin and end of the exception:\n"
+ exception_str[:2000] + "\n...\n" + exception_str[-2000:]
)
calling_gl.kill(exception=ContainerError(exception_str), block=False)
finally:
try:
# Compute statistics
self._compute_proc_stats(start_proc_time)
db_stats = get_db_stats()
if db_stats:
if self._warn_call_dbstmt_threshold > 0 and db_stats.get("count.all", 0) >= self._warn_call_dbstmt_threshold:
stats_str = ", ".join("{}={}".format(k, db_stats[k]) for k in sorted(db_stats.keys()))
log.warn("PROC_OP '%s.%s' EXCEEDED DB THRESHOLD. stats=%s", svc_name, call.__name__, stats_str)
elif self._log_call_dbstats:
stats_str = ", ".join("{}={}".format(k, db_stats[k]) for k in sorted(db_stats.keys()))
log.info("PROC_OP '%s.%s' DB STATS: %s", svc_name, call.__name__, stats_str)
clear_db_stats()
if stats_callback:
stats_callback(proc_id=proc_id, proc_name=self.name, svc=svc_name, op=call.__name__,
request_id=request_id, context=context,
db_stats=db_stats, proc_stats=self.time_stats, result=res, exc=None)
except Exception:
log.exception("Error computing process call stats")
self._ctrl_current = None
threading.current_thread().name = thread_base_name
# Set response in AsyncEvent of caller (endpoint greenlet)
ar.set(res)
def _record_proc_time(self, cur_time):
""" Keep the _proc_time of the prior and prior-prior intervals for stats computation
"""
cur_interval = cur_time / STAT_INTERVAL_LENGTH
if cur_interval == self._proc_interval_num:
# We're still in the same interval - no update
pass
elif cur_interval-1 == self._proc_interval_num:
# Record the stats from the prior interval
self._proc_interval_num = cur_interval
self._proc_time_prior2 = self._proc_time_prior
self._proc_time_prior = self._proc_time
elif cur_interval-1 > self._proc_interval_num:
# We skipped an entire interval - everything is prior2
self._proc_interval_num = cur_interval
self._proc_time_prior2 = self._proc_time
self._proc_time_prior = self._proc_time
def _compute_proc_stats(self, start_proc_time):
cur_time = get_ion_ts_millis()
self._record_proc_time(cur_time)
proc_time = cur_time - start_proc_time
self._proc_time += proc_time
def start_listeners(self):
"""
Starts all listeners in managed greenlets.
Usually called by the ProcManager, unless using IonProcess manually.
"""
try:
# disable normal error reporting, this method should only be called from startup
self.thread_manager._failure_notify_callback = None
# spawn all listeners in startup listeners (from initializer, or added later)
for listener in self._startup_listeners:
self.add_endpoint(listener)
with Timeout(seconds=CFG.get_safe('container.messaging.timeout.start_listener', 30)):
gevent.wait([x.get_ready_event() for x in self.listeners])
except Timeout:
# remove failed endpoints before reporting failure above
for listener, proc in self._listener_map.iteritems():
if proc.proc.dead:
log.info("removed dead listener: %s", listener)
self.listeners.remove(listener)
self.thread_manager.children.remove(proc)
raise IonProcessError("start_listeners did not complete in expected time")
finally:
self.thread_manager._failure_notify_callback = self._child_failed
def _notify_stop(self):
"""
Called when the process is about to be shut down.
Instructs all listeners to close, puts a StopIteration into the synchronized queue,
and waits for the listeners to close and for the control queue to exit.
"""
for listener in self.listeners:
try:
listener.close()
except Exception as ex:
tb = traceback.format_exc()
log.warn("Could not close listener, attempting to ignore: %s\nTraceback:\n%s", ex, tb)
self._ctrl_queue.put(StopIteration)
# wait_children will join them and then get() them, which may raise an exception if any of them
# died with an exception.
self.thread_manager.wait_children(30)
PyonThread._notify_stop(self)
# run the cleanup method if we have one
if self._cleanup_method is not None:
try:
self._cleanup_method(self)
except Exception as ex:
log.warn("Cleanup method error, attempting to ignore: %s\nTraceback: %s", ex, traceback.format_exc())
def get_ready_event(self):
"""
Returns an Event that is set when the control greenlet is up and running.
"""
return self._ready_control
class IonProcessThreadManager(PyonThreadManager):
def _create_thread(self, target=None, **kwargs):
return IonProcessThread(target=target, heartbeat_secs=self.heartbeat_secs, **kwargs)
# ---------------------------------------------------------------------------------------------------
# Process type variants
class StandaloneProcess(BaseService):
"""
A process is an ION process of type "standalone" that has an incoming messaging
attachment for the process and operations as defined in a service YML.
"""
process_type = "standalone"
class SimpleProcess(BaseService):
"""
A simple process is an ION process of type "simple" that has no incoming messaging
attachment.
"""
process_type = "simple"
class ImmediateProcess(BaseService):
"""
An immediate process is an ION process of type "immediate" that does its action in
the on_init and on_start hooks, and that it terminated immediately after completion.
Has no messaging attachment.
"""
process_type = "immediate"
class StreamProcess(BaseService):
"""
Base class for a stream process.
Such a process handles a sequence of otherwise unconstrained messages, resulting from a
subscription. There are no operations.
"""
process_type = "stream_process"
def call_process(self, message, stream_route, stream_id):
"""
Handles pre-processing of packet and process work
"""
self.process(message)
def process(self, message):
"""
Process a message as arriving based on a subscription.
"""
pass
# ---------------------------------------------------------------------------------------------------
# Process helpers
def get_ion_actor_id(process):
"""Given an ION process, return the ion-actor-id from the context, if set and present"""
ion_actor_id = None
if process:
ctx = process.get_context()
ion_actor_id = ctx.get(MSG_HEADER_ACTOR, None) if ctx else None
return ion_actor_id
def set_process_stats_callback(stats_cb):
""" Sets a callback function (hook) to push stats after a process operation call. """
global stats_callback
if stats_cb is None:
pass
elif stats_callback:
log.warn("Stats callback already defined")
stats_callback = stats_cb
| scionrep/scioncc | src/pyon/ion/process.py | Python | bsd-2-clause | 27,034 |
# 98Fmplayer (beta)
PC-98 FM driver emulation (very early version)




*If you are just annoyed by some specific bugs in PMDWin, [patched PMDWin](https://github.com/takamichih/pmdwinbuild) might have fewer bugs and more features than this.*
## Current status:
* Supported formats: PMD, FMP(PLAY6)
* PMD: FM, SSG, Rhythm, ADPCM, PPZ8(partially) supported; PPS, P86 not supported yet
* FMP: FM, SSG, Rhythm, ADPCM, PPZ8, PDZF supported
* This is just a byproduct of reverse-engineering the formats, and its emulation is much worse than PMDWin or WinFMP
* FM is always generated at 55467 Hz (the closest integer to 7987200 / 144); SSG is always generated at 249600 Hz and downsampled with a sinc filter (it never linearly interpolates a harmonics-rich signal like a square wave); see the rate sketch below this list
* FM generation is bit-perfect against an actual OPNA/OPN3 chip under limited conditions, including stereo output when 4 <= ALG (the envelope is not bit-perfect yet; attack is bit-perfect only when AR >= 21)
* SSGEG, Hardware LFO not supported
* PPZ8: support nearest neighbor, linear and sinc interpolation
* ADPCM: inaccurate (the actual YM2608 seems to decode ADPCM at a lower samplerate/resolution than any YM2608 emulator around, but I still couldn't get my YM2608 to work with the DRAM)
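A minimal sketch of how the fixed rates above are derived (illustrative only; this is not code from this repository, and the constant names are assumptions):
```
# Illustrative Python sketch; names are not from 98Fmplayer itself.
OPNA_CLOCK_HZ = 7987200       # OPNA master clock
FM_PRESCALER = 144            # master-clock cycles per FM sample
SSG_RATE_HZ = 249600          # SSG internal rate before sinc downsampling

fm_rate_hz = round(OPNA_CLOCK_HZ / FM_PRESCALER)  # 55467, since 7987200 / 144 = 55466.67
print(fm_rate_hz, SSG_RATE_HZ)
```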
## Installation/Usage (not very usable yet)
### gtk
Uses gtk3, pulseaudio/jack/alsa
```
$ cd gtk
$ autoreconf -i
$ ./configure
$ make
$ ./98fmplayer
```
Reads drum sample from `$HOME/.local/share/98fmplayer/ym2608_adpcm_rom.bin` (same format as MAME).
### win32
Releases:
https://github.com/takamichih/fmplayer/releases/
Uses MinGW-w64 to compile.
```
$ cd win32/x86
$ make
```
Reads drum sample from the directory in which `98fmplayer.exe` is placed.
Uses DirectSound (or WinMM if DirectSound is not available) to output sound. This works on Windows 2000, so it is theoretically possible to run it on a real PC-98. (But it was too heavy for my PC-9821V12, which only has a P5 Pentium at 120 MHz, and also for a PC-9821Ra300 with a P6 Mendocino Celeron at 300 MHz.)
| takamichih/fmplayer | README.md | Markdown | bsd-2-clause | 2,196 |
# Author: Nick Raptis <[email protected]>
"""
Module for listing commands and help.
"""
from basemodule import BaseModule, BaseCommandContext
from alternatives import _
class HelpContext(BaseCommandContext):
def cmd_list(self, argument):
"""List commands"""
arg = argument.lower()
index = self.bot.help_index
public = "public commands -- %s" % " ".join(index['public'])
private = "private commands -- %s" % " ".join(index['private'])
if 'all' in arg or 'both' in arg:
output = "\n".join((public, private))
elif 'pub' in arg or self.target.startswith('#'):
output = public
elif 'priv' in arg or not self.target.startswith('#'):
output = private
else:
# we shouldn't be here
self.logger.error("cmd_list")
return
self.send(self.target, output)
def cmd_modules(self, argument):
"""List active modules"""
index = self.bot.help_index
output = "active modules -- %s" % " ".join(index['modules'].keys())
self.send(self.target, output)
def cmd_help(self, argument):
"""Get help on a command or module"""
arg = argument.lower()
index = self.bot.help_index
target = self.target
args = arg.split()
if not args:
s = "usage: help <command> [public|private] / help module <module>"
self.send(target, s)
elif args[0] == 'module':
args.pop(0)
if not args:
self.send(target, "usage: help module <module>")
else:
help_item = index['modules'].get(args[0])
if help_item:
self.send(target, help_item['summary'])
else:
self.send(target, _("No help for %s"), args[0])
else:
args.append("")
cmd = args.pop(0)
cmd_type = args.pop(0)
if 'pu' in cmd_type or self.target.startswith('#'):
cmd_type = 'public'
elif 'pr' in cmd_type or not self.target.startswith('#'):
cmd_type = 'private'
else:
# we shouldn't be here
self.logger.error("cmd_list")
return
help_item = index[cmd_type].get(cmd)
if help_item:
self.send(target, index[cmd_type][cmd]['summary'])
else:
self.send(target, _("No help for %s"), cmd)
class HelpModule(BaseModule):
context_class = HelpContext
module = HelpModule
| nickraptis/fidibot | src/modules/help.py | Python | bsd-2-clause | 2,615 |
Notebook with pandas in a container
===================================
Docker container for the IPython notebook (with pandas).
Usage
-----
docker run -i -t --rm -v `pwd`/notebooks:/notebooks -p 8888:8888 -e "PASSWORD=YOURPASSWORD" mlf4aiur/pandas
You'll now be able to access your notebook at https://localhost:8888 with password YOURPASSWORD.
**Using HTTP**
This Docker image runs the IPython notebook over HTTPS by default. If you'd like to run it over HTTP, you can use the USE_HTTP environment variable. Setting it to a non-zero value enables HTTP.
docker run -i -t --rm -v `pwd`/notebooks:/notebooks -p 8888:8888 -e "PASSWORD=YOURPASSWORD" -e "USE_HTTP=1" mlf4aiur/pandas
| mlf4aiur/dockerfiles | pandas/README.md | Markdown | bsd-2-clause | 688 |
cask 'feeder' do
version '3.6.8'
sha256 '4399609c1b04b1b92aa51bf9c240fc7dfec49e534eae014dbe750e7c7bbdfd2d'
url "https://reinventedsoftware.com/feeder/downloads/Feeder_#{version}.dmg"
appcast "https://reinventedsoftware.com/feeder/downloads/Feeder#{version.major}.xml"
name 'Feeder'
homepage 'https://reinventedsoftware.com/feeder/'
app "Feeder #{version.major}.app"
end
| uetchy/homebrew-cask | Casks/feeder.rb | Ruby | bsd-2-clause | 386 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes, OverloadedStrings #-}
module Handler.Root where
import Foundation
-- This is a handler function for the GET request method on the RootR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
getRootR :: Handler RepHtml
getRootR = do
defaultLayout $ do
h2id <- lift newIdent
setTitle "TierList homepage"
$(widgetFile "homepage") | periodic/Simple-Yesod-ToDo | Handler/Root.hs | Haskell | bsd-2-clause | 625 |
<?php
/**
* MtMail - e-mail module for Zend Framework
*
* @link http://github.com/mtymek/MtMail
* @copyright Copyright (c) 2013-2017 Mateusz Tymek
* @license BSD 2-Clause
*/
namespace MtMail\Factory;
use Interop\Container\ContainerInterface;
use MtMail\ComposerPlugin\DefaultHeaders;
class DefaultHeadersPluginFactory
{
public function __invoke(ContainerInterface $serviceLocator)
{
if (!method_exists($serviceLocator, 'configure')) {
$serviceLocator = $serviceLocator->getServiceLocator();
}
$config = $serviceLocator->get('Configuration');
$plugin = new DefaultHeaders();
if (isset($config['mt_mail']['default_headers'])) {
$plugin->setHeaders($config['mt_mail']['default_headers']);
}
return $plugin;
}
}
| mtymek/MtMail | src/Factory/DefaultHeadersPluginFactory.php | PHP | bsd-2-clause | 817 |
<Global.Microsoft.VisualBasic.CompilerServices.DesignerGenerated()> _
Partial Class Form1
Inherits System.Windows.Forms.Form
'Form overrides dispose to clean up the component list.
<System.Diagnostics.DebuggerNonUserCode()> _
Protected Overrides Sub Dispose(ByVal disposing As Boolean)
Try
If disposing AndAlso components IsNot Nothing Then
components.Dispose()
End If
Finally
MyBase.Dispose(disposing)
End Try
End Sub
'Required by the Windows Form Designer
Private components As System.ComponentModel.IContainer
'NOTE: The following procedure is required by the Windows Form Designer
'It can be modified using the Windows Form Designer.
'Do not modify it using the code editor.
<System.Diagnostics.DebuggerStepThrough()> _
Private Sub InitializeComponent()
Me.Button1 = New System.Windows.Forms.Button()
Me.DataGridView1 = New System.Windows.Forms.DataGridView()
Me.Table1TableAdapter1 = New WindowsApplication1.Database1DataSetTableAdapters.Table1TableAdapter()
CType(Me.DataGridView1, System.ComponentModel.ISupportInitialize).BeginInit()
Me.SuspendLayout()
'
'Button1
'
Me.Button1.Location = New System.Drawing.Point(52, 218)
Me.Button1.Name = "Button1"
Me.Button1.Size = New System.Drawing.Size(75, 23)
Me.Button1.TabIndex = 1
Me.Button1.Text = "Button1"
Me.Button1.UseVisualStyleBackColor = True
'
'DataGridView1
'
Me.DataGridView1.AllowUserToOrderColumns = True
Me.DataGridView1.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize
Me.DataGridView1.Location = New System.Drawing.Point(52, 25)
Me.DataGridView1.Name = "DataGridView1"
Me.DataGridView1.Size = New System.Drawing.Size(417, 162)
Me.DataGridView1.TabIndex = 2
'
'Table1TableAdapter1
'
Me.Table1TableAdapter1.ClearBeforeFill = True
'
'Form1
'
Me.AutoScaleDimensions = New System.Drawing.SizeF(6.0!, 13.0!)
Me.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font
Me.ClientSize = New System.Drawing.Size(522, 344)
Me.Controls.Add(Me.DataGridView1)
Me.Controls.Add(Me.Button1)
Me.Name = "Form1"
Me.Text = "Form1"
CType(Me.DataGridView1, System.ComponentModel.ISupportInitialize).EndInit()
Me.ResumeLayout(False)
End Sub
Friend WithEvents Button1 As System.Windows.Forms.Button
Friend WithEvents DataGridView1 As System.Windows.Forms.DataGridView
Friend WithEvents Table1TableAdapter1 As WindowsApplication1.Database1DataSetTableAdapters.Table1TableAdapter
End Class
| Joeordie/POS408Team | ContactManager/WindowsApplication1/Form1.Designer.vb | Visual Basic | bsd-2-clause | 2,842 |
// This file was procedurally generated from the following sources:
// - src/async-generators/yield-as-identifier-reference-escaped.case
// - src/async-generators/syntax/async-class-expr-method.template
/*---
description: yield is a reserved keyword within generator function bodies and may not be used as an identifier reference. (Async generator method as a ClassExpression element)
esid: prod-AsyncGeneratorMethod
features: [async-iteration]
flags: [generated]
negative:
phase: parse
type: SyntaxError
info: |
ClassElement :
MethodDefinition
MethodDefinition :
AsyncGeneratorMethod
Async Generator Function Definitions
AsyncGeneratorMethod :
async [no LineTerminator here] * PropertyName ( UniqueFormalParameters ) { AsyncGeneratorBody }
IdentifierReference : Identifier
It is a Syntax Error if this production has a [Yield] parameter and
StringValue of Identifier is "yield".
---*/
throw "Test262: This statement should not be evaluated.";
var C = class { async *gen() {
void yi\u0065ld;
}};
| sebastienros/jint | Jint.Tests.Test262/test/language/expressions/class/async-gen-method-yield-as-identifier-reference-escaped.js | JavaScript | bsd-2-clause | 1,059 |
/*****************************************************************************************
*
* Portable classes for the HSFC
* These are (semi-)portable representations of a state that can be serialised
* and loaded between any HSFC instances loaded with the same GDL (we hope!!!).
*
*****************************************************************************************/
#ifndef HSFC_PORTABLE_H
#define HSFC_PORTABLE_H
#include <iostream>
#include <string>
#include <vector>
#include <set>
#include <memory>
#include <iterator>
#include <algorithm>
#include <stdint.h>
#include <boost/assert.hpp>
#include <boost/exception/all.hpp>
#include <boost/scoped_ptr.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/serialization/serialization.hpp>
#include <boost/serialization/utility.hpp>
#include <boost/serialization/vector.hpp>
#include <boost/serialization/map.hpp>
#include <boost/serialization/set.hpp>
#include <boost/serialization/shared_ptr.hpp>
#include <boost/serialization/access.hpp>
#include <hsfc/hsfcexception.h>
#include <hsfc/impl/fwd_decl.h>
namespace HSFC
{
/*****************************************************************************************
* PortableState.
*****************************************************************************************/
class PortableState
{
public:
PortableState();
PortableState(const State& state);
PortableState(const PortableState& other);
PortableState& operator=(const PortableState& other);
bool operator==(const PortableState& other) const;
bool operator!=(const PortableState& other) const;
bool operator<(const PortableState& other) const;
std::size_t hash_value() const;
private:
friend class State;
friend class boost::serialization::access;
int round_;
int currentstep_;
std::set<std::pair<int,int> > relationset_;
template<typename Archive>
void serialize(Archive& ar, const unsigned int version);
};
std::size_t hash_value(const PortableState& ps); /* Can be used as a key in boost::unordered_* */
template<typename Archive>
void PortableState::serialize(Archive& ar, const unsigned int version)
{
ar & round_;
ar & currentstep_;
ar & relationset_;
}
/*****************************************************************************************
* PortablePlayer
*****************************************************************************************/
class PortablePlayer
{
public:
PortablePlayer();
PortablePlayer(const Player& player);
PortablePlayer(const PortablePlayer& other);
PortablePlayer& operator=(const PortablePlayer& other);
bool operator==(const PortablePlayer& other) const;
bool operator!=(const PortablePlayer& other) const;
bool operator<(const PortablePlayer& other) const;
std::size_t hash_value() const;
private:
friend class Player;
friend class boost::serialization::access;
/* PortablePlayer default constructor is now public!
// NOTE: 1) Need to make PortablePlayerMove and PortablePlayerGoal friends
// to allow the deserialization of these objects to work.
// This is ugly and does in theory allow empty Portable objects to
// be created by a user (by way of the friend pair).
// 2) cannot be a friend of a typedef. I think this has changed in C++11.
friend class std::pair<PortablePlayer, unsigned int>;
friend class std::pair<const PortablePlayer, unsigned int>;
friend class std::pair<PortablePlayer, uint64_t>;
friend class std::pair<const PortablePlayer, uint64_t>;
friend class std::pair<PortablePlayer, PortableMove>;
friend class std::pair<const PortablePlayer, PortableMove>;
friend class std::pair<PortablePlayer, const PortableMove>;
friend class std::pair<const PortablePlayer, const PortableMove>;
*/
template<typename Archive>
void serialize(Archive& ar, const unsigned int version);
unsigned int roleid_;
};
template<typename Archive>
void PortablePlayer::serialize(Archive& ar, const unsigned int version)
{
ar & roleid_;
}
std::size_t hash_value(const PortablePlayer& pp); /* Can be used as a key in boost::unordered_* */
/*****************************************************************************************
* PortableMove
*****************************************************************************************/
class PortableMove
{
public:
PortableMove();
PortableMove(const Move& move);
PortableMove(const PortableMove& other);
PortableMove& operator=(const PortableMove& other);
bool operator==(const PortableMove& other) const;
bool operator!=(const PortableMove& other) const;
bool operator<(const PortableMove& other) const;
std::size_t hash_value() const;
private:
friend class Move;
friend class boost::serialization::access;
/*
PortableMove default constructor is now public!
// NOTE: 1) Need to make PortablePlayerMove a friend to allow the deserialization
// of these objects to work. This is ugly and does in theory allow empty
// Portable objects to be created by a user (by way of the friend pair).
// 2) cannot be a friend of a typedef. I think this has changed in C++11.
friend class std::pair<PortablePlayer, PortableMove>;
friend class std::pair<const PortablePlayer, PortableMove>;
friend class std::pair<PortablePlayer, const PortableMove>;
friend class std::pair<const PortablePlayer, const PortableMove>;
*/
template<typename Archive>
void serialize(Archive& ar, const unsigned int version);
int RoleIndex_;
std::string Text_;
int RelationIndex_;
int ID_;
};
template<typename Archive>
void PortableMove::serialize(Archive& ar, const unsigned int version)
{
ar & RoleIndex_;
ar & Text_;
ar & RelationIndex_;
ar & ID_;
}
std::size_t hash_value(const PortableMove& pm); /* Can be used as a key in boost::unordered_* */
/*****************************************************************************************
* Support functor to convert to/from collection of PortableX's. Here is an example
* of how to use it:
*
* Game game(<some_game>);
* std::vector<PortablePlayerMove> ppmoves;
*
* .... // Assign to ppmoves;
*
* std::vector<PlayerMove> pmoves;
* std::transform(ppmoves.begin(), ppmoves.end(),
* std::back_inserter(pmoves), FromPortable(game));
*
     * To convert in the opposite direction use ToPortable(). This is only strictly
     * necessary for PortableJointMove and PortableJointGoal conversion, since in the other
* cases the std::copy could be used. However, these other functions have been defined for
* conversion to all portable types for simplicity/completeness.
*
* std::transform(pmoves.begin(), pmoves.end(), std::back_inserter(ppmoves), ToPortable());
*
* NOTE: I think the value_type for PortableJointMove is:
* std::pair<const PortablePlayer,PortableMove> This seems to be making conversion for
* PortablePlayerMove (which is std::pair<PortablePlayer,PortableMove>) ambiguous. So
* explicitly add conversions for this. Doing the same for PortableJointGoal as well.
*
*****************************************************************************************/
struct FromPortable
{
public:
FromPortable(Game& game);
JointMove operator()(const PortableJointMove& pjm);
JointGoal operator()(const PortableJointGoal& pjg);
PlayerMove operator()(const PortablePlayerMove& ppm);
PlayerMove operator()(const std::pair<const PortablePlayer, PortableMove>& ppm);
PlayerGoal operator()(const PortablePlayerGoal& ppg);
PlayerGoal operator()(const std::pair<const PortablePlayer, unsigned int>& ppg);
Player operator()(const PortablePlayer& pp);
Move operator()(const PortableMove& pm);
State operator()(const PortableState& ps);
private:
Game& game_;
};
/*****************************************************************************************
* Inlined implementation of FromPortable.
*****************************************************************************************/
inline FromPortable::FromPortable(Game& game) : game_(game) { }
inline JointMove FromPortable::operator()(const PortableJointMove& pjm)
{
JointMove jm;
std::transform(pjm.begin(), pjm.end(), std::inserter(jm,jm.begin()),*this);
return jm;
}
inline JointGoal FromPortable::operator()(const PortableJointGoal& pjg)
{
JointGoal jg;
std::transform(pjg.begin(), pjg.end(), std::inserter(jg,jg.begin()),*this);
return jg;
}
inline PlayerMove FromPortable::operator()(const PortablePlayerMove& ppm)
{ return PlayerMove(Player(game_, ppm.first), Move(game_,ppm.second)); }
inline PlayerMove FromPortable::operator()(const std::pair<const PortablePlayer, PortableMove>& ppm)
{ return PlayerMove(Player(game_, ppm.first), Move(game_,ppm.second)); }
inline PlayerGoal FromPortable::operator()(const PortablePlayerGoal& ppg)
{ return PlayerGoal(Player(game_, ppg.first), ppg.second); }
inline PlayerGoal FromPortable::operator()(const std::pair<const PortablePlayer, unsigned int>& ppg)
{ return PlayerGoal(Player(game_, ppg.first), ppg.second); }
inline Player FromPortable::operator()(const PortablePlayer& pp)
{ return Player(game_, pp); }
inline Move FromPortable::operator()(const PortableMove& pm)
{ return Move(game_, pm); }
inline State FromPortable::operator()(const PortableState& ps)
{ return State(game_, ps); }
/*****************************************************************************************
* ToPortable. See explanation of FromPortable.
*****************************************************************************************/
struct ToPortable
{
public:
ToPortable();
ToPortable(Game& game);
PortableJointMove operator()(const JointMove& jm);
PortableJointGoal operator()(const JointGoal& jg);
PortablePlayerMove operator()(const PlayerMove& ppm);
PortablePlayerGoal operator()(const PlayerGoal& ppg);
PortablePlayer operator()(const Player& pp);
PortableMove operator()(const Move& pm);
PortableState operator()(const State& ps);
};
/*****************************************************************************************
* inlined implementation of ToPortable.
*****************************************************************************************/
inline ToPortable::ToPortable(){ }
inline ToPortable::ToPortable(Game& game){ }
inline PortableJointMove ToPortable::operator()(const JointMove& jm)
{
PortableJointMove pjm;
std::copy(jm.begin(), jm.end(), std::inserter(pjm,pjm.begin()));
return pjm;
}
inline PortableJointGoal ToPortable::operator()(const JointGoal& jg)
{
PortableJointGoal pjg;
std::copy(jg.begin(), jg.end(), std::inserter(pjg,pjg.begin()));
return pjg;
}
inline PortablePlayerMove ToPortable::operator()(const PlayerMove& pm)
{ return PortablePlayerMove(pm); }
inline PortablePlayerGoal ToPortable::operator()(const PlayerGoal& pg)
{ return PortablePlayerGoal(pg); }
inline PortablePlayer ToPortable::operator()(const Player& p)
{ return PortablePlayer(p); }
inline PortableMove ToPortable::operator()(const Move& m)
{ return PortableMove(m); }
inline PortableState ToPortable::operator()(const State& s)
{ return PortableState(s); }
}; /* namespace HSFC */
#endif // HSFC_PORTABLE_H
| AbdallahS/ggp-hsfc | cpphsfc/hsfc/portable.h | C | bsd-2-clause | 11,417 |
#!/usr/bin/env python
#Copyright (c) <2015>, <Jaakko Leppakangas>
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
#
#1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
#WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
#ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
#(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
#ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
#SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#The views and conclusions contained in the software and documentation are those
#of the authors and should not be interpreted as representing official policies,
#either expressed or implied, of the FreeBSD Project.
'''
Created on Dec 16, 2014
@author: Jaakko Leppakangas
'''
import sys
from PyQt4 import QtGui
from ui.preprocessDialog import PreprocessDialog
def main():
app = QtGui.QApplication(sys.argv)
window=PreprocessDialog()
window.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| jaeilepp/eggie | eggie.py | Python | bsd-2-clause | 1,903 |
# Customize below path information
#TET_INSTALL_PATH=/scratchbox/TETware
CURRENT_USER=`echo $HOME`
TET_INSTALL_PATH=$CURRENT_USER/sbs/TETware
# temporary fix for SPRC
if [ "$TET_ROOT_DIR" != "" ]; then
TET_INSTALL_PATH=$TET_ROOT_DIR
fi
TET_SIMUL_PATH=$TET_INSTALL_PATH/tetware-simulator
TET_TARGET_PATH=$TET_INSTALL_PATH/tetware-target
TET_MOUNTED_PATH=/mnt/nfs/sbs/TETware/tetware-target
#TET_MOUNTED_PATH=/opt/home/root/tmp/sbs/TETware/tetware-target
#MACHINE=`echo $SBOX_UNAME_MACHINE`
MACHINE=`echo $DEB_BUILD_ARCH_ABI`
if [ $MACHINE = "gnu" ] # SBS i386
then
export ARCH=simulator
export TET_ROOT=$TET_SIMUL_PATH
elif [ $MACHINE = "gnueabi" ] # SBS ARM
then
export ARCH=target
export TET_ROOT=$TET_TARGET_PATH
else
export ARCH=target
export TET_ROOT=$TET_MOUNTED_PATH
fi
export PATH=$TET_ROOT/bin:$PATH
export LD_LIBRARY_PATH=$TET_ROOT/lib/tet3:$LD_LIBRARY_PATH
set $(pwd)
export TET_SUITE_ROOT=$1
set $(date +%y%m%d_%H%M%S)
FILE_NAME_EXTENSION=$1
| tguillem/tizen_emotion | TC/_export_env.sh | Shell | bsd-2-clause | 976 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_22) on Tue Mar 08 15:19:00 PST 2011 -->
<TITLE>
Uses of Class com.perforce.p4java.ant.tasks.ServerTask.GlobalOption (P4Ant)
</TITLE>
<META NAME="date" CONTENT="2011-03-08">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class com.perforce.p4java.ant.tasks.ServerTask.GlobalOption (P4Ant)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.GlobalOption.html" title="class in com.perforce.p4java.ant.tasks"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?com/perforce/p4java/ant/tasks//class-useServerTask.GlobalOption.html" target="_top"><B>FRAMES</B></A>
<A HREF="ServerTask.GlobalOption.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>com.perforce.p4java.ant.tasks.ServerTask.GlobalOption</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.GlobalOption.html" title="class in com.perforce.p4java.ant.tasks">ServerTask.GlobalOption</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.perforce.p4java.ant.tasks"><B>com.perforce.p4java.ant.tasks</B></A></TD>
<TD>
The Perforce tasks implement Perforce commands using the Perforce Java API. </TD>
</TR>
</TABLE>
<P>
<A NAME="com.perforce.p4java.ant.tasks"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.GlobalOption.html" title="class in com.perforce.p4java.ant.tasks">ServerTask.GlobalOption</A> in <A HREF="../../../../../../com/perforce/p4java/ant/tasks/package-summary.html">com.perforce.p4java.ant.tasks</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Fields in <A HREF="../../../../../../com/perforce/p4java/ant/tasks/package-summary.html">com.perforce.p4java.ant.tasks</A> with type parameters of type <A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.GlobalOption.html" title="class in com.perforce.p4java.ant.tasks">ServerTask.GlobalOption</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>protected java.util.List<<A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.GlobalOption.html" title="class in com.perforce.p4java.ant.tasks">ServerTask.GlobalOption</A>></CODE></FONT></TD>
<TD><CODE><B>ServerTask.</B><B><A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.html#globaloptions">globaloptions</A></B></CODE>
<BR>
Collection of globaloptions (name-value pairs) contained in the
"globaloption" nested elements.</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../../com/perforce/p4java/ant/tasks/package-summary.html">com.perforce.p4java.ant.tasks</A> that return <A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.GlobalOption.html" title="class in com.perforce.p4java.ant.tasks">ServerTask.GlobalOption</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.GlobalOption.html" title="class in com.perforce.p4java.ant.tasks">ServerTask.GlobalOption</A></CODE></FONT></TD>
<TD><CODE><B>ServerTask.</B><B><A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.html#createGlobalOption()">createGlobalOption</A></B>()</CODE>
<BR>
This method is called by an Ant factory method to instantiates a
collection of "globaloption" nested elements.</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../com/perforce/p4java/ant/tasks/ServerTask.GlobalOption.html" title="class in com.perforce.p4java.ant.tasks"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?com/perforce/p4java/ant/tasks//class-useServerTask.GlobalOption.html" target="_top"><B>FRAMES</B></A>
<A HREF="ServerTask.GlobalOption.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
<i>Copyright (c) 2010 Perforce Software. All rights reserved.</i>
</BODY>
</HTML>
| junejosheeraz/p4antExt | docs/javadoc/com/perforce/p4java/ant/tasks/class-use/ServerTask.GlobalOption.html | HTML | bsd-2-clause | 9,582 |
L.CommunistWorker = L.AbstractWorker.extend({
statics: {
// number of web workers, not using web workers when falsy
NUM_WORKERS: 2
},
initialize: function (workerFunc) {
this.workerFunc = workerFunc;
},
onAdd: function (map) {
this._workers = L.CommunistWorker.createWorkers(this.workerFunc);
},
onRemove: function (map) {
if (this._workers) {
// TODO do not close when other layers are still using the static instance
//this._workers.close();
}
},
process: function(tile, callback) {
if (this._workers){
tile._worker = this._workers.data(tile.datum).then(function(parsed) {
if (tile._worker) {
tile._worker = null;
tile.parsed = parsed;
tile.datum = null;
callback(null, tile);
} else {
// tile has been unloaded, don't continue with adding
//console.log('worker aborted ' + tile.key);
}
});
} else {
callback(null, tile);
}
},
abort: function(tile) {
if (tile._worker) {
// TODO abort worker, would need to recreate after close
//tile._worker.close();
tile._worker = null;
}
}
});
L.communistWorker = function (workerFunc) {
return new L.CommunistWorker(workerFunc);
};
L.extend(L.CommunistWorker, {
createWorkers: function(workerFunc) {
if ( L.CommunistWorker.NUM_WORKERS && typeof Worker === "function" && typeof communist === "function"
&& !("workers" in L.CommunistWorker)) {
L.CommunistWorker.workers = communist({
//data : L.TileLayer.Vector.parseData
data : workerFunc
}, L.CommunistWorker.NUM_WORKERS);
}
return L.CommunistWorker.workers;
}
});
| nrenner/leaflet-tilelayer-vector | CommunistWorker.js | JavaScript | bsd-2-clause | 1,976 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace nora.clara.machine {
public enum Event {
OPEN_REQUEST,
ACTION_REQUEST,
DEATH_REQUEST,
CONNECTED_TO_STEAM,
PLAYING_STATE_OPENED,
PLAYING_STATE_CLOSED,
DISCONNECTED_FROM_STEAM,
WELCOMED_STALE_LOBBY,
WELCOMED,
JOINED_CHAT,
CREATED_LOBBY,
LEFT_LOBBY,
PLAYER_JOINED,
EMPTIED,
LOBBY_READY,
LOBBY_NOT_READY,
GOT_APP_TICKET,
GOT_AUTH,
GOT_SESSION,
GOT_TV,
GAME_SERVER_NOT_FOUND,
DENIED_TV,
SERVER_RUNNING,
SERVER_WAITING_FOR_PLAYERS,
GAME_SERVER_QUIT,
}
}
| dschleck/nora | clara/machine/Event.cs | C# | bsd-2-clause | 795 |
About
=====
The 'reproject' package is a Python package to reproject astronomical images using various techniques via a uniform interface. By *reprojection*, we mean the re-gridding of images from one world coordinate system to another (for example changing the pixel resolution, orientation, coordinate system). Currently, we have implemented reprojection of celestial images by interpolation (like [SWARP](http://www.astromatic.net/software/swarp)), as well as by finding the exact overlap between pixels on the celestial sphere (like [Montage](http://montage.ipac.caltech.edu/index.html)). It can also reproject to/from HEALPIX projections by relying on the [healpy](https://github.com/healpy/healpy) package.
For more information, including how to install the package, see http://reproject.readthedocs.org
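As a quick illustration, here is a minimal sketch of the interpolation-based interface (the FITS file names are placeholders; see the documentation above for the authoritative API):
```
from astropy.io import fits
from reproject import reproject_interp

# Placeholder file names: any two FITS images with celestial WCS headers will do.
hdu_in = fits.open('input_image.fits')[0]
target_header = fits.getheader('reference_image.fits')

# Regrid the input image onto the reference WCS (the header defines the output grid);
# 'footprint' marks which output pixels received valid input data.
reprojected, footprint = reproject_interp(hdu_in, target_header)
fits.writeto('reprojected.fits', reprojected, target_header)
```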


Note on license
===============
The code in this package is released under the BSD license. However, the
functions relating to HEALPIX rely on the
[healpy](https://github.com/healpy/healpy) package, which is GPLv2, so if you
use these functions in your code, you are indirectly using healpy and therefore
will need to abide by the GPLv2 license.
Status
======
[](https://travis-ci.org/astrofrog/reproject) [](https://coveralls.io/r/astrofrog/reproject?branch=master) [](http://astrofrog.github.io/reproject-benchmarks/)
| mwcraig/reproject | README.md | Markdown | bsd-2-clause | 1,726 |
# -*- coding: utf-8 -*-
"""
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2009 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os
import re
import unittest
import StringIO
import tempfile
from os.path import join, dirname, isfile, abspath
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter, NullFormatter
from pygments.formatters.html import escape_html
import support
TESTFILE, TESTDIR = support.location(__file__)
tokensource = list(PythonLexer(encoding='utf-8').get_tokens(open(TESTFILE).read()))
class HtmlFormatterTest(unittest.TestCase):
def test_correct_output(self):
hfmt = HtmlFormatter(nowrap=True)
houtfile = StringIO.StringIO()
hfmt.format(tokensource, houtfile)
nfmt = NullFormatter()
noutfile = StringIO.StringIO()
nfmt.format(tokensource, noutfile)
stripped_html = re.sub('<.*?>', '', houtfile.getvalue())
escaped_text = escape_html(noutfile.getvalue())
self.assertEquals(stripped_html, escaped_text)
def test_external_css(self):
# test correct behavior
# CSS should be in /tmp directory
fmt1 = HtmlFormatter(full=True, cssfile='fmt1.css', outencoding='utf-8')
# CSS should be in TESTDIR (TESTDIR is absolute)
fmt2 = HtmlFormatter(full=True, cssfile=join(TESTDIR, 'fmt2.css'),
outencoding='utf-8')
tfile = tempfile.NamedTemporaryFile(suffix='.html')
fmt1.format(tokensource, tfile)
try:
fmt2.format(tokensource, tfile)
self.assert_(isfile(join(TESTDIR, 'fmt2.css')))
except IOError:
# test directory not writable
pass
tfile.close()
self.assert_(isfile(join(dirname(tfile.name), 'fmt1.css')))
os.unlink(join(dirname(tfile.name), 'fmt1.css'))
try:
os.unlink(join(TESTDIR, 'fmt2.css'))
except OSError:
pass
def test_all_options(self):
for optdict in [dict(nowrap=True),
dict(linenos=True),
dict(linenos=True, full=True),
dict(linenos=True, full=True, noclasses=True)]:
outfile = StringIO.StringIO()
fmt = HtmlFormatter(**optdict)
fmt.format(tokensource, outfile)
def test_valid_output(self):
# test all available wrappers
fmt = HtmlFormatter(full=True, linenos=True, noclasses=True,
outencoding='utf-8')
handle, pathname = tempfile.mkstemp('.html')
tfile = os.fdopen(handle, 'w+b')
fmt.format(tokensource, tfile)
tfile.close()
catname = os.path.join(TESTDIR, 'dtds', 'HTML4.soc')
try:
try:
import subprocess
ret = subprocess.Popen(['nsgmls', '-s', '-c', catname, pathname],
stdout=subprocess.PIPE).wait()
except ImportError:
# Python 2.3 - no subprocess module
ret = os.popen('nsgmls -s -c "%s" "%s"' % (catname, pathname)).close()
if ret == 32512: raise OSError # not found
except OSError:
# nsgmls not available
pass
else:
self.failIf(ret, 'nsgmls run reported errors')
os.unlink(pathname)
def test_get_style_defs(self):
fmt = HtmlFormatter()
sd = fmt.get_style_defs()
self.assert_(sd.startswith('.'))
fmt = HtmlFormatter(cssclass='foo')
sd = fmt.get_style_defs()
self.assert_(sd.startswith('.foo'))
sd = fmt.get_style_defs('.bar')
self.assert_(sd.startswith('.bar'))
sd = fmt.get_style_defs(['.bar', '.baz'])
fl = sd.splitlines()[0]
self.assert_('.bar' in fl and '.baz' in fl)
def test_unicode_options(self):
fmt = HtmlFormatter(title=u'Föö',
cssclass=u'bär',
cssstyles=u'div:before { content: \'bäz\' }',
encoding='utf-8')
handle, pathname = tempfile.mkstemp('.html')
tfile = os.fdopen(handle, 'w+b')
fmt.format(tokensource, tfile)
tfile.close()
| erickt/pygments | tests/test_html_formatter.py | Python | bsd-2-clause | 4,348 |
/*
* Copyright (c) 2009-2010 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.gde.core.filters.impl;
import com.jme3.gde.core.filters.AbstractFilterNode;
import com.jme3.gde.core.filters.FilterNode;
import com.jme3.post.Filter;
import com.jme3.water.WaterFilter;
import org.openide.loaders.DataObject;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
/**
*
* @author Rémy Bouquet
*/
@org.openide.util.lookup.ServiceProvider(service = FilterNode.class)
public class JmeWaterFilter extends AbstractFilterNode {
public JmeWaterFilter() {
}
public JmeWaterFilter(WaterFilter filter, DataObject object, boolean readOnly) {
super(filter);
this.dataObject = object;
this.readOnly = readOnly;
}
@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set set = Sheet.createPropertiesSet();
set.setDisplayName("Water");
set.setName("Water");
WaterFilter obj = (WaterFilter) filter;
if (obj == null) {
return sheet;
}
createFields(WaterFilter.class, set, obj);
sheet.put(set);
return sheet;
}
@Override
public Class<?> getExplorerObjectClass() {
return WaterFilter.class;
}
@Override
public Node[] createNodes(Object key, DataObject dataObject, boolean readOnly) {
return new Node[]{new JmeWaterFilter((WaterFilter) key, dataObject, readOnly)};
}
}
| chototsu/MikuMikuStudio | sdk/jme3-core/src/com/jme3/gde/core/filters/impl/JmeWaterFilter.java | Java | bsd-2-clause | 3,026 |
<?php
namespace Nether\Object\Meta;
use Attribute;
use Nether\Object\Prototype\AttributeInterface;
use Nether\Object\Prototype\PropertyAttributes;
#[Attribute(Attribute::TARGET_PROPERTY)]
class PropertyOrigin
implements AttributeInterface {
/*//
@date 2021-08-05
@related Nether\Object\Prototype::__Construct
	when attached to a class property with a single string argument, it will tell
	the prototype object to pull the data stored under the name given in the argument
	and put it into the property this is attached to.
//*/
public string
$Name;
public function
__Construct(string $Name) {
$this->Name = $Name;
return;
}
public function
OnPropertyAttributes(PropertyAttributes $Attrib):
static {
$Attrib->Origin = $this->Name;
return $this;
}
}
| netherphp/object | src/Nether/Object/Meta/PropertyOrigin.php | PHP | bsd-2-clause | 753 |
#!/usr/bin/perl
#
# Adduser script
# Rootnode, http://rootnode.net
#
# Copyright (C) 2012 Marcin Hlybin
# All rights reserved.
#
# Get user data from signup database
# Create PAM+Satan user
# Insert user data into adduser database
# Assign servers to user
# Create containers with lxc-add remote script
use warnings;
use strict;
use Readonly;
use FindBin qw($Bin);
use File::Basename qw(basename);
use POSIX qw(isdigit);
use DBI;
use Getopt::Long;
use Smart::Comments;
# Server ID's
Readonly my $WEB_SERVER_ID => 1;
Readonly my $APP_SERVER_ID => 1;
Readonly my $DEV_SERVER_ID => 1;
# Satan connection params
Readonly my $SATAN_ADDR => '10.101.0.5';
Readonly my $SATAN_PORT => '1600';
Readonly my $SATAN_KEY => '/etc/satan/key';
Readonly my $SATAN_BIN => '/usr/local/bin/satan';
# SSH params
Readonly my $SSH_BIN => '/usr/bin/ssh';
Readonly my $SSH_ADD_KEY => '/root/.ssh/lxc_add_rsa';
Readonly my $SSH_ADD_COMMAND => '/usr/local/sbin/lxc-add';
# Usage
Readonly my $BASENAME => basename($0);
Readonly my $USAGE => <<END_OF_USAGE;
Adduser script
Usage:
$BASENAME signup get signup data and create pam user
$BASENAME pam --uid <uid> create pam user omitting signup DB
$BASENAME container --uid <uid> --server <type> create container of given type
END_OF_USAGE
# Check configuration
-f $SATAN_BIN or die "\$SATAN_BIN ($SATAN_BIN) not found.\n";
-f $SATAN_KEY or die "\$SATAN_KEY ($SATAN_KEY) not found.\n";
-f $SSH_ADD_KEY or die "\$SSH_ADD_KEY ($SSH_ADD_KEY) not found.\n";
# Signup database
my %db_signup;
$db_signup{dbh} = DBI->connect("dbi:mysql:signup;mysql_read_default_file=$Bin/config/my.signup.cnf", undef, undef, { RaiseError => 1, AutoCommit => 1 });
$db_signup{get_user} = $db_signup{dbh}->prepare("SELECT * FROM users WHERE status IS NULL LIMIT 1");
$db_signup{del_user} = $db_signup{dbh}->prepare("DELETE FROM users WHERE id=?");
$db_signup{set_status} = $db_signup{dbh}->prepare("UPDATE users SET status=? WHERE id=?");
# Adduser database
my %db_adduser;
$db_adduser{dbh} = DBI->connect("dbi:mysql:adduser;mysql_read_default_file=$Bin/config/my.adduser.cnf", undef, undef, { RaiseError => 1, AutoCommit => 1 });
$db_adduser{add_user} = $db_adduser{dbh}->prepare("INSERT INTO users (uid, user_name, mail, created_at) VALUES(?,?,?, NOW())");
$db_adduser{get_user} = $db_adduser{dbh}->prepare("SELECT * FROM users WHERE uid=?");
$db_adduser{add_credentials} = $db_adduser{dbh}->prepare("INSERT INTO credentials(uid, satan_key, pam_passwd, pam_shadow, user_password, user_password_p) VALUES(?,?,?,?,?,?)");
$db_adduser{get_credentials} = $db_adduser{dbh}->prepare("SELECT * FROM credentials WHERE uid=?");
$db_adduser{add_container} = $db_adduser{dbh}->prepare("INSERT INTO containers(uid, server_type, server_no) VALUES(?,?,?)");
$db_adduser{get_container} = $db_adduser{dbh}->prepare("SELECT server_no FROM containers WHERE uid=? AND server_type=? and status is NULL");
$db_adduser{set_container_status} = $db_adduser{dbh}->prepare("UPDATE containers SET status=? WHERE uid=? AND server_type=?");
# Get arguments
die $USAGE unless @ARGV;
my $mode_type = shift or die "Mode not specified. Use 'signup', 'container' or 'pam'.\n";
# Get command line arguments
my ($opt_uid, $opt_server);
GetOptions(
'uid=i' => \$opt_uid,
'server=s' => \$opt_server,
);
# SIGNUP MODE
# Get 1 user from signup database
# Create Pam+Satan user
# Store data in adduser database
if ($mode_type eq 'signup') {
# Get one record from signup database
$db_signup{get_user}->execute;
# Exit if nothing found
my $record_found = $db_signup{get_user}->rows;
$record_found or exit;
# Get user data
my $signup_record = $db_signup{get_user}->fetchall_hashref('user_name');
### $signup_record
# Get username
my @user_names = keys %$signup_record;
my $user_name = shift @user_names;
### $user_name
# User record
my $user_record = $signup_record->{$user_name};
### $user_record
# Add PAM+Satan user
my $satan_response;
eval {
$satan_response = satan('admin', 'adduser', $user_name);
};
# Satan error
if ($@) {
my $error_message = $@;
chomp $error_message;
my $user_id = $user_record->{id};
$db_signup{set_status}->execute($error_message, $user_id);
die "Satan error: $@";
}
my $uid = $satan_response->{uid} or die "Didn't get uid from satan";
### $satan_response
# XXX Catch satan error
# $db_signup{set_status}->execute($error_message, $user_record->{id});
# Add record to adduser database
$db_adduser{add_user}->execute(
$uid,
$user_record->{user_name},
$user_record->{mail},
);
# Insert user credentials
$db_adduser{add_credentials}->execute(
$uid,
$satan_response->{satan_key},
$satan_response->{pam_passwd},
$satan_response->{pam_shadow},
$satan_response->{user_password},
$satan_response->{user_password_p}
);
# Assign servers
set_containers($uid);
# Remove record from signup database
$db_signup{del_user}->execute($user_record->{id});
exit;
}
# PAM MODE
# Create pam and satan user only.
if ($mode_type eq 'pam') {
# Mandatory arguments
defined $opt_uid or die "Uid not specified.";
my $uid = $opt_uid;
# Get user from adduser database
$db_adduser{get_user}->execute($uid);
my $user_found = $db_adduser{get_user}->rows;
$user_found or die "Uid '$uid' not found in adduser database.\n";
# User record
my $user_record = $db_adduser{get_user}->fetchall_hashref('uid')->{$uid};
my $user_name = $user_record->{user_name} or die "User name not found";
# Add PAM+Satan user with predefined uid
my $satan_response = satan('admin', 'adduser', $user_name, 'uid', $uid);
# Insert user credentials
$db_adduser{add_credentials}->execute(
$uid,
$satan_response->{satan_key},
$satan_response->{pam_passwd},
$satan_response->{pam_shadow},
$satan_response->{user_password},
$satan_response->{user_password_p}
);
# Assign servers to user
set_containers($uid);
exit;
}
# CONTAINER MODE
# Create user container on specified server.
# User must exist in adduser database.
if ($mode_type eq 'container') {
# Mandatory arguments
defined $opt_uid or die "Uid not specified.";
defined $opt_server or die "Server name not specified.";
my $uid = $opt_uid;
my $server_type = $opt_server;
# Get user from adduser database
$db_adduser{get_user}->execute($uid);
my $user_found = $db_adduser{get_user}->rows;
$user_found or die "Uid '$uid' not found in adduser database.\n";
# User record
my $user_record = $db_adduser{get_user}->fetchall_hashref('uid')->{$uid};
my $user_name = $user_record->{user_name} or die "User name not found";
### $user_name
# Get credentials
$db_adduser{get_credentials}->execute($uid);
my $credentials_found = $db_adduser{get_credentials}->rows;
$credentials_found or die "Uid '$uid' not found in database.\n";
my $credentials = $db_adduser{get_credentials}->fetchall_hashref('uid')->{$uid};
# Get container number
$db_adduser{get_container}->execute($uid, $server_type);
my $server_found = $db_adduser{get_container}->rows;
$server_found or die "Container type '$server_type' not defined for user '$uid'.\n";
my $server_no = $db_adduser{get_container}->fetchrow_arrayref->[0];
isdigit($server_no) or die "Server no '$server_no' not a number.\n";
my $server_name = $server_type . $server_no;
# Set SSH command arguments
my $command_args = "satan_key $credentials->{satan_key} "
. "pam_passwd $credentials->{pam_passwd} "
. "pam_shadow $credentials->{pam_shadow} "
. "uid $uid "
. "user_name $user_name";
### $command_args
system("$SSH_BIN -i $SSH_ADD_KEY root\@system.$server_name.rootnode.net $SSH_ADD_COMMAND $command_args");
if ($?) {
my $error_message = $!;
chomp $error_message;
$db_adduser{set_container_status}->execute($error_message, $uid, $server_type);
die "lxc-add failed: $!";
}
$db_adduser{set_container_status}->execute('OK', $uid, $server_type);
exit;
}
die "Unknown mode '$mode_type'. Cannot proceed.\n";
sub set_containers {
my ($uid) = @_;
defined $uid or die "Uid not defined in set_containers sub";
# Set server IDs
my %server_no_for = (
web => $WEB_SERVER_ID,
app => $APP_SERVER_ID,
dev => $DEV_SERVER_ID,
);
# Assign servers to user
foreach my $server_type (keys %server_no_for) {
# Get server number
my $server_no = $server_no_for{$server_type};
# Add server to database
$db_adduser{add_container}->execute(
$uid,
$server_type,
$server_no
);
}
return;
}
sub satan {
local @ARGV;
# Satan arguments
push @ARGV, '-a', $SATAN_ADDR if defined $SATAN_ADDR;
push @ARGV, '-p', $SATAN_PORT if defined $SATAN_PORT;
push @ARGV, '-k', $SATAN_KEY if defined $SATAN_KEY;
push @ARGV, @_;
# Send to satan
my $response = do $SATAN_BIN;
# Catch satan error
if ($@) {
my $error_message = $@;
die "Cannot proccess $@";
}
return $response;
}
sub do_rollback {
my ($error_message, $user_id) = @_;
$db_signup{set_status}->execute($error_message, $user_id);
}
exit;
| ingeniarius/lxc-rootnodes | adduser/adduser.pl | Perl | bsd-2-clause | 9,109 |
/**
* Copyright (c) 2013, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* https://raw.github.com/facebook/regenerator/master/LICENSE file. An
* additional grant of patent rights can be found in the PATENTS file in
* the same directory.
*/
var assert = require("assert");
var types = require("ast-types");
var n = types.namedTypes;
var b = types.builders;
var inherits = require("util").inherits;
function Entry() {
assert.ok(this instanceof Entry);
}
function FunctionEntry(returnLoc) {
Entry.call(this);
n.Literal.assert(returnLoc);
Object.defineProperties(this, {
returnLoc: { value: returnLoc }
});
}
inherits(FunctionEntry, Entry);
exports.FunctionEntry = FunctionEntry;
function LoopEntry(breakLoc, continueLoc, label) {
Entry.call(this);
n.Literal.assert(breakLoc);
n.Literal.assert(continueLoc);
if (label) {
n.Identifier.assert(label);
} else {
label = null;
}
Object.defineProperties(this, {
breakLoc: { value: breakLoc },
continueLoc: { value: continueLoc },
label: { value: label }
});
}
inherits(LoopEntry, Entry);
exports.LoopEntry = LoopEntry;
function SwitchEntry(breakLoc) {
Entry.call(this);
n.Literal.assert(breakLoc);
Object.defineProperties(this, {
breakLoc: { value: breakLoc }
});
}
inherits(SwitchEntry, Entry);
exports.SwitchEntry = SwitchEntry;
function TryEntry(catchEntry, finallyEntry) {
Entry.call(this);
if (catchEntry) {
assert.ok(catchEntry instanceof CatchEntry);
} else {
catchEntry = null;
}
if (finallyEntry) {
assert.ok(finallyEntry instanceof FinallyEntry);
} else {
finallyEntry = null;
}
Object.defineProperties(this, {
catchEntry: { value: catchEntry },
finallyEntry: { value: finallyEntry }
});
}
inherits(TryEntry, Entry);
exports.TryEntry = TryEntry;
function CatchEntry(firstLoc, paramId) {
Entry.call(this);
n.Literal.assert(firstLoc);
n.Identifier.assert(paramId);
Object.defineProperties(this, {
firstLoc: { value: firstLoc },
paramId: { value: paramId }
});
}
inherits(CatchEntry, Entry);
exports.CatchEntry = CatchEntry;
function FinallyEntry(firstLoc, nextLocTempVar) {
Entry.call(this);
n.Literal.assert(firstLoc);
n.Identifier.assert(nextLocTempVar);
Object.defineProperties(this, {
firstLoc: { value: firstLoc },
nextLocTempVar: { value: nextLocTempVar }
});
}
inherits(FinallyEntry, Entry);
exports.FinallyEntry = FinallyEntry;
function LeapManager(emitter) {
assert.ok(this instanceof LeapManager);
var Emitter = require("./emit").Emitter;
assert.ok(emitter instanceof Emitter);
Object.defineProperties(this, {
emitter: { value: emitter },
entryStack: {
value: [new FunctionEntry(emitter.finalLoc)]
}
});
}
var LMp = LeapManager.prototype;
exports.LeapManager = LeapManager;
LMp.withEntry = function(entry, callback) {
assert.ok(entry instanceof Entry);
this.entryStack.push(entry);
try {
callback.call(this.emitter);
} finally {
var popped = this.entryStack.pop();
assert.strictEqual(popped, entry);
}
};
LMp._leapToEntry = function(predicate, defaultLoc) {
var entry, loc;
var finallyEntries = [];
var skipNextTryEntry = null;
for (var i = this.entryStack.length - 1; i >= 0; --i) {
entry = this.entryStack[i];
if (entry instanceof CatchEntry ||
entry instanceof FinallyEntry) {
// If we are inside of a catch or finally block, then we must
// have exited the try block already, so we shouldn't consider
// the next TryStatement as a handler for this throw.
skipNextTryEntry = entry;
} else if (entry instanceof TryEntry) {
if (skipNextTryEntry) {
// If an exception was thrown from inside a catch block and this
// try statement has a finally block, make sure we execute that
// finally block.
if (skipNextTryEntry instanceof CatchEntry &&
entry.finallyEntry) {
finallyEntries.push(entry.finallyEntry);
}
skipNextTryEntry = null;
} else if ((loc = predicate.call(this, entry))) {
break;
} else if (entry.finallyEntry) {
finallyEntries.push(entry.finallyEntry);
}
} else if ((loc = predicate.call(this, entry))) {
break;
}
}
if (loc) {
// fall through
} else if (defaultLoc) {
loc = defaultLoc;
} else {
return null;
}
n.Literal.assert(loc);
var finallyEntry;
while ((finallyEntry = finallyEntries.pop())) {
this.emitter.emitAssign(finallyEntry.nextLocTempVar, loc);
loc = finallyEntry.firstLoc;
}
return loc;
};
function getLeapLocation(entry, property, label) {
var loc = entry[property];
if (loc) {
if (label) {
if (entry.label &&
entry.label.name === label.name) {
return loc;
}
} else {
return loc;
}
}
return null;
}
LMp.emitBreak = function(label) {
var loc = this._leapToEntry(function(entry) {
return getLeapLocation(entry, "breakLoc", label);
});
if (loc === null) {
throw new Error("illegal break statement");
}
this.emitter.clearPendingException();
this.emitter.jump(loc);
};
LMp.emitContinue = function(label) {
var loc = this._leapToEntry(function(entry) {
return getLeapLocation(entry, "continueLoc", label);
});
if (loc === null) {
throw new Error("illegal continue statement");
}
this.emitter.clearPendingException();
this.emitter.jump(loc);
};
| jlongster/unwinder | lib/leap.js | JavaScript | bsd-2-clause | 5,549 |
{--
Copyright (c) 2014-2020, Clockwork Dev Studio
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--}
{-# LANGUAGE CPP #-}
module Arguments where
import Prelude hiding (catch)
import LexerData
import Common
import Options
import Data.Char
import System.FilePath.Posix
import System.Directory
import System.IO
import Control.Exception
import System.Exit
import Control.Monad.State
import Control.Monad.Except
import Control.Monad.Identity
import Debug.Trace
import qualified Data.Sequence as Seq
data ConfigFile =
ConfigFile
{
configFileVariableList :: [ConfigFileVariable]
} deriving (Show,Eq)
data ConfigFileVariable =
ConfigFileVariable
{
configFileVariableName :: String,
configFileVariableValue :: String
} deriving (Show, Eq)
loadConfigFile :: Handle -> ConfigFile -> IO ConfigFile
loadConfigFile handle (ConfigFile variables) =
do let endOfFile :: IOError -> IO String
endOfFile e = do return "EOF"
line <- catch (hGetLine handle) endOfFile
if line == "EOF"
then return (ConfigFile variables)
else do let (variable,rest) = span (isAlpha) line
if variable == [] || rest == [] || head rest /= '='
then loadConfigFile handle (ConfigFile variables)
else do loadConfigFile handle (ConfigFile (variables ++ [(ConfigFileVariable variable (tail rest))]))
adjustOptionsBasedOnConfigFile :: Options -> [ConfigFileVariable] -> Options
adjustOptionsBasedOnConfigFile originalOptions (configFileVariable:rest) =
    case configFileVariableName configFileVariable of
      "backend" -> adjustOptionsBasedOnConfigFile (originalOptions {optionAssembler = configFileVariableValue configFileVariable}) rest
      _         -> adjustOptionsBasedOnConfigFile originalOptions rest
adjustOptionsBasedOnConfigFile originalOptions _ = originalOptions
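-- A sketch of the expected ~/.idlewild-lang.conf contents (hypothetical
-- example, not shipped with the source): one key=value pair per line, as
-- parsed by loadConfigFile above. Only the "backend" key is currently
-- recognised by adjustOptionsBasedOnConfigFile.
--
--   backend=nasm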
processArguments :: CodeTransformation ()
processArguments =
do homeDirectory <- liftIO $ getHomeDirectory
let writeDefaultConfigFile :: IOError -> IO Handle
writeDefaultConfigFile _ =
do newConfHandle <- openFile (homeDirectory ++ "/.idlewild-lang.conf") WriteMode
hPutStrLn newConfHandle "backend=nasm"
hClose newConfHandle
newConfHandle <- openFile (homeDirectory ++ "/.idlewild-lang.conf") ReadMode
return newConfHandle
confHandle <- liftIO $ (catch (openFile (homeDirectory ++ "/.idlewild-lang.conf") ReadMode) writeDefaultConfigFile)
configFile <- liftIO $ loadConfigFile confHandle (ConfigFile [])
let customisedOptions = adjustOptionsBasedOnConfigFile defaultOptions (configFileVariableList configFile)
liftIO $ hClose confHandle
arguments <- gets argumentStateArguments
(options, nonOptions) <- liftIO $ processOptions customisedOptions arguments
if optionShowVersion options == True
then do liftIO $ putStrLn "Idlewild-Lang version 0.0.5."
liftIO $ exitSuccess
else return ()
if length nonOptions /= 1
then do liftIO $ putStrLn "Please specify one (and only one) source file name."
liftIO $ exitSuccess
else return ()
let sourceFileName = head nonOptions
asmFileName = replaceExtension sourceFileName ".asm"
#if LINUX==1 || MAC_OS==1
objectFileName = replaceExtension sourceFileName ".o"
#elif WINDOWS==1
objectFileName = replaceExtension sourceFileName ".obj"
#endif
verbose = optionVerbose options
fromHandle <- liftIO $ openFile sourceFileName ReadMode
toHandle <- liftIO $ openFile asmFileName WriteMode
code <- liftIO $ hGetContents fromHandle
put LexState
{lexStateID = LEX_PENDING,
lexStateIncludeFileDepth = 0,
lexStateIncludeFileNameStack = [sourceFileName],
lexStateIncludeFileNames = [],
lexStateCurrentToken = emptyToken,
lexStatePendingTokens = Seq.empty,
lexStateTokens = Seq.singleton (createBOFToken sourceFileName),
lexStateLineNumber = 1,
lexStateLineOffset = 0,
lexStateCharacters = code,
lexStateCompoundTokens = allCompoundTokens,
lexStateConfig = Config {configInputFile = fromHandle,
configOutputFile = toHandle,
configSourceFileName = sourceFileName,
configAsmFileName = asmFileName,
configObjectFileName = objectFileName,
configOptions = options}}
verboseCommentary ("Program arguments okay...\n") verbose
verboseCommentary ("Source file '" ++ sourceFileName ++ "'...\n") verbose
| clockworkdevstudio/Idlewild-Lang | Arguments.hs | Haskell | bsd-2-clause | 5,916 |
#ifndef AOS_COMMON_STL_MUTEX_H_
#define AOS_COMMON_STL_MUTEX_H_
#include <mutex>
#include "aos/linux_code/ipc_lib/aos_sync.h"
#include "aos/common/logging/logging.h"
#include "aos/common/type_traits.h"
#include "aos/common/macros.h"
namespace aos {
// A mutex with the same API and semantics as ::std::mutex, with the addition of
// methods for checking if the previous owner died and a constexpr default
// constructor.
// Definitely safe to put in SHM.
// This uses the pthread_mutex semantics for owner-died: once somebody dies with
// the lock held, anybody else who takes it will see true for owner_died() until
// one of them calls consistent(). It is an error to call unlock() when
// owner_died() returns true.
class stl_mutex {
public:
constexpr stl_mutex() : native_handle_() {}
void lock() {
const int ret = mutex_grab(&native_handle_);
switch (ret) {
case 0:
break;
case 1:
owner_died_ = true;
break;
default:
LOG(FATAL, "mutex_grab(%p) failed with %d\n", &native_handle_, ret);
}
}
bool try_lock() {
const int ret = mutex_trylock(&native_handle_);
switch (ret) {
case 0:
return true;
case 1:
owner_died_ = true;
return true;
case 4:
return false;
default:
LOG(FATAL, "mutex_trylock(%p) failed with %d\n", &native_handle_, ret);
}
}
void unlock() {
CHECK(!owner_died_);
mutex_unlock(&native_handle_);
}
typedef aos_mutex *native_handle_type;
native_handle_type native_handle() { return &native_handle_; }
bool owner_died() const { return owner_died_; }
void consistent() { owner_died_ = false; }
private:
aos_mutex native_handle_;
bool owner_died_ = false;
DISALLOW_COPY_AND_ASSIGN(stl_mutex);
};
// A mutex with the same API and semantics as ::std::recursive_mutex, with the
// addition of methods for checking if the previous owner died and a constexpr
// default constructor.
// Definitely safe to put in SHM.
// This uses the pthread_mutex semantics for owner-died: once somebody dies with
// the lock held, anybody else who takes it will see true for owner_died() until
// one of them calls consistent(). It is an error to call unlock() or lock()
// again when owner_died() returns true.
class stl_recursive_mutex {
public:
constexpr stl_recursive_mutex() {}
void lock() {
if (mutex_islocked(mutex_.native_handle())) {
CHECK(!owner_died());
++recursive_locks_;
} else {
mutex_.lock();
if (mutex_.owner_died()) {
recursive_locks_ = 0;
} else {
CHECK_EQ(0, recursive_locks_);
}
}
}
bool try_lock() {
if (mutex_islocked(mutex_.native_handle())) {
CHECK(!owner_died());
++recursive_locks_;
return true;
} else {
if (mutex_.try_lock()) {
if (mutex_.owner_died()) {
recursive_locks_ = 0;
} else {
CHECK_EQ(0, recursive_locks_);
}
return true;
} else {
return false;
}
}
}
void unlock() {
if (recursive_locks_ == 0) {
mutex_.unlock();
} else {
--recursive_locks_;
}
}
typedef stl_mutex::native_handle_type native_handle_type;
native_handle_type native_handle() { return mutex_.native_handle(); }
bool owner_died() const { return mutex_.owner_died(); }
void consistent() { mutex_.consistent(); }
private:
stl_mutex mutex_;
int recursive_locks_ = 0;
DISALLOW_COPY_AND_ASSIGN(stl_recursive_mutex);
};
// Convenient typedefs for various types of locking objects.
typedef ::std::lock_guard<stl_mutex> mutex_lock_guard;
typedef ::std::lock_guard<stl_recursive_mutex> recursive_lock_guard;
typedef ::std::unique_lock<stl_mutex> mutex_unique_lock;
typedef ::std::unique_lock<stl_recursive_mutex> recursive_unique_lock;
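// Usage sketch (hypothetical, not part of this header): a critical section
// that repairs shared state when the previous lock owner died. Access() and
// the repaired data are illustrative assumptions only.
//
//   aos::stl_mutex mutex;  // e.g. placed in shared memory
//
//   void Access() {
//     aos::mutex_lock_guard guard(mutex);
//     if (mutex.owner_died()) {
//       // Repair the data protected by the mutex, then mark it consistent;
//       // unlock() (run by the guard's destructor) CHECK-fails otherwise.
//       mutex.consistent();
//     }
//     // ... read/write the protected data ...
//   }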
} // namespace aos
#endif // AOS_COMMON_STL_MUTEX_H_
| comran/SpartanBalloon2016 | aos/common/stl_mutex.h | C | bsd-2-clause | 3,899 |
#pragma once
//-----------------------------------------------------------------------------------------------------
/*
NAMESPACE::CLASS
DESC
Copyright 2015 - See license file LICENSE.txt
*/
//-----------------------------------------------------------------------------------------------------
namespace ASL
{
struct Expr
{
public:
/// constructor
Expr();
/// destructor
virtual ~Expr();
private:
};
} // namespace ASL
| Duttenheim/asl | code/expr.h | C | bsd-2-clause | 463 |
{% extends "fieldsight/fieldsight_base.html" %}
{% load i18n staticfiles %}
{% load filters %}
{% block content %}
<div id="main-content" class="padding">
{% if messages %}
{% for message in messages %}
<div class="alert alert-{% if message.tags %}{{ message.tags }}{% else %}info{% endif %}">
<strong class="text-capitalize">{% if message.tags %}{{ message.tags }}{% else %}info{% endif %}!</strong> {{ message }}
</div>
{% endfor %}
{% endif %}
{% if obj.children.all %}
<section class="panel">
<header class="panel-heading clearfix">
<h3>{% trans 'Sub regions' %}</h3>
</header>
<div class="panel-body">
<div class="row">
{% for child in obj.children.all %}
<div class="col-md-4 col-sm-6">
<a class="site-item-wrap margin-top" href="{% url 'fieldsight:regional-sites' child.project.pk child.pk %}">
<div class="basic-info clearfix">
<h4 class="detail-text1">{{ child.name }}</h4>
<h6 class="detail-text0">{{ child.identifier }}</h6>
<div>Total sites: {{ child.get_sites_count }}</div>
</div>
</a>
</div>
{% endfor %}
</div>
</div>
</section>
{% endif %}
<section class="panel">
<header class="panel-heading clearfix">
<h3><i class="la la-map-marker"></i>{% if request.GET.q %}{% trans 'Search' %} {% trans 'result' %} {% trans 'for' %} "{{request.GET.q}}"{% else %}{% trans 'Sites' %}{% endif %}</h3>
<div class="panel-heading-right">
<!-- <select name ="sortby" class="form-control form-control-sm" data-bind="value:sortby, valueUpdate:'afterkeydown'">
<option class="dropdown-list" value="sitename" disabled selected>{% trans 'Sort' %} {% trans 'By' %}</option>
<option value="progress">{% trans 'Site Progress' %}</option>
<option value="identifier">{% trans 'Site Identifier' %}</option>
<option value="sitename">{% trans 'Site Name' %}</option>
<option value="pending">{% trans 'Number of Pending Submissions' %}</option>
<option value="approved">{% trans 'Number of Approved Submissions' %}</option>
<option value="flagged">{% trans 'Number of Flagged Submissions' %}</option>
<option value="rejected">{% trans 'Number of Rejected Submissions' %}</option>
</select> -->
<a class="btn btn-sm btn-primary" data-toggle="collapse" href="#searchSite" aria-expanded="false" aria-controls="searchSite"><i class="la la-search"></i> {% trans 'Search' %}</a>
{% if type == "project" %}
<a href="{% url 'fieldsight:site-add' pk %}" title="" class="btn btn-sm btn-primary"><i class="la la-plus"></i> {% trans 'Add' %} {% trans 'New' %}</a>
<a href="{% url 'fieldsight:define-site-meta' pk %}" title="" class="btn btn-sm btn-primary"><i class="la la-cogs"></i> {% trans 'Meta' %} {% trans 'Attributes' %}</a>
<a href="{% url 'fieldsight:site-upload' pk %}" title="" class="btn btn-sm btn-primary"><i class="la la-files-o"></i> {% trans 'Bulk' %} {% trans 'Upload' %} {% trans 'Sites' %}</a>
<a href="{% url 'fieldsight:bulk-edit-site' pk %}" title="" class="btn btn-sm btn-primary"><i class="la la-files-o"></i> {% trans 'Bulk' %} {% trans 'edit' %} {% trans 'Sites' %}</a>
{% verbatim %}
<div id="export_button"></div>
{% endverbatim %}
<script>
configure_settings = {};
configure_settings.is_project_dashboard = false;
configure_settings.url = "{% url 'fieldsight_export:export_xls_project_level_sites' pk %}";
</script>
<!-- <a href="{% url 'fieldsight_export:export_xls_project_sites' pk %}" title="" class="btn btn-sm
btn-primary"><i class="la la-plus"></i> {% trans 'Export' %} {% trans 'Sites' %} {% trans 'data' %} </a> -->
{% endif %}
{% if type == "region" %}
<a href="{% url 'fieldsight:regional-site-add' pk region_id %}" title="" class="btn btn-sm
btn-primary"><i class="la la-plus"></i> {% trans 'Add' %} {% trans 'New' %} {% trans 'Regional' %} {% trans 'Site' %}</a>
<a href="{% url 'fieldsight:define-site-meta' pk %}" title="" class="btn btn-sm btn-primary"><i class="la la-cogs"></i> {% trans 'Meta' %} {% trans 'Attributes' %}</a>
{% verbatim %}
<div id="export_button"></div>
{% endverbatim %}
<script>
configure_settings = {};
configure_settings.is_project_dashboard = false;
configure_settings.url = "{% url 'fieldsight_export:export_xls_region_sites' pk region_id %}";
</script>
<!-- <a href="{% url 'fieldsight_export:export_xls_region_sites' pk region_id %}" title="" class="btn btn-sm
btn-primary"><i class="la la-plus"></i> {% trans 'Export' %} {% trans 'Sites' %} {% trans 'data' %} </a> -->
{% elif type == "Unregioned" %}
<a href="{% url 'fieldsight:site-add' project_id %}" title="" class="btn btn-sm
btn-primary"><i class="la la-plus"></i> {% trans 'Add' %} {% trans 'New' %} {% trans 'Regional' %} {% trans 'Site' %}</a>
<a href="{% url 'fieldsight:define-site-meta' project_id %}" title="" class="btn btn-sm btn-primary"><i class="la la-files-o"></i> {% trans 'Meta' %} {% trans 'Attributes' %}</a>
{% verbatim %}
<div id="export_button"></div>
{% endverbatim %}
<script>
configure_settings = {};
configure_settings.is_project_dashboard = false;
configure_settings.url = "{% url 'fieldsight_export:export_xls_region_sites' project_id 0 %}";
</script>
<!-- <a href="{% url 'fieldsight_export:export_xls_project_sites' project_id %}" title="" class="btn btn-sm
btn-primary"><i class="la la-plus"></i> {% trans 'Export' %} {% trans 'Sites' %} {% trans 'data' %} </a> -->
{% endif %}
</div>
</header>
<div class="panel-body">
<!--Search Organization-->
<div class="collapse margin-top" id="searchSite">
<form class="padding" action="{% if region_id %}{% url 'fieldsight:search-regional-site-list' pk region_id %}{% else %}{% url 'fieldsight:search-site-list' pk %}{% endif %}" method="GET">
<div class="row">
<div class="col-md-6 ml-md-auto">
<div class="input-group">
<input type="text" class="form-control" name="q" placeholder="Search for..." required value='{{ request.GET.q }}'/>
<span class="input-group-btn">
<button class="btn btn-primary" type="submit"><i class="la la-search"></i> {% trans 'Search' %}</button>
</span>
</div>
</div>
<div class="col-md-3"></div>
</div>
</form>
</div>
<div class="row" >
{% for site in object_list %}
<div class="col-md-4 col-sm-6">
<a href= {% url 'fieldsight:site-dashboard' site.pk %} title="" class="site-item-wrap margin-top clearfix">
<div class="logo">
<img src="{{ site.logo.url }}" alt="" width="100" height="100">
</div>
<div class="basic-info clearfix">
<h4 class="detail-text1" >{{site.name}}</h4>
<h6 class="detail-text0" >{{ site.identifier }}</h6>
<p class="address"><i class="fa fa-map-marker" aria-hidden="true"></i> <span class="detail-text1">{{ site.address }}</span></p>
<p class="phone"><i class="fa fa-phone" aria-hidden="true"></i> <span class="detail-text1">{{ site.phone }}</span></p>
</div>
{% with site.get_site_submission_count as count %}
<ul class="icon-listing clearfix margin-top">
<li>
<i class="la la-thumbs-up"></i>
<span>{{ count.approved }}
</span>
</li>
<li>
<i class="la la-flag"></i>
<span>
{{ count.flagged }}
</span>
</li>
<li>
<i class="la la-hourglass-half"></i>
<span>
{{ count.outstanding }}
</span>
</li>
<li>
<i class="la la-exclamation"></i>
<span>
{{ count.rejected }}
</span>
</li>
</ul>
{% endwith %}
<div class="progress margin-top">
<div class="progress progress-striped active progress-sm" style="width:100%;">
<div class="progress-bar progress-bar-success" role="progressbar" style="width:{{ site.site_progress }}%;}">
<span class="progress-bar-value" style="color:grey;">{{ site.site_progress }}% Complete'"</span>
</div>
</div>
</div>
</a>
</div>
{% empty %}
<div class="col-md-4 col-sm-6">
<h3>No Sites</h3>
</div>
{% endfor %}
</div>
<br>
{% if is_paginated %}
<nav class="pagination justify-content-center" aria-label="page-navigation">
<ul class="pagination">
{% if page_obj.number == 1 %}
{% else %}
<li class="page-item"><a class="page-link" href="?page={{ 1 }}{% if request.GET.q %}&q={{request.GET.q}}{% endif %}">First</a></li>
{% endif %}
{% if page_obj.has_previous %}
<li class="page-item"><a class="page-link" href="?page={{ page_obj.previous_page_number }}{% if request.GET.q %}&q={{request.GET.q}}{% endif %}">«</a></li>
{% else %}
<li class="page-item disabled"><a class="page-link" href="#">«</a></li>
{% endif %}
{% if paginator.num_pages > 21 %}
{% page_offsets page_obj.number paginator.num_pages 20 as data %}
{% for item in data.front_range %}
<li class="page-item"><a class="page-link" href="?page={{ item }}{% if request.GET.q %}&q={{request.GET.q}}{% endif %}">{{ item }}</a></li>
{% endfor %}
<li class="page-item active"><a class="page-link" href="?page={{ page_obj.number }}{% if request.GET.q %}&q={{request.GET.q}}{% endif %}">{{ page_obj.number }}</a></li>
{% for item in data.back_range %}
<li class="page-item"><a class="page-link" href="?page={{ item }}{% if request.GET.q %}&q={{request.GET.q}}{% endif %}">{{ item }}</a></li>
{% endfor %}
{% else %}
{% for i in paginator.page_range %}
{% if page_obj.number == i %}
<li class="page-item active"><a class="page-link" href="?page={{ i }}">{{ i }}</a></li>
{% else %}
<li class="page-item"><a class="page-link" href="?page={{ i }}">{{ i }}</a></li>
{% endif %}
{% endfor %}
{% endif %}
{% if page_obj.has_next %}
<li class="page-item"><a class="page-link" href="?page={{ page_obj.next_page_number }}{% if request.GET.q %}&q={{request.GET.q}}{% endif %}">»</a></li>
{% else %}
<li class="page-item disabled"><a class="page-link" href="#">»</a></li>
{% endif %}
{% if page_obj.number == paginator.num_pages %}
{% else %}
<li class="page-item"><a class="page-link" href="?page={{ paginator.num_pages }}{% if request.GET.q %}&q={{request.GET.q}}{% endif %}">Last</a></li>
{% endif %}
</ul>
</nav>
{% endif %}
</div>
</section>
</div>
{% endblock %}
{%block extrascript %}
<script type="text/javascript" src="{% static 'vendor/vue.js' %}"></script>
<script src="{% static 'js/forms/vue-resource.min.js' %}"></script>
<script src="{% static 'js/fieldsight/site_export.js' %}?v=0.4"></script>
{% endblock %}
| awemulya/fieldsight-kobocat | onadata/apps/fieldsight/templates/fieldsight/site_list.html | HTML | bsd-2-clause | 12,702 |
/*
* Software written by Jared Bruni https://github.com/lostjared
This software is dedicated to all the people that experience mental illness.
Website: http://lostsidedead.com
YouTube: http://youtube.com/LostSideDead
Instagram: http://instagram.com/lostsidedead
Twitter: http://twitter.com/jaredbruni
Facebook: http://facebook.com/LostSideDead0x
You can use this program free of charge and redistribute it online as long
as you do not charge anything for this program. This program is meant to be
100% free.
BSD 2-Clause License
Copyright (c) 2020, Jared Bruni
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import<Foundation/Foundation.h>
#import<Cocoa/Cocoa.h>
extern NSMutableArray *search_results;
@interface SearchController : NSObject<NSTableViewDataSource, NSTableViewDelegate> {
}
@end
| lostjared/Acid.Cam.v2.OSX | Acid.Cam.v2.OSX/AC_SearchController.h | C | bsd-2-clause | 2,061 |
class Gosec < Formula
desc "Golang security checker"
homepage "https://securego.io/"
url "https://github.com/securego/gosec/archive/v2.7.0.tar.gz"
sha256 "fd0b1ba1874cad93680c9e398af011560cd43b638c2b8d34850987a4cf984ba0"
license "Apache-2.0"
head "https://github.com/securego/gosec.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "8eaa54d014d924fa1ca807c25d7b2827023103582d7b269234f35a448787da64"
sha256 cellar: :any_skip_relocation, big_sur: "12d452f02f025f62136d866c00cdd54e0594e3ec1930d70f3aecd4960388273b"
sha256 cellar: :any_skip_relocation, catalina: "d6d5c69d310d0471950f4682193d27c4e59ef3b26acd185c01f9ab3cc7f78f92"
sha256 cellar: :any_skip_relocation, mojave: "7b07d7387e6477c1be027fd6f12eba5b3ac3f19b4fe5762cab07171aed40a514"
end
depends_on "go"
def install
system "go", "build", *std_go_args, "-ldflags", "-X main.version=v#{version}", "./cmd/gosec"
end
test do
(testpath/"test.go").write <<~EOS
package main
import "fmt"
func main() {
username := "admin"
var password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
fmt.Println("Doing something with: ", username, password)
}
EOS
output = shell_output("#{bin}/gosec ./...", 1)
assert_match "G101 (CWE-798)", output
assert_match "Issues: 1", output
end
end
| cblecker/homebrew-core | Formula/gosec.rb | Ruby | bsd-2-clause | 1,380 |
var http = require('http')
var url = require('url')
var path = require('path')
var sleep = require('sleep-ref')
var Router = require("routes-router")
var concat = require('concat-stream')
var ldj = require('ldjson-stream')
var manifest = require('level-manifest')
var multilevel = require('multilevel')
var extend = require('extend')
var prettyBytes = require('pretty-bytes')
var jsonStream = require('JSONStream')
var prebuiltEditor = require('dat-editor-prebuilt')
var debug = require('debug')('rest-handler')
var auth = require('./auth.js')
var pump = require('pump')
var zlib = require('zlib')
var through = require('through2')
module.exports = RestHandler
function RestHandler(dat) {
if (!(this instanceof RestHandler)) return new RestHandler(dat)
this.dat = dat
this.auth = auth(dat.options)
this.router = this.createRoutes()
this.sleep = sleep(function(opts) {
opts.decode = true
if (opts.live === 'true') opts.live = true
if (opts.tail === 'true') opts.tail = true
return dat.createChangesReadStream(opts)
}, {style: 'newline'})
}
RestHandler.prototype.createRoutes = function() {
var router = Router()
router.addRoute("/", this.dataTable.bind(this))
router.addRoute("/api/session", this.session.bind(this))
router.addRoute("/api/login", this.login.bind(this))
router.addRoute("/api/logout", this.logout.bind(this))
router.addRoute("/api/pull", this.pull.bind(this))
router.addRoute("/api/push", this.push.bind(this))
router.addRoute("/api/changes", this.changes.bind(this))
router.addRoute("/api/stats", this.stats.bind(this))
router.addRoute("/api/bulk", this.bulk.bind(this))
router.addRoute("/api/metadata", this.package.bind(this))
router.addRoute("/api/manifest", this.manifest.bind(this))
router.addRoute("/api/rpc", this.rpc.bind(this))
router.addRoute("/api/csv", this.exportCsv.bind(this))
router.addRoute("/api", this.hello.bind(this))
router.addRoute("/api/rows", this.document.bind(this))
router.addRoute("/api/rows/:key", this.document.bind(this))
router.addRoute("/api/rows/:key/:filename", this.blob.bind(this))
router.addRoute("/api/blobs/:key", this.blobs.bind(this))
router.addRoute("*", this.notFound.bind(this))
return router
}
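// Example requests (hypothetical; host and port depend on how the surrounding
// dat server is started) against a few of the routes registered above:
//
//   curl http://<host>:<port>/api              # hello/info JSON
//   curl http://<host>:<port>/api/rows/<key>   # fetch a single row
//   curl http://<host>:<port>/api/csv          # export all rows as CSV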
RestHandler.prototype.session = function(req, res) {
var self = this
this.auth.handle(req, res, function(err, session) {
debug('session', [err, session])
var data = {}
if (err) return self.auth.error(req, res)
if (session) data.session = session
else data.loggedOut = true
self.json(res, data)
})
}
RestHandler.prototype.login = function(req, res) {
var self = this
this.auth.handle(req, res, function(err, session) {
debug('login', [err, session])
if (err) {
res.setHeader("WWW-Authenticate", "Basic realm=\"Secure Area\"")
self.auth.error(req, res)
return
}
self.json(res, {session: session})
})
}
RestHandler.prototype.logout = function(req, res) {
return this.auth.error(req, res)
}
RestHandler.prototype.blob = function(req, res, opts) {
var self = this
if (req.method === 'GET') {
var key = opts.key
var blob = self.dat.createBlobReadStream(opts.key, opts.filename, opts)
blob.on('error', function(err) {
return self.error(res, 404, {"error": "Not Found"})
})
pump(blob, res)
return
}
if (req.method === "POST") {
var reqUrl = url.parse(req.url, true)
var qs = reqUrl.query
var doc = {
key: opts.key,
version: qs.version
}
self.auth.handle(req, res, function(err) {
if (err) return self.auth.error(req, res)
var key = doc.key
self.dat.get(key, { version: doc.version }, function(err, existing) {
if (existing) {
doc = existing
}
var ws = self.dat.createBlobWriteStream(opts.filename, doc, function(err, updated) {
if (err) return self.error(res, 500, err)
self.json(res, updated)
})
pump(req, ws)
})
return
})
return
}
self.error(res, 405, {error: 'method not supported'})
}
RestHandler.prototype.blobs = function(req, res, opts) {
var self = this
if (req.method === 'HEAD') {
var key = opts.key
var blob = self.dat.blobs.backend.exists(opts, function(err, exists) {
res.statusCode = exists ? 200 : 404
res.setHeader('content-length', 0)
res.end()
})
return
}
if (req.method === 'GET') {
res.statusCode = 200
return pump(self.dat.blobs.backend.createReadStream(opts), res)
}
self.error(res, 405, {error: 'method not supported'})
}
var unzip = function(req) {
return req.headers['content-encoding'] === 'gzip' ? zlib.createGunzip() : through()
}
var zip = function(req, res) {
if (!/gzip/.test(req.headers['accept-encoding'] || '')) return through()
res.setHeader('Content-Encoding', 'gzip')
return zlib.createGzip()
}
RestHandler.prototype.push = function(req, res) {
var self = this
this.auth.handle(req, res, function(err) {
if (err) return self.auth.error(req, res)
pump(req, unzip(req), self.dat.replicator.receive(), function(err) {
if (err) {
res.statusCode = err.status || 500
res.end(err.message)
return
}
res.end()
})
})
}
RestHandler.prototype.pull = function(req, res) {
var reqUrl = url.parse(req.url, true)
var qs = reqUrl.query
var send = this.dat.replicator.send({
since: parseInt(qs.since, 10) || 0,
blobs: qs.blobs !== 'false',
live: !!qs.live
})
pump(send, zip(req, res), res)
}
RestHandler.prototype.changes = function(req, res) {
this.sleep.httpHandler(req, res)
}
RestHandler.prototype.stats = function(req, res) {
var statsStream = this.dat.createStatsStream()
statsStream.on('error', function(err) {
var errObj = {
type: 'statsStreamError',
message: err.message
}
res.statusCode = 400
serializer.write(errObj)
serializer.end()
})
var serializer = ldj.serialize()
pump(statsStream, serializer, res)
}
RestHandler.prototype.package = function(req, res) {
var meta = {changes: this.dat.storage.change, liveBackup: this.dat.supportsLiveBackup()}
meta.columns = this.dat.schema.headers()
this.json(res, meta)
}
RestHandler.prototype.manifest = function(req, res) {
this.json(res, manifest(this.dat.storage))
}
RestHandler.prototype.rpc = function(req, res) {
var self = this
this.auth.handle(req, res, function(err) {
if (err) return self.auth.error(req, res)
var mserver = multilevel.server(self.dat.storage)
pump(req, mserver, res)
})
}
RestHandler.prototype.exportCsv = function(req, res) {
var reqUrl = url.parse(req.url, true)
var qs = reqUrl.query
qs.csv = true
var readStream = this.dat.createReadStream(qs)
res.writeHead(200, {'content-type': 'text/csv'})
pump(readStream, res)
}
RestHandler.prototype.exportJson = function(req, res) {
var reqUrl = url.parse(req.url, true)
var qs = reqUrl.query
if (typeof qs.limit === 'undefined') qs.limit = 50
else qs.limit = +qs.limit
var readStream = this.dat.createReadStream(qs)
res.writeHead(200, {'content-type': 'application/json'})
pump(readStream, jsonStream.stringify('{"rows": [\n', '\n,\n', '\n]}\n'), res)
}
RestHandler.prototype.handle = function(req, res) {
debug(req.connection.remoteAddress + ' - ' + req.method + ' - ' + req.url + ' - ')
this.router(req, res)
}
RestHandler.prototype.error = function(res, status, message) {
if (!status) status = res.statusCode
if (message) {
if (message.status) status = message.status
if (typeof message === "object") message.status = status
if (typeof message === "string") message = {error: status, message: message}
}
res.statusCode = status || 500
this.json(res, message)
}
RestHandler.prototype.notFound = function(req, res) {
this.error(res, 404, {"error": "Not Found"})
}
RestHandler.prototype.hello = function(req, res) {
var self = this
var stats = {
"dat": "Hello",
"version": this.dat.version,
"changes": this.dat.storage.change,
"name": this.dat.options.name
}
this.dat.storage.stat(function(err, stat) {
if (err) return self.json(res, stats)
stats.rows = stat.rows
self.dat.storage.approximateSize(function(err, size) {
if (err) return self.json(res, stats)
stats.approximateSize = { rows: prettyBytes(size) }
self.json(res, stats)
})
})
}
RestHandler.prototype.dataTable = function(req, res) {
res.setHeader('content-type', 'text/html; charset=utf-8')
res.end(prebuiltEditor)
}
RestHandler.prototype.json = function(res, json) {
res.setHeader('content-type', 'application/json')
res.end(JSON.stringify(json) + '\n')
}
RestHandler.prototype.get = function(req, res, opts) {
var self = this
this.dat.get(opts.key, url.parse(req.url, true).query || {}, function(err, json) {
if (err && err.message === 'range not found') return self.error(res, 404, {error: "Not Found"})
if (err) return self.error(res, 500, err.message)
if (json === null) return self.error(res, 404, {error: "Not Found"})
self.json(res, json)
})
}
RestHandler.prototype.post = function(req, res) {
var self = this
self.bufferJSON(req, function(err, json) {
if (err) return self.error(res, 500, err)
if (!json) json = {}
self.dat.put(json, function(err, stored) {
if (err) {
if (err.conflict) return self.error(res, 409, {conflict: true, error: "Document update conflict. Invalid version"})
return self.error(res, 500, err)
}
res.statusCode = 201
self.json(res, stored)
})
})
}
RestHandler.prototype.delete = function(req, res, opts) {
var self = this
self.dat.delete(opts.key, function(err, stored) {
if (err) return self.error(res, 500, err)
self.json(res, {deleted: true})
})
}
RestHandler.prototype.bulk = function(req, res) {
var self = this
var opts = {}
var ct = req.headers['content-type']
if (ct === 'application/json') opts.json = true
else if (ct === 'text/csv') opts.csv = true
else return self.error(res, 400, {error: 'missing or unsupported content-type'})
opts.results = true
debug('/api/bulk', opts)
this.auth.handle(req, res, function(err) {
if (err) return self.auth.error(req, res)
var writeStream = self.dat.createWriteStream(opts)
writeStream.on('error', function(writeErr) {
var errObj = {
type: 'writeStreamError',
message: writeErr.message
}
res.statusCode = 400
serializer.write(errObj)
serializer.end()
})
var serializer = ldj.serialize()
pump(req, writeStream, serializer, res)
})
}
RestHandler.prototype.document = function(req, res, opts) {
var self = this
if (req.method === "GET" || req.method === "HEAD") {
if (opts.key) return this.get(req, res, opts)
else return this.exportJson(req, res)
}
this.auth.handle(req, res, function(err) {
if (err) return self.auth.error(req, res)
if (req.method === "POST") return self.post(req, res, opts)
if (req.method === "DELETE") return self.delete(req, res, opts)
self.error(res, 405, {error: 'method not supported'})
})
}
RestHandler.prototype.bufferJSON = function(req, cb) {
var self = this
req.on('error', function(err) {
cb(err)
})
req.pipe(concat(function(buff) {
var json
if (buff && buff.length === 0) return cb()
if (buff) {
try {
json = JSON.parse(buff)
} catch(err) {
return cb(err)
}
}
if (!json) return cb()
cb(null, json)
}))
}
| giantoak/dat | lib/rest-handler.js | JavaScript | bsd-2-clause | 11,585 |
/**
* The main application class. An instance of this class is created by app.js when it calls
* Ext.application(). This is the ideal place to handle application launch and initialization
* details.
*
*
*/
Ext.define('Sample.Application', {
extend: 'Devon.App',
name: 'Sample',
requires:[
'Sample.Simlets'
],
controllers: [
'Sample.controller.main.MainController',
'Sample.controller.table.TablesController',
'Sample.controller.cook.CookController'
],
launch: function() {
Devon.Log.trace('Sample.app launch');
console.log('Sample.app launch');
if (document.location.toString().indexOf('useSimlets')>=0){
Sample.Simlets.useSimlets();
}
this.callParent(arguments);
}
});
| CoEValencia/chirr | demos/workspace/ExtSample/app/Application.js | JavaScript | bsd-2-clause | 796 |
package com.glob3mobile.vectorial.processing;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import com.glob3mobile.utils.Progress;
import com.glob3mobile.vectorial.lod.PointFeatureLODStorage;
import com.glob3mobile.vectorial.lod.mapdb.PointFeatureLODMapDBStorage;
import com.glob3mobile.vectorial.storage.PointFeature;
import com.glob3mobile.vectorial.storage.PointFeatureStorage;
import com.glob3mobile.vectorial.storage.mapdb.PointFeatureMapDBStorage;
public class LODPointFeaturesPreprocessor {
private static class LeafNodesImporter
implements
PointFeatureStorage.NodeVisitor {
private final long _nodesCount;
private final PointFeatureLODStorage _lodStorage;
private final boolean _verbose;
private Progress _progress;
private LeafNodesImporter(final long nodesCount,
final PointFeatureLODStorage lodStorage,
final boolean verbose) {
_nodesCount = nodesCount;
_lodStorage = lodStorage;
_verbose = verbose;
}
@Override
public void start() {
_progress = new Progress(_nodesCount) {
@Override
public void informProgress(final long stepsDone,
final double percent,
final long elapsed,
final long estimatedMsToFinish) {
if (_verbose) {
System.out.println(_lodStorage.getName() + ": 1/4 Importing leaf nodes: "
+ progressString(stepsDone, percent, elapsed, estimatedMsToFinish));
}
}
};
}
@Override
public void stop() {
_progress.finish();
_progress = null;
}
@Override
public boolean visit(final PointFeatureStorage.Node node) {
final List<PointFeature> features = new ArrayList<>(node.getFeatures());
_lodStorage.addLeafNode( //
node.getID(), //
node.getNodeSector(), //
node.getMinimumSector(), //
features //
);
_progress.stepDone();
return true;
}
}
public static void process(final File storageDir,
final String storageName,
final File lodDir,
final String lodName,
final int maxFeaturesPerNode,
final Comparator<PointFeature> featuresComparator,
final boolean createClusters,
final boolean verbose) throws IOException {
try (final PointFeatureStorage storage = PointFeatureMapDBStorage.openReadOnly(storageDir, storageName)) {
try (final PointFeatureLODStorage lodStorage = PointFeatureLODMapDBStorage.createEmpty(storage.getSector(), lodDir,
lodName, maxFeaturesPerNode, featuresComparator, createClusters)) {
final PointFeatureStorage.Statistics statistics = storage.getStatistics(verbose);
if (verbose) {
statistics.show();
System.out.println();
}
final int nodesCount = statistics.getNodesCount();
storage.acceptDepthFirstVisitor(new LeafNodesImporter(nodesCount, lodStorage, verbose));
lodStorage.createLOD(verbose);
if (verbose) {
System.out.println(lodStorage.getName() + ": 4/4 Optimizing storage...");
}
lodStorage.optimize();
if (verbose) {
System.out.println();
final PointFeatureLODStorage.Statistics lodStatistics = lodStorage.getStatistics(verbose);
lodStatistics.show();
}
}
}
}
private LODPointFeaturesPreprocessor() {
}
public static void main(final String[] args) throws IOException {
System.out.println("LODPointFeaturesPreprocessor 0.1");
System.out.println("--------------------------------\n");
final File sourceDir = new File("PointFeaturesStorage");
// final String sourceName = "Cities1000";
// final String sourceName = "AR";
// final String sourceName = "ES";
// final String sourceName = "GEONames-PopulatedPlaces";
// final String sourceName = "SpanishBars";
final String sourceName = "Tornados";
final File lodDir = new File("PointFeaturesLOD");
final String lodName = sourceName + "_LOD";
final int maxFeaturesPerNode = 64;
// final int maxFeaturesPerNode = 96;
final boolean createClusters = true;
final Comparator<PointFeature> featuresComparator = createClusters ? null : new GEONamesComparator();
final boolean verbose = true;
LODPointFeaturesPreprocessor.process( //
sourceDir, sourceName, //
lodDir, lodName, //
maxFeaturesPerNode, //
featuresComparator, //
createClusters, //
verbose);
System.out.println("\n- done!");
}
}
| octavianiLocator/g3m | tools/vectorial-streaming/src/com/glob3mobile/vectorial/processing/LODPointFeaturesPreprocessor.java | Java | bsd-2-clause | 5,344 |
// Copyright (C) 2018 Andrew Paprocki. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
esid: sec-date.parse
es6id: 20.3.3.2
description: >
Date.parse return value is limited to specified time value maximum range
info: |
Date.parse ( string )
parse interprets the resulting String as a date and time; it returns a
Number, the UTC time value corresponding to the date and time.
A Date object contains a Number indicating a particular instant in time to
within a millisecond. Such a Number is called a time value.
The actual range of times supported by ECMAScript Date objects is slightly
smaller: exactly -100,000,000 days to 100,000,000 days measured relative to
midnight at the beginning of 01 January, 1970 UTC. This gives a range of
8,640,000,000,000,000 milliseconds to either side of 01 January, 1970 UTC.
includes: [propertyHelper.js]
---*/
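// The 8,640,000,000,000,000 ms limit exercised below is just the documented
// 100,000,000-day range expressed in milliseconds:
// 100,000,000 days * 86,400,000 ms/day = 8,640,000,000,000,000 ms.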
const minDateStr = "-271821-04-20T00:00:00.000Z";
const minDate = new Date(-8640000000000000);
assert.sameValue(minDate.toISOString(), minDateStr, "minDateStr");
assert.sameValue(Date.parse(minDateStr), minDate.valueOf(), "parse minDateStr");
const maxDateStr = "+275760-09-13T00:00:00.000Z";
const maxDate = new Date(8640000000000000);
assert.sameValue(maxDate.toISOString(), maxDateStr, "maxDateStr");
assert.sameValue(Date.parse(maxDateStr), maxDate.valueOf(), "parse maxDateStr");
const belowRange = "-271821-04-19T23:59:59.999Z";
const aboveRange = "+275760-09-13T00:00:00.001Z";
assert.sameValue(Date.parse(belowRange), NaN, "parse below minimum time value");
assert.sameValue(Date.parse(aboveRange), NaN, "parse above maximum time value");
| sebastienros/jint | Jint.Tests.Test262/test/built-ins/Date/parse/time-value-maximum-range.js | JavaScript | bsd-2-clause | 1,673 |
package ru.biocad.ig.alicont.conts.simple
import ru.biocad.ig.alicont.algorithms.simple.SemiglobalAlignment
import ru.biocad.ig.alicont.conts.SimpleAlicont
/**
* Created with IntelliJ IDEA.
* User: pavel
* Date: 27.11.13
* Time: 23:37
*/
class AlicontSemiglobal(maxheight : Int, query : String, gap : Double, score_matrix : Array[Array[Double]])
extends SimpleAlicont(maxheight : Int, query : String, gap : Double, score_matrix : Array[Array[Double]]) {
def push(s : String) : Unit = {
_strings.push(s)
SemiglobalAlignment.extendMatrix(s, _query, _gap, _score, _matrix)
}
def alignment() : (Double, (String, String)) = {
SemiglobalAlignment.traceback(target, _query, _gap, _score, _matrix)
}
} | zmactep/igcat | lib/ig-alicont/src/main/scala/ru/biocad/ig/alicont/conts/simple/AlicontSemiglobal.scala | Scala | bsd-2-clause | 724 |
#include <catch.hpp>
#include <rapidcheck/catch.h>
using namespace rc;
TEST_CASE("scaleInteger") {
prop("for uint32_t, equal to naive way",
[] {
const auto x = *gen::arbitrary<uint32_t>();
const auto size = *gen::nonNegative<int>();
RC_ASSERT(gen::detail::scaleInteger(x, size) ==
((x * std::min<uint64_t>(kNominalSize, size) +
(kNominalSize / 2)) /
kNominalSize));
});
prop("result strictly increases with size",
[](uint64_t x) {
const auto sizeA = *gen::nonNegative<int>();
const auto sizeB = *gen::nonNegative<int>();
const auto small = std::min(sizeA, sizeB);
const auto large = std::max(sizeA, sizeB);
RC_ASSERT(gen::detail::scaleInteger(x, small) <=
gen::detail::scaleInteger(x, large));
});
prop("result strictly increases with value",
[](uint64_t a, uint64_t b){
const auto size = *gen::nonNegative<int>();
const auto small = std::min(a, b);
const auto large = std::max(a, b);
RC_ASSERT(gen::detail::scaleInteger(small, size) <=
gen::detail::scaleInteger(large, size));
});
prop("yields input for kNominalSize",
[](uint64_t x) {
RC_ASSERT(gen::detail::scaleInteger(x, kNominalSize) == x);
});
prop("yields 0 for 0",
[](uint64_t x) { RC_ASSERT(gen::detail::scaleInteger(x, 0) == 0U); });
}
| unapiedra/rapidfuzz | test/gen/detail/ScaleIntegerTests.cpp | C++ | bsd-2-clause | 1,493 |
// ------------------------------------------------------------------------------
// <copyright from='2002' to='2002' company='Scott Hanselman'>
// Copyright (c) Scott Hanselman. All Rights Reserved.
// </copyright>
// ------------------------------------------------------------------------------
//
// Scott Hanselman's Tiny Academic Virtual CPU and OS
// Copyright (c) 2002, Scott Hanselman ([email protected])
// All rights reserved.
//
// A BSD License
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// Neither the name of Scott Hanselman nor the names of its contributors
// may be used to endorse or promote products derived from this software without
// specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
// BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
// OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace Hanselman.CST352
{
using System;
using System.Collections;
/// <summary>
/// A collection that stores <see cref='Hanselman.CST352.Instruction'/> objects.
/// </summary>
/// <seealso cref='Hanselman.CST352.InstructionCollection'/>
[Serializable()]
public class InstructionCollection : CollectionBase {
/// <summary>
/// Initializes a new instance of <see cref='Hanselman.CST352.InstructionCollection'/>.
/// </summary>
public InstructionCollection() {
}
/// <summary>
/// Initializes a new instance of <see cref='Hanselman.CST352.InstructionCollection'/> based on another <see cref='Hanselman.CST352.InstructionCollection'/>.
/// </summary>
/// <param name='value'>
/// A <see cref='Hanselman.CST352.InstructionCollection'/> from which the contents are copied
/// </param>
public InstructionCollection(InstructionCollection value) {
this.AddRange(value);
}
/// <summary>
/// Initializes a new instance of <see cref='Hanselman.CST352.InstructionCollection'/> containing any array of <see cref='Hanselman.CST352.Instruction'/> objects.
/// </summary>
/// <param name='value'>
/// An array of <see cref='Hanselman.CST352.Instruction'/> objects with which to initialize the collection
/// </param>
public InstructionCollection(Instruction[] value) {
this.AddRange(value);
}
/// <summary>
/// Represents the entry at the specified index of the <see cref='Hanselman.CST352.Instruction'/>.
/// </summary>
/// <param name='index'>The zero-based index of the entry to locate in the collection.</param>
/// <value>
/// The entry at the specified index of the collection.
/// </value>
/// <exception cref='System.ArgumentOutOfRangeException'><paramref name='index'/> is outside the valid range of indexes for the collection.</exception>
public Instruction this[int index] {
get {
return ((Instruction)(List[index]));
}
set {
List[index] = value;
}
}
/// <summary>
/// Adds a <see cref='Hanselman.CST352.Instruction'/> with the specified value to the
/// <see cref='Hanselman.CST352.InstructionCollection'/> .
/// </summary>
/// <param name='value'>The <see cref='Hanselman.CST352.Instruction'/> to add.</param>
/// <returns>
/// The index at which the new element was inserted.
/// </returns>
/// <seealso cref='Hanselman.CST352.InstructionCollection.AddRange(Instruction[])'/>
public int Add(Instruction value) {
return List.Add(value);
}
/// <summary>
/// Copies the elements of an array to the end of the <see cref='Hanselman.CST352.InstructionCollection'/>.
/// </summary>
/// <param name='value'>
/// An array of type <see cref='Hanselman.CST352.Instruction'/> containing the objects to add to the collection.
/// </param>
/// <returns>
/// None.
/// </returns>
/// <seealso cref='Hanselman.CST352.InstructionCollection.Add'/>
public void AddRange(Instruction[] value) {
for (int i = 0; (i < value.Length); i = (i + 1)) {
this.Add(value[i]);
}
}
/// <summary>
///
/// Adds the contents of another <see cref='Hanselman.CST352.InstructionCollection'/> to the end of the collection.
///
/// </summary>
/// <param name='value'>
/// A <see cref='Hanselman.CST352.InstructionCollection'/> containing the objects to add to the collection.
/// </param>
/// <returns>
/// None.
/// </returns>
/// <seealso cref='Hanselman.CST352.InstructionCollection.Add'/>
public void AddRange(InstructionCollection value) {
for (int i = 0; (i < value.Count); i = (i + 1)) {
this.Add(value[i]);
}
}
/// <summary>
/// Gets a value indicating whether the
/// <see cref='Hanselman.CST352.InstructionCollection'/> contains the specified <see cref='Hanselman.CST352.Instruction'/>.
/// </summary>
/// <param name='value'>The <see cref='Hanselman.CST352.Instruction'/> to locate.</param>
/// <returns>
/// <see langword='true'/> if the <see cref='Hanselman.CST352.Instruction'/> is contained in the collection;
/// otherwise, <see langword='false'/>.
/// </returns>
/// <seealso cref='Hanselman.CST352.InstructionCollection.IndexOf'/>
public bool Contains(Instruction value) {
return List.Contains(value);
}
/// <summary>
/// Copies the <see cref='Hanselman.CST352.InstructionCollection'/> values to a one-dimensional <see cref='System.Array'/> instance at the
/// specified index.
/// </summary>
/// <param name='array'>The one-dimensional <see cref='System.Array'/> that is the destination of the values copied from <see cref='Hanselman.CST352.InstructionCollection'/> .</param>
/// <param name='index'>The index in <paramref name='array'/> where copying begins.</param>
/// <returns>
/// None.
/// </returns>
/// <exception cref='System.ArgumentException'><paramref name='array'/> is multidimensional. -or- The number of elements in the <see cref='Hanselman.CST352.InstructionCollection'/> is greater than the available space between <paramref name='index'/> and the end of <paramref name='array'/>.</exception>
/// <exception cref='System.ArgumentNullException'><paramref name='array'/> is <see langword='null'/>. </exception>
/// <exception cref='System.ArgumentOutOfRangeException'><paramref name='index'/> is less than <paramref name='array'/>'s lowbound. </exception>
/// <seealso cref='System.Array'/>
public void CopyTo(Instruction[] array, int index) {
List.CopyTo(array, index);
}
/// <summary>
/// Returns the index of a <see cref='Hanselman.CST352.Instruction'/> in
/// the <see cref='Hanselman.CST352.InstructionCollection'/> .
/// </summary>
/// <param name='value'>The <see cref='Hanselman.CST352.Instruction'/> to locate.</param>
/// <returns>
/// The index of the <see cref='Hanselman.CST352.Instruction'/> of <paramref name='value'/> in the
/// <see cref='Hanselman.CST352.InstructionCollection'/>, if found; otherwise, -1.
/// </returns>
/// <seealso cref='Hanselman.CST352.InstructionCollection.Contains'/>
public int IndexOf(Instruction value) {
return List.IndexOf(value);
}
/// <summary>
/// Inserts a <see cref='Hanselman.CST352.Instruction'/> into the <see cref='Hanselman.CST352.InstructionCollection'/> at the specified index.
/// </summary>
/// <param name='index'>The zero-based index where <paramref name='value'/> should be inserted.</param>
/// <param name=' value'>The <see cref='Hanselman.CST352.Instruction'/> to insert.</param>
/// <returns>None.</returns>
/// <seealso cref='Hanselman.CST352.InstructionCollection.Add'/>
public void Insert(int index, Instruction value) {
List.Insert(index, value);
}
/// <summary>
/// Returns an enumerator that can iterate through
/// the <see cref='Hanselman.CST352.InstructionCollection'/> .
/// </summary>
/// <returns>None.</returns>
/// <seealso cref='System.Collections.IEnumerator'/>
public new InstructionEnumerator GetEnumerator() {
return new InstructionEnumerator(this);
}
/// <summary>
/// Removes a specific <see cref='Hanselman.CST352.Instruction'/> from the
/// <see cref='Hanselman.CST352.InstructionCollection'/> .
/// </summary>
/// <param name='value'>The <see cref='Hanselman.CST352.Instruction'/> to remove from the <see cref='Hanselman.CST352.InstructionCollection'/> .</param>
/// <returns>None.</returns>
/// <exception cref='System.ArgumentException'><paramref name='value'/> is not found in the Collection. </exception>
public void Remove(Instruction value) {
List.Remove(value);
}
/// <summary>
/// Provided for "foreach" support with this collection
/// </summary>
public class InstructionEnumerator : object, IEnumerator {
private IEnumerator baseEnumerator;
private IEnumerable temp;
/// <summary>
/// Public constructor for an InstructionEnumerator
/// </summary>
/// <param name="mappings">The <see cref="InstructionCollection"/>we are going to iterate over</param>
public InstructionEnumerator(InstructionCollection mappings) {
this.temp = ((IEnumerable)(mappings));
this.baseEnumerator = temp.GetEnumerator();
}
/// <summary>
/// The current <see cref="Instruction"/>
/// </summary>
public Instruction Current {
get {
return ((Instruction)(baseEnumerator.Current));
}
}
/// <summary>
/// The current IEnumerator interface
/// </summary>
object IEnumerator.Current {
get {
return baseEnumerator.Current;
}
}
/// <summary>
/// Move to the next Instruction
/// </summary>
/// <returns>true or false based on success</returns>
public bool MoveNext() {
return baseEnumerator.MoveNext();
}
/// <summary>
/// Move to the next Instruction
/// </summary>
/// <returns>true or false based on success</returns>
bool IEnumerator.MoveNext()
{
return baseEnumerator.MoveNext();
}
/// <summary>
/// Reset the cursor
/// </summary>
public void Reset()
{
baseEnumerator.Reset();
}
/// <summary>
/// Reset the cursor
/// </summary>
void IEnumerator.Reset()
{
baseEnumerator.Reset();
}
}
}
}
| shanselman/TinyOS | src/TinyOSCore/InstructionCollection.cs | C# | bsd-2-clause | 12,715 |
#ifndef EXTRACTION_WAY_HPP
#define EXTRACTION_WAY_HPP
#include "extractor/guidance/road_classification.hpp"
#include "extractor/travel_mode.hpp"
#include "util/guidance/turn_lanes.hpp"
#include "util/typedefs.hpp"
#include <string>
#include <unordered_map>
#include <vector>
namespace osrm
{
namespace extractor
{
namespace detail
{
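// Assigns `value` to `str`, treating a nullptr (e.g. a Lua nil handed over by
// the profile) as a request to clear the string instead of dereferencing it.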
inline void maybeSetString(std::string &str, const char *value)
{
if (value == nullptr)
{
str.clear();
}
else
{
str = std::string(value);
}
}
}
/**
* This struct is the direct result of the call to ```way_function```
* in the lua based profile.
*
* It is split into multiple edge segments in the ExtractorCallback.
*/
struct ExtractionWay
{
ExtractionWay() { clear(); }
void clear()
{
forward_speed = -1;
backward_speed = -1;
forward_rate = -1;
backward_rate = -1;
duration = -1;
weight = -1;
name.clear();
ref.clear();
pronunciation.clear();
destinations.clear();
exits.clear();
turn_lanes_forward.clear();
turn_lanes_backward.clear();
road_classification = guidance::RoadClassification();
forward_travel_mode = TRAVEL_MODE_INACCESSIBLE;
backward_travel_mode = TRAVEL_MODE_INACCESSIBLE;
roundabout = false;
circular = false;
is_startpoint = true;
forward_restricted = false;
backward_restricted = false;
is_left_hand_driving = false;
}
// wrappers to allow assigning nil (nullptr) to string values
void SetName(const char *value) { detail::maybeSetString(name, value); }
const char *GetName() const { return name.c_str(); }
void SetRef(const char *value) { detail::maybeSetString(ref, value); }
const char *GetRef() const { return ref.c_str(); }
void SetDestinations(const char *value) { detail::maybeSetString(destinations, value); }
const char *GetDestinations() const { return destinations.c_str(); }
void SetExits(const char *value) { detail::maybeSetString(exits, value); }
const char *GetExits() const { return exits.c_str(); }
void SetPronunciation(const char *value) { detail::maybeSetString(pronunciation, value); }
const char *GetPronunciation() const { return pronunciation.c_str(); }
void SetTurnLanesForward(const char *value)
{
detail::maybeSetString(turn_lanes_forward, value);
}
const char *GetTurnLanesForward() const { return turn_lanes_forward.c_str(); }
void SetTurnLanesBackward(const char *value)
{
detail::maybeSetString(turn_lanes_backward, value);
}
const char *GetTurnLanesBackward() const { return turn_lanes_backward.c_str(); }
// markers for determining user-defined classes for each way
std::unordered_map<std::string, bool> forward_classes;
std::unordered_map<std::string, bool> backward_classes;
// speed in km/h
double forward_speed;
double backward_speed;
// weight per meter
double forward_rate;
double backward_rate;
// duration of the whole way in both directions
double duration;
// weight of the whole way in both directions
double weight;
std::string name;
std::string ref;
std::string pronunciation;
std::string destinations;
std::string exits;
std::string turn_lanes_forward;
std::string turn_lanes_backward;
guidance::RoadClassification road_classification;
TravelMode forward_travel_mode : 4;
TravelMode backward_travel_mode : 4;
// Boolean flags
bool roundabout : 1;
bool circular : 1;
bool is_startpoint : 1;
bool forward_restricted : 1;
bool backward_restricted : 1;
bool is_left_hand_driving : 1;
bool : 2;
};
}
}
#endif // EXTRACTION_WAY_HPP
| frodrigo/osrm-backend | include/extractor/extraction_way.hpp | C++ | bsd-2-clause | 3,763 |
#include <math.h>
#include "evas_common.h"
#include "evas_blend_private.h"
typedef struct _RGBA_Span RGBA_Span;
typedef struct _RGBA_Edge RGBA_Edge;
typedef struct _RGBA_Vertex RGBA_Vertex;
struct _RGBA_Span
{
EINA_INLIST;
int x, y, w;
};
struct _RGBA_Edge
{
double x, dx;
int i;
};
struct _RGBA_Vertex
{
double x, y;
int i;
};
#define POLY_EDGE_DEL(_i) \
{ \
int _j; \
\
for (_j = 0; (_j < num_active_edges) && (edges[_j].i != _i); _j++); \
if (_j < num_active_edges) \
{ \
num_active_edges--; \
memmove(&(edges[_j]), &(edges[_j + 1]), \
(num_active_edges - _j) * sizeof(RGBA_Edge)); \
} \
}
#define POLY_EDGE_ADD(_i, _y) \
{ \
int _j; \
float _dx; \
RGBA_Vertex *_p, *_q; \
if (_i < (n - 1)) _j = _i + 1; \
else _j = 0; \
if (point[_i].y < point[_j].y) \
{ \
_p = &(point[_i]); \
_q = &(point[_j]); \
} \
else \
{ \
_p = &(point[_j]); \
_q = &(point[_i]); \
} \
edges[num_active_edges].dx = _dx = (_q->x - _p->x) / (_q->y - _p->y); \
edges[num_active_edges].x = (_dx * ((float)_y + 0.5 - _p->y)) + _p->x; \
edges[num_active_edges].i = _i; \
num_active_edges++; \
}
EAPI void
evas_common_polygon_init(void)
{
}
EAPI RGBA_Polygon_Point *
evas_common_polygon_point_add(RGBA_Polygon_Point *points, int x, int y)
{
RGBA_Polygon_Point *pt;
pt = malloc(sizeof(RGBA_Polygon_Point));
if (!pt) return points;
pt->x = x;
pt->y = y;
points = (RGBA_Polygon_Point *)eina_inlist_append(EINA_INLIST_GET(points), EINA_INLIST_GET(pt));
return points;
}
EAPI RGBA_Polygon_Point *
evas_common_polygon_points_clear(RGBA_Polygon_Point *points)
{
if (points)
{
while (points)
{
RGBA_Polygon_Point *old_p;
old_p = points;
points = (RGBA_Polygon_Point *)eina_inlist_remove(EINA_INLIST_GET(points), EINA_INLIST_GET(points));
free(old_p);
}
}
return NULL;
}
static int
polygon_point_sorter(const void *a, const void *b)
{
RGBA_Vertex *p, *q;
p = (RGBA_Vertex *)a;
q = (RGBA_Vertex *)b;
if (p->y <= q->y) return -1;
return 1;
}
static int
polygon_edge_sorter(const void *a, const void *b)
{
RGBA_Edge *p, *q;
p = (RGBA_Edge *)a;
q = (RGBA_Edge *)b;
if (p->x <= q->x) return -1;
return 1;
}
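/* Scanline polygon fill: for each row between the polygon's vertical extents
 * an active-edge table is maintained (POLY_EDGE_ADD / POLY_EDGE_DEL), the
 * active edges are sorted by x, consecutive pairs are emitted as horizontal
 * spans, and the collected spans are finally blended into the destination
 * image using the draw context's color and render op. */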
EAPI void
evas_common_polygon_draw(RGBA_Image *dst, RGBA_Draw_Context *dc, RGBA_Polygon_Point *points, int x, int y)
{
RGBA_Gfx_Func func;
RGBA_Polygon_Point *pt;
RGBA_Vertex *point;
RGBA_Edge *edges;
Eina_Inlist *spans;
int num_active_edges;
int n;
int i, j, k;
int y0, y1, yi;
int ext_x, ext_y, ext_w, ext_h;
int *sorted_index;
ext_x = 0;
ext_y = 0;
ext_w = dst->cache_entry.w;
ext_h = dst->cache_entry.h;
if (dc->clip.use)
{
if (dc->clip.x > ext_x)
{
ext_w += ext_x - dc->clip.x;
ext_x = dc->clip.x;
}
if ((ext_x + ext_w) > (dc->clip.x + dc->clip.w))
{
ext_w = (dc->clip.x + dc->clip.w) - ext_x;
}
if (dc->clip.y > ext_y)
{
ext_h += ext_y - dc->clip.y;
ext_y = dc->clip.y;
}
if ((ext_y + ext_h) > (dc->clip.y + dc->clip.h))
{
ext_h = (dc->clip.y + dc->clip.h) - ext_y;
}
}
if ((ext_w <= 0) || (ext_h <= 0)) return;
evas_common_cpu_end_opt();
n = 0; EINA_INLIST_FOREACH(points, pt) n++;
if (n < 3) return;
edges = malloc(sizeof(RGBA_Edge) * n);
if (!edges) return;
point = malloc(sizeof(RGBA_Vertex) * n);
if (!point)
{
free(edges);
return;
}
sorted_index = malloc(sizeof(int) * n);
if (!sorted_index)
{
free(edges);
free(point);
return;
}
k = 0;
EINA_INLIST_FOREACH(points, pt)
{
point[k].x = pt->x + x;
point[k].y = pt->y + y;
point[k].i = k;
k++;
}
qsort(point, n, sizeof(RGBA_Vertex), polygon_point_sorter);
for (k = 0; k < n; k++) sorted_index[k] = point[k].i;
k = 0;
EINA_INLIST_FOREACH(points, pt)
{
point[k].x = pt->x + x;
point[k].y = pt->y + y;
point[k].i = k;
k++;
}
y0 = MAX(ext_y, ceil(point[sorted_index[0]].y - 0.5));
y1 = MIN(ext_y + ext_h - 1, floor(point[sorted_index[n - 1]].y - 0.5));
k = 0;
num_active_edges = 0;
spans = NULL;
for (yi = y0; yi <= y1; yi++)
{
for (; (k < n) && (point[sorted_index[k]].y <= ((double)yi + 0.5)); k++)
{
i = sorted_index[k];
if (i > 0) j = i - 1;
else j = n - 1;
if (point[j].y <= ((double)yi - 0.5))
{
POLY_EDGE_DEL(j)
}
else if (point[j].y > ((double)yi + 0.5))
{
POLY_EDGE_ADD(j, yi)
}
if (i < (n - 1)) j = i + 1;
else j = 0;
if (point[j].y <= ((double)yi - 0.5))
{
POLY_EDGE_DEL(i)
}
else if (point[j].y > ((double)yi + 0.5))
{
POLY_EDGE_ADD(i, yi)
}
}
qsort(edges, num_active_edges, sizeof(RGBA_Edge), polygon_edge_sorter);
for (j = 0; j < num_active_edges; j += 2)
{
int x0, x1;
x0 = ceil(edges[j].x - 0.5);
if (j < (num_active_edges - 1))
x1 = floor(edges[j + 1].x - 0.5);
else
x1 = x0;
if ((x1 >= ext_x) && (x0 < (ext_x + ext_w)) && (x0 <= x1))
{
RGBA_Span *span;
if (x0 < ext_x) x0 = ext_x;
if (x1 >= (ext_x + ext_w)) x1 = ext_x + ext_w - 1;
span = malloc(sizeof(RGBA_Span));
spans = eina_inlist_append(spans, EINA_INLIST_GET(span));
span->y = yi;
span->x = x0;
span->w = (x1 - x0) + 1;
}
edges[j].x += edges[j].dx;
edges[j + 1].x += edges[j + 1].dx;
}
}
free(edges);
free(point);
free(sorted_index);
func = evas_common_gfx_func_composite_color_span_get(dc->col.col, dst, 1, dc->render_op);
if (spans)
{
RGBA_Span *span;
EINA_INLIST_FOREACH(spans, span)
{
DATA32 *ptr;
#ifdef EVAS_SLI
if (((span->y) % dc->sli.h) == dc->sli.y)
#endif
{
ptr = dst->image.data + (span->y * (dst->cache_entry.w)) + span->x;
func(NULL, NULL, dc->col.col, ptr, span->w);
}
}
while (spans)
{
span = (RGBA_Span *)spans;
spans = eina_inlist_remove(spans, spans);
free(span);
}
}
}
| nashidau/Evas-Next | src/lib/engines/common/evas_polygon_main.c | C | bsd-2-clause | 7,727 |
/*
* Copyright (c) 2013 Ambroz Bizjak
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef AMBROLIB_STRUCT_IF_H
#define AMBROLIB_STRUCT_IF_H
namespace APrinter {
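/*
 * AMBRO_STRUCT_IF(Name, Cond) declares a struct template specialized on a
 * boolean condition: the body written after the macro is only compiled when
 * Cond is true, and the body after AMBRO_STRUCT_ELSE(Name) is used otherwise.
 * `Name` itself is a using-alias that resolves to the selected specialization.
 */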
#define AMBRO_STRUCT_IF(name, condition) \
template <bool name##__IfEnable, typename name##__IfDummy = void> \
struct name##__impl {}; \
using name = name##__impl<(condition)>; \
template <typename name##__IfDummy> \
struct name##__impl <true, name##__IfDummy>
#define AMBRO_STRUCT_ELSE(name) \
; template <typename name##__IfDummy> \
struct name##__impl <false, name##__IfDummy>
#define APRINTER_STRUCT_IF_TEMPLATE(name) \
template <bool name##__IfEnable, typename name##__IfDummy = void> \
struct name {}; \
template <typename name##__IfDummy> \
struct name <true, name##__IfDummy>
}
#endif
| ambrop72/aprinter | aprinter/meta/StructIf.h | C | bsd-2-clause | 2,021 |
/* Copyright (c) Citrix Systems Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <ntddk.h>
#include <xen.h>
#include <util.h>
#include "hypercall.h"
#include "dbg_print.h"
#include "assert.h"
#define MAXIMUM_HYPERCALL_PFN_COUNT 2
#pragma code_seg("hypercall")
__declspec(allocate("hypercall"))
static UCHAR __Section[(MAXIMUM_HYPERCALL_PFN_COUNT + 1) * PAGE_SIZE];
static ULONG XenBaseLeaf = 0x40000000;
static USHORT XenMajorVersion;
static USHORT XenMinorVersion;
static PFN_NUMBER HypercallPfn[MAXIMUM_HYPERCALL_PFN_COUNT];
static ULONG HypercallPfnCount;
typedef UCHAR HYPERCALL_GATE[32];
typedef HYPERCALL_GATE *PHYPERCALL_GATE;
PHYPERCALL_GATE Hypercall;
NTSTATUS
HypercallInitialize(
VOID
)
{
ULONG EAX = 'DEAD';
ULONG EBX = 'DEAD';
ULONG ECX = 'DEAD';
ULONG EDX = 'DEAD';
ULONG Index;
ULONG HypercallMsr;
NTSTATUS status;
status = STATUS_UNSUCCESSFUL;
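    // Probe the hypervisor CPUID leaves (0x40000000, 0x40000100, ...) until the
    // 'XenVMMXenVMM' signature is found or the search range is exhausted.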
for (;;) {
CHAR Signature[13] = {0};
__CpuId(XenBaseLeaf, &EAX, &EBX, &ECX, &EDX);
*((PULONG)(Signature + 0)) = EBX;
*((PULONG)(Signature + 4)) = ECX;
*((PULONG)(Signature + 8)) = EDX;
if (strcmp(Signature, "XenVMMXenVMM") == 0 &&
EAX >= XenBaseLeaf + 2)
break;
XenBaseLeaf += 0x100;
if (XenBaseLeaf > 0x40000100)
goto fail1;
}
__CpuId(XenBaseLeaf + 1, &EAX, NULL, NULL, NULL);
XenMajorVersion = (USHORT)(EAX >> 16);
XenMinorVersion = (USHORT)(EAX & 0xFFFF);
Info("XEN %d.%d\n", XenMajorVersion, XenMinorVersion);
Info("INTERFACE 0x%08x\n", __XEN_INTERFACE_VERSION__);
if ((ULONG_PTR)__Section & (PAGE_SIZE - 1))
Hypercall = (PVOID)(((ULONG_PTR)__Section + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1));
else
Hypercall = (PVOID)__Section;
ASSERT3U(((ULONG_PTR)Hypercall & (PAGE_SIZE - 1)), ==, 0);
for (Index = 0; Index < MAXIMUM_HYPERCALL_PFN_COUNT; Index++) {
PHYSICAL_ADDRESS PhysicalAddress;
PhysicalAddress = MmGetPhysicalAddress((PUCHAR)Hypercall + (Index << PAGE_SHIFT));
HypercallPfn[Index] = (PFN_NUMBER)(PhysicalAddress.QuadPart >> PAGE_SHIFT);
}
__CpuId(XenBaseLeaf + 2, &EAX, &EBX, NULL, NULL);
HypercallPfnCount = EAX;
ASSERT(HypercallPfnCount <= MAXIMUM_HYPERCALL_PFN_COUNT);
HypercallMsr = EBX;
for (Index = 0; Index < HypercallPfnCount; Index++) {
Info("HypercallPfn[%d]: %p\n", Index, (PVOID)HypercallPfn[Index]);
__writemsr(HypercallMsr, (ULONG64)HypercallPfn[Index] << PAGE_SHIFT);
}
return STATUS_SUCCESS;
fail1:
Error("fail1 (%08x)", status);
return status;
}
extern uintptr_t __stdcall hypercall_gate_2(uint32_t ord, uintptr_t arg1, uintptr_t arg2);
ULONG_PTR
__Hypercall2(
ULONG Ordinal,
ULONG_PTR Argument1,
ULONG_PTR Argument2
)
{
return hypercall_gate_2(Ordinal, Argument1, Argument2);
}
extern uintptr_t __stdcall hypercall_gate_3(uint32_t ord, uintptr_t arg1, uintptr_t arg2, uintptr_t arg3);
ULONG_PTR
__Hypercall3(
ULONG Ordinal,
ULONG_PTR Argument1,
ULONG_PTR Argument2,
ULONG_PTR Argument3
)
{
return hypercall_gate_3(Ordinal, Argument1, Argument2, Argument3);
}
VOID
HypercallTeardown(
VOID
)
{
ULONG Index;
Hypercall = NULL;
for (Index = 0; Index < MAXIMUM_HYPERCALL_PFN_COUNT; Index++)
HypercallPfn[Index] = 0;
HypercallPfnCount = 0;
}
| xenserver/win-xenbus | src/xen/hypercall.c | C | bsd-2-clause | 4,901 |
# *****************************************************************************
# Copyright (c) 2020, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
import itertools
import numba
import numpy as np
import os
import pandas as pd
import pyarrow.parquet as pq
import random
import string
import unittest
from numba import types
import sdc
from sdc import hiframes
from sdc.str_arr_ext import StringArray
from sdc.tests.test_base import TestCase
from sdc.tests.test_utils import (count_array_OneDs,
count_array_REPs,
count_parfor_OneDs,
count_parfor_REPs,
dist_IR_contains,
get_start_end,
skip_numba_jit,
skip_sdc_jit)
class TestHiFrames(TestCase):
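    """Unit tests for pandas DataFrame/Series operations compiled with the SDC jit."""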
@skip_numba_jit
def test_column_list_select2(self):
# make sure SDC copies the columns like Pandas does
def test_impl(df):
df2 = df[['A']]
df2['A'] += 10
return df2.A, df.A
hpat_func = self.jit(test_impl)
n = 11
df = pd.DataFrame(
{'A': np.arange(n), 'B': np.ones(n), 'C': np.random.ranf(n)})
np.testing.assert_array_equal(hpat_func(df.copy())[1], test_impl(df)[1])
@skip_numba_jit
def test_pd_DataFrame_from_series_par(self):
def test_impl(n):
S1 = pd.Series(np.ones(n))
S2 = pd.Series(np.random.ranf(n))
df = pd.DataFrame({'A': S1, 'B': S2})
return df.A.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
self.assertEqual(count_parfor_OneDs(), 1)
@skip_numba_jit
def test_getitem_bool_series(self):
def test_impl(df):
return df['A'][df['B']].values
hpat_func = self.jit(test_impl)
df = pd.DataFrame({'A': [1, 2, 3], 'B': [True, False, True]})
np.testing.assert_array_equal(test_impl(df), hpat_func(df))
@skip_numba_jit
def test_fillna(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
B = df.A.fillna(5.0)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
@skip_numba_jit
def test_fillna_inplace(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
df.A.fillna(5.0, inplace=True)
return df.A.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
@skip_numba_jit
def test_column_mean(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = np.nan
df = pd.DataFrame({'A': A})
return df.A.mean()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
@skip_numba_jit
def test_column_var(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = 4.0
df = pd.DataFrame({'A': A})
return df.A.var()
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(), test_impl())
@skip_numba_jit
def test_column_std(self):
def test_impl():
A = np.array([1., 2., 3.])
A[0] = 4.0
df = pd.DataFrame({'A': A})
return df.A.std()
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(), test_impl())
@skip_numba_jit
def test_column_map(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n)})
df['B'] = df.A.map(lambda a: 2 * a)
return df.B.sum()
n = 121
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
@skip_numba_jit
def test_column_map_arg(self):
def test_impl(df):
df['B'] = df.A.map(lambda a: 2 * a)
return
n = 121
df1 = pd.DataFrame({'A': np.arange(n)})
df2 = pd.DataFrame({'A': np.arange(n)})
hpat_func = self.jit(test_impl)
hpat_func(df1)
self.assertTrue(hasattr(df1, 'B'))
test_impl(df2)
np.testing.assert_equal(df1.B.values, df2.B.values)
@skip_numba_jit
@skip_sdc_jit('Not implemented in sequential transport layer')
def test_cumsum(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
Ac = df.A.cumsum()
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_array_OneDs(), 2)
self.assertEqual(count_parfor_REPs(), 0)
self.assertEqual(count_parfor_OneDs(), 2)
self.assertTrue(dist_IR_contains('dist_cumsum'))
@skip_numba_jit
@skip_sdc_jit('Not implemented in sequential transport layer')
def test_column_distribution(self):
# make sure all column calls are distributed
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
df.A.fillna(5.0, inplace=True)
DF = df.A.fillna(5.0)
s = DF.sum()
m = df.A.mean()
v = df.A.var()
t = df.A.std()
Ac = df.A.cumsum()
return Ac.sum() + s + m + v + t
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
self.assertTrue(dist_IR_contains('dist_cumsum'))
@skip_numba_jit
@skip_sdc_jit('Not implemented in sequential transport layer')
def test_quantile_parallel(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float64)})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_quantile_parallel_float_nan(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float32)})
df.A[0:100] = np.nan
df.A[200:331] = np.nan
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_quantile_parallel_int(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.int32)})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_quantile_sequential(self):
def test_impl(A):
df = pd.DataFrame({'A': A})
return df.A.quantile(.25)
hpat_func = self.jit(test_impl)
n = 1001
A = np.arange(0, n, 1, np.float64)
np.testing.assert_almost_equal(hpat_func(A), test_impl(A))
@skip_numba_jit
def test_nunique(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n)})
df.A[2] = 0
return df.A.nunique()
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
@skip_numba_jit
def test_nunique_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return df.four.nunique()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
@skip_numba_jit
def test_nunique_str(self):
def test_impl(n):
df = pd.DataFrame({'A': ['aa', 'bb', 'aa', 'cc', 'cc']})
return df.A.nunique()
hpat_func = self.jit(test_impl)
n = 1001
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
@unittest.skip('AssertionError - fix needed\n'
'5 != 3\n')
def test_nunique_str_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return df.two.nunique()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
# test compile again for overload related issues
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
@skip_numba_jit
def test_unique_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return (df.four.unique() == 3.0).sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
@unittest.skip('AssertionError - fix needed\n'
'2 != 1\n')
def test_unique_str_parallel(self):
# TODO: test without file
def test_impl():
df = pq.read_table('example.parquet').to_pandas()
return (df.two.unique() == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
@skip_numba_jit
@skip_sdc_jit('Not implemented in sequential transport layer')
def test_describe(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(0, n, 1, np.float64)})
return df.A.describe()
hpat_func = self.jit(test_impl)
n = 1001
hpat_func(n)
# XXX: test actual output
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_str_contains_regex(self):
def test_impl():
A = StringArray(['ABC', 'BB', 'ADEF'])
df = pd.DataFrame({'A': A})
B = df.A.str.contains('AB*', regex=True)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), 2)
@skip_numba_jit
def test_str_contains_noregex(self):
def test_impl():
A = StringArray(['ABC', 'BB', 'ADEF'])
df = pd.DataFrame({'A': A})
B = df.A.str.contains('BB', regex=False)
return B.sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), 1)
@skip_numba_jit
def test_str_replace_regex(self):
def test_impl(df):
return df.A.str.replace('AB*', 'EE', regex=True)
df = pd.DataFrame({'A': ['ABCC', 'CABBD']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_replace_noregex(self):
def test_impl(df):
return df.A.str.replace('AB', 'EE', regex=False)
df = pd.DataFrame({'A': ['ABCC', 'CABBD']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_replace_regex_parallel(self):
def test_impl(df):
B = df.A.str.replace('AB*', 'EE', regex=True)
return B
n = 5
A = ['ABCC', 'CABBD', 'CCD', 'CCDAABB', 'ED']
start, end = get_start_end(n)
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_str_split(self):
def test_impl(df):
return df.A.str.split(',')
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D', 'G', '', 'g,f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_split_default(self):
def test_impl(df):
return df.A.str.split()
df = pd.DataFrame({'A': ['AB CC', 'C ABB D', 'G ', ' ', 'g\t f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_split_filter(self):
def test_impl(df):
B = df.A.str.split(',')
df2 = pd.DataFrame({'B': B})
return df2[df2.B.str.len() > 1]
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D', 'G', '', 'g,f']})
hpat_func = self.jit(test_impl)
pd.testing.assert_frame_equal(
hpat_func(df), test_impl(df).reset_index(drop=True))
@skip_numba_jit
def test_str_split_box_df(self):
def test_impl(df):
return pd.DataFrame({'B': df.A.str.split(',')})
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df).B, test_impl(df).B, check_names=False)
@skip_numba_jit
def test_str_split_unbox_df(self):
def test_impl(df):
return df.A.iloc[0]
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
df2 = pd.DataFrame({'A': df.A.str.split(',')})
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(df2), test_impl(df2))
    @unittest.skip('Getitem Series with list values not implemented')
def test_str_split_bool_index(self):
def test_impl(df):
C = df.A.str.split(',')
return C[df.B == 'aa']
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D'], 'B': ['aa', 'bb']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_split_parallel(self):
def test_impl(df):
B = df.A.str.split(',')
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_str_get(self):
def test_impl(df):
B = df.A.str.split(',')
return B.str.get(1)
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_split(self):
def test_impl(df):
return df.A.str.split(',')
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_get_parallel(self):
def test_impl(df):
A = df.A.str.split(',')
B = A.str.get(1)
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD,F', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_str_get_to_numeric(self):
def test_impl(df):
B = df.A.str.split(',')
C = pd.to_numeric(B.str.get(1), errors='coerce')
return C
df = pd.DataFrame({'A': ['AB,12', 'C,321,D']})
hpat_func = self.jit(locals={'C': types.int64[:]})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_flatten(self):
def test_impl(df):
A = df.A.str.split(',')
return pd.Series(list(itertools.chain(*A)))
df = pd.DataFrame({'A': ['AB,CC', 'C,ABB,D']})
hpat_func = self.jit(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_str_flatten_parallel(self):
def test_impl(df):
A = df.A.str.split(',')
B = pd.Series(list(itertools.chain(*A)))
return B
n = 5
start, end = get_start_end(n)
A = ['AB,CC', 'C,ABB,D', 'CAD', 'CA,D', 'AA,,D']
df = pd.DataFrame({'A': A[start:end]})
hpat_func = self.jit(distributed={'df', 'B'})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
self.assertEqual(count_array_REPs(), 3)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_to_numeric(self):
def test_impl(df):
B = pd.to_numeric(df.A, errors='coerce')
return B
df = pd.DataFrame({'A': ['123.1', '331.2']})
hpat_func = self.jit(locals={'B': types.float64[:]})(test_impl)
pd.testing.assert_series_equal(
hpat_func(df), test_impl(df), check_names=False)
@skip_numba_jit
def test_1D_Var_len(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n), 'B': np.arange(n) + 1.0})
df1 = df[df.A > 5]
return len(df1.B)
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_rolling1(self):
# size 3 without unroll
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n), 'B': np.random.ranf(n)})
Ac = df.A.rolling(3).sum()
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
# size 7 with unroll
def test_impl_2(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.rolling(7).sum()
return Ac.sum()
        hpat_func = self.jit(test_impl_2)
        n = 121
        self.assertEqual(hpat_func(n), test_impl_2(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_rolling2(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
df['moving average'] = df.A.rolling(window=5, center=True).mean()
return df['moving average'].sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_rolling3(self):
def test_impl(n):
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
Ac = df.A.rolling(3, center=True).apply(lambda a: a[0] + 2 * a[1] + a[2])
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 121
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_shift1(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.shift(1)
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@unittest.skip('Error - fix needed\n'
'NUMA_PES=3 build')
def test_shift2(self):
def test_impl(n):
df = pd.DataFrame({'A': np.arange(n) + 1.0, 'B': np.random.ranf(n)})
Ac = df.A.pct_change(1)
return Ac.sum()
hpat_func = self.jit(test_impl)
n = 11
np.testing.assert_almost_equal(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_df_input(self):
def test_impl(df):
return df.B.sum()
n = 121
df = pd.DataFrame({'A': np.ones(n), 'B': np.random.ranf(n)})
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(df), test_impl(df))
@skip_numba_jit
def test_df_input2(self):
def test_impl(df):
C = df.B == 'two'
return C.sum()
n = 11
df = pd.DataFrame({'A': np.random.ranf(3 * n), 'B': ['one', 'two', 'three'] * n})
hpat_func = self.jit(test_impl)
np.testing.assert_almost_equal(hpat_func(df), test_impl(df))
@skip_numba_jit
def test_df_input_dist1(self):
def test_impl(df):
return df.B.sum()
n = 121
A = [3, 4, 5, 6, 1]
B = [5, 6, 2, 1, 3]
n = 5
start, end = get_start_end(n)
df = pd.DataFrame({'A': A, 'B': B})
df_h = pd.DataFrame({'A': A[start:end], 'B': B[start:end]})
hpat_func = self.jit(distributed={'df'})(test_impl)
np.testing.assert_almost_equal(hpat_func(df_h), test_impl(df))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_concat(self):
def test_impl(n):
df1 = pd.DataFrame({'key1': np.arange(n), 'A': np.arange(n) + 1.0})
df2 = pd.DataFrame({'key2': n - np.arange(n), 'A': n + np.arange(n) + 1.0})
df3 = pd.concat([df1, df2])
return df3.A.sum() + df3.key2.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
n = 11111
self.assertEqual(hpat_func(n), test_impl(n))
@skip_numba_jit
def test_concat_str(self):
def test_impl():
df1 = pq.read_table('example.parquet').to_pandas()
df2 = pq.read_table('example.parquet').to_pandas()
A3 = pd.concat([df1, df2])
return (A3.two == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
def test_concat_series(self):
def test_impl(n):
df1 = pd.DataFrame({'key1': np.arange(n), 'A': np.arange(n) + 1.0})
df2 = pd.DataFrame({'key2': n - np.arange(n), 'A': n + np.arange(n) + 1.0})
A3 = pd.concat([df1.A, df2.A])
return A3.sum()
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
n = 11111
self.assertEqual(hpat_func(n), test_impl(n))
@skip_numba_jit
def test_concat_series_str(self):
def test_impl():
df1 = pq.read_table('example.parquet').to_pandas()
df2 = pq.read_table('example.parquet').to_pandas()
A3 = pd.concat([df1.two, df2.two])
return (A3 == 'foo').sum()
hpat_func = self.jit(test_impl)
self.assertEqual(hpat_func(), test_impl())
self.assertEqual(count_array_REPs(), 0)
self.assertEqual(count_parfor_REPs(), 0)
@skip_numba_jit
@unittest.skipIf(int(os.getenv('SDC_NP_MPI', '0')) > 1, 'Test hangs on NP=2 and NP=3 on all platforms')
def test_intraday(self):
def test_impl(nsyms):
max_num_days = 100
all_res = 0.0
for i in sdc.prange(nsyms):
s_open = 20 * np.ones(max_num_days)
s_low = 28 * np.ones(max_num_days)
s_close = 19 * np.ones(max_num_days)
df = pd.DataFrame({'Open': s_open, 'Low': s_low, 'Close': s_close})
df['Stdev'] = df['Close'].rolling(window=90).std()
df['Moving Average'] = df['Close'].rolling(window=20).mean()
df['Criteria1'] = (df['Open'] - df['Low'].shift(1)) < -df['Stdev']
df['Criteria2'] = df['Open'] > df['Moving Average']
df['BUY'] = df['Criteria1'] & df['Criteria2']
df['Pct Change'] = (df['Close'] - df['Open']) / df['Open']
df['Rets'] = df['Pct Change'][df['BUY']]
all_res += df['Rets'].mean()
return all_res
hpat_func = self.jit(test_impl)
n = 11
self.assertEqual(hpat_func(n), test_impl(n))
self.assertEqual(count_array_OneDs(), 0)
self.assertEqual(count_parfor_OneDs(), 1)
@skip_numba_jit
def test_var_dist1(self):
def test_impl(A, B):
df = pd.DataFrame({'A': A, 'B': B})
df2 = df.groupby('A', as_index=False)['B'].sum()
# TODO: fix handling of df setitem to force match of array dists
# probably with a new node that is appended to the end of basic block
# df2['C'] = np.full(len(df2.B), 3, np.int8)
# TODO: full_like for Series
df2['C'] = np.full_like(df2.B.values, 3, np.int8)
return df2
A = np.array([1, 1, 2, 3])
B = np.array([3, 4, 5, 6])
hpat_func = self.jit(locals={'A:input': 'distributed',
'B:input': 'distributed', 'df2:return': 'distributed'})(test_impl)
start, end = get_start_end(len(A))
df2 = hpat_func(A[start:end], B[start:end])
# TODO:
# pd.testing.assert_frame_equal(
# hpat_func(A[start:end], B[start:end]), test_impl(A, B))
if __name__ == "__main__":
unittest.main()
| IntelLabs/hpat | sdc/tests/test_hiframes.py | Python | bsd-2-clause | 29,355 |
---@module fimbul.v35.material_template
local material_template = {}
function material_template:new(y)
local neu = y or {}
-- Do a deep resolve
if neu.material then
neu = neu.material
end
setmetatable(neu, self)
self.__index = self
neu.templatetype = "material"
-- TODO: Check if everything is here and in proper order
-- neu:check()
return neu
end
return material_template
| n0la/fimbul | fimbul/v35/material_template.lua | Lua | bsd-2-clause | 418 |
var crypto = require('crypto');
var Canvas = require('canvas');
var _ = require('lodash');
var bu = require('./bufutil');
var fmt = require('util').format;
var unpack = require('./unpack');
var bright = require('./bright');
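// Derive a `len`-byte fingerprint of `buf`: the SHA-512 digest is split into
// `len`-byte groups which are XOR-folded together.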
function fprint(buf, len) {
if (len > 64)
throw new Error(fmt("sha512 can only generate 64B of data: %dB requested", len));
return _(crypto.createHash('sha512').update(buf).digest())
.groupBy(function (x, k) { return Math.floor(k/len); })
.reduce(bu.xor);
}
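// Hash `str` into an identicon description: a one-byte suffix is appended and
// varied (0..255) until the derived pixel pattern has a fill ratio between
// minFill and maxFill; returns two brightness-sorted colors plus the n-pixel
// bit pattern.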
function idhash(str, n, minFill, maxFill) {
var buf = new Buffer(str.length + 1);
buf.write(str);
for (var i=0; i<0x100; i++) {
buf[buf.length - 1] = i;
var f = fprint(buf, Math.ceil(n/8)+6);
var pixels = _(f.slice(6))
.map(function (x) { return unpack(x); })
.flatten().take(n);
var setPixels = pixels.filter().size();
var c = [ f.slice(0, 3), f.slice(3, 6)];
c.sort(bright.cmp);
if (setPixels > (minFill * n) && setPixels < (maxFill * n))
return {
colors: c.map(function (x) { return x.toString('hex'); }),
pixels: pixels.value()
};
}
throw new Error(fmt("String '''%s''' unhashable in single-byte search space.", str));
}
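// Mirror the left half of the pixel list across the vertical axis to build a
// horizontally symmetric dimension x dimension grid.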
function reflect(id, dimension) {
var mid = Math.ceil(dimension / 2);
var odd = Boolean(dimension % 2);
var pic = [];
for (var row=0; row<dimension; row++) {
pic[row] = [];
for (var col=0; col<dimension; col++) {
var p = (row * mid) + col;
if (col>=mid) {
var d = mid - (odd ? 1 : 0) - col;
var ad = Math.abs(d);
p = (row * mid) + mid - 1 - ad;
}
pic[row][col] = id.pixels[p];
// console.error(fmt("looking for %d, of %d for %d,%d", p, id.pixels.length, row, col))
}
}
return pic;
}
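// Main entry point: hash the input string, mirror the resulting pixel pattern
// and render it onto a canvas according to the (merged) style options.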
function retricon(str, opts) {
opts = _.merge({}, retricon.defaults, opts);
var dimension = opts.tiles;
var pixelSize = opts.pixelSize;
var border = opts.pixelPadding;
var mid = Math.ceil(dimension / 2);
var id = idhash(str, mid * dimension, opts.minFill, opts.maxFill);
var pic = reflect(id, dimension);
var csize = (pixelSize * dimension) + (opts.imagePadding * 2);
var c = Canvas.createCanvas(csize, csize);
var ctx = c.getContext('2d');
if (_.isString(opts.bgColor)) {
ctx.fillStyle = opts.bgColor;
} else if (_.isNumber(opts.bgColor)) {
ctx.fillStyle = '#' + id.colors[opts.bgColor];
}
if (! _.isNull(opts.bgColor))
ctx.fillRect(0, 0, csize, csize);
var drawOp = ctx.fillRect.bind(ctx);
if (_.isString(opts.pixelColor)) {
ctx.fillStyle = opts.pixelColor;
} else if (_.isNumber(opts.pixelColor)) {
ctx.fillStyle = '#' + id.colors[opts.pixelColor];
} else {
drawOp = ctx.clearRect.bind(ctx);
}
for (var x=0; x<dimension; x++)
for (var y=0; y<dimension; y++)
if (pic[y][x])
drawOp((x*pixelSize) + border + opts.imagePadding,
(y*pixelSize) + border + opts.imagePadding,
pixelSize - (border * 2),
pixelSize - (border * 2));
return c;
}
retricon.defaults = {
pixelSize: 10,
bgColor: null,
pixelPadding: 0,
imagePadding: 0,
tiles: 5,
minFill: 0.3,
maxFill: 0.90,
pixelColor: 0
};
retricon.style = {
github: {
pixelSize: 70,
bgColor: '#F0F0F0',
pixelPadding: -1,
imagePadding: 35,
tiles: 5
},
gravatar: {
tiles: 8,
bgColor: 1
},
mono: {
bgColor: '#F0F0F0',
pixelColor: '#000000',
tiles: 6,
pixelSize: 12,
pixelPadding: -1,
imagePadding: 6
},
mosaic: {
imagePadding: 2,
pixelPadding: 1,
pixelSize: 16,
bgColor: '#F0F0F0'
},
mini: {
pixelSize: 10,
pixelPadding: 1,
tiles: 3,
bgColor: 0,
pixelColor: 1
},
window: {
pixelColor: null,
bgColor: 0,
imagePadding: 2,
pixelPadding: 1,
pixelSize: 16
}
};
module.exports = retricon;
| sehrgut/node-retricon | lib/index.js | JavaScript | bsd-2-clause | 3,668 |
<?php
namespace AppZap\PHPFramework\Tests\Functional\Testapp\Controller;
use AppZap\PHPFramework\Mvc\AbstractController;
class IndexController extends AbstractController {
/**
* @return string
*/
public function cli() {
return '';
}
} | app-zap/PHPFramework | tests/Functional/testapp/classes/Controller/IndexController.php | PHP | bsd-2-clause | 254 |
#include <mscorlib/System/Runtime/Serialization/mscorlib_System_Runtime_Serialization_SerializationObjectManager.h>
#include <mscorlib/System/mscorlib_System_Type.h>
#include <mscorlib/System/mscorlib_System_String.h>
namespace mscorlib
{
namespace System
{
namespace Runtime
{
namespace Serialization
{
//Public Methods
void SerializationObjectManager::RegisterObject(mscorlib::System::Object obj)
{
MonoType *__parameter_types__[1];
void *__parameters__[1];
__parameter_types__[0] = Global::GetType(typeid(obj).name());
__parameters__[0] = (MonoObject*)obj;
Global::InvokeMethod("mscorlib", "System.Runtime.Serialization", "SerializationObjectManager", 0, NULL, "RegisterObject", __native_object__, 1, __parameter_types__, __parameters__, NULL);
}
void SerializationObjectManager::RaiseOnSerializedEvent()
{
Global::InvokeMethod("mscorlib", "System.Runtime.Serialization", "SerializationObjectManager", 0, NULL, "RaiseOnSerializedEvent", __native_object__, 0, NULL, NULL, NULL);
}
}
}
}
}
| brunolauze/MonoNative | MonoNative/mscorlib/System/Runtime/Serialization/mscorlib_System_Runtime_Serialization_SerializationObjectManager.cpp | C++ | bsd-2-clause | 1,080 |
/*
This file is a part of libcds - Concurrent Data Structures library
Version: 2.0.0
(C) Copyright Maxim Khizhinsky ([email protected]) 2006-2014
Distributed under the BSD license (see accompanying file license.txt)
Source code repo: http://github.com/khizmax/libcds/
Download: http://sourceforge.net/projects/libcds/files/
*/
#include "hdr_intrusive_msqueue.h"
#include <cds/intrusive/msqueue.h>
#include <cds/gc/dhp.h>
namespace queue {
#define TEST(X) void IntrusiveQueueHeaderTest::test_##X() { test<X>(); }
namespace {
typedef IntrusiveQueueHeaderTest::base_hook_item< ci::msqueue::node<cds::gc::DHP > > base_item_type;
typedef IntrusiveQueueHeaderTest::member_hook_item< ci::msqueue::node<cds::gc::DHP > > member_item_type;
// DHP base hook
typedef ci::MSQueue< cds::gc::DHP, base_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> >
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
>::type
> MSQueue_DHP_base;
// DHP member hook
typedef ci::MSQueue< cds::gc::DHP, member_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::member_hook<
offsetof( member_item_type, hMember ),
ci::opt::gc<cds::gc::DHP>
>
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
>::type
> MSQueue_DHP_member;
/// DHP base hook + item counter
typedef ci::MSQueue< cds::gc::DHP, base_item_type,
typename ci::msqueue::make_traits<
ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, ci::opt::hook<
ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> >
>
, co::item_counter< cds::atomicity::item_counter >
, co::memory_model< co::v::relaxed_ordering >
>::type
> MSQueue_DHP_base_ic;
// DHP member hook + item counter
typedef ci::MSQueue< cds::gc::DHP, member_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::member_hook<
offsetof( member_item_type, hMember ),
ci::opt::gc<cds::gc::DHP>
>
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, co::item_counter< cds::atomicity::item_counter >
>::type
> MSQueue_DHP_member_ic;
// DHP base hook + stat
typedef ci::MSQueue< cds::gc::DHP, base_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> >
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, co::stat< ci::msqueue::stat<> >
>::type
> MSQueue_DHP_base_stat;
// DHP member hook + stat
typedef ci::MSQueue< cds::gc::DHP, member_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::member_hook<
offsetof( member_item_type, hMember ),
ci::opt::gc<cds::gc::DHP>
>
>
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, co::stat< ci::msqueue::stat<> >
>::type
> MSQueue_DHP_member_stat;
// DHP base hook + alignment
typedef ci::MSQueue< cds::gc::DHP, base_item_type,
typename ci::msqueue::make_traits<
ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
, ci::opt::hook<
ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> >
>
, co::alignment< 32 >
>::type
> MSQueue_DHP_base_align;
// DHP member hook + alignment
typedef ci::MSQueue< cds::gc::DHP, member_item_type,
typename ci::msqueue::make_traits<
ci::opt::hook<
ci::msqueue::member_hook<
offsetof( member_item_type, hMember ),
ci::opt::gc<cds::gc::DHP>
>
>
, co::alignment< 32 >
, ci::opt::disposer< IntrusiveQueueHeaderTest::faked_disposer >
>::type
> MSQueue_DHP_member_align;
// DHP base hook + no alignment
struct traits_MSQueue_DHP_base_noalign : public ci::msqueue::traits {
typedef ci::msqueue::base_hook< ci::opt::gc<cds::gc::DHP> > hook;
typedef IntrusiveQueueHeaderTest::faked_disposer disposer;
enum { alignment = co::no_special_alignment };
};
typedef ci::MSQueue< cds::gc::DHP, base_item_type, traits_MSQueue_DHP_base_noalign > MSQueue_DHP_base_noalign;
// DHP member hook + no alignment
struct traits_MSQueue_DHP_member_noalign : public ci::msqueue::traits {
typedef ci::msqueue::member_hook <
offsetof( member_item_type, hMember ),
ci::opt::gc < cds::gc::DHP >
> hook;
typedef IntrusiveQueueHeaderTest::faked_disposer disposer;
enum { alignment = co::no_special_alignment };
};
typedef ci::MSQueue< cds::gc::DHP, member_item_type, traits_MSQueue_DHP_member_noalign > MSQueue_DHP_member_noalign;
// DHP base hook + cache alignment
struct traits_MSQueue_DHP_base_cachealign : public traits_MSQueue_DHP_base_noalign
{
enum { alignment = co::cache_line_alignment };
};
typedef ci::MSQueue< cds::gc::DHP, base_item_type, traits_MSQueue_DHP_base_cachealign > MSQueue_DHP_base_cachealign;
// DHP member hook + cache alignment
struct traits_MSQueue_DHP_member_cachealign : public traits_MSQueue_DHP_member_noalign
{
enum { alignment = co::cache_line_alignment };
};
typedef ci::MSQueue< cds::gc::DHP, member_item_type, traits_MSQueue_DHP_member_cachealign > MSQueue_DHP_member_cachealign;
} // namespace
TEST(MSQueue_DHP_base)
TEST(MSQueue_DHP_member)
TEST(MSQueue_DHP_base_ic)
TEST(MSQueue_DHP_member_ic)
TEST(MSQueue_DHP_base_stat)
TEST(MSQueue_DHP_member_stat)
TEST(MSQueue_DHP_base_align)
TEST(MSQueue_DHP_member_align)
TEST(MSQueue_DHP_base_noalign)
TEST(MSQueue_DHP_member_noalign)
TEST(MSQueue_DHP_base_cachealign)
TEST(MSQueue_DHP_member_cachealign)
} // namespace queue
| Rapotkinnik/libcds | tests/test-hdr/queue/hdr_intrusive_msqueue_dhp.cpp | C++ | bsd-2-clause | 6,600 |
var compilerSupport=require('../../src/compilerSupport');var main = function () {
var __builder = new compilerSupport.TaskBuilder(), __state = 0, __continue = __builder.CONT, __ex;
var data;
return __builder.run(function () {
switch (__state) {
case 0: {
data = 12345;
console.log("data: " + data);
__state = -1;
__builder.ret(data);
break;
}
default:
throw 'Internal error: encountered wrong state';
}
});
};
main().then(function(x) {
console.log("returned: " + x);
}, function(y) {
console.log("failed: " + y);
});
| omgtehlion/asjs | tests/00-misc/00-fulfills-promise.exp.js | JavaScript | bsd-2-clause | 678 |
package test
import (
"github.com/tidepool-org/platform/config"
"github.com/tidepool-org/platform/log"
"github.com/tidepool-org/platform/version"
)
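// Provider is a hand-rolled test double for the application provider: each
// method records its invocations and returns either a configured stub result,
// the next queued output, or a fixed output, panicking if nothing has been
// configured.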
type Provider struct {
VersionReporterInvocations int
VersionReporterStub func() version.Reporter
VersionReporterOutputs []version.Reporter
VersionReporterOutput *version.Reporter
ConfigReporterInvocations int
ConfigReporterStub func() config.Reporter
ConfigReporterOutputs []config.Reporter
ConfigReporterOutput *config.Reporter
LoggerInvocations int
LoggerStub func() log.Logger
LoggerOutputs []log.Logger
LoggerOutput *log.Logger
PrefixInvocations int
PrefixStub func() string
PrefixOutputs []string
PrefixOutput *string
NameInvocations int
NameStub func() string
NameOutputs []string
NameOutput *string
UserAgentInvocations int
UserAgentStub func() string
UserAgentOutputs []string
UserAgentOutput *string
}
func NewProvider() *Provider {
return &Provider{}
}
func (p *Provider) VersionReporter() version.Reporter {
p.VersionReporterInvocations++
if p.VersionReporterStub != nil {
return p.VersionReporterStub()
}
if len(p.VersionReporterOutputs) > 0 {
output := p.VersionReporterOutputs[0]
p.VersionReporterOutputs = p.VersionReporterOutputs[1:]
return output
}
if p.VersionReporterOutput != nil {
return *p.VersionReporterOutput
}
panic("VersionReporter has no output")
}
func (p *Provider) ConfigReporter() config.Reporter {
p.ConfigReporterInvocations++
if p.ConfigReporterStub != nil {
return p.ConfigReporterStub()
}
if len(p.ConfigReporterOutputs) > 0 {
output := p.ConfigReporterOutputs[0]
p.ConfigReporterOutputs = p.ConfigReporterOutputs[1:]
return output
}
if p.ConfigReporterOutput != nil {
return *p.ConfigReporterOutput
}
panic("ConfigReporter has no output")
}
func (p *Provider) Logger() log.Logger {
p.LoggerInvocations++
if p.LoggerStub != nil {
return p.LoggerStub()
}
if len(p.LoggerOutputs) > 0 {
output := p.LoggerOutputs[0]
p.LoggerOutputs = p.LoggerOutputs[1:]
return output
}
if p.LoggerOutput != nil {
return *p.LoggerOutput
}
panic("Logger has no output")
}
func (p *Provider) Prefix() string {
p.PrefixInvocations++
if p.PrefixStub != nil {
return p.PrefixStub()
}
if len(p.PrefixOutputs) > 0 {
output := p.PrefixOutputs[0]
p.PrefixOutputs = p.PrefixOutputs[1:]
return output
}
if p.PrefixOutput != nil {
return *p.PrefixOutput
}
panic("Prefix has no output")
}
func (p *Provider) Name() string {
p.NameInvocations++
if p.NameStub != nil {
return p.NameStub()
}
if len(p.NameOutputs) > 0 {
output := p.NameOutputs[0]
p.NameOutputs = p.NameOutputs[1:]
return output
}
if p.NameOutput != nil {
return *p.NameOutput
}
panic("Name has no output")
}
func (p *Provider) UserAgent() string {
p.UserAgentInvocations++
if p.UserAgentStub != nil {
return p.UserAgentStub()
}
if len(p.UserAgentOutputs) > 0 {
output := p.UserAgentOutputs[0]
p.UserAgentOutputs = p.UserAgentOutputs[1:]
return output
}
if p.UserAgentOutput != nil {
return *p.UserAgentOutput
}
panic("UserAgent has no output")
}
func (p *Provider) AssertOutputsEmpty() {
if len(p.VersionReporterOutputs) > 0 {
panic("VersionReporterOutputs is not empty")
}
if len(p.ConfigReporterOutputs) > 0 {
panic("ConfigReporterOutputs is not empty")
}
if len(p.LoggerOutputs) > 0 {
panic("LoggerOutputs is not empty")
}
if len(p.PrefixOutputs) > 0 {
panic("PrefixOutputs is not empty")
}
if len(p.NameOutputs) > 0 {
panic("NameOutputs is not empty")
}
if len(p.UserAgentOutputs) > 0 {
panic("UserAgentOutputs is not empty")
}
}
| tidepool-org/platform | application/test/provider.go | GO | bsd-2-clause | 3,861 |
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/epoll.h>
#include <arpa/inet.h>
#define BUFF_SIZE 1024
int main(int argc, const char *argv[])
{
int i = 0;
char buff[BUFF_SIZE];
ssize_t msg_len = 0;
int srv_fd = -1;
int cli_fd = -1;
int epoll_fd = -1;
struct sockaddr_in srv_addr;
struct sockaddr_in cli_addr;
    socklen_t cli_addr_len = sizeof(cli_addr); /* accept() expects the buffer size on input */
struct epoll_event e, es[2];
memset(&srv_addr, 0, sizeof(srv_addr));
memset(&cli_addr, 0, sizeof(cli_addr));
memset(&e, 0, sizeof(e));
srv_fd = socket(AF_INET, SOCK_STREAM | SOCK_NONBLOCK, 0);
if (srv_fd < 0) {
printf("Cannot create socket\n");
return 1;
}
srv_addr.sin_family = AF_INET;
srv_addr.sin_addr.s_addr = htonl(INADDR_ANY);
srv_addr.sin_port = htons(5555);
if (bind(srv_fd, (struct sockaddr*) &srv_addr, sizeof(srv_addr)) < 0) {
printf("Cannot bind socket\n");
close(srv_fd);
return 1;
}
if (listen(srv_fd, 1) < 0) {
printf("Cannot listen\n");
close(srv_fd);
return 1;
}
epoll_fd = epoll_create(2);
if (epoll_fd < 0) {
printf("Cannot create epoll\n");
close(srv_fd);
return 1;
}
e.events = EPOLLIN;
e.data.fd = srv_fd;
if (epoll_ctl(epoll_fd, EPOLL_CTL_ADD, srv_fd, &e) < 0) {
printf("Cannot add server socket to epoll\n");
close(epoll_fd);
close(srv_fd);
return 1;
}
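    /* Event loop: wait for readiness on the listening socket (accept a client
     * and register it with epoll) or on the connected client (echo one message
     * back, then deregister and close it). */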
for(;;) {
i = epoll_wait(epoll_fd, es, 2, -1);
if (i < 0) {
printf("Cannot wait for events\n");
close(epoll_fd);
close(srv_fd);
return 1;
}
for (--i; i > -1; --i) {
            if (es[i].data.fd == srv_fd) {
                /* the address length must be initialized before accept() fills it in */
                cli_addr_len = sizeof(cli_addr);
                cli_fd = accept(srv_fd, (struct sockaddr*) &cli_addr, &cli_addr_len);
if (cli_fd < 0) {
printf("Cannot accept client\n");
close(epoll_fd);
close(srv_fd);
return 1;
}
e.data.fd = cli_fd;
if (epoll_ctl(epoll_fd, EPOLL_CTL_ADD, cli_fd, &e) < 0) {
printf("Cannot accept client\n");
close(epoll_fd);
close(srv_fd);
return 1;
}
}
if (es[i].data.fd == cli_fd) {
if (es[i].events & EPOLLIN) {
msg_len = read(cli_fd, buff, BUFF_SIZE);
if (msg_len > 0) {
write(cli_fd, buff, msg_len);
}
                    /* deregister the client fd before closing it; epoll_ctl() on a
                     * closed descriptor fails with EBADF */
                    epoll_ctl(epoll_fd, EPOLL_CTL_DEL, cli_fd, &e);
                    close(cli_fd);
                    cli_fd = -1;
}
}
}
}
return 0;
}
| AdamWolanski/internship-tasks | task03/epoll/main.c | C | bsd-2-clause | 2,905 |
// (c) 2010-2014 IndiegameGarden.com. Distributed under the FreeBSD license in LICENSE.txt
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("TTengine")]
[assembly: AssemblyProduct("TTengine")]
[assembly: AssemblyDescription("2D game engine for C# XNA 4.0")]
[assembly: AssemblyCompany("IndiegameGarden.com")]
[assembly: AssemblyCopyright("Copyright © IndiegameGarden.com 2010-2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type. Only Windows
// assemblies support COM.
[assembly: ComVisible(false)]
// On Windows, the following GUID is for the ID of the typelib if this
// project is exposed to COM. On other platforms, it unique identifies the
// title storage container when deploying this assembly to the device.
[assembly: Guid("0de778ef-fe76-4a50-d0e9-cabba1a02517")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
[assembly: AssemblyVersion("5.0.0.1")]
| IndiegameGarden/Quest | TTengine/Properties/AssemblyInfo.cs | C# | bsd-2-clause | 1,485 |
/*
* The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
*
* Find the sum of all the primes below two million.
*/
#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>
#include "euler.h"
#define PROBLEM 10
int
main(int argc, char **argv)
{
    uint64_t sum = 0, number;
    /* sum every prime strictly below two million */
    for (number = 2; number < 2000000; number++) {
        if (is_prime(number)) {
            sum += number;
        }
    }
    printf("%" PRIu64 "\n", sum);
return(0);
}
| skreuzer/euler | problem_010.c | C | bsd-2-clause | 440 |
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Linq;
using System.Text;
namespace Mkko.Configuration
{
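    /// <summary>
    /// Collection of "executionEnvironment" configuration elements, keyed by their Id.
    /// </summary>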
[ConfigurationCollection(typeof(ExecutionEnvironmentSettings), AddItemName = "executionEnvironment", CollectionType = ConfigurationElementCollectionType.BasicMap)]
public class ExecutionEnvironmentCollection : ConfigurationElementCollection
{
public override ConfigurationElementCollectionType CollectionType
{
get { return ConfigurationElementCollectionType.BasicMap; }
}
protected override string ElementName
{
get { return "executionEnvironment"; }
}
protected override ConfigurationElement CreateNewElement()
{
return new ExecutionEnvironmentSettings();
}
protected override object GetElementKey(ConfigurationElement element)
{
return (element as ExecutionEnvironmentSettings).Id;
}
}
}
| mk83ko/any-log-analyzer | AnyLogAnalyzerCore/Configuration/ExecutionEnvironmentCollection.cs | C# | bsd-2-clause | 1,003 |
import pytest
import urllib.error
from urlpathlib import UrlPath
def test_scheme():
# does not raise NotImplementedError
UrlPath('/dev/null').touch()
def test_scheme_not_supported():
with pytest.raises(NotImplementedError):
UrlPath('http:///tmp/test').touch()
def test_scheme_not_listed():
with pytest.raises(NotImplementedError):
UrlPath('test:///tmp/test').touch()
def test_file_additional():
assert UrlPath('.').resolve() == UrlPath.cwd()
def test_scheme_alias():
    # https is covered by the scheme alias, so this fails with a network
    # URLError rather than NotImplementedError
with pytest.raises(urllib.error.URLError):
UrlPath('https://localhost/test').exists()
| morgan-del/urlpathlib | tests/test_dispatch.py | Python | bsd-2-clause | 657 |
#region Copyright notice
/**
* Copyright (c) 2018 Samsung Electronics, Inc.,
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
using System.Collections.Generic;
namespace DexterCS
{
public class ProjectAnalysisConfiguration
{
public string ProjectName { get; set; }
public string ProjectFullPath { get; set; }
private List<string> sourceDirs;
public List<string> SourceDirs
{
get { return sourceDirs ?? new List<string>(); }
set { sourceDirs = value; }
}
private List<string> headerDirs;
public List<string> HeaderDirs
{
get { return headerDirs ?? new List<string>(); }
set { headerDirs = value; }
}
private List<string> targetDir;
public List<string> TargetDirs
{
get
{
return targetDir ?? new List<string>();
}
set { targetDir = value; }
}
public string Type { get; set; }
}
}
 | Samsung/Dexter | project/dexter-cs/DexterCS/Src/ProjectAnalysisConfiguration.cs | C# | bsd-2-clause | 2,316
require "language/node"
class HttpServer < Formula
desc "Simple zero-configuration command-line HTTP server"
homepage "https://github.com/http-party/http-server"
url "https://registry.npmjs.org/http-server/-/http-server-13.0.1.tgz"
sha256 "35e08960062d766ad4c1e098f65b6e8bfb44f12516da90fd2df9974729652f03"
license "MIT"
head "https://github.com/http-party/http-server.git"
bottle do
sha256 cellar: :any_skip_relocation, all: "27f327beb2f485c4885636bbede7d6096a69659ee595b10621cf0a59d8797d32"
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
port = free_port
pid = fork do
exec "#{bin}/http-server", "-p#{port}"
end
sleep 3
output = shell_output("curl -sI http://localhost:#{port}")
assert_match "200 OK", output
ensure
Process.kill("HUP", pid)
end
end
| mbcoguno/homebrew-core | Formula/http-server.rb | Ruby | bsd-2-clause | 944 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Ocronet.Dynamic.Recognizers;
using Ocronet.Dynamic.OcroFST;
using Ocronet.Dynamic;
using Ocronet.Dynamic.ImgLib;
using Ocronet.Dynamic.Interfaces;
using Ocronet.Dynamic.Utils;
using Ocronet.Dynamic.IOData;
using System.IO;
using Ocronet.Dynamic.Segmentation;
using Ocronet.Dynamic.Recognizers.Lenet;
namespace Ocronet.DynamicConsole
{
public class TestLinerec
{
public void TestSimple()
{
Global.SetEnv("debug", Global.GetEnv("debug") + "");
// image file name to recognize
string imgFileName = "line.png";
string imgCsegFileName = "line.cseg.png";
string imgTranscriptFileName = "line.txt";
// line recognizer
Linerec lrec = (Linerec)Linerec.LoadLinerec("default.model");
//Linerec lrec = (Linerec)Linerec.LoadLinerec("2m2-reject.cmodel");
//Linerec lrec = (Linerec)Linerec.LoadLinerec("multi3.cmodel");
//Linerec lrec = (Linerec)Linerec.LoadLinerec("latin-ascii.model");
lrec.Info();
// language model
OcroFST default_lmodel = OcroFST.MakeOcroFst();
default_lmodel.Load("default.fst");
OcroFST lmodel = default_lmodel;
// read image
Bytearray image = new Bytearray(1, 1);
ImgIo.read_image_gray(image, imgFileName);
// recognize!
OcroFST fst = OcroFST.MakeOcroFst();
Intarray rseg = new Intarray();
lrec.RecognizeLine(rseg, fst, image);
// show result 1
string resStr;
fst.BestPath(out resStr);
Console.WriteLine("Fst BestPath: {0}", resStr);
// find result 2
Intarray v1 = new Intarray();
Intarray v2 = new Intarray();
Intarray inp = new Intarray();
Intarray outp = new Intarray();
Floatarray c = new Floatarray();
BeamSearch.beam_search(v1, v2, inp, outp, c, fst, lmodel, 100);
FstUtil.remove_epsilons(out resStr, outp);
Console.WriteLine("Fst BeamSearch: {0}", resStr);
Intarray cseg = new Intarray();
SegmRoutine.rseg_to_cseg(cseg, rseg, inp);
SegmRoutine.make_line_segmentation_white(cseg);
ImgLabels.simple_recolor(cseg); // for human readable
ImgIo.write_image_packed(imgCsegFileName, cseg);
File.WriteAllText(imgTranscriptFileName, resStr.Replace(" ", ""));
}
public void TestTrainLenetCseg()
{
string bookPath = "data\\0000\\";
string netFileName = "latin-lenet.model";
Linerec.GDef("linerec", "use_reject", 1);
Linerec.GDef("lenet", "junk", 1);
Linerec.GDef("lenet", "epochs", 4);
// create Linerec
Linerec linerec;
if (File.Exists(netFileName))
linerec = Linerec.LoadLinerec(netFileName);
else
{
linerec = new Linerec("lenet");
LenetClassifier classifier = linerec.GetClassifier() as LenetClassifier;
if (classifier != null)
classifier.InitNumSymbLatinAlphabet();
}
// temporary disable junk
//linerec.DisableJunk = true;
linerec.StartTraining();
int nepochs = 10;
LineSource lines = new LineSource();
lines.Init(new string[] { "data2" });
//linerec.GetClassifier().Set("epochs", 1);
for (int epoch = 1; epoch <= nepochs; epoch++)
{
linerec.Epoch(epoch);
// load cseg samples
while (!lines.Done())
{
lines.MoveNext();
Intarray cseg = new Intarray();
//Bytearray image = new Bytearray();
string transcript = lines.GetTranscript();
//lines.GetImage(image);
if (!lines.GetCharSegmentation(cseg) && cseg.Length() == 0)
{
Global.Debugf("warn", "skipping book {0} page {1} line {2} (no or bad cseg)",
lines.CurrentBook, lines.CurrentPage, lines.Current);
continue;
}
SegmRoutine.make_line_segmentation_black(cseg);
linerec.AddTrainingLine(cseg, transcript);
}
lines.Reset();
lines.Shuffle();
// do Train and clear Dataset
linerec.FinishTraining();
// do save
if (epoch % 1 == 0)
linerec.Save(netFileName);
// recognize test line
bool bakDisJunk = linerec.DisableJunk;
linerec.DisableJunk = false;
DoTestLinerecRecognize(linerec, "data2\\", "test1.png");
linerec.DisableJunk = bakDisJunk;
}
            // finally save
linerec.Save(netFileName);
}
public void TestTrainLatinCseg()
{
string bookPath = "data\\0000\\";
string netFileName = "latin-amlp.model";
Linerec.GDef("linerec", "use_reject", 1);
Linerec.GDef("lenet", "junk", 1);
// create Linerec
Linerec linerec;
if (File.Exists(netFileName))
linerec = Linerec.LoadLinerec(netFileName);
else
{
linerec = new Linerec("latin");
}
// temporary disable junk
//linerec.DisableJunk = true;
linerec.StartTraining();
int nepochs = 1;
LineSource lines = new LineSource();
lines.Init(new string[] { "data2" });
for (int epoch = 1; epoch <= nepochs; epoch++)
{
linerec.Epoch(epoch);
// load cseg samples
while (lines.MoveNext())
{
Intarray cseg = new Intarray();
//Bytearray image = new Bytearray();
string transcript = lines.GetTranscript();
//lines.GetImage(image);
if (!lines.GetCharSegmentation(cseg) && cseg.Length() == 0)
{
Global.Debugf("warn", "skipping book {0} page {1} line {2} (no or bad cseg)",
lines.CurrentBook, lines.CurrentPage, lines.CurrentLine);
continue;
}
SegmRoutine.make_line_segmentation_black(cseg);
linerec.AddTrainingLine(cseg, transcript);
}
lines.Reset();
lines.Shuffle();
// do Train and clear Dataset
linerec.FinishTraining();
// do save
if (epoch % 1 == 0)
linerec.Save(netFileName);
// recognize test line
bool bakDisJunk = linerec.DisableJunk;
linerec.DisableJunk = false;
DoTestLinerecRecognize(linerec, bookPath, "000010.png");
linerec.DisableJunk = bakDisJunk;
}
            // finally save
linerec.Save(netFileName);
}
private void DoTestLinerecRecognize(Linerec linerec, string bookPath, string filename)
{
Bytearray image = new Bytearray();
ImgIo.read_image_gray(image, bookPath + filename);
// recognize!
OcroFST fst = OcroFST.MakeOcroFst();
linerec.RecognizeLine(fst, image);
// show result
string resStr;
fst.BestPath(out resStr);
Console.WriteLine("Fst BestPath: {0}", resStr);
}
public void TestRecognizeCseg()
{
string book1Path = "data2\\0001\\";
string book2Path = "data\\0000\\";
//Linerec.GDef("linerec", "use_reject", 0);
//Linerec.GDef("lenet", "junk", 0);
Linerec linerec = Linerec.LoadLinerec("latin-amlp.model");
//Linerec linerec = Linerec.LoadLinerec("latin-lenet.model");
//Linerec linerec = Linerec.LoadLinerec("latin-ascii2.model");
//Linerec linerec = Linerec.LoadLinerec("default.model");
//linerec.Set("maxcost", 20);
DoTestLinerecRecognize(linerec, book1Path, "0010.png");
DoTestLinerecRecognize(linerec, book1Path, "0001.png");
DoTestLinerecRecognize(linerec, book1Path, "0089.png");
DoTestLinerecRecognize(linerec, book1Path, "0026.png");
DoTestLinerecRecognize(linerec, book2Path, "000001.png");
}
public void TestComputeMissingCseg()
{
//ComputeMissingCsegForBookStore("data", "default.model", "");
//ComputeMissingCsegForBookStore("data2", "latin-ascii.model", "gt");
ComputeMissingCsegForBookStore("data", "latin-lenet.model", "");
}
        /// <summary>
        /// Create char segmentation (cseg) files if missing
        /// </summary>
        /// <param name="bookPath">path to bookstore</param>
        /// <param name="model">Linerec model file</param>
        /// <param name="suffix">transcript/cseg suffix, e.g. 'gt'</param>
        /// <param name="saveRseg">write a recolored rseg image when the recognized text does not match the transcript</param>
        /// <param name="langModel">language model (FST) file</param>
public void ComputeMissingCsegForBookStore(string bookPath, string model = "default.model",
string suffix = "", bool saveRseg = false, string langModel = "default.fst")
{
// create line recognizer
Linerec linerec = Linerec.LoadLinerec(model);
// create IBookStore
IBookStore bookstore = new SmartBookStore();
bookstore.SetPrefix(bookPath);
bookstore.Info();
// language model
OcroFST lmodel = OcroFST.MakeOcroFst();
lmodel.Load(langModel);
// iterate lines of pages
for (int page = 0; page < bookstore.NumberOfPages(); page++)
{
int nlines = bookstore.LinesOnPage(page);
Console.WriteLine("Page {0} has {1} lines", page, nlines);
for (int j = 0; j < nlines; j++)
{
int line = bookstore.GetLineId(page, j);
Bytearray image = new Bytearray();
bookstore.GetLine(image, page, line);
Intarray cseg = new Intarray();
bookstore.GetCharSegmentation(cseg, page, line, suffix);
// check missing cseg file
if (cseg.Length() <= 0 && image.Length() > 0)
{
// recognize line
OcroFST fst = OcroFST.MakeOcroFst();
Intarray rseg = new Intarray();
linerec.RecognizeLine(rseg, fst, image);
// find best results
string resText;
Intarray inp = new Intarray();
Floatarray costs = new Floatarray();
double totalCost = BeamSearch.beam_search(out resText, inp, costs, fst, lmodel, 100);
Console.WriteLine(bookstore.PathFile(page, line, suffix));
Console.Write(" beam_search score: {0}", totalCost);
/*string resText2;
fst.BestPath(out resText2);*/
// write cseg to bookstore
string trans;
bookstore.GetLine(out trans, page, line, suffix);
resText = resText.Replace(" ", "");
if (String.IsNullOrEmpty(trans))
{
bookstore.PutLine(resText, page, line, suffix);
Console.Write("; transcript saved");
}
else if (trans == resText)
{
// convert inputs and rseg to cseg
SegmRoutine.rseg_to_cseg(cseg, rseg, inp);
bookstore.PutCharSegmentation(cseg, page, line, suffix);
Console.Write("; cseg saved");
}
else if (saveRseg)
{
// convert inputs and rseg to cseg
SegmRoutine.rseg_to_cseg(cseg, rseg, inp);
//SegmRoutine.remove_small_components(cseg, 4);
/*bookstore.PutCharSegmentation(cseg, page, line, suffix);
Console.Write("; cseg saved");*/
SegmRoutine.make_line_segmentation_white(cseg);
ImgLabels.simple_recolor(cseg);
string v = "rseg";
if (!String.IsNullOrEmpty(suffix)) { v += "."; v += suffix; }
string rsegpath = bookstore.PathFile(page, line, v, "png");
ImgIo.write_image_packed(rsegpath, cseg);
Console.Write("; rseg saved");
}
Console.WriteLine();
}
}
}
}
}
}
| nickun/OCRonet | Ocronet.Dynamic/Tests/TestLinerec.cs | C# | bsd-2-clause | 13,695 |
class Opusfile < Formula
desc "API for decoding and seeking in .opus files"
homepage "https://www.opus-codec.org/"
url "https://archive.mozilla.org/pub/opus/opusfile-0.11.tar.gz"
sha256 "74ce9b6cf4da103133e7b5c95df810ceb7195471e1162ed57af415fabf5603bf"
revision 1
bottle do
cellar :any
rebuild 1
sha256 "acd200760db74feb30ea28bdb14cdf8b3ebdeb5a65759e1095fad3f9583c3ef3" => :catalina
sha256 "44e1c4d26cac791ff40de7b15fb2718c6aaa99856a128c23a3c542a3132e2053" => :mojave
sha256 "7f83ce800aaa0dedb44b18332e1628e307bf83d693586ed6359b02e6ea21737e" => :high_sierra
end
head do
url "https://gitlab.xiph.org/xiph/opusfile.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
depends_on "pkg-config" => :build
depends_on "libogg"
depends_on "[email protected]"
depends_on "opus"
resource "music_48kbps.opus" do
url "https://www.opus-codec.org/static/examples/samples/music_48kbps.opus"
sha256 "64571f56bb973c078ec784472944aff0b88ba0c88456c95ff3eb86f5e0c1357d"
end
def install
system "./autogen.sh" if build.head?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
test do
testpath.install resource("music_48kbps.opus")
(testpath/"test.c").write <<~EOS
#include <opus/opusfile.h>
#include <stdlib.h>
int main(int argc, const char **argv) {
int ret;
OggOpusFile *of;
of = op_open_file(argv[1], &ret);
if (of == NULL) {
fprintf(stderr, "Failed to open file '%s': %i\\n", argv[1], ret);
return EXIT_FAILURE;
}
op_free(of);
return EXIT_SUCCESS;
}
EOS
system ENV.cc, "test.c", "-I#{Formula["opus"].include}/opus",
"-L#{lib}",
"-lopusfile",
"-o", "test"
system "./test", "music_48kbps.opus"
end
end
| edporras/homebrew-core | Formula/opusfile.rb | Ruby | bsd-2-clause | 2,016 |
cask 'opera-beta' do
version '50.0.2762.42'
sha256 '0ae6866beb0047a2aebd22df7d2638f2d95ad8c08227879905d0e96e1add2235'
url "https://get.geo.opera.com/pub/opera-beta/#{version}/mac/Opera_beta_#{version}_Setup.dmg"
name 'Opera Beta'
homepage 'https://www.opera.com/computer/beta'
auto_updates true
app 'Opera Beta.app'
end
| yurikoles/homebrew-versions | Casks/opera-beta.rb | Ruby | bsd-2-clause | 337 |
/*********************************************************************************
*
* Inviwo - Interactive Visualization Workshop
*
* Copyright (c) 2016 Inviwo Foundation
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*********************************************************************************/
#ifndef IVW_GLFWEXCEPTION_H
#define IVW_GLFWEXCEPTION_H
#include <modules/glfw/glfwmoduledefine.h>
#include <inviwo/core/common/inviwo.h>
#include <inviwo/core/util/exception.h>
namespace inviwo {
/**
 * \class GLFWException
 * \brief Exception used to signal errors from the GLFW windowing backend.
 */
class IVW_MODULE_GLFW_API GLFWException : public Exception {
public:
GLFWException(const std::string& message = "", ExceptionContext context = ExceptionContext());
virtual ~GLFWException() throw() {}
};
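/**
 * \class GLFWInitException
 * \brief Exception used to signal that initialization of the GLFW module failed.
 */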
class IVW_MODULE_GLFW_API GLFWInitException : public ModuleInitException {
public:
GLFWInitException(const std::string& message = "",
ExceptionContext context = ExceptionContext());
virtual ~GLFWInitException() throw() {}
};
} // namespace
#endif // IVW_GLFWEXCEPTION_H
| cgloger/inviwo | modules/glfw/glfwexception.h | C | bsd-2-clause | 2,327 |
# coding: utf8
from __future__ import unicode_literals
from flask import abort, make_response, request
from flask_api.decorators import set_renderers
from flask_api import exceptions, renderers, status, FlaskAPI
import json
import unittest
app = FlaskAPI(__name__)
app.config['TESTING'] = True
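# Two JSON renderers that differ only in the api-version media type parameter;
# the /accepted_media_type/ view below uses them to exercise content negotiation.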
class JSONVersion1(renderers.JSONRenderer):
media_type = 'application/json; api-version="1.0"'
class JSONVersion2(renderers.JSONRenderer):
media_type = 'application/json; api-version="2.0"'
@app.route('/set_status_and_headers/')
def set_status_and_headers():
headers = {'Location': 'http://example.com/456'}
return {'example': 'content'}, status.HTTP_201_CREATED, headers
@app.route('/set_headers/')
def set_headers():
headers = {'Location': 'http://example.com/456'}
return {'example': 'content'}, headers
@app.route('/make_response_view/')
def make_response_view():
response = make_response({'example': 'content'})
response.headers['Location'] = 'http://example.com/456'
return response
@app.route('/api_exception/')
def api_exception():
raise exceptions.PermissionDenied()
@app.route('/abort_view/')
def abort_view():
abort(status.HTTP_403_FORBIDDEN)
@app.route('/options/')
def options_view():
return {}
@app.route('/accepted_media_type/')
@set_renderers([JSONVersion2, JSONVersion1])
def accepted_media_type():
return {'accepted_media_type': str(request.accepted_media_type)}
class AppTests(unittest.TestCase):
def test_set_status_and_headers(self):
with app.test_client() as client:
response = client.get('/set_status_and_headers/')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.headers['Location'], 'http://example.com/456')
self.assertEqual(response.content_type, 'application/json')
expected = '{"example": "content"}'
self.assertEqual(response.get_data().decode('utf8'), expected)
def test_set_headers(self):
with app.test_client() as client:
response = client.get('/set_headers/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.headers['Location'], 'http://example.com/456')
self.assertEqual(response.content_type, 'application/json')
expected = '{"example": "content"}'
self.assertEqual(response.get_data().decode('utf8'), expected)
def test_make_response(self):
with app.test_client() as client:
response = client.get('/make_response_view/')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.headers['Location'], 'http://example.com/456')
self.assertEqual(response.content_type, 'application/json')
expected = '{"example": "content"}'
self.assertEqual(response.get_data().decode('utf8'), expected)
def test_api_exception(self):
with app.test_client() as client:
response = client.get('/api_exception/')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.content_type, 'application/json')
expected = '{"message": "You do not have permission to perform this action."}'
self.assertEqual(response.get_data().decode('utf8'), expected)
def test_abort_view(self):
with app.test_client() as client:
response = client.get('/abort_view/')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_options_view(self):
with app.test_client() as client:
response = client.options('/options/')
# Errors if `response.response` is `None`
response.get_data()
def test_accepted_media_type_property(self):
with app.test_client() as client:
# Explicitly request the "api-version 1.0" renderer.
headers = {'Accept': 'application/json; api-version="1.0"'}
response = client.get('/accepted_media_type/', headers=headers)
data = json.loads(response.get_data().decode('utf8'))
expected = {'accepted_media_type': 'application/json; api-version="1.0"'}
self.assertEqual(data, expected)
# Request the default renderer, which is "api-version 2.0".
headers = {'Accept': '*/*'}
response = client.get('/accepted_media_type/', headers=headers)
data = json.loads(response.get_data().decode('utf8'))
expected = {'accepted_media_type': 'application/json; api-version="2.0"'}
self.assertEqual(data, expected)
| theskumar-archive/flask-api | flask_api/tests/test_app.py | Python | bsd-2-clause | 4,692 |