text
stringlengths 2
6.14k
|
|---|
import { Injectable, OnInit } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Author } from '../../domain/author';
import { Observable } from 'rxjs';
import { DataService } from "./data.service";
// Data-access service for Author resources, backed by the generic
// DataService<T> REST helper.
// NOTE(review): OnInit is imported above but never implemented here —
// services do not take part in the component lifecycle; presumably a
// leftover import, confirm before removing.
@Injectable()
export class AuthorService extends DataService<Author> {
// NOTE(review): `private http` declares a field on this subclass that may
// shadow a field of the same name in DataService — confirm against the
// base class; the parameter is only needed to forward to super().
constructor(private http : HttpClient){
super(http);
// Point the generic base class at the "author" REST resource.
super.setResource("author");
}
}
|
<!DOCTYPE html>
<html>
<head>
<title>#ESC2017 Selection</title>
<style type="text/css">
html, body {
padding: 0;
margin: 0;
width: 100%;
font-family: sans-serif;
text-align: center;
}
</style>
</head>
<body>
<h1>The Fabulous<br/> Eurovision Song Contest 2017<br/> Pseudorandom Country Selector 3000 Super Alpha</h1>
<div style="height: 500px">
<h2 id="country_label">Pick a country</h2>
<img id="country_flag" src="img/random.png" style="display: none" alt="Country">
</div>
<button id="btn_rand">Pick a random country!</button>
<script type="text/javascript">
// Countries in the 2017 final; each needs a matching flag image at
// img/<name lowercased, spaces replaced with underscores>.png.
var countries = ['Armenia', 'Australia', 'Austria', 'Azerbaijan', 'Belarus', 'Belgium', 'Bulgaria', 'Croatia', 'Cyprus', 'Denmark', 'France', 'Germany', 'Greece', 'Hungary', 'Israel', 'Italy', 'Moldova', 'Norway', 'Poland', 'Portugal', 'Romania', 'Spain', 'Sweden', 'The Netherlands', 'Ukraine', 'United Kingdom'];
var n_countries = countries.length;
// Uniform pseudorandom integer in [0, max).
var randInt = function(max) {
  return Math.floor(Math.random() * max);
}
var btn = document.getElementById('btn_rand');
var h2 = document.getElementById('country_label');
var img = document.getElementById('country_flag');
btn.addEventListener('click', function(){
  var r = randInt(n_countries);
  var countryName = countries[r];
  // BUG FIX: String.prototype.replace with a *string* pattern replaces
  // only the FIRST occurrence; use a global regex so a country name with
  // more than one space still maps to the correct file name.
  var imagePath = 'img/' + countryName.toLowerCase().replace(/ /g, '_') + '.png';
  h2.innerText = countryName;
  img.src = imagePath;
  img.alt = countryName;
  img.style.display = "inline-block";
});
</script>
</body>
</html>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for Keychain password database parser."""
import unittest
from plaso.lib import definitions
from plaso.parsers import mac_keychain
from tests.parsers import test_lib
class MacKeychainParserTest(test_lib.ParserTestCase):
  """Tests for keychain file parser."""

  def testParse(self):
    """Tests the Parse function."""
    parser = mac_keychain.KeychainParser()
    storage_writer = self._ParseFile(['login.keychain'], parser)

    # The fixture keychain must parse without warnings into exactly 8 events.
    self.assertEqual(storage_writer.number_of_warnings, 0)
    self.assertEqual(storage_writer.number_of_events, 8)

    events = list(storage_writer.GetEvents())

    # Event 0: creation of the "Secret Application" password entry.
    expected_ssgp = (
        'b8e44863af1cb0785b89681d22e2721997ccfb8adb8853e726aff94c8830b05a')
    expected_event_values = {
        'account_name': 'moxilo',
        'data_type': 'mac:keychain:application',
        'entry_name': 'Secret Application',
        'ssgp_hash': expected_ssgp,
        'timestamp': '2014-01-26 14:51:48.000000',
        'timestamp_desc': definitions.TIME_DESCRIPTION_CREATION}
    self.CheckEventValues(storage_writer, events[0], expected_event_values)

    # Event 1: modification of the same application entry.
    expected_event_values = {
        'data_type': 'mac:keychain:application',
        'timestamp': '2014-01-26 14:52:29.000000',
        'timestamp_desc': definitions.TIME_DESCRIPTION_MODIFICATION}
    self.CheckEventValues(storage_writer, events[1], expected_event_values)

    # Event 2: a secure note entry; its SSGP payload hash is large and
    # spelled out here in full.
    expected_ssgp = (
        '72bd40987413638e081b8d1497573343f193ab4574c08f08cb618ca729488a68'
        '2fd9f179c2134ab89c2096a3f335eb61bf4377ca15209197c5ead3a775149db3'
        'c5a306d1a2db4f9c3c20949280892c994049a55e8323a7d51b9c51826057d743'
        'ced5f6fb23a2fea5de833fe49fbd92bf7a4d536d64cca1abf9ee09f92025e48e'
        '41331fbd7801d81f953a39b1d8c523cd0575834240e5e566b1aaf31b960dfd77'
        '4a180958f6c06e372ea0a8b211d3f9a1c207984b6e51c55904ddaf9ac12bc4bf'
        '255356178b07bfaa70de9ece90420f0a289b4a73f63c624d9e6a138b6dbb0559'
        '64641e7526167712f205b7333dec36063127c66fc1633c3c0aac6833b3487894'
        '8b2d45270ce754a07c8f06beb18d98ca9565fa7c57cca083804b8a96dfbf7d5f'
        '5c24c1c391f1a38ecf8d6919b21a398ce89bdffd0aa99eb60a3c4ad9c1d0d074'
        '143ad0e71d5986bf8bf13f166c61cff3bc384e3a79f6f6c57ed52fef2c66d267'
        'bab006e6e2495afb55162bf0b88111b2429c83bb7b59a54df745aa23055d7b0f'
        'd6c0543203397640b46109e1382441945447457461aa01edc75169f2b462d508'
        '7519957ab587e07203ad1377ad76255a5a64398fe329760951bd8bca7bbe8c2b'
        '4d8b987370a6c7eb05613051d19a4d0f588e05a1a51e43831a2b07b7d50a6379'
        '130f6fb2bbeab2b016899fca2e9d8e60429da995da9de0f326eb498212a6671f'
        '0125402cc387371f61775008fa049b446df792704880f45869dd4b7c89b77f58'
        '06fe61faf7e790e59ba02d5e8b8dd880313fc5921bee4d5e9d0a67c1c16f898d'
        'cc99cd403f63e652109ee4be4c28bbbf98cfe59047145f9fbface4ebf4f1ce1e'
        '4d7d108a689e2744b464ed539a305f3b40fe8248b93556d14810d70469457651'
        'c507af61bd692622f30996bfa8ac8aa29f712b6d035883ae37855e223de43a85'
        '9264ecea0f2b5b87396cb030edc79d1943eb53267137d1e5fbbe2fb74756ecb1'
        '58d8e81008453a537085d67feeddb155a8c3f76deecb02d003d8d16c487111c4'
        'b43518ec66902876aab00a5dcfd3cc6fc713a1b9bdba533d84bd7b4a3d9f778e'
        'd7ee477a53df012a778b2d833d2a18cb88b23ca69b0806bb38bd38fbbc78e261'
        '15a8b465ceaa8bfa8ecb97a446bc12434da6d2dd7355ec3c7297960f4b996e5b'
        '22e8f256c6094d7f2ed4f7c89c060faf')
    expected_event_values = {
        'data_type': 'mac:keychain:application',
        'entry_name': 'Secret Note',
        'ssgp_hash': expected_ssgp,
        'text_description': 'secure note',
        'timestamp': '2014-01-26 14:53:29.000000'}
    self.CheckEventValues(storage_writer, events[2], expected_event_values)

    # Event 4: an internet password entry (events[3] is intentionally not
    # checked here).
    expected_ssgp = (
        '83ccacf55a8cb656d340ec405e9d8b308fac54bb79c5c9b0219bd0d700c3c521')
    expected_event_values = {
        'account_name': 'MrMoreno',
        'data_type': 'mac:keychain:internet',
        'entry_name': 'plaso.kiddaland.net',
        'protocol': 'http',
        'ssgp_hash': expected_ssgp,
        'timestamp': '2014-01-26 14:54:33.000000',
        'type_protocol': 'dflt',
        'where': 'plaso.kiddaland.net'}
    self.CheckEventValues(storage_writer, events[4], expected_event_values)


if __name__ == '__main__':
  unittest.main()
|
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
MySQL Connector/Python - MySQL drive written in Python
"""
# Python DB API v2 (PEP 249) module-level attributes.
apilevel = '2.0'         # Supported DB API level.
threadsafety = 1         # Threads may share the module, but not connections.
paramstyle = 'pyformat'  # Python extended format codes, e.g. %(name)s.

from mysql.connector.connection import MySQLConnection
from mysql.connector.errors import (
    Error, Warning, InterfaceError, DatabaseError,
    NotSupportedError, DataError, IntegrityError, ProgrammingError,
    OperationalError, InternalError, custom_error_exception)
from mysql.connector.constants import (FieldFlag, FieldType, CharacterSet,
                                       RefreshOption, ClientFlag)
from mysql.connector.dbapi import *


def Connect(*args, **kwargs):
    """Shortcut for creating a connection.MySQLConnection object.

    All positional and keyword arguments are forwarded unchanged to the
    MySQLConnection constructor.
    """
    return MySQLConnection(*args, **kwargs)

# PEP 249 requires the factory to be called `connect`; `Connect` is kept
# as the historical alias.
connect = Connect
__all__ = [
'MySQLConnection', 'Connect', 'custom_error_exception',
# Some useful constants
'FieldType','FieldFlag','ClientFlag','CharacterSet','RefreshOption',
# Error handling
'Error','Warning',
'InterfaceError','DatabaseError',
'NotSupportedError','DataError','IntegrityError','ProgrammingError',
'OperationalError','InternalError',
# DBAPI PEP 249 required exports
'connect','apilevel','threadsafety','paramstyle',
'Date', 'Time', 'Timestamp', 'Binary',
'DateFromTicks', 'DateFromTicks', 'TimestampFromTicks',
'STRING', 'BINARY', 'NUMBER',
'DATETIME', 'ROWID',
]
|
#include "ir_regex.h"
#include "atoms.h"
// Regex#initialize(pattern): compiles and studies the PCRE pattern and
// stores the compiled state on the receiver. Raises ArgumentError when the
// pattern cannot be compiled or studied.
iridium_method(Regex, initialize) {
  object self = local(self);
  object pattern = local(pattern);
  char * err = NULL;
  int err_offset;
  const int INFO_CAPTURECOUNT = 2; // matches PCRE_INFO_CAPTURECOUNT
  int captures;
  pcre_extra * extra;
  pcre * code;
  // Compile and get information about the regex
  code = pcre_compile(C_STRING(context, pattern), 0, (const char **) &err, &err_offset, NULL);
  // pcre_compile returns NULL on error and sets err and err_offset
  if (code == NULL) {
    object reason = IR_STRING("could not compile '");
    send(reason, "__add__", pattern);
    send(reason, "__add__", IR_STRING("': "));
    send(reason, "__add__", IR_STRING(err));
    // BUG FIX: err_offset is set by pcre_compile, not pcre_study, so the
    // offset belongs in THIS message (it was previously appended to the
    // study error, where it is never set).
    send(reason, "__add__", IR_STRING(" at "));
    send(reason, "__add__", _send(context, FIXNUM(err_offset), "to_s", 0));
    handleException(context, send(CLASS(ArgumentError), "new", reason));
  }
  err = NULL;
  extra = pcre_study(code, 0, (const char **) &err);
  // BUG FIX: pcre_study legitimately returns NULL when there is no study
  // data to add; failure is signalled by a non-NULL error string instead,
  // so test `err`, not the return value.
  if (err != NULL) {
    object reason = IR_STRING("could not study '");
    send(reason, "__add__", pattern);
    send(reason, "__add__", IR_STRING("': "));
    send(reason, "__add__", IR_STRING(err));
    handleException(context, send(CLASS(ArgumentError), "new", reason));
  }
  pcre_fullinfo(code, NULL, INFO_CAPTURECOUNT, &captures);
  // Store regex info into the object
  send(self, "__set__", L_ATOM(pattern), pattern);
  internal_set_attribute(self, L_ATOM(code), code);
  internal_set_attribute(self, L_ATOM(extra), extra);
  internal_set_integral(self, L_ATOM(captures), captures);
  return NIL;
}
// Regex#match(str, pos_or_fn = 0, fun = identity): runs the compiled
// pattern against `str` starting at `pos`. On a match, calls `fun` with a
// MatchData object and returns its result; otherwise returns nil.
iridium_method(Regex, match) {
  object self = local(self);
  object str = local(str);
  object pos_or_fn = local(pos_or_fn);
  object fun = local(fun);
  object pos;
  // The second argument may be either a start position or the callback.
  if (isA(pos_or_fn, CLASS(Integer))) {
    pos = pos_or_fn;
  } else if (isA(pos_or_fn, CLASS(Function))) {
    fun = pos_or_fn;
    pos = FIXNUM(0);
  } else {
    handleException(context, send(CLASS(ArgumentError), "new", IR_STRING("pos must be an integer")));
    return NIL;
  }
  pcre * code = internal_get_attribute(self, L_ATOM(code), pcre *);
  pcre_extra * extra = internal_get_attribute(self, L_ATOM(extra), pcre_extra *);
  int captures = internal_get_integral(self, L_ATOM(captures), size_t);
  // pcre_exec requires 3 ints of ovector space per group (whole match + captures).
  size_t ovector_size = (captures + 1) * 3;
  // BUG FIX: GC_MALLOC takes a size in BYTES; the previous code allocated
  // only ovector_size bytes for ovector_size ints, so pcre_exec wrote past
  // the end of the buffer.
  int * ovector = GC_MALLOC(ovector_size * sizeof(int));
  int ret;
  object matchdata;
  assert(ovector);
  // A start position beyond the end of the string can never match.
  if (INT(pos) > INT(send(str, "size"))) {
    return NIL;
  }
  const char * cstr = C_STRING(context, str);
  ret = pcre_exec(code, extra, cstr, strlen(cstr), INT(pos), 0, ovector, ovector_size);
  if (ret >= 0) {
    matchdata = create_matchdata(self, code, str, INT(pos), ovector, captures);
    return calls(context, fun, array_push(array_new(), matchdata));
  } else {
    // Negative return: no match (or a PCRE error); both yield nil here.
    return NIL;
  }
}
// new and initialize are NOT defined on MatchData since it needs to be passed non-iridium objects
// Builds a MatchData object directly (bypassing new/initialize), wiring in
// both the public iridium-visible attributes and the internal C-only state.
object create_matchdata(object regex, pcre * code, object string, int pos, int * ovector, int captures) {
  object matchdata = construct(CLASS(MatchData));
  // Attributes visible to iridium code.
  set_attribute(matchdata, L_ATOM(regex), PUBLIC, regex);
  set_attribute(matchdata, L_ATOM(string), PUBLIC, string);
  // Internal state: compiled pattern, capture offsets, start pos, group count.
  internal_set_attribute(matchdata, L_ATOM(code), code);
  internal_set_attribute(matchdata, L_ATOM(ovector), ovector);
  internal_set_integral(matchdata, L_ATOM(pos), pos);
  internal_set_integral(matchdata, L_ATOM(captures), captures);
  return matchdata;
}
// MatchData#[](index): returns the matched text of a capture group selected
// either by group number (Integer; 0 is the whole match) or by group name
// (String). Returns nil for a group that did not participate in the match
// or for an out-of-range index.
iridium_method(MatchData, __get_index__) {
  object self = local(self);
  object index = local(index);
  pcre * code;
  int idx = -1;
  object string = get_attribute(self, L_ATOM(string), PUBLIC);
  int * ovector = internal_get_attribute(self, L_ATOM(ovector), int *);
  int start;
  int finish;
  char * str = C_STRING(context, string);
  char * match;
  if (isA(index, CLASS(Integer))) {
    idx = INT(index);
  } else if (isA(index, CLASS(String))) {
    // Named group: translate the name to its group number.
    // NOTE(review): pcre_get_stringnumber returns a negative error code
    // for an unknown name, so idx may be negative past this point —
    // validGroup below is relied on to reject it.
    code = internal_get_attribute(self, L_ATOM(code), pcre *);
    idx = pcre_get_stringnumber(code, C_STRING(context, index));
  } else {
    handleException(context, send(CLASS(ArgumentError), "new", IR_STRING("index must be a Integer or String")));
  }
  if (validGroup(self, idx)) {
    // ovector stores (start, end) byte-offset pairs per group.
    start = ovector[idx * 2];
    finish = ovector[idx * 2 + 1];
    if (start < 0) {
      // Group did not participate in the match.
      return NIL;
    }
    match = GC_MALLOC((finish - start + 1) * sizeof(char));
    strncpy(match, str + start, finish - start);
    // Ensure the last character is the null terminator
    match[finish - start] = 0;
    return IR_STRING(match);
  }
  return NIL;
}
// Returns nonzero when `group` is a valid capture-group index for this
// MatchData: 0 is the whole match, 1..captures are the capture groups.
int validGroup(object match_data, int group) {
  int captures = internal_get_integral(match_data, L_ATOM(captures), int);
  // BUG FIX: also reject negative indices — __get_index__ passes a
  // negative value when the index argument was invalid or when
  // pcre_get_stringnumber found no such named group, and a negative index
  // would read before the start of the ovector.
  return group >= 0 && group <= captures;
}
// Default callback for Regex#match: the identity function over its single
// `match` argument, so match() returns the MatchData itself when the caller
// supplies no function.
iridium_method(Lambda, match_default_fn) {
  return local(match);
}
// Registers the Regex class (and its nested Regex.MatchData class) with the
// iridium runtime and installs their methods.
void IR_init_Regex(struct IridiumContext * context)
{
  // Default `fun` argument for Regex#match: returns its `match` argument.
  object match_lambda = FUNCTION(L_ATOM(lambda), ARGLIST(argument_new(L_ATOM(match), NULL, 0)), dict_new(ObjectHashsize), iridium_method_name(Lambda, match_default_fn));
  CLASS(Regex) = send(CLASS(Class), "new", IR_STRING("Regex"));
  DEF_METHOD(CLASS(Regex), "initialize", ARGLIST(argument_new(L_ATOM(pattern), NULL, 0)), iridium_method_name(Regex, initialize));
  // Signature: match(str, pos_or_fn = 0, fun = match_lambda)
  DEF_METHOD(CLASS(Regex), "match", ARGLIST(argument_new(L_ATOM(str), NULL, 0), argument_new(L_ATOM(pos_or_fn), FIXNUM(0), 0), argument_new(L_ATOM(fun), match_lambda, 0)), iridium_method_name(Regex, match));
  CLASS(MatchData) = send(CLASS(Class), "new", IR_STRING("Regex.MatchData"));
  DEF_METHOD(CLASS(MatchData), "__get_index__", ARGLIST(argument_new(L_ATOM(index), NULL, 0)), iridium_method_name(MatchData, __get_index__));
  // MatchData cannot be constructed from iridium code; instances are only
  // built in C via create_matchdata.
  no_attribute(CLASS(MatchData), L_ATOM(new));
  no_instance_attribute(CLASS(MatchData), L_ATOM(initialize));
  define_constant(L_ATOM(Regex), CLASS(Regex));
}
|
# -*- coding: utf-8 -
#
# This file is part of epygments available under the public domain or
# MIT license. See the NOTICE for more information.
from erlport import Port, Protocol, String
from pygments import highlight, lexers, formatters
class SimpleFormatter(formatters.HtmlFormatter):
    # HtmlFormatter variant that emits only the highlighted code tokens,
    # without the surrounding wrapper markup normally added by wrap().
    def wrap(self, source, outfile):
        return self._wrap_code(source)

    def _wrap_code(self, source):
        # `l` counts emitted source-code tokens (type 1); its value is not
        # used — presumably kept for parity with the upstream
        # HtmlFormatter implementation.
        l = 0
        for i, t in source:
            if i == 1:
                l += 1
            yield i, t
class HighlightProtocol(Protocol):
    """erlport protocol exposing pygments highlighting to the Erlang side."""

    def handle_all_languages(self):
        """Return (alias, display name) pairs for every lexer, 'text' first."""
        all_lexers = list(lexers.get_all_lexers())
        ret = []
        for l in all_lexers:
            if l[1][0] != "text":
                ret.append((l[1][0], l[0],))
        ret.sort()
        return [('text', 'Plain text',)] + ret

    def handle_highlight(self, code, lexer_name, formatter_name, options):
        """Highlight `code` with the named lexer and formatter.

        Unknown lexer names fall back to plain text. `options` is an
        iterable of key/value pairs forwarded to the formatter constructor.
        """
        try:
            lexer = lexers.get_lexer_by_name(lexer_name)
        except Exception:
            # Unknown lexer name: fall back to plain text rather than fail.
            lexer = lexers.get_lexer_by_name('text')
        if formatter_name == "SimpleFormatter":
            formatter_class = SimpleFormatter
        else:
            formatter_class = getattr(formatters, formatter_name)
        # BUG FIX: formatter_class was computed but then ignored —
        # SimpleFormatter was instantiated unconditionally, so the
        # formatter_name argument had no effect.
        formatter = formatter_class(**dict(options))
        return highlight(code, lexer, formatter)
if __name__ == "__main__":
    proto = HighlightProtocol()
    # Run protocol with port open on STDIO: 4-byte length-prefixed packets,
    # compressed transport.
    proto.run(Port(packet=4, use_stdio=True, compressed=True))
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Public News</title>
<!-- Fonts -->
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.4.0/css/font-awesome.min.css" rel='stylesheet' type='text/css'>
<link href="https://fonts.googleapis.com/css?family=Lato:100,300,400,700" rel='stylesheet' type='text/css'>
<!-- Styles -->
<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" rel="stylesheet">
<link rel="alternate" type="application/rss+xml" href="{{ url('rss') }}" title="RSS Feed {{ config('blog.title') }}"/>
<link href="{{ URL::asset('css/app.css') }}" rel="stylesheet">
<style>
body {
font-family: 'Lato';
}
.fa-btn {
margin-right: 6px;
}
</style>
</head>
<body id="app-layout">
<nav class="navbar navbar-default navbar-static-top">
<div class="container">
<div class="navbar-header">
<!-- Branding Image -->
<a class="navbar-brand" href="{{ url('/') }}">
My News
</a>
</div>
<div class="collapse navbar-collapse" id="app-navbar-collapse">
<!-- Left Side Of Navbar -->
<!-- Right Side Of Navbar -->
<ul class="nav navbar-nav navbar-right">
<!-- Authentication Links -->
@if (Auth::guest())
<li><a href="{{ url('/login') }}">Login</a></li>
<li><a href="{{ url('/register') }}">Register</a></li>
@else
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-expanded="false">
{{ Auth::user()->name }} <span class="caret"></span>
</a>
<ul class="dropdown-menu" role="menu">
<li><a href="{{ url('/logout') }}"><i class="fa fa-btn fa-sign-out"></i>Logout</a></li>
</ul>
</li>
@endif
</ul>
</div>
</div>
</nav>
@yield('content')
<!-- JavaScripts -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js"></script>
@yield('javascript')
{{-- <script src="{{ elixir('js/app.js') }}"></script> --}}
</body>
</html>
|
<!DOCTYPE html>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<html>
<head>
<!--
Customize this policy to fit your own app's needs. For more guidance, see:
https://github.com/apache/cordova-plugin-whitelist/blob/master/README.md#content-security-policy
Some notes:
* gap: is required only on iOS (when using UIWebView) and is needed for JS->native communication
* https://ssl.gstatic.com is required only on Android and is needed for TalkBack to function properly
* Disables use of inline scripts in order to mitigate risk of XSS vulnerabilities. To change this:
* Enable inline JS: add 'unsafe-inline' to default-src
-->
<meta http-equiv="Content-Security-Policy" content="default-src 'self' data: gap: https://ssl.gstatic.com 'unsafe-eval'; style-src 'self' 'unsafe-inline'; media-src *">
<meta name="format-detection" content="telephone=no">
<meta name="msapplication-tap-highlight" content="no">
<meta name="viewport" content="user-scalable=no, initial-scale=1, maximum-scale=1, minimum-scale=1, width=device-width">
<link rel="stylesheet" type="text/css" href="css/index.css">
</head>
<body>
<div class="app">
<div class="menu">
<a href="svg.html" class="button">SVG</a>
<a href="web-speech-api.html" class="button">Web Speech API</a>
<a href="speech-gestures.html" class="button">Gestures + speech</a>
</div>
<h1>Speech synthesiser</h1>
<button id="speak">Speak</button>
</div>
<script type="text/javascript" src="cordova.js"></script>
<script type="text/javascript" src="js/SpeechSynthesis.js"></script>
</body>
</html>
|
'''
Ultimate Whitecream
Copyright (C) 2015 mortael
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib, urllib2, re, cookielib, os.path, sys, socket
import xbmc, xbmcplugin, xbmcgui, xbmcaddon
import json
import utils
# from youtube-dl
from compat import (
compat_chr,
compat_ord,
compat_urllib_parse_unquote,
)
# Shared add-on helpers re-exported for local use.
dialog = utils.dialog
addon = utils.addon

# Plugin mode numbers dispatched to the functions below:
# 80 BGMain
# 81 BGList
# 82 BGPlayvid
# 83 BGCat
# 84 BGSearch
def BGVersion():
    """Detect the current beeg.com player version and cache version + salt.

    Scrapes the site front page for the cpl/<version>.js script reference;
    when the version differs from the cached one, downloads that script and
    stores the embedded beeg_salt (used by decrypt_key) in the add-on
    settings.
    """
    bgpage = utils.getHtml('http://beeg.com','')
    bgversion = re.compile(r"cpl/(\d+)\.js", re.DOTALL | re.IGNORECASE).findall(bgpage)[0]
    bgsavedversion = addon.getSetting('bgversion')
    # IDIOM FIX: '<>' is the long-deprecated Python 2 spelling of '!='.
    if bgversion != bgsavedversion:
        addon.setSetting('bgversion',bgversion)
        bgjspage = utils.getHtml('http://static.beeg.com/cpl/'+bgversion+'.js','http://beeg.com')
        bgsalt = re.compile('beeg_salt="([^"]+)"', re.DOTALL | re.IGNORECASE).findall(bgjspage)[0]
        addon.setSetting('bgsalt',bgsalt)
def BGMain():
    # Top-level directory: refresh the cached version/salt, then list the
    # Categories and Search entries followed by the first page of videos.
    BGVersion()
    bgversion = addon.getSetting('bgversion')
    utils.addDir('[COLOR hotpink]Categories[/COLOR]','http://api2.beeg.com/api/v6/'+bgversion+'/index/main/0/pc',83,'','')
    utils.addDir('[COLOR hotpink]Search[/COLOR]','http://api2.beeg.com/api/v6/'+bgversion+'/index/main/0/pc?query=',84,'','')
    BGList('http://api2.beeg.com/api/v6/'+bgversion+'/index/main/0/pc')
    xbmcplugin.endOfDirectory(utils.addon_handle)
def BGList(url):
    # Render one API result page: one playable entry per video, plus a
    # "Next Page" link when the JSON reports further pages.
    bgversion = addon.getSetting('bgversion')
    listjson = utils.getHtml(url,'')
    match = re.compile(r'\{"title":"([^"]+)","id":"([^"]+)"', re.DOTALL | re.IGNORECASE).findall(listjson)
    for title, videoid in match:
        img = "http://img.beeg.com/236x177/" + videoid + ".jpg"
        videopage = "https://api.beeg.com/api/v6/"+bgversion+"/video/" + videoid
        name = title.encode("utf8")
        utils.addDownLink(name, videopage, 82, img, '')
    # Pagination is best-effort: any parse failure simply omits the link.
    try:
        page=re.compile('http://api2.beeg.com/api/v6/'+bgversion+'/index/[^/]+/([0-9]+)/pc', re.DOTALL | re.IGNORECASE).findall(url)[0]
        page = int(page)
        npage = page + 1
        jsonpage = re.compile(r'pages":(\d+)', re.DOTALL | re.IGNORECASE).findall(listjson)[0]
        if int(jsonpage) > page:
            nextp = url.replace("/"+str(page)+"/", "/"+str(npage)+"/")
            utils.addDir('Next Page ('+str(npage)+')', nextp,81,'')
    except: pass
    xbmcplugin.endOfDirectory(utils.addon_handle)
# from youtube-dl
def split(o, e):
    """Split string ``o`` into chunks of length ``e``.

    When ``len(o)`` is not a multiple of ``e``, the FIRST chunk holds the
    remainder; every following chunk is exactly ``e`` characters long.
    The final slice is always appended, even when it is empty.
    """
    chunks = []
    remainder = len(o) % e
    if remainder > 0:
        chunks.append(o[:remainder])
        o = o[remainder:]
    while len(o) > e:
        chunks.append(o[:e])
        o = o[e:]
    chunks.append(o)
    return chunks
def decrypt_key(key):
    """Decrypt a beeg.com video URL key.

    Reverse engineered from http://static.beeg.com/cpl/1738.js: each
    character of the URL-unquoted key is shifted down by the corresponding
    salt character's code modulo 21, then the result is split into 3-char
    chunks whose order is reversed.
    """
    bgsalt = addon.getSetting('bgsalt')
    a = bgsalt
    e = compat_urllib_parse_unquote(key)
    o = ''.join([
        compat_chr(compat_ord(e[n]) - compat_ord(a[n % len(a)]) % 21)
        for n in range(len(e))])
    return ''.join(split(o, 3)[::-1])
##
def BGPlayvid(url, name, download=None):
    # Resolve the stream URL for a video page, preferring the highest
    # quality available (720p over 480p over 240p), decrypt the URL key,
    # then either download the file or hand it to the player.
    videopage = utils.getHtml(url,'http://beeg.com')
    videopage = json.loads(videopage)
    # Later assignments overwrite earlier ones, so 720p wins when present.
    if not videopage["240p"] == None:
        url = videopage["240p"].encode("utf8")
    if not videopage["480p"] == None:
        url = videopage["480p"].encode("utf8")
    if not videopage["720p"] == None:
        url = videopage["720p"].encode("utf8")
    url = url.replace("{DATA_MARKERS}","data=pc_XX")
    if not url.startswith("http:"): url = "https:" + url
    key = re.compile("/key=(.*?)%2Cend", re.DOTALL | re.IGNORECASE).findall(url)[0]
    decryptedkey = decrypt_key(key)
    videourl = url.replace(key, decryptedkey)
    if download == 1:
        utils.downloadVideo(videourl, name)
    else:
        iconimage = xbmc.getInfoImage("ListItem.Thumb")
        listitem = xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
        listitem.setInfo('video', {'Title': name, 'Genre': 'Porn'})
        listitem.setProperty("IsPlayable","true")
        # NOTE(review): sys.argv[1] == -1 presumably means no plugin handle
        # is available, so play via a playlist instead of resolving — confirm
        # against the add-on router.
        if int(sys.argv[1]) == -1:
            pl = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
            pl.clear()
            pl.add(videourl, listitem)
            xbmc.Player().play(pl)
        else:
            listitem.setPath(str(videourl))
            xbmcplugin.setResolvedUrl(utils.addon_handle, True, listitem)
def BGCat(url):
    # List every tag from the "nonpopular" array of the category JSON as a
    # browsable directory entry.
    bgversion = addon.getSetting('bgversion')
    caturl = utils.getHtml2(url)
    tags = re.compile(r'"nonpopular":\[(.*?)\]', re.DOTALL | re.IGNORECASE).findall(caturl)[0]
    tags = re.compile('"([^"]+)"', re.DOTALL | re.IGNORECASE).findall(tags)
    for tag in tags:
        videolist = "http://api2.beeg.com/api/v6/"+bgversion+"/index/tag/0/mobile?tag=" + tag.encode("utf8")
        name = tag.encode("utf8")
        # Capitalize only the first letter for display.
        name = name[:1].upper() + name[1:]
        utils.addDir(name, videolist, 81, '')
    xbmcplugin.endOfDirectory(utils.addon_handle)
def BGSearch(url, keyword=None):
    # Without a keyword: open the search-input dialog (mode 84 re-enters
    # this function with the keyword set). With one: append it to the
    # query URL and list the results.
    searchUrl = url
    if not keyword:
        utils.searchDir(url, 84)
    else:
        title = keyword.replace(' ','+')
        searchUrl = searchUrl + title
        print "Searching URL: " + searchUrl
        BGList(searchUrl)
|
""" Cisco_IOS_XR_mpls_oam_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR mpls\-oam package configuration.
This module contains definitions
for the following management objects\:
mpls\-oam\: MPLS LSP verification configuration
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
# Generated YANG model binding for the Cisco-IOS-XR-mpls-oam-cfg module
# (see the module docstring above); hand edits are limited to comments.
class MplsOam(object):
    """
    MPLS LSP verification configuration
    .. attribute:: disable_vendor_extension
    Disable vendor extension
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: enable_oam
    Enable/Disable MPLS OAM globally.Without creating this object the MPLS OAM feature will not be enabled. Deleting this object will stop the MPLS OAM feature
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: reply_mode
    Echo request reply mode attributes
    **type**\: :py:class:`ReplyMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_mpls_oam_cfg.MplsOam.ReplyMode>`
    """

    # YANG module prefix and revision used when building model paths.
    _prefix = 'mpls-oam-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        self.disable_vendor_extension = None
        self.enable_oam = None
        self.reply_mode = MplsOam.ReplyMode()
        self.reply_mode.parent = self

    class ReplyMode(object):
        """
        Echo request reply mode attributes
        .. attribute:: control_channel
        Configure control channel reply mode
        **type**\: :py:class:`ControlChannel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_mpls_oam_cfg.MplsOam.ReplyMode.ControlChannel>`
        """

        _prefix = 'mpls-oam-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.control_channel = MplsOam.ReplyMode.ControlChannel()
            self.control_channel.parent = self

        class ControlChannel(object):
            """
            Configure control channel reply mode
            .. attribute:: allow_reverse_lsp
            Use Reverse LSP as the control channel
            **type**\: :py:class:`Empty<ydk.types.Empty>`
            """

            _prefix = 'mpls-oam-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.allow_reverse_lsp = None

            @property
            def _common_path(self):
                # Absolute path of this node in the model tree.
                return '/Cisco-IOS-XR-mpls-oam-cfg:mpls-oam/Cisco-IOS-XR-mpls-oam-cfg:reply-mode/Cisco-IOS-XR-mpls-oam-cfg:control-channel'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                # True when any leaf of this node has been set.
                if self.allow_reverse_lsp is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_mpls_oam_cfg as meta
                return meta._meta_table['MplsOam.ReplyMode.ControlChannel']['meta_info']

        @property
        def _common_path(self):
            # Absolute path of this node in the model tree.
            return '/Cisco-IOS-XR-mpls-oam-cfg:mpls-oam/Cisco-IOS-XR-mpls-oam-cfg:reply-mode'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when this node or any child node has been set.
            if self.control_channel is not None and self.control_channel._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_mpls_oam_cfg as meta
            return meta._meta_table['MplsOam.ReplyMode']['meta_info']

    @property
    def _common_path(self):
        # Absolute path of the model root.
        return '/Cisco-IOS-XR-mpls-oam-cfg:mpls-oam'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any leaf or child container has been set.
        if self.disable_vendor_extension is not None:
            return True
        if self.enable_oam is not None:
            return True
        if self.reply_mode is not None and self.reply_mode._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_mpls_oam_cfg as meta
        return meta._meta_table['MplsOam']['meta_info']
|
import os
def _default_mpi_comm():
try:
from mpi4py import MPI
except ImportError:
return None
else:
return MPI.COMM_WORLD
def twoints(v):
    """Coerce the first two elements of a sequence into a tuple of two ints."""
    first, second = v[0], v[1]
    return int(first), int(second)
def loglevel(v):
    """Validate a log-level string, returning it unchanged when recognized.

    Raises ValueError for anything outside the known set of levels.
    """
    loglevels = ('trace', 'debug', 'info', 'warning', 'error')
    if v in loglevels:
        return v
    raise ValueError('loglevel must be one of %r' % (loglevels,))
def parse_bool(obj):
    """Interpret the strings '1'/'true' (case-insensitive) as True.

    Non-string inputs are simply passed through bool().
    """
    if isinstance(obj, str):
        return obj.lower() in {'1', 'true'}
    return bool(obj)
# Registry of runtime settings as (name, coercion callable or None, default)
# triples. Defaults may be overridden through VEROS_* environment variables;
# the callable (if any) is applied whenever the setting is assigned.
AVAILABLE_SETTINGS = (
    # (name, type, default)
    ('backend', str, os.environ.get('VEROS_BACKEND', 'numpy')),
    ('linear_solver', str, os.environ.get('VEROS_LINEAR_SOLVER', 'best')),
    ('num_proc', twoints, (1, 1)),
    ('profile_mode', parse_bool, os.environ.get('VEROS_PROFILE_MODE', '')),
    ('loglevel', loglevel, os.environ.get('VEROS_LOGLEVEL', 'info')),
    ('mpi_comm', None, _default_mpi_comm()),
    ('log_all_processes', parse_bool, os.environ.get('VEROS_LOG_ALL_PROCESSES', ''))
)
class RuntimeSettings:
    # Mutable global settings container. After __init__ the attribute set
    # is frozen (via the __locked__ flag): unknown names raise, and every
    # assignment is coerced through the setting's registered type.
    def __init__(self):
        # Unlock while populating; __setattr__ passes everything through
        # until __locked__ flips to True at the end.
        self.__locked__ = False
        self.__setting_types__ = {}
        for setting, typ, default in AVAILABLE_SETTINGS:
            setattr(self, setting, default)
            self.__setting_types__[setting] = typ
        self.__settings__ = set(self.__setting_types__.keys())
        self.__locked__ = True

    def __setattr__(self, attr, val):
        if attr == '__locked__' or not self.__locked__:
            return super(RuntimeSettings, self).__setattr__(attr, val)
        # prevent adding new settings
        if attr not in self.__settings__:
            raise AttributeError('Unknown runtime setting %s' % attr)
        # coerce type
        stype = self.__setting_types__.get(attr)
        if stype is not None:
            val = stype(val)
        return super(RuntimeSettings, self).__setattr__(attr, val)

    def __repr__(self):
        setval = ', '.join(
            '%s=%s' % (key, repr(getattr(self, key))) for key in self.__settings__
        )
        return '{clsname}({setval})'.format(
            clsname=self.__class__.__name__,
            setval=setval
        )
class RuntimeState:
    """Unifies attributes from various modules in a simple read-only object"""
    # Empty __slots__ plus the __setattr__ override below make instances
    # effectively immutable.
    __slots__ = []

    @property
    def proc_rank(self):
        # MPI rank of this process; 0 when running without MPI.
        from . import runtime_settings
        comm = runtime_settings.mpi_comm
        if comm is None:
            return 0
        return comm.Get_rank()

    @property
    def proc_num(self):
        # Total number of MPI processes; 1 when running without MPI.
        from . import runtime_settings
        comm = runtime_settings.mpi_comm
        if comm is None:
            return 1
        return comm.Get_size()

    @property
    def proc_idx(self):
        # This process's index as computed by the distributed module —
        # presumably its position in the processor grid.
        from . import distributed
        return distributed.proc_rank_to_index(self.proc_rank)

    @property
    def backend_module(self):
        # The active numerical backend module, resolved from settings.
        from . import backend, runtime_settings
        return backend.get_backend(runtime_settings.backend)

    @property
    def vector_engine(self):
        from . import backend
        return backend.get_vector_engine(self.backend_module)

    def __setattr__(self, attr, val):
        # Enforce read-only semantics for all attributes.
        raise TypeError('Cannot modify runtime state objects')
|
import { Component, EventEmitter, Input, Output, OnInit, OnDestroy } from '@angular/core';
import { Cohort } from '../shared/models/cohort.class';
import { Chart } from '../shared/graph/chart.class';
import { Hotkey } from 'angular2-hotkeys';
import { KeyboardShortcutHelpService } from '../shared/keyboard-shortcut-help/keyboard-shortcut-help.service';
/**
 * Summary view shown once the probabilities-of-death form is submitted.
 * Registers keyboard shortcuts when the view is entered and releases them
 * again on go-back or when the component is destroyed.
 */
@Component({
  moduleId: module.id,
  selector: 'probabilities-of-death-summary',
  templateUrl: 'probabilities-of-death-summary.component.html',
})
export class ProbabilitiesOfDeathSummaryComponent implements OnInit, OnDestroy {
  @Input() MIN_AGE: number;
  @Input() MAX_AGE: number;
  @Input() year: number;
  @Input() sex: string;
  @Input() cohorts: Cohort[];
  @Input() chart: Chart;
  /** Emits `false` when the user navigates back to the form. */
  @Output() submittedChange = new EventEmitter<boolean>();

  private hotkeys: Hotkey[];

  constructor(private keyboardShortcutHelpService: KeyboardShortcutHelpService){}

  ngOnInit(): void {
    this.hotkeys = this.buildHotkeys();
    // NOTE(review): "createCheetSheet" is the service's own (misspelled)
    // API name — renaming it would require changing the service too.
    this.keyboardShortcutHelpService.createCheetSheet(this.hotkeys);
  }

  /**
   * Builds the hotkey bindings shown in the shortcut cheat sheet. Each
   * callback returns false to suppress the browser's default handling.
   *
   * BUG FIX (text): corrected the user-visible descriptions —
   * "Probabitilies" -> "Probabilities" and "Maxmimum" -> "Maximum".
   */
  buildHotkeys(): Hotkey[] {
    return [
      new Hotkey('backspace', (): boolean => {
        this.goBack();
        return false;
      }, ["BUTTON"], "Go Back to Probabilities of Death Form"),
      new Hotkey('alt+shift+l', (): boolean => {
        document.getElementById("minAgeInput").focus();
        return false;
      }, ["INPUT"], "Focus on 'Minimum Age' input box"),
      new Hotkey('alt+shift+h', (): boolean => {
        document.getElementById("maxAgeInput").focus();
        return false;
      }, ["INPUT"], "Focus on 'Maximum Age' input box")
    ];
  }

  /** Returns to the form: scrolls to top, unregisters hotkeys, emits false. */
  goBack(): void {
    window.scrollTo(0, 0);
    this.keyboardShortcutHelpService.reset(this.hotkeys);
    this.submittedChange.emit(false);
  }

  ngOnDestroy(): void {
    this.keyboardShortcutHelpService.reset(this.hotkeys);
  }
}
|
// Copyright (c) 2011-2016 The Cryptonote developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#pragma once
#include "IInputStream.h"
namespace Common {
// Read-only stream over a caller-owned, fixed-size memory buffer.
// The stream stores the pointer as-is (no copy), so the buffer must
// outlive the stream.
class MemoryInputStream : public IInputStream {
public:
  MemoryInputStream(const void* buffer, size_t bufferSize);
  // Presumably the current read offset from the buffer start — implementation
  // not visible here; confirm against the .cpp.
  size_t getPosition() const;
  bool endOfStream() const;

  // IInputStream
  virtual size_t readSome(void* data, size_t size) override;

private:
  const char* buffer;   // start of the wrapped memory (not owned)
  size_t bufferSize;    // total buffer size in bytes
  size_t position;      // next read offset
};
}
|
// stdafx.cpp : source file that includes just the standard includes
// TestConsoleW32.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information
#include "stdafx.h"
// TODO: reference any additional headers you need in STDAFX.H
// and not in this file
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_geopunt4QgisAbout.ui'
#
# Created: Thu Nov 20 18:45:01 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims emitted by pyuic4: fall back gracefully on PyQt
# builds where QString/UnicodeUTF8 are unavailable (API v2 / newer Qt).
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2: strings are already unicode, pass them through unchanged.
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer translate() signature without the encoding argument.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_aboutDlg(object):
    """Auto-generated (pyuic4) UI class for the geopunt4Qgis About dialog.

    Do not edit by hand; regenerate from ui_geopunt4QgisAbout.ui instead.
    """
    def setupUi(self, aboutDlg):
        # Basic window setup: object name, size and the plugin icon.
        aboutDlg.setObjectName(_fromUtf8("aboutDlg"))
        aboutDlg.resize(440, 428)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/geopunt4Qgis/images/geopuntSmal.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        aboutDlg.setWindowIcon(icon)
        self.verticalLayout = QtGui.QVBoxLayout(aboutDlg)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        # Rich-text browser holding the About content; starts empty and is
        # presumably filled at runtime by the plugin (links open externally).
        self.aboutText = QtGui.QTextBrowser(aboutDlg)
        self.aboutText.setHtml(_fromUtf8("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
        self.aboutText.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
        self.aboutText.setSearchPaths(['images', '../images'])
        self.aboutText.setOpenExternalLinks(True)
        self.aboutText.setOpenLinks(True)
        self.aboutText.setObjectName(_fromUtf8("aboutText"))
        self.verticalLayout.addWidget(self.aboutText)
        # Button box is created without standard buttons; accept/reject are
        # still wired below in case buttons are added later.
        self.buttonBox = QtGui.QDialogButtonBox(aboutDlg)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.NoButton)
        self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
        self.verticalLayout.addWidget(self.buttonBox)
        self.retranslateUi(aboutDlg)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), aboutDlg.accept)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), aboutDlg.reject)
        QtCore.QMetaObject.connectSlotsByName(aboutDlg)
    def retranslateUi(self, aboutDlg):
        # Only the window title is translatable in this dialog.
        aboutDlg.setWindowTitle(_translate("aboutDlg", "Over Geopunt4QGIS", None))
import resources_rc
|
#! /usr/bin/env python3
from __future__ import print_function
import argparse
import gzip
import os.path
import subprocess
import sys
def check_args(args):
    """Validate command-line options, exiting with status 1 on any problem.

    Checks that:
    - the minimum count in genotype groups is greater than zero,
    - the input file exists,
    - the output file, its compressed form and its index do not already
      exist and so won't be overwritten.
    """
    # The message promises "greater than zero", so 0 is rejected too
    # (the original tested `< 0` and exited with status 0).
    if args.threshold <= 0:
        print("Minimum count must be greater than zero.", file=sys.stderr)
        sys.exit(1)
    if not os.path.isfile(args.input):
        print("Input file,", args.input, "doesn't exist.", file=sys.stderr)
        sys.exit(1)
    # Refuse to clobber any file the downstream pipeline will create.
    for existing in (args.output, args.output + ".gz", args.output + ".gz.csi"):
        if os.path.isfile(existing):
            print(existing, "would be overwritten.", file=sys.stderr)
            sys.exit(1)
def filter_snps(args):
    """Given an input vcf file print only snps where all 3 genotype groups have
    at least a certain number of individuals"""
    # Decrement once so the strict '>' comparisons below implement
    # "count >= args.threshold".
    threshold = args.threshold - 1
    with gzip.open(args.input, 'rt') as f:
        with open(args.output, 'w') as g:
            for line in f:
                if line[0] == "#":
                    # Header lines are copied through unchanged.
                    _ = g.write(line.strip() + '\n')
                else:
                    line = line.strip().split()
                    count_hom1, count_het, count_hom2 = 0, 0, 0
                    # Genotype columns start at VCF field 10 (index 9).
                    for snp in line[9:]:
                        if snp == "0|0":
                            count_hom1 += 1
                        elif snp == "1|1":
                            count_hom2 += 1
                        else:
                            # NOTE(review): everything that is not 0|0 or 1|1
                            # counts as het, including missing calls — assumes
                            # phased GT-only records (see the --help warning).
                            count_het += 1
                    if count_hom1 > threshold and count_hom2 > threshold and count_het > threshold:
                        _ = g.write('\t'.join(line) + '\n')
    # Compress and index the result with external htslib tools
    # (bgzip and bcftools must be on PATH).
    subprocess.call(['bgzip', args.output])
    subprocess.call(['bcftools', 'index', args.output + ".gz"])
if __name__ == "__main__":
    # CLI entry point: parse options, validate, then run the filter pipeline.
    parser = argparse.ArgumentParser(
        description="Filters a vcf file for SNPs with at least a certain count in all three genotype groups. Warning: only works on vcf files where genotype is specified only by the phased GT flag (such as the standard 1000 genomes downloads).")
    parser.add_argument("--input",
                        default="Genotypes.individuals_filtered.maf0.05.vcf.gz",
                        help="Name of vcf file to be filtered.")
    parser.add_argument("--output",
                        default="Genotypes.individuals_filtered.parent.of.origin.vcf",
                        help="Name of output vcf file (without .gz suffix).")
    parser.add_argument("--threshold", default=50, type=int,
                        help="Minimum count in genotype groups.")
    args = parser.parse_args()
    check_args(args)
    filter_snps(args)
|
// Copyright 2017 The Kubernetes Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { Component } from '@angular/core';
import { GroupedResourceList } from '../../common/resources/groupedlist';
// Shell component for the "Config" resource category; all listing behavior
// is inherited from GroupedResourceList, so the class body is empty.
@Component({
  selector: 'kd-config',
  templateUrl: './template.html',
})
export class ConfigComponent extends GroupedResourceList {}
|
# vim:set et sts=4 sw=4:
#
# Zanata Python Client
#
# Copyright (c) 2011 Jian Ni <[email protected]>
# Copyright (c) 2011 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
__all__ = (
"VersionService",
)
from .service import Service
class VersionService(Service):
    """Service wrapper for querying the Zanata server's version endpoint."""

    # Attributes consumed by the base Service constructor.
    _fields = ['base_url', 'http_headers']

    # NOTE: the original pass-through __init__ (super(...).__init__(*args,
    # **kargs) only) was redundant and has been removed; construction is
    # unchanged via the inherited Service.__init__.

    def disable_ssl_cert_validation(self):
        """Turn off SSL certificate verification on the underlying REST client."""
        self.restclient.disable_ssl_cert_validation()

    def get_server_version(self):
        """Fetch the 'server_version' resource and unwrap it via self.messages().

        Returns whatever Service.messages() produces for the response.
        """
        res, content = self.restclient.process_request(
            'server_version',
            headers=self.http_headers
        )
        return self.messages(res, content)
|
'use strict';
/**
 * Module Dependencies.
 */
var fs = require('fs');
var jsdom = require('jsdom');
var assert = require('better-assert');
/**
 * Load test html fixture from disk.
 */
var html = fs.readFileSync('./test/index.html', 'utf-8');
/**
 * Define window and document.
 * NOTE(review): jsdom.jsdom(...).parentWindow is the pre-v4 jsdom API.
 */
var window = jsdom.jsdom(html).parentWindow;
var document = window.document;
/**
 * aload — the lazy-loading helper under test, from the package root.
 */
var aload = require('../');
/**
 * Tests
 */
// Sanity checks: the module must export a callable function.
describe('aload', function () {
  it('should be defined.', function () {
    assert(aload !== undefined);
  });
  it('should be a function.', function () {
    assert(typeof aload === 'function');
  });
});
// Exercises aload() against the first <img> in the fixture: before the call
// the image only carries data-aload; afterwards that value has been promoted
// to src and the data attribute removed.
// (Fixes the "attibute" typo in the original test names.)
describe('An image', function () {
  var img = document.querySelector('img');
  var loaded = false;
  it('shouldn\'t have "src" attribute', function () {
    assert(img.src === '');
  });
  it('should have "data-aload" attribute', function () {
    assert(img.getAttribute('data-aload') !== '');
  });
  it('should set "src" attribute', function (done) {
    var src = img.getAttribute('data-aload');
    img.onload = function () {
      // Guard so done() fires only once even if onload triggers repeatedly.
      if (!loaded) { done(); }
      loaded = true;
    };
    aload(img);
    assert(img.src !== '');
    assert(img.src === src);
  });
  it('should remove "data-aload" attribute', function () {
    assert(img.getAttribute('data-aload') === null);
  });
});
|
// [AsmJit]
// Complete JIT Assembler for C++ Language.
//
// [License]
// Zlib - See COPYING file in this package.
// [Guard]
#ifndef _ASMJIT_CORE_MEMORYMARKER_H
#define _ASMJIT_CORE_MEMORYMARKER_H
// [Dependencies - AsmJit]
#include "../core/build.h"
#include "../core/defs.h"
// [Api-Begin]
#include "../core/apibegin.h"
namespace AsmJit {
//! @addtogroup AsmJit_MemoryManagement
//! @{
// ============================================================================
// [AsmJit::MemoryMarker]
// ============================================================================
//! @brief Virtual memory marker interface.
//!
//! Abstract callback interface: mark() receives a pointer/size pair for a
//! memory range. NOTE(review): the exact marking semantics are defined by
//! implementers and are not visible from this header.
struct MemoryMarker
{
  ASMJIT_NO_COPY(MemoryMarker)

  // --------------------------------------------------------------------------
  // [Construction / Destruction]
  // --------------------------------------------------------------------------

  ASMJIT_API MemoryMarker();
  ASMJIT_API virtual ~MemoryMarker();

  // --------------------------------------------------------------------------
  // [Interface]
  // --------------------------------------------------------------------------

  //! @brief Called with the start address and size of a range to mark.
  virtual void mark(const void* ptr, size_t size) = 0;
};
//! @}
} // AsmJit namespace
// [Api-End]
#include "../core/apiend.h"
// [Guard]
#endif // _ASMJIT_CORE_MEMORYMARKER_H
|
//
// ServerViewController.h
// SocketDemo
//
// Created by zx on 1/20/15.
// Copyright (c) 2015 zx. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "AsyncSocket.h"
// Server side of the socket demo. Adopts AsyncSocketDelegate plus table-view
// data source/delegate — presumably to list incoming connections; the
// implementation is not visible from this header. No public API beyond
// UIViewController.
@interface ServerViewController : UIViewController<AsyncSocketDelegate,UITableViewDataSource,UITableViewDelegate>
@end
|
from __future__ import absolute_import
from sentry.models import Authenticator, OrganizationMember, User, UserEmail
from sentry.testutils import TestCase
class UserDetailsTest(TestCase):
    """Covers User.get_salutation_name and basic profile updates."""
    def test_salutation(self):
        # Without a name set, the salutation is derived from the email/username.
        user = self.create_user(email="[email protected]", username="[email protected]")
        assert user.get_salutation_name() == "A"
        user.update(name="hello world", email="[email protected]")
        user = User.objects.get(id=user.id)
        assert user.name == "hello world"
        assert user.email == "[email protected]"
        # Once a name exists, the capitalized first word of the name is used.
        assert user.get_salutation_name() == "Hello"
class UserMergeToTest(TestCase):
    """Covers User.merge_to: emails, authenticators and org memberships."""
    def test_simple(self):
        from_user = self.create_user("[email protected]")
        UserEmail.objects.create_or_update(
            user=from_user, email=from_user.email, values={"is_verified": True}
        )
        to_user = self.create_user("[email protected]")
        UserEmail.objects.create_or_update(
            user=to_user, email=to_user.email, values={"is_verified": True}
        )
        # auth1 duplicates auth2's type on the target user; auth3 is unique.
        auth1 = Authenticator.objects.create(user=from_user, type=1)
        auth2 = Authenticator.objects.create(user=to_user, type=1)
        auth3 = Authenticator.objects.create(user=to_user, type=2)
        from_user.merge_to(to_user)
        # Both verified emails end up on the target user.
        assert UserEmail.objects.filter(
            user=to_user, email=to_user.email, is_verified=True
        ).exists()
        assert UserEmail.objects.filter(
            user=to_user, email=from_user.email, is_verified=True
        ).exists()
        assert Authenticator.objects.filter(user=to_user, id=auth2.id).exists()
        assert Authenticator.objects.filter(user=to_user, id=auth3.id).exists()
        # dupe shouldn't get merged
        assert Authenticator.objects.filter(user=from_user, id=auth1.id).exists()
    def test_duplicate_memberships(self):
        from_user = self.create_user("[email protected]")
        to_user = self.create_user("[email protected]")
        org_1 = self.create_organization()
        team_1 = self.create_team(organization=org_1)
        team_2 = self.create_team(organization=org_1)
        team_3 = self.create_team(organization=org_1)
        self.create_member(organization=org_1, user=from_user, role="owner", teams=[team_1, team_2])
        # to_user should have less roles
        self.create_member(organization=org_1, user=to_user, role="member", teams=[team_2, team_3])
        from_user.merge_to(to_user)
        # The merged membership keeps the higher role and the union of teams.
        member = OrganizationMember.objects.get(user=to_user)
        assert member.role == "owner"
        assert list(member.teams.all().order_by("pk")) == [team_1, team_2, team_3]
class GetUsersFromTeamsTest(TestCase):
    """Covers User.objects.get_from_teams membership resolution per org."""
    def test(self):
        user = self.create_user()
        org = self.create_organization(name="foo", owner=user)
        team = self.create_team(organization=org)
        org2 = self.create_organization(name="bar", owner=None)
        team2 = self.create_team(organization=org2)
        user2 = self.create_user("[email protected]")
        self.create_member(user=user2, organization=org, role="admin", teams=[team])
        assert list(User.objects.get_from_teams(org, [team])) == [user2]
        user3 = self.create_user("[email protected]")
        self.create_member(user=user3, organization=org, role="admin", teams=[team])
        assert set(list(User.objects.get_from_teams(org, [team]))) == set([user2, user3])
        # Teams are scoped to their organization: mismatched org yields nothing.
        assert list(User.objects.get_from_teams(org2, [team])) == []
        assert list(User.objects.get_from_teams(org2, [team2])) == []
        self.create_member(user=user, organization=org2, role="member", teams=[team2])
        assert list(User.objects.get_from_teams(org2, [team2])) == [user]
class GetUsersFromProjectsTest(TestCase):
    """Covers User.objects.get_from_projects (via team membership) per org."""
    def test(self):
        user = self.create_user()
        org = self.create_organization(name="foo", owner=user)
        team = self.create_team(organization=org)
        project = self.create_project(organization=org, teams=[team])
        org2 = self.create_organization(name="bar", owner=None)
        team2 = self.create_team(organization=org2)
        user2 = self.create_user("[email protected]")
        project2 = self.create_project(organization=org2, teams=[team2])
        self.create_member(user=user2, organization=org, role="admin", teams=[team])
        assert list(User.objects.get_from_projects(org, [project])) == [user2]
        user3 = self.create_user("[email protected]")
        self.create_member(user=user3, organization=org, role="admin", teams=[team])
        assert set(list(User.objects.get_from_projects(org, [project]))) == set([user2, user3])
        # Projects are scoped to their organization: mismatched org yields nothing.
        assert list(User.objects.get_from_projects(org2, [project])) == []
        assert list(User.objects.get_from_projects(org2, [project2])) == []
        self.create_member(user=user, organization=org2, role="member", teams=[team2])
        assert list(User.objects.get_from_projects(org2, [project2])) == [user]
|
"""Customized autocomplete widgets"""
# Standard Library
import re
# Third Party
from dal import autocomplete
# MuckRock
from muckrock.jurisdiction.models import Jurisdiction
class MRSelect2Mixin:
    """MuckRock Model Select2 mixin.

    Injects the standard MuckRock widget attributes (caller-supplied attrs
    take precedence) and drops non-numeric values before rendering.
    """

    def __init__(self, *args, **kwargs):
        caller_attrs = kwargs.pop("attrs", {})
        merged = {
            "data-html": True,
            "data-dropdown-css-class": "select2-dropdown",
            "data-width": "100%",
        }
        merged.update(caller_attrs)
        super().__init__(*args, attrs=merged, **kwargs)

    def filter_choices_to_render(self, selected_choices):
        """Filter out non-numeric choices"""
        numeric_only = [choice for choice in selected_choices if choice.isdecimal()]
        return super().filter_choices_to_render(numeric_only)
class ModelSelect2(MRSelect2Mixin, autocomplete.ModelSelect2):
    """MuckRock Model Select2: single-value model autocomplete widget."""
class ModelSelect2Multiple(MRSelect2Mixin, autocomplete.ModelSelect2Multiple):
    """MuckRock Model Select2 Multiple: multi-value model autocomplete widget."""
class Select2MultipleSI(MRSelect2Mixin, autocomplete.Select2Multiple):
    """MuckRock Select2 for state inclusive jurisdiction autocomplete"""
    # Values are encoded as "<jurisdiction pk>-<include_local flag>".
    value_format = re.compile(r"\d+-(True|False)")
    def filter_choices_to_render(self, selected_choices):
        """Replace self.choices with selected_choices."""
        self.choices = []
        for choice in selected_choices:
            # Skip anything not matching the "<pk>-<bool>" encoding.
            if not self.value_format.match(choice):
                continue
            pk, include_local = choice.split("-")
            # NOTE(review): one DB query per selected choice (N+1); fine for
            # the small selections typical here.
            jurisdiction = Jurisdiction.objects.get(pk=pk)
            label = str(jurisdiction)
            if include_local == "True":
                label += " (include local)"
            self.choices.append((choice, label))
|
#! /usr/bin/env python
# _*_ coding: UTF-8 _*_
# File : 50-problem.py
# Created : Sat 11 Apr 2015 20:57:18
# Last Modified : Sat 18 Apr 2015 22:21:45
# Maintainer : sharlatan, <[email protected]>
# Title : CONSECUTIVE PRIME SUM
# License : Same as Python (GPL)
# Credits : https://projecteuler.net/
#
# -=:[ Description ]:=-
# The prime 41, can be written as the sum of six consecutive primes:
# 41 = 2 + 3 + 5 + 7 + 11 + 13
# This is the longest sum of consecutive primes that adds to a prime below
# one-hundred. The longest sum of consecutive primes below one-thousand that
# adds to a prime, contains 21 terms, and is equal to 953.
# Which prime, below one-million, can be written as the sum of the most
# consecutive primes?
from termcolor import colored
import sys
import time
import math
# Wall-clock reference captured at import time; used by perf().
INIT_TIME = time.time()
def isprime(num):
    """Return True if num is prime.

    Trial division up to sqrt(num). Numbers below 2 are not prime — the
    original returned True for 0 and 1 because the loop body never ran.
    (range is used instead of xrange so the helper works on Python 2 and 3.)
    """
    if num < 2:
        return False
    search_lim = int(math.sqrt(num))
    for candidate in range(2, search_lim + 1):
        if num % candidate == 0:
            return False
    return True
def cons_prime_sum(prime_list):
    """ takes a list of prime, return a list with consecutive prime sum """
    # NOTE(review): despite the docstring, nothing is returned — every window
    # is printed instead. The triple nested loop is at least O(n^3) and
    # prime_list.index() adds another linear scan per iteration. The inner
    # while can also index past the end of prime_list when the running sum
    # never exceeds p_check — confirm before reuse.
    pl = []  # prime list of consecutive prime sum
    for p_check in prime_list:
        for p_step in xrange(0, prime_list.index(p_check)):
            for p_index in xrange(p_step, prime_list.index(p_check)):
                pl[::] = []  # clean up list
                get_that_prime = p_index
                # Accumulate consecutive primes starting at p_index until the
                # running sum first exceeds p_check.
                while sum(pl) <= p_check:
                    pl.append(prime_list[get_that_prime])
                    get_that_prime += 1
                print pl, p_check
def perf():
    """Return a human-readable string with the wall-clock time elapsed since
    module import (fixes the 'Perfomance' typo in the displayed text)."""
    return ("Performance time: %s" % (time.time() - INIT_TIME))
def error():
    """Print a colored usage hint on bad arguments and terminate."""
    # Python 2 print statements; quit() ends the interpreter session.
    print colored("..::Incorect amount of arguments::..", 'red')
    print colored("\tEnter just one integer", 'blue')
    quit()
def main():
    """Entry point: build primes up to argv[1] and print consecutive sums."""
    # Require exactly one CLI argument: the upper bound for prime generation.
    if len(sys.argv) != 2:
        error()
    PRIME_LIMIT = int(sys.argv[1])
    PRIME_LIST = []
    # Collect all primes up to PRIME_LIMIT by trial division.
    for i in xrange(2, PRIME_LIMIT + 1):
        if isprime(i):
            PRIME_LIST.append(i)
    # p_sum = 0
    # for p in PRIME_LIST:
    #     p_sum += p
    #     print p_sum, p
    #     if p_sum > 953:
    #         break
    # print PRIME_LIST
    cons_prime_sum(PRIME_LIST)
    print perf()
if __name__ == '__main__':
    main()
|
/*
DNT: a satirical post-apocalyptical RPG.
Copyright (C) 2005-2013 DNTeam <[email protected]>
This file is part of DNT.
DNT is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
DNT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with DNT. If not, see <http://www.gnu.org/licenses/>.
*/
#include "npcs.h"
#include "../../engine/util.h"
#define NPCS_STATE_NONE 0
#define NPCS_STATE_ADD 1
using namespace std;
using namespace dntMapEditor;
/******************************************************
 *                    Constructor                     *
 ******************************************************/
Npcs::Npcs(Map* acMap, characterList* NPCsList, featsList* lFeats)
{
   /* Wire up the editor with the map, the target NPC list and the
    * available feats. */
   actualMap = acMap;
   NPCs = NPCsList;
   features = lFeats;
   /* No NPC prototype is selected until defineActualNpc() is called. */
   state = NPCS_STATE_NONE;
   actualNpc = NULL;
   npcFile = "";
   /* Internal scratch list owning the currently previewed NPC. */
   intList = new characterList();
}
/******************************************************
 *                    Destructor                      *
 ******************************************************/
Npcs::~Npcs()
{
   /* intList is owned by this editor (allocated in the constructor and in
    * defineActualNpc); the original destructor leaked it. */
   delete(intList);
   intList = NULL;
   actualMap = NULL;
}
/******************************************************
 *                   verifyAction                     *
 ******************************************************/
/* Handle mouse interaction for the NPC tool: move, rotate (buttons 2/3)
 * and insert (button 1) the currently selected preview NPC. */
void Npcs::verifyAction(GLfloat mouseX, GLfloat mouseY, GLfloat mouseZ,
                        Uint8 mButton, int mouseXw, int mouseYw,
                        int tool, GLdouble proj[16],
                        GLdouble modl[16], GLint viewPort[4])
{
   /* Nothing to do until a prototype is chosen via defineActualNpc(). */
   if(actualNpc == NULL)
   {
      return;
   }
   if(tool == TOOL_NPC_ADD)
   {
      state = NPCS_STATE_ADD;
      /* Changed Character Position */
      if( (mX != mouseXw) || (mY != mouseYw))
      {
         /* Set the new position, snapping height to the map terrain */
         mX = mouseXw;
         mY = mouseYw;
         actualNpc->scNode->setPosition(mouseX,
               actualMap->getHeight(mouseX, mouseZ),mouseZ);
      }
      /* Rotate the NPC + (middle button nudges yaw by one degree) */
      if( (mButton & SDL_BUTTON(2)) && (actualNpc != NULL) )
      {
         actualNpc->scNode->setAngle(0.0f,
               ((int)(actualNpc->scNode->getAngleY()+1))%360,0.0f);
      }
      /* Rotate the NPC - */
      if( (mButton & SDL_BUTTON(3)) && (actualNpc != NULL) )
      {
         actualNpc->scNode->setAngle(0.0f,
               ((int)(actualNpc->scNode->getAngleY()-1))%360,0.0f);
      }
      /* Insert the NPC */
      if( (mButton & SDL_BUTTON(1)) && (actualNpc != NULL) )
      {
         insertNpc(mouseX, mouseZ, actualNpc,
                   (int)(mouseX / actualMap->squareSize()),
                   (int)(mouseZ / actualMap->squareSize()));
         /* NOTE(review): busy-waits (pumping events) until the left button
          * is released so only one NPC is inserted per click. */
         while(mButton & SDL_BUTTON(1))
         {
            //Wait for Mouse Button Release
            SDL_PumpEvents();
            int x,y;
            mButton = SDL_GetMouseState(&x,&y);
         }
      }
   }
   else
   {
      state = NPCS_STATE_NONE;
   }
}
/******************************************************************
 *                        drawTemporary                           *
 ******************************************************************/
void Npcs::drawTemporary()
{
   /* NOTE(review): intentionally empty — presumably the preview NPC is
    * rendered through the normal scene graph; confirm in the caller. */
}
/******************************************************************
 *                          insertNpc                             *
 ******************************************************************/
/* Create a new NPC from npcFile in the real list at (xReal, zReal),
 * copying the preview NPC's yaw. NOTE(review): qx/qz (map square
 * coordinates) are currently unused. */
void Npcs::insertNpc(GLfloat xReal, GLfloat zReal,
                     character* npc, int qx, int qz)
{
   character* per;
   per = NPCs->insertCharacter(npcFile,features, NULL, "");
   /* Place on the terrain at the clicked position with the preview angle. */
   per->scNode->set(xReal, actualMap->getHeight(xReal, zReal), zReal,
                    0.0f, npc->scNode->getAngleY(), 0.0f);
}
/******************************************************************
 *                       defineActualNpc                          *
 ******************************************************************/
/* Select the NPC definition file used for previewing/insertion. */
void Npcs::defineActualNpc(string fileName)
{
   /* Only reload when a different file is requested. */
   if(npcFile != fileName)
   {
      /* Replace the scratch list so the previous preview NPC is freed. */
      delete(intList);
      intList = new(characterList);
      actualNpc = intList->insertCharacter(fileName, features, NULL, "");
      npcFile = fileName;
   }
}
/******************************************************************
 *                         deleteNpcs                             *
 ******************************************************************/
/* Discard the currently previewed NPC (if any) and clear the selection. */
void Npcs::deleteNpc()
{
   if(actualNpc)
   {
      intList->removeCharacter(actualNpc);
      npcFile = "";
      actualNpc = NULL;
   }
}
/******************************************************************
 *                      getObjectFileName                         *
 ******************************************************************/
/* File name of the NPC currently selected for insertion ("" when none). */
string Npcs::getNpcFileName()
{
   return(npcFile);
}
/******************************************************************
 *                           saveFile                             *
 ******************************************************************/
/* Write all NPCs to a text file: first line is the total count, then one
 * line per NPC with name, source file, X/Z position, yaw and psycho state.
 * Returns false when the file cannot be opened for writing. */
bool Npcs::saveFile(string fileName)
{
   FILE* arq;
   string saveName;
   if(!(arq = fopen(fileName.c_str(),"w")))
   {
      return(false);
   }
   else
   {
      int npc;
      fprintf(arq,"%d\n",NPCs->getTotal());
      character* per = (character*) NPCs->getFirst();
      for(npc = 0; npc < NPCs->getTotal(); npc++)
      {
         /* Spaces in names are escaped so the loader can split on spaces. */
         saveName = replaceSpaces(per->name);
         fprintf(arq,"%s %s %.3f %.3f %.3f %d\n", saveName.c_str(),
                 per->getCharacterFile().c_str(),
                 per->scNode->getPosX(),
                 per->scNode->getPosZ(),
                 per->scNode->getAngleY(),
                 per->getPsychoState());
         per = (character*) per->getNext();
      }
      fclose(arq);
   }
   return(true);
}
|
#!/usr/bin/env python
# Dan Blankenberg
from __future__ import print_function
import json
import optparse
import os
import subprocess
import sys
import tempfile
CHUNK_SIZE = 2**20 # 1mb
def get_id_name(params, dbkey, fasta_description=None):
    """Resolve the sequence id and display name for a data table entry.

    Falls back from the user-supplied values to the FASTA description and
    finally to the dbkey when a field is empty.
    """
    # TODO: ensure sequence_id is unique and does not already appear in location file
    param_dict = params['param_dict']
    sequence_id = param_dict['sequence_id'] or dbkey
    sequence_name = param_dict['sequence_name'] or fasta_description or dbkey
    return sequence_id, sequence_name
def build_twobit(data_manager_dict, fasta_filename, params, target_directory, dbkey, sequence_id, sequence_name):
    """Run UCSC faToTwoBit on the FASTA and register the resulting .2bit
    file in the lastz_seqs, twobit and alignseq_seq data tables.

    Exits the process with faToTwoBit's return code on failure, relaying
    its stderr. Requires 'faToTwoBit' on PATH.
    """
    twobit_base_name = "%s.2bit" % (sequence_id)
    twobit_filename = os.path.join(target_directory, twobit_base_name)
    args = ['faToTwoBit', fasta_filename, twobit_filename]
    tmp_stderr = tempfile.NamedTemporaryFile(prefix="tmp-data-manager-twobit-builder-stderr")
    proc = subprocess.Popen(args=args, shell=False, cwd=target_directory, stderr=tmp_stderr.fileno())
    return_code = proc.wait()
    if return_code:
        # Relay the captured stderr in bounded chunks, then propagate the code.
        # NOTE(review): the temp file is opened in binary mode, so chunk is
        # bytes — sys.stderr.write(bytes) fails on Python 3; confirm target
        # interpreter before relying on this error path.
        tmp_stderr.flush()
        tmp_stderr.seek(0)
        print("Error building index:", file=sys.stderr)
        while True:
            chunk = tmp_stderr.read(CHUNK_SIZE)
            if not chunk:
                break
            sys.stderr.write(chunk)
        sys.exit(return_code)
    tmp_stderr.close()
    # lastz_seqs
    data_table_entry = dict(value=sequence_id, name=sequence_name, path=twobit_base_name)
    _add_data_table_entry(data_manager_dict, "lastz_seqs", data_table_entry)
    # twobit.loc
    data_table_entry = dict(value=sequence_id, path=twobit_base_name)
    _add_data_table_entry(data_manager_dict, "twobit", data_table_entry)
    # alignseq
    data_table_entry = dict(type="seq", value=sequence_id, path=twobit_base_name)
    _add_data_table_entry(data_manager_dict, "alignseq_seq", data_table_entry)
def _add_data_table_entry(data_manager_dict, data_table_name, data_table_entry):
data_manager_dict['data_tables'] = data_manager_dict.get('data_tables', {})
data_manager_dict['data_tables'][data_table_name] = data_manager_dict['data_tables'].get(data_table_name, [])
data_manager_dict['data_tables'][data_table_name].append(data_table_entry)
return data_manager_dict
def main():
    """Entry point: read the Galaxy params JSON (positional arg), build the
    2bit index and write the data table entries back into the same file."""
    parser = optparse.OptionParser()
    parser.add_option('-f', '--fasta_filename', dest='fasta_filename', action='store', type="string", default=None, help='fasta_filename')
    parser.add_option('-d', '--fasta_dbkey', dest='fasta_dbkey', action='store', type="string", default=None, help='fasta_dbkey')
    parser.add_option('-t', '--fasta_description', dest='fasta_description', action='store', type="string", default=None, help='fasta_description')
    (options, args) = parser.parse_args()
    filename = args[0]
    with open(filename) as fh:
        params = json.load(fh)
    # Output directory supplied by Galaxy inside the params file.
    target_directory = params['output_data'][0]['extra_files_path']
    os.mkdir(target_directory)
    data_manager_dict = {}
    dbkey = options.fasta_dbkey
    if dbkey in [None, '', '?']:
        raise Exception('"%s" is not a valid dbkey. You must specify a valid dbkey.' % (dbkey))
    sequence_id, sequence_name = get_id_name(params, dbkey=dbkey, fasta_description=options.fasta_description)
    # build the index
    build_twobit(data_manager_dict, options.fasta_filename, params, target_directory, dbkey, sequence_id, sequence_name)
    # save info to json file (overwrites the input params file by design)
    with open(filename, 'w') as fh:
        json.dump(data_manager_dict, fh, sort_keys=True)
if __name__ == "__main__":
    main()
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gmsh(CMakePackage):
    """Gmsh is a free 3D finite element grid generator with a built-in CAD engine
    and post-processor. Its design goal is to provide a fast, light and
    user-friendly meshing tool with parametric input and advanced visualization
    capabilities. Gmsh is built around four modules: geometry, mesh, solver and
    post-processing. The specification of any input to these modules is done
    either interactively using the graphical user interface or in ASCII text
    files using Gmsh's own scripting language.
    """
    homepage = 'http://gmsh.info'
    url = 'http://gmsh.info/src/gmsh-2.11.0-source.tgz'
    version('3.0.1', '830b5400d9f1aeca79c3745c5c9fdaa2900cdb2fa319b664a5d26f7e615c749f')
    version('2.16.0', 'e829eaf32ea02350a385202cc749341f2a3217c464719384b18f653edd028eea')
    version('2.15.0', '992a4b580454105f719f5bc05441d3d392ab0b4b80d4ea07b61ca3bdc974070a')
    version('2.12.0', '7fbd2ec8071e79725266e72744d21e902d4fe6fa9e7c52340ad5f4be5c159d09')
    version('2.11.0', 'f15b6e7ac9ca649c9a74440e1259d0db')
    variant('shared', default=True,
            description='Enables the build of shared libraries')
    variant('mpi', default=True,
            description='Builds MPI support for parser and solver')
    variant('fltk', default=False,
            description='Enables the build of the FLTK GUI')
    variant('hdf5', default=False, description='Enables HDF5 support')
    variant('compression', default=True,
            description='Enables IO compression through zlib')
    variant('oce', default=False, description='Build with OCE')
    variant('petsc', default=False, description='Build with PETSc')
    variant('slepc', default=False,
            description='Build with SLEPc (only when PETSc is enabled)')
    depends_on('blas')
    depends_on('lapack')
    depends_on('[email protected]:', type='build')
    depends_on('gmp')
    depends_on('mpi', when='+mpi')
    # Assumes OpenGL with GLU is already provided by the system:
    depends_on('fltk', when='+fltk')
    depends_on('hdf5', when='+hdf5')
    depends_on('oce', when='+oce')
    depends_on('petsc+mpi', when='+petsc+mpi')
    depends_on('petsc', when='+petsc~mpi')
    depends_on('slepc', when='+slepc+petsc')
    depends_on('zlib', when='+compression')
    def cmake_args(self):
        """Assemble the CMake options for the requested variants."""
        spec = self.spec
        prefix = self.prefix
        options = []
        # Make sure native file dialogs are used
        options.extend(['-DENABLE_NATIVE_FILE_CHOOSER=ON'])
        options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
        # Prevent GMsh from using its own strange directory structure on OSX
        options.append('-DENABLE_OS_SPECIFIC_INSTALL=OFF')
        # Make sure GMSH picks up correct BlasLapack by providing linker flags
        blas_lapack = spec['lapack'].libs + spec['blas'].libs
        options.append(
            '-DBLAS_LAPACK_LIBRARIES={0}'.format(blas_lapack.ld_flags))
        # Gmsh does not have an option to compile against external metis.
        # Its own Metis, however, fails to build
        options.append('-DENABLE_METIS=OFF')
        if '+fltk' in spec:
            options.append('-DENABLE_FLTK=ON')
        else:
            options.append('-DENABLE_FLTK=OFF')
        # NOTE(review): 'env' below is Spack's global build environment dict.
        if '+oce' in spec:
            env['CASROOT'] = self.spec['oce'].prefix
            options.extend(['-DENABLE_OCC=ON'])
        else:
            options.extend(['-DENABLE_OCC=OFF'])
        if '+petsc' in spec:
            env['PETSC_DIR'] = self.spec['petsc'].prefix
            options.extend(['-DENABLE_PETSC=ON'])
        else:
            options.extend(['-DENABLE_PETSC=OFF'])
        if '+slepc' in spec:
            env['SLEPC_DIR'] = self.spec['slepc'].prefix
            options.extend(['-DENABLE_SLEPC=ON'])
        else:
            options.extend(['-DENABLE_SLEPC=OFF'])
        if '+shared' in spec:
            # Builds dynamic executable and installs shared library
            options.extend(['-DENABLE_BUILD_SHARED:BOOL=ON',
                            '-DENABLE_BUILD_DYNAMIC:BOOL=ON'])
        else:
            # Builds and installs static library
            options.append('-DENABLE_BUILD_LIB:BOOL=ON')
        if '+mpi' in spec:
            options.append('-DENABLE_MPI:BOOL=ON')
        if '+compression' in spec:
            options.append('-DENABLE_COMPRESSED_IO:BOOL=ON')
        return options
|
# Collisions and Reflections
# Drawing Vectors
# This program draws a vector from point_one to point_two,
# which are given by the user, and displays the magnitude
# (length) of the vector rounded to two decimal places.
import simplegui
import math

# Global Variables
canvas_width = 400
canvas_height = 400
# Both endpoints start at the canvas centre, so the initial vector has
# zero length and no arrow head is drawn until the user enters points.
point_one = [canvas_width // 2, canvas_height // 2]
point_two = [canvas_width // 2, canvas_height // 2]
magnitude = 0  # current vector length, kept up to date by the input handlers
angle = 0      # current vector direction in radians (screen coordinates)

# Helper Functions
def distance(pos1, pos2):
    """Return the Euclidean distance between two 2D points.

    math.hypot is the standard-library helper for sqrt(dx*dx + dy*dy);
    it also avoids intermediate overflow for large components.
    """
    return math.hypot(pos1[0] - pos2[0], pos1[1] - pos2[1])
# This is just for the pretty arrow at the end of the vector,
# so don't worry about the math
def get_angle(pos1, pos2):
if pos1[0] == pos2[0]:
if pos2[1] - pos1[1] > 0:
return math.pi / 2
else:
return math.pi * 3 / 2
angle = math.atan((pos1[1] - pos2[1]) / (pos1[0] - pos2[0]))
if pos2[0] - pos1[0] < 0:
angle += math.pi
return angle
# Event Handlers
def draw(canvas):
    """Draw handler: render the vector, its arrow head and its magnitude."""
    canvas.draw_line(point_one, point_two, 1, 'White')

    # Draws an arrow at the end of the vector. Don't worry
    # about the math, this is just to make the program
    # look nice.
    if magnitude > 0:
        # Arrow head: an equilateral triangle whose centre sits 8px back
        # from the tip, with vertices spaced 120 degrees around 'angle'.
        pos = (point_two[0] - 8 * math.cos(angle), point_two[1] - 8 * math.sin(angle))
        a = (math.pi * 2 / 3) * 0 + angle
        p1 = [pos[0] + math.cos(a) * 20 / 3, pos[1] + math.sin(a) * 20 / 3]
        a = (math.pi * 2 / 3) * 1 + angle
        p2 = [pos[0] + math.cos(a) * 20 / 3, pos[1] + math.sin(a) * 20 / 3]
        a = (math.pi * 2 / 3) * 2 + angle
        p3 = [pos[0] + math.cos(a) * 20 / 3, pos[1] + math.sin(a) * 20 / 3]
        canvas.draw_polygon([p1, p2, p3], 1, 'White', 'White')

    canvas.draw_text('Magnitude of Vector: ' + str(round(magnitude, 2)), (50, 30), 20, 'Red')
def update_x1(text):
    """Input handler: set point_one's x-coordinate and refresh the vector.

    Rejects non-digit input and values beyond the canvas width.
    """
    # Note that I don't have to list point_one as a global
    # (mutating a list in place does not rebind the name).
    global magnitude, angle
    if text.isdigit() and int(text) <= canvas_width:
        point_one[0] = int(text)
        magnitude = distance(point_one, point_two)
        angle = get_angle(point_one, point_two)
    else:
        print 'Invalid input'
def update_y1(text):
    """Input handler: set point_one's y-coordinate and refresh the vector."""
    global magnitude, angle
    if text.isdigit() and int(text) <= canvas_height:
        point_one[1] = int(text)
        magnitude = distance(point_one, point_two)
        angle = get_angle(point_one, point_two)
    else:
        print 'Invalid input'
def update_x2(text):
    """Input handler: set point_two's x-coordinate and refresh the vector."""
    global magnitude, angle
    if text.isdigit() and int(text) <= canvas_width:
        point_two[0] = int(text)
        magnitude = distance(point_one, point_two)
        angle = get_angle(point_one, point_two)
    else:
        print 'Invalid input'
def update_y2(text):
    """Input handler: set point_two's y-coordinate and refresh the vector."""
    global magnitude, angle
    if text.isdigit() and int(text) <= canvas_height:
        point_two[1] = int(text)
        magnitude = distance(point_one, point_two)
        angle = get_angle(point_one, point_two)
    else:
        print 'Invalid input'
# Frame and Timer
frame = simplegui.create_frame('Drawing Lines', canvas_width, canvas_height)

# Register Event Handlers
frame.set_draw_handler(draw)
# Note that input points must be on the canvas.
# The third argument (50) is presumably the input field width in pixels —
# per the simplegui/CodeSkulptor add_input API.
frame.add_input('x1', update_x1, 50)
frame.add_input('y1', update_y1, 50)
frame.add_input('x2', update_x2, 50)
frame.add_input('y2', update_y2, 50)

# Start
frame.start()
|
using System;
using System.Xml.Serialization;
using System.Collections.Generic;
namespace Aop.Api.Domain
{
/// <summary>
/// ShopSummaryQueryResponse Data Structure.
/// Summary information for a single store, as returned by the shop
/// summary query API.
/// </summary>
[Serializable]
public class ShopSummaryQueryResponse : AopObject
{
    /// <summary>
    /// Store address.
    /// </summary>
    [XmlElement("address")]
    public string Address { get; set; }

    /// <summary>
    /// Branch store name.
    /// </summary>
    [XmlElement("branch_shop_name")]
    public string BranchShopName { get; set; }

    /// <summary>
    /// Brand name; defaults to "other brands" when not provided.
    /// </summary>
    [XmlElement("brand_name")]
    public string BrandName { get; set; }

    /// <summary>
    /// Business hours.
    /// </summary>
    [XmlElement("business_time")]
    public string BusinessTime { get; set; }

    /// <summary>
    /// List of store categories.
    /// </summary>
    [XmlArray("category_infos")]
    [XmlArrayItem("shop_category_info")]
    public List<ShopCategoryInfo> CategoryInfos { get; set; }

    /// <summary>
    /// City code (national standard code; see the National Bureau of Statistics data <a href="http://aopsdkdownload.cn-hangzhou.alipay-pub.aliyun-inc.com/doc/AreaCodeList.zip">download here</a>)
    /// </summary>
    [XmlElement("city_code")]
    public string CityCode { get; set; }

    /// <summary>
    /// District/county code (national standard code; see the National Bureau of Statistics data <a href="http://aopsdkdownload.cn-hangzhou.alipay-pub.aliyun-inc.com/doc/AreaCodeList.zip">download here</a>)
    /// </summary>
    [XmlElement("district_code")]
    public string DistrictCode { get; set; }

    /// <summary>
    /// Creation time.
    /// </summary>
    [XmlElement("gmt_create")]
    public string GmtCreate { get; set; }

    /// <summary>
    /// Whether the store is shown in the client app: "T" = shown, "F" = hidden.
    /// </summary>
    [XmlElement("is_show")]
    public string IsShow { get; set; }

    /// <summary>
    /// Main image of the store.
    /// </summary>
    [XmlElement("main_image")]
    public string MainImage { get; set; }

    /// <summary>
    /// Main store name.
    /// </summary>
    [XmlElement("main_shop_name")]
    public string MainShopName { get; set; }

    /// <summary>
    /// Image collection: a JSON string converted from a map whose key is
    /// the image id and whose value is the image URL.
    /// </summary>
    [XmlElement("pic_coll")]
    public string PicColl { get; set; }

    /// <summary>
    /// Province code (national standard code; see the National Bureau of Statistics data <a href="http://aopsdkdownload.cn-hangzhou.alipay-pub.aliyun-inc.com/doc/AreaCodeList.zip">download here</a>)
    /// </summary>
    [XmlElement("province_code")]
    public string ProvinceCode { get; set; }

    /// <summary>
    /// Store review information.
    /// </summary>
    [XmlElement("shop_comment_info")]
    public ShopCommentInfo ShopCommentInfo { get; set; }

    /// <summary>
    /// Store ID.
    /// </summary>
    [XmlElement("shop_id")]
    public string ShopId { get; set; }

    /// <summary>
    /// COMMON (regular store), MALL (shopping mall/business district).
    /// </summary>
    [XmlElement("shop_type")]
    public string ShopType { get; set; }

    /// <summary>
    /// Store status. OPEN: open for business, PAUSE: temporarily closed,
    /// FREEZE: frozen, CLOSE: permanently closed.
    /// </summary>
    [XmlElement("status")]
    public string Status { get; set; }
}
}
|
/**
* Copyright 2013 The Trustees of Indiana University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.indiana.d2i.htrc.sloan.bean;
/**
 * Plain data bean describing the status of a single virtual machine:
 * identity, lifecycle state, access ports/credentials and resource sizing.
 *
 * NOTE(review): the "Intial"/"Loging" spellings in the login-credential
 * fields are kept as-is — the getter/setter names are part of the public
 * API and renaming them would break callers and bean-based serialization.
 */
public class VMStatus {
	private String vmId;
	private String mode;
	private String state;
	private long vncPort;
	private long sshPort;
	private String publicIp;
	private long vcpu;
	private long memory;
	private long volumeSize;
	private String imageName;
	private String vmIntialLogingId;
	private String vmInitialLogingPassword;

	public String getVmId() {
		return vmId;
	}

	public void setVmId(String vmId) {
		this.vmId = vmId;
	}

	public String getMode() {
		return mode;
	}

	public void setMode(String mode) {
		this.mode = mode;
	}

	public String getState() {
		return state;
	}

	public void setState(String state) {
		this.state = state;
	}

	public long getVncPort() {
		return vncPort;
	}

	public void setVncPort(long vncPort) {
		this.vncPort = vncPort;
	}

	public long getSshPort() {
		return sshPort;
	}

	public void setSshPort(long sshPort) {
		this.sshPort = sshPort;
	}

	public String getPublicIp() {
		return publicIp;
	}

	public void setPublicIp(String publicIp) {
		this.publicIp = publicIp;
	}

	public long getVcpu() {
		return vcpu;
	}

	public void setVcpu(long vcpu) {
		this.vcpu = vcpu;
	}

	public long getMemory() {
		return memory;
	}

	public void setMemory(long memory) {
		this.memory = memory;
	}

	public long getVolumeSize() {
		return volumeSize;
	}

	public void setVolumeSize(long volumeSize) {
		this.volumeSize = volumeSize;
	}

	public String getImageName() {
		return imageName;
	}

	public void setImageName(String imageName) {
		this.imageName = imageName;
	}

	public String getVmIntialLogingId() {
		return vmIntialLogingId;
	}

	public void setVmIntialLogingId(String vmIntialLogingId) {
		this.vmIntialLogingId = vmIntialLogingId;
	}

	public String getVmInitialLogingPassword() {
		return vmInitialLogingPassword;
	}

	public void setVmInitialLogingPassword(String vmInitialLogingPassword) {
		this.vmInitialLogingPassword = vmInitialLogingPassword;
	}
}
|
<!-- Test harness bootstrap: configure the Testee provider, then load the
     steal module loader with test/test as the entry module. -->
<script>window.Testee = { provider: { type: 'rest' } };</script>
<script src="../node_modules/steal/steal.js"
        data-main="test/test"></script>
|
import React from "react";
import { styled, Icon } from "@patternplate/components";
import ToggleButton from "./common/toggle-button";
export default Hamburger;
function Hamburger(props) {
return (
<StyledToggleButton
enabled={props.enabled}
shortcut={props.shortcut}
>
<Icon symbol="hamburger"/>
{props.shortcut.toString()}
</StyledToggleButton>
);
}
// 60x60 square toggle; font-size/line-height of 0 collapse the shortcut
// text node so only the centered icon is visible.
const StyledToggleButton = styled(ToggleButton)`
  display: flex;
  align-items: center;
  justify-content: center;
  font-size: 0;
  line-height: 0;
  width: 60px;
  height: 60px;
  cursor: pointer;
  color: inherit;
`;
|
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages

from opps import documents

install_requires = ["opps>=0.2"]

# Trove classifiers (duplicate "Operating System :: OS Independent"
# entry removed; quoting normalized).
classifiers = ["Development Status :: 4 - Beta",
               "Intended Audience :: Developers",
               "License :: OSI Approved :: MIT License",
               "Operating System :: OS Independent",
               "Framework :: Opps",
               "Programming Language :: Python",
               "Programming Language :: Python :: 2.7",
               "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
               "Topic :: Software Development :: Libraries :: Python Modules"]

# Prefer the README as the long description; fall back to the package's
# own short description when the file is unreadable (e.g. in an sdist).
# 'except Exception' replaces the original bare 'except:' so that
# KeyboardInterrupt/SystemExit are no longer swallowed.
try:
    long_description = open('README.md').read()
except Exception:
    long_description = documents.__description__

setup(
    name='opps-documents',
    namespace_packages=['opps', 'opps.documents'],
    version=documents.__version__,
    description=documents.__description__,
    long_description=long_description,
    classifiers=classifiers,
    keywords='upload documents opps cms django apps magazines websites',
    author=documents.__author__,
    author_email=documents.__email__,
    url='http://oppsproject.org',
    download_url="https://github.com/opps/opps-documents/tarball/master",
    license=documents.__license__,
    packages=find_packages(exclude=('doc', 'docs',)),
    package_dir={'opps': 'opps'},
    install_requires=install_requires,
    include_package_data=True,
)
|
(function( factory ) {
	if ( typeof define === "function" && define.amd ) {
		define( ["jquery", "../jquery.validate"], factory );
	} else {
		factory( jQuery );
	}
}(function( $ ) {

/*
 * Translated default messages for the jQuery validation plugin.
 * Locale: FI (Finnish; suomi, suomen kieli)
 */
$.extend($.validator.messages, {
	required: "Tämä kenttä on pakollinen.",
	email: "Syötä oikea sähköpostiosoite.",
	url: "Syötä oikea URL osoite.",
	// Typo fix: "oike" is not a Finnish word — corrected to "oikea" in
	// the two date messages below.
	date: "Syötä oikea päivämäärä.",
	dateISO: "Syötä oikea päivämäärä (VVVV-MM-DD).",
	number: "Syötä numero.",
	creditcard: "Syötä voimassa oleva luottokorttinumero.",
	digits: "Syötä pelkästään numeroita.",
	equalTo: "Syötä sama arvo uudestaan.",
	maxlength: $.validator.format("Voit syöttää enintään {0} merkkiä."),
	minlength: $.validator.format("Vähintään {0} merkkiä."),
	rangelength: $.validator.format("Syötä vähintään {0} ja enintään {1} merkkiä."),
	range: $.validator.format("Syötä arvo {0} ja {1} väliltä."),
	max: $.validator.format("Syötä arvo joka on pienempi tai yhtä suuri kuin {0}."),
	min: $.validator.format("Syötä arvo joka on yhtä suuri tai suurempi kuin {0}.")
});

}));
|
/* global define, require */
(function (root, factory) {
	'use strict';

	if (typeof define === 'function' && define.amd) {
		// AMD. Register as an anonymous module.
		define(['seriously'], factory);
	} else if (typeof exports === 'object') {
		// Node/CommonJS
		factory(require('seriously'));
	} else {
		// Browser global; install a minimal Seriously stub so that plugin
		// registration still works when the library has not been loaded.
		if (!root.Seriously) {
			root.Seriously = { plugin: function (name, opt) { this[name] = opt; } };
		}
		factory(root.Seriously);
	}
}(this, function (Seriously) {
	'use strict';

	// Extracts the first run of digits from a sizeMode value like 'source2'.
	var intRegex = /\d+/;

	// 'select' node: exposes N image inputs and forwards exactly one of
	// them (chosen by the 'active' input) as this node's output texture.
	Seriously.plugin('select', function (options) {
		var count,
			me = this,
			i,
			inputs;

		// Re-run this node's custom resize (assigned to this.resize below).
		function resize() {
			me.resize();
		}

		// Point the output texture at the currently active source, then
		// recompute the output dimensions.
		function update() {
			var i = me.inputs.active,
				source;

			source = me.inputs['source' + i];
			me.texture = source && source.texture;

			resize();
		}

		// 'options' may be the source count itself or an options object;
		// default to 4 sources and never allow fewer than 2.
		if (typeof options === 'number' && options >= 2) {
			count = options;
		} else {
			count = options && options.count || 4;
			count = Math.max(2, count);
		}

		inputs = {
			active: {
				type: 'number',
				step: 1,
				min: 0,
				max: count - 1,
				defaultValue: 0,
				update: update,
				updateSources: true
			},
			// How the output size is derived from the sources; numeric
			// string values ('0', 'source1', ...) pin it to one source.
			sizeMode: {
				type: 'enum',
				defaultValue: '0',
				options: [
					'union',
					'intersection',
					'active'
				],
				update: resize
			}
		};

		// One 'sourceN' image input per selectable source, plus the matching
		// sizeMode options ('N' and 'sourceN').
		for (i = 0; i < count; i++) {
			inputs.sizeMode.options.push(i.toString());
			inputs.sizeMode.options.push('source' + i);

			//source
			inputs['source' + i] = {
				type: 'image',
				update: update
			};
		}

		this.uniforms.layerResolution = [1, 1];

		// custom resize method
		this.resize = function () {
			var width,
				height,
				mode = this.inputs.sizeMode,
				i,
				n,
				source,
				a;

			if (mode === 'union') {
				// Output is large enough to contain every connected source.
				width = 0;
				height = 0;
				for (i = 0; i < count; i++) {
					source = this.inputs['source' + i];
					if (source) {
						width = Math.max(width, source.width);
						height = Math.max(height, source.height);
					}
				}
			} else if (mode === 'intersection') {
				// Output is the overlap of all connected sources.
				width = Infinity;
				height = Infinity;
				for (i = 0; i < count; i++) {
					source = this.inputs['source' + i];
					if (source) {
						width = Math.min(width, source.width);
						height = Math.min(height, source.height);
					}
				}
			} else if (mode === 'active') {
				// Output follows whichever source is currently selected.
				i = this.inputs.active;
				source = this.inputs['source' + i];
				width = Math.max(1, source && source.width || 1);
				height = Math.max(1, source && source.height || 1);
			} else {
				// Numeric / 'sourceN' mode: use the first connected source
				// at or before index N (falling back to 1x1 if none).
				width = 1;
				height = 1;
				n = count - 1;
				a = intRegex.exec(this.inputs.sizeMode);
				if (a) {
					n = Math.min(parseInt(a[0], 10), n);
				}
				for (i = 0; i <= n; i++) {
					source = this.inputs['source' + i];
					if (source) {
						width = source.width;
						height = source.height;
						break;
					}
				}
			}

			if (this.width !== width || this.height !== height) {
				this.width = width;
				this.height = height;

				this.emit('resize');
				this.setDirty();
			}

			// Propagate the new dimensions downstream.
			for (i = 0; i < this.targets.length; i++) {
				this.targets[i].resize();
			}
		};

		return {
			initialize: function () {
				this.initialized = true;
				this.shaderDirty = false;
			},
			// Only the active source needs to render for this node.
			requires: function (sourceName) {
				return !!(this.inputs[sourceName] && sourceName === 'source' + this.inputs.active);
			},

			//check the source texture on every draw just in case the source nodes pulls
			//shenanigans with its texture.
			draw: function () {
				var i = me.inputs.active,
					source;

				source = me.inputs['source' + i];
				me.texture = source && source.texture;
			},
			inputs: inputs
		};
	},
	{
		title: 'Select',
		description: 'Select a single source image from a list of source nodes.',
		inPlace: false,
		commonShader: true
	});
}));
|
<?php
/**
* Sample implementation of the Custom Header feature.
*
* You can add an optional custom header image to header.php like so ...
*
<?php if ( get_header_image() ) : ?>
<a href="<?php echo esc_url( home_url( '/' ) ); ?>" rel="home">
<img src="<?php header_image(); ?>" width="<?php echo esc_attr( get_custom_header()->width ); ?>" height="<?php echo esc_attr( get_custom_header()->height ); ?>" alt="">
</a>
<?php endif; // End header image check. ?>
*
* @link https://developer.wordpress.org/themes/functionality/custom-headers/
*
* @package ssnblog
*/
/**
 * Register support for the WordPress core Custom Header feature.
 *
 * The defaults below can be overridden through the
 * 'ssnblog_custom_header_args' filter.
 *
 * @uses ssnblog_header_style()
 */
function ssnblog_custom_header_setup() {
	$args = array(
		'default-image'      => '',
		'default-text-color' => '000000',
		'width'              => 1000,
		'height'             => 250,
		'flex-height'        => true,
		'wp-head-callback'   => 'ssnblog_header_style',
	);

	add_theme_support( 'custom-header', apply_filters( 'ssnblog_custom_header_args', $args ) );
}
add_action( 'after_setup_theme', 'ssnblog_custom_header_setup' );
if ( ! function_exists( 'ssnblog_header_style' ) ) :
	/**
	 * Styles the header image and text displayed on the blog.
	 *
	 * Emits a <style> element that either hides the site title/description
	 * (when header text is disabled) or recolors them with the custom color.
	 *
	 * @see ssnblog_custom_header_setup().
	 */
	function ssnblog_header_style() {
		$header_text_color = get_header_textcolor();

		/*
		 * If no custom options for text are set, let's bail.
		 * get_header_textcolor() options: Any hex value, 'blank' to hide text. Default: HEADER_TEXTCOLOR.
		 */
		if ( HEADER_TEXTCOLOR === $header_text_color ) {
			return;
		}

		// If we get this far, we have custom styles. Let's do this.
		?>
		<style type="text/css">
		<?php
		// Has the text been hidden?
		if ( ! display_header_text() ) :
			?>
			<?php // Accessible hiding: keep the text for screen readers. ?>
			.site-title,
			.site-description {
				position: absolute;
				clip: rect(1px, 1px, 1px, 1px);
			}
		<?php
			// If the user has set a custom color for the text use that.
			else :
				?>
			.site-title a,
			.site-description {
				color: #<?php echo esc_attr( $header_text_color ); ?>;
			}
		<?php endif; ?>
		</style>
		<?php
	}
endif;
|
<?php
namespace Sabre\VObject;
/**
* Google produces vcards with a weird escaping of urls.
*
* VObject will provide a workaround for this, so end-user still get expected
* values.
*/
class GoogleColonEscaping extends \PHPUnit_Framework_TestCase {

    /**
     * The URL property in the fixture below carries Google's non-standard
     * "http\://" colon escaping; the reader should transparently strip it
     * so the caller sees a normal URL.
     */
    function testDecode() {

        $vcard = <<<VCF
BEGIN:VCARD
VERSION:3.0
FN:Evert Pot
N:Pot;Evert;;;
EMAIL;TYPE=INTERNET;TYPE=WORK:[email protected]
BDAY:1985-04-07
item7.URL:http\://www.rooftopsolutions.nl/
END:VCARD
VCF;

        $vobj = Reader::read($vcard);
        $this->assertEquals('http://www.rooftopsolutions.nl/', $vobj->URL->getValue());
    }
}
|
/*
* File: app/view/HomePanel.js
*
* This file was generated by Sencha Architect version 3.0.4.
* http://www.sencha.com/products/architect/
*
* This file requires use of the Sencha Touch 2.3.x library, under independent license.
* License of Sencha Architect does not include license for Sencha Touch 2.3.x. For more
* details see http://www.sencha.com/license or contact [email protected].
*
* This file will be auto-generated each and everytime you save your project.
*
* Do NOT hand edit this file.
*/
// NOTE(review): this file is generated by Sencha Architect (see header);
// edits here will be overwritten on the next project save.
Ext.define('UTalBus.view.HomePanel', {
    extend: 'Ext.Panel',
    alias: 'widget.homepanel',

    requires: [
        'Ext.XTemplate'
    ],

    config: {
        itemId: 'homePanel',
        padding: 10,
        styleHtmlContent: true,
        scrollable: 'vertical',
        // Static "about/roadmap" content, rendered once in initialize().
        tpl: [
            '<h1>UTalBus</h1>',
            '<h4>version 0.2</h4>',
            'Mejoras para la version 0.3',
            '<ul>',
            '<li>',
            'Lista de paraderos actualizada',
            '</li>',
            '<li>',
            'Estimación de llegada a cada paradero',
            '</li>',
            '</ul>',
            'Mejoras para la version 0.4',
            '<ul>',
            '<li>Estimación de llegada a posición actual (usando geolocalización del teléfono)',
            '</li>',
            '</ul>',
            'Puedes encontrar mas información del proyecto <a href="https://wiki.iccutal.cl/wiki/UTalGPS">aquí</a>',
            '</br>',
            '</br>',
            '(C) 2013 - Los Niches Initiative'
        ]
    },

    initialize: function() {
        this.callParent();
        // The template has no data placeholders, so applying it against the
        // panel itself just produces the static HTML above.
        this.setHtml(this.getTpl().applyTemplate(this));
    }
});
|
# -*- coding: utf-8 -*-
"""Test forms."""
from etl.public.forms import LoginForm
from etl.user.forms import RegisterForm
class TestRegisterForm:
    """Register form."""

    def test_validate_user_already_registered(self, user):
        """Enter username that is already registered."""
        register_form = RegisterForm(
            username=user.username,
            email='[email protected]',
            password='example',
            confirm='example',
        )
        assert register_form.validate() is False
        assert 'Username already registered' in register_form.username.errors

    def test_validate_email_already_registered(self, user):
        """Enter email that is already registered."""
        register_form = RegisterForm(
            username='unique',
            email=user.email,
            password='example',
            confirm='example',
        )
        assert register_form.validate() is False
        assert 'Email already registered' in register_form.email.errors

    def test_validate_success(self, db):
        """Register with success."""
        register_form = RegisterForm(
            username='newusername',
            email='[email protected]',
            password='example',
            confirm='example',
        )
        assert register_form.validate() is True
class TestLoginForm:
    """Login form."""

    def test_validate_success(self, user):
        """Login successful."""
        user.set_password('example')
        user.save()
        login_form = LoginForm(username=user.username, password='example')
        assert login_form.validate() is True
        assert login_form.user == user

    def test_validate_unknown_username(self, db):
        """Unknown username."""
        login_form = LoginForm(username='unknown', password='example')
        assert login_form.validate() is False
        assert 'Unknown username' in login_form.username.errors
        assert login_form.user is None

    def test_validate_invalid_password(self, user):
        """Invalid password."""
        user.set_password('example')
        user.save()
        login_form = LoginForm(username=user.username, password='wrongpassword')
        assert login_form.validate() is False
        assert 'Invalid password' in login_form.password.errors

    def test_validate_inactive_user(self, user):
        """Inactive user."""
        user.active = False
        user.set_password('example')
        user.save()
        # Correct username and password, but user is not activated
        login_form = LoginForm(username=user.username, password='example')
        assert login_form.validate() is False
        assert 'User not activated' in login_form.username.errors
|
# Copyright 2009 Google Inc. Released under the GPL v2
# This file contains the classes used for the known kernel versions persistent
# storage
import cPickle, fcntl, os, tempfile
class item(object):
    """Wrap a file item stored in a database."""
    def __init__(self, name, size, timestamp):
        """
        @param name: file name
        @param size: file size in bytes (must be an int)
        @param timestamp: file timestamp (must be an int)
        """
        assert type(size) == int
        assert type(timestamp) == int
        self.name = name
        self.size = size
        self.timestamp = timestamp


    def __repr__(self):
        return ("database.item('%s', %d, %d)" %
                (self.name, self.size, self.timestamp))


    def __eq__(self, other):
        # Return the NotImplemented singleton — NOT the NotImplementedError
        # class, which the original returned; that class is always truthy,
        # so comparisons against non-items silently evaluated as equal.
        # NotImplemented lets Python try the reflected comparison instead.
        if not isinstance(other, item):
            return NotImplemented
        return (self.name == other.name and self.size == other.size and
                self.timestamp == other.timestamp)


    def __ne__(self, other):
        # Propagate NotImplemented rather than negating it (negating the
        # sentinel would produce a meaningless boolean).
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
class database(object):
    """
    This is an Abstract Base Class for the file items database, not strictly
    needed in Python because of the dynamic nature of the language but useful
    to document the expected common API of the implementations.
    """
    def get_dictionary(self):
        """
        Should be implemented to open and read the persistent contents of
        the database and return it as a key->value dictionary.

        @raise NotImplementedError: always, in this base class.
        """
        raise NotImplementedError('get_dictionary not implemented')


    def merge_dictionary(self, values):
        """
        Should be implemented to merge the "values" dictionary into the
        database persistent contents (ie to update existent entries and to add
        those that do not exist).

        @param values: dict of entries to fold into the stored contents.
        @raise NotImplementedError: always, in this base class.
        """
        raise NotImplementedError('merge_dictionary not implemented')
class dict_database(database):
    """
    A simple key->value database that uses standard python pickle dump of
    a dictionary object for persistent storage.
    """
    def __init__(self, path):
        # path of the pickle file; '<path>.lock' alongside it is used for
        # inter-process locking in merge_dictionary().
        self.path = path


    def get_dictionary(self, _open_func=open):
        """
        Return the key/value pairs as a standard dictionary.

        @param _open_func: open()-compatible callable, injectable for tests.
        """
        try:
            fd = _open_func(self.path, 'rb')
        except IOError:
            # no db file, considering as if empty dictionary
            res = {}
        else:
            try:
                res = cPickle.load(fd)
            finally:
                fd.close()

        return res


    def _aquire_lock(self):
        # (sic: "acquire") Take an exclusive flock on '<path>.lock' and
        # return its open fd; closing that fd releases the lock.
        fd = os.open(self.path + '.lock', os.O_RDONLY | os.O_CREAT)
        try:
            # this may block
            fcntl.flock(fd, fcntl.LOCK_EX)
        except Exception, err:
            # don't leak the fd if locking fails
            os.close(fd)
            raise err

        return fd


    def merge_dictionary(self, values):
        """
        Merge the contents of "values" with the current contents of the
        database.
        """
        if not values:
            return

        # use file locking to make the read/write of the file atomic
        lock_fd = self._aquire_lock()
        # make sure we release locks in case of exceptions (in case the
        # process dies the OS will release them for us)
        try:
            contents = self.get_dictionary()
            contents.update(values)

            # use a tempfile/atomic-rename technique to not require
            # synchronization for get_dictionary() calls and also protect
            # against full disk file corruption situations
            fd, fname = tempfile.mkstemp(prefix=os.path.basename(self.path),
                                         dir=os.path.dirname(self.path))
            write_file = os.fdopen(fd, 'wb')
            try:
                try:
                    cPickle.dump(contents, write_file,
                                 protocol=cPickle.HIGHEST_PROTOCOL)
                finally:
                    write_file.close()
                # this is supposed to be atomic on POSIX systems
                os.rename(fname, self.path)
            except Exception:
                # failed to write/rename: drop the temp file, keep old db
                os.unlink(fname)
                raise
        finally:
            # close() releases any locks on that fd
            os.close(lock_fd)
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, Callable, Iterable, Sequence, Tuple
from google.ads.googleads.v8.services.types import google_ads_service
class SearchPager:
    """Iteration helper for paged ``search`` requests.

    Thinly wraps an initial
    :class:`google.ads.googleads.v8.services.types.SearchGoogleAdsResponse`
    and, while iterating over the ``results`` field, transparently issues
    follow-up ``Search`` requests for any remaining pages. Attribute access
    is proxied to the most recently fetched response.
    """
    def __init__(self,
            method: Callable[..., google_ads_service.SearchGoogleAdsResponse],
            request: google_ads_service.SearchGoogleAdsRequest,
            response: google_ads_service.SearchGoogleAdsResponse,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (:class:`google.ads.googleads.v8.services.types.SearchGoogleAdsRequest`):
                The initial request object (copied so that advancing the
                page token does not mutate the caller's request).
            response (:class:`google.ads.googleads.v8.services.types.SearchGoogleAdsResponse`):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = google_ads_service.SearchGoogleAdsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Anything not defined on the pager is looked up on the latest page.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[google_ads_service.SearchGoogleAdsResponse]:
        """Yield each response page, fetching the next one lazily."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[google_ads_service.GoogleAdsRow]:
        for page in self.pages:
            for row in page.results:
                yield row

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
|
#!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs chrome driver tests.
This script attempts to emulate the contract of gtest-style tests
invoked via recipes. The main contract is that the caller passes the
argument:
--isolated-script-test-output=[FILENAME]
json is written to that file in the format detailed here:
https://www.chromium.org/developers/the-json-test-results-format
Optional argument:
--isolated-script-test-filter=[TEST_NAMES]
is a double-colon-separated ("::") list of test names, to run just that subset
of tests. This list is forwarded to the chrome driver test runner.
"""
import argparse
import json
import os
import shutil
import sys
import tempfile
import traceback
import common
class ChromeDriverAdapter(common.BaseIsolatedScriptArgsAdapter):
  """Adapts isolated-script-test arguments to the chrome driver runner."""

  def generate_test_output_args(self, output):
    # Forward the JSON results path straight through to the test runner.
    return ['--isolated-script-test-output', output]

  def generate_test_filter_args(self, test_filter_str):
    if any('--filter' in arg for arg in self.rest_args):
      # Fixed: the original concatenated string literals were missing a
      # space, producing "...with the--isolated-script-test-filter...".
      self.parser.error(
          'can\'t have the test call filter with the '
          '--isolated-script-test-filter argument to the wrapper script')

    # The recipe protocol separates test names with '::'; the chrome
    # driver test runner expects ':'.
    return ['--filter', test_filter_str.replace('::', ':')]
def main():
  """Run the chrome driver tests, returning the runner's exit code."""
  return ChromeDriverAdapter().run_test()
# This is not really a "script test" so does not need to manually add
# any additional compile targets.
def main_compile_targets(args):
  """Write an empty JSON list of compile targets to args.output."""
  json.dump([], args.output)
if __name__ == '__main__':
  # Conform minimally to the protocol defined by ScriptTest.
  if 'compile_targets' in sys.argv:
    # 'run' is intentionally None: this wrapper is executed directly for
    # test runs, so only the compile_targets hook is dispatched here.
    funcs = {
      'run': None,
      'compile_targets': main_compile_targets,
    }
    sys.exit(common.run_script(sys.argv[1:], funcs))
  sys.exit(main())
|
#region License
/* The MIT License
*
* Copyright (c) 2011 Red Badger Consulting
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#endregion
namespace RedBadger.Xpf.Graphics
{
    /// <summary>
    ///     Represents a Texture.
    /// </summary>
    /// <remarks>
    ///     Exposes only the pixel dimensions of the underlying texture.
    /// </remarks>
    public interface ITexture
    {
        /// <summary>
        ///     Gets the height of this texture in pixels.
        /// </summary>
        int Height { get; }

        /// <summary>
        ///     Gets the width of this texture in pixels.
        /// </summary>
        int Width { get; }
    }
}
|
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.hpg.bigdata.app;
import com.beust.jcommander.ParameterException;
import org.opencb.hpg.bigdata.app.cli.CommandExecutor;
import org.opencb.hpg.bigdata.app.cli.local.AlignmentCommandExecutor;
import org.opencb.hpg.bigdata.app.cli.local.LocalCliOptionsParser;
import org.opencb.hpg.bigdata.app.cli.local.SequenceCommandExecutor;
import org.opencb.hpg.bigdata.app.cli.local.VariantCommandExecutor;
/**
* Created by hpccoll1 on 18/05/15.
*/
public class BigDataLocalMain {

    /**
     * Local CLI entry point: parses the command line, prints usage for
     * missing or invalid input and dispatches to the matching executor.
     */
    public static void main(String[] args) {
        LocalCliOptionsParser localCliOptionsParser = new LocalCliOptionsParser();
        if (args == null || args.length == 0) {
            localCliOptionsParser.printUsage();
        }

        try {
            localCliOptionsParser.parse(args);
        } catch (ParameterException e) {
            System.out.println(e.getMessage());
            localCliOptionsParser.printUsage();
            System.exit(-1);
        }

        String parsedCommand = localCliOptionsParser.getCommand();
        if (parsedCommand == null || parsedCommand.isEmpty()) {
            if (localCliOptionsParser.getGeneralOptions().help) {
                localCliOptionsParser.printUsage();
                System.exit(0);
            }
            if (localCliOptionsParser.getGeneralOptions().version) {
                // NOTE(review): this prints usage rather than a version
                // banner — presumably it should print the version; confirm.
                localCliOptionsParser.printUsage();
            }
        } else {    // correct command exist
            CommandExecutor commandExecutor = null;
            // Check if any command or subcommand -h options are present
            if (localCliOptionsParser.getCommandOptions().help || localCliOptionsParser.getCommonCommandOptions().help) {
                localCliOptionsParser.printUsage();
            } else {
                // get the subcommand and printUsage if empty
                String parsedSubCommand = localCliOptionsParser.getSubCommand();
                if (parsedSubCommand == null || parsedSubCommand.isEmpty()) {
                    localCliOptionsParser.printUsage();
                } else {
                    switch (parsedCommand) {
                        case "sequence":
                            commandExecutor = new SequenceCommandExecutor(localCliOptionsParser.getSequenceCommandOptions());
                            break;
                        case "alignment":
                            commandExecutor = new AlignmentCommandExecutor(localCliOptionsParser.getAlignmentCommandOptions());
                            break;
                        case "variant":
                            commandExecutor = new VariantCommandExecutor(localCliOptionsParser.getVariantCommandOptions());
                            break;
                        default:
                            // println instead of the original printf: printf
                            // would treat any '%' in the command name as a
                            // format specifier (throwing at runtime), and the
                            // original call also lacked a trailing newline.
                            System.out.println("ERROR: not valid command: '" + parsedCommand + "'");
                            localCliOptionsParser.printUsage();
                            break;
                    }

                    if (commandExecutor != null) {
                        try {
                            commandExecutor.execute();
                        } catch (Exception e) {
                            e.printStackTrace();
                            System.exit(1);
                        }
                    }
                }
            }
        }
    }
}
|
from django.http import HttpResponseRedirect
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from vanilla import UpdateView
from deck.views import BaseEventView
from deck.models import Event
from .forms import InviteForm
from .models import Jury
class JuryView(UpdateView):
    """Detail/update view for the Jury attached to an event, looked up by slug."""
    template_name = 'jury/jury_detail.html'
    lookup_field = 'slug'
    model = Jury

    def get_object(self):
        # Resolve the jury through its related event's slug.
        event_slug = self.kwargs.get('slug')
        return Jury.objects.get(event__slug=event_slug)
class InviteEvent(BaseEventView, UpdateView):
    """Invite a user (by e-mail) to join the jury of an event."""
    template_name = 'event/jury_invite.html'
    form_class = InviteForm

    def form_valid(self, form):
        """Add the invited user to the jury, reporting success or failure,
        then redirect back to the event's jury page."""
        try:
            form.add_to_jury()
        # 'except X as e' replaces the legacy 'except X, e' form, which is a
        # syntax error on Python 3 (and works on Python 2.6+ as well).
        except ValidationError as e:
            messages.warning(self.request, e.message)
        else:
            user = User.objects.get(email=form.cleaned_data.get('email'))
            messages.success(
                self.request,
                _(u'The "@%s" are successfully joined to the Jury.') % user)
        return HttpResponseRedirect(
            reverse('jury_event', kwargs={'slug': self.get_object().slug}),
        )

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Require an authenticated user for every HTTP method.
        return super(InviteEvent, self).dispatch(*args, **kwargs)
def remove_user_from_event_jury(request, slug, user_pk):
    """Detach a user from the jury of the event identified by ``slug``,
    flash a success message and redirect to the event's jury page."""
    event = Event.objects.get(slug=slug)
    member = User.objects.get(pk=user_pk)
    event.jury.users.remove(member)
    messages.success(
        request,
        _(u'The "@%s" was successfully removed from the Jury.') % member)
    return HttpResponseRedirect(
        reverse('jury_event', kwargs={'slug': event.slug}),
    )
|
/*
* arch/arm/mach-ns9xxx/include/mach/system.h
*
* Copyright (C) 2006-2008 by Digi International Inc.
* All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published by
* the Free Software Foundation.
*/
#ifndef __ASM_ARCH_SYSTEM_H
#define __ASM_ARCH_SYSTEM_H
#include <asm/proc-fns.h>
#include <mach/processor.h>
/* Idle hook: enter wait-for-interrupt, except on the ns921x family. */
static inline void arch_idle(void)
{
	/*
	 * When a Camry (i.e. ns921x) executes
	 *
	 *	mcr p15, 0, r0, c7, c0, 4 @ Wait for interrupt
	 *
	 * the CPU is stopped, so are all timers. This is not something I want
	 * to handle. As the "wait for interrupt" instruction is part of
	 * cpu_arm926_do_idle, it's not called for it.
	 */
	if (!processor_is_ns921x())
		cpu_do_idle();
}
/*
 * Machine restart hook: dispatch to the processor-specific reset routine,
 * BUG() when no handler matches.
 *
 * NOTE(review): the #include inside the function body and the
 * "RLS"/"RLS_REMOVE" fragments look like release-script artifacts; with
 * RLS_REMOVE defined, "RLS ns9360_reset(mode);" would not compile.
 * Confirm against the upstream version of this header.
 */
static inline void arch_reset(char mode, const char *cmd)
{
#ifdef CONFIG_PROCESSOR_NS921X
#include "../../processor-ns921x.h"
	if (processor_is_ns921x())
		ns921x_reset(mode);
	else
#endif
#ifdef RLS_REMOVE
#ifdef CONFIG_PROCESSOR_NS9360
	if (processor_is_ns9360())
		RLS ns9360_reset(mode);
	else
#endif
#endif
	BUG();
	/* NOTE(review): this second BUG() is reached unconditionally even after
	 * a reset call above; upstream has a single BUG() here -- confirm. */
	BUG();
}
#endif /* ifndef __ASM_ARCH_SYSTEM_H */
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases.group import GroupEndpoint
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.paginator import DateTimePaginator, OffsetPaginator, Paginator
from sentry.api.serializers import serialize
from sentry.api.serializers.models.tagvalue import UserTagValueSerializer
from sentry.models import GroupTagValue, TagKey, TagKeyStatus, Group
from sentry.utils.apidocs import scenario
@scenario('ListTagValues')
def list_tag_values_scenario(runner):
    # Documentation example: list values of the 'browser' tag on the first
    # issue of the runner's default project.
    group = Group.objects.filter(project=runner.default_project).first()
    runner.request(
        method='GET',
        path='/issues/%s/tags/%s/values/' % (group.id, 'browser'),
    )
class GroupTagKeyValuesEndpoint(GroupEndpoint):
    doc_section = DocSection.EVENTS

    # XXX: this scenario does not work for some inexplicable reasons
    # @attach_scenarios([list_tag_values_scenario])
    def get(self, request, group, key):
        """
        List a Tag's Values
        ```````````````````
        Return a list of values associated with this key for an issue.
        :pparam string issue_id: the ID of the issue to retrieve.
        :pparam string key: the tag key to look the values up for.
        :auth: required
        """
        # XXX(dcramer): kill sentry prefix for internal reserved tags
        if TagKey.is_reserved_key(key):
            lookup_key = 'sentry:{0}'.format(key)
        else:
            lookup_key = key

        # The tag key must exist and be visible for this project.
        tag_key_qs = TagKey.objects.filter(
            project=group.project_id,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
        if not tag_key_qs.exists():
            raise ResourceDoesNotExist

        value_qs = GroupTagValue.objects.filter(
            group_id=group.id,
            key=lookup_key,
        )

        # Map the requested sort mode onto (ordering, paginator class);
        # anything unrecognised falls back to id ordering.
        sort_options = {
            'date': ('-last_seen', DateTimePaginator),
            'age': ('-first_seen', DateTimePaginator),
            'freq': ('-times_seen', OffsetPaginator),
        }
        order_by, paginator_cls = sort_options.get(
            request.GET.get('sort'), ('-id', Paginator))

        # The 'user' tag gets a richer serializer with user details.
        serializer_cls = UserTagValueSerializer() if key == 'user' else None

        return self.paginate(
            request=request,
            queryset=value_qs,
            order_by=order_by,
            paginator_cls=paginator_cls,
            on_results=lambda x: serialize(x, request.user, serializer_cls),
        )
|
////////////////////////////////////////////////////////////////////////////////
// The following FIT Protocol software provided may be used with FIT protocol
// devices only and remains the copyrighted property of Dynastream Innovations Inc.
// The software is being provided on an "as-is" basis and as an accommodation,
// and therefore all warranties, representations, or guarantees of any kind
// (whether express, implied or statutory) including, without limitation,
// warranties of merchantability, non-infringement, or fitness for a particular
// purpose, are specifically disclaimed.
//
// Copyright 2017 Dynastream Innovations Inc.
////////////////////////////////////////////////////////////////////////////////
// ****WARNING**** This file is auto-generated! Do NOT edit this file.
// Profile Version = 20.38Release
// Tag = production/akw/20.38.00-0-geccbce3
////////////////////////////////////////////////////////////////////////////////
#if !defined(FIT_CAPABILITIES_MESG_LISTENER_HPP)
#define FIT_CAPABILITIES_MESG_LISTENER_HPP
#include "fit_capabilities_mesg.hpp"
namespace fit
{
// Listener interface for receiving decoded Capabilities messages.
// (File header says auto-generated -- regenerate rather than hand-edit logic.)
class CapabilitiesMesgListener
{
public:
    virtual ~CapabilitiesMesgListener() {}

    // Invoked once for every decoded CapabilitiesMesg.
    virtual void OnMesg(CapabilitiesMesg& mesg) = 0;
};
} // namespace fit
#endif // !defined(FIT_CAPABILITIES_MESG_LISTENER_HPP)
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
import numpy as np
class FOM:
    """First-order method: fixed-step descent along the normalized numerical
    gradient of ``function(x, *args)`` starting from ``x0``.

    Iteration stops once the change in the objective between successive
    iterates drops to ``stop`` or less.
    """

    def __init__(self, function, x0, args=(), step=0.1, stop=0.001):
        print("Constructing FOM instance...")
        self.step = step                              # fixed step length
        self.function = lambda x: function(x, *args)  # objective closed over extra args
        self.stop = stop                              # threshold on |f(x') - f(x)|
        self.x0 = x0                                  # starting point

    def optimize(self):
        """Run the descent loop; returns the final iterate (a numpy array)."""
        x = self.x0
        print("   i ; f(x) ; |x| ; grad ; delta")
        i = 1
        while True:
            # Calcular un subgradiente (compute a subgradient)
            gradient = self.numerical_gradient(x)
            # Fixed-length step in the descent direction
            new_x = x - gradient * self.step
            # Progress on the objective between iterates
            delta = abs(self.function(new_x) - self.function(x))
            print("{: >4d}; {: <8.5f}; {: <8.5f}; {: <8.5f}; {: <8.5f}".format(
                i,
                self.function(x),
                np.linalg.norm(x),
                np.linalg.norm(gradient),
                delta
            ))
            # Stopping criterion: objective barely changed
            if delta <= self.stop:
                return new_x
            else:
                x = new_x
            i += 1

    def numerical_gradient(self, x, gradient_epsilon=0.00001):
        """Central-difference gradient of the objective at ``x``, normalized to
        unit length (returned unnormalized when it is the zero vector)."""
        grad = np.zeros(len(x))
        for i in range(len(x)):
            # Perturb coordinate i symmetrically by +/- epsilon.
            # (Removed a stray per-coordinate debug print that spammed stdout.)
            temp_x = [np.copy(x), np.copy(x)]
            temp_x[0][i] = temp_x[0][i] + gradient_epsilon
            temp_x[1][i] = temp_x[1][i] - gradient_epsilon
            grad[i] = (self.function(temp_x[0]) - self.function(temp_x[1])) \
                / (2 * gradient_epsilon)
        norm = np.linalg.norm(grad)
        # Reuse the norm computed above instead of recomputing it.
        return grad / norm if norm > 0 else grad
if __name__ == '__main__':
    def function(x):
        # Simple convex quadratic test objective: x^T A x + 10 with A = I.
        A = np.identity(4)
        return x.T.dot(A).dot(x) + 10

    x0 = np.array([1000, 98, 40, 10])
    # Pass step/stop as keywords: positionally, 0.1 would be swallowed by the
    # 'args' parameter and crash the first objective call (function(x, *0.1)).
    fom = FOM(function, x0, step=0.1, stop=0.001)
    print(list(map(lambda x: round(x, 1), fom.optimize())))
|
package it.subtree.filmclub.data.db;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.net.Uri;
import android.provider.BaseColumns;
/**
 * Contract for the movies ContentProvider: authority, content URIs and the
 * movies table schema.
 */
public class MoviesContract {

    /** Provider authority (matches the application package). */
    public static final String AUTHORITY = "it.subtree.filmclub";
    /** Base content:// URI for all provider paths. */
    public static final Uri BASE_CONTENT_URI = Uri.parse("content://" + AUTHORITY);
    /** Path segment for the movies collection. */
    public static final String PATH_MOVIE = "movies";

    /** Column and URI definitions for the "movies" table. */
    public static final class MovieEntry implements BaseColumns {
        public static final Uri CONTENT_URI = BASE_CONTENT_URI.buildUpon().appendPath(PATH_MOVIE).build();
        public static final String CONTENT_TYPE = ContentResolver.CURSOR_DIR_BASE_TYPE + "/" + AUTHORITY + "/" + PATH_MOVIE;

        public static final String TABLE_NAME = "movies";
        // Column names as stored in the local database.
        public static final String COLUMN_MOVIE_ID = "movie_id";
        public static final String COLUMN_MOVIE_TITLE = "original_title";
        public static final String COLUMN_MOVIE_POSTER_PATH = "poster_path";
        public static final String COLUMN_MOVIE_OVERVIEW = "overview";
        public static final String COLUMN_MOVIE_VOTE_AVERAGE = "vote_average";
        public static final String COLUMN_MOVIE_RELEASE_DATE = "release_date";

        /** Builds the content URI for a single movie row id. */
        public static Uri movieUriWithId(long id) {
            return ContentUris.withAppendedId(CONTENT_URI, id);
        }
    }
}
|
'use strict';
const Bookshelf = require('../bookshelf.js');
require('./user');
require('./post');
// Bookshelf model for the 'ratings' table.
// Uses a non-default primary key ('idAttr') and camelCase timestamp columns.
module.exports = Bookshelf.model('Rating', {
    tableName: 'ratings',
    idAttribute: 'idAttr',
    hasTimestamps: ['createdAt', 'updatedAt'],
    // Owning user (ratings.userId -> User).
    user: function() {
        return this.belongsTo('User', 'userId');
    },
    // Rated post (ratings.postId -> Post).
    post: function() {
        return this.belongsTo('Post', 'postId');
    },
});
|
#define BOOST_TEST_MODULE redis_client_lists_test
#include <boost/test/included/unit_test.hpp>
#include <string>
#include "../src/cpp_redis_client.h"
#include "../src/string_reply.h"
BOOST_AUTO_TEST_SUITE(redis_client_lists_test)
CppRedisClient::RedisClient r("localhost");
// LINDEX: element access by index, negative indexes count from the tail.
BOOST_AUTO_TEST_CASE(lindex)
{
    const std::string key = "lindex_test_key";
    // LPUSH inserts at the head, so push "World" first and "Hello" second to
    // end up with ["Hello", "World"]; the original pushed them in the reverse
    // order, which contradicts the assertions below.
    r.lpush(key, "World");
    r.lpush(key, "Hello");
    CppRedisClient::StringReply reply = r.lindex(key, 0);
    BOOST_CHECK(reply == "Hello");
    reply = r.lindex(key, -1);
    BOOST_CHECK(reply == "World");
    // Out-of-range index yields a null reply.
    reply = r.lindex(key, 3);
    BOOST_CHECK(reply.isNull());
    r.del(key);
}
// LLEN: length of the list after two pushes.
BOOST_AUTO_TEST_CASE(llen)
{
    const std::string key = "llen_test_key";
    r.lpush(key, "Hello");
    r.lpush(key, "World");
    const size_t len = r.llen(key);
    BOOST_CHECK(len == 2);
    r.del(key);  // clean up the test key
}
// LPOP: removes and returns the head element; RPUSH preserved insert order.
BOOST_AUTO_TEST_CASE(lpop)
{
    const std::string key = "lpop_test_key";
    r.rpush(key, "one");
    r.rpush(key, "two");
    r.rpush(key, "three");
    CppRedisClient::StringReply reply = r.lpop(key);
    BOOST_CHECK(reply == "one");
    reply = r.lpop(key);
    BOOST_CHECK(reply == "two");
    reply = r.lpop(key);
    BOOST_CHECK(reply == "three");
    r.del(key);
}
// LPUSH: the return value is the list length after the insert.
BOOST_AUTO_TEST_CASE(lpush)
{
    const std::string key = "lpush_test_key";
    size_t len = r.lpush(key, "World");
    BOOST_CHECK(len == 1);
    len = r.lpush(key, "Hello");
    BOOST_CHECK(len == 2);
    r.del(key);
}
// LPUSHX: pushes only when the key already exists.
BOOST_AUTO_TEST_CASE(lpushx)
{
    const std::string key = "lpushx_test_key";
    // LPUSHX on a non-existing key performs no operation and returns 0
    // (the original expected 1 here, contradicting documented LPUSHX
    // semantics, and would fail against a real server).
    size_t len = r.lpushx(key, "World");
    BOOST_CHECK(len == 0);
    // Create the list first, then LPUSHX succeeds and returns the new length.
    len = r.lpush(key, "World");
    BOOST_CHECK(len == 1);
    len = r.lpushx(key, "Hello");
    BOOST_CHECK(len == 2);
    r.del(key);
}
// LRANGE: inclusive ranges; out-of-range bounds are clamped by the server.
BOOST_AUTO_TEST_CASE(lrange)
{
    const std::string key = "lrange_test_key";
    r.rpush(key, "one");
    r.rpush(key, "two");
    // one
    std::vector<std::string> replys = r.lrange(key, 0, 0);
    // Fixed typo: replys.siz() -> replys.size() (did not compile).
    BOOST_REQUIRE(replys.size() == 1 && replys[0] == "one");
    // one two (negative start is clamped to the head)
    replys = r.lrange(key, -3, 1);
    BOOST_REQUIRE(replys.size() == 2 && replys[0] == "one" && replys[1] == "two");
    // one two (both bounds out of range are clamped)
    replys = r.lrange(key, -100, 100);
    BOOST_REQUIRE(replys.size() == 2 && replys[0] == "one" && replys[1] == "two");
    // empty when start is past the end
    replys = r.lrange(key, 90, 100);
    BOOST_CHECK(replys.empty());
    r.del(key);
}
// LREM with a negative count removes matches scanning from the tail.
BOOST_AUTO_TEST_CASE(lrem)
{
    const std::string key = "lrem_test_key";
    // Build ["Hello", "Hello", "foo", "Hello"]; rpush returns the new length.
    size_t len = r.rpush(key, "Hello");
    BOOST_REQUIRE(len == 1);
    len = r.rpush(key, "Hello");
    BOOST_REQUIRE(len == 2);
    len = r.rpush(key, "foo");
    BOOST_REQUIRE(len == 3);
    len = r.rpush(key, "Hello");
    BOOST_REQUIRE(len == 4);
    // count = -2: remove two "Hello" entries starting from the tail.
    len = r.lrem(key, -2, "Hello");
    BOOST_REQUIRE(len == 2);
    std::vector<std::string> replys = r.lrange(key, 0, -1);
    BOOST_REQUIRE(replys.size() == 2 && replys[0] == "Hello" && replys[1] == "foo");
    r.del(key);
}
// LSET replaces elements in place; negative indexes count from the tail.
BOOST_AUTO_TEST_CASE(lset)
{
    const std::string key = "lset_test_key";
    r.rpush(key, "one");
    r.rpush(key, "two");
    r.rpush(key, "three");
    r.lset(key, 0, "four");   // head -> "four"
    r.lset(key, -2, "five");  // second-from-last -> "five"
    std::vector<std::string> replys = r.lrange(key, 0, -1);
    BOOST_REQUIRE(replys.size() == 3 && replys[0] == "four" && replys[1] == "five" && replys[2] == "three");
    r.del(key);
}
// LTRIM keeps only the requested inclusive range (here: drop the head).
BOOST_AUTO_TEST_CASE(ltrim)
{
    const std::string key = "ltrim_test_key";
    r.rpush(key, "one");
    r.rpush(key, "two");
    r.rpush(key, "three");
    r.ltrim(key, 1, -1);
    std::vector<std::string> replys = r.lrange(key, 0, -1);
    BOOST_REQUIRE(replys.size() == 2 && replys[0] == "two" && replys[1] == "three");
    r.del(key);
}
// RPOP removes and returns the tail element.
BOOST_AUTO_TEST_CASE(rpop)
{
    // Fixed typo: std::sting -> std::string (did not compile).
    const std::string key = "rpop_test_key";
    r.rpush(key, "one");
    r.rpush(key, "two");
    r.rpush(key, "three");
    CppRedisClient::StringReply reply = r.rpop(key);
    BOOST_CHECK(reply == "three");
    std::vector<std::string> replys = r.lrange(key, 0, -1);
    BOOST_REQUIRE(replys.size() == 2 && replys[0] == "one" && replys[1] == "two");
    // Added the missing cleanup so later runs start from a clean key.
    r.del(key);
}
// RPOPLPUSH moves the tail of one list onto the head of another.
BOOST_AUTO_TEST_CASE(rpoplpush)
{
    const std::string key = "rpoplpush_test_key";
    const std::string key_other = "myother_rpoplpush_test_key";
    r.rpush(key, "one");
    r.rpush(key, "two");
    r.rpush(key, "three");
    r.rpoplpush(key, key_other);
    // Source keeps ["one", "two"]; destination received ["three"].
    std::vector<std::string> replys = r.lrange(key, 0, -1);
    BOOST_REQUIRE(replys.size() == 2 && replys[0] == "one" && replys[1] == "two");
    replys = r.lrange(key_other, 0, -1);
    BOOST_REQUIRE(replys.size() == 1 && replys[0] == "three");
    r.del(key);
    r.del(key_other);
}
// RPUSH appends at the tail, preserving insertion order.
BOOST_AUTO_TEST_CASE(rpush)
{
    const std::string key = "rpush_test_key";
    r.rpush(key, "Hello");
    r.rpush(key, "World");
    std::vector<std::string> replys = r.lrange(key, 0, -1);
    BOOST_REQUIRE(replys.size() == 2 && replys[0] == "Hello" && replys[1] == "World");
    r.del(key);
}
// RPUSHX: appends only when the key already exists.
BOOST_AUTO_TEST_CASE(rpushx)
{
    const std::string key = "rpushx_test_key";
    const std::string key_other = "other_rpushx_test_key";
    // The list must be created with RPUSH first: the original used RPUSHX on
    // a missing key, which is a no-op and would leave the list empty,
    // contradicting the size() == 2 assertion below.
    r.rpush(key, "Hello");
    r.rpushx(key, "World");
    // RPUSHX against a non-existing key does nothing and returns 0.
    size_t len = r.rpushx(key_other, "Hello");
    BOOST_REQUIRE(len == 0);
    std::vector<std::string> replys = r.lrange(key, 0, -1);
    BOOST_REQUIRE(replys.size() == 2 && replys[0] == "Hello" && replys[1] == "World");
    replys = r.lrange(key_other, 0, -1);
    BOOST_CHECK(replys.empty());
    r.del(key);
    r.del(key_other);
}
BOOST_AUTO_TEST_SUITE_END()
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
import os
import xmlrpc.client
from socket import error as socket_error
from flexget import plugin
from flexget.event import event
from flexget.utils.template import RenderError
log = logging.getLogger('aria2')
class OutputAria2(object):
    """
    Simple Aria2 output

    Example::

        aria2:
          path: ~/downloads/
    """

    # Config schema: only 'path' is required; everything else has a default.
    schema = {
        'type': 'object',
        'properties': {
            'server': {'type': 'string', 'default': 'localhost'},
            'port': {'type': 'integer', 'default': 6800},
            'secret': {'type': 'string', 'default': ''},
            'username': {'type': 'string', 'default': ''},  # NOTE: To be deprecated by aria2
            'password': {'type': 'string', 'default': ''},
            'path': {'type': 'string'},
            'filename': {'type': 'string'},
            'options': {
                'type': 'object',
                'additionalProperties': {'oneOf': [{'type': 'string'}, {'type': 'integer'}]},
            },
        },
        'required': ['path'],
        'additionalProperties': False,
    }

    def aria2_connection(self, server, port, username=None, password=None):
        """Build an XML-RPC proxy to the aria2 daemon and return its 'aria2'
        namespace. Raises PluginError on protocol, XML-RPC or socket errors."""
        if username and password:
            userpass = '%s:%s@' % (username, password)
        else:
            userpass = ''
        url = 'http://%s%s:%s/rpc' % (userpass, server, port)
        log.debug('aria2 url: %s' % url)
        log.info('Connecting to daemon at %s', url)
        try:
            return xmlrpc.client.ServerProxy(url).aria2
        except xmlrpc.client.ProtocolError as err:
            raise plugin.PluginError(
                'Could not connect to aria2 at %s. Protocol error %s: %s'
                % (url, err.errcode, err.errmsg),
                log,
            )
        except xmlrpc.client.Fault as err:
            raise plugin.PluginError(
                'XML-RPC fault: Unable to connect to aria2 daemon at %s: %s'
                % (url, err.faultString),
                log,
            )
        except socket_error as e:
            raise plugin.PluginError(
                'Socket connection issue with aria2 daemon at %s: %s' % (url, e), log
            )
        except Exception:
            # 'except Exception' instead of the original bare 'except:', which
            # would also swallow SystemExit and KeyboardInterrupt.
            log.debug('Unexpected error during aria2 connection', exc_info=True)
            raise plugin.PluginError('Unidentified error during connection to aria2 daemon', log)

    def prepare_config(self, config):
        """Fill in defaults for the optional settings and return the config."""
        config.setdefault('server', 'localhost')
        config.setdefault('port', 6800)
        config.setdefault('username', '')
        config.setdefault('password', '')
        config.setdefault('secret', '')
        config.setdefault('options', {})
        return config

    def on_task_output(self, task, config):
        """Send every accepted entry to aria2 (skipped in --learn mode)."""
        # don't add when learning
        if task.options.learn:
            return
        config = self.prepare_config(config)
        aria2 = self.aria2_connection(
            config['server'], config['port'], config['username'], config['password']
        )
        for entry in task.accepted:
            if task.options.test:
                log.verbose('Would add `%s` to aria2.', entry['title'])
                continue
            try:
                self.add_entry(aria2, entry, config)
            except socket_error as se:
                entry.fail('Unable to reach Aria2: %s' % se)
            except xmlrpc.client.Fault as err:
                log.critical('Fault code %s message %s', err.faultCode, err.faultString)
                entry.fail('Aria2 communication Fault')
            except Exception as e:
                log.debug('Exception type %s', type(e), exc_info=True)
                raise

    def add_entry(self, aria2, entry, config):
        """
        Add entry to Aria2
        """
        options = config['options']
        try:
            options['dir'] = os.path.expanduser(entry.render(config['path']).rstrip('/'))
        except RenderError as e:
            entry.fail('failed to render \'path\': %s' % e)
            return
        if 'filename' in config:
            try:
                options['out'] = os.path.expanduser(entry.render(config['filename']))
            except RenderError as e:
                entry.fail('failed to render \'filename\': %s' % e)
                return
        secret = None
        if config['secret']:
            secret = 'token:%s' % config['secret']
        # handle torrent files
        if 'torrent' in entry:
            if 'file' in entry:
                torrent_file = entry['file']
            elif 'location' in entry:
                # in case download plugin moved the file elsewhere
                torrent_file = entry['location']
            else:
                entry.fail('Cannot find torrent file')
                return
            if secret:
                return aria2.addTorrent(
                    secret, xmlrpc.client.Binary(open(torrent_file, mode='rb').read()), [], options
                )
            return aria2.addTorrent(
                xmlrpc.client.Binary(open(torrent_file, mode='rb').read()), [], options
            )
        # handle everything else (except metalink -- which is unsupported)
        # so magnets, https, http, ftp .. etc
        if secret:
            return aria2.addUri(secret, [entry['url']], options)
        return aria2.addUri([entry['url']], options)
@event('plugin.register')
def register_plugin():
    # Register this output under the 'aria2' config key, plugin API v2.
    plugin.register(OutputAria2, 'aria2', api_ver=2)
|
/***************************************************************************
qgscsexception.h - Coordinate System Exception
-------------------
begin : 2004-12-29
copyright : (C) 2004 by Gary Sherman
email : sherman at mrcc dot com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#ifndef QGSCSEXCEPTION_H
#define QGSCSEXCEPTION_H
#include "qgsexception.h"
/** \ingroup core
* Custom exception class for Coordinate Reference System related exceptions.
*/
class CORE_EXPORT QgsCsException : public QgsException
{
  public:
    //! Constructs a CRS exception carrying the given description text.
    QgsCsException( QString const &what ) : QgsException( what ) {}
};
#endif //QGSCSEXCEPTION_H
|
"use strict";
function View(name, render, sound, clickables, back) {
    // Wrap a bare frame in a single-frame, non-looping Animation.
    this.render = (render instanceof Animation) ? render : new Animation([render], 0, false);
    this.sound = sound;
    this.clickables = (clickables !== undefined) ? clickables : [];
    this.back = back;

    // Return the clickable hit by mousePos, or null. Scans back-to-front so
    // that later-registered clickables win, as in the original.
    this.checkClickables = function(mousePos) {
        for (var idx = this.clickables.length - 1; idx >= 0; idx--) {
            if (this.clickables[idx].checkClick(mousePos)) {
                return this.clickables[idx];
            }
        }
        return null;
    };
}
function Clickable(name, bounds, text, newView, sound, onClick) {
    this.name = name;
    this.bounds = bounds; // Unit space rect
    this.text = text;
    this.newView = newView;
    this.sound = sound;
    this.onClick = onClick;
    this.timesClicked = 0;

    // True when mousePos falls inside the bounds scaled to screen space.
    this.checkClick = function(mousePos) {
        var screenRect = this.bounds.copy().scale(SCREEN_DIM);
        return screenRect.collidePoint(mousePos);
    };

    // Fire the click: bump the counter, replay the sound, run the callback.
    this.activate = function() {
        this.timesClicked++;
        if (this.sound) {
            this.sound.currentTime = 0;
            this.sound.play();
        }
        if (this.onClick) {
            this.onClick();
        }
    };
}
|
// PR c++/51707
// { dg-do compile { target c++11 } }
// S: literal type with a user-provided trivial constexpr default constructor.
struct S {
    constexpr S() {}
};
// T: holds a const reference member, brace-initialized in a constexpr ctor.
struct T {
    constexpr T(S const& s) : s{s} {}
    S const& s;
};
// Binding the constexpr reference member to a namespace-scope constexpr
// object must be accepted (regression check for the PR cited above).
constexpr S s {};
constexpr T t { s };
|
/******************************************************************************
*
* graph_connectone.c
* Create the minimum spanning tree of the connections for one author
*
* Copyright (C) 2016 Simone Conti
*
* This file is part of Cygne
*
* Cygne is free software: you can redistribute it and/or modify it under the
* terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* Cygne is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
******************************************************************************/
#include <cygne/peergraph.h>
/*
Create the minimum spanning tree of the connections for one author
*/
/*
 * Create the minimum spanning tree of the connections for one author.
 *
 * Writes a Graphviz "graph" description rooted at the author whose key is
 * 'center', built from the peer/paper data in 'doc'. Output goes to 'fname'
 * (or "graph.gv" when fname trims to nothing). Returns 0 on success, -1 on
 * error.
 *
 * NOTE(review): the early error returns below leak 'outname', 'fp' and the
 * work arrays; NCON/NATU-sized buffers are filled by the read helpers with
 * no visible bounds check here -- confirm against those helpers.
 */
int
peer_graph_connectone(xmlDocPtr doc, const char *center, const char *fname)
{
    /* Resolve the output file name; default to graph.gv. */
    char *outname = str_trim(fname);
    if (outname==NULL) {
        outname = str_copy(NULL, "graph.gv", -1);
    }
    FILE *fp = fopen(outname, "w");
    if (fp==NULL) {
        fprintf(stderr, "ERROR! Impossible to open <%s> to write!\n", outname);
        free(outname);
        return -1;
    }
    int ret;
    int v, w, wmax, imax, i, j;
    int nato=0, ncon=0;   /* number of authors / connections read */
    int pos, posmax;      /* BFS queue head / tail */
    int peer_num;
    char *name1, *name2;
    char **peer_keys, **peer_names;
    /* Edge endpoints (au1[i] -- au2[i]) plus per-author BFS bookkeeping. */
    int *au1=malloc(NCON*sizeof(int));
    int *au2=malloc(NCON*sizeof(int));
    int *status=malloc(NATU*sizeof(int));  /* 1 = not yet visited */
    int *queue=malloc(NATU*sizeof(int));
    int *imp=malloc(NATU*sizeof(int));     /* per-author impact score */
    int *dist=malloc(NATU*sizeof(int));    /* BFS distance from the center */
    char **aut=malloc(NATU*sizeof(char*));
    if (au1==NULL || au2==NULL || status==NULL || queue==NULL || imp==NULL || dist==NULL || aut==NULL ){
        fprintf(stderr, "ERROR! Memory allocation failed\n\n");
        exit(EXIT_FAILURE);
    }
    for (i=0; i<NATU; i++) {
        aut[i]=NULL;
        status[i] = 1;
        queue[i] = -1;
        imp[i] = 0;
        dist[i] = 0;
    }
    /* Read peers */
    ret = peer_graph_readauthors(doc, &peer_num, &peer_keys, &peer_names);
    if (ret!=0) return -1;
    /* Read papers.txt and extract all couples of collaborations */
    ret = peer_graph_readpapers(doc, &aut, &nato, &au1, &au2, &ncon, &imp);
    if (ret!=0) return -1;
    printf("peer_num = %d; nato = %d\n", peer_num, nato);
    /* Get the center of the graph */
    /* Center contains the PeerID of the author */
    v = -1;
    for (i=0; i<nato; i++) {
        if (strcmp(center, aut[i])==0) {
            v = i;
        }
    }
    if (v==-1) {
        fprintf(stderr, "ERROR! Impossible to find author <%s>!\n", center);
        return -1;
    }
    /* Get the fullname of that peerid */
    name1 = NULL;
    for (j=0; j<peer_num; j++) {
        if (strcmp(peer_keys[j], aut[v])==0) {
            name1=peer_names[j];
        }
    }
    if (name1==NULL) {
        fprintf(stderr, "ERROR! PeerID is not in paper list!\n");
        return -1;
    }
    /* Print graph header */
    fprintf(fp, "graph \"G\" {\n");
    fprintf(fp, "    graph [ overlap=scale, splines=true ];\n");
    fprintf(fp, "    node  [ style=filled ];\n");
    fprintf(fp, "    edge  [colorscheme=rdylbu11, penwidth=2 ];\n");
    fprintf(fp, "    root=\"%s\";\n", name1);
    fprintf(fp, "    \"%s\" [fillcolor=yellow]\n", name1);
    /* Do Breadth-first search. Modified to search the element with maximum impact at each level */
    posmax=0;
    pos=0;
    queue[posmax] = v;
    posmax++;
    status[v]=0;
    while (pos<posmax) {
        /* Dequeue the next author to expand. */
        v=queue[pos];
        pos++;
        for (i=0; i<ncon; i++) {
            /* Among v's unvisited neighbours, pick the one with the highest
             * impact; each outer pass enqueues at most one neighbour.
             * NOTE(review): this makes the expansion O(ncon^2) per node --
             * presumably intentional to order children by impact; confirm. */
            wmax=-1;
            imax=-1;
            for (j=0; j<ncon; j++) {
                w=-1;
                if (au1[j]==v) { w=au2[j]; };
                if (au2[j]==v) { w=au1[j]; };
                if (w!=-1 && status[w]==1) {
                    if (imp[w]>imax) {
                        imax=imp[w];
                        wmax=w;
                    }
                }
            }
            if (wmax!=-1) {
                dist[wmax]=dist[v]+1;
                /* Map both endpoints' keys to display names for the edge. */
                name1=aut[v];
                name2=aut[wmax];
                for (j=0; j<peer_num; j++) {
                    if (strcmp(peer_keys[j], aut[v])==0) name1=peer_names[j];
                    if (strcmp(peer_keys[j], aut[wmax])==0) name2=peer_names[j];
                }
                fprintf(fp, "    \"%s\" -- \"%s\" [color=%d, label=\"%d\"] \n", name1, name2, dist[wmax], dist[wmax]);
                queue[posmax]=wmax;
                posmax++;
                status[wmax]=0;
            }
        }
    }
    fprintf(fp, "}");
    /* Free memory and exit */
    free(au1);
    free(au2);
    free(status);
    free(queue);
    free(imp);
    free(dist);
    for (i=0; i<NATU; i++) {
        free(aut[i]);
    }
    free(aut);
    free(outname);
    fclose(fp);
    for (i=0; i<peer_num; i++) {
        free(peer_keys[i]);
        free(peer_names[i]);
    }
    free(peer_keys);
    free(peer_names);
    return 0;
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="de">
<head>
<!-- Generated by javadoc (version 1.7.0_80) on Sat Jun 18 13:56:54 CEST 2016 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class org.codehaus.plexus.archiver.bzip2.BZip2Archiver (Plexus Archiver Component 3.4 API)</title>
<meta name="date" content="2016-06-18">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.codehaus.plexus.archiver.bzip2.BZip2Archiver (Plexus Archiver Component 3.4 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/codehaus/plexus/archiver/bzip2/BZip2Archiver.html" title="class in org.codehaus.plexus.archiver.bzip2">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/codehaus/plexus/archiver/bzip2/class-use/BZip2Archiver.html" target="_top">Frames</a></li>
<li><a href="BZip2Archiver.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.codehaus.plexus.archiver.bzip2.BZip2Archiver" class="title">Uses of Class<br>org.codehaus.plexus.archiver.bzip2.BZip2Archiver</h2>
</div>
<div class="classUseContainer">No usage of org.codehaus.plexus.archiver.bzip2.BZip2Archiver</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/codehaus/plexus/archiver/bzip2/BZip2Archiver.html" title="class in org.codehaus.plexus.archiver.bzip2">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/codehaus/plexus/archiver/bzip2/class-use/BZip2Archiver.html" target="_top">Frames</a></li>
<li><a href="BZip2Archiver.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2001–2016 <a href="http://codehaus-plexus.github.io/">Codehaus Plexus</a>. All rights reserved.</small></p>
</body>
</html>
|
import Container from "container";
import { get } from "ember-metal/property_get";
import run from "ember-metal/run_loop";
import { View as EmberView } from "ember-views/views/view";
var container, view;
// QUnit module setup/teardown: a fresh container per test; the view created
// by each test is destroyed inside a run loop afterwards.
module("EmberView - Layout Functionality", {
  setup: function() {
    container = new Container();
    // Templates are registered as plain functions; don't instantiate them.
    container.optionsForType('template', { instantiate: false });
  },
  teardown: function() {
    run(function() {
      view.destroy();
    });
  }
});
// When a layout is registered, it is rendered instead of the template.
test("should call the function of the associated layout", function() {
  var templateCalled = 0, layoutCalled = 0;
  container.register('template:template', function() { templateCalled++; });
  container.register('template:layout', function() { layoutCalled++; });

  view = EmberView.create({
    container: container,
    layoutName: 'layout',
    templateName: 'template'
  });

  run(function() {
    view.createElement();
  });

  equal(templateCalled, 0, "template is not called when layout is present");
  equal(layoutCalled, 1, "layout is called when layout is present");
});
// NOTE(review): the description says "template", but the registered function
// is looked up via layoutName -- it exercises the layout path, rendered with
// the view's context. Confirm whether the description should say "layout".
test("should call the function of the associated template with itself as the context", function() {
  container.register('template:testTemplate', function(dataSource) {
    return "<h1 id='twas-called'>template was called for " + get(dataSource, 'personName') + "</h1>";
  });

  view = EmberView.create({
    container: container,
    layoutName: 'testTemplate',
    context: {
      personName: "Tom DAAAALE"
    }
  });

  run(function() {
    view.createElement();
  });

  equal("template was called for Tom DAAAALE", view.$('#twas-called').text(), "the named template was called with the view as the data source");
});
// The view defines neither 'layout' nor 'layoutName', so rendering must fall
// back to the defaultLayout function. (Description corrected: the original
// said "defaultTemplate"/"templateName", but the code below exercises
// defaultLayout.)
test("should fall back to defaultLayout if neither layout nor layoutName are provided", function() {
  var View;

  View = EmberView.extend({
    defaultLayout: function(dataSource) { return "<h1 id='twas-called'>template was called for " + get(dataSource, 'personName') + "</h1>"; }
  });

  view = View.create({
    context: {
      personName: "Tom DAAAALE"
    }
  });

  run(function() {
    view.createElement();
  });

  equal("template was called for Tom DAAAALE", view.$('#twas-called').text(), "the named template was called with the view as the data source");
});
test("should not use defaultLayout if layout is provided", function() {
var View;
View = EmberView.extend({
layout: function() { return "foo"; },
defaultLayout: function(dataSource) { return "<h1 id='twas-called'>template was called for " + get(dataSource, 'personName') + "</h1>"; }
});
view = View.create();
run(function() {
view.createElement();
});
equal("foo", view.$().text(), "default layout was not printed");
});
test("the template property is available to the layout template", function() {
view = EmberView.create({
template: function(context, options) {
options.data.buffer.push(" derp");
},
layout: function(context, options) {
options.data.buffer.push("Herp");
get(options.data.view, 'template')(context, options);
}
});
run(function() {
view.createElement();
});
equal("Herp derp", view.$().text(), "the layout has access to the template");
});
|
/*
* Register map access API - SPI support
*
* Copyright 2011 Wolfson Microelectronics plc
*
* Author: Mark Brown <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*/
#include <linux/regmap.h>
#include <linux/spi/spi.h>
#include <linux/init.h>
#include <linux/module.h>
#include "internal.h"
/*
 * Per-message state for asynchronous SPI regmap writes: the generic
 * regmap_async core plus the SPI message and its two transfers
 * (register buffer and value buffer).
 */
struct regmap_async_spi {
	struct regmap_async core;
	struct spi_message m;
	struct spi_transfer t[2];
};
/* SPI message completion callback: report the message status back to the
 * regmap core for the async transaction that owns it. */
static void regmap_spi_complete(void *data)
{
	struct regmap_async_spi *async_data = data;

	regmap_async_complete_cb(&async_data->core, async_data->m.status);
}
/* Single blocking write of a preassembled register+value buffer. */
static int regmap_spi_write(void *context, const void *data, size_t count)
{
	struct device *dev = context;

	return spi_write(to_spi_device(dev), data, count);
}
/* Blocking write with separate register and value buffers, sent as two
 * transfers of one synchronous SPI message. */
static int regmap_spi_gather_write(void *context,
				   const void *reg, size_t reg_len,
				   const void *val, size_t val_len)
{
	struct device *dev = context;
	struct spi_transfer xfer[2] = {
		{ .tx_buf = reg, .len = reg_len, },
		{ .tx_buf = val, .len = val_len, },
	};
	struct spi_message msg;

	spi_message_init(&msg);
	spi_message_add_tail(&xfer[0], &msg);
	spi_message_add_tail(&xfer[1], &msg);

	return spi_sync(to_spi_device(dev), &msg);
}
/*
 * Start a non-blocking write: transfer 0 carries the register address,
 * transfer 1 the value.  regmap_spi_complete() forwards the message status
 * to the regmap core when the SPI message finishes.
 */
static int regmap_spi_async_write(void *context,
				  const void *reg, size_t reg_len,
				  const void *val, size_t val_len,
				  struct regmap_async *a)
{
	struct regmap_async_spi *async = container_of(a,
						      struct regmap_async_spi,
						      core);
	struct device *dev = context;
	struct spi_device *spi = to_spi_device(dev);

	async->t[0].tx_buf = reg;
	async->t[0].len = reg_len;
	async->t[1].tx_buf = val;
	async->t[1].len = val_len;

	spi_message_init(&async->m);
	spi_message_add_tail(&async->t[0], &async->m);
	spi_message_add_tail(&async->t[1], &async->m);

	/* Completion context is the async record itself. */
	async->m.complete = regmap_spi_complete;
	async->m.context = async;

	return spi_async(spi, &async->m);
}
/* Allocate a zeroed async record and hand its embedded core to regmap. */
static struct regmap_async *regmap_spi_async_alloc(void)
{
	struct regmap_async_spi *async_spi =
		kzalloc(sizeof(*async_spi), GFP_KERNEL);

	return async_spi ? &async_spi->core : NULL;
}
/* Blocking read: write the register address, then read val_size bytes. */
static int regmap_spi_read(void *context,
			   const void *reg, size_t reg_size,
			   void *val, size_t val_size)
{
	struct device *dev = context;

	return spi_write_then_read(to_spi_device(dev), reg, reg_size,
				   val, val_size);
}
/*
 * SPI bus operations handed to the regmap core.  read_flag_mask 0x80 is
 * OR'd into the register address by the core to mark read accesses.
 */
static struct regmap_bus regmap_spi = {
	.write = regmap_spi_write,
	.gather_write = regmap_spi_gather_write,
	.async_write = regmap_spi_async_write,
	.async_alloc = regmap_spi_async_alloc,
	.read = regmap_spi_read,
	.read_flag_mask = 0x80,
};
/**
* regmap_init_spi(): Initialise register map
*
* @spi: Device that will be interacted with
* @config: Configuration for register map
*
* The return value will be an ERR_PTR() on error or a valid pointer to
* a struct regmap.
*/
struct regmap *regmap_init_spi(struct spi_device *spi,
const struct regmap_config *config)
{
return regmap_init(&spi->dev, ®map_spi, &spi->dev, config);
}
EXPORT_SYMBOL_GPL(regmap_init_spi);
/**
* devm_regmap_init_spi(): Initialise register map
*
* @spi: Device that will be interacted with
* @config: Configuration for register map
*
* The return value will be an ERR_PTR() on error or a valid pointer
* to a struct regmap. The map will be automatically freed by the
* device management code.
*/
struct regmap *devm_regmap_init_spi(struct spi_device *spi,
const struct regmap_config *config)
{
return devm_regmap_init(&spi->dev, ®map_spi, &spi->dev, config);
}
EXPORT_SYMBOL_GPL(devm_regmap_init_spi);
MODULE_LICENSE("GPL");
|
"""Models SFA virtual disk using local SFA API scripts."""
# Zenoss Imports
from Products.DataCollector.plugins.CollectorPlugin import PythonPlugin
class ModelSFAVirtualDisk(PythonPlugin):
    """Model SFA virtual disks using the local SFA API scripts."""

    # Relationship and component class the produced object maps bind to.
    relname = 'virtualDisks'
    modname = 'ZenPacks.DDN.SFA2.VirtualDisk'

    # zProperties that must be available on the device for collection.
    requiredProperties = (
        'zCommandUsername',
        'zCommandPassword',
        'zSFASecondaryIp',
    )

    deviceProperties = PythonPlugin.deviceProperties + requiredProperties

    def collect(self, device, log):
        """Synchronously collect data from device.

        Returns a RelationshipMap of virtual disks, or None when the
        connection or fetch fails (errors are logged, never raised).
        """
        log.info("%s: collecting Virtual Disk data", device.id)
        rm = self.relMap()
        try:
            # Imports are deferred so the plugin can be loaded on systems
            # without the DDN libraries installed.
            from ZenPacks.DDN.SFA2.lib.DDNConnPlugin import fetch_data, connect
            from ZenPacks.DDN.SFA2.lib.sfa_help import vd_makedict
            conn_parms = {}
            conn_parms['user'] = getattr(device, 'zCommandUsername', 'user')
            conn_parms['pass'] = getattr(device, 'zCommandPassword', 'pass')
            conn_parms['target'] = device.id
            # Optional failover address for the secondary controller.
            conn_parms['alt_target'] = getattr(device, 'zSFASecondaryIp', None)
            conn = connect(conn_parms)
            func = 'get_all_vds'
            vdisks = fetch_data(conn, func)
            for disk in vdisks:
                model = vd_makedict(disk)
                rm.append(self.objectMap(model))
        except Exception as e:
            log.error('SFAVirtualDisk Exception : %s', str(e))
            return None
        return rm

    def process(self, device, results, log):
        """Process results. Return iterable of datamaps or None."""
        return results
|
BRANCH = "mozilla-aurora"
MOZ_UPDATE_CHANNEL = "aurora"
MOZILLA_DIR = BRANCH
OBJDIR = "obj-l10n"
EN_US_BINARY_URL = "http://stage.mozilla.org/pub/mozilla.org/mobile/nightly/latest-%s-android/en-US" % (BRANCH)
#STAGE_SERVER = "dev-stage01.srv.releng.scl3.mozilla.com"
STAGE_SERVER = "stage.mozilla.org"
STAGE_USER = "ffxbld"
STAGE_SSH_KEY = "~/.ssh/ffxbld_rsa"
HG_SHARE_BASE_DIR = "/builds/hg-shared"
config = {
"log_name": "single_locale",
"objdir": OBJDIR,
"is_automation": True,
"buildbot_json_path": "buildprops.json",
"purge_minsize": 10,
"force_clobber": True,
"clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber",
"locales_file": "%s/mobile/android/locales/all-locales" % MOZILLA_DIR,
"locales_dir": "mobile/android/locales",
"ignore_locales": ["en-US"],
"nightly_build": True,
'balrog_credentials_file': 'oauth.txt',
"tools_repo": "https://hg.mozilla.org/build/tools",
"tooltool_config": {
"manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
"output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR,
"bootstrap_cmd": ["bash", "-xe", "setup.sh"],
},
"exes": {
'tooltool.py': '/tools/tooltool.py',
},
"repos": [{
"repo": "https://hg.mozilla.org/releases/mozilla-aurora",
"revision": "default",
"dest": MOZILLA_DIR,
}, {
"repo": "https://hg.mozilla.org/build/buildbot-configs",
"revision": "default",
"dest": "buildbot-configs"
}, {
"repo": "https://hg.mozilla.org/build/tools",
"revision": "default",
"dest": "tools"
}, {
"repo": "https://hg.mozilla.org/build/compare-locales",
"revision": "RELEASE_AUTOMATION"
}],
"hg_l10n_base": "https://hg.mozilla.org/releases/l10n/%s" % BRANCH,
"hg_l10n_tag": "default",
'vcs_share_base': HG_SHARE_BASE_DIR,
"l10n_dir": MOZILLA_DIR,
"repack_env": {
# so ugly, bug 951238
"LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64",
"MOZ_OBJDIR": OBJDIR,
"EN_US_BINARY_URL": EN_US_BINARY_URL,
"LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
"MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL,
},
# TODO ideally we could get this info from a central location.
# However, the agility of these individual config files might trump that.
"upload_env": {
"UPLOAD_USER": STAGE_USER,
"UPLOAD_SSH_KEY": STAGE_SSH_KEY,
"UPLOAD_HOST": STAGE_SERVER,
"POST_UPLOAD_CMD": "post_upload.py -b mozilla-aurora-android-l10n -p mobile -i %(buildid)s --release-to-latest --release-to-dated",
"UPLOAD_TO_TEMP": "1",
},
"merge_locales": True,
"make_dirs": ['config'],
"mozilla_dir": MOZILLA_DIR,
"mozconfig": "%s/mobile/android/config/mozconfigs/android/l10n-nightly" % MOZILLA_DIR,
"signature_verification_script": "tools/release/signing/verify-android-signature.sh",
# Balrog
"build_target": "Android_arm-eabi-gcc3",
# Mock
"mock_target": "mozilla-centos6-x86_64-android",
"mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache',
'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
'gtk2-devel', 'libnotify-devel', 'yasm',
'alsa-lib-devel', 'libcurl-devel',
'wireless-tools-devel', 'libX11-devel',
'libXt-devel', 'mesa-libGL-devel',
'gnome-vfs2-devel', 'GConf2-devel', 'wget',
'mpfr', # required for system compiler
'xorg-x11-font*', # fonts required for PGO
'imake', # required for makedepend!?!
'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo
'valgrind', 'dbus-x11',
'pulseaudio-libs-devel',
'gstreamer-devel', 'gstreamer-plugins-base-devel',
'freetype-2.3.11-6.el6_1.8.x86_64',
'freetype-devel-2.3.11-6.el6_1.8.x86_64',
'java-1.7.0-openjdk-devel',
'openssh-clients',
'zlib-devel-1.2.3-27.el6.i686',
],
"mock_files": [
("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
],
}
|
# ---------------------------------------------------------------------------- #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
# ---------------------------------------------------------------------------- #
# classifiers — canonical category names shared by the slot table below.
ARM = "armor"
CLO = "clothing"
TOP = "tops"
BTM = "bottoms"      # was "Bottoms"; lowercased for consistency with the rest
UND = "undies"
SWM = "swimwear"
JEW = "jewelry"      # trailing comma removed: it silently made this a tuple
ACC = "accessories"
FTW = "footwear"
HDW = "headwear"
NEK = "neckwear"
ONE = "onepiece"  # top and bottom
BOD = "bodysuit"  # top, bottom, (possibly legs, arms, etc)
GLV = "glove"


class _EqSlotData:
    """Describes one equipment slot: name, layer count and linking policy."""

    __slots__ = "_name", "_layers", "_linking", "_tags"

    def __init__(self, name, layers=0):
        self._name = name
        self._layers = layers   # experimental
        self._linking = 0       # always 0 for the basic type
        self._tags = ()

    @property
    def name(self):
        """The (singular) slot name."""
        return self._name

    @property
    def layers(self):
        """The max layer index or 0 if layers are unsupported."""
        return self._layers

    @property
    def linking(self):
        """Whether linking is allowed. (0 - False, 1 - optional, 2 - required)"""
        return self._linking

    @property
    def plural_name(self):
        """The plural name of this slot (names in _noplural stay as-is)."""
        if self._name in EQUIPSLOTS._noplural:
            return self._name
        return self._name + "s"


class _EqSlotDataLR(_EqSlotData):
    """Slot data type for left and right slot pairs."""

    __slots__ = ()

    def __init__(self, name, layers=1, linking=1):
        super().__init__(name, layers)
        self._linking = linking

    @property
    def opp_name(self):
        """Name of the mirrored slot ('xxx.L' <-> 'xxx.R')."""
        if self._name.endswith("L"):
            return self._name[:-1] + "R"
        return self._name[:-1] + "L"


class _EqSlots(dict):
    """Registry of every equipment slot, keyed by slot name."""

    # Slot names that are already plural (or uncountable).
    _noplural = (HDW, FTW, BTM, TOP, NEK)

    __slots__ = ()

    def __init__(self):
        super().__init__({
            # LR - no linking
            'ankle.L': _EqSlotDataLR('ankle.L', 3, 0),
            'ankle.R': _EqSlotDataLR('ankle.R', 3, 0),
            'wrist.L': _EqSlotDataLR('wrist.L', 3, 0),
            'wrist.R': _EqSlotDataLR('wrist.R', 3, 0),
            'thigh.L': _EqSlotDataLR('thigh.L', 3, 0),
            'thigh.R': _EqSlotDataLR('thigh.R', 3, 0),
            # LR - linking
            'sock.L': _EqSlotDataLR('sock.L', 1, 1),
            'sock.R': _EqSlotDataLR('sock.R', 1, 1),
            'glove.L': _EqSlotDataLR('glove.L', 1, 1),
            'glove.R': _EqSlotDataLR('glove.R', 1, 1),
            # Normal
            'headwear': _EqSlotData(HDW, 0),
            'tops': _EqSlotData(TOP, 2),
            'bottoms': _EqSlotData(BTM, 2),
            'undies-top': _EqSlotData('undies-top', 0),
            'undies-bottom': _EqSlotData('undies-bottom', 0),
            # Normal - misc
            'glasses': _EqSlotData('glasses', 0),
            'belt': _EqSlotData('belt', 0),
        })


EQUIPSLOTS = _EqSlots()
|
// Load `n` (namespace helper) and lodash via CommonJS only when they are not
// already present as globals (e.g. when running in the browser).
if (!n) {
  var n = require('./n')(module);
}

if (!_) {
  var _ = require('lodash');
}
n('piksha.shared', function (ns) {
  var months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'];

  // Attribute definitions: optional per-value `error`, per-group `groupError`
  // and `unique` flags drive the validation in AttributesService.errors().
  var definitions = [
    {
      name: 'person'
    },
    {
      name: 'month',
      error: function (value) {
        return _.contains(months, value) ? '' : 'Input a month, e.g. January';
      },
      groupError: function (values) {
        if (values.length > 0 && _.all(values, function (value) { return _.contains(months, value); })) {
          var sortedValues = _.sortBy(values, function (value) { return _.indexOf(months, value); });
          var index = _.indexOf(months, sortedValues[0]);
          // Deep-compare the expected contiguous run of months with the
          // sorted input.  (_.eq is SameValueZero/reference equality and
          // always failed for two distinct arrays.)
          return _.isEqual(_.slice(months, index, sortedValues.length + index), sortedValues) ? '' : 'Months must be contiguous.';
        } else {
          return '';
        }
      }
    },
    {
      name: 'year',
      error: function (value) {
        return _.inRange(value, 1920, new Date().getFullYear()) ? '' : 'Year must be from 1920 onwards.';
      },
      unique: true
    },
    {
      name: 'locality',
      unique: true
    },
    {
      name: 'country',
      unique: true
    },
    {
      name: 'event',
      unique: true
    },
    {
      name: 'subject'
    }
  ];

  ns.AttributesService = {
    create: function () {
      return {
        definitions: function () {
          return definitions;
        },
        definitionByName: function (name) {
          return _.find(definitions, function (a) { return name === a.name; });
        },
        // Map attribute id -> error message; later checks overwrite earlier
        // ones, so a blank value wins over group/value errors.
        errors: function (attributes) {
          return _.reduce(attributes, function (errors, attribute) {
            var definition = this.definitionByName(attribute.name);
            var groupValues = _(attributes).filter(function (a) { return attribute.name === a.name; }).pluck('value').value();
            if (definition.groupError && definition.groupError(groupValues)) {
              errors[attribute.id] = definition.groupError(groupValues);
            }
            if (definition.error && definition.error(attribute.value)) {
              errors[attribute.id] = definition.error(attribute.value);
            }
            if (_.isEmpty(_.trim(attribute.value))) {
              errors[attribute.id] = 'Attributes cannot be blank.';
            }
            return errors;
          }, {}, this);
        }
      };
    }
  };
});
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals, \
with_statement
import logging
import solrdataimport.lib.cache as Cache
from solrdataimport.cass.cassClient import CassandraClient
from solrdataimport.dataload.cqlbuilder import CqlBuilder
from solrdataimport.dataload.cassdata import CassandraData
logger = logging.getLogger(__name__)
class CassandraDataBatchLoad(object):
    """Load Cassandra rows for a section, singly or concurrently in batch."""

    @classmethod
    def batchResult(cls, section, rows, rowKey):
        """Return a list of CassandraData preloads for the given rows."""
        logger.debug('load section: %s', section.name or section.table)

        if len(rows) == 1:
            logger.debug('one row')
            cassData = CassandraData(section)
            cassData.loadData(fullDataImport=False, row=rows[0], rowKey=rowKey)
            return [cassData]
        else:
            logger.info('more rows, batch mode')
            return cls.__batchLoad(section, rows, rowKey)

    @classmethod
    def __batchLoad(cls, section, rows, rowKey):
        """Build one CQL query per row, skip entries already in the cache,
        execute the remainder concurrently and return CassandraData objects
        in the original row order."""
        search = CqlBuilder.buildCql(False, section.table,
                                     section.key, rowKey=rowKey)
        logger.debug("cql %s", search)

        statements_and_params = []
        for row in rows:
            params = CqlBuilder.buildParam(False, section.table,
                                           section.key, row=row, rowKey=rowKey)
            logger.debug("cql params %s", params)

            cacheKey = None
            if section.cache:
                cacheKey = CqlBuilder.buildCacheKey(search, params)
                logger.debug("cache key %s", cacheKey)

                if Cache.hasKey(cacheKey):
                    logger.debug('loadData - hit cache: %s', cacheKey)
                    # was the undefined name `true`, which raised NameError
                    # on every cache hit
                    statements_and_params.append({"cached": True,
                                                  "cacheKey": cacheKey})
                    continue

            query = {
                "cql": search,
                "params": params,
                "cacheKey": cacheKey
            }
            statements_and_params.append(query)

        # Only non-cached entries need to hit Cassandra.
        # (dict.has_key is Python-2-only; `in` works on both.)
        statements_need_query = [query for query in statements_and_params
                                 if 'cached' not in query]

        logger.debug('batch load ====>>, total %s', len(statements_need_query))
        resultSetArray = CassandraClient.execute_concurrent(statements_need_query)
        logger.debug('batch load done')

        index = 0
        PreloadDataArray = []
        for query in statements_and_params:
            preload = None
            if 'cached' in query:
                preload = CassandraData(section, None, query['cacheKey'])
            else:
                # Results are positional: consume one per non-cached query.
                success, resultSet = resultSetArray[index]
                if success:
                    preload = CassandraData(section, resultSet, query['cacheKey'])
                index = index + 1

            if preload:
                PreloadDataArray.append(preload)

        return PreloadDataArray
|
/**
 * Parses a free-text search query with `key:value` operators (e.g.
 * `user:alice until:2020-01-02 hello`) into a query object; unrecognized
 * words accumulate into `q.text`.
 */
export default function parseSearchQuery(qs: string) {
	const q: { text: string; [key: string]: any } = {
		text: ''
	};

	qs.split(' ').forEach(x => {
		if (/^([a-z_]+?):(.+?)$/.test(x)) {
			const [key, value] = x.split(':');
			switch (key) {
				case 'user':
					q['includeUserUsernames'] = value.split(',');
					break;
				case 'exclude_user':
					q['excludeUserUsernames'] = value.split(',');
					break;
				case 'follow':
					q['following'] = value == 'null' ? null : value == 'true';
					break;
				case 'reply':
					q['reply'] = value == 'null' ? null : value == 'true';
					break;
				case 'renote':
					q['renote'] = value == 'null' ? null : value == 'true';
					break;
				case 'media':
					q['media'] = value == 'null' ? null : value == 'true';
					break;
				case 'poll':
					q['poll'] = value == 'null' ? null : value == 'true';
					break;
				case 'until':
				case 'since':
					// YYYY-MM-DD — the regex must be applied to the value;
					// a bare regex literal is always truthy and produced
					// NaN dates for malformed input.
					if (/^[0-9]+\-[0-9]+\-[0-9]+$/.test(value)) {
						const [yyyy, mm, dd] = value.split('-');
						q[`${key}_date`] = (new Date(parseInt(yyyy, 10), parseInt(mm, 10) - 1, parseInt(dd, 10))).getTime();
					}
					break;
				default:
					q[key] = value;
					break;
			}
		} else {
			q.text += x + ' ';
		}
	});

	if (q.text) {
		q.text = q.text.trim();
	}

	return q;
}
|
import pitcher = require("../../lib/runtime");
/**
 * Root module: assembles a farm description from the housing, chickens and
 * market providers pulled in via `includes`.
 * NOTE(review): pitcher appears to resolve provider arguments by name
 * (providesX / providedX -> parameter x) — confirm before renaming any
 * parameter here.
 */
export class Module implements pitcher.Module {
  includes = [HousingModule, ChickensModule]

  providesFarm(
    farmer: string,
    market: string,
    housing: string,
    chickens: string) {
    return farmer + " living in " + housing + " with " + chickens + " being sold to the " + market;
  }

  // A pre-built (non-factory) provided value.
  providedFarmer = "Bob";
}
/** Provides `housing`, priced by the `money` provider from BusinessModule. */
export class HousingModule implements pitcher.Module {
  includes = [BusinessModule]

  providesHousing(money: number) {
    return "a house costing $" + money;
  }
}
/**
 * Provides `money` and `market`.  The `eggs` parameter is unused in the body
 * but declares a dependency on ChickensModule's eggs provider.
 */
export class BusinessModule implements pitcher.Module {
  includes = [ChickensModule]

  providesMoney(eggs: string) {
    return 1500;
  }

  providesMarket() {
    return "market"
  }
}
/** Provides `chickens` and `eggs` (eggs depend on chickens). */
export class ChickensModule implements pitcher.Module {
  providesChickens() {
    return "chickens";
  }

  providesEggs(chickens: string) {
    return chickens + " eggs";
  }
}
|
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Example - example-guide-concepts-3-production</title>
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.5.0-rc.0/angular.min.js"></script>
<script src="invoice3.js"></script>
<script src="finance3.js"></script>
</head>
<body >
<div ng-app="invoice3" ng-controller="InvoiceController as invoice">
<b>Invoice:</b>
<div>
Quantity: <input type="number" min="0" ng-model="invoice.qty" required >
</div>
<div>
Costs: <input type="number" min="0" ng-model="invoice.cost" required >
<select ng-model="invoice.inCurr">
<option ng-repeat="c in invoice.currencies">{{c}}</option>
</select>
</div>
<div>
<b>Total:</b>
<span ng-repeat="c in invoice.currencies">
{{invoice.total(c) | currency:c}}
</span>
<button class="btn" ng-click="invoice.pay()">Pay</button>
</div>
</div>
</body>
</html>
|
package org.realityforge.gwt.websockets.client;
import com.google.gwt.typedarrays.shared.ArrayBuffer;
import com.google.gwt.typedarrays.shared.ArrayBufferView;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * A do-nothing {@link WebSocket} implementation for tests: every operation
 * is a no-op and every getter returns a default value ({@code false},
 * {@code 0} or {@code null}).
 */
public final class TestWebSocket
  extends WebSocket
{
  /** Factory producing {@link TestWebSocket} instances. */
  public static class Factory
    implements WebSocket.Factory
  {
    @Override
    public WebSocket newWebSocket()
    {
      return new TestWebSocket();
    }
  }

  @Override
  public void connect( @Nonnull final String server, @Nonnull final String... protocols )
  {
  }

  @Override
  public void close()
  {
  }

  @Override
  public boolean isConnected()
  {
    return false;
  }

  @Override
  public void send( @Nonnull final String data )
  {
  }

  @Override
  public int getBufferedAmount()
  {
    return 0;
  }

  @Override
  public ReadyState getReadyState()
  {
    return null;
  }

  @Override
  public void close( final short code, @Nullable final String reason )
  {
  }

  @Override
  public void send( @Nonnull final ArrayBuffer data )
  {
  }

  @Override
  public void send( @Nonnull final ArrayBufferView data )
  {
  }

  @Override
  public String getProtocol()
  {
    return null;
  }

  @Override
  public String getURL()
  {
    return null;
  }

  @Override
  public String getExtensions()
  {
    return null;
  }

  @Override
  public void setBinaryType( @Nonnull final BinaryType binaryType )
  {
  }

  @Override
  public BinaryType getBinaryType()
  {
    return null;
  }
}
|
# -*- coding: utf-8 -*-
import random
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
def random_with_n_digits(n):
    """Return "0." followed by `n` random digits, at least one non-zero.

    Retries (recursively) in the unlikely case all digits are zero.
    """
    rand = "0."
    not_zero = 0
    # range works on both Python 2 and 3 (the original xrange is 2-only).
    for i in range(1, n + 1):
        r = random.randint(0, 9)
        if r > 0:
            not_zero += 1
        rand += str(r)

    if not_zero > 0:
        return rand
    else:
        # was random_with_N_digits — a NameError, the function is lowercase
        return random_with_n_digits(n)
class MegaRapidoNet(MultiHoster):
    """MegaRapido.net multi-hoster: generates premium links via gerar.php."""

    __name__ = "MegaRapidoNet"
    __type__ = "hoster"
    __version__ = "0.07"
    __status__ = "testing"

    __pattern__ = r'http://(?:www\.)?\w+\.megarapido\.net/\?file=\w+'
    __config__ = [("activated", "bool", "Activated", True),
                  ("use_premium" , "bool", "Use premium account if available" , True),
                  ("revertfailed", "bool", "Revert to standard download if fails", True)]

    __description__ = """MegaRapido.net multi-hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("Kagenoshin", "[email protected]")]

    LINK_PREMIUM_PATTERN = r'<\s*?a[^>]*?title\s*?=\s*?["\'].*?download["\'][^>]*?href=["\']([^"\']+)'
    ERROR_PATTERN = r'<\s*?div[^>]*?class\s*?=\s*?["\']?alert-message error.*?>([^<]*)'

    def handle_premium(self, pyfile):
        """Request a premium link for `pyfile.url` using the account session."""
        # was random_with_N_digits — a NameError; the helper is lowercase
        self.html = self.load("http://megarapido.net/gerar.php",
                              post={'rand'     : random_with_n_digits(16),
                                    'urllist'  : pyfile.url,
                                    'links'    : pyfile.url,
                                    'exibir'   : "normal",
                                    'usar'     : "premium",
                                    'user'     : self.account.get_data('sid'),
                                    'autoreset': ""})

        if "desloga e loga novamente para gerar seus links" in self.html.lower():
            self.error(_("You have logged in at another place"))

        return super(MegaRapidoNet, self).handle_premium(pyfile)
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
import os
from waflib import Task,Options,Utils
from waflib.Configure import conf
from waflib.TaskGen import extension,feature,before_method
@before_method('apply_incpaths','apply_link','propagate_uselib_vars')
@feature('perlext')
def init_perlext(self):
	# Ensure perl-extension targets link against the PERLEXT uselib flags and
	# are produced with the perl shared-library filename pattern.
	self.uselib=self.to_list(getattr(self,'uselib',[]))
	if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT')
	self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['perlext_PATTERN']
@extension('.xs')
def xsubpp_file(self,node):
	# Translate a .xs source into C via the xsubpp task, then compile the
	# generated .c file as part of this task generator.
	outnode=node.change_ext('.c')
	self.create_task('xsubpp',node,outnode)
	self.source.append(outnode)
class xsubpp(Task.Task):
	# Runs the xsubpp preprocessor; its stdout is redirected into the target.
	run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
	color='BLUE'
	ext_out=['.h']
@conf
def check_perl_version(self,minver=None):
	"""Find perl, store it in env.PERL and check it is at least *minver*
	(a tuple such as (5,10,0)).  Returns True when the requirement is met."""
	res=True
	if minver:
		cver='.'.join(map(str,minver))
	else:
		cver=''
	self.start_msg('Checking for minimum perl version %s'%cver)
	perl=getattr(Options.options,'perlbinary',None)
	if not perl:
		perl=self.find_program('perl',var='PERL')
	# NOTE(review): waf's find_program usually raises on failure, so this
	# guard may be dead code — confirm against the bundled waflib version.
	if not perl:
		self.end_msg("Perl not found",color="YELLOW")
		return False
	self.env['PERL']=perl
	# %vd prints the version as dotted integers, e.g. 5.30.0
	version=self.cmd_and_log(self.env.PERL+["-e",'printf \"%vd\", $^V'])
	if not version:
		res=False
		version="Unknown"
	elif not minver is None:
		ver=tuple(map(int,version.split(".")))
		if ver<minver:
			res=False
	self.end_msg(version,color=res and"GREEN"or"YELLOW")
	return res
@conf
def check_perl_module(self,module):
	"""Check that the perl module *module* can be loaded (`perl -e 'use X'`).
	Returns the command output (or True) on success, None on failure."""
	cmd=self.env.PERL+['-e','use %s'%module]
	self.start_msg('perl module %s'%module)
	try:
		r=self.cmd_and_log(cmd)
	except Exception:
		self.end_msg(False)
		return None
	self.end_msg(r or True)
	return r
@conf
def check_perl_ext_devel(self):
	"""Populate the build environment with everything needed to compile
	perl extensions: compiler/linker flags from perl's Config, the typemap,
	the xsubpp executable and the extension install directory."""
	env=self.env
	perl=env.PERL
	if not perl:
		self.fatal('find perl first')
	def cmd_perl_config(s):
		# Build a `perl -MConfig -e 'print "..."'` command for one Config key.
		return perl+['-MConfig','-e','print \"%s\"'%s]
	def cfg_str(cfg):
		return self.cmd_and_log(cmd_perl_config(cfg))
	def cfg_lst(cfg):
		return Utils.to_list(cfg_str(cfg))
	def find_xsubpp():
		# Prefer the xsubpp shipped with perl; fall back to one on PATH.
		for var in('privlib','vendorlib'):
			xsubpp=cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}'%var)
			if xsubpp and os.path.isfile(xsubpp[0]):
				return xsubpp
		return self.find_program('xsubpp')
	env['LINKFLAGS_PERLEXT']=cfg_lst('$Config{lddlflags}')
	env['INCLUDES_PERLEXT']=cfg_lst('$Config{archlib}/CORE')
	env['CFLAGS_PERLEXT']=cfg_lst('$Config{ccflags} $Config{cccdlflags}')
	env['EXTUTILS_TYPEMAP']=cfg_lst('$Config{privlib}/ExtUtils/typemap')
	env['XSUBPP']=find_xsubpp()
	if not getattr(Options.options,'perlarchdir',None):
		env['ARCHDIR_PERL']=cfg_str('$Config{sitearch}')
	else:
		env['ARCHDIR_PERL']=getattr(Options.options,'perlarchdir')
	env['perlext_PATTERN']='%s.'+cfg_str('$Config{dlext}')
def options(opt):
	# Command-line options for the perl tool.
	opt.add_option('--with-perl-binary',type='string',dest='perlbinary',help='Specify alternate perl binary',default=None)
	opt.add_option('--with-perl-archdir',type='string',dest='perlarchdir',help='Specify directory where to install arch specific files',default=None)
|
package com.dualexec.fxgraphs.view;
import javafx.scene.Node;
import javafx.scene.effect.Lighting;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;
import javafx.scene.shape.StrokeType;
/**
 * A JavaFX "place" vertex drawn as concentric circles: a white background,
 * a translucent gray outer ring (also the connection boundary node), a white
 * inner fill and a black center dot.
 */
public class Place extends VertexView {

	private Node anchor;
	private Circle createOuterCircle;

	public Place(double centerX, double centerY, double radius) {
		// getAnchors().add(createAnchor(centerX, centerY, radius));
		getChildren().add(createBaseBackroundCircle(centerX, centerY, radius));
		// Create the outer circle once and keep a handle to it.  The old code
		// built and added a second identical outer circle on top, doubling
		// the translucent ring and overwriting the connection boundary node.
		createOuterCircle = createOuterCircle(centerX, centerY, radius);
		getChildren().add(createOuterCircle);
		getChildren().add(getInnerFillCircle(centerX, centerY, radius));
		getChildren().add(createCenterDot(centerX, centerY, radius));
	}

	/** White disk slightly smaller than the outer ring. */
	private Circle getInnerFillCircle(double centerX, double centerY, double radius) {
		Circle innerFillCircle = new Circle(radius * 0.9, Color.WHITE);
		innerFillCircle.setLayoutX(centerX);
		innerFillCircle.setLayoutY(centerY);
		return innerFillCircle;
	}

	/** Small black anchor dot (currently unused — see commented-out call). */
	private Circle createAnchor(double centerX, double centerY, double radius) {
		Circle anchor = new Circle(radius * 0.2, Color.BLACK);
		anchor.setLayoutX(centerX);
		anchor.setLayoutY(centerY);
		return anchor;
	}

	/** Lit, semi-transparent black dot marking the center. */
	private Circle createCenterDot(double centerX, double centerY, double radius) {
		Circle centerDot = new Circle(radius * 0.2, Color.BLACK);
		centerDot.setEffect(new Lighting());
		centerDot.setOpacity(0.8);
		centerDot.setLayoutX(centerX);
		centerDot.setLayoutY(centerY);
		return centerDot;
	}

	/** Opaque white disk behind everything else. */
	private Circle createBaseBackroundCircle(double centerX, double centerY, double radius) {
		Circle outerCircle = new Circle(radius, Color.WHITE);
		outerCircle.setLayoutX(centerX);
		outerCircle.setLayoutY(centerY);
		return outerCircle;
	}

	/** Translucent gray ring; registered as the connection boundary node. */
	private Circle createOuterCircle(double centerX, double centerY, double radius) {
		Circle outerCircle = new Circle(radius, Color.GRAY);
		outerCircle.setEffect(new Lighting());
		outerCircle.setStroke(Color.BLACK);
		outerCircle.setStrokeWidth(2);
		outerCircle.setStrokeType(StrokeType.OUTSIDE);
		outerCircle.setOpacity(0.4);
		outerCircle.setLayoutX(centerX);
		outerCircle.setLayoutY(centerY);
		setConnectionBounderyNode(outerCircle);
		return outerCircle;
	}

	public Circle getCreateOuterCircle() {
		return createOuterCircle;
	}

	public void setCreateOuterCircle(Circle createOuterCircle) {
		this.createOuterCircle = createOuterCircle;
	}
}
|
#!/usr/bin/env node
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
var path = require('path'),
fs = require('fs'),
clean = require('./clean'),
shjs = require('shelljs'),
zip = require('adm-zip'),
check_reqs = require('./check_reqs'),
platformWwwDir = path.join('platforms', 'firefoxos', 'www'),
platformBuildDir = path.join('platforms', 'firefoxos', 'build'),
packageFile = path.join(platformBuildDir, 'package.zip');
/**
 * buildProject
 * Zips the platform www directory into platforms/firefoxos/build/package.zip.
 * NOTE(review): exits the process on both success and failure, so this is
 * only usable as a CLI entry point, never as a library call.
 */
exports.buildProject = function(){
    // Check that requirements are (still) met
    if (!check_reqs.run()) {
        console.error('Please make sure you meet the software requirements in order to build a firefoxos cordova project');
        process.exit(2);
    }

    clean.cleanProject(); // remove old build result

    if (!fs.existsSync(platformBuildDir)) {
        fs.mkdirSync(platformBuildDir);
    }

    // add the project to a zipfile
    var zipFile = zip();
    zipFile.addLocalFolder(platformWwwDir);
    zipFile.writeZip(packageFile);

    console.log('Firefox OS packaged app built in '+ packageFile);
    process.exit(0);
};
// Prints CLI usage for `cordova build firefoxos`.
module.exports.help = function() {
    console.log('Usage: cordova build firefoxos');
    console.log('Build will create the packaged app in \''+platformBuildDir+'\'.');
};
|
#include <stdlib.h>
#include <parson.h>
#include "MqttModule.h"
#include "MqttAdapter.h"
#include "azure_c_shared_utility/threadapi.h"
#include "azure_c_shared_utility/xlogging.h"
#include "azure_c_shared_utility/crt_abstractions.h"
#include "messageproperties.h"
#include "message.h"
#include "module.h"
#include "broker.h"
static bool copyConfigValueAsString(const JSON_Object *jsonData, const char *propertyName, char** propertyValue) {
const char* tmpValue = json_object_get_string(jsonData, propertyName);
if (tmpValue == NULL)
{
LogError("call json_object_get_string for %s return NULL or error", propertyName);
return false;
}
else if (mallocAndStrcpy_s(propertyValue, tmpValue) != 0) {
LogError("Error allocating memory for property %s string value %s", propertyName, propertyValue);
return false;
}
else {
return true;
}
}
static bool copyConfigValueAsInt(const JSON_Object *jsonData, const char *propertyName, unsigned int* propertyValue) {
const char* tmpValue = json_object_get_string(jsonData, propertyName);
if (tmpValue == NULL)
{
LogError("call json_object_get_string for {0} return NULL or error", propertyName);
return false;
}
else if (sscanf(tmpValue, "%d", propertyValue) < 0)
{
LogError("Wrong parameter %s value %s(should be an integer number of port)", propertyName, tmpValue);
return false;
}
else {
return true;
}
}
static void * MqttGateway_ParseConfigurationFromJson(const char* configuration)
{
LogInfo("MqttGateway_ParseConfigurationFromJson call..");
MQTT_CONFIG * result;
if (configuration == NULL)
{
LogError("invalid module args.");
result = NULL;
}
else
{
JSON_Value* json = json_parse_string((const char*)configuration);
if (json == NULL)
{
LogError("unable to json_parse_string");
result = NULL;
}
else
{
JSON_Object* root = json_value_get_object(json);
if (root == NULL)
{
LogError("unable to json_value_get_object");
result = NULL;
}
else
{
MQTT_CONFIG config;
// Check if we can read all required properties first
if (copyConfigValueAsString(root, "mqttBrokerAddress", &(config.mqttBrokerAddress)) &&
copyConfigValueAsString(root, "clientId", &(config.clientId)) &&
(copyConfigValueAsString(root, "topic2Publish", &(config.topic2Publish))
|| copyConfigValueAsString(root, "topic2Subscribe", &(config.topic2Subscribe)))
) {
if (!copyConfigValueAsInt(root, "mqttBrokerPort", &config.mqttBrokerPort)) {
//Set 1883 as a default MQTT port
config.mqttBrokerPort = 1883;
}
result = (MQTT_CONFIG*)malloc(sizeof(MQTT_CONFIG));
*result = config;
LogInfo("MqttGateway config record: mqttBrokerAddress->%s mqttBrokerPort->%d",
result->mqttBrokerAddress, result->mqttBrokerPort);
}
else
{
if (config.mqttBrokerAddress != NULL)
free(config.mqttBrokerAddress);
if (config.clientId != NULL)
free(config.clientId);
if (config.topic2Publish != NULL)
free(config.topic2Publish);
if (config.topic2Subscribe != NULL)
free(config.topic2Subscribe);
}
}
}
}
return result;
}
/*
 * Intentionally left empty.
 *
 * NOTE(review): MqttGateway_Create stores the configuration pointer directly
 * in the module data (result->config = configuration) instead of deep-copying
 * it, so freeing the MQTT_CONFIG and its strings here would leave the running
 * module with dangling pointers. As written, the configuration allocated by
 * MqttGateway_ParseConfigurationFromJson therefore leaks -- confirm the
 * intended ownership model and either deep-copy in Create or free here.
 */
void MqttGateway_FreeConfiguration(void * configuration)
{
}
/*
 * Creates the module instance: allocates the module data record and spins
 * up an MqttAdapter bound to the parsed configuration.
 *
 * broker        - gateway broker handle (must be non-NULL).
 * configuration - MQTT_CONFIG produced by ParseConfigurationFromJson.
 *
 * Returns the module handle, or NULL on invalid args or allocation failure.
 */
static MODULE_HANDLE MqttGateway_Create(BROKER_HANDLE broker, const void* configuration)
{
    LogInfo("MqttGateway_Create_Create call..");
    MQTT_GATEWAY_DATA * result;
    if (broker == NULL || configuration == NULL)
    {
        LogError("invalid MqttModule args.");
        result = NULL;
    }
    else
    {
        /*allocate module data struct */
        result = (MQTT_GATEWAY_DATA*)malloc(sizeof(MQTT_GATEWAY_DATA));
        if (result == NULL)
        {
            /* Bug fix: the original dereferenced the NULL pointer when the
             * allocation failed. */
            LogError("unable to allocate MQTT_GATEWAY_DATA");
        }
        else
        {
            /* NOTE(review): the module aliases the caller-owned configuration
             * rather than copying it; see MqttGateway_FreeConfiguration. */
            result->config = (MQTT_CONFIG *)configuration;
            result->mqttAdapter = new MqttAdapter(result->config);
        }
    }
    return result;
}
/*
 * Destroys the module instance: signals the worker loop to stop, releases
 * the MQTT adapter, then frees the module data record.
 */
static void MqttGateway_Destroy(MODULE_HANDLE moduleHandle)
{
    LogInfo("MqttGateway_Destroy call..");
    if (moduleHandle == NULL)
    {
        LogError("Attempt to destroy NULL module");
    }
    else
    {
        MQTT_GATEWAY_DATA* module_data = (MQTT_GATEWAY_DATA*)moduleHandle;
        /* Tell thread to stop BEFORE tearing down the adapter it may use. */
        module_data->gatewaysRunning = 0;
        if (module_data->mqttAdapter != NULL) {
            /* Bug fix: mqttAdapter is created with 'new' in
             * MqttGateway_Create, so it must be released with 'delete';
             * free() skips the destructor and mixing new/free is undefined
             * behavior. */
            delete module_data->mqttAdapter;
        }
        free(module_data);
    }
}
/*
 * Broker callback invoked for each message routed to this module.
 * Currently only inspects the message properties.
 */
static void MqttGateway_Receive(MODULE_HANDLE moduleHandle, MESSAGE_HANDLE messageHandle)
{
    (void)moduleHandle;
    // Print the properties & content of the received message
    CONSTMAP_HANDLE properties = Message_GetProperties(messageHandle);
    if (properties != NULL)
    {
        /* Bug fix: Message_GetProperties returns a cloned handle that the
         * caller must release; the original leaked it on every message. */
        ConstMap_Destroy(properties);
    }
}
/*
 * Broker callback invoked once after module creation. The message scaffolding
 * below is prepared but not yet used to publish anything.
 */
static void MqttGateway_Start(MODULE_HANDLE moduleHandle)
{
    LogInfo("MqttGateway_Start call..");
    if (moduleHandle == NULL)
    {
        LogError("Attempt to start NULL module");
    }
    else
    {
        MESSAGE_CONFIG newMessageCfg;
        (void)newMessageCfg; /* not yet used; kept for future publishing */
        MAP_HANDLE newProperties = Map_Create(NULL);
        if (newProperties == NULL)
        {
            LogError("Failed to create message properties");
        }
        else
        {
            MQTT_GATEWAY_DATA* module_data = (MQTT_GATEWAY_DATA*)moduleHandle;
            (void)module_data; /* not yet used */
            /* Bug fix: release the map; the original leaked it on every
             * Start call. */
            Map_Destroy(newProperties);
        }
    }
}
/*
 * Required for all modules: the public API and the designated implementation functions.
 */
/* Version-1 module API table handed to the gateway via Module_GetApi;
 * the function order must match the MODULE_API_1 struct layout. */
static const MODULE_API_1 MqttGateway_APIS_all =
{
    { MODULE_API_VERSION_1 },
    MqttGateway_ParseConfigurationFromJson,
    MqttGateway_FreeConfiguration,
    MqttGateway_Create,
    MqttGateway_Destroy,
    MqttGateway_Receive,
    MqttGateway_Start
};
/* Gateway entry point: returns this module's API table. The static-build
 * variant exposes a per-module symbol; the dynamic build exposes the
 * standard Module_GetApi name. The requested API version is ignored --
 * only MODULE_API_VERSION_1 is implemented. */
#ifdef BUILD_MODULE_TYPE_STATIC
MODULE_EXPORT const MODULE_API* MODULE_STATIC_GETAPI(MQTT_GATEWAY_MODULE)(MODULE_API_VERSION gateway_api_version)
#else
MODULE_EXPORT const MODULE_API* Module_GetApi(MODULE_API_VERSION gateway_api_version)
#endif
{
    (void)gateway_api_version;
    return (const MODULE_API *)&MqttGateway_APIS_all;
}
|
/*
Copyright (C) 2004, 2005, 2006, 2008 Nikolas Zimmermann <[email protected]>
2004, 2005, 2006, 2007 Rob Buis <[email protected]>
This file is part of the KDE project
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this library; see the file COPYING.LIB. If not, write to
the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
#include "config.h"
#if ENABLE(SVG)
#include "SVGLineElement.h"
#include "FloatPoint.h"
#include "MappedAttribute.h"
#include "RenderPath.h"
#include "SVGLength.h"
#include "SVGNames.h"
namespace WebCore {
// Construct an SVG <line> element, registering its four animatable geometry
// properties (x1, y1, x2, y2) with their length modes and the
// externalResourcesRequired flag (default false).
SVGLineElement::SVGLineElement(const QualifiedName& tagName, Document* doc)
    : SVGStyledTransformableElement(tagName, doc)
    , SVGTests()
    , SVGLangSpace()
    , SVGExternalResourcesRequired()
    , m_x1(this, SVGNames::x1Attr, LengthModeWidth)
    , m_y1(this, SVGNames::y1Attr, LengthModeHeight)
    , m_x2(this, SVGNames::x2Attr, LengthModeWidth)
    , m_y2(this, SVGNames::y2Attr, LengthModeHeight)
    , m_externalResourcesRequired(this, SVGNames::externalResourcesRequiredAttr, false)
{
}
// No element-specific cleanup; members release their own resources.
SVGLineElement::~SVGLineElement()
{
}
// Map each line-geometry attribute (x1/y1/x2/y2) onto its animatable
// SVGLength base value; everything else is delegated to the mixins and
// finally to the styled-transformable base class.
void SVGLineElement::parseMappedAttribute(MappedAttribute* attr)
{
    const QualifiedName& name = attr->name();
    if (name == SVGNames::x1Attr) {
        setX1BaseValue(SVGLength(LengthModeWidth, attr->value()));
        return;
    }
    if (name == SVGNames::y1Attr) {
        setY1BaseValue(SVGLength(LengthModeHeight, attr->value()));
        return;
    }
    if (name == SVGNames::x2Attr) {
        setX2BaseValue(SVGLength(LengthModeWidth, attr->value()));
        return;
    }
    if (name == SVGNames::y2Attr) {
        setY2BaseValue(SVGLength(LengthModeHeight, attr->value()));
        return;
    }
    // Give each mixin a chance to consume the attribute before falling
    // through to the base class.
    if (SVGTests::parseMappedAttribute(attr))
        return;
    if (SVGLangSpace::parseMappedAttribute(attr))
        return;
    if (SVGExternalResourcesRequired::parseMappedAttribute(attr))
        return;
    SVGStyledTransformableElement::parseMappedAttribute(attr);
}
// Invalidate layout when a geometry attribute or any attribute known to the
// mixins/base class changes while the element is rendered.
void SVGLineElement::svgAttributeChanged(const QualifiedName& attrName)
{
    SVGStyledTransformableElement::svgAttributeChanged(attrName);
    if (!renderer())
        return;
    const bool geometryChanged = attrName == SVGNames::x1Attr
        || attrName == SVGNames::y1Attr
        || attrName == SVGNames::x2Attr
        || attrName == SVGNames::y2Attr;
    if (geometryChanged
        || SVGTests::isKnownAttribute(attrName)
        || SVGLangSpace::isKnownAttribute(attrName)
        || SVGExternalResourcesRequired::isKnownAttribute(attrName)
        || SVGStyledTransformableElement::isKnownAttribute(attrName))
        renderer()->setNeedsLayout(true);
}
// Build the path for this element: a single segment from (x1, y1) to
// (x2, y2), with lengths resolved against this element's context.
Path SVGLineElement::toPathData() const
{
    FloatPoint start(x1().value(this), y1().value(this));
    FloatPoint end(x2().value(this), y2().value(this));
    return Path::createLine(start, end);
}
// True when any endpoint coordinate is a relative (e.g. percentage) length.
bool SVGLineElement::hasRelativeValues() const
{
    if (x1().isRelative())
        return true;
    if (y1().isRelative())
        return true;
    if (x2().isRelative())
        return true;
    return y2().isRelative();
}
}
#endif // ENABLE(SVG)
|
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_config import cfg
from oslo_utils import excutils
from neutron.i18n import _LE
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.cisco.l3 import hosting_device_drivers
LOG = logging.getLogger(__name__)
# Length mgmt port UUID to be part of VM's config drive filename
CFG_DRIVE_UUID_START = 24
CFG_DRIVE_UUID_LEN = 12

# Config options for the CSR1kv hosting-device driver; registered below
# under the [hosting_devices] section.
CSR1KV_HD_DRIVER_OPTS = [
    cfg.StrOpt('csr1kv_configdrive_template', default='csr1kv_cfg_template',
               help=_("CSR1kv configdrive template file.")),
]

cfg.CONF.register_opts(CSR1KV_HD_DRIVER_OPTS, "hosting_devices")
class CSR1kvHostingDeviceDriver(hosting_device_drivers.HostingDeviceDriver):
    """Hosting-device driver that renders config-drive files for CSR1kv VMs."""

    def hosting_device_name(self):
        # Identifier for this hosting-device type.
        return "CSR1kv"

    def create_config(self, context, mgmtport):
        """Render the config-drive contents for a CSR1kv instance.

        :param context: neutron request context.
        :param mgmtport: management port dict; its first fixed IP becomes
            the VM's management address.
        :returns: dict mapping the config-drive filename to its rendered
            contents.
        :raises IOError: re-raised after logging if the template cannot
            be read.
        """
        mgmt_ip = mgmtport['fixed_ips'][0]['ip_address']
        subnet_data = self._core_plugin.get_subnet(
            context, mgmtport['fixed_ips'][0]['subnet_id'],
            ['cidr', 'gateway_ip', 'dns_nameservers'])
        netmask = str(netaddr.IPNetwork(subnet_data['cidr']).netmask)
        # NOTE(review): dns_nameservers is fetched above but a hard-coded
        # 8.8.8.8 is substituted instead -- confirm this is intentional.
        params = {'<ip>': mgmt_ip, '<mask>': netmask,
                  '<gw>': subnet_data['gateway_ip'],
                  '<name_server>': '8.8.8.8'}
        try:
            cfg_template_filename = (
                cfg.CONF.general.templates_path + "/" +
                cfg.CONF.hosting_devices.csr1kv_configdrive_template)
            vm_cfg_data = ''
            with open(cfg_template_filename, 'r') as cfg_template_file:
                # insert proper instance values in the template: any
                # whitespace-delimited token matching a params key (e.g.
                # '<ip>') is replaced by its value.
                for line in cfg_template_file:
                    tokens = line.strip('\n').split(' ')
                    line = ' '.join(map(lambda x: params.get(x, x),
                                        tokens)) + '\n'
                    vm_cfg_data += line
            return {'iosxe_config.txt': vm_cfg_data}
        except IOError:
            with excutils.save_and_reraise_exception():
                LOG.exception(_LE('Failed to create config file. Trying to '
                                  'clean up.'))
                # NOTE(review): delete_configdrive_files is not defined in
                # this class nor visible in the base -- verify it exists on
                # the parent driver.
                self.delete_configdrive_files(context, mgmtport)

    @property
    def _core_plugin(self):
        # Lazily resolve the neutron core plugin at access time.
        return manager.NeutronManager.get_plugin()
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Signal processing operations.
See the @{$python/contrib.signal} guide.
@@frame
@@hamming_window
@@hann_window
@@inverse_stft
@@inverse_stft_window_fn
@@mfccs_from_log_mel_spectrograms
@@linear_to_mel_weight_matrix
@@overlap_and_add
@@stft
[hamming]: https://en.wikipedia.org/wiki/Window_function#Hamming_window
[hann]: https://en.wikipedia.org/wiki/Window_function#Hann_window
[mel]: https://en.wikipedia.org/wiki/Mel_scale
[mfcc]: https://en.wikipedia.org/wiki/Mel-frequency_cepstrum
[stft]: https://en.wikipedia.org/wiki/Short-time_Fourier_transform
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.signal.python.ops.mel_ops import linear_to_mel_weight_matrix
from tensorflow.contrib.signal.python.ops.mfcc_ops import mfccs_from_log_mel_spectrograms
from tensorflow.contrib.signal.python.ops.reconstruction_ops import overlap_and_add
from tensorflow.contrib.signal.python.ops.shape_ops import frame
# `frame` used to be named `frames`, which is a noun and not a verb.
# Keep an alias to `frames` for backwards compatibility.
from tensorflow.contrib.signal.python.ops.shape_ops import frame as frames
from tensorflow.contrib.signal.python.ops.spectral_ops import inverse_stft
from tensorflow.contrib.signal.python.ops.spectral_ops import inverse_stft_window_fn
from tensorflow.contrib.signal.python.ops.spectral_ops import stft
from tensorflow.contrib.signal.python.ops.window_ops import hamming_window
from tensorflow.contrib.signal.python.ops.window_ops import hann_window
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__)
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import object
from contextlib import closing
from io import BytesIO
class Manifest(object):
    """
    Implements the basics of the jar manifest specification.

    See: http://docs.oracle.com/javase/1.5.0/docs/guide/jar/jar.html#Manifest Specification
    """

    @staticmethod
    def _wrap(text):
        """Yield ascii-encoded chunks of a manifest line.

        The first chunk carries up to 70 bytes; each continuation chunk
        carries up to 69 bytes and is prefixed with a single space, per the
        jar manifest line-wrapping rules.
        """
        data = text.encode('ascii')
        yield data[:70]
        pos = 70
        while pos < len(data):
            yield b' ' + data[pos:pos + 69]
            pos += 69

    # Standard location of the manifest inside a jar.
    PATH = 'META-INF/MANIFEST.MF'

    # Well-known manifest attribute names.
    MANIFEST_VERSION = 'Manifest-Version'
    CREATED_BY = 'Created-By'
    MAIN_CLASS = 'Main-Class'
    CLASS_PATH = 'Class-Path'

    def __init__(self, contents=''):
        # Stored as ascii bytes with surrounding whitespace removed.
        self._contents = contents.strip().encode('ascii')

    def addentry(self, header, value):
        """Append a 'header: value' attribute, wrapping long lines."""
        if len(header) > 68:
            raise ValueError('Header name must be 68 characters or less, given {}'.format(header))
        wrapped = b'\n'.join(self._wrap('{header}: {value}'.format(header=header, value=value)))
        if self._contents:
            self._contents = self._contents + b'\n' + wrapped
        else:
            self._contents = wrapped

    def contents(self):
        """Return the manifest body as bytes, with a trailing newline."""
        return self._contents + b'\n'

    def is_empty(self):
        """True when no attributes have been added."""
        return not self._contents.strip()
|
import wikipedia
import requests
import os
import re
import shutil
import zipfile
from clint.textui import progress
def wiki_image(lang, query, addkeywords):
"""Download full size images from Wikipedia.
Don't print or republish images without permission.
"""
wikipedia.set_lang(lang)
suggestion = wikipedia.search(query, results=1)
try:
page = wikipedia.page(suggestion)
except wikipedia.exceptions.DisambiguationError as e:
for option in e.options:
found = False
for addkeyword in addkeywrds:
if addkeyword in option:
print option
page = wikipedia.page(option)
found = True
break
if found:
break
else:
page = wikipedia.page(e.options[0])
#print page.images
for image in page.images:
if 'logo' in image:
return image
else:
return page.images[0]
def download_file(url, folder):
if not os.path.exists(folder):
os.makedirs(folder)
local_filename = folder + url.split('/')[-1]
# NOTE the stream=True parameter
print "Downloading %s" % local_filename
r = requests.get(url, stream=True)
with open(local_filename, 'wb') as f:
total_length = int(r.headers.get('content-length'))
for chunk in progress.bar(r.iter_content(chunk_size=1024), expected_size=(total_length / 1024) + 1):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush() # commented by recommendation from J.F.Sebastian
return local_filename
def filter_consulta_cand(regex, year):
    """Download the full information from TSE candidates in temporary files and filter them by returning only the lines
    that match the passed regular expression in form of an array.
    """
    temporaryDirPath = os.path.dirname(__file__) + "/tmp/"
    #Download zip file
    # The literal substring "year" inside the URL template is replaced with
    # the requested election year.
    url = "http://agencia.tse.jus.br/estatistica/sead/odsele/consulta_cand/consulta_cand_year.zip".replace("year", year)
    candFile = download_file(url, temporaryDirPath)
    print("Exctracting files...")
    candZip = zipfile.ZipFile(candFile, 'r')
    candZip.extractall(temporaryDirPath)
    candZip.close()
    print("Files exctracted.")
    candidates = []
    # Scan every extracted .txt file for lines matching the caller's regex.
    for fileName in os.listdir(temporaryDirPath):
        if re.search(".*\.txt", fileName): #if it is a txt file
            print("Search regular expression in " + fileName)
            with open(temporaryDirPath + fileName, 'r') as currentFile:
                for line in currentFile:
                    if re.search(regex, line):
                        # NOTE(review): line.decode() implies Python 2 byte
                        # strings; this call breaks under Python 3.
                        candidates.append(line.decode('iso-8859-1').replace("\"", "").replace("\n", ""))
    shutil.rmtree(temporaryDirPath) #remove temporary folder
    return candidates
|
//-----------------------------------------------------------------------
// <copyright file="Scout.cs" company="Hyperar">
// Copyright (c) Hyperar. All rights reserved.
// </copyright>
// <author>Matías Ezequiel Sánchez</author>
//-----------------------------------------------------------------------
namespace Hyperar.HattrickUltimate.BusinessObjects.Hattrick.YouthTeamDetails
{
using System;
/// <summary>
/// Scout node within YouthTeamDetails XML file.
/// </summary>
    /// <summary>
    /// Scout node within YouthTeamDetails XML file. Plain data holder
    /// populated from the Hattrick CHPP XML response.
    /// </summary>
    public class Scout
    {
        #region Public Properties

        /// <summary>
        /// Gets or sets the Age (in years).
        /// </summary>
        public byte Age { get; set; }

        /// <summary>
        /// Gets or sets the Country the Scout comes from.
        /// </summary>
        public Country Country { get; set; }

        /// <summary>
        /// Gets or sets the date and time when the Scout was hired.
        /// </summary>
        public DateTime HiredDate { get; set; }

        /// <summary>
        /// Gets or sets the ID of the associated Hall Of Fame Player.
        /// </summary>
        /// <remarks>0 if None.</remarks>
        public long HofPlayerId { get; set; }

        /// <summary>
        /// Gets or sets the Country the Scout is in.
        /// </summary>
        public Country InCountry { get; set; }

        /// <summary>
        /// Gets or sets the Region the Scout is in.
        /// </summary>
        public Region InRegion { get; set; }

        /// <summary>
        /// Gets or sets the date and time when the Scout was last called.
        /// </summary>
        public DateTime LastCalled { get; set; }

        /// <summary>
        /// Gets or sets the type of Player the Scout is searching for.
        /// </summary>
        /// <remarks>
        /// Any = 0. Keeper = 1. Defender = 2. Wingback = 3. Midfielder = 4. Winger = 5. Forward = 6.
        /// </remarks>
        public byte PlayerSearchType { get; set; }

        /// <summary>
        /// Gets or sets the Region the Scout searches in.
        /// </summary>
        public Region Region { get; set; }

        /// <summary>
        /// Gets or sets the Scout Name.
        /// </summary>
        public string ScoutName { get; set; }

        /// <summary>
        /// Gets or sets the Scout's Travel details.
        /// </summary>
        public Travel Travel { get; set; }

        /// <summary>
        /// Gets or sets the Youth Scout ID.
        /// </summary>
        public long YouthScoutId { get; set; }

        #endregion Public Properties
    }
}
|
# Copyright (C) 2015 Red Hat, Inc.
# Author: Petr Spacek <[email protected]>
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import binascii
import dns.rdata
from dns._compat import xrange
class EUIBase(dns.rdata.Rdata):
    """EUIxx record

    @ivar eui: xx-bit Extended Unique Identifier (EUI-xx) as raw bytes
    @type eui: string
    @see: rfc7043.txt"""

    __slots__ = ['eui']

    # define these in subclasses
    # byte_len = 6  # 0123456789ab (in hex)
    # text_len = byte_len * 3 - 1  # 01-23-45-67-89-ab

    def __init__(self, rdclass, rdtype, eui):
        # eui must be exactly byte_len raw bytes (subclass-defined).
        super(EUIBase, self).__init__(rdclass, rdtype)
        if len(eui) != self.byte_len:
            raise dns.exception.FormError('EUI%s rdata has to have %s bytes'
                                          % (self.byte_len * 8, self.byte_len))
        self.eui = eui

    def to_text(self, origin=None, relativize=True, **kw):
        # Render as dash-separated hex pairs, e.g. '01-23-45-67-89-ab'.
        return dns.rdata._hexify(self.eui, chunksize=2).replace(' ', '-')

    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
        # Parse the presentation form, enforcing exact length and dashes
        # at every third position before hex-decoding.
        text = tok.get_string()
        tok.get_eol()
        if len(text) != cls.text_len:
            raise dns.exception.SyntaxError(
                'Input text must have %s characters' % cls.text_len)
        expected_dash_idxs = xrange(2, cls.byte_len * 3 - 1, 3)
        for i in expected_dash_idxs:
            if text[i] != '-':
                raise dns.exception.SyntaxError('Dash expected at position %s'
                                                % i)
        text = text.replace('-', '')
        try:
            data = binascii.unhexlify(text.encode())
        except (ValueError, TypeError) as ex:
            raise dns.exception.SyntaxError('Hex decoding error: %s' % str(ex))
        return cls(rdclass, rdtype, data)

    def to_wire(self, file, compress=None, origin=None):
        # Wire format is just the raw identifier bytes.
        file.write(self.eui)

    @classmethod
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
        eui = wire[current:current + rdlen].unwrap()
        return cls(rdclass, rdtype, eui)
|
import { ID, varsMap, Status, tagMap, Team, Plan } from './common.model';
import { Build, Environment } from './environment.model';
import { ProvisionedHost } from './host.model';
/**
 * A network as defined in an environment blueprint.
 */
export interface Network {
  /** Unique identifier for the network. */
  id: ID;
  /** Whether the network is exposed in the VDI. */
  vdi_visible: boolean;
  /** Display name of the network. */
  name?: string;
  /** Address block for the network in CIDR notation. */
  cidr?: string;
  /** Template variables attached to the network. */
  vars?: varsMap[];
  /** Arbitrary key/value tags attached to the network. */
  tags?: tagMap[];
  /** Environment this network belongs to. */
  NetworkToEnvironment?: Environment;
}
/**
 * A concrete instance of a {@link Network} provisioned as part of a build.
 */
export interface ProvisionedNetwork {
  /** Unique identifier for the provisioned network. */
  id: ID;
  /** Display name of the provisioned network. */
  name: string;
  /** Address block in CIDR notation. */
  cidr: string;
  /** Current provisioning status. */
  ProvisionedNetworkToStatus?: Status;
  /** Blueprint network this instance was provisioned from. */
  ProvisionedNetworkToNetwork?: Network;
  /** Build that produced this network. */
  ProvisionedNetworkToBuild?: Build;
  /** Team that owns this network. */
  ProvisionedNetworkToTeam?: Team;
  /** Hosts provisioned inside this network. */
  ProvisionedNetworkToProvisionedHost?: ProvisionedHost[];
  /** Plan entry that drove this network's provisioning. */
  ProvisionedNetworkToPlan?: Plan;
}
|
import collections
import ConfigParser
import openstack.common.exceptions as exceptions
class Registry(object):
    """Used to store and retrieve config values."""

    def __init__(self):
        """Initialize a new configuration registry."""
        # Defined options: section -> {option name -> datatype item}.
        self._data = collections.defaultdict(dict)
        # Values set before their option was define()d:
        # section -> {option name -> raw value}.
        self._junk = collections.defaultdict(dict)
        # NOTE(review): ConfigParser is the Python 2 module name; this file
        # predates Python 3's configparser.
        self._parser = ConfigParser.ConfigParser()

    def get(self, section, option):
        """Return a particular option value.

        Raises NoSuchConfigOption if the option was never define()d.
        """
        try:
            # Returning a value in the config which has been 'defined'
            return self._data[section][option].from_parser(self._parser)
        except KeyError:
            # Requested value has not been 'defined'
            # NOTE(review): a KeyError raised inside from_parser() is also
            # reported as NoSuchConfigOption -- confirm that is intended.
            raise exceptions.NoSuchConfigOption(section=section, option=option)

    def set(self, section, option, value):
        """Set the value of a particular config option."""
        try:
            self._data[section][option].value = value
        except KeyError:
            # Value hasn't been defined yet, so throw it in junk; define()
            # will adopt it later.
            self._junk[section][option] = value

    def load(self, config_file):
        """Load a configuration file into the registry."""
        # Parse the file, then route every option through set() so defined
        # options are updated and unknown ones are parked in junk.
        self._parser.readfp(config_file)
        for section in self._parser.sections():
            for option, value in self._parser.items(section):
                self.set(section, option, value)

    def define(self, section, name, datatype, description, default=None):
        """Define a configuration option in the config registry.

        :param section: The config section this option belongs in
        :param name: The name of the config option
        :param datatype: The class/datatype of the config option
        :param description: A short description of the config option
        :param default: The default value for the config option
        :returns: None
        """
        item = datatype(name, description, default)
        # Adopt any value that arrived via set()/load() before definition.
        if name in self._junk[section]:
            item.value = self._junk[section][name]
        if name in self._data[section]:
            raise exceptions.OptionRedefined(section=section, option=name)
        self._data[section][name] = item
|
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Code shared by third-party endpoint (API and feeds) view modules."""
import django.http
import xml.etree.ElementTree as ET
import config
import model
import utils
import views.thirdparty_endpoints.base
# Short aliases for the XML namespace prefixes declared in the feed base
# module (Atom, Google Person Finder, and GeoRSS).
ATOM = views.thirdparty_endpoints.base.ATOM
GPF = views.thirdparty_endpoints.base.GPF
GEORSS = views.thirdparty_endpoints.base.GEORSS
class RepoFeedView(views.thirdparty_endpoints.base.ThirdPartyFeedBaseView):
    """View for the repo feed."""

    # Feed title placed in the Atom <title> element.
    _TITLE = 'Person Finder Repository Feed'

    def check_auth(self):
        # Anyone can access the repos feed.
        pass

    def add_feed_elements(self, root):
        """Populate the Atom feed root with repository entries.

        For the special 'global' repo, lists every non-staging repository;
        otherwise lists just the current repo (404 if it is not active).
        """
        ET.SubElement(root, 'id').text = self.build_absolute_uri()
        ET.SubElement(root, 'title').text = RepoFeedView._TITLE
        if self.env.repo == 'global':
            repos = model.Repo.all().filter(
                'activation_status !=', model.Repo.ActivationStatus.STAGING)
        else:
            repo = model.Repo.get(self.env.repo)
            if repo.activation_status == model.Repo.ActivationStatus.ACTIVE:
                repos = [repo]
            else:
                raise django.http.Http404()
        # Load each repo's configuration once; entries and the feed-level
        # updated timestamp both need it.
        repo_confs = {}
        for repo in repos:
            repo_id = repo.key().name()
            repo_conf = config.Configuration(repo_id, include_global=False)
            repo_confs[repo_id] = repo_conf
        updated_dates = [conf.updated_date for conf in repo_confs.values()]
        # If there's no non-staging repositories, it's not really clear what
        # updated_date should be; we just use the current time.
        latest_updated_date = (
            max(updated_dates) if updated_dates else utils.get_utcnow())
        ET.SubElement(root, 'updated').text = utils.format_utc_timestamp(
            latest_updated_date)
        for repo in repos:
            # The query above only excluded STAGING; deactivated repos are
            # filtered out here.
            if repo.activation_status == model.Repo.ActivationStatus.ACTIVE:
                self._add_repo_entry(root, repo, repo_confs[repo.key().name()])

    def _add_repo_entry(self, root, repo, repo_conf):
        """Append one Atom <entry> describing a repository to the feed."""
        entry_el = ET.SubElement(root, 'entry')
        ET.SubElement(entry_el, 'id').text = self.build_absolute_uri(
            '/', repo.key().name())
        # The first language in the menu is treated as the repo's default.
        if repo_conf.language_menu_options:
            default_lang = repo_conf.language_menu_options[0]
            title_el = ET.SubElement(
                entry_el, 'title', {'lang': default_lang})
            title_el.text = repo_conf.repo_titles[default_lang]
        ET.SubElement(entry_el, 'updated').text = utils.format_utc_timestamp(
            repo_conf.updated_date)
        content_el = ET.SubElement(entry_el, 'content', {'type': 'text/xml'})
        repo_el = ET.SubElement(content_el, GPF + 'repo')
        for lang, title in repo_conf.repo_titles.items():
            ET.SubElement(repo_el, GPF + 'title', {'lang': lang}).text = title
        ET.SubElement(repo_el, GPF + 'read_auth_key_required').text = (
            'true' if repo_conf.read_auth_key_required else 'false')
        ET.SubElement(repo_el, GPF + 'search_auth_key_required').text = (
            'true' if repo_conf.search_auth_key_required else 'false')
        ET.SubElement(repo_el, GPF + 'test_mode').text = (
            'true' if repo.test_mode else 'false')
        # Fall back to (0, 0) when the repo has no configured map center.
        center = repo_conf.map_default_center or [0, 0]
        location_el = ET.SubElement(repo_el, GPF + 'location')
        ET.SubElement(location_el, GEORSS + 'point').text = (
            '%f %f' % (center[0], center[1]))

    def log(self):
        # Record this access in the API action log.
        self.log_api_action(model.ApiActionLog.REPO)
|
#!/usr/bin/python
import os, sys, shutil, collections
from optparse import OptionParser
def find_recursive(root, subpath, maxdepth=4):
    """Breadth-first search for a directory containing subpath.

    Starts from 'root' (descending at most maxdepth levels), but first tries
    Android-looking directories taken from $PATH, adjusted upward so that
    joining them with subpath makes sense. Returns the absolute directory
    path on success, None otherwise.
    """
    pending = collections.deque([(root, 0)])
    if 'PATH' in os.environ:
        env_dirs = os.environ['PATH'].split(':')
        up_levels = ['..'] * (len(subpath) - 1)
        # Seed PATH-derived candidates at the front of the queue; they are
        # given depth == maxdepth so they are probed but never expanded.
        pending.extendleft([(os.path.join(d, *up_levels), maxdepth)
                            for d in env_dirs if 'android' in d.lower()])
    while pending:
        directory, depth = pending.popleft()
        if os.path.isfile(os.path.join(directory, *subpath)):
            return os.path.abspath(directory)
        if depth < maxdepth:
            for entry in os.listdir(directory):
                child = os.path.join(directory, entry)
                # Skip files and names containing '.' (hidden/suffixed dirs).
                if os.path.isdir(child) and '.' not in entry:
                    pending.append((child, depth + 1))
    return None
def read_local_properties():
    """Read sdk.dir/ndk.dir from android/local.properties near the script.

    :returns: tuple (sdkDir, ndkDir); either element is None when the file
        is absent or the key is not present.
    """
    androidRoot = os.path.join(os.path.dirname(sys.argv[0]), '..', '..', 'android')
    propsFile = os.path.join(androidRoot, 'local.properties')
    sdkDir = None
    ndkDir = None
    if os.path.exists(propsFile):
        with open(propsFile, 'r') as f:
            for line in f:
                line = line.strip()
                # Bug fix: split on the first '=' only, so paths that
                # themselves contain '=' are not truncated.
                if line.startswith('sdk.dir') and '=' in line:
                    sdkDir = line.split('=', 1)[1].strip()
                elif line.startswith('ndk.dir') and '=' in line:
                    ndkDir = line.split('=', 1)[1].strip()
    return (sdkDir, ndkDir)
def query_path(title, option, default, subpath):
    """Interactively confirm (or search for) a tool installation path.

    Prompts until the user supplies a directory where joining 'subpath'
    yields an existing file; entering 's' triggers a home-directory search.
    Exits the process when validation fails.

    NOTE(review): the 'option' parameter is accepted but never read --
    confirm whether it was meant to bypass the prompt.
    """
    default = '' if not default else os.path.abspath(default)
    searchHint = ', "s" to search'
    while True:
        # Python 2 raw_input; empty answer keeps the default.
        path = raw_input('Path to {0}{1} [{2}]:'.format(title, searchHint, default)) or default
        if len(searchHint) > 0 and path.lower().strip() == 's':
            # One-shot search under the home directory; disable the hint
            # afterwards whether or not something was found.
            found = find_recursive(os.path.expanduser('~'), subpath)
            if found:
                default = found
            searchHint = ''
        else:
            break
    test = os.path.join(path, *subpath)
    if path and os.path.isfile(test):
        return os.path.abspath(path)
    else:
        print 'Could not find {0}, not an {1} path.'.format(test, title)
        sys.exit(1)
def write_local_properties(sdkDir, ndkDir):
    """Write sdk.dir/ndk.dir into android/local.properties and copy the
    file into each sub-project that needs its own copy."""
    content = ''.join([x + '\n' for x in [
        '# Autogenerated file',
        '# Do not add it to version control',
        'sdk.dir={0}'.format(sdkDir),
        'ndk.dir={0}'.format(ndkDir)
    ]])
    # Create omim/android/local.properties
    androidRoot = os.path.join(os.path.dirname(sys.argv[0]), '..', '..', 'android')
    propsFile = os.path.join(androidRoot, 'local.properties')
    print 'Writing {0}'.format(propsFile)
    with open(propsFile, 'w') as f:
        f.write(content)
    # Copy files to folders (sub-projects each expect their own copy)
    for folder in ['YoPme', 'YoPme2', 'UnitTests']:
        destFolder = os.path.join(androidRoot, folder)
        if not os.path.exists(destFolder):
            os.makedirs(destFolder)
        dst = os.path.join(destFolder, 'local.properties')
        print 'Copying to {0}'.format(dst)
        shutil.copy(propsFile, dst)
if __name__ == '__main__':
    # Accept explicit SDK/NDK paths on the command line.
    parser = OptionParser()
    parser.add_option('-s', '--sdk', help='Path to Android SDK')
    parser.add_option('-n', '--ndk', help='Path to Android NDK')
    options, _ = parser.parse_args()
    sdkDir = options.sdk
    ndkDir = options.ndk
    # Fall back to values previously saved in local.properties.
    if not options.sdk or not options.ndk:
        sdkDirOld, ndkDirOld = read_local_properties()
        if not sdkDir:
            sdkDir = sdkDirOld
        if not ndkDir:
            ndkDir = ndkDirOld
    # Confirm (or interactively locate) each path, then persist the result.
    sdkDir = query_path('Android SDK', options.sdk, sdkDir, ['platform-tools', 'adb'])
    ndkDir = query_path('Android NDK', options.ndk, ndkDir, ['ndk-build'])
    write_local_properties(sdkDir, ndkDir)
|
//
// EKMacro.h
// Copyright (c) 2014-2016 Moch Xiao (http://mochxiao.com).
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
#ifndef EKMacro_h
#define EKMacro_h

// Debug-only NSLog replacement: prefixes each message with the thread kind
// (UI for main thread, BG otherwise), the current selector, source file
// name, and line number. Compiles to nothing in release builds.
#ifndef NSLog
# if DEBUG
# define NSLog(FORMAT, ...) \
do { \
fprintf(stderr,"<%s> %s %s [%d] %s\n", \
(NSThread.isMainThread ? "UI" : "BG"), \
(sel_getName(_cmd)),\
[[[NSString stringWithUTF8String:__FILE__] lastPathComponent] UTF8String], \
__LINE__, \
[[NSString stringWithFormat:FORMAT, ##__VA_ARGS__] UTF8String]); \
} while(0)
# else
# define NSLog(FORMAT, ...)
# endif
#endif

// Shorthand for +[NSString stringWithFormat:].
#define EKString(FORMAT, ...) [NSString stringWithFormat:FORMAT, ##__VA_ARGS__]
// Shorthand for NSLocalizedString with a nil comment.
#define EKLocalizedString(key) NSLocalizedString(key, nil)

// weakify/strongify pair for avoiding retain cycles in blocks. The leading
// @autoreleasepool{} / @try{}@finally{} no-ops force the macro to be used
// after an '@' sign, keeping call sites uniform (@weakify(self)). The DEBUG
// variant differs only in which no-op statement is used.
#ifndef weakify
# if DEBUG
# if __has_feature(objc_arc)
# define weakify(object) autoreleasepool{} __weak __typeof__(object) weak##_##object = object;
# else
# define weakify(object) autoreleasepool{} __block __typeof__(object) block##_##object = object;
# endif
# else
# if __has_feature(objc_arc)
# define weakify(object) try{} @finally{} {} __weak __typeof__(object) weak##_##object = object;
# else
# define weakify(object) try{} @finally{} {} __block __typeof__(object) block##_##object = object;
# endif
# endif
#endif

// Re-establish a strong reference inside the block; must pair with a prior
// weakify of the same name.
#ifndef strongify
# if DEBUG
# if __has_feature(objc_arc)
# define strongify(object) autoreleasepool{} __typeof__(object) object = weak##_##object;
# else
# define strongify(object) autoreleasepool{} __typeof__(object) object = block##_##object;
# endif
# else
# if __has_feature(objc_arc)
# define strongify(object) try{} @finally{} __typeof__(object) object = weak##_##object;
# else
# define strongify(object) try{} @finally{} __typeof__(object) object = block##_##object;
# endif
# endif
#endif

// Defer an object's release to a low-priority queue by capturing it in an
// async block ([object class] keeps the reference alive until the block runs).
#define EKRelease(object) dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{ [object class]; })

// todo(msg): emits a numbered compile-time "[TODO-n] ..." warning with file
// and line information; the 'keyword' no-op forces usage after an '@' sign.
#ifndef todo
# ifndef stringify
# define stringify(str) #str
# endif
# ifndef defer_stringify
# define defer_stringify(str) stringify(str)
# endif
# ifndef pragma_message
# define pragma_message(msg) _Pragma(stringify(message(msg)))
# endif
# ifndef formatted_message
# define formatted_message(msg) "[TODO-" defer_stringify(__COUNTER__) "] " msg " \n" defer_stringify(__FILE__) " line " defer_stringify(__LINE__)
# endif
# ifndef keyword
# define keyword try {} @catch (...) {}
# endif
# define todo(msg) keyword pragma_message(formatted_message(msg))
#endif

#endif /* EKMacro_h */
|
/***************************************************************************
*cr
*cr (C) Copyright 2008-2010 The Board of Trustees of the
*cr University of Illinois
*cr All Rights Reserved
*cr
***************************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include "parboil.h"
#include "atom.h"
#include "cutoff.h"
#include "output.h"
#define ERRTOL 1e-4f
#define NOKERNELS 0
#define CUTOFF1 1
#define CUTOFF6 32
#define CUTOFF6OVERLAP 64
#define CUTOFFCPU 16384
/* Append one "<size> <time>" record to a timing log file.
 *
 * filename: log file to append to (created if it does not exist)
 * size:     problem size recorded in the first column
 * time:     elapsed seconds, written with millisecond precision
 *
 * Returns 0 on success, -1 if the file could not be opened.
 */
int appenddata(const char *filename, int size, double time) {
  FILE *fp;
  fp = fopen(filename, "a");
  if (fp == NULL) {
    /* Report the failure on stderr so it is not lost (or interleaved)
     * when stdout is redirected to capture the timing output. */
    fprintf(stderr, "error appending to file %s..\n", filename);
    return -1;
  }
  fprintf(fp, "%d %.3f\n", size, time);
  fclose(fp);
  return 0;
}
/* Describe a lattice covering the axis-aligned box [lo, hi] with spacing h.
 * Each axis gets floor(extent / h) + 1 grid points. */
LatticeDim
lattice_from_bounding_box(Vec3 lo, Vec3 hi, float h)
{
  LatticeDim dim;
  dim.lo = lo;
  dim.h = h;
  dim.nx = 1 + (int) floorf((hi.x - lo.x) / h);
  dim.ny = 1 + (int) floorf((hi.y - lo.y) / h);
  dim.nz = 1 + (int) floorf((hi.z - lo.z) / h);
  return dim;
}
/* Allocate a zero-initialized Lattice for the given dimensions.
 * The point count is rounded up to a multiple of 8 so kernels may safely
 * access slightly past the logical end.  Exits the process on allocation
 * failure (benchmark-style error handling, consistent with main()). */
Lattice *
create_lattice(LatticeDim dim)
{
  size_t size;
  Lattice *lat = (Lattice *)malloc(sizeof(Lattice));
  if (lat == NULL) {
    fprintf(stderr, "Out of memory\n");
    exit(1);
  }
  lat->dim = dim;
  /* Round the allocated size up to a multiple of 8.  Compute in size_t so
   * nx*ny*nz cannot overflow a 32-bit int on large grids. */
  size = (((size_t)dim.nx * dim.ny * dim.nz) + 7) & ~(size_t)7;
  lat->lattice = (float *)calloc(size, sizeof(float));
  if (lat->lattice == NULL) {
    fprintf(stderr, "Out of memory\n");
    exit(1);
  }
  return lat;
}
/* Release a lattice created by create_lattice(); NULL is a no-op. */
void
destroy_lattice(Lattice *lat)
{
  if (lat == NULL)
    return;
  free(lat->lattice);
  free(lat);
}
/*
 * Entry point: read a PQR atom file, build a zero-initialized lattice over
 * the padded bounding box of the atoms, run the GPU cutoff-potential kernel,
 * zero lattice points too close to any atom, and optionally write a summary.
 */
int main(int argc, char *argv[]) {
  Atoms *atom;
  LatticeDim lattice_dim;
  Lattice *gpu_lattice;
  Vec3 min_ext, max_ext;	/* Bounding box of atoms */
  Vec3 lo, hi;			/* Bounding box with padding */
  float h = 0.5f;		/* Lattice spacing */
  float cutoff = 12.f;		/* Cutoff radius */
  float exclcutoff = 1.f;	/* Radius for exclusion */
  float padding = 0.5f;		/* Bounding box padding distance */
  struct pb_Parameters *parameters;
  struct pb_TimerSet timers;

  /* Read input parameters */
  parameters = pb_ReadParameters(&argc, argv);
  if (parameters == NULL) {
    exit(1);
  }

  /* Expect one input file */
  if (pb_Parameters_CountInputs(parameters) != 1) {
    fprintf(stderr, "Expecting one input file\n");
    exit(1);
  }

  pb_InitializeTimerSet(&timers);
  pb_SwitchToTimer(&timers, pb_TimerID_IO);

  /* Read the atom set; file I/O is charged to the IO timer. */
  {
    const char *pqrfilename = parameters->inpFiles[0];
    if (!(atom = read_atom_file(pqrfilename))) {
      fprintf(stderr, "read_atom_file() failed\n");
      exit(1);
    }
    printf("read %d atoms from file '%s'\n", atom->size, pqrfilename);
  }

  pb_SwitchToTimer(&timers, pb_TimerID_COMPUTE);

  /* find extent of domain */
  get_atom_extent(&min_ext, &max_ext, atom);
  printf("extent of domain is:\n");
  printf("  minimum %g %g %g\n", min_ext.x, min_ext.y, min_ext.z);
  printf("  maximum %g %g %g\n", max_ext.x, max_ext.y, max_ext.z);

  /* Pad the bounding box so the lattice fully encloses every atom. */
  printf("padding domain by %g Angstroms\n", padding);
  lo = (Vec3) {min_ext.x - padding, min_ext.y - padding, min_ext.z - padding};
  hi = (Vec3) {max_ext.x + padding, max_ext.y + padding, max_ext.z + padding};
  printf("domain lengths are %g by %g by %g\n", hi.x-lo.x, hi.y-lo.y, hi.z-lo.z);

  lattice_dim = lattice_from_bounding_box(lo, hi, h);
  gpu_lattice = create_lattice(lattice_dim);

  /*
   * CUDA kernel, with overlapped GPU/CPU computation
   * (enter and exit with the 'compute' timer active)
   */
  if (gpu_compute_cutoff_potential_lattice6overlap(&timers, gpu_lattice, cutoff, atom, 0)) {
    fprintf(stderr, "Computation failed\n");
    exit(1);
  }

  /*
   * Zero the lattice points that are too close to an atom.  This is
   * necessary for numerical stability.
   */
  if (remove_exclusions(gpu_lattice, exclcutoff, atom)) {
    fprintf(stderr, "remove_exclusions() failed for gpu lattice\n");
    exit(1);
  }
  printf("\n");

  /* Print output */
  pb_SwitchToTimer(&timers, pb_TimerID_IO);
  if (parameters->outFile) {
    write_lattice_summary(parameters->outFile, gpu_lattice);
  }
  pb_SwitchToTimer(&timers, pb_TimerID_COMPUTE);

  /* Cleanup */
  destroy_lattice(gpu_lattice);
  free_atom(atom);

  pb_SwitchToTimer(&timers, pb_TimerID_NONE);
  pb_PrintTimerSet(&timers);
  pb_FreeParameters(parameters);

  return 0;
}
|
<?xml version="1.0" encoding="utf-8" ?>
<feed xmlns="http://www.w3.org/2005/Atom">
<title>{{.Config.Name}}</title>
<subtitle>{{.Config.Description}}</subtitle>
<link rel="alternate" type="text/html" href="{{.Config.Address}}" />
<link rel="self" type="application/atom+xml" href="{{.Config.Address}}/rss" />
<id>{{.Config.Address}}/rss</id>
<updated>{{printf "%04d" .Updated.Year}}-{{printf "%02d" .Updated.Month}}-{{printf "%02d" .Updated.Day}}T{{printf "%02d" .Updated.Hour}}:{{printf "%02d" .Updated.Minute}}:{{printf "%02d" .Updated.Second}}Z</updated>
{{$siteUrl := .Config.Address}}
{{$siteName := .Config.Name}}
{{with .Posts}}
{{range .}}
<entry>
<id>{{$siteUrl}}/posts/{{.Url}}</id>
<title>{{.Metadata.Title}}</title>
<published>{{printf "%04d" .Metadata.Date.Year}}-{{printf "%02d" .Metadata.Date.Month}}-{{printf "%02d" .Metadata.Date.Day}}T{{printf "%02d" .Metadata.Date.Hour}}:{{printf "%02d" .Metadata.Date.Minute}}:{{printf "%02d" .Metadata.Date.Second}}Z</published>
<updated>{{printf "%04d" .Metadata.Date.Year}}-{{printf "%02d" .Metadata.Date.Month}}-{{printf "%02d" .Metadata.Date.Day}}T{{printf "%02d" .Metadata.Date.Hour}}:{{printf "%02d" .Metadata.Date.Minute}}:{{printf "%02d" .Metadata.Date.Second}}Z</updated>
<author>
<name>{{$siteName}}</name>
<uri>{{$siteUrl}}</uri>
</author>
<link rel="alternate" type="text/html" href="{{$siteUrl}}/posts/{{.Url}}" />
<content type="html"><![CDATA[{{.Body.HTML}}]]></content>
</entry>
{{end}}
{{end}}
</feed>
|
"""
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from base_console import BaseConsole
from rest_util import (api_call)
class ShareSMBConsole(BaseConsole):
def __init__(self, prompt, share):
BaseConsole.__init__(self)
self.prompt = prompt + ' Samba>'
self.share = share
self.url = ('%sshares/%s/samba/' % (BaseConsole.url, self.share))
def do_enable(self, args):
"""
Make the share available via smb. all flags are optional.
enable -c comment [-b <yes|no> -g <yes|no> -r <yes|no> -m 0755]
"""
arg_fields = args.split()
input_data = {}
for f in arg_fields:
if (f[0:2] == '-c'):
input_data['comment'] = f[2:]
elif (f[0:2] == '-b'):
input_data['browsable'] = f[2:]
elif (f[0:2] == '-g'):
input_data['guest_ok'] = f[2:]
elif (f[0:2] == '-r'):
input_data['read_only'] = f[2:]
elif (f[0:2] == '-m'):
input_data['create_mask'] = f[2:]
else:
return self.do_help(args)
if (len(input_data) == 0):
return self.do_help(args)
samba_info = api_call(self.url, data=input_data, calltype='post')
print samba_info
def do_disable(self, args):
"""
disable smb for the share
"""
samba_info = api_call(self.url, data=None, calltype='delete')
print samba_info
def do_list(self, args):
"""
show smb properties for the share
"""
samba_info = api_call(self.url, data=None)
print samba_info
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that shows how to use FacebookAdsReportToGcsOperator.
"""
import os
from facebook_business.adobjects.adsinsights import AdsInsights
from airflow import models
from airflow.providers.google.cloud.operators.bigquery import (
BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator,
BigQueryExecuteQueryOperator,
)
from airflow.providers.google.cloud.operators.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator
from airflow.utils.dates import days_ago
# [START howto_GCS_env_variables]
# Deployment-specific settings; every value can be overridden through the
# environment so the example runs against any project/bucket.
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "free-tier-1997")
GCS_BUCKET = os.environ.get("GCS_BUCKET", "airflow_bucket_fb")
GCS_OBJ_PATH = os.environ.get("GCS_OBJ_PATH", "Temp/this_is_my_report_csv.csv")
GCS_CONN_ID = os.environ.get("GCS_CONN_ID", "google_cloud_default")
DATASET_NAME = os.environ.get("DATASET_NAME", "airflow_test_dataset")
TABLE_NAME = os.environ.get("FB_TABLE_NAME", "airflow_test_datatable")
# [END howto_GCS_env_variables]
# [START howto_FB_ADS_variables]
# Facebook Ads Insights fields fetched by the report; the BigQuery table
# created below declares one STRING column per field, in the same order.
FIELDS = [
    AdsInsights.Field.campaign_name,
    AdsInsights.Field.campaign_id,
    AdsInsights.Field.ad_id,
    AdsInsights.Field.clicks,
    AdsInsights.Field.impressions,
]
# Report shape: one row per ad, restricted to yesterday's data.
PARAMS = {
    'level': 'ad',
    'date_preset': 'yesterday'
}
# [END howto_FB_ADS_variables]
default_args = {"start_date": days_ago(1)}
with models.DAG(
    "example_facebook_ads_to_gcs",
    default_args=default_args,
    schedule_interval=None,  # Override to match your needs
) as dag:
    # Provision the destination GCS bucket.
    create_bucket = GCSCreateBucketOperator(
        task_id="create_bucket",
        bucket_name=GCS_BUCKET,
        project_id=GCP_PROJECT_ID,
    )
    # Provision the BigQuery dataset and an empty table matching FIELDS.
    create_dataset = BigQueryCreateEmptyDatasetOperator(
        task_id="create_dataset",
        dataset_id=DATASET_NAME,
    )
    create_table = BigQueryCreateEmptyTableOperator(
        task_id="create_table",
        dataset_id=DATASET_NAME,
        table_id=TABLE_NAME,
        schema_fields=[
            {'name': 'campaign_name', 'type': 'STRING', 'mode': 'NULLABLE'},
            {'name': 'campaign_id', 'type': 'STRING', 'mode': 'NULLABLE'},
            {'name': 'ad_id', 'type': 'STRING', 'mode': 'NULLABLE'},
            {'name': 'clicks', 'type': 'STRING', 'mode': 'NULLABLE'},
            {'name': 'impressions', 'type': 'STRING', 'mode': 'NULLABLE'},
        ],
    )
    # [START howto_operator_facebook_ads_to_gcs]
    # Fetch the Ads Insights report and store it as a CSV object in GCS.
    run_operator = FacebookAdsReportToGcsOperator(
        task_id='run_fetch_data',
        start_date=days_ago(2),
        owner='airflow',
        bucket_name=GCS_BUCKET,
        params=PARAMS,
        fields=FIELDS,
        gcp_conn_id=GCS_CONN_ID,
        object_name=GCS_OBJ_PATH,
    )
    # [END howto_operator_facebook_ads_to_gcs]
    # Load the CSV from GCS into the BigQuery table, replacing its contents.
    load_csv = GCSToBigQueryOperator(
        task_id='gcs_to_bq_example',
        bucket=GCS_BUCKET,
        source_objects=[GCS_OBJ_PATH],
        destination_project_dataset_table=f"{DATASET_NAME}.{TABLE_NAME}",
        write_disposition='WRITE_TRUNCATE'
    )
    # Sanity check: count the loaded rows.
    read_data_from_gcs_many_chunks = BigQueryExecuteQueryOperator(
        task_id="read_data_from_gcs_many_chunks",
        sql=f"SELECT COUNT(*) FROM `{GCP_PROJECT_ID}.{DATASET_NAME}.{TABLE_NAME}`",
        use_legacy_sql=False,
    )
    # Tear down the resources created above.
    delete_bucket = GCSDeleteBucketOperator(
        task_id="delete_bucket",
        bucket_name=GCS_BUCKET,
    )
    delete_dataset = BigQueryDeleteDatasetOperator(
        task_id="delete_dataset",
        project_id=GCP_PROJECT_ID,
        dataset_id=DATASET_NAME,
        delete_contents=True,
    )
    # Pipeline order: provision -> fetch report -> load -> check -> clean up.
    create_bucket >> create_dataset >> create_table >> run_operator >> load_csv
    load_csv >> read_data_from_gcs_many_chunks >> delete_bucket >> delete_dataset
|
#!/usr/bin/python
"""To use in developing and testing handling of process concurrency.
A useful utility by itself, try its --help option.
Idea and first implementation - Leo Baschy <srguiwiz12 AT nrvr DOT com>
Public repository - https://github.com/srguiwiz/nrvr-commander
Copyright (c) Nirvana Research 2006-2015.
Simplified BSD License"""
from optparse import OptionParser
import os
import os.path
import sys
import time
# Command-line definition; the description below doubles as --help output.
optionsParser = OptionParser(usage="%prog [options]",
                             description=
"""Utility to use in developing and testing handling of process concurrency.
Sleep any number of seconds, less than one to thousands, optionally repeatedly.
Optionally output repetition of 'o' with any character count, useful for testing buffering.
Exit with status number.  If exit status number not 0 then final message to stderr.
Optional identity string in prefix of output lines, defaults to process id.
Works without options %prog or with options e.g. %prog -s 0.5 -r 3 -c 40 -x 1 -i A.
Also useful hilite %prog -s 0.5 -r 3 -c 40 -x 1 -i A ; echo $?.""",
                             version="%prog 1.0")
optionsParser.add_option("-s", "--sleep", type="float", dest="sleep",
                         help="seconds to sleep, default %default", default=5)
optionsParser.add_option("-r", "--repeat", type="int", dest="repeat",
                         help="repeat sleeping, default %default", default=1)
optionsParser.add_option("-c", "--charcount", type="int", dest="charcount",
                         help="character count for repetition of 'o', default %default", default=0)
optionsParser.add_option("-l", "--line", type="int", dest="line",
                         help="line length of repetition of 'o', default %default", default=100)
optionsParser.add_option("-x", "--exit", type="int", dest="exit",
                         help="exit with status, default %default", default=0)
optionsParser.add_option("-i", "--identity", type="string", dest="identity",
                         help="identity in prefix of output lines, default process id",
                         default="pid" + str(os.getpid()))
optionsParser.add_option("-f", "--flush", action="store_true", dest="flush",
                         help="flush after each line of output, default %default", default=False)
(options, args) = optionsParser.parse_args()
# Every output line is prefixed "[<command>-<identity>]" so interleaved
# output from concurrent instances can be told apart.
commandName = os.path.basename(sys.argv[0])
outputPrefix = "[" + commandName + "-" + options.identity + "]"
print outputPrefix + " begin"
if options.flush:
    sys.stdout.flush()
# Main loop: optionally emit the 'o' filler, then sleep, once per repeat.
for i in range(1, options.repeat + 1):
    goingToSleepMessage = "going to sleep for %(sleep).6g seconds - %(i)d of %(n)d" % \
        {"sleep":options.sleep, "i":i, "n":options.repeat}
    if options.charcount > 0:
        goingToSleepMessage = \
            "after " + str(options.charcount) + " repetitions of 'o' " + goingToSleepMessage
    print outputPrefix + " " + goingToSleepMessage
    if options.flush:
        sys.stdout.flush()
    if options.charcount > 0:
        # Emit charcount 'o' characters wrapped at --line columns; the first
        # line is opened with <os> and the last closed with </os>.
        numberOfLines = (options.charcount - 1) // options.line + 1
        numberOfFullLines = options.charcount // options.line
        for j in range(numberOfLines):
            if j < numberOfFullLines:
                line = "o" * options.line
            else:
                # final, partial line
                line = "o" * (options.charcount % options.line)
            if j == 0:
                line = "<os>" + line
            if j == numberOfLines - 1:
                line = line + "</os>"
            print outputPrefix + " " + line
            if options.flush:
                sys.stdout.flush()
    time.sleep(options.sleep)
    print outputPrefix + " woke up"
    if options.flush:
        sys.stdout.flush()
print outputPrefix + " end"
if options.flush:
    sys.stdout.flush()
# Non-zero exit status is announced on stderr before exiting with it.
if options.exit != 0:
    print >> sys.stderr, outputPrefix + " exiting with status " + str(options.exit)
    if options.flush:
        sys.stderr.flush()
sys.exit(options.exit)
|
#!/bin/python
import os, subprocess
import logging
from autotest.client import test
from autotest.client.shared import error
class pyliblzma(test.test):
    """
    Autotest module for testing basic functionality
    of pyliblzma

    @author Athira Rajeev <[email protected]> ##
    """
    version = 1
    # overall failure counter, raised by run_once and checked in postprocess
    nfail = 0
    path = ''

    def initialize(self):
        """
        Sets the overall failure counter for the test.
        """
        self.nfail = 0
        logging.info('\n Test initialize successfully')

    def run_once(self, test_path=''):
        """
        Trigger test run
        """
        try:
            # The wrapped shell script expects LTPBIN to point at the shared
            # helper directory under test_path.
            os.environ["LTPBIN"] = "%s/shared" %(test_path)
            ret_val = subprocess.Popen(['./pyliblzma.sh'], cwd="%s/pyliblzma" %(test_path))
            # Wait for the script to finish before inspecting its status.
            ret_val.communicate()
            if ret_val.returncode != 0:
                self.nfail += 1
        except error.CmdError, e:
            self.nfail += 1
            logging.error("Test Failed: %s", e)

    def postprocess(self):
        # Report overall pass/fail based on the accumulated counter.
        if self.nfail != 0:
            logging.info('\n nfails is non-zero')
            raise error.TestError('\nTest failed')
        else:
            logging.info('\n Test completed successfully ')
|
#!/usr/bin/env python
import luigi
import luigi_bigquery
from datetime import datetime as dt
import six
class DailyTask(luigi.Task):
    """Top-level daily task: depends on the day's top-10 account table and
    writes a local marker file once it is available."""
    day = luigi.DateParameter()

    def requires(self):
        return DailyTop10Account(day=self.day)

    def output(self):
        stamp = self.day.strftime('%Y%m%d')
        return luigi.LocalTarget("output/DailyTask-{0}.txt".format(stamp))

    def run(self):
        with self.output().open('w') as marker:
            marker.write("Done\n")
class DailyTop10Account(luigi_bigquery.QueryTable):
    """Materialize the day's top-10 accounts into a per-day table in `tmp`."""
    day = luigi.DateParameter()
    source = "queries/daily_top10_account.sql"

    def dataset(self):
        return 'tmp'

    def table(self):
        stamp = self.day.strftime('%Y%m%d')
        return "{0}_{1}".format('daily_top10_account', stamp)
class DailyTop10Organization(luigi_bigquery.QueryTable):
    """Materialize the day's top-10 organizations into a per-day table in `tmp`."""
    day = luigi.DateParameter()
    source = "queries/daily_top10_organization.sql"

    def dataset(self):
        return 'tmp'

    def table(self):
        stamp = self.day.strftime('%Y%m%d')
        return "{0}_{1}".format('daily_top10_organization', stamp)
class DailyTop10Task(luigi_bigquery.QueryTable):
    """Join the per-day account and organization tables into `result`."""
    day = luigi.DateParameter()
    source = "queries/daily_top10.sql"

    def requires(self):
        # Keyed dependencies so the query template can reference each input.
        return {
            'account': DailyTop10Account(day=self.day),
            'organization': DailyTop10Organization(day=self.day),
        }

    def dataset(self):
        return 'result'

    def table(self):
        return "daily_top10_{0}".format(self.day.strftime('%Y%m%d'))

    def input_table(self, index):
        """Fully qualified `dataset.table` name of the dependency at `index`."""
        dep = self.input()[index]
        return "{0}.{1}".format(dep.dataset_id, dep.table_id)
class Top10AccountInAWeek(luigi_bigquery.QueryTable):
    """Aggregate the daily top-10 account tables for 2015-11-08..14 into one
    weekly table in `result`."""
    source = "queries/top10_account_in_a_week.sql"

    def requires(self):
        # Base requirements plus one DailyTop10Account task per day of the week.
        daily = [
            DailyTop10Account(day=dt.strptime("2015-11-{0:02d}".format(d), "%Y-%m-%d"))
            for d in range(8, 15)
        ]
        return [super(Top10AccountInAWeek, self).requires()] + daily

    def dataset(self):
        return 'result'

    def table(self):
        return "daily_top10_account_in_a_week"

    def input_table(self, index):
        """Fully qualified `dataset.table` name of the dependency at `index`."""
        dep = self.input()[index]
        return "{0}.{1}".format(dep.dataset_id, dep.table_id)
class ReportTask(luigi_bigquery.QueryToGCS):
    """Export the weekly top-10 account table to a CSV object on GCS."""
    source = "queries/export_top10_account_in_a_week.sql"

    def requires(self):
        return Top10AccountInAWeek()

    def dataset(self):
        return 'tmp'

    def bucket(self):
        return "bq_sushi_2_01"

    def path(self):
        return "reports/top10_account_in_a_week.csv"
class PandasExample(luigi_bigquery.Query):
    """Run the daily top-10 query and dump its result, rendered through a
    pandas DataFrame, into a local text file."""
    day = luigi.DateParameter()
    source = "queries/daily_top10_account.sql"

    def output(self):
        stamp = self.day.strftime('%Y%m%d')
        return luigi.LocalTarget("output/PandasExample-{0}.txt".format(stamp))

    def run(self):
        sql = self.load_query(self.source)
        rows = self.run_query(sql)
        with self.output().open('w') as out:
            out.write(rows.to_dataframe().to_string())
if __name__ == '__main__':
    import sys
    # Exit 0 when the luigi scheduler reports success, 1 otherwise.
    sys.exit(0 if luigi.run() else 1)
|
using System.Threading.Tasks;
using Skimia.Extensions.Messages.Abstractions;
namespace Skimia.Extensions.Messages
{
public class MessageDispatcherTask
{
public IMessageDispatcher Dispatcher { get; }
public bool Running
{
get;
private set;
}
public object Processor { get; }
public MessageDispatcherTask(IMessageDispatcher dispatcher)
{
Dispatcher = dispatcher;
Processor = this;
}
public MessageDispatcherTask(IMessageDispatcher dispatcher, object processor)
{
Dispatcher = dispatcher;
Processor = processor;
}
public void Start()
{
Running = true;
Task.Factory.StartNew(Process);
}
public void Stop()
{
Running = false;
}
private void Process()
{
while (Running)
{
Dispatcher.Wait();
if (Running)
Dispatcher.ProcessDispatching(Processor);
}
}
}
}
|
<html>
<head>
<title></title>
</head>
<body>
<h1>2222222</h1>
</body>
</html>
|
# Demo of the Spacy NLP library
# Based on https://spacy.io/
# See also
# https://nlpforhackers.io/complete-guide-to-spacy/
import spacy
# Load two pipelines: the small model (no word vectors) and the medium
# model (includes word vectors).
nlps = spacy.load('en_core_web_sm')
nlpm = spacy.load('en_core_web_md')

# Word vectors: real words have vectors; 'afskfsd' is out-of-vocabulary.
tokens = nlpm(u'dog cat banana afskfsd')
for token in tokens:
    print(token.text, token.has_vector, token.vector_norm, token.is_oov)

# Full pipeline (lemmatizer, tagger, parser) on one sentence.
doc = nlps(u'Apple is looking at buying the U.K. startup FooCon for $1 billion.')
for token in doc:
    print(token.text, token.lemma_, token.pos_, token.tag_, token.dep_,
          token.shape_, token.is_alpha, token.is_stop)
# Expected output of the loop above:
"""
Apple apple PROPN NNP nsubj Xxxxx True False
is be VERB VBZ aux xx True True
looking look VERB VBG ROOT xxxx True False
at at ADP IN prep xx True True
buying buy VERB VBG pcomp xxxx True False
the the DET DT det xxx True True
U.K. u.k. PROPN NNP compound X.X. False False
startup startup NOUN NN dobj xxxx True False
FooCon foocon NOUN NN appos XxxXxx True False
for for ADP IN prep xxx True True
$ $ SYM $ quantmod $ False False
1 1 NUM CD compound d False False
billion billion NUM CD pobj xxxx True False
. . PUNCT . punct . False False
"""

# With the medium model, 'is' and 'at' are not flagged as stop words.
# This is a known bug.
# https://github.com/explosion/spaCy/issues/922
# Fix: re-register the IS_STOP flag against the English stop-word list.
nlpm.vocab.add_flag(lambda s: s.lower() in spacy.lang.en.stop_words.STOP_WORDS, spacy.attrs.IS_STOP)
doc = nlpm(u'Apple is looking at buying the U.K. startup FooCon for $1 billion.')
for token in doc:
    print(token.text, token.lemma_, token.pos_, token.tag_, token.dep_,
          token.shape_, token.is_alpha, token.is_stop)

corpus=[
    'Mary had a little lamb, little lamb, little lamb',
    'Mary had a little lamb',
    'Whose fleece was white as snow.',
    'And everywhere that Mary went',
    'Mary went, Mary went,',
    'Everywhere that Mary went',
    'The lamb was sure to go.'
]
# Tokenize every document and collect the distinct surface forms.
corpus_tokenized = [nlpm(doc) for doc in corpus]
all_tokens = [token for doc in corpus_tokenized for token in doc]
vocab = {str(token) for token in all_tokens}
|
---
title: "पीसीबी ने सरफ़राज़ को कप्तानी से हटाने का किया फैसला, बाबर बन सकते हैं नए कप्तान!"
layout: item
category: ["sports"]
date: 2019-10-01T17:46:36.559Z
image: 1569951996558sarfaraz-ahmad-babar-azam-pcb-ehsan-mani-captain-.jpg
---
<p>लाहौर: पाकिस्तान क्रिकेट बोर्ड ने मौजूदा कप्तान सरफ़राज़ अहमद को कप्तानी से हटाने का फैसला कर लिया है और उनके स्थान पर बाबर आज़म को कप्तान बनाये जाने की पूरी सम्भावना है | </p>
<p>पाकिस्तान के अख़बार एक्सप्रेस न्यूज़ के अनुसार पीसीबी के चेयरमैन एहसान मणि ने पाकिस्तान क्रिकेट टीम के कप्तान को बदलने का निर्णय कर लिया है | पीसीबी के सूत्रों का कहना है कि सरफ़राज़ अहमद की परफॉरमेंस प्रभावकारी नहीं रही है इसलिए उन्हें कप्तानी से हटाया जा रहा है| सूत्रों ने बताया कि सरफ़राज़ के स्थान पर बाबर आज़म को नया कप्तान बनाये जाने की पूरी सम्भावना है यद्यपि पाकिस्तान क्रिकेट टीम के नए कप्तान की घोषणा पाकिस्तान-श्रीलंका सीरीज़ के बाद होने की सम्भावना है | </p>
|
import os
import pickle
from PyQt4 import QtGui
from Orange.base import Model
from Orange.widgets import widget, gui
from Orange.widgets.settings import Setting
class OWLoadClassifier(widget.OWWidget):
    """Orange widget: load a pickled classifier from disk and send it to the
    "Classifier" output.

    A persistent list of recently opened files (`history`) and the last
    selection (`filename`) are kept across sessions via `Setting`.
    """
    name = "Load Classifier"
    description = "Load a classifier from an input file."
    priority = 3050
    icon = "icons/LoadClassifier.svg"

    outputs = [("Classifier", Model, widget.Dynamic)]

    #: List of recent filenames.
    history = Setting([])
    #: Current (last selected) filename or None.
    filename = Setting(None)

    FILTER = "Pickle files (*.pickle *.pck)\nAll files (*.*)"

    want_main_area = False
    resizing_enabled = False

    def __init__(self, parent=None):
        super().__init__(parent)
        self.selectedIndex = -1

        box = gui.widgetBox(
            self.controlArea, self.tr("File"), orientation=QtGui.QHBoxLayout()
        )
        # Combo box listing recent files; selecting an entry loads it.
        self.filesCB = gui.comboBox(
            box, self, "selectedIndex", callback=self._on_recent)
        self.filesCB.setMinimumContentsLength(20)
        self.filesCB.setSizeAdjustPolicy(
            QtGui.QComboBox.AdjustToMinimumContentsLength)

        # "..." button opens a file dialog.
        self.loadbutton = gui.button(box, self, "...", callback=self.browse)
        self.loadbutton.setIcon(
            self.style().standardIcon(QtGui.QStyle.SP_DirOpenIcon))
        self.loadbutton.setSizePolicy(QtGui.QSizePolicy.Maximum,
                                      QtGui.QSizePolicy.Fixed)

        self.reloadbutton = gui.button(
            box, self, "Reload", callback=self.reload, default=True)
        self.reloadbutton.setIcon(
            self.style().standardIcon(QtGui.QStyle.SP_BrowserReload))
        self.reloadbutton.setSizePolicy(QtGui.QSizePolicy.Maximum,
                                        QtGui.QSizePolicy.Fixed)

        # Drop filenames that no longer exist and cap the list at 20 entries.
        self.history = list(filter(os.path.isfile, self.history))[:20]
        for filename in self.history:
            self.filesCB.addItem(os.path.basename(filename), userData=filename)

        # restore the current selection if the filename is
        # in the history list
        if self.filename in self.history:
            self.selectedIndex = self.history.index(self.filename)
        else:
            self.selectedIndex = -1
            self.filename = None
            self.reloadbutton.setEnabled(False)

    def browse(self):
        """Select a filename using an open file dialog."""
        if self.filename is None:
            startdir = QtGui.QDesktopServices.storageLocation(
                QtGui.QDesktopServices.DocumentsLocation)
        else:
            startdir = os.path.dirname(self.filename)

        filename = QtGui.QFileDialog.getOpenFileName(
            self, self.tr("Open"), directory=startdir, filter=self.FILTER)
        if filename:
            self.load(filename)

    def reload(self):
        """Reload the current file."""
        self.load(self.filename)

    def load(self, filename):
        """Load the pickled object from `filename` and send it to output.

        On success the filename is promoted to the top of the recent list.
        """
        try:
            # Context manager closes the handle even when unpickling fails;
            # the previous pickle.load(open(...)) left the file to the GC.
            with open(filename, "rb") as f:
                classifier = pickle.load(f)
        except pickle.UnpicklingError:
            raise  # TODO: error reporting
        except os.error:
            raise  # TODO: error reporting
        else:
            self._remember(filename)
            self.send("Classifier", classifier)

    def _remember(self, filename):
        """Move `filename` to the top of the recent-files list and select it."""
        if filename in self.history:
            index = self.history.index(filename)
            del self.history[index]
            self.filesCB.removeItem(index)

        self.history.insert(0, filename)
        self.filesCB.insertItem(0, os.path.basename(filename),
                                userData=filename)
        self.selectedIndex = 0
        self.filename = filename
        self.reloadbutton.setEnabled(self.selectedIndex != -1)

    def _on_recent(self):
        # Combo-box callback: load the newly selected history entry.
        self.load(self.history[self.selectedIndex])
def main():
    """Run the widget standalone (manual testing entry point)."""
    app = QtGui.QApplication([])
    owwidget = OWLoadClassifier()
    owwidget.show()
    return app.exec_()


if __name__ == "__main__":
    import sys
    sys.exit(main())
|
version https://git-lfs.github.com/spec/v1
oid sha256:f05e33fa3b9ca9554b887b0dc9662c1fafc3619ea1863677aab3cc53958a0b47
size 8105
|
import {ColumnView} from '../scene/view/column.view';
import {DataColumnModel} from './data.column.model';
import {ColumnModel} from './column.model';
/**
 * Column model for the grid's e-mail column type.
 * Shares all data-column behavior with {@link DataColumnModel};
 * NOTE(review): any e-mail-specific defaults are set in the implementation,
 * which is not visible from this declaration file — confirm there.
 */
export declare class EmailColumnModel extends DataColumnModel {
    constructor();
}
/**
 * View counterpart pairing an e-mail column model with the grid's
 * {@link ColumnView} rendering machinery.
 */
export declare class EmailColumn extends ColumnView {
    constructor(model: ColumnModel);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.