import sickbeard from sickbeard import logger, common from lib.pynma import pynma class NMA_Notifier: def test_notify(self, nma_api, nma_priority): return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard", force=True) def notify_snatch(self, ep_name): if sickbeard.NMA_NOTIFY_ONSNATCH: self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name) def notify_download(self, ep_name): if sickbeard.NMA_NOTIFY_ONDOWNLOAD: self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name) def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False): title = 'Sick-Beard' if not sickbeard.USE_NMA and not force: return False if nma_api == None: nma_api = sickbeard.NMA_API if nma_priority == None: nma_priority = sickbeard.NMA_PRIORITY logger.log(u"NMA title: " + title, logger.DEBUG) logger.log(u"NMA event: " + event, logger.DEBUG) logger.log(u"NMA message: " + message, logger.DEBUG) batch = False p = pynma.PyNMA() keys = nma_api.split(',') p.addkey(keys) if len(keys) > 1: batch = True response = p.push(title, event, message, priority=nma_priority, batch_mode=batch) if not response[nma_api][u'code'] == u'200': logger.log(u'Could not send notification to NotifyMyAndroid', logger.ERROR) return False else: return True notifier = NMA_Notifier
#!/usr/bin/env python try: from StringIO import StringIO except ImportError: from io import StringIO from .InventoryFilter import InventoryFilter class DuffyInventory(InventoryFilter): def get_hostnames(self, topo): hostnames = [] for group in topo.get('duffy_res', []): for host in group['hosts']: hostnames.append(host) return hostnames def get_host_ips(self, topo): return self.get_hostnames(topo) def get_inventory(self, topo, layout): if len(topo['duffy_res']) == 0: return "" inven_hosts = self.get_hostnames(topo) # adding sections to respective host groups host_groups = self.get_layout_host_groups(layout) self.add_sections(host_groups) # set children for each host group self.set_children(layout) # set vars for each host group self.set_vars(layout) # add ip addresses to each host self.add_ips_to_groups(inven_hosts, layout) self.add_common_vars(host_groups, layout) output = StringIO() self.config.write(output) return output.getvalue()
# coding: utf-8 from __future__ import unicode_literals import re import json from .common import InfoExtractor class RTBFIE(InfoExtractor): _VALID_URL = r'https?://www.rtbf.be/video/[^\?]+\?id=(?P<id>\d+)' _TEST = { 'url': 'https://www.rtbf.be/video/detail_les-diables-au-coeur-episode-2?id=1921274', 'md5': '799f334ddf2c0a582ba80c44655be570', 'info_dict': { 'id': '1921274', 'ext': 'mp4', 'title': 'Les Diables au coeur (épisode 2)', 'description': 'Football - Diables Rouges', 'duration': 3099, 'timestamp': 1398456336, 'upload_date': '20140425', } } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') page = self._download_webpage('https://www.rtbf.be/video/embed?id=%s' % video_id, video_id) data = json.loads(self._html_search_regex( r'<div class="js-player-embed(?: player-embed)?" data-video="([^"]+)"', page, 'data video'))['data'] video_url = data.get('downloadUrl') or data.get('url') if data['provider'].lower() == 'youtube': return self.url_result(video_url, 'Youtube') return { 'id': video_id, 'url': video_url, 'title': data['title'], 'description': data.get('description') or data.get('subtitle'), 'thumbnail': data['thumbnail']['large'], 'duration': data.get('duration') or data.get('realDuration'), 'timestamp': data['created'], 'view_count': data['viewCount'], }
/* * $Id: PrintingServiceHome.java,v 1.1 2004/11/04 20:32:46 aron Exp $ * Created on 4.11.2004 * * Copyright (C) 2004 Idega Software hf. All Rights Reserved. * * This software is the proprietary information of Idega hf. * Use is subject to license terms. */ package com.idega.block.pdf.business; import com.idega.business.IBOHome; /** * * Last modified: $Date: 2004/11/04 20:32:46 $ by $Author: aron $ * * @author <a href="mailto:[email protected]">aron</a> * @version $Revision: 1.1 $ */ public interface PrintingServiceHome extends IBOHome { public PrintingService create() throws javax.ejb.CreateException, java.rmi.RemoteException; }
/*---------------------------------------------------------------------------*\ ========= | \\ / F ield | OpenFOAM: The Open Source CFD Toolbox \\ / O peration | \\ / A nd | Copyright (C) 1991-2010 OpenCFD Ltd. \\/ M anipulation | ------------------------------------------------------------------------------- License This file is part of OpenFOAM. OpenFOAM is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. OpenFOAM is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>. Description Create intermediate mesh from PROSTAR files \*---------------------------------------------------------------------------*/ #include "starMesh.H" #include "IFstream.H" // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // void starMesh::readPoints(const scalar scaleFactor) { label nPoints = 0; label maxLabel = -1; fileName pointsFileName(casePrefix_ + ".vrt"); { IFstream pointsFile(pointsFileName); // Pass 1: get # points and maximum vertex label if (pointsFile.good()) { label pointLabel; scalar x, y, z; maxLabel = -1; while ((pointsFile >> pointLabel).good()) { nPoints++; maxLabel = max(maxLabel, pointLabel); pointsFile >> x >> y >> z; } } else { FatalErrorIn("starMesh::readPoints()") << "Cannot read file " << pointsFileName << abort(FatalError); } } Info<< "Number of points = " << nPoints << endl << endl; points_.setSize(nPoints); starPointID_.setSize(nPoints); // Reset STAR point ID, just in case starPointID_ = -1; starPointLabelLookup_.setSize(maxLabel+1); // reset point labels to invalid value starPointLabelLookup_ = -1; if (nPoints > 0) { // Pass 2: construct pointlist and conversion table // from Star vertex numbers to Foam pointLabels IFstream pointsFile(pointsFileName); label pointLabel; forAll(points_, p) { pointsFile >> pointLabel >> points_[p].x() >> points_[p].y() >> points_[p].z(); starPointID_[p] = pointLabel; starPointLabelLookup_[pointLabel] = p; } if (scaleFactor > 1.0 + SMALL || scaleFactor < 1.0 - SMALL) { points_ *= scaleFactor; } } else { FatalError << "void starMesh::readPoints() : " << "no points in file " << pointsFileName << abort(FatalError); } } // ************************************************************************* //
import DrawerHeader from './DrawerHeader' export default DrawerHeader
import logging from autotest.client.shared import error from virttest import utils_test from virttest import error_context from virttest import qemu_storage from virttest import data_dir @error_context.context_aware def run(test, params, env): """ Downgrade qcow2 image version: 1) Get the version of the image 2) Compare the version with expect version. If they are different, Amend the image with new version 3) Check the amend result :param test: QEMU test object :param params: Dictionary with the test parameters :param env: Dictionary with test environment """ ver_to = params.get("lower_version_qcow2", "0.10") error_context.context("Downgrade qcow2 image version to '%s'" % ver_to, logging.info) image = params.get("images").split()[0] t_params = params.object_params(image) qemu_image = qemu_storage.QemuImg(t_params, data_dir.get_data_dir(), image) ver_from = utils_test.get_image_version(qemu_image) utils_test.update_qcow2_image_version(qemu_image, ver_from, ver_to) actual_compat = utils_test.get_image_version(qemu_image) if actual_compat != ver_to: err_msg = "Fail to downgrade qcow2 image version!" err_msg += "Actual: %s, expect: %s" % (actual_compat, ver_to) raise error.TestFail(err_msg)
import time import asyncio import urwid import websockets import json from .displaycommon import * from cryptolens.core.orderbook import Orderbook from cryptolens.processor.bitfinexprocessor import BitfinexProcessor class MarketModel: def __init__(self): self.latest_price = 0 #asyncio.Task(self.get_orders()) # Bitfinex processor self.bitfinex_processor = BitfinexProcessor() def get_orderbook(self): return self.bitfinex_processor.orderbook def get_market_info(self): return self.bitfinex_processor.market_info @asyncio.coroutine def get_orders(self): websocket = yield from websockets.connect('wss://api.bitfinex.com/ws/2') sendData = json.dumps({ "event":"subscribe", "channel": "book", "pair": "BTCUSD", "prec": "p0", "freq": "f0", "len": "25"}) try: yield from websocket.send(sendData) while True: result = yield from websocket.recv() if result is not None: json_string = '{"first_name": "Guido", "last_name":"Rossum"}' parsed_json = json.loads(json_string) try: result = json.loads(result) self.latest_price = result[1][0] except: pass finally: yield from websocket.close() class MarketView(urwid.WidgetWrap): def __init__(self, controller): self.controller = controller self.mainPileList = [] self.left_column = urwid.Pile([]) self.right_column = urwid.Pile([]) self.best_price = urwid.Text("Connecting...") # Prepare orderbook self.prepare_order_book() # Prepare left pane self.prepare_graph() self.prepare_market_info_pane() self.prepare_stats_pane() self.prepare_left_pane() self.__super.__init__(self.main_window()) def update(self): # Get the orderbook orderbook = self.controller.get_orderbook() market_info = self.controller.get_market_info() # Get the best price self.best_price.set_text(str(orderbook.best_sell)) # Update the sell table sell_data = [] for key in orderbook.sellOrders.islice(0, 35, reverse=True): amount = "{:.10f}".format(orderbook.sellOrders[key]) sell_data.append([key, amount, 0]) self.sell_table.update_data(sell_data) # Update the buy table buy_data = [] for key in itertools.islice(reversed(orderbook.buyOrders), 0, 35): amount = "{:.10f}".format(orderbook.buyOrders[key]) buy_data.append([key, amount, 0]) self.buy_table.update_data(buy_data) # Update info pane info_data = [] info_data.append(['Total Orders Processed', orderbook.total_orders_processed]) info_data.append(['Buy Orders Received', orderbook.total_buy_orders_processed]) info_data.append(['Sell Orders Received', orderbook.total_sell_orders_processed]) info_data.append(['Orders Removed', orderbook.total_orders_removed]) info_data.append(['Orders In System', len(orderbook.order_list)]) info_data.append(['Volume Of Modified Orders', orderbook.order_change_volume]) self.stats_table.update_data(info_data) # Update market pane market_data = [] market_data.append(['24hr Total volume', market_info['volume']]) market_data.append(['24hr Daily Change', market_info['daily_change']]) market_data.append(['24hr Daily Percent Change', market_info['daily_perc_change']]) market_data.append(['24hr High', market_info['high']]) market_data.append(['24hr Low', market_info['low']]) market_data.append(['Ask Size', market_info['ask_size']]) market_data.append(['Buy Size', market_info['bid_size']]) self.market_info_table.update_data(market_data) def main_window(self): price = str(self.controller.get_price_data()) # Prepare body columnList = [self.left_column, (40, self.right_column)] body = urwid.Columns(columnList) body = urwid.Padding(body, left=2, right=2) return body def prepare_left_pane(self): self.live_stats = urwid.Pile([(9, 
self.market_info_table), self.stats_table]) graph_text = urwid.Text("Live BTC/USD Chart", align='right') graph_text = urwid.AttrWrap(graph_text, 'headers') graph_pane = urwid.Pile([self.graph, ('pack', graph_text)]) self.left_column = urwid.Pile([graph_pane, self.live_stats]) self.left_column = urwid.Padding(self.left_column, align='left', right=4) def prepare_market_info_pane(self): columnList = ['Market Info'] self.market_info_table = Table(2, 7, column_names=columnList) self.market_info_table.create_table() def prepare_graph(self): columnList = ['Stats'] graph_body = urwid.Text("Graph!") listwalker = urwid.SimpleListWalker([graph_body]) self.graph = urwid.ListBox(listwalker) self.graph = urwid.AttrWrap(self.graph, 'listbox') self.graph = urwid.LineBox(self.graph) def prepare_stats_pane(self): columnList = ['Stats'] self.stats_table = Table(2, 6, column_names=columnList) self.stats_table.create_table() def prepare_order_book(self): column_list = [(12, urwid.Text(('table header', "Price"))), urwid.Text(('table header', "Volume")), (6, urwid.Text(('table header', "Orders")))] column_headers = urwid.Columns(column_list) column_headers = urwid.Filler(column_headers, valign='middle', top=1, bottom=1) self.sell_table = Table(3, 30, fillbottom=True, column_widths=[12,0,6]) self.sell_table.create_table() self.sell_table._w.set_focus(self.sell_table.row_count - 1) self.buy_table = Table(3, 30, column_widths=[12,0,6]) self.buy_table.create_table() best_price_widget = urwid.Padding(self.best_price, align="center", width='pack') best_price_widget = urwid.Filler(best_price_widget, valign='middle', top=1, bottom=1) self.right_column = urwid.Pile([(1, column_headers), self.sell_table, (3, best_price_widget), self.buy_table]) class MarketController: def __init__(self): self.model = HomeModel() self.view = HomeView(self) def set_loop(self, loop): self.loop = loop self.update() def update(self, loop=None, user_data=None): self.view.update() self.update_alarm = self.loop.set_alarm_in(0.05, self.update) def get_price_data(self): return self.model.latest_price; def get_orderbook(self): return self.model.get_orderbook(); def get_market_info(self): return self.model.get_market_info();
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from typing import ( Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional, ) from google.cloud.dialogflow_v2beta1.types import version class ListVersionsPager: """A pager for iterating through ``list_versions`` requests. This class thinly wraps an initial :class:`google.cloud.dialogflow_v2beta1.types.ListVersionsResponse` object, and provides an ``__iter__`` method to iterate through its ``versions`` field. If there are more pages, the ``__iter__`` method will make additional ``ListVersions`` requests and continue to iterate through the ``versions`` field on the corresponding responses. All the usual :class:`google.cloud.dialogflow_v2beta1.types.ListVersionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, method: Callable[..., version.ListVersionsResponse], request: version.ListVersionsRequest, response: version.ListVersionsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.dialogflow_v2beta1.types.ListVersionsRequest): The initial request object. response (google.cloud.dialogflow_v2beta1.types.ListVersionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = version.ListVersionsRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property def pages(self) -> Iterable[version.ListVersionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> Iterable[version.Version]: for page in self.pages: yield from page.versions def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListVersionsAsyncPager: """A pager for iterating through ``list_versions`` requests. This class thinly wraps an initial :class:`google.cloud.dialogflow_v2beta1.types.ListVersionsResponse` object, and provides an ``__aiter__`` method to iterate through its ``versions`` field. If there are more pages, the ``__aiter__`` method will make additional ``ListVersions`` requests and continue to iterate through the ``versions`` field on the corresponding responses. All the usual :class:`google.cloud.dialogflow_v2beta1.types.ListVersionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" def __init__( self, method: Callable[..., Awaitable[version.ListVersionsResponse]], request: version.ListVersionsRequest, response: version.ListVersionsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.dialogflow_v2beta1.types.ListVersionsRequest): The initial request object. response (google.cloud.dialogflow_v2beta1.types.ListVersionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = version.ListVersionsRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property async def pages(self) -> AsyncIterable[version.ListVersionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterable[version.Version]: async def async_generator(): async for page in self.pages: for response in page.versions: yield response return async_generator() def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="description" content=""> <meta name="author" content=""> <title>Scrolling Nav - Start Bootstrap Template</title> <!-- Bootstrap Core CSS --> <link href="css/bootstrap.min.css" rel="stylesheet"> <!-- Custom CSS --> <link href="css/scrolling-nav.css" rel="stylesheet"> <!-- MyFonts CSS --> <link href="kfp.css" rel="stylesheet"> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script> <script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script> <![endif]--> </head> <!-- The #page-top ID is part of the scrolling feature - the data-spy and data-target are part of the built-in Bootstrap scrollspy function --> <body id="page-top" data-spy="scroll" data-target=".navbar-fixed-top"> <!-- Navigation --> <nav class="navbar navbar-default navbar-fixed-top" role="navigation"> <div class="container"> <div class="navbar-header page-scroll"> <button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-ex1-collapse"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <a class="navbar-brand page-scroll" href="#page-top">Katherine Perry</a> </div> <!-- Collect the nav links, forms, and other content for toggling --> <div class="collapse navbar-collapse navbar-ex1-collapse"> <ul class="nav navbar-nav"> <!-- Hidden li included to remove active class from about link when scrolled up past about section --> <li class="hidden"> <a class="page-scroll" href="#page-top"></a> </li> <li> <a class="page-scroll" href="#about">About</a> </li> <li> <a class="page-scroll" href="#services">Experiences</a> </li> <li> <a class="page-scroll" href="#contact">Research</a> </li> <li> <a class="page-scroll" href="#services">Maps & Design</a> </li> <li> <a class="page-scroll" href="#services">Connect</a> </li> </ul> </div> <!-- /.navbar-collapse --> </div> <!-- /.container --> </nav> <!-- Intro Section --> <section id="intro" class="intro-section"> <div class="container"> <div class="row"> <div class="col-lg-12"> <h1>Hi, I'm Katherine.</h1> <p>Interests in data, urban design, and geography have led me down many paths, from mapmaking in local government to data analysis at the White House.</p> <a class="btn btn-default page-scroll" href="#about">get to know me.</a> </div> </div> </div> </section> <!-- About Section --> <section id="about" class="about-section"> <div class="container"> <div class="row"> <div class="col-lg-12"> <h1>About Section</h1> </div> </div> </div> </section> <!-- Services Section --> <section id="services" class="services-section"> <div class="container"> <div class="row"> <div class="col-lg-12"> <h1>Services Section</h1> </div> </div> </div> </section> <!-- Contact Section --> <section id="contact" class="contact-section"> <div class="container"> <div class="row"> <div class="col-lg-12"> <h1>Contact Section</h1> </div> </div> </div> </section> <!-- jQuery --> <script src="js/jquery.js"></script> <!-- Bootstrap Core JavaScript --> <script src="js/bootstrap.min.js"></script> <!-- Scrolling Nav JavaScript --> <script src="js/jquery.easing.min.js"></script> <script 
src="js/scrolling-nav.js"></script> </body> </html>
/* MIT License Copyright (c) 2017 ParkJunYeong(https://github.com/ParkJunYeong) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.*/ #include "Symmetrics.h" #include "../board/Common.h" namespace DeepAIGo { Tensor TensorUtil::Rotl90(const Tensor& tensor) { Tensor ret { boost::extents[tensor.shape()[0]][BOARD_SIZE][BOARD_SIZE]}; for (size_t w = 0; w < tensor.shape()[0]; ++w) { for (size_t i = 0; i < BOARD_SIZE; ++i) { for (size_t j = 0; j < BOARD_SIZE; ++j) { ret[w][j][i] = tensor[w][i][BOARD_SIZE - j - 1]; } } } return ret; } Tensor TensorUtil::Rotl180(const Tensor& tensor) { return Rotl90(Rotl90(tensor)); } Tensor TensorUtil::Rotl270(const Tensor& tensor) { return Rotl90(Rotl180(tensor)); } Tensor TensorUtil::Rotr90(const Tensor& tensor) { Tensor ret { boost::extents[tensor.shape()[0]][BOARD_SIZE][BOARD_SIZE]}; for (size_t w = 0; w < tensor.shape()[0]; ++w) { for (size_t i = 0; i < BOARD_SIZE; ++i) { for (size_t j = 0; j < BOARD_SIZE; ++j) { ret[w][j][i] = tensor[w][BOARD_SIZE - i - 1][j]; } } } return ret; } Tensor TensorUtil::Rotr180(const Tensor& tensor) { return Rotr90(Rotr90(tensor)); } Tensor TensorUtil::Rotr270(const Tensor& tensor) { return Rotr90(Rotr180(tensor)); } Tensor TensorUtil::Transpose(const Tensor& tensor) { Tensor ret { boost::extents[tensor.shape()[0]][BOARD_SIZE][BOARD_SIZE]}; for (size_t w = 0; w < tensor.shape()[0]; ++w) { for (size_t i = 0; i < BOARD_SIZE; ++i) { for (size_t j = 0; j < BOARD_SIZE; ++j) { ret[w][j][i] = tensor[w][i][j]; } } } return ret; } Tensor TensorUtil::FlipUD(const Tensor& tensor) { Tensor ret { boost::extents[tensor.shape()[0]][BOARD_SIZE][BOARD_SIZE] }; for (size_t w = 0; w < tensor.shape()[0]; ++w) { for (size_t i = 0; i < BOARD_SIZE; ++i) { for (size_t j = 0; j < BOARD_SIZE; ++j) { ret[w][i][j] = tensor[w][BOARD_SIZE - i - 1][j]; } } } return ret; } Tensor TensorUtil::FlipLR(const Tensor& tensor) { Tensor ret { boost::extents[tensor.shape()[0]][BOARD_SIZE][BOARD_SIZE] }; for (size_t w = 0; w < tensor.shape()[0]; ++w) { for (size_t i = 0; i < BOARD_SIZE; ++i) { for (size_t j = 0; j < BOARD_SIZE; ++j) { ret[w][i][j] = tensor[w][i][BOARD_SIZE - j - 1]; } } } return ret; } }
from .base import FileType from . import tools from six import string_types import numpy try: import fitsio except ImportError: fitsio = None class FITSFile(FileType): """ A file object to handle the reading of FITS data using the :mod:`fitsio` package. See also: https://github.com/esheldon/fitsio Parameters ---------- path : str the file path to load ext: number or string, optional The extension. Either the numerical extension from zero or a string extension name. If not sent, data is read from the first HDU that has data. """ def __init__(self, path, ext=None): # hide the import exception if fitsio is None: raise ImportError("please install fitsio: ``conda install -c bccp fitsio``") self.path = path self.dataset = str(ext) # try to find the first Table HDU to read if not specified with fitsio.FITS(path) as ff: if ext is None: for i, hdu in enumerate(ff): if hdu.has_data(): ext = i break if ext is None: raise ValueError("input fits file '%s' has not binary table to read" %path) else: if isinstance(ext, string_types): if ext not in ff: raise ValueError("FITS file does not contain extension with name '%s'" %ext) elif ext >= len(ff): raise ValueError("FITS extension %d is not valid" %ext) # make sure we crash if data is wrong or missing if not ff[ext].has_data() or ff[ext].get_exttype() == 'IMAGE_HDU': raise ValueError("FITS extension %d is not a readable binary table" %ext) self.attrs = {} self.attrs['ext'] = ext # size and dtype with fitsio.FITS(path) as ff: self.size = ff[ext].get_nrows() self.dtype = ff[ext].get_rec_dtype()[0] def read(self, columns, start, stop, step=1): """ Read the specified column(s) over the given range 'start' and 'stop' should be between 0 and :attr:`size`, which is the total size of the file Parameters ---------- columns : str, list of str the name of the column(s) to return start : int the row integer to start reading at stop : int the row integer to stop reading at step : int, optional the step size to use when reading; default is 1 Returns ------- numpy.array structured array holding the requested columns over the specified range of rows """ if isinstance(columns, string_types): columns = [columns] kws = {'ext':self.attrs['ext'], 'columns':columns, 'rows':range(start, stop, step)} return fitsio.read(self.path, **kws)
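A minimal usage sketch for the reader above, assuming fitsio is installed and a hypothetical file catalog.fits whose first data HDU is a binary table with RA and DEC columns (the file name and column names are illustrative):

# Hypothetical sketch: open the first table HDU and read a slice of two columns.
f = FITSFile('catalog.fits')          # ext=None, so the first HDU with data is used
print(f.size, f.dtype)                # total row count and record dtype of the table
rows = f.read(['RA', 'DEC'], 0, 100)  # structured array holding the first 100 rows
print(rows['RA'][:5])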
<?php /** * Plugin Name: Ecko Plugin * Plugin URI: http://ecko.me * Description: Shortcodes and Widgets for the EckoThemes WordPress Themes * Version: 1.5.1 * Author: EckoThemes * Author URI: http://ecko.me * License: GPL-2.0+ * License URI: http://www.gnu.org/licenses/gpl-2.0.txt * Text Domain: eckoshortcodes * * @link http://ecko.me * @since 1.5.1 * @package Ecko_Plugin * */ if ( ! defined( 'WPINC' ) ) { die; } define( 'ECKO_VERSION', '1.5.1' ); define( 'ECKO_DIR', ABSPATH . 'wp-content/plugins/eckoplugin' ); define( 'ECKO_URL', plugins_url( '', __FILE__ )); include (ECKO_DIR . '/inc/ecko-shortcodes.php'); include (ECKO_DIR . '/inc/ecko-widgets.php'); ?>
from decimal import Decimal import uuid from datetime import datetime from django.test import TestCase from casexml.apps.case.models import CommCareCase from casexml.apps.stock.models import StockTransaction, StockReport from casexml.apps.stock.tests.mock_consumption import ago from casexml.apps.stock import const from corehq.apps.products.models import SQLProduct SUB_TYPE_MAX_LEN = 20 class StockTransactionTests(TestCase): def test_subtype_no_truncate(self): sub_type = 'a' * (SUB_TYPE_MAX_LEN - 1) self._test_subtype(sub_type, sub_type) def test_subtype_truncate(self): initial = 'a' * (SUB_TYPE_MAX_LEN + 3) final = 'a' * (SUB_TYPE_MAX_LEN) self._test_subtype(initial, final) def _test_subtype(self, initial, final): case_id = uuid.uuid4().hex CommCareCase( _id=case_id, domain='fakedomain', ).save() product_id = uuid.uuid4().hex SQLProduct(product_id=product_id, domain='fakedomain').save() report = StockReport.objects.create( form_id=uuid.uuid4().hex, date=ago(1), server_date=datetime.utcnow(), type=const.REPORT_TYPE_BALANCE ) txn = StockTransaction( report=report, section_id=const.SECTION_TYPE_STOCK, type=const.TRANSACTION_TYPE_STOCKONHAND, subtype=initial, case_id=case_id, product_id=product_id, stock_on_hand=Decimal(10), ) txn.save() saved = StockTransaction.objects.get(id=txn.id) self.assertEqual(final, saved.subtype)
/** * @license * Copyright Google LLC All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {Platform} from '@angular/cdk/platform'; import { ChangeDetectionStrategy, Component, ContentChildren, ElementRef, Inject, NgZone, Optional, QueryList, ViewChild, ViewEncapsulation } from '@angular/core'; import { MatLine, MAT_RIPPLE_GLOBAL_OPTIONS, RippleGlobalOptions, } from '@angular/material-experimental/mdc-core'; import {MatListBase, MatListItemBase} from './list-base'; @Component({ selector: 'mat-list', exportAs: 'matList', template: '<ng-content></ng-content>', host: { 'class': 'mat-mdc-list mat-mdc-list-base mdc-deprecated-list', }, styleUrls: ['list.css'], encapsulation: ViewEncapsulation.None, changeDetection: ChangeDetectionStrategy.OnPush, providers: [ {provide: MatListBase, useExisting: MatList}, ] }) export class MatList extends MatListBase {} @Component({ selector: 'mat-list-item, a[mat-list-item], button[mat-list-item]', exportAs: 'matListItem', host: { 'class': 'mat-mdc-list-item mdc-deprecated-list-item', '[class.mat-mdc-list-item-with-avatar]': '_hasIconOrAvatar()', }, templateUrl: 'list-item.html', encapsulation: ViewEncapsulation.None, changeDetection: ChangeDetectionStrategy.OnPush, }) export class MatListItem extends MatListItemBase { @ContentChildren(MatLine, {read: ElementRef, descendants: true}) lines: QueryList<ElementRef<Element>>; @ViewChild('text') _itemText: ElementRef<HTMLElement>; constructor( element: ElementRef, ngZone: NgZone, listBase: MatListBase, platform: Platform, @Optional() @Inject(MAT_RIPPLE_GLOBAL_OPTIONS) globalRippleOptions?: RippleGlobalOptions) { super(element, ngZone, listBase, platform, globalRippleOptions); } }
"""\ Librarian - Access source code library based on templates. It more or less just a copy program, that copies files from a library of snippets to the given output. It can textually replace words, so that the output can be adjusted, e.g. when a template contains variables. """ import logging import codecs import pkgutil # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - def main(): import sys import os from optparse import OptionParser logging.basicConfig() parser = OptionParser(usage='%prog [options] temlate-name') parser.add_option("-o", "--outfile", dest = "outfile", help = "name of the object file", metavar = "FILE") parser.add_option("--debug", action = "store_true", dest = "debug", default = False, help = "print debug messages to stdout") parser.add_option("-D", "--define", action = "append", dest = "defines", metavar = "SYM[=VALUE]", default = [], help="define symbol") parser.add_option("-l", "--list", action = "store_true", dest = "list", default = False, help="List available snippets") (options, args) = parser.parse_args() if options.outfile: outfile = codecs.open(options.outfile, 'w', 'utf-8') else: outfile = codecs.getwriter("utf-8")(sys.stdout) if options.list: outfile.write('List of available snippets:\n') # XXX this method wont work when package is zipped (e.g. py2exe) d = os.path.join(os.path.dirname(sys.modules['msp430.asm'].__file__), 'librarian') for root, dirs, files in os.walk(d): for filename in files: outfile.write(' %s\n' % (os.path.join(root, filename)[1+len(d):],)) sys.exit(0) if len(args) != 1: parser.error("Expected name of template as argument.") # load desired snippet try: template = pkgutil.get_data('msp430.asm', 'librarian/%s' % args[0]) except IOError: sys.stderr.write('lib: %s: File not found\n' % (args[0]),) if options.debug: raise sys.exit(1) # collect predefined symbols defines = {} for definition in options.defines: if '=' in definition: symbol, value = definition.split('=', 1) else: symbol, value = definition, '' defines[symbol] = value # perform text replacements for key, value in defines.items(): template = template.replace(key, value) # write final result outfile.write(template) if __name__ == '__main__': main()
import {Component, Inject} from '@angular/core'; import {MAT_DIALOG_DATA} from '@angular/material/dialog'; import {Client} from '../../../lib/models/client'; /** Entry type */ export type EntryType = 'primitive'|'timestamp'|'size'|'user-list'|'interface-list'|'volume-list'; /** Parameters required to open an EntryHistoryDialog */ export interface EntryHistoryDialogParams { readonly path: ReadonlyArray<string>; readonly type: EntryType; readonly clientVersions: ReadonlyArray<Client>; } interface EntryHistoryTableRow<T> { time: Date; version: T; } /** * Component displaying the entry history dialog. */ @Component({ selector: 'entry-history-dialog', templateUrl: './entry_history_dialog.ng.html', styleUrls: ['./entry_history_dialog.scss'], }) export class EntryHistoryDialog { readonly entryType: EntryType; // tslint:disable-next-line:no-any readonly tableRows: Array<EntryHistoryTableRow<any>> = []; constructor( @Inject(MAT_DIALOG_DATA) private readonly data: EntryHistoryDialogParams, ) { if (this.data.path.length === 0) { throw new Error('Empty "path" provided'); } this.entryType = data.type; this.initTableRows(this.data); } private initTableRows(data: EntryHistoryDialogParams) { data.clientVersions.forEach((client) => { // tslint:disable-next-line:no-any let property: any = client; data.path.forEach((token) => { if (token === '' || property === undefined) { throw new Error(`Wrong "path" provided: ${data.path}`); } property = property[token]; }); this.tableRows.push({ time: client.age, version: property, }); }); } }
import { EN_US } from './locale.en_US'; /* tslint:disable */ export let EN_GB: any = Object.assign({}, EN_US, { // tabs / general // HELLO: 'hello world - en-GB' }); /* tslint:enable */
from django.utils.translation import ugettext_noop ugettext_noop("Search In") ugettext_noop("Device Reports") ugettext_noop("Live Trains") ugettext_noop("Report Details") ugettext_noop("Akko") ugettext_noop("Modiin") ugettext_noop("Modiin Center") ugettext_noop("Kiryat Hayyim") ugettext_noop("Kiryat Motzkin") ugettext_noop("Leb Hmifratz") ugettext_noop("Hutsot HaMifrats") ugettext_noop("Akko") ugettext_noop("Nahariyya") ugettext_noop("Haifa Center HaShmona") ugettext_noop("Haifa Bat Gallim") ugettext_noop("Haifa Hof HaKarmel (Razi'el)") ugettext_noop("Atlit") ugettext_noop("Binyamina") ugettext_noop("Kesariyya - Pardes Hanna") ugettext_noop("Hadera West") ugettext_noop("Natanya") ugettext_noop("Bet Yehoshua") ugettext_noop("Herzliyya") ugettext_noop("Tel Aviv - University") ugettext_noop("Tel Aviv Center - Savidor") ugettext_noop("Bne Brak") ugettext_noop("Petah Tikva Kiryat Arye") ugettext_noop("Petah Tikva Sgulla") ugettext_noop("Tel Aviv HaShalom") ugettext_noop("Holon Junction") ugettext_noop("Holon - Wolfson") ugettext_noop("Bat Yam - Yoseftal") ugettext_noop("Bat Yam - Komemiyyut") ugettext_noop("Kfar Habbad") ugettext_noop("Tel Aviv HaHagana") ugettext_noop("Lod") ugettext_noop("Ramla") ugettext_noop("Ganey Aviv") ugettext_noop("Rehovot E. Hadar") ugettext_noop("Be'er Ya'akov") ugettext_noop("Yavne") ugettext_noop("Ashdod Ad Halom") ugettext_noop("Ashkelon") ugettext_noop("Bet Shemesh") ugettext_noop("Jerusalem Biblical Zoo") ugettext_noop("Jerusalem Malha") ugettext_noop("Kiryat Gat") ugettext_noop("Be'er Sheva North University") ugettext_noop("Be'er Sheva Center") ugettext_noop("Dimona") ugettext_noop("Lehavim - Rahat") ugettext_noop("Ben Gurion Airport") ugettext_noop("Kfar Sava") ugettext_noop("Rosh Ha'Ayin North") ugettext_noop("Yavne - West") ugettext_noop("Rishon LeTsiyyon HaRishonim") ugettext_noop("Hod HaSharon") ugettext_noop("Sderot") ugettext_noop("Rishon LeTsiyyon - Moshe Dayan") # routes ugettext_noop("Tel Aviv Center - Rishon LeTsiyyon HaRishonim") ugettext_noop("Nahariyya - Modiin Center") ugettext_noop("Nahariyya - Be'er Sheva Center") ugettext_noop("Binyamina - Ashkelon") ugettext_noop("Nahariyya - Ben Gurion Airport -Be'er Sheva Center") ugettext_noop("Kiryat Motzkin - Haifa Hof HaKarmel (Razi'el)") ugettext_noop("Tel Aviv Center - Savidor - Jerusalem Malha") ugettext_noop("Be'er Sheva North University - Dimona") ugettext_noop("Hod HaSharon - Ashkelon") ugettext_noop("Hertsliyya - Be'er Sheva Center") # days ugettext_noop("Sunday") ugettext_noop("Monday") ugettext_noop("Tuesday") ugettext_noop("Wendesay") ugettext_noop("Thursday") ugettext_noop("Friday") ugettext_noop("Saturday") ugettext_noop("Stop") ugettext_noop("Arrival") ugettext_noop("Departure") ugettext_noop("Live") ugettext_noop('Live Trains'); ugettext_noop('Simulated'); ugettext_noop('WIP'); ugettext_noop('No Trips Now'); ugettext_noop('Current Trains List') ugettext_noop("Total # of reports (with loc)") ugettext_noop("to") ugettext_noop("on") ugettext_noop("Search Reports") ugettext_noop("Go Live") ugettext_noop("Stop Live") ugettext_noop("auto zoom") ugettext_noop("Please wait. Loading Reports, will take some time...") ugettext_noop("Map for device id") ugettext_noop("Total # of reports (with loc)") ugettext_noop('cur') ugettext_noop('exp')
""" examples.basic_usage ~~~~~~~~~~~~~~~~~~~~ Common usage patterns for the yaspin spinner. """ import signal import time from yaspin import Spinner, yaspin from yaspin.signal_handlers import fancy_handler from yaspin.spinners import Spinners def context_manager_default(): with yaspin(text="Braille"): time.sleep(3) def context_manager_line(): line_spinner = Spinner("-\\|/", 150) with yaspin(line_spinner, "line"): time.sleep(3) @yaspin(Spinner("⢄⢂⢁⡁⡈⡐⡠", 80), text="Dots") def decorated_function(): time.sleep(3) def pre_setup_example(): swirl = yaspin( spinner=Spinners.simpleDotsScrolling, text="swirl", color="red", side="right", sigmap={signal.SIGINT: fancy_handler}, ) with swirl as sp: time.sleep(2) with swirl as sp: sp.text = "new swirl" sp.reversal = True time.sleep(2) def main(): context_manager_default() context_manager_line() decorated_function() pre_setup_example() if __name__ == "__main__": main()
import {mobileQuery} from 'ghost/utils/mobile'; var PostController = Ember.ObjectController.extend({ isPublished: Ember.computed.equal('status', 'published'), classNameBindings: ['featured'], actions: { toggleFeatured: function () { var options = {disableNProgress: true}, self = this; this.toggleProperty('featured'); this.get('model').save(options).catch(function (errors) { self.notifications.showErrors(errors); }); }, hidePostContent: function () { if (mobileQuery.matches) { $('.js-content-list').animate({right: '0', left: '0', 'margin-right': '0'}, 300); $('.js-content-preview').animate({right: '-100%', left: '100%', 'margin-left': '15px'}, 300); } }, showPostContent: function () { if (mobileQuery.matches) { $('.js-content-list').animate({right: '100%', left: '-100%', 'margin-right': '15px'}, 300); $('.js-content-preview').animate({right: '0', left: '0', 'margin-left': '0'}, 300); } } } }); export default PostController;
#!/usr/bin/python # -*- coding: utf-8 -*- # xxx # # Copyright (c) 2017 Wintermute0110 <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; version 2 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # --- Python standard library --- from __future__ import unicode_literals import sys, os, pprint def fs_compress_item_list(item_list): reduced_list = [] num_items = len(item_list) if num_items == 0 or num_items == 1: return item_list previous_item = item_list[0] item_count = 1 for i in range(1, num_items): current_item = item_list[i] # print('{} | item_count {} | previous_item "{:>8}" | current_item "{:>8}"'.format(i, item_count, previous_item, current_item)) if current_item == previous_item: item_count += 1 else: if item_count == 1: reduced_list.append('{}'.format(previous_item)) else: reduced_list.append('{} x {}'.format(item_count, previous_item)) item_count = 1 previous_item = current_item # >> Last elemnt of the list if i == num_items - 1: if current_item == previous_item: if item_count == 1: reduced_list.append('{}'.format(current_item)) else: reduced_list.append('{} x {}'.format(item_count, current_item)) else: reduced_list.append('{}'.format(current_item)) return reduced_list # --- Main ---------------------------------------------------------------------------------------- input_list_list = [ ['dial'], ['dial', 'dial'], ['dial', 'dial', 'joy'], ['joy', 'dial', 'dial'], ['dial', 'dial', 'joy', 'joy'], ] for input_list in input_list_list: print('Input --> {}'.format(pprint.pformat(input_list))) print('Output --> {}'.format(pprint.pformat(fs_compress_item_list(input_list)))) print(' ')
# ============================================================================= # OWSLib. Copyright (C) 2005 Sean C. Gillies # # Contact email: [email protected] # ============================================================================= def patch_well_known_namespaces(etree_module): import warnings from owslib.namespaces import Namespaces ns = Namespaces() """Monkey patches the etree module to add some well-known namespaces.""" try: register_namespace = etree_module.register_namespace except AttributeError: try: etree_module._namespace_map def register_namespace(prefix, uri): etree_module._namespace_map[uri] = prefix except AttributeError: def register_namespace(prefix, uri): pass warnings.warn("Only 'lxml.etree' >= 2.3 and 'xml.etree.ElementTree' >= 1.3 are fully supported!") for k, v in ns.get_namespaces().iteritems(): register_namespace(k, v) # try to find lxml or elementtree try: from lxml import etree except ImportError: try: # Python 2.5 with ElementTree included import xml.etree.ElementTree as etree except ImportError: try: # Python < 2.5 with ElementTree installed import elementtree.ElementTree as etree except ImportError: raise RuntimeError('You need either lxml or ElementTree to use OWSLib!') patch_well_known_namespaces(etree)
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/zf2 for the canonical source repository * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ namespace ZendTest\Http\Header; use Zend\Http\Header\Exception\InvalidArgumentException; use Zend\Http\Header\GenericHeader; use PHPUnit_Framework_TestCase as TestCase; class GenericHeaderTest extends TestCase { /** * @param string $name * @dataProvider validFieldNameChars */ public function testValidFieldName($name) { try { new GenericHeader($name); } catch (InvalidArgumentException $e) { $this->assertEquals( $e->getMessage(), 'Header name must be a valid RFC 7230 (section 3.2) field-name.' ); $this->fail('Allowed char rejected: ' . ord($name)); // For easy debug } } /** * @param string $name * @dataProvider invalidFieldNameChars */ public function testInvalidFieldName($name) { try { new GenericHeader($name); $this->fail('Invalid char allowed: ' . ord($name)); // For easy debug } catch (InvalidArgumentException $e) { $this->assertEquals( $e->getMessage(), 'Header name must be a valid RFC 7230 (section 3.2) field-name.' ); } } /** * @group 7295 */ public function testDoesNotReplaceUnderscoresWithDashes() { $header = new GenericHeader('X_Foo_Bar'); $this->assertEquals('X_Foo_Bar', $header->getFieldName()); } /** * @see http://en.wikipedia.org/wiki/HTTP_response_splitting * @group ZF2015-04 */ public function testPreventsCRLFAttackViaFromString() { $this->setExpectedException('Zend\Http\Header\Exception\InvalidArgumentException'); $header = GenericHeader::fromString("X_Foo_Bar: Bar\r\n\r\nevilContent"); } /** * @see http://en.wikipedia.org/wiki/HTTP_response_splitting * @group ZF2015-04 */ public function testPreventsCRLFAttackViaConstructor() { $this->setExpectedException('Zend\Http\Header\Exception\InvalidArgumentException'); $header = new GenericHeader('X_Foo_Bar', "Bar\r\n\r\nevilContent"); } /** * @see http://en.wikipedia.org/wiki/HTTP_response_splitting * @group ZF2015-04 */ public function testProtectsFromCRLFAttackViaSetFieldName() { $header = new GenericHeader(); $this->setExpectedException('Zend\Http\Header\Exception\InvalidArgumentException', 'valid'); $header->setFieldName("\rX-\r\nFoo-\nBar"); } /** * @see http://en.wikipedia.org/wiki/HTTP_response_splitting * @group ZF2015-04 */ public function testProtectsFromCRLFAttackViaSetFieldValue() { $header = new GenericHeader(); $this->setExpectedException('Zend\Http\Header\Exception\InvalidArgumentException'); $header->setFieldValue("\rSome\r\nCLRF\nAttack"); } /** * Valid field name characters. * * @return string[] */ public function validFieldNameChars() { return [ ['!'], ['#'], ['$'], ['%'], ['&'], ["'"], ['*'], ['+'], ['-'], ['.'], ['0'], // Begin numeric range ['9'], // End numeric range ['A'], // Begin upper range ['Z'], // End upper range ['^'], ['_'], ['`'], ['a'], // Begin lower range ['z'], // End lower range ['|'], ['~'], ]; } /** * Invalid field name characters. * * @return string[] */ public function invalidFieldNameChars() { return [ ["\x00"], // Min CTL invalid character range. ["\x1F"], // Max CTL invalid character range. ['('], [')'], ['<'], ['>'], ['@'], [','], [';'], [':'], ['\\'], ['"'], ['/'], ['['], [']'], ['?'], ['='], ['{'], ['}'], [' '], ["\t"], ["\x7F"], // DEL CTL invalid character. ]; } }
from dal import autocomplete, forward from django import forms from django.forms.widgets import CheckboxSelectMultiple, RadioSelect from .models import TModel class TForm(forms.ModelForm): CHOICES = (("a", "Alice"), ("b", "Bob"), ("c", "Charlie")) POOR_CHOICE = (("d", "Dylan"), ) checkbox = forms.BooleanField(required=False) select = forms.ChoiceField(required=False, choices=CHOICES) select_radio = forms.ChoiceField(required=False, widget=RadioSelect(), choices=CHOICES) multiselect = forms.MultipleChoiceField(required=False, choices=CHOICES) multiselect_checks = forms.MultipleChoiceField( required=False, widget=CheckboxSelectMultiple(), choices=CHOICES) multiselect_checks_poor = forms.MultipleChoiceField( required=False, widget=CheckboxSelectMultiple(), choices=POOR_CHOICE) test = autocomplete.Select2ListChoiceField( required=False, widget=autocomplete.ListSelect2( url='forward_different_fields', forward=("name", "checkbox", "select", "select_radio", "multiselect", "multiselect_checks", forward.Field(src="multiselect_checks_poor"), forward.JavaScript(handler="const42", dst="const42"), forward.JavaScript( handler="reverse_name", dst="reverse_name"), forward.Self() ) ) ) class Meta: model = TModel fields = ('name', 'checkbox', 'select', 'select_radio', 'multiselect', 'multiselect_checks', 'multiselect_checks_poor', 'test') class Media: js = ( 'js_handlers.js', )
#include <stdio.h> int main () { int A,B,C,D; printf ("\n** CALCULAR UMA EQUACAO QUADRATICA ** \n"); printf ("\nQual o valor de A? "); scanf ("%i",&A); printf ("Qual o valor de B? "); scanf ("%i",&B); printf ("Qual o valor de C? "); scanf ("%i",&C); D = B * B - 4 * A * C; printf ("\nO valor de Delta e: %i.\n",D); printf ("\n\n<< Marco_Tulio >>\n"); getchar(),getchar(); return 0; }
{ "type": "serial", "categoryField": "Dry Bulb Temperature", "startDuration": 1, "categoryAxis": { "gridPosition": "start", "title": "Dry Bulb Temperature" }, "trendLines": [], "graphs": [ { "balloonText": "[[title]] of [[category]]:[[value]]", "id": "AmGraph-1", "title": "2", "type": "smoothedLine", "valueField": "2" }, { "balloonText": "[[title]] of [[category]]:[[value]]", "id": "AmGraph-2", "title": "5", "type": "smoothedLine", "valueField": "5" }, { "id": "AmGraph-3", "title": "7", "valueField": "7" }, { "id": "AmGraph-4", "title": "10", "valueField": "10", "xField": "Dry Bulb Temperature" }, { "id": "AmGraph-5", "title": "12" }, { "id": "AmGraph-6", "title": "15", "valueField": "15" }, { "id": "AmGraph-7", "title": "20", "valueField": "20" }, { "id": "AmGraph-8", "title": "25", "valueField": "25" }, { "id": "AmGraph-9", "title": "30", "valueField": "30" } ], "guides": [], "valueAxes": [ { "id": "ValueAxis-1", "autoGridCount": false, "title": "Vapor Pressure" } ], "allLabels": [], "balloon": {}, "legend": { "enabled": true, "labelText": "[[title]] Tw", "useGraphSettings": true }, "titles": [ { "id": "Title-1", "size": 15, "text": "Chart Title" } ], "dataProvider": [ { "2": "-", "5": "-", "7": "-", "10": "-", "12": "-", "15": "-", "20": "-", "25": "-", "30": "-", "Dry Bulb Temperature": "0" }, { "2": "2", "5": "-", "7": "-", "10": "-", "12": "-", "15": "-", "20": "-", "25": "-", "30": "-", "Dry Bulb Temperature": "2" }, { "2": "-2.17", "5": "5", "7": "-", "10": "-", "12": "-", "15": "-", "20": "-", "25": "-", "30": "-", "Dry Bulb Temperature": "5" }, { "2": "-5.65", "5": "2.7", "7": "7", "10": "-", "12": "-", "15": "-", "20": "-", "25": "-", "30": "-", "Dry Bulb Temperature": "7" }, { "2": "-13.82", "5": "-1.36", "7": "3.88", "10": "10", "12": "-", "15": "-", "20": "-", "25": "-", "30": "-", "Dry Bulb Temperature": "10" }, { "2": "-26.49", "5": "-4.6", "7": "1.36", "10": "8.36", "12": "12", "15": "-", "20": "-", "25": "-", "30": "-", "Dry Bulb Temperature": "12" }, { "2": "-", "5": "-11.88", "7": "-2.92", "10": "5.48", "12": "9.76", "15": "15", "20": "-", "25": "-", "30": "-", "Dry Bulb Temperature": "15" }, { "2": "-", "5": "-21.49", "7": "-6.65", "10": "3.23", "12": "8.06", "15": "13.79", "20": "-", "25": "-", "30": "-", "Dry Bulb Temperature": "17" }, { "2": "-", "5": "-", "7": "-15.85", "10": "-0.76", "12": "5.13", "15": "11.77", "20": "20", "25": "-", "30": "-", "Dry Bulb Temperature": "20" }, { "2": "-", "5": "-", "7": "-34.72", "10": "-3.84", "12": "2.82", "15": "10.26", "20": "19.09", "25": "-", "30": "-", "Dry Bulb Temperature": "22" }, { "2": "-", "5": "-", "7": "-", "10": "-10.55", "12": "-1.22", "15": "7.73", "20": "17.61", "25": "25", "30": "-", "Dry Bulb Temperature": "25" }, { "2": "-", "5": "-", "7": "-", "10": "-18.7", "12": "-4.43", "15": "5.79", "20": "16.55", "25": "24.3", "30": "-", "Dry Bulb Temperature": "27" }, { "2": "-", "5": "-", "7": "-", "10": "-", "12": "-11.58", "15": "2.36", "20": "14.84", "25": "23.19", "30": "30", "Dry Bulb Temperature": "30" }, { "2": "-", "5": "-", "7": "-", "10": "-", "12": "-20.83", "15": "-0.35", "20": "13.6", "25": "22.42", "30": "29.45", "Dry Bulb Temperature": "32" }, { "2": "-", "5": "-", "7": "-", "10": "-", "12": "-", "15": "-5.11", "20": "11.55", "25": "21.2", "30": "28.61", "Dry Bulb Temperature": "35" }, { "2": "-", "5": "-", "7": "-", "10": "-", "12": "-", "15": "-", "20": "10.03", "25": "20.34", "30": "28.03", "Dry Bulb Temperature": "37" } ] }
{% extends "main.html" %} {% block content %} <h1>Congratulations {{ user.name }}!</h1> <p>If you see this page, it means that you managed to log in successfully.</p> {% endblock content%}
# coding: utf-8 """ Injects data for rendering index template - see base.html """ import flask from main import app import auth import model.user as users from model.config import CONFIG_DB import config import validators import util @app.route('/') def index(): """Render index template""" return flask.render_template('index.html') @app.context_processor def inject_user(): """Inject 'user' variable into jinja template, so it can be passed into angular. See base.html""" user = False if auth.is_logged_in(): user = auth.currentUser().toDict(publicOnly=False) util.debugDict(user, "auth.currentUser" ) return { 'user': user } @app.context_processor def inject_config(): """Inject 'app_config' variable into jinja template, so it can be passed into angular. See base.html""" #config_properties = Config.get_all_properties() if auth.is_admin() else Config.get_public_properties() app_config = CONFIG_DB.toDict(not auth.is_admin()) return { 'app_config': app_config , 'authNames' : config.authNames } @app.context_processor def inject_validators(): """Inject vdr-specifiers for regex and min-max validators into jinja template for the client. See base.html This is so that client and server can both recreate same ie functionally equivalent validators. However custom validators cannot be passed but generally these are only applied server side """ return { 'validators' : validators.to_dict(validators) } @app.route('/_ah/warmup') def warmup(): """Warmup request to load application code into a new instance before any live requests reach that instance. For more info see GAE docs""" return 'success'
using Wikiled.Arff.Logic; namespace Wikiled.Sentiment.ConsoleApp.Analysis { namespace Wikiled.Sentiment.ConsoleApp.Extraction.Bootstrap.Data { public class EvalData { public EvalData(string id, string text) { Id = id; Text = text; } public string Id { get; } public double? Stars { get; set; } public int TotalSentiments { get; set; } public bool? IsNeutral { get; set; } public PositivityType? CalculatedPositivity => IsNeutral == true ? PositivityType.Neutral : !Stars.HasValue ? (PositivityType?)null : Stars > 3 ? PositivityType.Positive : PositivityType.Negative; public string Text { get; } } } }
# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # Copyright 2004 Duke University """ Core DNF Errors. """ from __future__ import unicode_literals from dnf.i18n import ucd class DeprecationWarning(DeprecationWarning): # :api pass class Error(Exception): """Base Error. All other Errors thrown by DNF should inherit from this. :api """ def __init__(self, value=None): super(Error, self).__init__() self.value = None if value is None else ucd(value) def __str__(self): return "%s" %(self.value,) def __unicode__(self): return '%s' % self.value class CompsError(Error): pass class ConfigError(Error): pass class DepsolveError(Error): # :api pass class DownloadError(Error): # :api def __init__(self, errmap): super(DownloadError, self).__init__() self.errmap = errmap @staticmethod def errmap2str(errmap): errstrings = [] for key in errmap: for error in errmap[key]: msg = '%s: %s' % (key, error) if key else '%s' % error errstrings.append(msg) return '\n'.join(errstrings) def __str__(self): return self.errmap2str(self.errmap) def __unicode__(self): return ucd(self.__str__()) class LockError(Error): pass class MarkingError(Error): # :api def __init__(self, value=None, pkg_spec=None): """Initialize the marking error instance.""" super(MarkingError, self).__init__(value) self.pkg_spec = None if pkg_spec is None else ucd(pkg_spec) def __unicode__(self): string = super(MarkingError, self).__unicode__() if self.pkg_spec: string += ': ' + self.pkg_spec return string class MetadataError(Error): pass class MiscError(Error): pass class PackagesNotAvailableError(MarkingError): def __init__(self, value=None, pkg_spec=None, packages=None): super(PackagesNotAvailableError, self).__init__(value, pkg_spec) self.packages = packages or [] class PackageNotFoundError(MarkingError): pass class PackagesNotInstalledError(MarkingError): def __init__(self, value=None, pkg_spec=None, packages=None): super(PackagesNotInstalledError, self).__init__(value, pkg_spec) self.packages = packages or [] class ProcessLockError(LockError): def __init__(self, value, pid): super(ProcessLockError, self).__init__(value) self.pid = pid def __reduce__(self): """Pickling support.""" return (ProcessLockError, (self.value, self.pid)) class RepoError(Error): # :api pass class ThreadLockError(LockError): pass class TransactionCheckError(Error): pass
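A small illustration of how DownloadError flattens its error map (the map contents below are made up; an empty key produces a message with no prefix):

# Hypothetical sketch of DownloadError's string form.
errmap = {'repo-a': ['timed out'], '': ['checksum mismatch']}
err = DownloadError(errmap)
print(str(err))
# repo-a: timed out
# checksum mismatch
# (line order follows dict iteration order)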
using UnityEngine; using System.Collections; public interface ILaunchable { void Launch (); }
// LNLSFile.h // LNLSFile class declaration // wpfernandes 2015-11-23 10h50min #ifndef LNLSFILE_H #define LNLSFILE_H #include "Settings.h" #include "Post.h" #include "Parser.h" #include "Region.h" #include <string> #include <vector> #include <memory> class RsrcIfs {public: // Default constructor RsrcIfs() : pifs_(new std::ifstream) { } // Copy constructor RsrcIfs(const RsrcIfs& other) = delete; // Move constructor noexcept needed to enable optimizations in containers RsrcIfs(RsrcIfs&& other) noexcept : pifs_(std::move(other.pifs_)) { } // Destructor. best-practice: explicitly annotated with noexcept ~RsrcIfs() noexcept {} // Copy assignment operator RsrcIfs& operator=(const RsrcIfs& other) = delete; // Move assignment operator RsrcIfs& operator=(RsrcIfs&& other) = delete; std::ifstream& ifs() { return *pifs_; } private: std::unique_ptr<std::ifstream> pifs_; }; class LNLSFile { public: LNLSFile()=delete; LNLSFile(const LNLSFile&) = delete; LNLSFile(LNLSFile&&) = default; LNLSFile& operator=(const LNLSFile&) = delete; LNLSFile& operator=(LNLSFile&&) = default; LNLSFile(Settings& settings, Post& post, const std::string& directory, const std::string& source_file_name, int theta, int phi) : settings(settings), post(post), directory_(directory), source_file_name_(source_file_name), path_(Poco::Path(directory, source_file_name)), theta_(theta), phi_(phi), loaded_(false) { } ~LNLSFile()=default; ctrlEnum open_file_and_read_header(); ctrlEnum check_for_another_region(); ctrlEnum load_regions(); std::vector<Region>& regions() { return regions_; } ctrlEnum load(); ctrlEnum setNRegionAndRegionNameAtEachRegion(); void genAll(); void write_all(); void displaySequencesInfo(Sequences& sequences); void displayPlot2DInfo(Plot2D& plot2d); void displayPlot2DsInfo(std::vector<Plot2D>& plot2ds); void displayRegionInfo(Region& region); void displayRegionsInfo(); ctrlEnum doTest(); void directory(const std::string& directory) { directory_ = directory; } const std::string& directory() const { return directory_; } void source_file_name(const std::string& source_file_name) { source_file_name_ = source_file_name; } const std::string& source_file_name() const { return source_file_name_; } void theta(const int theta) { theta_ = theta; } int theta() const { return theta_; } void phi(const int phi) { phi_ = phi; } int phi() const { return phi_; } std::string path_to_string() { return path_.toString(); } const bool loaded() const { return loaded_; } std::ifstream& ifs() { return ifs_.ifs(); } private: Settings& settings; Post& post; std::string directory_; std::string source_file_name_; Poco::Path path_; int theta_; int phi_; bool loaded_; std::string experiment_; RsrcIfs ifs_; std::vector<Region> regions_; }; #endif // end of LNLSFile.h
// // SignInViewController.h // Ribbit // // Created by Ambreen Hasan on 4/2/15. // Copyright (c) 2015 Ambreen Hasan. All rights reserved. // #import <UIKit/UIKit.h> @interface SignInViewController : UIViewController @property (weak, nonatomic) IBOutlet UITextField *usernameField; @property (weak, nonatomic) IBOutlet UITextField *passwordField; - (IBAction)SignIn:(id)sender; @end
 
#!/usr/bin/env python
# coding=utf-8

import datetime

import pymongo
from bson.objectid import ObjectId

from db import db


class Video(object):
    """ Database operations on a video.

    Before inserting a video into the database, we attach some attributes to
    it. Some attributes may be private, some may be public.

    Attributes:
        author: a string naming the video's author
        title: a string briefly describing the video
        link: a string holding the video's link
    """
    def __init__(self, author, title, link):
        self.author = author
        self.title = title
        self.link = link
        now = datetime.datetime.now()
        self.time = now.strftime("%m-%d %H:%M:%S")
        self.pv = 0
        self.likes = 0
        self.comments = []
        self.likeperson = []

    def objectSelf(self):
        """Encapsulate all the attributes of the video into a dict"""
        video = {
            "author": self.author,
            "title": self.title,
            "comments": self.comments,
            "time": self.time,
            "likes": self.likes,
            "link": self.link,
            "likeperson": self.likeperson,
            "pv": self.pv
        }
        return video

    def saveVideo(self):
        """ Insert a video into the database """
        db.videos.insert(self.objectSelf())


def loadVideos():
    """ Load all videos in the database """
    coll = db.videos
    videos = coll.find().sort("likes", pymongo.DESCENDING)
    return videos


def getVideo(videoId):
    """ Look a video up by its id; if it exists, bump its view counter
    and return all of its fields. """
    coll = db.videos
    query = {"_id": ObjectId(videoId)}
    video = coll.find_one(query)
    if video:
        video["pv"] += 1
        coll.save(video)
    return video
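# Illustrative sketch (not part of the module above): the typical lifecycle of
# a Video document. Assumes a running MongoDB behind the local "db" module and
# that the module above is importable as "video"; all values are made up.
from db import db
from video import Video, loadVideos, getVideo

v = Video(author='alice', title='Intro clip', link='http://example.com/v/1')
v.saveVideo()                          # inserts objectSelf() into db.videos

for doc in loadVideos():               # cursor sorted by likes, descending
    print(doc['title'])

some_id = db.videos.find_one()['_id']  # grab any _id just for the demo
doc = getVideo(str(some_id))           # also increments the "pv" view counter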
#-*-coding:utf-8-*-
#
#    Simple stemmer for Croatian v0.1
#    Copyright 2012 Nikola Ljubešić and Ivan Pandžić
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Lesser General Public License as published
#    by the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Lesser General Public License for more details.
#
#    You should have received a copy of the GNU Lesser General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.

import os
import re
import sys

# The @profile decorator below only exists when the script is run under
# kernprof/line_profiler; provide a no-op fallback so it also runs standalone.
try:
    profile
except NameError:
    def profile(func):
        return func

stop=set(['biti','jesam','budem','sam','jesi','budeš','si','jesmo','budemo','smo','jeste','budete','ste','jesu','budu','su','bih','bijah','bjeh','bijaše','bi','bje','bješe','bijasmo','bismo','bjesmo','bijaste','biste','bjeste','bijahu','biste','bjeste','bijahu','bi','biše','bjehu','bješe','bio','bili','budimo','budite','bila','bilo','bile','ću','ćeš','će','ćemo','ćete','želim','želiš','želi','želimo','želite','žele','moram','moraš','mora','moramo','morate','moraju','trebam','trebaš','treba','trebamo','trebate','trebaju','mogu','možeš','može','možemo','možete'])

def istakniSlogotvornoR(niz):
    return re.sub(r'(^|[^aeiou])r($|[^aeiou])',r'\1R\2',niz)

def imaSamoglasnik(niz):
    if re.search(r'[aeiouR]',istakniSlogotvornoR(niz)) is None:
        return False
    else:
        return True

def transformiraj(pojavnica):
    for trazi,zamijeni in transformacije:
        if pojavnica.endswith(trazi):
            return pojavnica[:-len(trazi)]+zamijeni
    return pojavnica

def korjenuj(pojavnica):
    for pravilo in pravila:
        dioba=pravilo.match(pojavnica)
        if dioba is not None:
            if imaSamoglasnik(dioba.group(1)) and len(dioba.group(1))>1:
                return dioba.group(1)
    return pojavnica

@profile
def main():
    # korjenuj() and transformiraj() look these names up at module scope, so
    # the rule tables loaded here must be made global instead of staying
    # local to main().
    global pravila, transformacije

    if len(sys.argv)!=3:
        print 'Usage: python Croatian_stemmer.py input_file output_file'
        print 'input_file should be an utf8-encoded text file which is then tokenized, stemmed and written in the output_file in a tab-separated fashion.'
        sys.exit(1)
    output_file=open(sys.argv[2],'w')

    pravila=[re.compile(r'^('+osnova+')('+nastavak+r')$') for osnova, nastavak in [e.decode('utf8').strip().split(' ') for e in open(os.path.join(os.path.dirname(__file__), 'rules.txt'))]]
    transformacije=[e.decode('utf8').strip().split('\t') for e in open(os.path.join(os.path.dirname(__file__), 'transformations.txt'))]

    for token in re.findall(r'\w+',open(sys.argv[1]).read().decode('utf8'),re.UNICODE):
        if token.lower() in stop:
            output_file.write((token+'\t'+token.lower()+'\n').encode('utf8'))
            continue
        output_file.write((token+'\t'+korjenuj(transformiraj(token.lower()))+'\n').encode('utf8'))
    output_file.close()

if __name__=='__main__':
    main()
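# Illustrative sketch (not part of the script above): what the two small
# helpers do. Assumes the script is importable as "croatian_stemmer"; the
# words are ordinary Croatian tokens chosen for the example.
from croatian_stemmer import istakniSlogotvornoR, imaSamoglasnik

print istakniSlogotvornoR('prst')   # 'pRst'  -- syllabic r is marked as a vowel
print imaSamoglasnik('prst')        # True, thanks to the marked R
print imaSamoglasnik('st')          # False -- no vowel at all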
/// <reference no-default-lib="true"/> /// <reference path="../CloudParty.d.ts" /> /** * Description. * ------------------------------------------- * Add the example script to a prefab that is present in-world. * * GetMessage() content. * ------------------------------------------- * Here is a sample of getMessage() received, showing various form fields. * * { * event: { * offsetX: 22, * offsetY: 11, * target: { * offsetWidth: 55, * offsetHeight: 20 * } * }, * form: { * a: "a", * b: "b", * c: "c", * g: "0", * h: [], * i: "" * }, * state_path: { * root: true, * idx: 0 * }, * time: 1369432143045, * from: "6c0fb8d814402fb4", * from_ent: "P00A4EC", * from_display: "Salahzar Stenvaag", * click_handler: true * } * * * Salahzar™ * * Source. * ------------------------------------------- * https://wiki.cloudparty.com/wiki/Script_Examples/HTML_Form */ function clickStart() { controllerHTMLCreate( { ent: getMessageEnt(), title: 'HTML Form Example', html: '<form name="testForm">' + 'a:<input type="TEXT" name="a"><br>' + 'b:<input type="password" name="b"><br>' + '<input type="hidden" name="c" value="c">' + 'd:<input type="checkbox" name="d" value="d"><br>' + 'e:<input type="checkbox" name="e" value="e"><br>' + 'f0:<input type="radio" name="f" value="0"><br>' + 'f1:<input type="radio" name="f" value="1"><br>' + 'f2:<input type="radio" name="f" value="2"><br>' + 'g:<select size="1" name="g"><option value="0">0</option><option value="1">1</option></select><br>' + 'h:<select size="2" name="h" multiple="MULTIPLE"><option value="0">0</option><option value="1">1</option></select><br>' + 'i:<textarea id="textarea" name="i"></textarea><br>' + '<button id="submit" value="Submit">Submit</button></form>', listeners: [{ id: 'textarea' }], messagers: [{ id: 'submit' }], width: 300, height: 400, padding: 15 }); } handlerCreate( { name: 'clickStart', channel: 'direct', message: 'clickStart' }); function submitClick() { var msg = getMessageData(); error( msg ); //error( "Value of a: " + msg.form.a ); controllerMessage( { ent: getMessageEnt(), message: 'textarea', text: stringify( msg ) }); } handlerCreate( { name: 'submitClick', channel: 'controller', message: 'submit' });
# Copyright (C) 2010-2014 CEA/DEN, EDF R&D # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # See http://www.salome-platform.org/ or email : [email protected] # # This case corresponds to: /visu/CutLines/B1 case # Create Cut Lines for all data of the given MED file import sys from paravistest import datadir, pictureext, get_picture_dir from presentations import CreatePrsForFile, PrsTypeEnum import pvserver as paravis # Create presentations myParavis = paravis.myParavis # Directory for saving snapshots picturedir = get_picture_dir("CutLines/B1") file = datadir + "carre_en_quad4_seg2_fields.med" print " --------------------------------- " print "file ", file print " --------------------------------- " print "CreatePrsForFile..." CreatePrsForFile(myParavis, file, [PrsTypeEnum.CUTLINES], picturedir, pictureext)
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import django.db.models.deletion from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('credit', '0001_initial'), ] operations = [ migrations.CreateModel( name='CreditConfig', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')), ('enabled', models.BooleanField(default=False, verbose_name='Enabled')), ('cache_ttl', models.PositiveIntegerField(default=0, help_text='Specified in seconds. Enable caching by setting this to a value greater than 0.', verbose_name='Cache Time To Live')), ('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')), ], options={ 'ordering': ('-change_date',), 'abstract': False, }, ), ]
// // See README for overview // 'use strict'; // Parse query string to extract some parameters (it can fail for some input) var query = document.location.href.replace(/^[^?]*(\?([^#]*))?(#.*)?/, '$2'); var queryParams = query ? JSON.parse('{' + query.split('&').map(function (a) { return a.split('=').map(decodeURIComponent).map(JSON.stringify).join(': '); }).join(',') + '}') : {}; var url = queryParams.file || '../../test/pdfs/liveprogramming.pdf'; var scale = +queryParams.scale || 1.5; // // Fetch the PDF document from the URL using promises // PDFJS.getDocument(url).then(function(pdf) { var numPages = pdf.numPages; // Using promise to fetch the page // For testing only. var MAX_NUM_PAGES = 50; var ii = Math.min(MAX_NUM_PAGES, numPages); var promise = Promise.resolve(); for (var i = 1; i <= ii; i++) { var anchor = document.createElement('a'); anchor.setAttribute('name', 'page=' + i); anchor.setAttribute('title', 'Page ' + i); document.body.appendChild(anchor); // Using promise to fetch and render the next page promise = promise.then(function (pageNum, anchor) { return pdf.getPage(pageNum).then(function (page) { var viewport = page.getViewport(scale); var container = document.createElement('div'); container.id = 'pageContainer' + pageNum; container.className = 'pageContainer'; container.style.width = viewport.width + 'px'; container.style.height = viewport.height + 'px'; anchor.appendChild(container); return page.getOperatorList().then(function (opList) { var svgGfx = new PDFJS.SVGGraphics(page.commonObjs, page.objs); return svgGfx.getSVG(opList, viewport).then(function (svg) { container.appendChild(svg); }); }); }); }.bind(null, i, anchor)); } });
#!/usr/bin/env python
"""
conserved.py takes intersections of all replicates to find conserved domains.

Copyright (C) 2015 Jonathan Niles

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""

from __future__ import print_function

from functools import reduce  # keeps the reduce() below working on Python 3

import numpy as np

import nutils as nu
from pybedtools import BedTool

dataDir = nu.join(nu.sync, "data/domains/bed/")
exportDir = nu.chkdir(nu.join(nu.sync, "data/domains/conserved/"))

# templates and paths
tmpl = "{0}-{1}-{2}kb.bed"
windows = [100, 200, 400, 800, 1000]


def loadBed(cellType, rep, window):
    """loads a bed file in for work"""
    fname = nu.join(dataDir, tmpl.format(cellType, rep, window))
    print("Loading .bed record", fname)
    return BedTool(fname)


def filterConserved(cellType, window=200):
    """Intersects every replicate for a cell type and saves the domains
    common to all of them."""
    beds = [loadBed(cellType, rep, window) for rep in nu.datasets[cellType]]

    f = lambda x, y: x + y
    conserved = reduce(f, beds)
    print("Found", len(conserved), "conserved domains.")

    fname = nu.join(exportDir, "{0}.{1}kb.conserved.bed".format(cellType, window))
    print("Saving as", fname)
    conserved.saveas(fname)
    return


def findAllConserved():
    for cellType in nu.datasets:
        for win in windows:
            filterConserved(cellType, window=win)


if __name__ == "__main__":
    findAllConserved()
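# Illustrative sketch (not part of the script above): what the reduce over "+"
# is doing. In pybedtools, "a + b" intersects two BedTools (keeping intervals
# of a that overlap b), so chaining "+" across replicates keeps only domains
# present in every replicate. The file names here are made up.
from functools import reduce
from pybedtools import BedTool

beds = [BedTool('rep1.bed'), BedTool('rep2.bed'), BedTool('rep3.bed')]
conserved = reduce(lambda x, y: x + y, beds)
print(len(conserved), 'domains found in all replicates')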
# -*- Mode: Python; coding: iso-8859-1 -*- # vi:si:et:sw=4:sts=4:ts=4 ## ## Stoqdrivers ## Copyright (C) 2005 Async Open Source <http://www.async.com.br> ## All rights reserved ## ## This program is free software; you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation; either version 2 of the License, or ## (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program; if not, write to the Free Software ## Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, ## USA. ## ## Author(s): Henrique Romano <[email protected]> ## """ Base class implementation for all the scales drivers. """ from stoqdrivers.scales.base import BaseScale # # Scale interface # class Scale(BaseScale): def read_data(self): return self._driver.read_data() def test(): scale = Scale() print "Waiting for scale reply... " data = scale.read_data() print "...ok" print "Weight: %.02f" % data.weight print "Price per Kg: %.02f" % data.price_per_kg print "Total price: %.02f" % data.total_price if __name__ == "__main__": test()
package org.deeplearning4j.rottentomatoes.fetcher; import static org.junit.Assert.*; import org.deeplearning4j.rottentomatoes.data.train.fetcher.RottenTomatoesBagOfWordsDataFetcher; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Created by agibsonccc on 10/18/14. */ public class RottenTomatoesBagOfWordsDataFetcherTest { private static Logger log = LoggerFactory.getLogger(RottenTomatoesBagOfWordsDataFetcherTest.class); @Test public void testDataFetcher() { RottenTomatoesBagOfWordsDataFetcher fetcher = new RottenTomatoesBagOfWordsDataFetcher(); System.out.println(fetcher.getCountVectorizer().vocab().numWords()); fetcher.fetch(10); assertEquals(10,fetcher.next().numExamples()); } }
#!/usr/bin/env python """async11tcpserver.py: TCP Echo server protocol Usage: async11tcpserver.py """ import asyncio class EchoServerProtocol(asyncio.Protocol): def connection_made(self, transport): peername = transport.get_extra_info('peername') print(f'Connection from {peername}', flush=True) self.transport = transport def data_received(self, data): message = data.decode() print(f'Data received: {message!r}') print(f'Send: {message!r}') self.transport.write(data) print('Close the client socket', flush=True) self.transport.close() def main(): loop = asyncio.get_event_loop() # Each client connection will create a new protocol instance coro = loop.create_server(EchoServerProtocol, '127.0.0.1', 8888) server = loop.run_until_complete(coro) # Serve requests until Ctrl+C is pressed print(f'Serving on {server.sockets[0].getsockname()}', flush=True) try: loop.run_forever() except KeyboardInterrupt: pass # Close the server server.close() loop.run_until_complete(server.wait_closed()) loop.close() if __name__ == '__main__': main()
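# Illustrative companion sketch (not part of the file above): a minimal client
# for the echo server on 127.0.0.1:8888, using the standard-library streams
# API. The message text is made up.
import asyncio

async def echo_once(message):
    reader, writer = await asyncio.open_connection('127.0.0.1', 8888)
    writer.write(message.encode())
    data = await reader.read(100)          # the server echoes, then closes
    print(f'Received: {data.decode()!r}')
    writer.close()

asyncio.get_event_loop().run_until_complete(echo_once('hello world'))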
# -*- coding: utf-8 -*- # # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from google.cloud.vision_helpers.decorators import add_single_feature_methods from google.cloud.vision_helpers import VisionHelpers from google.cloud.vision_v1p2beta1 import types from google.cloud.vision_v1p2beta1.gapic import enums from google.cloud.vision_v1p2beta1.gapic import image_annotator_client as iac @add_single_feature_methods class ImageAnnotatorClient(VisionHelpers, iac.ImageAnnotatorClient): __doc__ = iac.ImageAnnotatorClient.__doc__ enums = enums __all__ = ( 'enums', 'types', 'ImageAnnotatorClient', )
// // SIFloatingMenuViewController.h // SIFloatingMenuViewController // // Created by Shawn Irvin on 4/21/15. // Copyright (c) 2015 Shawn Irvin. All rights reserved. // #import <UIKit/UIKit.h> #import "SIMenuButton.h" #import "SIMenuTableView.h" #import "FXBlurView.h" /** * Choices for how the menu button animates onto screen. */ typedef NS_ENUM(NSUInteger, SIMenuButtonAnimationType){ /** * Animate from the left side of the screen to chosen location. */ kSIMenuAnimationTypeFromLeft, /** * Animate from the bottom of the screen to chosen location. */ kSIMenuAnimationTypeFromBottom, /** * Animate from the right side of the screen to chosen location. */ kSIMenuAnimationTypeFromRight, /** * No animation. Menu button just appears on screen. */ kSIMenuAnimationTypeNone }; /** * Choices for where the menu button is displayed on screen. */ typedef NS_ENUM(NSUInteger, SIMenuButtonLocation){ /** * Shows the menu button in the bottom left corner of the screen. */ kSIMenuLocationBottomLeft, /** * Shows the menu button in the bottom right corner of the screen. */ kSIMenuLocationBottomRight, /** * Shows the menu button in the bottom middle of the screen. */ kSIMenuLocationBottomMiddle }; typedef void (^MenuItemSelected)(SIMenuItem *menuItem, NSUInteger selectedIndex); @interface SIFloatingMenuViewController : UIViewController /******************************* Menu Button Properties *******************************/ /** * The button containing the icon. This property is readonly, but you can still set values on it. */ @property (strong, nonatomic, readonly) SIMenuButton *menuButton; /** * Where the menu button is displayed on screen. Choose one of the SIMenuButtonLocation enum values. */ @property (nonatomic) SIMenuButtonLocation menuButtonLocation; /** * How the menu button is displayed on screen. Choose one of the SIMenuButtonAnimationType enum values. */ @property (nonatomic) SIMenuButtonAnimationType menuButtonAnimationType; /** * The size of the menu button. Looks best if width and height are equal. */ @property (nonatomic) CGSize menuButtonSize; /** * The distance between the edge of the button and the edge (left, bottom, and right edges) of the screen. */ @property (nonatomic) CGFloat menuButtonBorderMargin; /** * The additional margain to the bottom of the screen, on top of the menuButtonBorderMargin. */ @property (nonatomic) CGFloat menuButtonAdditionalBottomMargin; /** * The additional margain to the left or right of the screen, on top of the menuButtonBorderMargin. */ @property (nonatomic) CGFloat menuButtonAdditionalSideMargin; /** * How quickly the menuButton animates on screen. Must be a value between 0 and 20. */ @property (nonatomic) CGFloat animationSpeed; /** * How bouncy the menuButton is while animating. Must be a value between 0 and 20. */ @property (nonatomic) CGFloat animationBounciness; /** * When the menuButton is pressed, this determines how much it scales down. Must be a value between 0 and 1. */ @property (nonatomic) CGFloat menuButtonPressedAnimationScale; /******************************* Menu Properties *******************************/ /** * Whether the menu is currently showing on screen or not. This property is read only. */ @property (nonatomic, readonly) BOOL menuIsOpen; /** * Table view containing the menu items. */ @property (strong, nonatomic, readonly) SIMenuTableView *menuTableView; /** * The block that should be called when a menu item is selected. 
*/ @property (strong, nonatomic) MenuItemSelected menuItemSelectedBlock; /** * The UIViewControllers to be displayed when the respective menu item is selected. This property is read only! Use the set, add, remove, and insert methods to modify the viewControllers. */ @property (strong, nonatomic) NSArray *viewControllers; /** * The speed at which the dimmed view should animate it's alpha. */ @property (nonatomic) CGFloat backgroundViewAnimationSpeed; /** * Should the menu dismiss if the background is tapped. */ @property (nonatomic) BOOL backgroundTapDismissesMenu; /** * The view displayed behind the menu when the menu is being displayed. */ @property (strong, nonatomic, readonly) FXBlurView *backgroundView; /** * DESIGNATED INITIALIZER * * @param viewControllers The UIViewControllers to be displayed when the respective menu item is selected. * @param size The size of the menuButton. * @param margin The distance between the edge of the button and the edge (left, bottom, and right edges) of the screen. * @param animateToInitialState Whether the icon should animate to it's initial state when coming on screen. * * @return Instance of SIFloatingMenuViewController */ -(instancetype)initWithViewControllers:(NSArray *)viewControllers menuButtonSize:(CGSize)size borderMargin:(CGFloat)margin animateIconToInitalState:(BOOL)animateToInitialState; /** * Show the menu. * * @param animated Whether the menu should animate on screen or not. */ -(void)showMenuAnimated:(BOOL)animated; /** * Hide the menu. * * @param animated Whether the menu should animate off screen or not. */ -(void)hideMenuAnimated:(BOOL)animated; @end
// Simple phantom.js integration script // Taken from Twitter Bootstrap function waitFor(testFx, onReady, timeOutMillis) { var maxtimeOutMillis = timeOutMillis ? timeOutMillis : 5001 //< Default Max Timout is 5s , start = new Date().getTime() , condition = false , interval = setInterval(function () { if ((new Date().getTime() - start < maxtimeOutMillis) && !condition) { // If not time-out yet and condition not yet fulfilled condition = (typeof(testFx) === "string" ? eval(testFx) : testFx()) //< defensive code } else { if (!condition) { // If condition still not fulfilled (timeout but condition is 'false') console.log("'waitFor()' timeout") phantom.exit(1) } else { // Condition fulfilled (timeout and/or condition is 'true') typeof(onReady) === "string" ? eval(onReady) : onReady() //< Do what it's supposed to do once the condition is fulfilled clearInterval(interval) //< Stop this interval } } }, 100) //< repeat check every 100ms } if (phantom.args.length === 0 || phantom.args.length > 2) { console.log('Usage: phantom.js URL') phantom.exit() } var page = new WebPage() // Route "console.log()" calls from within the Page context to the main Phantom context (i.e. current "this") page.onConsoleMessage = function(msg) { console.log(msg) }; page.open(phantom.args[0], function(status){ if (status !== "success") { console.log("Unable to access network") phantom.exit() } else { waitFor(function(){ return page.evaluate(function(){ var el = document.getElementById('qunit-testresult') if (el && el.innerText.match('completed')) { return true } return false }) }, function(){ var failedNum = page.evaluate(function(){ var el = document.getElementById('qunit-testresult') try { return el.getElementsByClassName('failed')[0].innerHTML } catch (e) { } return 10000 }); phantom.exit((parseInt(failedNum, 10) > 0) ? 1 : 0) }) } })
# timeouts TIMEOUT = 30 WAIT_TIME = 0.0 RESOURCE_TIMEOUT = 0.0 MAX_TIMEOUT = 60.0 MAX_WAIT_TIME = 10.0 # Default size of browser window. As there're no decorations, this affects # both "window.inner*" and "window.outer*" values. VIEWPORT_SIZE = '1024x768' # Window size limitations. VIEWPORT_MAX_WIDTH = 20000 VIEWPORT_MAX_HEIGTH = 20000 VIEWPORT_MAX_AREA = 4000*4000 MAX_WIDTH = 1920 MAX_HEIGTH = 1080 AUTOLOAD_IMAGES = 1 # If 'raster', PNG images will be rescaled after rendering as regular images. # If 'vector', PNG image will be rescaled during rendering which is faster and # crisper, but may cause rendering artifacts. IMAGE_SCALE_METHOD = 'raster' # This value has the same meaning as "level" kwarg of :func:`zlib.compress`: # - 0 means no compression at all # - 1 means best speed, lowest compression ratio # - 9 means best compression, lowest speed # # The default is 1, because it is twice as fast as 9 and produces only 15% # larger files. PNG_COMPRESSION_LEVEL = 1 # 75 is Pillow default. Values above 95 should be avoided; # 100 disables portions of the JPEG compression algorithm, # and results in large files with hardly any gain in image quality. JPEG_QUALITY = 75 # There's a bug in Qt that manifests itself when width or height of rendering # surface (aka the png image) is more than 32768. Usually, this is solved by # rendering the image in tiled manner and obviously, TILE_MAXSIZE must not # exceed that value. # # Other than that, the setting is a tradeoff between performance and memory # usage, because QImage that acts as a rendering surface is quite a resource # hog. So, if you increase tile size you may end up using a lot more memory, # but there is less image pasting and the rendering is faster. As of now, 2048 # size is chosen to fit commonly used 1080p resolution in one tile. TILE_MAXSIZE = 2048 # defaults for render.json endpoint DO_HTML = 0 DO_IFRAMES = 0 DO_PNG = 0 DO_JPEG = 0 SHOW_SCRIPT = 0 SHOW_CONSOLE = 0 SHOW_HISTORY = 0 SHOW_HAR = 0 # servers SPLASH_PORT = 8050 PROXY_PORT = 8051 MANHOLE_PORT = 5023 MANHOLE_USERNAME = 'admin' MANHOLE_PASSWORD = 'admin' # pool options SLOTS = 50 # disk cache options - don't enable it unless you know what you're doing CACHE_ENABLED = False CACHE_SIZE = 50 # MB CACHE_PATH = '.splash-cache' # security options ALLOWED_SCHEMES = ['http', 'https', 'data', 'ftp', 'sftp', 'ws', 'wss'] JS_CROSS_DOMAIN_ENABLED = False # logging VERBOSITY = 1
from sklearn import svm from dataset import load_data from sklearn import cross_validation from sklearn import datasets from sklearn.cross_validation import train_test_split from sklearn.grid_search import GridSearchCV from sklearn.metrics import classification_report import matplotlib.pyplot as plt import numpy as np from sklearn.utils import shuffle #This is where your clf will be (model) scores, targets, acc = load_data() ''' # Loading the Digits dataset digits = datasets.load_digits() # To apply an classifier on this data, we need to flatten the image, to # turn the data in a (samples, feature) matrix: n_samples = len(digits.images) scores = digits.images.reshape((n_samples, -1)) targets = digits.target ''' # Split the dataset in two equal parts X_train, X_test, y_train, y_test = train_test_split( scores, targets, test_size=0.5, random_state=0) # Set the parameters by cross-validation tuned_parameters = [{'C':[ 3.12500000e-02, 7.06322365e-02, 1.59645210e-01, 3.60835144e-01, 8.15570983e-01, 1.84337928e+00, 4.16646404e+00, 9.41717355e+00, 2.12849929e+01, 4.81090129e+01, 1.08737510e+02, 2.45771952e+02, 5.55501524e+02, 1.25556208e+03, 2.83786105e+03, 6.41422312e+03, 1.44976296e+04, 3.27680000e+04] ,'cache_size':[200],'class_weight':['balanced'], 'coef0':[0.0], 'decision_function_shape':[None],'degree':[1,2,3,4,5,6],'gamma':[ 3.05175781e-05, 6.35751960e-05, 1.32441884e-04, 2.75907174e-04, 5.74778659e-04, 1.19739731e-03, 2.49445641e-03, 5.19653148e-03, 1.08255808e-02, 2.25521965e-02, 4.69814579e-02, 9.78732776e-02, 2.03892746e-01, 4.24755897e-01, 8.84865086e-01, 1.84337928e+00, 3.84018675e+00, 8.00000000e+00] ,'kernel':['rbf','poly','linear','sigmoid'],'max_iter':[-1] }] ''' ,'probability':['False'], 'random_state':[None], 'shrinking':['True','False'], 'tol':[1e-3], 'verbose':[1] {'kernel': ['rbf'], 'gamma': [1e-3, 1e-4], 'C': [1, 10, 100, 1000]}, {'kernel': ['linear'], 'C': [1, 10, 100, 1000]}] ''' scores2 = ['precision', 'recall'] ''' clf = svm.SVC( C=1.0, cache_size=200, class_weight=None, coef0=0.0, decision_function_shape=None, degree=3, gamma='auto', kernel='rbf', max_iter=-1, probability=False, random_state=None, shrinking=True, tol=0.001, verbose=False ) clf.fit(scores,targets) scores = cross_validation.cross_val_score(clf, scores, targets, cv=10, scoring='accuracy') ''' for score in scores2: print("# Tuning hyper-parameters for %s" % score) print() clf = GridSearchCV(svm.SVC(C=1), tuned_parameters, cv=5, scoring='%s_weighted' % score) clf.fit(X_train, y_train) print("Best parameters set found on development set:") print() print(clf.best_params_) print() print("Grid scores on development set:") print() print('Mean Score...StD\tHyperparameters') for params, mean_score, grid_scores in clf.grid_scores_: pass # print("%0.3f (+/-%0.03f) for %r" % (mean_score, grid_scores.std() * 2, params)) print() print("Detailed classification report:") print() print("The model is trained on the full development set.") print("The scores are computed on the full evaluation set.") print() y_true, y_pred = y_test, clf.predict(X_test) print(classification_report(y_true, y_pred)) print() # Note the problem is too easy: the hyperparameter plateau is too flat and the # output model is the same for precision and recall with ties in quality.
# Copyright (c) 2015 EMC Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from cinder import context from cinder.tests.unit import fake_constants as fake from cinder.tests.unit import fake_volume from cinder.tests.unit.volume.drivers.emc import scaleio class TestInitializeConnection(scaleio.TestScaleIODriver): def setUp(self): """Setup a test case environment.""" super(TestInitializeConnection, self).setUp() self.connector = {} self.ctx = ( context.RequestContext('fake', 'fake', True, auth_token=True)) self.volume = fake_volume.fake_volume_obj( self.ctx, **{'provider_id': fake.PROVIDER_ID}) def test_only_qos(self): qos = {'maxIOPS': 1000, 'maxBWS': 2048} extraspecs = {} connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertEqual(1000, int(connection_properties['iopsLimit'])) self.assertEqual(2048, int(connection_properties['bandwidthLimit'])) def test_no_qos(self): qos = {} extraspecs = {} connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertIsNone(connection_properties['iopsLimit']) self.assertIsNone(connection_properties['bandwidthLimit']) def test_only_extraspecs(self): qos = {} extraspecs = {'sio:iops_limit': 2000, 'sio:bandwidth_limit': 4096} connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertEqual(2000, int(connection_properties['iopsLimit'])) self.assertEqual(4096, int(connection_properties['bandwidthLimit'])) def test_qos_and_extraspecs(self): qos = {'maxIOPS': 1000, 'maxBWS': 3072} extraspecs = {'sio:iops_limit': 2000, 'sio:bandwidth_limit': 4000} connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertEqual(1000, int(connection_properties['iopsLimit'])) self.assertEqual(3072, int(connection_properties['bandwidthLimit'])) def test_qos_scaling_and_max(self): qos = {'maxIOPS': 100, 'maxBWS': 2048, 'maxIOPSperGB': 10, 'maxBWSperGB': 128} extraspecs = {} self.volume.size = 8 connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertEqual(80, int(connection_properties['iopsLimit'])) self.assertEqual(1024, int(connection_properties['bandwidthLimit'])) self.volume.size = 24 connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertEqual(100, int(connection_properties['iopsLimit'])) self.assertEqual(2048, int(connection_properties['bandwidthLimit'])) def test_qos_scaling_no_max(self): qos = {'maxIOPSperGB': 10, 'maxBWSperGB': 128} extraspecs = {} self.volume.size = 8 connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertEqual(80, int(connection_properties['iopsLimit'])) self.assertEqual(1024, int(connection_properties['bandwidthLimit'])) def test_qos_round_up(self): qos = {'maxBWS': 2000, 'maxBWSperGB': 100} extraspecs = {} self.volume.size = 8 connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertEqual(1024, int(connection_properties['bandwidthLimit'])) self.volume.size = 
24 connection_properties = ( self._initialize_connection(qos, extraspecs)['data']) self.assertEqual(2048, int(connection_properties['bandwidthLimit'])) def test_vol_id(self): extraspecs = qos = {} connection_properties = ( self._initialize_connection(extraspecs, qos)['data']) self.assertEqual(fake.PROVIDER_ID, connection_properties['scaleIO_volume_id']) def _initialize_connection(self, qos, extraspecs): self.driver._get_volumetype_qos = mock.MagicMock() self.driver._get_volumetype_qos.return_value = qos self.driver._get_volumetype_extraspecs = mock.MagicMock() self.driver._get_volumetype_extraspecs.return_value = extraspecs return self.driver.initialize_connection(self.volume, self.connector)
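# Illustrative sketch (not part of the tests above): the effective-limit rule
# the IOPS assertions encode -- a per-GB value scales with volume size and is
# capped by the absolute maximum when both are given. This is a reading of the
# test expectations, not the driver's implementation; note that bandwidth
# limits are additionally rounded up to a multiple of 1024 (test_qos_round_up).
def effective_iops_limit(size_gb, per_gb=None, absolute=None):
    if per_gb is None and absolute is None:
        return None                     # unlimited, as in test_no_qos
    if per_gb is None:
        return absolute
    scaled = per_gb * size_gb
    return min(scaled, absolute) if absolute is not None else scaled

assert effective_iops_limit(8, per_gb=10, absolute=100) == 80    # scaling wins
assert effective_iops_limit(24, per_gb=10, absolute=100) == 100  # cap wins
assert effective_iops_limit(8, per_gb=10) == 80                  # no cap at all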
__author__ = "Christian Kongsgaard" __license__ = 'MIT' # -------------------------------------------------------------------------------------------------------------------- # # IMPORTS # Modules import os import json # RiBuild Modules from delphin_6_automation.database_interactions import mongo_setup from delphin_6_automation.database_interactions.auth import auth_2d_1d as auth_dict from delphin_6_automation.database_interactions import weather_interactions from delphin_6_automation.database_interactions import delphin_interactions from delphin_6_automation.database_interactions import material_interactions from delphin_6_automation.database_interactions import sampling_interactions from delphin_6_automation.database_interactions.db_templates import sample_entry # -------------------------------------------------------------------------------------------------------------------- # # RIBuild server = mongo_setup.global_init(auth_dict) def upload_materials(folder): for file in os.listdir(folder): material_interactions.upload_material_file(f'{folder}/{file}') def upload_weather(folder): for file in os.listdir(folder): print(file) weather_interactions.upload_weather_to_db(os.path.join(folder, file)) def upload_strategy(folder): strategy = os.path.join(folder, 'sampling_strategy.json') with open(strategy) as file: data = json.load(file) sampling_interactions.upload_sampling_strategy(data) def upload_designs(folder): strategy = sample_entry.Strategy.objects().first() for file in os.listdir(folder): delphin_interactions.upload_design_file(os.path.join(folder, file), strategy.id) # upload_weather(r'C:\Users\ocni\OneDrive - Danmarks Tekniske Universitet\Shared WP6 DTU-SBiAAU\weather\WAC') # upload_materials(r'C:\Program Files\IBK\Delphin 6.0\resources\DB_materials') upload_strategy(r'C:\Users\ocni\OneDrive - Danmarks Tekniske Universitet\Shared WP6 DTU-SBiAAU\sampling_strategy') upload_designs(r'C:\Users\ocni\OneDrive - Danmarks Tekniske Universitet\Shared WP6 DTU-SBiAAU\designs') mongo_setup.global_end_ssh(server)
#!/usr/bin/env python # # @license Apache-2.0 # # Copyright (c) 2019 The Stdlib Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Benchmark scipy.special.ellipe.""" from __future__ import print_function import timeit NAME = "ellipe" REPEATS = 3 ITERATIONS = 1000000 def print_version(): """Print the TAP version.""" print("TAP version 13") def print_summary(total, passing): """Print the benchmark summary. # Arguments * `total`: total number of tests * `passing`: number of passing tests """ print("#") print("1.." + str(total)) # TAP plan print("# total " + str(total)) print("# pass " + str(passing)) print("#") print("# ok") def print_results(elapsed): """Print benchmark results. # Arguments * `elapsed`: elapsed time (in seconds) # Examples ``` python python> print_results(0.131009101868) ``` """ rate = ITERATIONS / elapsed print(" ---") print(" iterations: " + str(ITERATIONS)) print(" elapsed: " + str(elapsed)) print(" rate: " + str(rate)) print(" ...") def benchmark(): """Run the benchmark and print benchmark results.""" setup = "from scipy.special import ellipe; from random import random;" stmt = "y = ellipe(random())" t = timeit.Timer(stmt, setup=setup) print_version() for i in range(REPEATS): print("# python::scipy::" + NAME) elapsed = t.timeit(number=ITERATIONS) print_results(elapsed) print("ok " + str(i+1) + " benchmark finished") print_summary(REPEATS, REPEATS) def main(): """Run the benchmark.""" benchmark() if __name__ == "__main__": main()
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ios/chrome/browser/voice/voice_search_prefs.h" namespace prefs { // User preferred speech input language for voice search. const char kVoiceSearchLocale[] = "ios.speechinput.voicesearch_locale"; // Boolean which indicates if TTS after voice search is enabled. const char kVoiceSearchTTS[] = "ios.speechinput.voicesearch_tts"; } // namespace prefs
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Database setup and migration commands.""" from nova import utils IMPL = utils.LazyPluggable('backend', config_group='database', sqlalchemy='nova.db.sqlalchemy.migration') INIT_VERSION = 132 def db_sync(version=None): """Migrate the database to `version` or the most recent version.""" return IMPL.db_sync(version=version) def db_version(): """Display the current database version.""" return IMPL.db_version()
//%LICENSE//////////////////////////////////////////////////////////////// // // Licensed to The Open Group (TOG) under one or more contributor license // agreements. Refer to the OpenPegasusNOTICE.txt file distributed with // this work for additional information regarding copyright ownership. // Each contributor licenses this file to you under the OpenPegasus Open // Source License; you may not use this file except in compliance with the // License. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. // IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY // CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE // SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // ////////////////////////////////////////////////////////////////////////// // //%///////////////////////////////////////////////////////////////////////// #include "UNIX_MultiStateSensorProvider.h" UNIX_MultiStateSensorProvider::UNIX_MultiStateSensorProvider() { } UNIX_MultiStateSensorProvider::~UNIX_MultiStateSensorProvider() { } CIMInstance UNIX_MultiStateSensorProvider::constructInstance( const CIMName &className, const CIMNamespaceName &nameSpace, const UNIX_MultiStateSensor &_p) { CIMProperty p; CIMInstance inst(className); // Set path inst.setPath(CIMObjectPath(String(""), // hostname nameSpace, CIMName("UNIX_MultiStateSensor"), constructKeyBindings(_p))); //CIM_ManagedElement Properties if (_p.getInstanceID(p)) inst.addProperty(p); if (_p.getCaption(p)) inst.addProperty(p); if (_p.getDescription(p)) inst.addProperty(p); if (_p.getElementName(p)) inst.addProperty(p); //CIM_ManagedSystemElement Properties if (_p.getInstallDate(p)) inst.addProperty(p); if (_p.getName(p)) inst.addProperty(p); if (_p.getOperationalStatus(p)) inst.addProperty(p); if (_p.getStatusDescriptions(p)) inst.addProperty(p); if (_p.getStatus(p)) inst.addProperty(p); if (_p.getHealthState(p)) inst.addProperty(p); if (_p.getCommunicationStatus(p)) inst.addProperty(p); if (_p.getDetailedStatus(p)) inst.addProperty(p); if (_p.getOperatingStatus(p)) inst.addProperty(p); if (_p.getPrimaryStatus(p)) inst.addProperty(p); //CIM_LogicalElement Properties //CIM_EnabledLogicalElement Properties if (_p.getEnabledState(p)) inst.addProperty(p); if (_p.getOtherEnabledState(p)) inst.addProperty(p); if (_p.getRequestedState(p)) inst.addProperty(p); if (_p.getEnabledDefault(p)) inst.addProperty(p); if (_p.getTimeOfLastStateChange(p)) inst.addProperty(p); if (_p.getAvailableRequestedStates(p)) inst.addProperty(p); if (_p.getTransitioningToState(p)) inst.addProperty(p); //CIM_LogicalDevice Properties if (_p.getSystemCreationClassName(p)) inst.addProperty(p); if (_p.getSystemName(p)) inst.addProperty(p); if 
(_p.getCreationClassName(p)) inst.addProperty(p); if (_p.getDeviceID(p)) inst.addProperty(p); if (_p.getPowerManagementSupported(p)) inst.addProperty(p); if (_p.getPowerManagementCapabilities(p)) inst.addProperty(p); if (_p.getAvailability(p)) inst.addProperty(p); if (_p.getStatusInfo(p)) inst.addProperty(p); if (_p.getLastErrorCode(p)) inst.addProperty(p); if (_p.getErrorDescription(p)) inst.addProperty(p); if (_p.getErrorCleared(p)) inst.addProperty(p); if (_p.getOtherIdentifyingInfo(p)) inst.addProperty(p); if (_p.getPowerOnHours(p)) inst.addProperty(p); if (_p.getTotalPowerOnHours(p)) inst.addProperty(p); if (_p.getIdentifyingDescriptions(p)) inst.addProperty(p); if (_p.getAdditionalAvailability(p)) inst.addProperty(p); if (_p.getMaxQuiesceTime(p)) inst.addProperty(p); //CIM_Sensor Properties if (_p.getSensorType(p)) inst.addProperty(p); if (_p.getOtherSensorTypeDescription(p)) inst.addProperty(p); if (_p.getPossibleStates(p)) inst.addProperty(p); if (_p.getCurrentState(p)) inst.addProperty(p); if (_p.getPollingInterval(p)) inst.addProperty(p); if (_p.getSensorContext(p)) inst.addProperty(p); //CIM_MultiStateSensor Properties return inst; } Array<CIMKeyBinding> UNIX_MultiStateSensorProvider::constructKeyBindings(const UNIX_MultiStateSensor& _p) { Array<CIMKeyBinding> keys; keys.append(CIMKeyBinding( PROPERTY_SYSTEM_CREATION_CLASS_NAME, _p.getSystemCreationClassName(), CIMKeyBinding::STRING)); keys.append(CIMKeyBinding( PROPERTY_SYSTEM_NAME, _p.getSystemName(), CIMKeyBinding::STRING)); keys.append(CIMKeyBinding( PROPERTY_CREATION_CLASS_NAME, _p.getCreationClassName(), CIMKeyBinding::STRING)); keys.append(CIMKeyBinding( PROPERTY_DEVICE_ID, _p.getDeviceID(), CIMKeyBinding::STRING)); return keys; } #define UNIX_PROVIDER UNIX_MultiStateSensorProvider #define UNIX_PROVIDER_NAME "UNIX_MultiStateSensorProvider" #define CLASS_IMPLEMENTATION UNIX_MultiStateSensor #define CLASS_IMPLEMENTATION_NAME "UNIX_MultiStateSensor" #define BASE_CLASS_NAME "CIM_MultiStateSensor" #define NUMKEYS_CLASS_IMPLEMENTATION 0 #include "UNIXProviderBase.hpp"
#!/usr/bin/env python # coding: UTF-8 import datetime import logging # Define the logger LOG = logging.getLogger(__name__) def print_dates(number_of_days_back_in_time, start_date=datetime.datetime.now()): for i in range(0, number_of_days_back_in_time): print (start_date - datetime.timedelta(days=i)).date() if __name__ == "__main__": import argparse def date( date_string ): # argparse.ArgumentTypeError() return datetime.datetime.strptime( date_string, '%Y-%m-%d' ) def date_back_in_time( number_of_days_back_in_time ): number_of_days_back_in_time = int(number_of_days_back_in_time) d = datetime.datetime.now() - datetime.timedelta(days=number_of_days_back_in_time) return d parser = argparse.ArgumentParser(description='Print dates <number_of_days> back in time from start date. Start date can be set by number of days back or to specify the date.') parser.add_argument('number_of_days', type=int, help='The number of dates to print.') group = parser.add_mutually_exclusive_group() group.add_argument('--start-date', type=date, help='The number of dates to print.', default=datetime.datetime.now()) group.add_argument('--start-days-back-in-time', type=date_back_in_time, dest='start_date', help='The number of days back in to use as start date.', default=0) group = parser.add_mutually_exclusive_group() group.add_argument('-d', '--debug', action='store_true', help="Output debugging information.") group.add_argument('-v', '--verbose', action='store_true', help="Output info.") parser.add_argument('--log-filename', type=str, help="File used to output logging information.") # Do the parser. args = parser.parse_args() # Set the log options. if args.debug: logging.basicConfig(filename=args.log_filename, level=logging.DEBUG) elif args.verbose: logging.basicConfig(filename=args.log_filename, level=logging.INFO) else: logging.basicConfig(filename=args.log_filename, level=logging.WARNING) # Output what is in the args variable. LOG.debug(args) print_dates(args.number_of_days, args.start_date)
from twilio.rest.resources import InstanceResource, ListResource from twilio.rest.resources.applications import Applications from twilio.rest.resources.notifications import Notifications from twilio.rest.resources.recordings import Transcriptions, Recordings from twilio.rest.resources.calls import Calls from twilio.rest.resources.sms_messages import Sms from twilio.rest.resources.caller_ids import CallerIds from twilio.rest.resources.phone_numbers import PhoneNumbers from twilio.rest.resources.conferences import Conferences from twilio.rest.resources.connect_apps import ( ConnectApps, AuthorizedConnectApps ) from twilio.rest.resources.queues import Queues from twilio.rest.resources.usage import UsageRecords, UsageTriggers class Account(InstanceResource): """ An Account resource """ ACTIVE = "active" SUSPENDED = "suspended" CLOSED = "closed" subresources = [ Applications, Notifications, Transcriptions, Recordings, Calls, Sms, CallerIds, PhoneNumbers, Conferences, ConnectApps, Queues, AuthorizedConnectApps, UsageRecords, UsageTriggers, ] def update(self, **kwargs): """ :param friendly_name: Update the description of this account. :param status: Alter the status of this account Use :data:`CLOSED` to irreversibly close this account, :data:`SUSPENDED` to temporarily suspend it, or :data:`ACTIVE` to reactivate it. """ self.update_instance(**kwargs) def close(self): """ Permenently deactivate this account """ return self.update_instance(status=Account.CLOSED) def suspend(self): """ Temporarily suspend this account """ return self.update_instance(status=Account.SUSPENDED) def activate(self): """ Reactivate this account """ return self.update_instance(status=Account.ACTIVE) class Accounts(ListResource): """ A list of Account resources """ name = "Accounts" instance = Account def list(self, **kwargs): """ Returns a page of :class:`Account` resources as a list. For paging informtion see :class:`ListResource` :param date friendly_name: Only list accounts with this friendly name :param date status: Only list accounts with this status """ return self.get_instances(kwargs) def update(self, sid, **kwargs): """ :param sid: Account identifier :param friendly_name: Update the description of this account. :param status: Alter the status of this account Use :data:`CLOSED` to irreversibly close this account, :data:`SUSPENDED` to temporarily suspend it, or :data:`ACTIVE` to reactivate it. """ return self.update_instance(sid, kwargs) def close(self, sid): """ Permenently deactivate an account, Alias to update """ return self.update(sid, status=Account.CLOSED) def suspend(self, sid): """ Temporarily suspend an account, Alias to update """ return self.update(sid, status=Account.SUSPENDED) def activate(self, sid): """ Reactivate an account, Alias to update """ return self.update(sid, status=Account.ACTIVE) def create(self, **kwargs): """ Returns a newly created sub account resource. :param friendly_name: Update the description of this account. """ return self.create_instance(kwargs)
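# Illustrative sketch (not part of the module above): driving these resources
# through the legacy TwilioRestClient. The SID/token are placeholders and the
# client entry point is an assumption about the 3.x-era library.
from twilio.rest import TwilioRestClient

client = TwilioRestClient('ACxxxxxxxxxxxxxxxx', 'your_auth_token')

sub = client.accounts.create(friendly_name='staging sub-account')
for account in client.accounts.list(status='active'):
    print(account.friendly_name)

client.accounts.suspend(sub.sid)   # alias for update(status=Account.SUSPENDED)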
import React from 'react'; import { default as Box } from '../components/primitives/Box'; import type { SpaceType as ThemeSpaceType } from '../components/types'; import { ResponsiveQueryContext } from './useResponsiveQuery/ResponsiveQueryProvider'; type SpaceType = | 'gutter' | '2xs' | 'xs' | 'sm' | 'md' | 'lg' | 'xl' | '2xl' | ThemeSpaceType; // Thanks @gregberge for code and @nandorojo for suggestion. // Original source: https://github.com/gregberge/react-flatten-children type ReactChildArray = ReturnType<typeof React.Children.toArray>; function flattenChildren(children: React.ReactNode): ReactChildArray { const childrenArray = React.Children.toArray(children); return childrenArray.reduce((flatChildren: ReactChildArray, child) => { if ((child as React.ReactElement<any>).type === React.Fragment) { return flatChildren.concat( flattenChildren((child as React.ReactElement<any>).props.children) ); } flatChildren.push(child); return flatChildren; }, []); } const getSpacedChildren = ( children: JSX.Element[] | JSX.Element, space: undefined | SpaceType, axis: 'X' | 'Y', reverse: string, divider: JSX.Element | undefined ): any => { let childrenArray = React.Children.toArray(flattenChildren(children)); childrenArray = reverse === 'reverse' ? [...childrenArray].reverse() : childrenArray; const orientation = axis === 'X' ? 'vertical' : 'horizontal'; // eslint-disable-next-line react-hooks/rules-of-hooks const responsiveQueryContext = React.useContext(ResponsiveQueryContext); const disableCSSMediaQueries = responsiveQueryContext.disableCSSMediaQueries; // If there's a divider, we wrap it with a Box and apply vertical and horizontal margins else we add a spacer Box with height or width if (divider) { const spacingProp: object = { ...(axis === 'X' ? { mx: space } : { my: space }), }; divider = React.cloneElement(divider, { orientation, ...spacingProp, }); childrenArray = childrenArray.map((child: any, index: number) => { return ( <React.Fragment key={child.key ?? `spaced-child-${index}`}> {child} {index < childrenArray.length - 1 && divider} </React.Fragment> ); }); } else { const spacingProp: object = { ...(axis === 'X' ? { width: space } : { height: space }), }; childrenArray = childrenArray.map((child: any, index: number) => { return ( <React.Fragment key={child.key ?? `spaced-child-${index}`}> {child} {disableCSSMediaQueries ? ( index < childrenArray.length - 1 && <Box {...spacingProp} /> ) : ( <></> )} </React.Fragment> ); }); } return childrenArray; }; export default getSpacedChildren;
Template['events'].helpers({
    eventsByArea: function () {
        // fetch() turns the cursors into arrays, so the events can be
        // iterated again for every area (a cursor can only be walked once)
        var areas = Areas.find().fetch();
        var events = Events.find().fetch();
        var eventsByArea = [];

        _.each(areas, function(area){
            var areaEvents = {
                name: area.name,
                events: []
            };
            _.each(events, function(event){
                if(event.area === area._id){
                    areaEvents.events.push(event);
                }
            });
            eventsByArea.push(areaEvents);
        });

        return eventsByArea;
    },
    areas: function(){
        return Areas.find();
    },
    events: function(){
        var params = Session.get('eventsParams');
        return Events.find(params, {sort: {date: 1}});
    },
    ifInArea: function(area, eventArea){
        return area === eventArea;
    },
    isFilter: function(filter){
        return Session.get('filter') === filter;
    }
});

Template['events'].events({
    'click #all-events': function(event, template){
        Session.set('eventsParams', {
            date: { $gte: moment().toDate() }
        });
        Session.set('filter', 'all');
    },
    'click #reg-events': function(event, template){
        Session.set('filter', 'reg');
        Session.set('eventsParams', {
            $and: [
                { users: { $all: [Meteor.userId()] } },
                { date: { $gte: moment().toDate() } }
            ]
        });
    },
    'click #past-events': function(event, template){
        Session.set('filter', 'past');
        Session.set('eventsParams', {
            $and: [
                { users: { $all: [Meteor.userId()] } },
                { date: { $lt: moment().toDate() } }
            ]
        });
    }
});

Template['events'].onRendered(function(){
    Session.set('filter', 'all');
    Session.set('eventsParams', {
        date: { $gte: moment().toDate() }
    });
});
using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace Chatbot_GF.MessageBuilder.Model { public interface IMessage { } public class MessageAttachment :IMessage { public MessageAttachment(Attachment attachment) { // for a carousel message this.attachment = attachment; } public Attachment attachment { get; set; } } public class MessageText : IMessage { public MessageText(string text) { // message with only text this.text = text; } public string text { get; set; } } public class MessageQuickReply : IMessage { public MessageQuickReply(string text, List<SimpleQuickReply> quick_replies) { // message with text and buttons this.text = text; this.quick_replies = quick_replies; } public string text { get; set; } public List<SimpleQuickReply> quick_replies { get; set; } } }
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from __future__ import absolute_import from __future__ import print_function # this is here for compatibility from collections import defaultdict assert defaultdict class KeyedSets: def __init__(self): self.d = dict() def add(self, key, value): if key not in self.d: self.d[key] = set() self.d[key].add(value) def discard(self, key, value): if key in self.d: self.d[key].discard(value) if not self.d[key]: del self.d[key] def __contains__(self, key): return key in self.d def __getitem__(self, key): return self.d.get(key, set()) def pop(self, key): if key in self.d: return self.d.pop(key) return set()
/* * linux_compat.h * * Copyright (C) 2009-2011 by ipoque GmbH * Copyright (C) 2011-13 - ntop.org * * This file is part of nDPI, an open source deep packet inspection * library based on the OpenDPI and PACE technology by ipoque GmbH * * nDPI is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * nDPI is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with nDPI. If not, see <http://www.gnu.org/licenses/>. * */ #ifndef __NDPI_LINUX_COMPAT_H__ #define __NDPI_LINUX_COMPAT_H__ #include "ndpi_define.h" #if defined(__FreeBSD__) || defined(__NetBSD__) #include <machine/endian.h> #if _BYTE_ORDER == _LITTLE_ENDIAN #define __LITTLE_ENDIAN__ 1 #else #define __BIG_ENDIAN__ 1 #endif #endif #pragma pack(push) /* push current alignment to stack */ #pragma pack(1) /* set alignment to 1 byte boundary */ struct ndpi_ethhdr { u_char h_dest[6]; /* destination eth addr */ u_char h_source[6]; /* source ether addr */ u_int16_t h_proto; /* packet type ID field */ }; #pragma pack(pop) /* restore original alignment from stack */ struct ndpi_iphdr { #if defined(__LITTLE_ENDIAN__) u_int8_t ihl:4, version:4; #elif defined(__BIG_ENDIAN__) u_int8_t version:4, ihl:4; #else # error "Byte order must be defined" #endif u_int8_t tos; u_int16_t tot_len; u_int16_t id; u_int16_t frag_off; u_int8_t ttl; u_int8_t protocol; u_int16_t check; u_int32_t saddr; u_int32_t daddr; }; #ifdef WIN32 typedef unsigned char u_char; typedef unsigned short u_short; typedef unsigned int uint; typedef unsigned long u_long; typedef u_char u_int8_t; typedef u_short u_int16_t; typedef uint u_int32_t; #define _WS2TCPIP_H_ /* Avoid compilation problems */ #define HAVE_SIN6_LEN /* IPv6 address */ /* Already defined in WS2tcpip.h */ struct ndpi_win_in6_addr { union { u_int8_t u6_addr8[16]; u_int16_t u6_addr16[8]; u_int32_t u6_addr32[4]; } in6_u; #ifdef s6_addr #undef s6_addr #endif #ifdef s6_addr16 #undef s6_addr16 #endif #ifdef s6_addr32 #undef s6_addr32 #endif #define s6_addr in6_u.u6_addr8 // #define s6_addr16 in6_u.u6_addr16 // #define s6_addr32 in6_u.u6_addr32 }; #define in6_addr win_in6_addr /* Generic extension header. */ struct ndpi_ip6_ext { u_int8_t ip6e_nxt; /* next header. */ u_int8_t ip6e_len; /* length in units of 8 octets. 
*/ }; #define s6_addr16 __u6_addr.__u6_addr16 #define s6_addr32 __u6_addr.__u6_addr32 #else #ifndef __KERNEL__ #include <arpa/inet.h> #endif #endif struct ndpi_in6_addr { union { u_int8_t __u6_addr8[16]; u_int16_t __u6_addr16[8]; u_int32_t __u6_addr32[4]; } __u6_addr; /* 128-bit IP6 address */ }; struct ndpi_ip6_hdr { union { struct ndpi_ip6_hdrctl { u_int32_t ip6_un1_flow; u_int16_t ip6_un1_plen; u_int8_t ip6_un1_nxt; u_int8_t ip6_un1_hlim; } ip6_un1; u_int8_t ip6_un2_vfc; } ip6_ctlun; struct ndpi_in6_addr ip6_src; struct ndpi_in6_addr ip6_dst; }; struct ndpi_tcphdr { u_int16_t source; u_int16_t dest; u_int32_t seq; u_int32_t ack_seq; #if defined(__LITTLE_ENDIAN__) u_int16_t res1:4, doff:4, fin:1, syn:1, rst:1, psh:1, ack:1, urg:1, ece:1, cwr:1; #elif defined(__BIG_ENDIAN__) u_int16_t doff:4, res1:4, cwr:1, ece:1, urg:1, ack:1, psh:1, rst:1, syn:1, fin:1; #else # error "Byte order must be defined" #endif u_int16_t window; u_int16_t check; u_int16_t urg_ptr; }; struct ndpi_udphdr { u_int16_t source; u_int16_t dest; u_int16_t len; u_int16_t check; }; #endif
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import _, api, fields, models from odoo.exceptions import UserError class ProductChangeQuantity(models.TransientModel): _name = "stock.change.product.qty" _description = "Change Product Quantity" product_id = fields.Many2one('product.product', 'Product', required=True) product_tmpl_id = fields.Many2one('product.template', 'Template', required=True) product_variant_count = fields.Integer('Variant Count', related='product_tmpl_id.product_variant_count', readonly=False) new_quantity = fields.Float( 'New Quantity on Hand', default=1, digits='Product Unit of Measure', required=True, help='This quantity is expressed in the Default Unit of Measure of the product.') @api.onchange('product_id') def _onchange_product_id(self): self.new_quantity = self.product_id.qty_available @api.constrains('new_quantity') def check_new_quantity(self): if any(wizard.new_quantity < 0 for wizard in self): raise UserError(_('Quantity cannot be negative.')) def change_product_qty(self): """ Changes the Product Quantity by creating/editing corresponding quant. """ warehouse = self.env['stock.warehouse'].search( [('company_id', '=', self.env.company.id)], limit=1 ) # Before creating a new quant, the quant `create` method will check if # it exists already. If it does, it'll edit its `inventory_quantity` # instead of creating a new one. self.env['stock.quant'].with_context(inventory_mode=True).create({ 'product_id': self.product_id.id, 'location_id': warehouse.lot_stock_id.id, 'inventory_quantity': self.new_quantity, }) return {'type': 'ir.actions.act_window_close'}
/* $Id: ce79c66fc2b39e6970f1c30d336003e0f1812daf $ * $URL: https://dev.almende.com/svn/abms/enterprise-ontology/src/main/java/io/coala/enterprise/transaction/Transaction.java $ * * Part of the EU project Adapt4EE, see http://www.adapt4ee.eu/ * * @license * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * * Copyright (c) 2010-2014 Almende B.V. */ package io.coala.enterprise.transaction; import io.coala.capability.BasicCapabilityStatus; import io.coala.capability.Capability; import io.coala.capability.CapabilityID; import io.coala.enterprise.fact.CoordinationFact; import io.coala.model.ModelComponent; import rx.Observable; /** * {@link Transaction} * * @version $Revision: 279 $ * @author <a href="mailto:[email protected]">Rick</a> * * @param <F> the (super)type of {@link CoordinationFact} * @param <THIS> the concrete type of {@link Transaction} */ public interface Transaction<F extends CoordinationFact> extends Capability<BasicCapabilityStatus>, ModelComponent<CapabilityID> { // <I extends Initiator<F, I>> Class<I> getInitiatorRoleType(); // <E extends Executor<F, E>> Class<E> getExecutorRoleType(); // AgentID getInitiatorID(); // AgentID getExecutorID(); /** @return the coordination facts having occurred in this {@link Transaction} */ Observable<F> facts(); /** @return a new request coordination fact for this {@link Transaction} kind */ // CoordinationFactBuilder<F, ?> createFact(); /** * @param cause the {@link CoordinationFact} that led to this request * @return the {@link LookupFact} request resulting from specified cause */ //F createRequest(final CoordinationFact<?> cause); /** * @param request the {@link CoordinationFact} for which to determine a response * @return the {@link CoordinationFact} response or {@code null} for time-out */ //F createReponse(F request); }
import { NgModule } from "@angular/core" import { SharedModule } from "../shared/shared.module" import { ActivitiesListComponent } from "./activities-list.component" import { ActivityFormComponent } from "./activity-form.component" import { ActivitiesService } from "./activities.service" @NgModule({ imports: [ SharedModule ], declarations: [ ActivitiesListComponent, ActivityFormComponent ], exports: [ ActivitiesListComponent, ActivityFormComponent ], providers: [ ActivitiesService ] }) export class ActivitiesModule {}
/// /// \package astlib /// \file TypedValueEncoder.cpp /// /// \author Marian Krivos <[email protected]> /// \date 20Feb.,2017 /// /// (C) Copyright 2017 R-SYS s.r.o /// All rights reserved. /// #include "TypedValueEncoder.h" #include "astlib/ByteUtils.h" #include "astlib/AsterixItemDictionary.h" #include "astlib/Exception.h" #include <Poco/NumberParser.h> #include <iostream> namespace astlib { bool TypedValueEncoder::encode(const CodecContext& ctx, Poco::UInt64& value, int index) { AsterixItemCode code = ctx.bits.code; /* if (code.value == TRAJECTORY_INTENT_TCP_LONGITUDE_CODE.value) { value = 0x123456; return true; } */ if (!code.value) { //std::cout << "Skipped " << ctx.bits.toString() << std::endl; return false; } if (index == -1 && code.isArray()) { throw Exception("TypedValueEncoder::encode: " + asterixCodeToSymbol(code) + " array expects an index"); } if (index != -1 && !code.isArray()) { throw Exception("TypedValueEncoder::encode: " + asterixCodeToSymbol(code) + " scalar value doesn't expects an index"); } Encoding::ValueType encoding = ctx.bits.encoding.toValue(); bool encoded = false; switch(code.type()) { case PrimitiveType::Boolean: { bool boolean = false; encoded = encodeBoolean(ctx, boolean, index); if (encoded) value = Poco::UInt64(boolean); break; } case PrimitiveType::Real: { double real; encoded = encodeReal(ctx, real, index); if (encoded) { if (ctx.policy.normalizeValues) { double unit = 1.0; switch(ctx.bits.units.toValue()) { case Units::FT: unit = 0.3048; break; case Units::NM: unit = 1852.0; break; case Units::FL: unit = 0.3048 * 100.0; break; } value = Poco::UInt64(real / (ctx.bits.scale * unit)); } else { value = Poco::UInt64(real); } } break; } case PrimitiveType::Integer: { Poco::Int64 integer = 0; encoded = encodeSigned(ctx, integer, index); if (encoded) value = Poco::UInt64(integer); break; } case PrimitiveType::Unsigned: { encoded = encodeUnsigned(ctx, value, index); if (encoding == Encoding::Octal) value = ByteUtils::dec2oct(value); break; } default: { switch (encoding) { case Encoding::Ascii: { std::string str; encoded = encodeString(ctx, str, index); if (encoded) { //std::reverse(str.begin(), str.end()); for(Byte byte: str) { value <<= 8; value |= byte; } } break; } case Encoding::Octal: { Poco::UInt64 aux = 0; encoded = encodeUnsigned(ctx, aux, index); if (encoded) { value = ByteUtils::dec2oct(aux); } break; } case Encoding::SixBitsChar: { std::string str; encoded = encodeString(ctx, str, index); if (encoded) { std::string aux = ByteUtils::toSixBitString(str); std::reverse(aux.begin(), aux.end()); for(Byte byte: aux) { value <<= 8; value |= byte; } } break; } case Encoding::Hex: { std::string str; encoded = encodeString(ctx, str, index); if (encoded) { value = Poco::NumberParser::parseHex64(str); } break; } } break; } } return encoded; } } /* namespace astlib */
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2008-2009 Zuza Software Foundation # # This file is part of the Translate Toolkit. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. """The Translate Toolkit is a Python package that assists in localization of software. See U{http://translate.sourceforge.net/wiki/toolkit/index} or U{http://translate.org.za} for more information. @organization: Zuza Software Foundation @copyright: 2002-2009 Zuza Software Foundation @license: U{GPL <http://www.fsf.org/licensing/licenses/gpl.html>} @group Localization and Localizable File Formats: storage @group Format Converters: convert @group Localisation File Checker: filters @group Localization File Manipulation Tools: tools @group Language Specifications: lang @group Search and String Matching: search @group Services: services @group Miscellaneous: misc source_tree_infrastructure __version__ i18n """
package com.irwin.skin; import org.junit.Test; import static org.junit.Assert.*; /** * Example local unit test, which will execute on the development machine (host). * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } }
using System; using System.Collections.Generic; using PlayingCards; using BlackJackGame; using Microsoft.VisualStudio.TestTools.UnitTesting; namespace Tests { [TestClass] public class TestBlackJack { [TestMethod] public void TestDealCards() { //Initialise variables Deck d = new Deck(); Player p1 = new Player(); Player p2 = new Player(); List<Player> pList = new List<Player>(); pList.Add(p1); pList.Add(p2); //Deal 2 cards to each player foreach (Player p in pList) { d.DealCard(p, 2); } //Assert 2 cards are in each player's hand Assert.AreEqual(2, p1._hand.Count); Assert.AreEqual(2, p2._hand.Count); //Assert 4 cards have been removed from deck Assert.AreEqual(48, d._deck.Count); } [TestMethod] public void TestEmptyDeckDealCards() { //Initialise variables Deck d = new Deck(); Player p1 = new Player(); bool success = d.DealCard(p1, 53); Assert.AreEqual(52, p1._hand.Count); Assert.AreEqual(0, d._deck.Count); Assert.IsFalse(success); } [TestMethod] public void TestIsBusted() { //Double Aces Player p = new Player(); p.Add(new AceOfClubs()); p.Add(new AceOfDiamonds()); var busted = BlackJack.IsBusted(p); Assert.IsFalse(busted); Assert.AreEqual(12, p.HandValue); p.DiscardHand(); //King + Ace p.Add(new AceOfHearts()); p.Add(new KingOfClubs()); busted = BlackJack.IsBusted(p); Assert.IsFalse(busted); Assert.AreEqual(21, p.HandValue); p.DiscardHand(); //Jack + Queen + Ace p.Add(new JackOfClubs()); p.Add(new QueenOfClubs()); p.Add(new AceOfClubs()); busted = BlackJack.IsBusted(p); Assert.IsFalse(busted); Assert.AreEqual(21, p.HandValue); p.DiscardHand(); //Jack + Queen + Ace + Ace p.Add(new JackOfClubs()); p.Add(new QueenOfClubs()); p.Add(new AceOfClubs()); p.Add(new AceOfDiamonds()); busted = BlackJack.IsBusted(p); Assert.IsTrue(busted); Assert.AreEqual(22, p.HandValue); } } }
from test_support import * t = (1, 2, 3) l = [4, 5, 6] class Seq: def __getitem__(self, i): if i >= 0 and i < 3: return i raise IndexError a = -1 b = -1 c = -1 # unpack tuple if verbose: print 'unpack tuple' a, b, c = t if a <> 1 or b <> 2 or c <> 3: raise TestFailed # unpack list if verbose: print 'unpack list' a, b, c = l if a <> 4 or b <> 5 or c <> 6: raise TestFailed # unpack implied tuple if verbose: print 'unpack implied tuple' a, b, c = 7, 8, 9 if a <> 7 or b <> 8 or c <> 9: raise TestFailed # unpack string... fun! if verbose: print 'unpack string' a, b, c = 'one' if a <> 'o' or b <> 'n' or c <> 'e': raise TestFailed # unpack generic sequence if verbose: print 'unpack sequence' a, b, c = Seq() if a <> 0 or b <> 1 or c <> 2: raise TestFailed # single element unpacking, with extra syntax if verbose: print 'unpack single tuple/list' st = (99,) sl = [100] a, = st if a <> 99: raise TestFailed b, = sl if b <> 100: raise TestFailed # now for some failures # unpacking non-sequence if verbose: print 'unpack non-sequence' try: a, b, c = 7 raise TestFailed except TypeError: pass # unpacking tuple of wrong size if verbose: print 'unpack tuple wrong size' try: a, b = t raise TestFailed except ValueError: pass # unpacking list of wrong size if verbose: print 'unpack list wrong size' try: a, b = l raise TestFailed except ValueError: pass # unpacking sequence too short if verbose: print 'unpack sequence too short' try: a, b, c, d = Seq() raise TestFailed except ValueError: pass # unpacking sequence too long if verbose: print 'unpack sequence too long' try: a, b = Seq() raise TestFailed except ValueError: pass # unpacking a sequence where the test for too long raises a different # kind of error class BozoError(Exception): pass class BadSeq: def __getitem__(self, i): if i >= 0 and i < 3: return i elif i == 3: raise BozoError else: raise IndexError # trigger code while not expecting an IndexError if verbose: print 'unpack sequence too long, wrong error' try: a, b, c, d, e = BadSeq() raise TestFailed except BozoError: pass # trigger code while expecting an IndexError if verbose: print 'unpack sequence too short, wrong error' try: a, b, c = BadSeq() raise TestFailed except BozoError: pass
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2021 the original author or authors. */ package org.assertj.core.internal; import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import org.junit.jupiter.api.Test; @SuppressWarnings("deprecation") class IgnoringFieldsComparator_toString_Test { @Test void should_return_description_of_IgnoringFieldsComparator() { // GIVEN IgnoringFieldsComparator actual = new IgnoringFieldsComparator("a", "b"); // THEN assertThat(actual).hasToString(format("field/property by field/property comparator on all fields/properties except [\"a\", \"b\"]%n" + "Comparators used:%n" + "- for elements fields (by type): {Double -> DoubleComparator[precision=1.0E-15], Float -> FloatComparator[precision=1.0E-6], Path -> lexicographic comparator (Path natural order)}")); } }
<?php namespace web\controllers; class IndexController extends \web\ext\Controller { /** * Init */ public function init() { parent::init(); } /** * Main page */ public function actionIndex() { $this->forward('/news/latest'); } /** * Error page */ public function actionError() { // Get error $error = \yii::app()->errorHandler->error; if (empty($error['message'])) { switch ($error['code']) { case 403: $error['message']= \yii::t('app', 'Access forbidden'); break; case 404: $error['message'] = \yii::t('app', 'Requested page not found'); break; default: $error['message'] = \yii::t('app', 'Unknown error'); break; } } // Render view $this->render('error', array( 'error' => $error, )); } }
read.controller('FileController', function($scope, $rootScope, SSHService, $timeout) { var residual = ''; $scope.infoType = true; $scope.connected = false; $scope.skimming = false; $scope.stayAtBottom = true; $scope.lines = []; $scope.init = function() { $scope.infoMessageText = 'Connecting...'; $scope.connection = SSHService.tail( $scope.host, $scope.port, $scope.username, $scope.password, $scope.filename, $scope.maxLines, function(data) { var element = { data: data.toString() }; var content = residual + data.toString(); var parsedContent = content.match(/[^\r\n]+/g); if (parsedContent.length > 1) residual = parsedContent.splice(parsedContent.length - 1, 1)[0]; else residual = ''; var bucket = parsedContent.map(function(line) { return { data: line }; }); $scope.lines = $scope.lines.concat(bucket); var extraLines = $scope.lines.length - $scope.maxLines; if (extraLines > 0) { var toBeDeletedLines = $scope.lines.slice(0, extraLines); $scope.lines = $scope.lines.slice(extraLines, $scope.maxLines); } var filtered = execute($scope.pipe, bucket); if (toBeDeletedLines) { var numOfDeletedFilteredLines = execute($scope.pipe, toBeDeletedLines).length; if (numOfDeletedFilteredLines > 0) $scope.filtered = $scope.filtered.slice(numOfDeletedFilteredLines, $scope.filtered.length - numOfDeletedFilteredLines); } $scope.filtered = $scope.filtered.concat(filtered); $scope.$digest(); $scope.toBottom(); }, function(errData) { console.log('Err: ' + errData); }, function(type, code, signal) { console.log(type, code, signal); if (type == 'ready') { $scope.connected = true; } else if (type == 'close') { $scope.connected = false; } else if (type == 'exit') { $scope.connected = false; } $scope.$digest(); }); }; function doMatch(input, filter) { var match = filter.match(new RegExp('^/(.*?)/([gimy]*)$')); if (!match || match.length < 2) return input.indexOf(filter) != -1; var regex = new RegExp(match[1], match[2]); return input.match(regex); }; function execute(pipe, opt_base) { var filtered = opt_base || $scope.lines; if (pipe.filter) { filtered = filtered.filter(function(line) { delete line.marker; return doMatch(line.data, pipe.filter); }); } if (pipe.marker) { var markers; if (!(pipe.marker instanceof Array)) { markers = [pipe.marker]; } else { markers = pipe.marker; } markers.forEach(function(marker) { if (!marker.filter) return console.log('Err: Marker filter not found'); if (!marker.color) return console.log('Err: Marker color not found'); filtered.forEach(function(item) { if (doMatch(item.data, marker.filter)) item.marker = marker.color; }); }); } return filtered; } function applyProcessed(filtered) { $scope.filtered = filtered; $scope.toBottom(); } $scope.mouseDown = function($event) { if ($event.button != 2) return; console.log($event); $event.preventDefault(); menu.popup($event.pageX, $event.pageY); }; $scope.clearView = function() { $scope.lines = []; $scope.filtered = $scope.lines; }; $scope.getInfo = function() { if ($scope.infoType) { if ($scope.filtered.length == $scope.lines.length) { return $scope.lines.length; } else { return Math.ceil(($scope.filtered.length / $scope.lines.length) * 100) + '%'; } } else { if ($scope.filtered.length == $scope.lines.length) { return $scope.lines.length; } else { return $scope.filtered.length + '/' + $scope.lines.length; } } } $scope.$watch('skiming', function(value) { //applyProcessed(execute($scope.pipe)); }); $scope.$watch('query', function(query) { var pipe; try { pipe = JSON.parse(query); } catch (e) { pipe = {filter: query}; } $scope.pipe = pipe; if 
($scope.pipe.err) { $scope.queryInvalid = true; } else { $scope.queryInvalid = false; applyProcessed(execute($scope.pipe)); } }); });
<?php /** * Job Content * * @package Jobify * @since Jobify 1.0 */ global $job_manager; ?> <div class="single_job_listing"> <?php if ( $post->post_status == 'expired' ) : ?> <div class="job-manager-info"><?php _e( 'This job listing has expired', 'jobify' ); ?></div> <?php else : ?> <?php if ( is_position_filled() ) : ?> <div class="job-manager-error"><?php _e( 'This position has been filled', 'jobify' ); ?></div> <?php endif; ?> <div class="job-overview-content"> <div class="job-overview<?php echo '' == jobify_get_the_company_description() ? ' no-company-desc' : null; ?>"> <h2 class="job-overview-title"><?php _e( 'Overview', 'jobify' ); ?></h2> <?php echo apply_filters( 'the_job_description', get_the_content() ); ?> </div> <?php if ( '' != jobify_get_the_company_description() ) : ?> <div class="job-company-about"> <h2 class="job-overview-title" itemscope itemtype="http://data-vocabulary.org/Organization"><?php printf( __( 'About %s', 'jobify' ), get_the_company_name() ); ?></h2> <?php jobify_the_company_description(); ?> </div> <?php endif; ?> <div class="job-meta"> <ul class="meta"> <li> <?php if ( class_exists( 'Astoundify_Job_Manager_Companies' ) && '' != get_the_company_name() ) : $companies = Astoundify_Job_Manager_Companies::instance(); $company_url = esc_url( $companies->company_url( get_the_company_name() ) ); ?> <a href="<?php echo $company_url; ?>" target="_blank"><?php the_company_logo(); ?></a> <?php else : ?> <?php the_company_logo(); ?> <?php endif; ?> </li> <li class="job-type <?php echo get_the_job_type() ? sanitize_title( get_the_job_type()->slug ) : ''; ?>"><?php the_job_type(); ?></li> <?php if ( ! is_position_filled() && $post->post_status !== 'preview' ) : ?><li><?php get_job_manager_template( 'job-application.php' ); ?></li><?php endif; ?> <li> <h4 class="company-social-title"><?php _e( 'Company Details', 'jobify' ); ?></h4> <?php do_action( 'job_listing_company_details_before' ); ?> <ul class="company-social"> <?php do_action( 'job_listing_company_social_before' ); ?> <?php if ( get_the_company_website() ) : ?> <li><a href="<?php echo get_the_company_website(); ?>" target="_blank" itemprop="url"> <i class="icon-link"></i> <?php _e( 'Website', 'jobify' ); ?> </a></li> <?php endif; ?> <?php if ( get_the_company_twitter() ) : ?> <li><a href="http://twitter.com/<?php echo get_the_company_twitter(); ?>"> <i class="icon-twitter"></i> <?php _e( 'Twitter', 'jobify' ); ?> </a></li> <?php endif; ?> <?php if ( jobify_get_the_company_facebook() ) : ?> <li><a href="http://facebook.com/<?php echo jobify_get_the_company_facebook(); ?>"> <i class="icon-facebook"></i> <?php _e( 'Facebook', 'jobify' ); ?> </a></li> <?php endif; ?> <?php if ( jobify_get_the_company_gplus() ) : ?> <li><a href="http://plus.google.com/<?php echo jobify_get_the_company_gplus(); ?>"> <i class="icon-gplus"></i> <?php _e( 'Google+', 'jobify' ); ?> </a></li> <?php endif; ?> <?php do_action( 'job_listing_company_social_after' ); ?> </ul> <?php if ( class_exists( 'Astoundify_Job_Manager_Companies' ) || get_option( 'job_manager_enable_categories' ) ) : ?> <ul class="company-social"> <?php if ( class_exists( 'Astoundify_Job_Manager_Companies' ) && '' != get_the_company_name() ) : ?> <li> <a href="<?php echo $company_url; ?>" title="<?php printf( __( 'More jobs by %s', 'jobify' ), get_the_company_name() ); ?>"><i class="icon-newspaper"></i> <?php _e( 'More Jobs', 'jobify' ); ?></a> </li> <?php endif; ?> <?php if ( get_option( 'job_manager_enable_categories' ) ) : $categories = get_the_terms( $post->ID, 
'job_listing_category' ); if ( $categories ) : $category = current( $categories ); ?> <?php if ( class_exists( 'WP_Job_Manager_Cat_Colors' ) ) : ?> <li><a href="<?php echo get_term_link( $category, 'job_listing_category' ); ?>" class="job-category <?php echo get_the_job_category() ? sanitize_title( get_the_job_category()->slug ) : ''; ?>"><?php the_job_category(); ?></a></li> <?php else : ?> <li><a href="<?php echo get_term_link( $category, 'job_listing_category' ); ?>"><i class="icon-tag"></i> <?php echo $category->name; ?></a></li> <?php endif; ?> <?php endif; ?> <?php endif; ?> </ul> <?php endif; ?> <?php get_template_part( 'content-share' ); ?> <?php do_action( 'job_listing_company_details_after' ); ?> </li> </ul> </div> </div> <?php endif; ?> </div>
// // RateLimiter.cs // // Authors: // Alan McGovern [email protected] // // Copyright (C) 2006 Alan McGovern // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Collections.Generic; using System.Text; using System.Threading; namespace MonoTorrent.Client { class RateLimiter : IRateLimiter { bool unlimited; int savedError; int chunks; public bool Unlimited { get { return unlimited; } } public RateLimiter() { UpdateChunks(0, 0); } public void UpdateChunks(int maxRate, int actualRate) { unlimited = maxRate == 0; if (unlimited) return; // From experimentation, i found that increasing by 5% gives more accuate rate limiting // for peer communications. For disk access and whatnot, a 5% overshoot is fine. maxRate = (int)(maxRate * 1.05); int errorRateDown = maxRate - actualRate; int delta = (int)(0.4 * errorRateDown + 0.6 * this.savedError); this.savedError = errorRateDown; int increaseAmount = (int)((maxRate + delta) / ConnectionManager.ChunkLength); Interlocked.Add(ref this.chunks, increaseAmount); if (this.chunks > (maxRate * 1.2 / ConnectionManager.ChunkLength)) Interlocked.Exchange(ref this.chunks, (int)(maxRate * 1.2 / ConnectionManager.ChunkLength)); if (this.chunks < (maxRate / ConnectionManager.ChunkLength / 2)) Interlocked.Exchange(ref this.chunks, (maxRate / ConnectionManager.ChunkLength / 2)); if (maxRate == 0) chunks = 0; } public bool TryProcess(int amount) { if (Unlimited) return true; int c; do { c = chunks; if (c < amount) return false; } while (Interlocked.CompareExchange(ref chunks, c - amount, c) != c); return true; } } }
using System; using System.Diagnostics; using System.Threading; namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// /// <summary> /// A <code>DocumentsWriterPerThreadPool<code> that selects thread states at random. /// /// @lucene.internal /// @lucene.experimental /// </summary> internal class RandomDocumentsWriterPerThreadPool : DocumentsWriterPerThreadPool { private readonly ThreadState[] States; private readonly Random Random; private readonly int MaxRetry; public RandomDocumentsWriterPerThreadPool(int maxNumPerThreads, Random random) : base(maxNumPerThreads) { Debug.Assert(MaxThreadStates >= 1); States = new ThreadState[maxNumPerThreads]; this.Random = new Random(random.Next()); this.MaxRetry = 1 + random.Next(10); } public override ThreadState GetAndLock(Thread requestingThread, DocumentsWriter documentsWriter) { ThreadState threadState = null; if (ActiveThreadState == 0) { lock (this) { if (ActiveThreadState == 0) { threadState = States[0] = NewThreadState(); return threadState; } } } Debug.Assert(ActiveThreadState > 0); for (int i = 0; i < MaxRetry; i++) { int ord = Random.Next(ActiveThreadState); lock (this) { threadState = States[ord]; Debug.Assert(threadState != null); } if (threadState.TryLock()) { return threadState; } if (Random.Next(20) == 0) { break; } } /* * only try to create a new threadstate if we can not lock the randomly * selected state. this is important since some tests rely on a single * threadstate in the single threaded case. Eventually it would be nice if * we would not have this limitation but for now we just make sure we only * allocate one threadstate if indexing is single threaded */ lock (this) { ThreadState newThreadState = NewThreadState(); if (newThreadState != null) // did we get a new state? { threadState = States[ActiveThreadState - 1] = newThreadState; //Debug.Assert(threadState.HeldByCurrentThread); return threadState; } // if no new state is available lock the random one } Debug.Assert(threadState != null); threadState.@Lock(); return threadState; } } }
--TEST-- HTML_FormPersister: in_array() workaround (second case) --FILE-- <?php require dirname(__FILE__) . '/init.php'; ob_start(array('HTML_FormPersister', 'ob_formpersisterhandler')); $_POST['_currencies'] = array("0"); ?> <form method="post"> <input type="checkbox" name="_currencies[]" value="RUB" /> <input type="checkbox" name="_currencies[]" value="EUR" /> <input type="checkbox" name="_currencies[]" value="USD" /> </form> <form method="post"> <input type="checkbox" name="_currencies[]" value="0" /> <input type="checkbox" name="_currencies[]" value="1" /> </form> --EXPECT-- <form method="post" action> <input type="checkbox" name="_currencies[]" value="RUB" /> <input type="checkbox" name="_currencies[]" value="EUR" /> <input type="checkbox" name="_currencies[]" value="USD" /> </form> <form method="post" action> <input type="checkbox" name="_currencies[]" value="0" checked="checked" /> <input type="checkbox" name="_currencies[]" value="1" /> </form>
public class Wave { private readonly short qntOfEnemies; private readonly float[] percentageOfEachEnemy; private readonly float respawnTime; public Wave(short qntOfEnemies, float[] percentageOfEachEnemy, float respawnTime) { this.qntOfEnemies = qntOfEnemies; this.percentageOfEachEnemy = percentageOfEachEnemy; this.respawnTime = respawnTime; } public short getQntOfEnemies() { return qntOfEnemies; } public float[] getPercentageOfEachEnemy() { return percentageOfEachEnemy; } public float getRespawnTime() { return respawnTime; } }
/** * ScriptDev2 is an extension for mangos providing enhanced features for * area triggers, creatures, game objects, instances, items, and spells beyond * the default database scripting in mangos. * * Copyright (C) 2006-2013 ScriptDev2 <http://www.scriptdev2.com/> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * * World of Warcraft, and all World of Warcraft or Warcraft art, images, * and lore are copyrighted by Blizzard Entertainment, Inc. */ /** * ScriptData * SDName: Molten_Core * SD%Complete: 25 * SDComment: None * SDCategory: Molten Core * EndScriptData */ /** * ContentData * go_molten_core_rune * EndContentData */ #include "precompiled.h" #include "molten_core.h" /*###### ## go_molten_core_rune ######*/ bool GOUse_go_molten_core_rune(Player* /*pPlayer*/, GameObject* pGo) { ScriptedInstance* pInstance = (ScriptedInstance*)pGo->GetInstanceData(); if (!pInstance) { return true; } for (uint8 i = 0; i < MAX_MOLTEN_RUNES; ++i) { if (pGo->GetEntry() == m_aMoltenCoreRunes[i].m_uiRuneEntry) { // check if boss is already dead - if not return true if (pInstance->GetData(m_aMoltenCoreRunes[i].m_uiType) != DONE) { return true; } pInstance->SetData(m_aMoltenCoreRunes[i].m_uiType, SPECIAL); return false; } } return true; } void AddSC_molten_core() { Script* pNewScript; pNewScript = new Script; pNewScript->Name = "go_molten_core_rune"; pNewScript->pGOUse = &GOUse_go_molten_core_rune; pNewScript->RegisterSelf(); }
using System.Collections.Generic; namespace meridian.diagram { public enum ElementType { None, Proto, Field, Inherit, Aggregation, Composition, InlineComposition, Association, Entity, View, Set, Foreign, Primary, StoredProcedure } public static class ElementTypeMapper { static ElementTypeMapper() { m_Mapper["NONE"] = ElementType.None; m_Mapper["PROTO"] = ElementType.Proto; m_Mapper["FIELD"] = ElementType.Field; m_Mapper["INHERIT"] = ElementType.Inherit; m_Mapper["AGGREGATION"] = ElementType.Aggregation; m_Mapper["COMPOSITION"] = ElementType.Composition; m_Mapper["INLINECOMPOSITION"] = ElementType.InlineComposition; m_Mapper["ASSOCIATION"] = ElementType.Association; m_Mapper["ENTITY"] = ElementType.Entity; m_Mapper["VIEW"] = ElementType.View; m_Mapper["SET"] = ElementType.Set; m_Mapper["FOREIGN"] = ElementType.Foreign; m_Mapper["PRIMARY"] = ElementType.Primary; } public static bool IsType(string _type) { return m_Mapper.IndexOfKey(_type.Trim().ToString().ToUpper()) != -1; } public static ElementType Map(string _type) { return m_Mapper[_type.Trim().ToString().ToUpper()]; } private static SortedList<string, ElementType> m_Mapper = new SortedList<string,ElementType>(); } }
# Copyright 2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from nova import db from nova.objects import security_group from nova.objects import security_group_rule from nova.tests.objects import test_objects from nova.tests.objects import test_security_group fake_rule = { 'created_at': None, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 1, 'protocol': 'tcp', 'from_port': 22, 'to_port': 22, 'cidr': '0.0.0.0/0', } class _TestSecurityGroupRuleObject(object): def test_get_by_id(self): with mock.patch.object(db, 'security_group_rule_get') as sgrg: sgrg.return_value = fake_rule rule = security_group_rule.SecurityGroupRule.get_by_id( self.context, 1) for field in fake_rule: if field == 'cidr': self.assertEqual(fake_rule[field], str(rule[field])) else: self.assertEqual(fake_rule[field], rule[field]) sgrg.assert_called_with(self.context, 1) def test_get_by_security_group(self): secgroup = security_group.SecurityGroup() secgroup.id = 123 rule = dict(fake_rule) rule['grantee_group'] = dict(test_security_group.fake_secgroup, id=123) stupid_method = 'security_group_rule_get_by_security_group' with mock.patch.object(db, stupid_method) as sgrgbsg: sgrgbsg.return_value = [rule] rules = (security_group_rule.SecurityGroupRuleList. get_by_security_group(self.context, secgroup)) self.assertEqual(1, len(rules)) self.assertEqual(123, rules[0].grantee_group.id) class TestSecurityGroupRuleObject(test_objects._LocalTest, _TestSecurityGroupRuleObject): pass class TestSecurityGroupRuleObjectRemote(test_objects._RemoteTest, _TestSecurityGroupRuleObject): pass
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import frappe import smtplib import email.utils import _socket from frappe.utils import cint from frappe import _ def send(email, append_to=None): """send the message or add it to Outbox Email""" if frappe.flags.in_test: frappe.flags.sent_mail = email.as_string() return if frappe.are_emails_muted(): frappe.msgprint(_("Emails are muted")) return try: smtpserver = SMTPServer(append_to=append_to) smtpserver.replace_sender_in_email(email) smtpserver.sess.sendmail(email.sender, email.recipients + (email.cc or []), email.as_string()) except smtplib.SMTPSenderRefused: frappe.msgprint(_("Invalid login or password")) raise except smtplib.SMTPRecipientsRefused: frappe.msgprint(_("Invalid recipient address")) raise def get_outgoing_email_account(raise_exception_not_set=True, append_to=None): """Returns outgoing email account based on `append_to` or the default outgoing account. If default outgoing account is not found, it will try getting settings from `site_config.json`.""" if not getattr(frappe.local, "outgoing_email_account", None): frappe.local.outgoing_email_account = {} if not frappe.local.outgoing_email_account.get(append_to or "default"): email_account = None if append_to: email_account = _get_email_account({"enable_outgoing": 1, "append_to": append_to}) if not email_account: email_account = get_default_outgoing_email_account(raise_exception_not_set=raise_exception_not_set) if not email_account and raise_exception_not_set: frappe.throw(_("Please setup default Email Account from Setup > Email > Email Account"), frappe.OutgoingEmailError) if email_account: email_account.default_sender = email.utils.formataddr((email_account.name, email_account.get("sender") or email_account.get("email_id"))) frappe.local.outgoing_email_account[append_to or "default"] = email_account return frappe.local.outgoing_email_account[append_to or "default"] def get_default_outgoing_email_account(raise_exception_not_set=True): email_account = _get_email_account({"enable_outgoing": 1, "default_outgoing": 1}) if not email_account and frappe.conf.get("mail_server"): # from site_config.json email_account = frappe.new_doc("Email Account") email_account.update({ "smtp_server": frappe.conf.get("mail_server"), "smtp_port": frappe.conf.get("mail_port"), "use_tls": cint(frappe.conf.get("use_ssl") or 0), "email_id": frappe.conf.get("mail_login"), "password": frappe.conf.get("mail_password"), "sender": frappe.conf.get("auto_email_id", "[email protected]") }) email_account.from_site_config = True email_account.name = frappe.conf.get("email_sender_name") or "Frappe" if not email_account and not raise_exception_not_set: return None if frappe.are_emails_muted(): # create a stub email_account = frappe.new_doc("Email Account") email_account.update({ "sender": "[email protected]" }) return email_account def _get_email_account(filters): name = frappe.db.get_value("Email Account", filters) return frappe.get_doc("Email Account", name) if name else None class SMTPServer: def __init__(self, login=None, password=None, server=None, port=None, use_ssl=None, append_to=None): # get defaults from mail settings self._sess = None self.email_account = None self.server = None if server: self.server = server self.port = port self.use_ssl = cint(use_ssl) self.login = login self.password = password else: self.setup_email_account(append_to) def setup_email_account(self, append_to=None): self.email_account = 
get_outgoing_email_account(raise_exception_not_set=False, append_to=append_to) if self.email_account: self.server = self.email_account.smtp_server self.login = getattr(self.email_account, "login_id", None) \ or self.email_account.email_id self.password = self.email_account.password self.port = self.email_account.smtp_port self.use_ssl = self.email_account.use_tls self.sender = self.email_account.email_id self.always_use_account_email_id_as_sender = self.email_account.get("always_use_account_email_id_as_sender") def replace_sender_in_email(self, email): if hasattr(self, "always_use_account_email_id_as_sender") and \ cint(self.always_use_account_email_id_as_sender) and self.login: if not email.reply_to: email.reply_to = email.sender email.sender = self.login @property def sess(self): """get session""" if self._sess: return self._sess # check if email server specified if not getattr(self, 'server'): err_msg = _('Email Account not setup. Please create a new Email Account from Setup > Email > Email Account') frappe.msgprint(err_msg) raise frappe.OutgoingEmailError, err_msg try: if self.use_ssl and not self.port: self.port = 587 self._sess = smtplib.SMTP((self.server or "").encode('utf-8'), cint(self.port) or None) if not self._sess: err_msg = _('Could not connect to outgoing email server') frappe.msgprint(err_msg) raise frappe.OutgoingEmailError, err_msg if self.use_ssl: self._sess.ehlo() self._sess.starttls() self._sess.ehlo() if self.login and self.password: ret = self._sess.login((self.login or "").encode('utf-8'), (self.password or "").encode('utf-8')) # check if logged correctly if ret[0]!=235: frappe.msgprint(ret[1]) raise frappe.OutgoingEmailError, ret[1] return self._sess except _socket.error: # Invalid mail server -- due to refusing connection frappe.throw(_('Invalid Outgoing Mail Server or Port')) except smtplib.SMTPAuthenticationError: frappe.throw(_("Invalid login or password")) except smtplib.SMTPException: frappe.msgprint(_('Unable to send emails at this time')) raise
from django.conf import settings from django.contrib.sites.models import Site from django.db import models from django.db.models import loading from django.utils.translation import ugettext_lazy as _ from satchmo.caching import cache_key, cache_get, cache_set, NotCachedError from satchmo.caching.models import CachedObjectMixin from django.contrib.sites.models import Site import logging from django.db import transaction log = logging.getLogger('configuration.models') __all__ = ['SettingNotSet', 'Setting', 'LongSetting', 'find_setting'] def _safe_get_siteid(site): if not site: try: site = Site.objects.get_current() except: transaction.rollback() if site and site.id: siteid = site.id else: siteid = settings.SITE_ID else: siteid = site.id transaction.commit() return siteid _safe_get_siteid=transaction.commit_manually(_safe_get_siteid) def find_setting(group, key, site=None): """Get a setting or longsetting by group and key, cache and return it.""" siteid = _safe_get_siteid(site) ck = cache_key('Setting', siteid, group, key) setting = None try: setting = cache_get(ck) except NotCachedError, nce: if loading.app_cache_ready(): try: setting = Setting.objects.get(site__id__exact=siteid, key__exact=key, group__exact=group) except Setting.DoesNotExist: # maybe it is a "long setting" try: setting = LongSetting.objects.get(site__id__exact=siteid, key__exact=key, group__exact=group) except LongSetting.DoesNotExist: pass cache_set(ck, value=setting) if not setting: raise SettingNotSet(key, cachekey=ck) return setting class SettingNotSet(Exception): def __init__(self, k, cachekey=None): self.key = k self.cachekey = cachekey class SettingManager(models.Manager): def get_query_set(self): all = super(SettingManager, self).get_query_set() siteid = _safe_get_siteid(None) return all.filter(site__id__exact=siteid) class Setting(models.Model, CachedObjectMixin): site = models.ForeignKey(Site, verbose_name=_('Site')) group = models.CharField(max_length=100, blank=False, null=False) key = models.CharField(max_length=100, blank=False, null=False) value = models.CharField(max_length=255, blank=True) objects = SettingManager() def __nonzero__(self): return self.id is not None def cache_key(self, *args, **kwargs): return cache_key('Setting', self.site, self.group, self.key) def delete(self): self.cache_delete() super(Setting, self).delete() def save(self, force_insert=False, force_update=False): try: site = self.site except Site.DoesNotExist: self.site = Site.objects.get_current() super(Setting, self).save(force_insert=force_insert, force_update=force_update) self.cache_set() class Meta: unique_together = ('site', 'group', 'key') class LongSettingManager(models.Manager): def get_query_set(self): all = super(LongSettingManager, self).get_query_set() siteid = _safe_get_siteid(None) return all.filter(site__id__exact=siteid) class LongSetting(models.Model, CachedObjectMixin): """A Setting which can handle more than 255 characters""" site = models.ForeignKey(Site, verbose_name=_('Site')) group = models.CharField(max_length=100, blank=False, null=False) key = models.CharField(max_length=100, blank=False, null=False) value = models.TextField(blank=True) objects = LongSettingManager() def __nonzero__(self): return self.id is not None def cache_key(self, *args, **kwargs): # note same cache pattern as Setting. This is so we can look up in one check. # they can't overlap anyway, so this is moderately safe. At the worst, the # Setting will override a LongSetting. 
return cache_key('Setting', self.site, self.group, self.key) def delete(self): self.cache_delete() super(LongSetting, self).delete() def save(self, force_insert=False, force_update=False): try: site = self.site except Site.DoesNotExist: self.site = Site.objects.get_current() super(LongSetting, self).save(force_insert=force_insert, force_update=force_update) self.cache_set() class Meta: unique_together = ('site', 'group', 'key')
# Copyright 2018,2019,2020,2021 Sony Corporation. # Copyright 2021 Sony Group Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .graph_converter import (GraphConverter, FunctionModifier) from .batch_normalization_folding import (BatchNormalizationFoldingModifier, AddBiasModifier, BatchNormalizationFoldingModifierInner, BatchNormalizationFoldingOppositeModifierInner) from .batch_normalization_self_folding import BatchNormalizationSelfFoldingModifier from .fused_batch_normalization import FusedBatchNormalizationModifier from .unfused_batch_normalization import UnfusedBatchNormalizationModifier from .channel_last import ChannelLastModifier from .channel_first import ChannelFirstModifier from .remove_function import RemoveFunctionModifier from .batch_norm_batchstat import BatchNormBatchStatModifier from .test_mode import TestModeModifier from .identity import IdentityModifier from .no_grad import NoGradModifier
/* * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.oracle.graal.api.replacements; import java.lang.reflect.Type; /** * A registry for {@link MethodSubstitution}s. */ public interface MethodSubstitutionRegistry { /** * Gets the type representing the receiver (i.e., {@code this}) argument in a non-static method. */ Class<?> getReceiverType(); /** * Registers a substitution method. * * @param substituteDeclaringClass the class declaring the substitute method * @param name the name of both the original and substitute method * @param argumentTypes the argument types of the method. Element 0 of this array must be * {@link #getReceiverType()} iff the method is non-static. Upon returning, element 0 * will have been rewritten to {@code declaringClass}. */ default void registerMethodSubstitution(Class<?> substituteDeclaringClass, String name, Type... argumentTypes) { registerMethodSubstitution(substituteDeclaringClass, name, name, argumentTypes); } /** * Registers a substitution method. * * @param substituteDeclaringClass the class declaring the substitute method * @param name the name of the original method * @param substituteName the name of the substitute method * @param argumentTypes the argument types of the method. Element 0 of this array must be * {@link #getReceiverType()} iff the method is non-static. Upon returning, element 0 * will have been rewritten to {@code declaringClass}. */ void registerMethodSubstitution(Class<?> substituteDeclaringClass, String name, String substituteName, Type... argumentTypes); }
import Reflux from 'reflux'; import AuthActions from 'actions/auth_actions'; export default Reflux.createStore({ listenables: AuthActions, init: function () { this.message = ''; }, onSubmitError: function (resp) { console.log(resp); var body = resp.entity; this.message = 'This is embarrassing... Looks like we are having technical difficulties, try again later.'; if (typeof body === 'object' && body.reason) { this.message = body.reason; } this.trigger(this.message); } });
package org.wonderbeat.home; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; import org.wonderbeat.home.service.UserService; import javax.inject.Inject; import javax.validation.Valid; @Controller class HomeController { @Inject private UserService userService; @RequestMapping(value = "/", method = RequestMethod.GET) public ModelAndView index() { return new ModelAndView("index").addObject("users", userService.usersStatuses()); } @RequestMapping(value = "/user/add", method = RequestMethod.POST) public String addUser(@ModelAttribute @Valid UserCommand command) { userService.createUser(command.getUserId()); return "redirect:/"; } @RequestMapping(value = "/user/block", method = RequestMethod.POST) public String blockUser(@ModelAttribute @Valid UserCommand command) { userService.blockUser(command.getUserId()); return "redirect:/"; } void setUserService(UserService userService) { this.userService = userService; } }
module.exports = { // At most 100 characters per line printWidth: 100, // Indent with 2 spaces tabWidth: 2, // Use spaces instead of tabs useTabs: false, // No semicolons at the end of statements semi: false, // Use single quotes singleQuote: true, // Only quote object keys when required quoteProps: 'as-needed', // Use double quotes instead of single quotes in JSX jsxSingleQuote: false, // No trailing commas trailingComma: 'none', // Print spaces between brackets in object literals bracketSpacing: true, // Put the closing bracket of multi-line JSX elements on its own line jsxBracketSameLine: false, // Always wrap arrow function parameters in parentheses, even with a single parameter arrowParens: 'always', // Format the full contents of each file rangeStart: 0, rangeEnd: Infinity, // Only format files that contain a @prettier pragma at the top requirePragma: true, // Do not automatically insert a @prettier pragma at the top of files insertPragma: false, // Use the default wrapping rules for prose proseWrap: 'preserve', // Respect CSS display when deciding whether HTML whitespace may be wrapped htmlWhitespaceSensitivity: 'css', // Use lf for line endings endOfLine: 'lf' }
import synapse.lib.cli as s_cli

import synapse.cmds.boss as s_cmds_boss
import synapse.cmds.cron as s_cmds_cron
import synapse.cmds.hive as s_cmds_hive
import synapse.cmds.cortex as s_cmds_cortex
import synapse.cmds.trigger as s_cmds_trigger

cmdsbycell = {
    'cell': (
        s_cmds_hive.HiveCmd,
        s_cmds_boss.PsCmd,
        s_cmds_boss.KillCmd,
    ),

    'cortex': (
        s_cmds_cron.At,
        s_cmds_cron.Cron,
        s_cmds_cortex.Log,
        s_cmds_boss.PsCmd,
        s_cmds_boss.KillCmd,
        s_cmds_hive.HiveCmd,
        s_cmds_cortex.StormCmd,
        s_cmds_trigger.Trigger,
    ),
}

async def getItemCmdr(cell, outp=None, color=False, **opts):
    '''
    Construct and return a cmdr for the given remote cell.

    Args:
        cell: Cell proxy being commanded.
        outp: Output helper object.
        color (bool): If true, enable colorized output.
        **opts: Additional options pushed into the Cmdr locs.

    Examples:
        Get the cmdr for a proxy::

            cmdr = await getItemCmdr(foo)

    Returns:
        s_cli.Cli: A Cli instance with Cmds loaded into it.
    '''
    cmdr = await s_cli.Cli.anit(cell, outp=outp)
    if color:
        cmdr.colorsenabled = True
    typename = await cell.getCellType()

    for ctor in cmdsbycell.get(typename, ()):
        cmdr.addCmdClass(ctor)

    return cmdr

async def runItemCmdr(item, outp=None, color=False, **opts):
    '''
    Create a cmdr for the given item and run the cmd loop.

    Args:
        item: Cell proxy being commanded.
        outp: Output helper object.
        color (bool): If true, enable colorized output.
        **opts: Additional options pushed into the Cmdr locs.

    Notes:
        This function does not return while the command loop is run.

    Examples:
        Run the Cmdr for a proxy::

            await runItemCmdr(foo)

    Returns:
        None: This function returns None.
    '''
    cmdr = await getItemCmdr(item, outp=outp, color=color, **opts)
    await cmdr.runCmdLoop()
$(document).ready(function(){ $('a[href^="#"]').on('click',function (e) { e.preventDefault(); var target = this.hash; var $target = $(target); $('html, body').stop().animate({ 'scrollTop': $target.offset().top - 60 }, 900, 'swing', function () { window.location.hash = target; }); }); });
package ro.sci.gms.dao; import java.util.Collection; import ro.sci.gms.domain.Appointment; import ro.sci.gms.domain.User; public interface AppointmentDAO extends BaseDAO<Appointment>{ Collection<Appointment> searchById(String query); Collection<Appointment> getAll(User user); Collection<Appointment> search(String query); }
#include <filezilla.h> #include "graphics.h" CWindowTinter::CWindowTinter(wxWindow& wnd) : m_wnd(wnd) { } void CWindowTinter::SetBackgroundTint(wxColour const& tint) { if (!m_originalColor.IsOk()) { m_originalColor = m_wnd.GetBackgroundColour(); } wxColour const newColour = AlphaComposite_Over(m_originalColor, tint); if (newColour != m_wnd.GetBackgroundColour()) { if (m_wnd.SetBackgroundColour(newColour)) { m_wnd.Refresh(); } } } void Overlay(wxBitmap& bg, wxBitmap const& fg) { if (!bg.IsOk() || !fg.IsOk()) { return; } wxImage foreground = fg.ConvertToImage(); if (!foreground.HasAlpha()) { foreground.InitAlpha(); } wxImage background = bg.ConvertToImage(); if (!background.HasAlpha()) { background.InitAlpha(); } if (foreground.GetSize() != background.GetSize()) { foreground.Rescale(background.GetSize().x, background.GetSize().y, wxIMAGE_QUALITY_HIGH); } unsigned char* bg_data = background.GetData(); unsigned char* bg_alpha = background.GetAlpha(); unsigned char* fg_data = foreground.GetData(); unsigned char* fg_alpha = foreground.GetAlpha(); unsigned char* bg_end = bg_data + background.GetWidth() * background.GetHeight() * 3; while (bg_data != bg_end) { AlphaComposite_Over_Inplace( *bg_data, *(bg_data + 1), *(bg_data + 2), *bg_alpha, *fg_data, *(fg_data + 1), *(fg_data + 2), *fg_alpha); bg_data += 3; fg_data += 3; ++bg_alpha; ++fg_alpha; } #ifdef __WXMAC__ bg = wxBitmap(background, -1, bg.GetScaleFactor()); #else bg = wxBitmap(background, -1); #endif }
import pytest import taichi as ti @ti.all_archs def test_POT(): val = ti.field(ti.i32) n = 4 m = 8 p = 16 ti.root.dense(ti.i, n).dense(ti.j, m).dense(ti.k, p).place(val) assert val.shape == (n, m, p) assert val.dtype == ti.i32 @ti.all_archs def test_non_POT(): val = ti.field(ti.i32) n = 3 m = 7 p = 11 blk1 = ti.root.dense(ti.i, n) blk2 = blk1.dense(ti.j, m) blk3 = blk2.dense(ti.k, p) blk3.place(val) assert val.shape == (n, m, p) assert val.dtype == ti.i32 @ti.all_archs def test_unordered(): val = ti.field(ti.i32) n = 3 m = 7 p = 11 blk1 = ti.root.dense(ti.k, n) blk2 = blk1.dense(ti.i, m) blk3 = blk2.dense(ti.j, p) blk3.place(val) assert val.dtype == ti.i32 assert val.shape == (n, m, p) assert val.snode.parent(0) == val.snode assert val.snode.parent() == blk3 assert val.snode.parent(1) == blk3 assert val.snode.parent(2) == blk2 assert val.snode.parent(3) == blk1 assert val.snode.parent(4) == ti.root assert val.snode in blk3.get_children() assert blk3 in blk2.get_children() assert blk2 in blk1.get_children() assert blk1 in ti.root.get_children() expected_str = f'ti.root => dense {[n]} => dense {[n, m]}' \ f' => dense {[n, m, p]} => place {[n, m, p]}' assert str(val.snode) == expected_str @ti.all_archs def test_unordered_matrix(): val = ti.Matrix.field(3, 2, ti.i32) n = 3 m = 7 p = 11 blk1 = ti.root.dense(ti.k, n) blk2 = blk1.dense(ti.i, m) blk3 = blk2.dense(ti.j, p) blk3.place(val) assert val.shape == (n, m, p) assert val.dtype == ti.i32 assert val.loop_range().snode.parent(0) == val.loop_range().snode assert val.loop_range().snode.parent() == blk3 assert val.loop_range().snode.parent(1) == blk3 assert val.loop_range().snode.parent(2) == blk2 assert val.loop_range().snode.parent(3) == blk1 assert val.loop_range().snode.parent(4) == ti.root @pytest.mark.filterwarnings('ignore') @ti.host_arch_only def test_deprecated(): val = ti.field(ti.f32) mat = ti.Matrix.field(3, 2, ti.i32) n = 3 m = 7 p = 11 blk1 = ti.root.dense(ti.k, n) blk2 = blk1.dense(ti.i, m) blk3 = blk2.dense(ti.j, p) blk3.place(val, mat) assert val.dim() == 3 assert val.data_type() == ti.f32 assert val.shape() == (n, m, p) assert mat.dim() == 3 assert mat.data_type() == ti.i32 assert mat.shape() == (n, m, p) assert blk3.dim() == 3 assert blk3.shape() == (n, m, p) assert val.snode().parent() == blk3 assert mat.snode().parent() == blk3 @ti.all_archs def test_parent_exceeded(): val = ti.field(ti.f32) m = 7 n = 3 blk1 = ti.root.dense(ti.i, m) blk2 = blk1.dense(ti.j, n) blk2.place(val) assert val.snode.parent() == blk2 assert val.snode.parent(2) == blk1 assert val.snode.parent(3) == ti.root assert val.snode.parent(4) == None assert val.snode.parent(42) == None assert ti.root.parent() == None
# Copyright 2013 Viewfinder Inc. All Rights Reserved.

"""Look for old and unused provisioned devices.

The input is one of:
--apple: Apple provisioning profile (plist file downloaded from developer.apple.com)
--testflight: TestFlight list of devices (select all users on testflightapp.com/dashboard/team/all/,
  then click Action and "Export iOS Devices")

Searches back through the processed "device details" logs (dump of all device_dict seen on the
backends) for the latest timestamp at which each UDID was seen.

Each device UDID falls into one of three categories:
- missing: not found in the backend logs going back --search_days days (default 120)
- inactive: found, but not in the last --inactive_days days (default 60)
- active: found and seen in the last --inactive_days days (default 60)

The first two categories should probably be removed from the provisioning profile.
"""

__author__ = '[email protected] (Marc Berhault)'

import json
import logging
import os
import sys
import time

from tornado import gen, options
from viewfinder.backend.base import constants, main, util
from viewfinder.backend.logs import logs_util
from viewfinder.backend.storage.object_store import ObjectStore
from viewfinder.backend.storage import store_utils
from viewfinder.backend.services.provisioning_profiles import AppleProvisioningProfile, TestFlightDevices

options.define('apple', default=None, help='File path to the Apple Provisioning Profile')
options.define('testflight', default=None, help='File path to the TestFlight list of devices')
options.define('search_days', default=120, help='Search back this many days')
options.define('inactive_days', default=60, help='Devices not seen in this many days are considered inactive')


@gen.coroutine
def GetFileList(merged_store, marker):
  """Fetch the list of file names from S3."""
  base_path = 'processed_data/device_details/'
  marker = os.path.join(base_path, marker)
  file_list = yield gen.Task(store_utils.ListAllKeys, merged_store, prefix=base_path, marker=marker)
  file_list.sort()
  raise gen.Return(file_list)


@gen.coroutine
def GetUDIDTimestamps(merged_store, files):
  """Iterate over all files and build a dict of UDID -> last-seen-timestamp."""
  last_seen = {}
  for f in files:
    # Let exceptions surface.
    contents = yield gen.Task(merged_store.Get, f)
    dev_list = json.loads(contents)
    for entry in dev_list:
      timestamp = entry['timestamp']
      # The device dict is found under different keys based on the operation (ping vs update user/device)
      device_dict = entry['request'].get('device', entry['request'].get('device_dict', None))
      if not device_dict:
        # Some User.RegisterOperation entries do not have a device_dict.
        continue
      udid = device_dict.get('test_udid', None)
      if not udid:
        continue
      prev_seen = last_seen.get(udid, 0)
      if timestamp > prev_seen:
        last_seen[udid] = timestamp
  raise gen.Return(last_seen)


@gen.engine
def Start(callback):
  assert options.options.apple or options.options.testflight, \
    'You must specify exactly one of --apple or --testflight'
  assert options.options.search_days > 0

  # Exceptions are surfaced from both file parsers.
  if options.options.apple:
    assert not options.options.testflight, 'You must specify exactly one of --apple or --testflight'
    devices = AppleProvisioningProfile(options.options.apple).Devices()
  else:
    devices = TestFlightDevices(options.options.testflight).Devices()

  logs_paths = logs_util.ServerLogsPaths('viewfinder', 'full')
  merged_store = ObjectStore.GetInstance(logs_paths.MERGED_LOGS_BUCKET)

  # +1 because the start_date is exclusive.
start_time = time.time() - (options.options.search_days + 1) * constants.SECONDS_PER_DAY start_date = util.TimestampUTCToISO8601(start_time) files = yield GetFileList(merged_store, start_date) logging.info('Looking for %d devices UDIDs in %d files' % (len(devices), len(files))) last_seen = yield GetUDIDTimestamps(merged_store, files) missing = [] by_age = [] valid = [] now = time.time() for d in devices: if d not in last_seen: missing.append(d) else: age = (now - last_seen[d]) / constants.SECONDS_PER_DAY if age > options.options.inactive_days: by_age.append((age, d)) else: valid.append(d) by_age.sort() print 'Devices still active: %d' % len(valid) print 'Devices not seen in %d days: %d' % (options.options.search_days, len(missing)) if missing: print ' ' + '\n '.join(missing) print 'Inactive devices (and days since last seen): %d' % len(by_age) for (age, device) in by_age: print ' %3d %s' % (age, device) callback() if __name__ == '__main__': sys.exit(main.InitAndRun(Start))
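As a quick illustration of the three buckets the module docstring defines, the standalone snippet below applies the same last-seen threshold that Start computes inline. It is not part of the script: classify, the toy last_seen dict, and the hard-coded 86400 are introduced here, on the assumption that constants.SECONDS_PER_DAY is one day in seconds.

import time

SECONDS_PER_DAY = 86400  # assumed to match viewfinder's constants.SECONDS_PER_DAY

def classify(udid, last_seen, inactive_days=60, now=None):
    """Return 'missing', 'inactive' or 'active' for a device UDID."""
    now = now if now is not None else time.time()
    if udid not in last_seen:
        return 'missing'  # never seen in the searched logs
    age_days = (now - last_seen[udid]) / SECONDS_PER_DAY
    return 'inactive' if age_days > inactive_days else 'active'

now = time.time()
last_seen = {'udid-a': now - 10 * SECONDS_PER_DAY,   # seen recently
             'udid-b': now - 90 * SECONDS_PER_DAY}   # seen, but long ago
for udid in ('udid-a', 'udid-b', 'udid-c'):
    print('%s: %s' % (udid, classify(udid, last_seen, now=now)))  # active, inactive, missing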
;
/*
 * jQuery extends
 *
 * Copyright (c) 2014 MZ jeros
 *
 * Dual licensed under the MIT and GPL licenses.
 * http://en.wikipedia.org/wiki/MIT_License
 * http://en.wikipedia.org/wiki/GNU_General_Public_License
 */

(function( $, window, document){

    /**
     * @name : listItem
     * @desc :
     *  mouseover, focusin : add active class
     *  mouseout, focusout : remove active class
     *  check box checked : add selected class
     *  click cart : open cart layer
     *  quantity minus/plus.
     * @arguments
     *  {Function} detail
     *  {Function} zzim
     *  {Function} cart
     */
    $.fn.listItem = function( options ) {
        var defaults = {
                duration : 150
            }, // default config info.
            config = $.extend(true, defaults, options), // extend default config from options.
            inner_config = {
                item : '.item',
                item_detail : 'a[name=item_detail]',
                item_option : 'select[name=item_option]',
                item_quantity : 'input[name=item_quantity]',
                item_no : 'input[name=item_no]',
                btn_item_detail : 'a[name=btn_item_detail]',
                btn_item_zzim : 'a[name=btn_item_zzim]',
                btn_item_cart : 'a[name=btn_item_cart]',
                btn_option_cart : 'a[name=btn_option_cart]',
                btn_option_close : 'a[name=btn_option_close]',
                layer : '.layer-option',
                clsActive : 'active',
                clsSelected : 'selected'
            },
            $list = this,
            $items = this.find(inner_config.item),
            $layers = $items.find(inner_config.layer);

        function closeAllCart () {
            $list.find(inner_config.layer).hide();
        }

        // detail
        $items.on('click', inner_config.btn_item_detail, function (e) {
            if($.isFunction(config.detail)){
                config.detail($(this)); // arguments [btn_item_detail]
            } else {
                // navigate to this item's detail link (scoped to the clicked item)
                location.href = $(this).closest(inner_config.item).find(inner_config.item_detail).attr('href');
            }
            return false;
        });

        // zzim
        $items.on('click', inner_config.btn_item_zzim, function (e) {
            if($.isFunction(config.zzim)){
                config.zzim($(this)); // arguments [btn_item_zzim]
            }
            return false;
        });

        // cart
        $items.on('click', inner_config.btn_item_cart, function (e) {
            var $btnCart = $(this),
                $item = $btnCart.closest(inner_config.item);

            // behavior depends on whether the item has an option layer.
            if($item.find(inner_config.layer).length){
                if($item.find(inner_config.layer).css('display') == 'none') {
                    closeAllCart();
                }
                $item.find(inner_config.layer).fadeToggle(config.duration);
                return false;
            } else {
                if($.isFunction(config.cart)){
                    config.cart($btnCart); // arguments [btn_item_cart]
                }
                return false;
            }
        });

        // options > cart
        $items.on('click', inner_config.btn_option_cart, function (e) {
            if($.isFunction(config.cart)){
                config.cart($(this)); // arguments [btn_option_cart]
            }
            return false;
        });

        // option > close
        $items.find(inner_config.btn_option_close).on('click', outHandler);
        function outHandler(e) {
            $(this).closest(inner_config.item).find(inner_config.layer).hide();
            return false;
        }

        // item checkbox
        $items.on('click', inner_config.item_no, function (e) {
            var $chk = $(this),
                $item = $chk.closest(inner_config.item);

            if($chk.is(':checked')){
                $item.addClass(inner_config.clsSelected);
            } else {
                $item.removeClass(inner_config.clsSelected);
            }
        });

        // quantity minus/plus.
        if($.isFunction($.fn.nAdjust)){
            $items.find(inner_config.item_quantity).nAdjust({ readonly:false });
        }

        function getLastTarget($item){
            var searchTag = 'A, BUTTON, INPUT, SELECT, TEXTAREA';
            var $searchTag = $item.find(searchTag);
            return $searchTag.get($searchTag.length - 1);
        }

        $items.each(function (i, v) {
            var $item = $(v);
            // this item's last focusable element, used to detect when focus leaves the item.
            var lastTarget = getLastTarget($item);

            // item mouseover/mouseleave
            $item.on('mouseenter focusin', function (e) {
                $(this).addClass(inner_config.clsActive);
                return false;
            }).on('mouseleave focusout', function (e) {
                if(e.type == 'mouseleave'){
                    $(this).removeClass(inner_config.clsActive).find(inner_config.layer).fadeOut(config.duration);
                }
                if(e.type == 'focusout' && e.target.name == lastTarget.name){
                    $(this).closest(inner_config.item).removeClass(inner_config.clsActive).find(inner_config.layer).fadeOut(config.duration);
                }
                return false;
            });
        });
    };

})(jQuery, window, document);
var Usuari = require('../models/usuari'); exports.actionList = function (req, res) { /*if (req.user.es_admin) { Usuari.find(req.query, function (err, usuaris) { if (err) { console.error(new Date().toISOString(), err); res.status(500).json({codError: 500, descError: 'LIST: Error intern al servidor. Veure log'}); } else res.json(usuaris); }); } else {*/ res.json([req.user]); //} }; exports.actionShow = function (req, res) { if (req.user.es_admin) { Usuari.findById(req.params.id, function (err, usuari) { if (err) { console.error(new Date().toISOString(), err); res.status(500).json({codError: 500, descError: 'SHOW: Error intern al servidor. Veure log'}); } else if (!usuari) res.status(404).json({ codError: 404, descError: "SHOW: No existeix l'usuari amb id=" + req.params.id }); else res.json(usuari); }); } else { if (req.user._id === req.params.id) res.json(req.user); else res.status(403).json({ codError: 403, descError: "SHOW: L'usuari no té permís per accedir a l'usuari amb id=" + req.params.id }); } }; exports.actionCreate = function (req, res) { if (!req.user.es_admin) res.status(403).json({codError: 403, descError: "CREATE: L'usuari no té permís per crear usuaris"}); else { var usuari = new Usuari(req.body); usuari.save(function (err) { if (err) { console.error(new Date().toISOString(), err); res.status(500).json({codError: 500, descError: 'CREATE: Error intern al servidor. Veure log'}); } else res.json({_id: usuari._id}); }); } }; exports.actionUpdate = function (req, res) { Usuari.findById(req.params.id, function (err, usuari) { if (err) { console.error(new Date().toISOString(), err); res.status(500).json({codError: 500, descError: 'UPDATE: Error intern al servidor. Veure log'}); } else if (!usuari) res.status(404).json({ codError: 404, descError: "UPDATE: No existeix l'usuari amb id=" + req.params.id }); else { if(req.body.oldPassword === usuari.contrasenya) { var obj = { contrasenya: req.body.newPassword }; if(obj.contrasenya.length >= 8) { usuari.set(obj); usuari.save(function (err) { if (err) { console.error(new Date().toISOString(), err); res.status(500).json({codError: 500, descError: 'UPDATE: Error intern al servidor. Veure log'}); } else res.json({_id: usuari._id}); }); } else { res.status(496).json({codError: 496, descError: 'UPDATE: Contrasenya minim 8 caràcters'}); } } else { res.status(495).json({codError: 495, descError: 'UPDATE: Contrasenya incorrecta'}); } } }); }; exports.actionDelete = function (req, res) { if (!req.user.es_admin) res.status(403).json({codError: 403, descError: "DELETE: L'usuari no té permís per esborrar usuaris"}); else { Usuari.findById(req.params.id, function (err, usuari) { if (err) { console.error(new Date().toISOString(), err); res.status(500).json({codError: 500, descError: 'DELETE: Error intern al servidor. Veure log'}); } else if (!usuari) res.status(404).json({ codError: 404, descError: "DELETE: No existeix l'usuari amb id=" + req.params.id }); else { usuari.remove(function (err) { if (err) { console.error(new Date().toISOString(), err); res.status(500).json({codError: 500, descError: 'DELETE: Error intern al servidor. Veure log'}); } else res.json({_id: usuari.id}); }); } }); } };
from wtforms import ( Form, BooleanField, HiddenField, PasswordField, SelectField, StringField, SubmitField, TextAreaField) from wtforms.validators import Length, optional from .tables import DBSession, Categories groups = [(1, 'editor'), (0, 'admin')] class UserForm(Form): name = StringField('Username', [Length(min=2, max=15)]) password = PasswordField('Password', [Length(min=6)]) group = SelectField('Group', coerce=int, choices=groups) submit = SubmitField('Submit') class EditUserForm(UserForm): delete = BooleanField('Delete') password = PasswordField('Password', [optional(), Length(min=6)]) user_id = HiddenField() group = HiddenField() class LoginForm(Form): came_from = HiddenField() username = StringField('Username') password = PasswordField('Password') submit = SubmitField('Login') categories = DBSession.query(Categories).order_by(Categories.name) categories = [(c.id, c.name) for c in categories.all()] class PostForm(Form): title = StringField('Title', [Length(min=1)]) category = SelectField('Category', coerce=int, choices=categories) post_content = TextAreaField('Post', [Length(min=1)]) submit = SubmitField('Submit Post') class CategoryForm(Form): name = StringField('Name', [Length(min=1)]) submit = SubmitField('Submit Post') class EditCategoryForm(CategoryForm): delete = BooleanField('Delete Category')
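Because these are plain WTForms classes, they can be exercised without an HTTP request by passing field data as keyword arguments. The sketch below is a hedged usage example: the forms import path is hypothetical, and importing the real module also runs the Categories query at import time, so a configured DBSession is assumed.

from forms import UserForm  # hypothetical import path for the module above

form = UserForm(name='bob', password='abc', group=1)
print(form.validate())  # False: 'abc' fails the Length(min=6) validator on password
print(form.errors)      # e.g. {'password': ['Field must be at least 6 characters long.']}

form = UserForm(name='bob', password='s3cretpw', group=1)
print(form.validate())  # True: name length, password length and group choice all pass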
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Formats as a .json file that can be used to localize Google Chrome
extensions."""

from json import JSONEncoder
import re
import types

from grit import util
from grit.node import message


def Format(root, lang='en', output_dir='.'):
  """Format the messages as JSON."""
  yield '{\n'

  encoder = JSONEncoder()
  format = ('  "%s": {\n'
            '    "message": %s%s\n'
            '  }')
  placeholder_format = ('      "%i": {\n'
                        '        "content": "$%i"\n'
                        '      }')
  first = True
  for child in root.ActiveDescendants():
    if isinstance(child, message.MessageNode):
      id = child.attrs['name']
      if id.startswith('IDR_') or id.startswith('IDS_'):
        id = id[4:]

      loc_message = encoder.encode(child.ws_at_start + child.Translate(lang) +
                                   child.ws_at_end)

      # Replace $n place-holders with $n$ and add an appropriate "placeholders"
      # entry. Note that chrome.i18n.getMessage only supports 9 placeholders:
      # https://developer.chrome.com/extensions/i18n#method-getMessage
      placeholders = ''
      for i in range(1, 10):
        if loc_message.find('$%d' % i) == -1:
          break
        loc_message = loc_message.replace('$%d' % i, '$%d$' % i)
        if placeholders:
          placeholders += ',\n'
        placeholders += placeholder_format % (i, i)

      if not first:
        yield ',\n'
      first = False
      if placeholders:
        placeholders = ',\n    "placeholders": {\n%s\n    }' % placeholders
      yield format % (id, loc_message, placeholders)

  yield '\n}\n'
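The placeholder rewriting that the comment inside Format describes is easier to follow with a concrete message in hand. The snippet below is a standalone illustration, not part of grit; to_chrome_i18n is a name introduced here, and it only mimics the $n to $n$ rewrite and the nine-placeholder cap for a single string.

import json

def to_chrome_i18n(msg_id, text):
    """Mimic the placeholder rewriting for one message string."""
    placeholders = {}
    for i in range(1, 10):  # chrome.i18n.getMessage supports at most $1..$9
        if ('$%d' % i) not in text:
            break  # stop at the first unused index, as the loop in Format does
        text = text.replace('$%d' % i, '$%d$' % i)
        placeholders[str(i)] = {'content': '$%d' % i}
    entry = {'message': text}
    if placeholders:
        entry['placeholders'] = placeholders
    return {msg_id: entry}

# Prints: {"GREETING": {"message": "Hello $1$, you have $2$ new messages",
#          "placeholders": {"1": {"content": "$1"}, "2": {"content": "$2"}}}}
print(json.dumps(to_chrome_i18n('GREETING', 'Hello $1, you have $2 new messages')))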
(function () { "use strict"; function resolve(l) { var r = {}; angular.forEach(arguments, function (name) { r["h_" + name] = function (h) { return h[name](); }; }); return r; } function computeRoutes(baseURL) { var templatesBaseURL = baseURL + "/partials"; var l = [{ route: '/welcome', mainText: "Accueil", controller: 'EmptyCtrl' }, { route: '/applications', mainText: "Applications clientes", show: 'loggedUser.can.FCTN_API_CONFIG_APPLIS', controller: 'ApplicationsCtrl', resolve: resolve('applications') }, { route: '/accounts', mainText: "Comptes d'imputation", show: 'loggedUser.can.FCTN_GESTION_CPT_IMPUT', controller: 'AccountsCtrl', resolve: resolve('accounts') }, { route: '/consolidatedSummary', mainText: "Relevé consolidé", show: 'loggedUser.can.FCTN_API_EDITION_RAPPORT', controller: 'ConsolidatedSummaryCtrl', resolve: resolve('summary_consolidated') }, { route: '/detailedSummary', mainText: "Relevé détaillé", show: 'loggedUser.can.FCTN_API_EDITION_RAPPORT', controller: 'DetailedSummaryCtrl', resolve: resolve('summary_detailed_criteria') }, { route: '/users', mainText: "Gestion des utilisateurs", show: 'loggedUser.can.FCTN_MANAGE_USERS', controller: 'UsersCtrl', resolve: resolve('users') }, { route: '/logout', mainText: "Déconnexion", show: 'allowLogout', controller: 'EmptyCtrl' }, { route: '/about', mainText: "A propos de", title: "A propos de SMSU-U", controller: 'EmptyCtrl'}, { route: '/users/:id', parent: '/users', controller: 'UsersDetailCtrl', templateUrl: templatesBaseURL + '/users-detail.html', resolve: resolve('users') }, { route: '/applications/:id', parent: '/applications', controller: 'ApplicationsDetailCtrl', templateUrl: templatesBaseURL + '/applications-detail.html', resolve: resolve('accounts','applications') }, { route: '/accounts/:id', parent: '/accounts', controller: 'AccountsDetailCtrl', templateUrl: templatesBaseURL + '/accounts-detail.html', resolve: resolve('accounts') }]; angular.forEach(l, function (tab) { if (!tab.templateUrl) tab.templateUrl = templatesBaseURL + tab.route + '.html'; }); return l; } function findCurrentTab($scope, templateUrl) { var routes = this.routes; var tab = this.h.simpleFind(routes, function (tab) { return tab.templateUrl === templateUrl; }); if (!tab) return; var mainTab; if (tab.parent) { mainTab = this.h.simpleFind(routes, function (mainTab) { return mainTab.route === tab.parent; }); } else { mainTab = tab; } $scope.currentMainTab = mainTab; $scope.currentTab = tab; } var app = angular.module('myApp'); app.provider('routes', function () { this.routes = []; this.computeRoutes = computeRoutes; this.$get = function (basicHelpers) { return { routes: this.routes, findCurrentTab: findCurrentTab, h: basicHelpers }; }; }); }());
from core.himesis import Himesis, HimesisPreConditionPatternLHS import uuid class HContract07_CompleteLHS(HimesisPreConditionPatternLHS): def __init__(self): """ Creates the himesis graph representing the AToM3 model HContract07_CompleteLHS """ # Flag this instance as compiled now self.is_compiled = True super(HContract07_CompleteLHS, self).__init__(name='HContract07_CompleteLHS', num_nodes=0, edges=[]) # Add the edges self.add_edges([]) # Set the graph attributes self["mm__"] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule'] self["MT_constraint__"] = """return True""" self["name"] = """""" self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'HContract07_CompleteLHS') self["equations"] = [] # Set the node attributes # match class Property(Property) node self.add_node() self.vs[0]["MT_pre__attr1"] = """return True""" self.vs[0]["MT_label__"] = """1""" self.vs[0]["mm__"] = """MT_pre__Property""" self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Property') # apply class Feature(Feature) node self.add_node() self.vs[1]["MT_pre__attr1"] = """return True""" self.vs[1]["MT_label__"] = """2""" self.vs[1]["mm__"] = """MT_pre__Feature""" self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Feature') # trace association null--trace-->nullnode self.add_node() self.vs[2]["MT_label__"] = """3""" self.vs[2]["mm__"] = """MT_pre__trace_link""" self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Featureassoc2Property') # Add the edges self.add_edges([ (1,2), # apply class null(Property) -> backward_association (2,0), # backward_associationnull -> match_class null(Property) ]) # define evaluation methods for each match class. def eval_attr11(self, attr_value, this): return True # define evaluation methods for each apply class. def eval_attr12(self, attr_value, this): return True # define evaluation methods for each match association. # define evaluation methods for each apply association. def constraint(self, PreNode, graph): return True