# -*- coding: utf-8 -*-
import os.path

import numpy as np
import matplotlib.pyplot as plt
from scipy import ndimage, misc
import pdb


def weightedAverage(pixel):
    return 0.299 * pixel[0] + 0.587 * pixel[1] + 0.114 * pixel[2]


def exponential_euclidean(canal, sigma):
    return np.exp(-(canal - 0.5)**2 / (2 * sigma**2))


def show(color_array):
    """ Function to show image"""
    plt.imshow(color_array)
    plt.show()
    plt.axis('off')


def show_gray(gray_array):
    """ Function to show grayscale image"""
    fig = plt.figure()
    plt.imshow(gray_array, cmap=plt.cm.Greys_r)
    plt.show()
    plt.axis('off')


class Image(object):
    """Class for Image"""

    def __init__(self, fmt, path, crop=False, n=0):
        self.path = os.path.join("image_set", fmt, str(path))
        self.fmt = fmt
        self.array = misc.imread(self.path)
        self.array = self.array.astype(np.float32) / 255
        if crop:
            self.crop_image(n)
        self.shape = self.array.shape

    def crop_image(self, n):
        resolution = 2**n
        (height, width, _) = self.array.shape
        (max_height, max_width) = (resolution * (height // resolution),
                                   resolution * (width // resolution))
        (begin_height, begin_width) = ((height - max_height) // 2,
                                       (width - max_width) // 2)
        self.array = self.array[begin_height:max_height + begin_height,
                                begin_width:max_width + begin_width]

    @property
    def grayScale(self):
        """Grayscale image"""
        rgb = self.array
        self._grayScale = np.dot(rgb[..., :3], [0.299, 0.587, 0.114])
        return self._grayScale

    def saturation(self):
        """Function that returns the Saturation map"""
        red_canal = self.array[:, :, 0]
        green_canal = self.array[:, :, 1]
        blue_canal = self.array[:, :, 2]
        mean = (red_canal + green_canal + blue_canal) / 3.0
        saturation = np.sqrt(((red_canal - mean)**2 +
                              (green_canal - mean)**2 +
                              (blue_canal - mean)**2) / 3)
        return saturation

    def contrast(self):
        """Function that returns the Contrast map (absolute Laplacian response)"""
        grey = self.grayScale
        contrast = np.zeros((self.shape[0], self.shape[1]))
        grey_extended = np.zeros((self.shape[0] + 2, self.shape[1] + 2))
        grey_extended[1:self.shape[0] + 1, 1:self.shape[1] + 1] = grey
        # kernel = np.array([[-1, -1, -1],
        #                    [-1,  8, -1],
        #                    [-1, -1, -1]])
        kernel = np.array([[0, 1, 0],
                           [1, -4, 1],
                           [0, 1, 0]])
        for row in range(self.shape[0]):
            for col in range(self.shape[1]):
                contrast[row][col] = np.abs(
                    (kernel * grey_extended[row:(row + 3), col:(col + 3)]).sum())
        contrast = (contrast - np.min(contrast))
        contrast = contrast / np.max(contrast)
        return contrast

    def sobel(self):
        """Function that returns the horizontal and vertical Sobel maps"""
        grey = self.grayScale
        sobel_h = np.zeros((self.shape[0], self.shape[1]))
        sobel_v = np.zeros((self.shape[0], self.shape[1]))
        grey_extended = np.zeros((self.shape[0] + 2, self.shape[1] + 2))
        grey_extended[1:self.shape[0] + 1, 1:self.shape[1] + 1] = grey
        kernel1 = np.array([[-1, -2, -1],
                            [0, 0, 0],
                            [1, 2, 1]])
        kernel2 = np.array([[-1, 0, 1],
                            [-2, 0, 2],
                            [-1, 0, 1]])
        for row in range(self.shape[0]):
            for col in range(self.shape[1]):
                sobel_h[row][col] = np.abs(
                    (kernel1 * grey_extended[row:(row + 3), col:(col + 3)]).sum())
                sobel_v[row][col] = np.abs(
                    (kernel2 * grey_extended[row:(row + 3), col:(col + 3)]).sum())
        return sobel_h, sobel_v

    def exposedness(self):
        """Function that returns the Well-Exposedness map"""
        red_canal = self.array[:, :, 0]
        green_canal = self.array[:, :, 1]
        blue_canal = self.array[:, :, 2]
        sigma = 0.2
        red_exp = exponential_euclidean(red_canal, sigma)
        green_exp = exponential_euclidean(green_canal, sigma)
        blue_exp = exponential_euclidean(blue_canal, sigma)
        return red_exp * green_exp * blue_exp


if __name__ == "__main__":
    im = Image("jpeg", "grandcanal_mean.jpg")
    sat = im.contrast()
    show_gray(sat)
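
A minimal sketch of how the three quality maps above could be combined into a single exposure-fusion weight, in the spirit of Mertens et al.; the exponents, the epsilon and the global normalisation are assumptions, not part of the class itself.

def fusion_weight(image, wc=1.0, ws=1.0, we=1.0, eps=1e-12):
    """Combine contrast, saturation and well-exposedness into one weight map (sketch)."""
    weight = (image.contrast() ** wc *
              image.saturation() ** ws *
              image.exposedness() ** we)
    return weight / (weight.sum() + eps)  # normalise so the weights sum to ~1

# Example (assumes the image files used above exist under image_set/jpeg/):
# w = fusion_weight(Image("jpeg", "grandcanal_mean.jpg"))
# show_gray(w)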
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Logger.cs" company="NBug Project">
//   Copyright (c) 2011 - 2013 Teoman Soygul. Licensed under MIT license.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------

using System.Diagnostics;

namespace NBug.Core.Util.Logging
{
    using System;
    using System.IO;
    using System.Linq.Expressions;

    using NBug.Core.UI.Developer;
    using NBug.Core.Util.Exceptions;
    using NBug.Enums;

    /// <summary>
    /// Uses <see cref="System.Diagnostics.Trace.Write(string, string)"/> method to log important messages. Also provides a <see cref="LogWritten"/>
    /// event. If <see cref="NBug.Settings.WriteLogToDisk"/> is set to true, a default "NBug.log" file is written to disk.
    /// </summary>
    /// <example>
    /// A sample trace listener can easily be added to the current application with an app.config file looking as below:
    /// <code>
    /// {?xml version="1.0"?}
    /// {configuration}
    ///   {configSections}
    ///   {/configSections}
    ///   {system.diagnostics}
    ///     {trace autoflush="true" indentsize="2"}
    ///       {listeners}
    ///         {add name="testAppListener" type="System.Diagnostics.TextWriterTraceListener" initializeData="MyApplication.log" /}
    ///       {/listeners}
    ///     {/trace}
    ///   {/system.diagnostics}
    /// {/configuration}
    /// </code>
    /// </example>
    internal static class Logger
    {
        [DebuggerStepThrough()]
        static Logger()
        {
            if (Settings.WriteLogToDisk)
            {
                LogWritten += (message, category) =>
                    File.AppendAllText(
                        Path.Combine(Settings.NBugDirectory, "NBug.log"),
                        category + ": " + message + Environment.NewLine);
            }
        }

        /// <summary>
        /// First parameter is the message string, second one is the category.
        /// </summary>
        internal static event Action<string, LoggerCategory> LogWritten;

        [DebuggerStepThrough()]
        internal static void Error(string message)
        {
            Write(message, LoggerCategory.NBugError);

            if (Settings.DisplayDeveloperUI)
            {
                using (var viewer = new InternalExceptionViewer())
                {
                    viewer.ShowDialog(new NBugRuntimeException(message));
                }
            }

            if (Settings.ThrowExceptions)
            {
                throw new NBugRuntimeException(message);
            }
        }

        [DebuggerStepThrough()]
        internal static void Error(string message, Exception exception)
        {
            Write(message + Environment.NewLine + "Exception: " + exception, LoggerCategory.NBugError);

            if (Settings.DisplayDeveloperUI)
            {
                using (var viewer = new InternalExceptionViewer())
                {
                    viewer.ShowDialog(exception);
                }
            }

            if (Settings.ThrowExceptions)
            {
                throw new NBugRuntimeException(message, exception);
            }
        }

        [DebuggerStepThrough()]
        internal static void Error<T>(Expression<Func<T>> propertyExpression, string message)
        {
            Write(message + " Misconfigured Property: " + ((MemberExpression)propertyExpression.Body).Member.Name, LoggerCategory.NBugError);

            if (Settings.DisplayDeveloperUI)
            {
                using (var viewer = new InternalExceptionViewer())
                {
                    viewer.ShowDialog(NBugConfigurationException.Create(propertyExpression, message));
                }
            }

            if (Settings.ThrowExceptions)
            {
                throw NBugConfigurationException.Create(propertyExpression, message);
            }
        }

        [DebuggerStepThrough()]
        internal static void Info(string message)
        {
            Write(message, LoggerCategory.NBugInfo);
        }

        [DebuggerStepThrough()]
        internal static void Trace(string message)
        {
            Write(message, LoggerCategory.NBugTrace);
        }

        [DebuggerStepThrough()]
        internal static void Warning(string message)
        {
            Write(message, LoggerCategory.NBugWarning);
        }

        [DebuggerStepThrough()]
        private static void Write(string message, LoggerCategory category)
        {
            System.Diagnostics.Trace.Write(message + Environment.NewLine, category.ToString());

            if (Settings.DisplayDeveloperUI)
            {
                // InternalLogViewer.LogEntry(message, category);
            }

            var handler = LogWritten;
            if (handler != null)
            {
                handler(message, category);
            }
        }
    }
}
import json
import os

from floyd.exceptions import FloydException
from floyd.model.experiment_config import ExperimentConfig
from floyd.log import logger as floyd_logger


class ExperimentConfigManager(object):
    """
    Manages .floydexpt file in the current directory
    """
    CONFIG_FILE_PATH = os.path.join(os.getcwd(), ".floydexpt")

    @classmethod
    def set_config(cls, experiment_config):
        floyd_logger.debug("Setting {} in the file {}".format(experiment_config.to_dict(),
                                                              cls.CONFIG_FILE_PATH))
        with open(cls.CONFIG_FILE_PATH, "w") as config_file:
            config_file.write(json.dumps(experiment_config.to_dict()))

    @classmethod
    def get_config(cls):
        if not os.path.isfile(cls.CONFIG_FILE_PATH):
            raise FloydException("Missing .floydexpt file, run floyd init first")
        with open(cls.CONFIG_FILE_PATH, "r") as config_file:
            experiment_config = json.loads(config_file.read())
        return ExperimentConfig.from_dict(experiment_config)
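
A hedged usage sketch for the manager above; it assumes a floyd installation and an ExperimentConfig that round-trips through to_dict()/from_dict() (the constructor arguments shown are assumptions).

# Round-trip sketch (adjust the constructor to the real ExperimentConfig signature).
config = ExperimentConfig(name="my-experiment", family_id="abc123")
ExperimentConfigManager.set_config(config)      # writes ./.floydexpt as JSON
loaded = ExperimentConfigManager.get_config()   # raises FloydException if the file is missing
assert loaded.to_dict() == config.to_dict()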
#-*- coding:utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2011 OpenERP SA (<http://openerp.com>). All Rights Reserved
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.report import report_sxw
from hr_payroll import report


class payslip_details_report_in(report.report_payslip_details.payslip_details_report):

    def __init__(self, cr, uid, name, context):
        super(payslip_details_report_in, self).__init__(cr, uid, name, context)
        self.localcontext.update({
            'get_details_by_rule_category': self.get_details_by_rule_category,
        })

report_sxw.report_sxw('report.paylip.details.in',
                      'hr.payslip',
                      'l10n_in_hr_payroll/report/report_payslip_details.rml',
                      parser=payslip_details_report_in)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
package org.apache.jsp.WEB_002dINF.layout;

import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;

public final class footer_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  private static final JspFactory _jspxFactory = JspFactory.getDefaultFactory();

  private static java.util.List<String> _jspx_dependants;

  private org.glassfish.jsp.api.ResourceInjector _jspx_resourceInjector;

  public java.util.List<String> getDependants() {
    return _jspx_dependants;
  }

  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {

    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;

    try {
      response.setContentType("text/html; charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
                null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;
      _jspx_resourceInjector = (org.glassfish.jsp.api.ResourceInjector) application.getAttribute("com.sun.appserv.jsp.resource.injector");

      out.write("\n");
      out.write("<hr/>\n");
      out.write("    <footer>\n");
      out.write("        <hr>\n");
      out.write("        <p> <a href=\"\" target=\"_blank\">©Sublime Soft 2015</a></p>\n");
      out.write("    </footer>");
    } catch (Throwable t) {
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
        else throw new ServletException(t);
      }
    } finally {
      _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
}
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#include <aws/ssm/model/DocumentKeyValuesFilter.h>
#include <aws/core/utils/json/JsonSerializer.h>

#include <utility>

using namespace Aws::Utils::Json;
using namespace Aws::Utils;

namespace Aws
{
namespace SSM
{
namespace Model
{

DocumentKeyValuesFilter::DocumentKeyValuesFilter() :
    m_keyHasBeenSet(false),
    m_valuesHasBeenSet(false)
{
}

DocumentKeyValuesFilter::DocumentKeyValuesFilter(JsonView jsonValue) :
    m_keyHasBeenSet(false),
    m_valuesHasBeenSet(false)
{
  *this = jsonValue;
}

DocumentKeyValuesFilter& DocumentKeyValuesFilter::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("Key"))
  {
    m_key = jsonValue.GetString("Key");
    m_keyHasBeenSet = true;
  }

  if(jsonValue.ValueExists("Values"))
  {
    Array<JsonView> valuesJsonList = jsonValue.GetArray("Values");
    for(unsigned valuesIndex = 0; valuesIndex < valuesJsonList.GetLength(); ++valuesIndex)
    {
      m_values.push_back(valuesJsonList[valuesIndex].AsString());
    }
    m_valuesHasBeenSet = true;
  }

  return *this;
}

JsonValue DocumentKeyValuesFilter::Jsonize() const
{
  JsonValue payload;

  if(m_keyHasBeenSet)
  {
   payload.WithString("Key", m_key);
  }

  if(m_valuesHasBeenSet)
  {
   Array<JsonValue> valuesJsonList(m_values.size());
   for(unsigned valuesIndex = 0; valuesIndex < valuesJsonList.GetLength(); ++valuesIndex)
   {
     valuesJsonList[valuesIndex].AsString(m_values[valuesIndex]);
   }
   payload.WithArray("Values", std::move(valuesJsonList));
  }

  return payload;
}

} // namespace Model
} // namespace SSM
} // namespace Aws
import numpy as np
import random

points = np.array([[1, 1], [1, 2], [1, 3], [2, 1], [2, 2], [2, 3], [3, 1], [3, 2], [3, 3]])
p = np.array([3, 2])
outcomes = np.array([0, 0, 0, 0, 1, 1, 1, 1, 1])


def distance(p1, p2):
    """Find the distance between two points"""
    return np.sqrt(np.sum(np.power(p2 - p1, 2)))


def majority_select(votes):
    """Returns the most frequent class, i.e. the one with the maximum occurrence"""
    vote_counts = {}
    for vote in votes:
        # class seen before
        if vote in vote_counts:
            vote_counts[vote] += 1
        # class seen for the first time
        else:
            vote_counts[vote] = 1
    max_freq = []
    max_countValue = max(vote_counts.values())
    for vote, count in vote_counts.items():
        if count == max_countValue:
            max_freq.append(vote)
    return random.choice(max_freq)


def nearest_neighbours(points, p, k=5):  # k is the number of nearest neighbours to return
    """Returns the closest k neighbours of p"""
    distances = np.zeros(points.shape[0])  # to hold all of the distances
    for i in range(len(distances)):
        distances[i] = distance(p, points[i])
    # sort the distances to get the shortest distance, using an index vector
    ind = np.argsort(distances)  # results in an array of sorted indices
    return ind[:k]


# predicting the class to which the point belongs
def knn_predict(p, points, outcomes, k=5):
    ind = nearest_neighbours(points, p, k)
    print(majority_select(outcomes[ind]))


knn_predict(p, points, outcomes, 3)  # k can be taken as user input

# Scope of the script:
# can classify all points (synthetically generated) on a prediction grid and plot them
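
Following the note at the end of the script above, a minimal prediction-grid sketch built from the same helpers; the grid bounds and step are assumptions chosen for illustration.

# Sketch: classify every point of a small synthetic grid with the helpers above.
def make_prediction_grid(points, outcomes, limits=(0, 4, 0, 4), h=0.5, k=3):
    x_min, x_max, y_min, y_max = limits
    xs = np.arange(x_min, x_max, h)
    ys = np.arange(y_min, y_max, h)
    xx, yy = np.meshgrid(xs, ys)
    prediction_grid = np.zeros(xx.shape, dtype=int)
    for i, x in enumerate(xs):
        for j, y in enumerate(ys):
            q = np.array([x, y])
            ind = nearest_neighbours(points, q, k)
            prediction_grid[j, i] = majority_select(outcomes[ind])
    return xx, yy, prediction_grid

# xx, yy, grid = make_prediction_grid(points, outcomes)
# (the grid could then be drawn with matplotlib's pcolormesh, not shown here)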
__author__ = 'Elliot'

from django.conf.urls import patterns, url
from views import *

# URLs for the CFEO skin of the UI
urlpatterns = patterns('',
    url(r'^browse/$', cfeoBrowse, name='cfeo_browse'),
    (r'^browse/acview/(?P<acHash>[\d|\w]+)/$', cfeoacview),
    url(r'^browse/pageview/(?P<id>\d+)/$', cfeoPageImageview, name='cfeo_pageview'),
    url(r'^browse/comparepageview/(?P<compareleft>\d*)/(?P<compareright>\d*)/$', comparePageImageview, name='cfeo_comparepageview'),
    url(r'^browse/comparepageview/(?P<compareleft>\d*)/$', comparePageImageview, name='cfeo_compareleftpageview'),
    (r'^browse/comparepageview/$', comparePageImageview),
    url(r'^browse/sourceinformation/(?P<id>\d+)/$', cfeoSourceInformation, name='cfeo_sourceinformation'),
    url(r'^browse/workinformation/(?P<id>\d+)/$', cfeoWorkInformation, name='cfeo_workinformation'),
)
<div class="header_btm"> <div class="wrap"> <div class="header_sub"> <div class="h_menu"> <ul> <li><a href="index.html#/home">home</a></li> <li><a href="index.html#/sales">sales</a></li> <li class="active"><a href="index.html#/men">men</a></li> <li><a href="index.html#/women">women</a></li> <li><a href="index.html#/contact">contact</a></li> </ul> </div> <div class="top-nav"> <nav class="nav"> <a href="#" id="w3-menu-trigger"> </a> <ul class="nav-list" style=""> <li class="nav-item"><a href="index.html">home</a></li> <li class="nav-item"><a href="promotion.html">promotion</a></li> <li class="nav-item"><a class="active" href="men.html">men</a></li> <li class="nav-item"><a href="women.html">women</a></li> <li class="nav-item"><a href="contact.html">Contact</a></li> </ul> </nav> <!-- #TODO: Removing searh box for now <div class="search_box"> <form> <input type="text" value="Search" onfocus="this.value = '';" onblur="if (this.value == '') {this.value = 'Search';}"><input type="submit" value=""> </form> </div> --> <div class="clear"> </div> <script src="js/responsive.menu.js"></script> </div> <div class="clear"></div> </div> </div> </div> <!-- start main --> <div class="main_bg"> <div class="wrap"> <div class="main"> <h2 class="style top">accessories</h2> <!-- start grids_of_3 --> <div class="grids_of_3"> <div class="grid1_of_3"> <a href="details.html"> <img src="images/pic1.jpg" alt=""/> <h3>branded shoes</h3> <div class="price"> <h4>$300<span>indulge</span></h4> </div> <span class="b_btm"></span> </a> </div> <div class="grid1_of_3"> <a href="details.html"> <img src="images/pic2.jpg" alt=""/> <h3>branded t-shirts</h3> <div class="price"> <h4>$300<span>indulge</span></h4> </div> <span class="b_btm"></span> </a> </div> <div class="grid1_of_3"> <a href="details.html"> <img src="images/pic3.jpg" alt=""/> <h3>branded tees</h3> <div class="price"> <h4>$300<span>indulge</span></h4> </div> <span class="b_btm"></span> </a> </div> <div class="clear"></div> </div> <div class="grids_of_3"> <div class="grid1_of_3"> <a href="details.html"> <img src="images/pic4.jpg" alt=""/> <h3>branded bags</h3> <div class="price"> <h4>$300<span>indulge</span></h4> </div> <span class="b_btm"></span> </a> </div> <div class="grid1_of_3"> <a href="details.html"> <img src="images/pic5.jpg" alt=""/> <h3>ems women bag</h3> <div class="price"> <h4>$300<span>indulge</span></h4> </div> <span class="b_btm"></span> </a> </div> <div class="grid1_of_3"> <a href="details.html"> <img src="images/pic6.jpg" alt=""/> <h3>branded cargos</h3> <div class="price"> <h4>$300<span>indulge</span></h4> </div> <span class="b_btm"></span> </a> </div> <div class="clear"></div> </div> <!-- end grids_of_3 --> </div> </div> </div>
#!/usr/bin/env python

import datetime, time, sys
import subprocess
from functools import wraps


def on_timeout(limit, handler, hint=None):
    '''
    call handler with a hint on timeout(seconds)
    http://qiita.com/siroken3/items/4bb937fcfd4c2489d10a
    '''
    def notify_handler(signum, frame):
        handler("'%s' is not finished in %d second(s)." % (hint, limit))

    def __decorator(function):
        def __wrapper(*args, **kwargs):
            import signal
            signal.signal(signal.SIGALRM, notify_handler)
            signal.alarm(limit)
            result = function(*args, **kwargs)
            signal.alarm(0)
            return result
        return wraps(function)(__wrapper)
    return __decorator


def abort_handler(msg):
    global child_proc
    sys.stderr.write(msg)
    child_proc.kill()
    sys.exit(1)


@on_timeout(limit=700, handler=abort_handler, hint='realtime forecast')
def realtime_forecast():
    global child_proc
    # The os.setsid() is passed in the argument preexec_fn so
    # it's run after the fork() and before exec() to run the shell.
    child_proc = subprocess.Popen('exec ./realtime-forecast.py --work-dir=. -r do', shell=True)
    child_proc.wait()


epoch = datetime.datetime(2015, 1, 1)
last_diff = None

while True:
    now = datetime.datetime.now()
    diff = int((now - epoch).total_seconds()) // (60 * 12)
    if last_diff < diff:
        last_diff = diff
        try:
            realtime_forecast()
        except Exception as e:
            print e.message
            pass
        subprocess.call('time ./review-forecast.py', shell=True)
        subprocess.call('cp review-forecast.png ~/public_html', shell=True)
        subprocess.call('scp review-forecast.png [email protected]:/var/www/html/wordpress/wp-content/uploads/2016/04/review-forecast-1.png', shell=True)
    time.sleep(10)
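
A small, hedged example of the on_timeout decorator defined above; SIGALRM only works on Unix-like systems, and the limit and handler here are illustrative, not part of the original script.

# Illustrative only: wrap a slow function so a handler fires after 2 seconds.
import time as _time

def _complain(msg):
    sys.stderr.write(msg + "\n")

@on_timeout(limit=2, handler=_complain, hint='slow job')
def slow_job():
    _time.sleep(5)  # longer than the limit, so _complain() gets called

# slow_job()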
/*
 * Copyright (C) 2015 The greyfish authors
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package org.asoem.greyfish.impl.agent;

import org.asoem.greyfish.core.actions.AgentContext;

public interface Basic2DAgentContext extends AgentContext<Basic2DAgent> {
}
/**
 * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.interestrate.future.provider;

import com.opengamma.analytics.financial.interestrate.future.derivative.BondFuture;
import com.opengamma.util.money.MultipleCurrencyAmount;

/**
 * Methods for the pricing of bond futures generic to all models.
 * @deprecated Use the {@link BondFuturesTransactionMethod}.
 */
@Deprecated
public abstract class BondFutureMethod {

  /**
   * Compute the present value of a future transaction from a quoted price.
   * @param future The future.
   * @param price The quoted price.
   * @return The present value.
   */
  public MultipleCurrencyAmount presentValueFromPrice(final BondFuture future, final double price) {
    double pv = (price - future.getReferencePrice()) * future.getNotional();
    return MultipleCurrencyAmount.of(future.getCurrency(), pv);
  }
}
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';

import { SharedModule } from '../shared/shared.module';
import { AccordionComponent } from './accordion.component';
import accordionRoutes from './accordion.routes';

@NgModule({
  imports: [CommonModule, SharedModule, accordionRoutes],
  declarations: [AccordionComponent],
  exports: [AccordionComponent],
  providers: []
})
export class AccordionModule { }
from rezgui.qt import QtCore, QtGui
from rezgui.util import create_pane
from rezgui.mixins.StoreSizeMixin import StoreSizeMixin
from rezgui.widgets.StreamableTextEdit import StreamableTextEdit
from rezgui.objects.App import app
from threading import Thread, Lock


class ProcessDialog(QtGui.QDialog, StoreSizeMixin):
    """A dialog that monitors a process and captures its output.

    Note that in order to capture the process's output, you need to have piped
    its stdout and stderr to subprocess.PIPE.
    """
    def __init__(self, process, command_string, parent=None):
        config_key = "layout/window/process"
        super(ProcessDialog, self).__init__(parent)
        StoreSizeMixin.__init__(self, app.config, config_key)
        self.setWindowTitle("Running: %s" % command_string)
        self.proc = process
        self.ended = False
        self.output_ended = False
        self.capture_output = True
        self.buffer = []

        self.bar = QtGui.QProgressBar()
        self.bar.setRange(0, 0)
        self.edit = StreamableTextEdit()

        close_btn = QtGui.QPushButton("Close")
        btn_pane = create_pane([None, close_btn], True)
        create_pane([self.bar, self.edit, btn_pane], False, parent_widget=self)

        self.lock = Lock()
        self.stdout_thread = Thread(target=self._read_output, args=(self.proc.stdout,))
        self.stderr_thread = Thread(target=self._read_output, args=(self.proc.stderr,))

        self.timer = QtCore.QTimer()
        self.timer.setInterval(100)
        self.timer.timeout.connect(self._update)
        self.timer.start()

        self.stdout_thread.start()
        self.stderr_thread.start()

        close_btn.clicked.connect(self.close)

    def closeEvent(self, event):
        self.capture_output = False

    def _read_output(self, buf):
        for line in buf:
            try:
                self.lock.acquire()
                self.buffer.append(line)
            finally:
                self.lock.release()
            if not self.capture_output:
                break

    def _update(self):
        if not self.output_ended \
                and not self.stdout_thread.is_alive() \
                and not self.stderr_thread.is_alive() \
                and self.proc.poll() is not None:
            self.output_ended = True
            self.buffer.append("\nProcess ended with returncode %d\n"
                               % self.proc.returncode)

        if self.buffer:
            try:
                self.lock.acquire()
                buf = self.buffer
                self.buffer = []
            finally:
                self.lock.release()
            txt = ''.join(buf)
            print >> self.edit, txt

        if not self.ended and self.proc.poll() is not None:
            self.bar.setMaximum(10)
            self.bar.setValue(10)
            self.ended = True

        if self.ended and self.output_ended:
            self.timer.stop()


# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.  If not, see <http://www.gnu.org/licenses/>.
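
A hedged sketch of how the dialog above might be launched; it assumes a rez GUI environment with a running Qt event loop, so it is illustrative only.

# Illustrative only: the dialog expects a process whose stdout/stderr are PIPEd.
import subprocess

def run_in_dialog(command, parent=None):
    proc = subprocess.Popen(command,
                            shell=True,
                            stdout=subprocess.PIPE,   # required so output can be captured
                            stderr=subprocess.PIPE)
    dlg = ProcessDialog(proc, command, parent=parent)
    dlg.exec_()  # assumes a QApplication/event loop is already running
    return proc.returncode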
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2017-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.

"""Add on delete cascade."""

from alembic import op

# revision identifiers, used by Alembic.
revision = '4e57407b8e4a'
down_revision = '12a88921ada2'
branch_labels = ()
depends_on = None


def upgrade():
    """Upgrade database."""
    op.drop_constraint('fk_oauth2server_client_user_id_accounts_user',
                       'oauth2server_client', type_='foreignkey')
    op.create_foreign_key(op.f('fk_oauth2server_client_user_id_accounts_user'),
                          'oauth2server_client', 'accounts_user', ['user_id'],
                          ['id'], ondelete='CASCADE')
    op.create_index(op.f('ix_oauth2server_client_user_id'),
                    'oauth2server_client', ['user_id'], unique=False)

    op.drop_constraint('fk_oauth2server_token_user_id_accounts_user',
                       'oauth2server_token', type_='foreignkey')
    op.create_foreign_key(op.f('fk_oauth2server_token_user_id_accounts_user'),
                          'oauth2server_token', 'accounts_user', ['user_id'],
                          ['id'], ondelete='CASCADE')
    op.create_index(op.f('ix_oauth2server_token_user_id'),
                    'oauth2server_token', ['user_id'], unique=False)

    op.drop_constraint('fk_oauth2server_token_client_id_oauth2server_client',
                       'oauth2server_token', type_='foreignkey')
    op.create_foreign_key(
        op.f('fk_oauth2server_token_client_id_oauth2server_client'),
        'oauth2server_token', 'oauth2server_client', ['client_id'],
        ['client_id'], ondelete='CASCADE')
    op.create_index(op.f('ix_oauth2server_token_client_id'),
                    'oauth2server_token', ['client_id'], unique=False)


def downgrade():
    """Downgrade database."""
    op.drop_constraint(op.f('fk_oauth2server_token_user_id_accounts_user'),
                       'oauth2server_token', type_='foreignkey')
    op.drop_index(op.f('ix_oauth2server_token_user_id'),
                  table_name='oauth2server_token')
    op.create_foreign_key('fk_oauth2server_token_user_id_accounts_user',
                          'oauth2server_token', 'accounts_user', ['user_id'],
                          ['id'])

    op.drop_constraint(
        op.f('fk_oauth2server_token_client_id_oauth2server_client'),
        'oauth2server_token', type_='foreignkey')
    op.drop_index(op.f('ix_oauth2server_token_client_id'),
                  table_name='oauth2server_token')
    op.create_foreign_key(
        'fk_oauth2server_token_client_id_oauth2server_client',
        'oauth2server_token', 'oauth2server_client', ['client_id'],
        ['client_id'])

    op.drop_constraint(op.f('fk_oauth2server_client_user_id_accounts_user'),
                       'oauth2server_client', type_='foreignkey')
    op.drop_index(op.f('ix_oauth2server_client_user_id'),
                  table_name='oauth2server_client')
    op.create_foreign_key('fk_oauth2server_client_user_id_accounts_user',
                          'oauth2server_client', 'accounts_user', ['user_id'],
                          ['id'])
#!/usr/bin/env python
#
# Cloudlet Infrastructure for Mobile Computing
#
#   Author: Kiryong Ha <[email protected]>
#           Zhuo Chen <[email protected]>
#
#   Copyright (C) 2011-2013 Carnegie Mellon University
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#

import Queue
import json
import struct
import threading
import time

import gabriel
LOG = gabriel.logging.getLogger(__name__)


class ProxyError(Exception):
    pass


class SensorReceiveClient(gabriel.network.CommonClient):
    """
    This client will receive data from the control server as much as possible.
    And put the data into the @output_queue, so that the other thread
    (@CognitiveProcessThread) can use the data.
    """

    def __init__(self, control_addr, output_queue):
        gabriel.network.CommonClient.__init__(self, control_addr)
        self.output_queue = output_queue

    def __repr__(self):
        return "Sensor Receive Client"

    def _handle_input_data(self):
        # receive data from control VM
        header_size = struct.unpack("!I", self._recv_all(4))[0]
        data_size = struct.unpack("!I", self._recv_all(4))[0]
        header_str = self._recv_all(header_size)
        data = self._recv_all(data_size)
        header_json = json.loads(header_str)

        # add header data for measurement
        if gabriel.Debug.TIME_MEASUREMENT:
            header_json[gabriel.Protocol_measurement.JSON_KEY_APP_RECV_TIME] = time.time()

        # token buffer - discard if the token(queue) is not available
        if self.output_queue.full():
            try:
                self.output_queue.get_nowait()
            except Queue.Empty as e:
                pass
        self.output_queue.put((header_json, data))


class CognitiveProcessThread(threading.Thread):
    '''
    The thread that does real processing.
    It takes input data from @data_queue and puts output data into @output_queue.
    An interesting cognitive engine should implement its own @handle function.
    '''

    def __init__(self, data_queue, output_queue, engine_id=None):
        self.data_queue = data_queue
        self.output_queue = output_queue
        self.engine_id = engine_id
        self.stop = threading.Event()

        threading.Thread.__init__(self, target=self.run)

    def __repr__(self):
        return "Cognitive Processing Thread"

    def run(self):
        while (not self.stop.wait(0.0001)):
            try:
                (header, data) = self.data_queue.get(timeout=0.0001)
                if header is None or data is None:
                    LOG.warning("header or data in data_queue is not valid!")
                    continue
            except Queue.Empty as e:
                continue

            ## the real processing
            # header can be changed directly in the proxy
            # (design choice made for backward compatibility)
            result = self.handle(header, data)  # header is in JSON format

            ## put return data into output queue
            rtn_json = header
            rtn_json[gabriel.Protocol_client.JSON_KEY_ENGINE_ID] = self.engine_id
            if gabriel.Debug.TIME_MEASUREMENT:
                rtn_json[gabriel.Protocol_measurement.JSON_KEY_APP_SENT_TIME] = time.time()
            self.output_queue.put((json.dumps(rtn_json), result))

        LOG.info("[TERMINATE] Finish %s" % str(self))

    def handle(self, header, data):  # header is in JSON format
        return None

    def terminate(self):
        self.stop.set()


class ResultPublishClient(gabriel.network.CommonClient):
    """
    This client will publish processed result from @data_queue to the ucomm server.
    """

    def __init__(self, ucomm_addr, data_queue, log_flag=True):
        gabriel.network.CommonClient.__init__(self, ucomm_addr)
        self.data_queue = data_queue
        if not log_flag:
            import logging
            LOG.setLevel(logging.CRITICAL + 1)

    def __repr__(self):
        return "Result Publish Client"

    def _handle_queue_data(self):
        try:
            rtn_header, rtn_data = self.data_queue.get(timeout=0.0001)
            total_size = len(rtn_header) + len(rtn_data)

            # packet format: total size, header size, header, data
            packet = struct.pack("!II{}s{}s".format(len(rtn_header), len(rtn_data)),
                                 total_size,
                                 len(rtn_header),
                                 rtn_header,
                                 rtn_data)
            self.sock.sendall(packet)
            LOG.info("sending result to ucomm: %s" % gabriel.util.print_rtn(json.loads(rtn_header)))
        except Queue.Empty as e:
            pass
'use strict';

const test = require('tape');
const adminClient = require('../../');

const baseUrl = 'http://localhost:8082/ag-push';
const settings = {
  username: 'admin',
  password: 'admin',
  kcUrl: 'http://localhost:8080/auth',
  kcRealmName: 'master'
};

/**
 * SimplePush Variant Tests
 */
test('SimplePush variant create - success', (t) => {
  const upsClient = adminClient(baseUrl, settings);

  upsClient.then((client) => {
    // First we need to create an application to add a variant to
    client.applications.create({name: 'For SimplePush Variant'}).then((application) => {
      const variantOptions = {
        pushAppId: application.pushApplicationID,
        name: 'SimplePush Variant',
        type: 'simplePush'
      };

      client.variants.create(variantOptions).then((variant) => {
        t.equal(variant.name, 'SimplePush Variant', 'name should be SimplePush Variant');
        t.equal(variant.type, 'simplePush', 'type should be simplePush');
      }).then(() => {
        // now remove the thing we created, we will test delete later on
        client.applications.remove(application.pushApplicationID);
        t.end();
      });
    });
  });
});

test('SimplePush find all - success', (t) => {
  const upsClient = adminClient(baseUrl, settings);

  upsClient.then((client) => {
    // First we need to create an application to add a variant to
    client.applications.create({name: 'For SimplePush'}).then((application) => {
      const variantOptions = {
        pushAppId: application.pushApplicationID,
        name: 'SimplePush',
        type: 'simplePush'
      };

      client.variants.create(variantOptions).then((variant) => {
        const variantToFind = {
          pushAppId: application.pushApplicationID,
          type: 'simplePush'
        };

        return client.variants.find(variantToFind);
      }).then((simplePushVariants) => {
        t.equal(simplePushVariants.length, 1, 'should only return 1');
        t.equal(Array.isArray(simplePushVariants), true, 'the return value should be an array');

        // now remove the thing we created, we will test delete later on
        client.applications.remove(application.pushApplicationID);
        t.end();
      });
    });
  });
});

test('SimplePush find one with variant ID - success', (t) => {
  const upsClient = adminClient(baseUrl, settings);

  upsClient.then((client) => {
    // First we need to create an application to add a variant to
    client.applications.create({name: 'For SimplePush'}).then((application) => {
      const variantOptions = {
        pushAppId: application.pushApplicationID,
        name: 'SimplePush',
        type: 'simplePush'
      };

      client.variants.create(variantOptions).then((variant) => {
        const variantToFind = {
          pushAppId: application.pushApplicationID,
          type: 'simplePush',
          variantId: variant.variantID
        };

        return client.variants.find(variantToFind);
      }).then((simplePushVariant) => {
        t.equal(simplePushVariant.name, 'SimplePush', 'name should be SimplePush');
        t.equal(simplePushVariant.type, 'simplePush', 'should be the simplePush type');
        t.equal(Array.isArray(simplePushVariant), false, 'the return value should not be an array');

        // now remove the thing we created, we will test delete later on
        client.applications.remove(application.pushApplicationID);
        t.end();
      });
    });
  });
});

test('SimplePush remove - success', (t) => {
  const upsClient = adminClient(baseUrl, settings);

  upsClient.then((client) => {
    // First we need to create an application to add a variant to
    client.applications.create({name: 'For SimplePush'}).then((application) => {
      const variantOptions = {
        pushAppId: application.pushApplicationID,
        name: 'SimplePush',
        type: 'simplePush'
      };

      client.variants.create(variantOptions).then((variant) => {
        const variantToRemove = {
          pushAppId: application.pushApplicationID,
          type: 'simplePush',
          variantId: variant.variantID
        };

        return client.variants.remove(variantToRemove);
      }).then(() => {
        // now remove the thing we created, we will test delete later on
        client.applications.remove(application.pushApplicationID);
        t.end();
      });
    });
  });
});

test('SimplePush remove - error - wrong variantID', (t) => {
  const upsClient = adminClient(baseUrl, settings);

  upsClient.then((client) => {
    // First we need to create an application to add a variant to
    client.applications.create({name: 'For SimplePush'}).then((application) => {
      const variantOptions = {
        pushAppId: application.pushApplicationID,
        name: 'SimplePush',
        type: 'simplePush'
      };

      client.variants.create(variantOptions).then((variant) => {
        const variantToRemove = {
          pushAppId: application.pushApplicationID,
          type: 'simplePush',
          variantId: 'NOT_RIGHT'
        };

        return client.variants.remove(variantToRemove);
      }).catch((error) => {
        if (error) {
          console.error(error);
        }
        // now remove the thing we created, we will test delete later on
        client.applications.remove(application.pushApplicationID);
        t.end();
      });
    });
  });
});
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""VNC Console Proxy Server."""

import sys

from oslo_config import cfg

from nova import config
from nova import objects
from nova.openstack.common import log as logging
from nova.openstack.common.report import guru_meditation_report as gmr
from nova import service
from nova import version

CONF = cfg.CONF


def main():
    config.parse_args(sys.argv)
    logging.setup("nova")
    objects.register_all()

    gmr.TextGuruMeditation.setup_autorun(version)

    server = service.Service.create(binary='nova-consoleauth',
                                    topic=CONF.consoleauth_topic)
    service.serve(server)
    service.wait()
#!/bin/env python3

# AUTHORS:
# Hakan Ozadam
#
# Moore Laboratory
# UMASS Medical School / HHMI
# RNA Therapeutics Institute
# Albert Sherman Center, ASC4-1009
# 368 Plantation Street
# Worcester, MA 01605
# USA
#
####################################################################

import argparse
import os
import sys

#####################################################################

def handle_arguments():
    parser = argparse.ArgumentParser(description=
    '''
    convert a given fasta file to a fastq file where the qualities will
    all be set to I
    ''')

    parser.add_argument("-i",
                        help="Input fasta file",
                        required=True,
                        metavar="input_fasta_file",
                        type=str)
    parser.add_argument("-o",
                        help="Output fastq file",
                        required=True,
                        metavar="output_fastq_file",
                        type=str)

    arguments = parser.parse_args()
    return arguments

#####################################################################

def main():
    arguments = handle_arguments()

    with FastaFile(arguments.i) as fasta_input_stream,\
         open(arguments.o, 'w') as fastq_output_file:
        for fasta_entry in fasta_input_stream:
            new_fastq_entry = FastqEntry(header="Read_" + fasta_entry.header,
                                         sequence=fasta_entry.sequence,
                                         plus='+',
                                         quality="I" * len(fasta_entry.sequence))
            print(new_fastq_entry, file=fastq_output_file)

#####################################################################

script_directory = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
bal_dir = os.path.split(os.path.split(script_directory)[0])[0]

if __name__ == '__main__':
    sys.path.append(bal_dir)
    from genomic_io.fasta import FastaFile, FastaEntry
    from genomic_io.fastq import FastqFile, FastqEntry
    main()
else:
    exit(1)

####################################################################
using Newtonsoft.Json;
using System;
using System.Net;

namespace CEXIO_API
{
    public class Balance
    {
        private const String url = "https://cex.io/api/balance/";

        public Int64 timestamp;

        /// <summary>
        /// referral program bonus
        /// </summary>
        public Double bonus = 0;

        public BalanceItem BTC;
        public BalanceItem GHS;
        public BalanceItem BF1;
        public BalanceItem NMC;
        public BalanceItem ICX;
        public BalanceItem DVC;

        public class BalanceItem
        {
            /// <summary>
            /// available balance
            /// </summary>
            public Double available = 0;

            /// <summary>
            /// balance in pending orders
            /// </summary>
            public Double orders = 0;
        }

        public static Balance GetBalance()
        {
            if (!CEXIO.CanRequest())
                return null;

            WebClient www = new WebClient();
            byte[] res = www.UploadValues(url, CEXIO.AuthHeader);
            String strRes = System.Text.Encoding.UTF8.GetString(res);

            String err = CEXIO.DetectError(strRes);
            if (!String.IsNullOrEmpty(err))
                return null;

            return Deserialize(strRes);
        }

        public static Balance Deserialize(String json)
        {
            return JsonConvert.DeserializeObject<Balance>(json);
        }
    }
}
# -*- coding: utf-8 -*-
"""Settings of SEO_LINK"""
from django.conf import settings as django_settings

DEBUG = getattr(django_settings, 'SEO_LINK_DEBUG', False)

# available backends
BACKEND = getattr(django_settings, 'SEO_LINK_BACKEND', 'seo_link.backends.simple.SimpleBackend')
#BACKEND = getattr(django_settings, 'SEO_LINK_BACKEND', 'seo_link.backends.simple.SimpleCachedBackend')
#BACKEND = getattr(django_settings, 'SEO_LINK_BACKEND', 'seo_link.backends.advanced.LXMLBackend')
#BACKEND = getattr(django_settings, 'SEO_LINK_BACKEND', 'seo_link.backends.advanced.LXMLCachedBackend')

# do not replace content that is inside these entities
# the snippets need to be wrapped in an html entity, otherwise double replacement is possible
IGNORE_HTML_ENTITIES = getattr(django_settings, 'SEO_LINK_IGNORE_HTML_ENTITIES',
                               ['head', 'style', 'script', 'javascript', 'a',
                                'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
                                'strong', 'b', 'i', 'span'])

# do not replace content that is inside these selectors
# one of the IGNORE_CSS_SELECTOR_* settings can be None
IGNORE_CSS_SELECTOR_CLASSES = getattr(django_settings, 'SEO_LINK_IGNORE_CSS_SELECTOR_CLASSES',
                                      ['nav', 'user-nav', 'footer'])
IGNORE_CSS_SELECTOR_IDS = getattr(django_settings, 'SEO_LINK_IGNORE_CSS_SELECTOR_IDS',
                                  ['cms_plugin_overlay', 'cms_toolbar_mini', 'cms_toolbar_col2',
                                   'cms_toolbar_pagebutton', 'cms_toolbar_settingsbutton'])

# one of the OPERATIONAL_CSS_SELECTOR_* settings can be None
# if OPERATIONAL_CSS_SELECTOR_CLASSES is None, work on all elements
# use this value to select the content area you want to operate on
OPERATIONAL_CSS_SELECTOR_CLASSES = getattr(django_settings, 'SEO_LINK_OPERATIONAL_CSS_SELECTOR_CLASSES',
                                           ['main'])

# only operate on these id elements
# if this one is None, it does not restrict
# use this value to select the content area you want to operate on
OPERATIONAL_CSS_SELECTOR_IDS = getattr(django_settings, 'SEO_LINK_OPERATIONAL_CSS_SELECTOR_IDS', None)
# ALL THE OPERATIONAL_CSS_* SETTINGS ARE COMBINED BY OR

# only operate on these entities
OPERATIONAL_HTML_ENTITIES = getattr(django_settings, 'SEO_LINK_OPERATIONAL_HTML_ENTITIES',
                                    ['div', 'p'])

# minimum word count
# operate only on terms that have at least this many words
# the replacement always starts with the highest word count
MIN_TERM_WORD_COUNT = getattr(django_settings, 'SEO_LINK_MIN_TERM_WORD_COUNT', 0)

# limit the injected replacement terms to a maximum per page
# if this is None, there is no restriction
MAX_DIFFERENT_TERM_REPLACMENT_PER_PAGE = getattr(django_settings,
                                                 'SEO_LINK_MAX_DIFFERENT_TERM_REPLACMENT_PER_PAGE', None)

# operate only when a user is not logged in
# if you want to use the test url in the backend, this setting needs to be set to False
# if it is True, no links are injected after a successful login
ACTIVE_ANONYMOUS_USER_ONLY = getattr(django_settings, 'SEO_LINK_ACTIVE_ANONYMOUS_USER_ONLY', False)

# replace the found term in the page only one time
REPLACE_ONLY_ONE_TIME_PER_TERM = getattr(django_settings, 'SEO_LINK_REPLACE_ONLY_ONE_TIME_PER_TERM', True)

# exclude paths where the app should not work
GLOBAL_EXCLUDE_PATHES = getattr(django_settings, 'SEO_LINK_GLOBAL_EXCLUDE_PATHES',
                                ['/admin',
                                 '/sentry',
                                 '/media',
                                 '/__debug__',
                                 '/search',
                                 '/uploads',
                                 '/static'])

# add a timer and timer output to the output
# turn this off for production
TIMER_ON = getattr(django_settings, 'SEO_LINK_TIMER_ON', False)

# "no processing" GET parameter
NO_PROCESSING_GET_PARAM = getattr(django_settings, 'SEO_LINK_NO_PROCESSING_GET_PARAM', 'no_seo_link')

# DAU user protection
NO_ROOT_PROCESSING = getattr(django_settings, 'SEO_LINK_NO_ROOT_PROCESSING', True)

# caching
CACHE_KEY_PREFIX = getattr(django_settings, 'SEO_LINK_CACHE_KEY_PREFIX', 'seo_link_')
CACHE_DURATION = getattr(django_settings, 'SEO_LINK_CACHE_DURATION', 60 * 60)

# ADMIN URL TEST feature
DUMP_TEST_URLS_FAILURES_TO_STATIC = getattr(django_settings, 'SEO_LINK_DUMP_TEST_URLS_FAILURES_TO_STATIC', True)

# ending without slash
# substitution option in admin
PREVIEW_TEST_URL_PREFIX = getattr(django_settings, 'SEO_LINK_PREVIEW_TEST_URL_PREFIX', "http://localhost:8000")

# sometimes the output is not parseable, so ignore and log exceptions?
# set this to True for production
IGNORE_EXCEPTIONS_ON = getattr(django_settings, 'SEO_LINK_IGNORE_EXCEPTIONS_ON', True)

# prettify the html before processing
# slows down but helps with broken html output
# default False
LXML_BEAUTIFULSOUP_PRETTIFY = getattr(django_settings, 'SEO_LINK_LXML_BEAUTIFULSOUP_PRETTIFY', False)

# if you have broken and unclean html,
# this cleans the html before processing
# default False
LXML_CLEANER_ON = getattr(django_settings, 'SEO_LINK_LXML_CLEANER_ON', False)
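
A short, hedged example of overriding these defaults from a Django project's settings.py; the values shown are purely illustrative.

# In the project's settings.py (illustrative values):
SEO_LINK_BACKEND = 'seo_link.backends.simple.SimpleBackend'
SEO_LINK_OPERATIONAL_CSS_SELECTOR_CLASSES = ['content', 'article-body']
SEO_LINK_MIN_TERM_WORD_COUNT = 2
SEO_LINK_GLOBAL_EXCLUDE_PATHES = ['/admin', '/api', '/static']
SEO_LINK_ACTIVE_ANONYMOUS_USER_ONLY = True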
/**
 * JavaScript behaviors for the front-end display of webforms.
 */

(function ($) {

  Drupal.behaviors.webform = function(context) {
    // Calendar datepicker behavior.
    Drupal.webform.datepicker(context);
  };

  Drupal.webform = Drupal.webform || {};

  Drupal.webform.datepicker = function(context) {
    $('div.webform-datepicker').each(function() {
      var $webformDatepicker = $(this);
      var $calendar = $webformDatepicker.find('input.webform-calendar');

      var startYear = $calendar[0].className.replace(/.*webform-calendar-start-(\d+).*/, '$1');
      var endYear = $calendar[0].className.replace(/.*webform-calendar-end-(\d+).*/, '$1');
      var firstDay = $calendar[0].className.replace(/.*webform-calendar-day-(\d).*/, '$1');

      // Ensure that start comes before end for datepicker.
      if (startYear > endYear) {
        var greaterYear = startYear;
        startYear = endYear;
        endYear = greaterYear;
      }

      // Set up the jQuery datepicker element.
      $calendar.datepicker({
        dateFormat: 'yy-mm-dd',
        yearRange: startYear + ':' + endYear,
        firstDay: parseInt(firstDay),
        onSelect: function(dateText, inst) {
          var date = dateText.split('-');
          $webformDatepicker.find('select.year, input.year').val(+date[0]);
          $webformDatepicker.find('select.month').val(+date[1]);
          $webformDatepicker.find('select.day').val(+date[2]);
        },
        beforeShow: function(input, inst) {
          // Get the select list values.
          var year = $webformDatepicker.find('select.year, input.year').val();
          var month = $webformDatepicker.find('select.month').val();
          var day = $webformDatepicker.find('select.day').val();

          // If empty, default to the current year/month/day in the popup.
          var today = new Date();
          year = year ? year : today.getFullYear();
          month = month ? month : today.getMonth() + 1;
          day = day ? day : today.getDate();

          // Make sure that the default year fits in the available options.
          year = (year < startYear || year > endYear) ? startYear : year;

          // jQuery UI Datepicker will read the input field and base its date off
          // of that, even though in our case the input field is a button.
          $(input).val(year + '-' + month + '-' + day);
        }
      });

      // Prevent the calendar button from submitting the form.
      $calendar.click(function(event) {
        $(this).focus();
        event.preventDefault();
      });
    });
  };

})(jQuery);
/*
    Log trace triaging and etc.
    Copyright (C) 2016  Sylvain Hallé

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU Lesser General Public License as published
    by the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package ca.uqac.lif.ecp.ltl;

public class Constant<T,U> extends ConcreteValue<T,U>
{
    /**
     * The value
     */
    protected final U m_value;

    /**
     * Creates a new constant
     * @param value The value of this constant
     */
    public Constant(U value)
    {
        super();
        m_value = value;
    }

    @Override
    public final U getValue(T structure)
    {
        return m_value;
    }
}
package org.flowable.common.rest.filter;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class JsonpFilter {

    protected void afterHandle(HttpServletRequest request, HttpServletResponse response) {
        /*
         * String jsonp = request.getResourceRef().getQueryAsForm().getFirstValue("callback");
         *
         * if (jsonp != null) {
         *     StringBuilder stringBuilder = new StringBuilder(jsonp);
         *     stringBuilder.append("(");
         *
         *     if ((response.getStatus().getCode() >= 300)) {
         *         stringBuilder.append("{code:");
         *         stringBuilder.append(response.getStatus().getCode());
         *         stringBuilder.append(",msg:'");
         *         stringBuilder.append(response.getStatus().getDescription().replace("'", "\\'"));
         *         stringBuilder.append("'}");
         *         response.setStatus(Status.SUCCESS_OK);
         *     } else {
         *         Representation representation = response.getEntity();
         *         if (representation != null) {
         *             try {
         *                 InputStream is = representation.getStream();
         *                 if (is != null) {
         *                     ByteArrayOutputStream bos = new ByteArrayOutputStream();
         *                     byte[] buf = new byte[0x10000];
         *                     int len;
         *                     while ((len = is.read(buf)) > 0) {
         *                         bos.write(buf, 0, len);
         *                     }
         *                     stringBuilder.append(bos.toString("UTF-8"));
         *                 } else {
         *                     response.setStatus(Status.SERVER_ERROR_INTERNAL, "NullPointer in Jsonp filter");
         *                 }
         *             } catch (IOException e) {
         *                 response.setStatus(Status.SERVER_ERROR_INTERNAL, e.getMessage());
         *             }
         *         }
         *     }
         *
         *     stringBuilder.append(");");
         *     response.setEntity(new StringRepresentation(stringBuilder.toString(), MediaType.TEXT_JAVASCRIPT));
         * }
         */
    }
}
# -*- coding: utf-8 -*-
'''
Copyright (C) 2013 onwards University of Deusto
All rights reserved.

This software is licensed as described in the file COPYING, which
you should have received as part of this distribution.

This software consists of contributions made by many individuals,
listed below:

@author: Aitor Gómez Goiri <[email protected]>
'''

from tempfile import mkstemp
from actuation.api import Node
from actuation.api.space import AbstractSubscriptionObserver, AbstractCallback
from actuation.impl.rest.mock.agents import Crawler, PlanAchiever
from actuation.impl.space import SPARQLSubscriptionTemplate
from actuation.proofs.plan import PlanFactory
from actuation.utils.conversors import QueryLanguageConversor


class IntermediaryAgent(Node, AbstractSubscriptionObserver, AbstractCallback):
    """
    This agent intermediates between the space and the REST approach.
    It resides in the same machine as the space.
    """

    def __init__(self, space, input_folder, output_folder, reasoner, discovery):
        super(IntermediaryAgent, self).__init__()
        self._goals = []
        self._space = space
        self._discovery = discovery
        self.output_folder = output_folder
        self.crawler = Crawler(discovery)
        self.lgraph_factory = PlanFactory(output_folder, reasoner)
        self._generic_template_for_preference_fp = input_folder + "generic_task_subscription.sparql"
        self._all_kb_fp = self.output_folder + "all_knowledge_base.n3"

    def notify_subscription(self, template):
        """Intercepts subscription to use it as a goal."""
        # priority for the space-based approach?
        if isinstance(template, SPARQLSubscriptionTemplate):
            # should it capture all the templates or just a subset?
            self._goals.append(self._write_n3_goal_in_file(template.query))

    def _write_n3_goal_in_file(self, sparql_query):
        """
        @param sparql_query: Query in SPARQL format.
        @return: The file created by the method which contains the equivalent N3QL goal.
        """
        pathname = mkstemp(suffix=".n3", prefix="goal_", dir=self.output_folder)[1]
        with open(pathname, "w") as goal_file:
            goal_file.write(QueryLanguageConversor.sparql_to_n3ql(sparql_query))
        return pathname

    def start(self):
        self.crawler.update()  # TODO when?
        with open(self._generic_template_for_preference_fp, "r") as subscription_file:
            st = SPARQLSubscriptionTemplate(subscription_file.read())
            self._space.subscribe(st, self)
        # after subscribing myself, otherwise I get the notification of my own subscription!
        self._space.add_subscription_observer(self)

    def stop(self):
        pass

    def call(self):
        for query_goal_path in self._goals:
            all_knowledge = set()
            all_knowledge.add(self._space_to_file())
            all_knowledge = all_knowledge.union(self.crawler.descriptions)
            all_knowledge = all_knowledge.union(self.crawler.base_knowledge)

            lgraph = self.lgraph_factory.create(query_goal_path, all_knowledge)
            pa = PlanAchiever(lgraph, self._discovery)
            pa.achieve()
            # TODO write the responses in the space
            # (to let the consumer know that the effect has taken place)

    def _space_to_file(self):
        """
        @return: A filepath with the whole space serialized.
        """
        # ugly as hell!
        # in the long term, another solution should be found
        self._space._da.get_space(None).graphs.serialize(self._all_kb_fp, format="n3")
        # and let it be whatever God wills ;-)
        return self._all_kb_fp
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

"""
URLs for the Shaarli module.
"""

from django.conf.urls import url

from plinth.views import ServiceView
from plinth.modules import shaarli

urlpatterns = [
    url(r'^apps/shaarli/$',
        ServiceView.as_view(
            service_id="shaarli",
            description=shaarli.description,
            show_status_block=False,
        ),
        name='index'),
]
/**
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   This file is part of the LDP4j Project:
 *     http://www.ldp4j.org/
 *
 *   Center for Open Middleware
 *     http://www.centeropenmiddleware.com/
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Copyright (C) 2014-2016 Center for Open Middleware.
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *             http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Artifact    : org.ldp4j.framework:ldp4j-application-data:0.2.2
 *   Bundle      : ldp4j-application-data-0.2.2.jar
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 */
package org.ldp4j.application.data.validation;

import java.util.Collection;

public interface ValidationReport {

    boolean isValid();

    Collection<ValidationFailure> validationFailures();

}
class Param:
    def __init__(self, parent, name, doc, defaultValue):
        self.parent = parent
        self.name = name
        self.doc = doc
        self.defaultValue = defaultValue

    def withValue(self, value):
        return ParamPair(self, value)

    def __str__(self):
        return "{0}/{1}: {2} (default: {3})".format(self.parent, self.name, self.doc, self.defaultValue)

    def __repr__(self):
        return "{0}/{1}".format(self.parent, self.name)


class ParamPair:
    def __init__(self, param, value):
        assert isinstance(param, Param)
        self.param = param
        self.value = value


class ParamMap:
    def __init__(self):
        self.params = {}

    def put(self, param, value):
        self.params[param] = value
        return self

    def getOrDefault(self, param):
        return self.params[param] if param in self.params else param.defaultValue

    def copy(self):
        newMap = ParamMap()
        newMap.params = self.params.copy()
        return newMap

    def __repr__(self):
        return self.params.__repr__()


class ParamGridBuilder:
    def __init__(self):
        self.paramGrid = {}

    def add(self, param, value):
        return self.addMulti(param, [value, ])

    def addMulti(self, param, values):
        self.paramGrid[param] = values
        return self

    def build(self):
        paramMaps = [ParamMap(), ]
        for (param, values) in self.paramGrid.items():
            newParamMaps = []
            for paramMap in paramMaps:
                for v in values:
                    newParamMap = paramMap.copy()
                    newParamMap.put(param, v)
                    newParamMaps.append(newParamMap)
            paramMaps = newParamMaps
        return paramMaps
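
A small usage sketch for the grid builder above; the parameter names and values are made up for illustration.

# Illustrative: build a 2x3 grid of ParamMaps from two hypothetical parameters.
max_depth = Param("tree", "maxDepth", "maximum tree depth", 5)
step_size = Param("tree", "stepSize", "learning rate", 0.1)

grid = (ParamGridBuilder()
        .addMulti(max_depth, [3, 5])
        .addMulti(step_size, [0.01, 0.1, 0.3])
        .build())

assert len(grid) == 6
for param_map in grid:
    print(param_map.getOrDefault(max_depth), param_map.getOrDefault(step_size))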
#import <SSignalKit/SDisposable.h>

@class SSignal;

@interface SDisposableSet : NSObject <SDisposable>

- (void)add:(id<SDisposable>)disposable;
- (void)remove:(id<SDisposable>)disposable;

@end
# Copyright (C) 2010-2014 CEA/DEN, EDF R&D # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # See http://www.salome-platform.org/ or email : [email protected] # # This case corresponds to: /visu/IsoSurfaces/A6 case # Create Iso Surface for all data of the given MED file import sys from paravistest import datadir, pictureext, get_picture_dir from presentations import CreatePrsForFile, PrsTypeEnum import pvserver as paravis # Create presentations myParavis = paravis.myParavis # Directory for saving snapshots picturedir = get_picture_dir("IsoSurfaces/A6") file = datadir + "clo.med" print " --------------------------------- " print "file ", file print " --------------------------------- " print "CreatePrsForFile..." CreatePrsForFile(myParavis, file, [PrsTypeEnum.ISOSURFACES], picturedir, pictureext)
(function(window, angular, undefined) {

'use strict';

/**
 * @ngdoc overview
 * @name angulartics.facebook.pixel
 * Enables analytics support for Facebook Pixel (https://www.facebook.com/business/a/online-sales/custom-audiences-website)
 */

angular.module('angulartics.facebook.pixel', ['angulartics'])
  .config(['$analyticsProvider', function ($analyticsProvider) {

    // Pixel already supports buffered invocations so we don't need
    // to wrap these inside angulartics.waitForVendorApi

    $analyticsProvider.settings.pageTracking.trackRelativePath = true;

    // Pixel doesn't provide a way to attach user information to the analytics
    // session, so no default settings need to be configured here.

    // Pixel has built-in `pushState`, `replaceState` and `popstate` handlers which send PageViews,
    // and it has supported hash based routes (hashbang locations) since Apr 04, 2016,
    // so registering pageview handlers through the `$analyticsProvider.registerPageTrack` API
    // is no longer strictly necessary.
    // @todo Remove `$analyticsProvider.registerPageTrack` API

    $analyticsProvider.registerPageTrack(function (path) {
      if (window.fbq) {
        fbq('track', 'PageView');
      }
    });

    $analyticsProvider.registerEventTrack(function (action, properties) {
      properties = properties || {};

      var eventList = [
        'ViewContent', 'Search', 'AddToCart', 'AddToWishlist', 'InitiateCheckout',
        'AddPaymentInfo', 'Purchase', 'Lead', 'CompleteRegistration'];

      if (window.fbq) {
        eventList.indexOf(action) === -1
          ? fbq('trackCustom', action, properties)
          : fbq('track', action, properties);
      }
    });

  }]);

})(window, window.angular);
"""Utility functions for tests.""" import json from multiprocessing import Process from scrapy.crawler import CrawlerProcess from scrapy.http import TextResponse, Request def response_for_content(content, encoding, url='http://example.com', metadata=None): """Create a Scrapy Response containing the content. This function is used for unit-testing to verify that spiders can parse the contents provided. Args: content (str): the contents of the response. encoding (str): the character encoding of the content, e.g. 'utf-8'. Kwargs: url (str): the URL from the request that created the response. metadata (dict): parameters to pass to the response. Returns: TextResponse. A scrapy response object. """ request = Request(url=url, meta=metadata) return TextResponse(url=url, request=request, body=content, encoding=encoding) def response_for_data(data, url='http://example.com', metadata=None): """Create a Scrapy Response for the json encode-able data. This function is used for unit-testing to verify that spiders can parse the JSON encode-able data provided. Args: data (list): the contents of the response. Kwargs: url (str): the URL from the request that created the response. metadata (dict): parameters to pass to the response. Returns: TextResponse. A scrapy response object. """ content = json.dumps(data) encoding = 'utf-8' return response_for_content(content, encoding, url=url, metadata=metadata) class RunCrawler(): """RunCrawler runs a crawler in a separate process. Useful sources: https://groups.google.com/forum/?fromgroups#!topic/scrapy-users/8zL8W3SdQBo http://stackoverflow.com/questions/13437402/how-to-run-scrapy-from-within-a-python-script """ def __init__(self, settings): self.crawler = CrawlerProcess(settings) self.crawler.configure() def _crawl(self, spider): self.crawler.crawl(spider) self.crawler.start() self.crawler.stop() def crawl(self, spider): p = Process(target=self._crawl, args=(spider,)) p.start() p.join()
__author__ = 'Manos' from pymongo import MongoClient mongo_client=MongoClient('mongodb://localhost:27017/') db=mongo_client.mydb print(mongo_client) print(db) print("connected") # # locs=[{ # "locs" : [{"lng": "$geo_lng" , "lat":"$geo_lat" } ] # }] # # locs=db.things.save(locs) # # geolocation=[ # {"$project": { "geolocation": {"$concat": ["$geo_lat", " , ", "$geo_lng" ] } } } # ] # result = db.things.aggregate(geolocation) # # print(result) # # querry8= [ # {"loc": {["$geo_lat","$geo_lng"]}}, # { # "$geoWithin":{ # "$geometry": {"type" : "Polygon" , # "coordinates":[ [ [0, 0], [ 3, 6 ], [ 6, 1 ], [ 0, 0 ] ] ] # } # } # } # # ] # print= db.things.find(querry8) # # print("question 1") # distinct_count = db.things.distinct('geo_lng') # print("The number of the distinct lat:" ) # print(len(distinct_count)) # result = db.things.aggregate( # [ # {"$group": { "_id": {"lang": "$geo_lng", "lat": "$geo_lat" } } } # ] # ) # print(db.things.find(result))
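The commented-out attempts above seem to aim at a distinct count and a (lng, lat) grouping; a small, hedged sketch of those queries using standard PyMongo calls follows. The things collection and its geo_lng/geo_lat fields are taken from the comments, and the $count stage assumes MongoDB 3.4 or newer.

# Hedged sketch: count distinct longitudes, then group documents by their
# (lng, lat) pair and count the distinct points, reusing the `db` handle above.
distinct_lng = db.things.distinct('geo_lng')
print("Number of distinct geo_lng values:", len(distinct_lng))

pipeline = [
    {"$group": {"_id": {"lng": "$geo_lng", "lat": "$geo_lat"}}},
    {"$count": "distinct_points"},  # requires MongoDB 3.4+
]
for doc in db.things.aggregate(pipeline):
    print(doc)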
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.hadoop; import java.io.Serializable; import java.util.LinkedHashSet; import java.util.Objects; import java.util.Set; /** * User can add required columns */ public class CarbonProjection implements Serializable { private static final long serialVersionUID = -4328676723039530713L; private Set<String> columns = new LinkedHashSet<>(); public CarbonProjection() { } public CarbonProjection(String[] columnNames) { Objects.requireNonNull(columnNames); for (String columnName : columnNames) { columns.add(columnName); } } public void addColumn(String column) { columns.add(column); } public String[] getAllColumns() { return columns.toArray(new String[columns.size()]); } public boolean isEmpty() { return columns.isEmpty(); } }
/** * @file llmap.h * @brief LLMap class header file * * $LicenseInfo:firstyear=2001&license=viewergpl$ * * Copyright (c) 2001-2009, Linden Research, Inc. * * Second Life Viewer Source Code * The source code in this file ("Source Code") is provided by Linden Lab * to you under the terms of the GNU General Public License, version 2.0 * ("GPL"), unless you have obtained a separate licensing agreement * ("Other License"), formally executed by you and Linden Lab. Terms of * the GPL can be found in doc/GPL-license.txt in this distribution, or * online at http://secondlifegrid.net/programs/open_source/licensing/gplv2 * * There are special exceptions to the terms and conditions of the GPL as * it is applied to this Source Code. View the full text of the exception * in the file doc/FLOSS-exception.txt in this software distribution, or * online at * http://secondlifegrid.net/programs/open_source/licensing/flossexception * * By copying, modifying or distributing this software, you acknowledge * that you have read and understood your obligations described above, * and agree to abide by those obligations. * * ALL LINDEN LAB SOURCE CODE IS PROVIDED "AS IS." LINDEN LAB MAKES NO * WARRANTIES, EXPRESS, IMPLIED OR OTHERWISE, REGARDING ITS ACCURACY, * COMPLETENESS OR PERFORMANCE. * $/LicenseInfo$ */ #ifndef LL_LLMAP_H #define LL_LLMAP_H // llmap uses the fast stl library code in a manner consistant with LLSkipMap, et. al. template<class INDEX_TYPE, class MAPPED_TYPE> class LLMap { private: typedef typename std::map<INDEX_TYPE, MAPPED_TYPE> stl_map_t; typedef typename stl_map_t::iterator stl_iter_t; typedef typename stl_map_t::value_type stl_value_t; stl_map_t mStlMap; stl_iter_t mCurIter; // *iterator = pair<const INDEX_TYPE, MAPPED_TYPE> MAPPED_TYPE dummy_data; INDEX_TYPE dummy_index; public: LLMap() : mStlMap() { memset((void*)(&dummy_data), 0x0, sizeof(MAPPED_TYPE)); memset((void*)(&dummy_index), 0x0, sizeof(INDEX_TYPE)); mCurIter = mStlMap.begin(); } ~LLMap() { mStlMap.clear(); } // use these functions to itterate through a list void resetMap() { mCurIter = mStlMap.begin(); } // get the current data and bump mCurrentp // This is kind of screwy since it returns a reference; // We have to have a dummy value for when we reach the end // or in case we have an empty list. Presumably, this value // will initialize to some NULL value that will end the iterator. // We really shouldn't be using getNextData() or getNextKey() anyway... 
MAPPED_TYPE &getNextData() { if (mCurIter == mStlMap.end()) { return dummy_data; } else { return (*mCurIter++).second; } } const INDEX_TYPE &getNextKey() { if (mCurIter == mStlMap.end()) { return dummy_index; } else { return (*mCurIter++).first; } } MAPPED_TYPE &getFirstData() { resetMap(); return getNextData(); } const INDEX_TYPE &getFirstKey() { resetMap(); return getNextKey(); } S32 getLength() { return mStlMap.size(); } void addData(const INDEX_TYPE &index, MAPPED_TYPE pointed_to) { mStlMap.insert(stl_value_t(index, pointed_to)); } void addData(const INDEX_TYPE &index) { mStlMap.insert(stl_value_t(index, dummy_data)); } // if index doesn't exist, then insert a new node and return it MAPPED_TYPE &getData(const INDEX_TYPE &index) { std::pair<stl_iter_t, bool> res; res = mStlMap.insert(stl_value_t(index, dummy_data)); return res.first->second; } // if index doesn't exist, then insert a new node, return it, and set b_new_entry to true MAPPED_TYPE &getData(const INDEX_TYPE &index, BOOL &b_new_entry) { std::pair<stl_iter_t, bool> res; res = mStlMap.insert(stl_value_t(index, dummy_data)); b_new_entry = res.second; return res.first->second; } // If there, returns the data. // If not, returns NULL. // Never adds entries to the map. MAPPED_TYPE getIfThere(const INDEX_TYPE &index) { stl_iter_t iter; iter = mStlMap.find(index); if (iter == mStlMap.end()) { return (MAPPED_TYPE)0; } else { return (*iter).second; } } // if index doesn't exist, then make a new node and return it MAPPED_TYPE &operator[](const INDEX_TYPE &index) { return getData(index); } // do a reverse look-up, return NULL if failed INDEX_TYPE reverseLookup(const MAPPED_TYPE data) { stl_iter_t iter; stl_iter_t end_iter; iter = mStlMap.begin(); end_iter = mStlMap.end(); while (iter != end_iter) { if ((*iter).second == data) return (*iter).first; iter++; } return (INDEX_TYPE)0; } BOOL removeData(const INDEX_TYPE &index) { mCurIter = mStlMap.find(index); if (mCurIter == mStlMap.end()) { return FALSE; } else { stl_iter_t iter = mCurIter++; // incrament mCurIter to the next element mStlMap.erase(iter); return TRUE; } } // does this index exist? BOOL checkData(const INDEX_TYPE &index) { stl_iter_t iter; iter = mStlMap.find(index); if (iter == mStlMap.end()) { return FALSE; } else { mCurIter = iter; return TRUE; } } BOOL deleteData(const INDEX_TYPE &index) { mCurIter = mStlMap.find(index); if (mCurIter == mStlMap.end()) { return FALSE; } else { stl_iter_t iter = mCurIter++; // incrament mCurIter to the next element delete (*iter).second; mStlMap.erase(iter); return TRUE; } } void deleteAllData() { stl_iter_t iter; stl_iter_t end_iter; iter = mStlMap.begin(); end_iter = mStlMap.end(); while (iter != end_iter) { delete (*iter).second; iter++; } mStlMap.clear(); mCurIter = mStlMap.end(); } void removeAllData() { mStlMap.clear(); } }; #endif
package com.lanceolata.leetcode;

public class Question_0025_Reverse_Nodes_in_k_Group {

    // Definition for singly-linked list.
    public class ListNode {
        int val;
        ListNode next;
        ListNode(int x) {
            val = x;
            next = null;
        }
    }

    public ListNode reverseKGroup(ListNode head, int k) {
        ListNode dummy = new ListNode(0);
        dummy.next = head;
        ListNode pre = dummy;
        int cnt = 0;
        while (head != null) {
            cnt++;
            if (cnt % k == 0) {
                pre = reverse(pre, head.next);
                // note: move head to the tail of the reversed group
                head = pre;
            }
            head = head.next;
        }
        return dummy.next;
    }

    private ListNode reverse(ListNode pre, ListNode next) {
        ListNode last = pre.next, cur = pre.next.next;
        while (cur != next) {
            last.next = cur.next;
            cur.next = pre.next;
            pre.next = cur;
            cur = last.next;
        }
        return last;
    }
}
#!/usr/bin/env python # Copyright (C) 2010 Ludovic Rousseau <[email protected]> # # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from __future__ import print_function import PyKCS11.LowLevel a = PyKCS11.LowLevel.CPKCS11Lib() info = PyKCS11.LowLevel.CK_INFO() slotInfo = PyKCS11.LowLevel.CK_SLOT_INFO() lib = "/usr/lib/pkcs11/opensc-pkcs11.so" session = PyKCS11.LowLevel.CK_SESSION_HANDLE() slotList = PyKCS11.LowLevel.ckintlist() rand = PyKCS11.LowLevel.ckbytelist(20) seed = PyKCS11.LowLevel.ckbytelist(5) print("Load of " + lib + ": " + str(a.Load(lib, 1))) print("C_GetInfo: " + hex(a.C_GetInfo(info))) print("Library manufacturerID: " + info.GetManufacturerID()) del info print("C_GetSlotList(NULL): " + hex(a.C_GetSlotList(0, slotList))) print("\tAvailable Slots: " + str(len(slotList))) print("C_OpenSession(): " + hex(a.C_OpenSession(slotList[1], PyKCS11.LowLevel.CKF_RW_SESSION | PyKCS11.LowLevel.CKF_SERIAL_SESSION, session))) print(' '.join("%02X" % i for i in seed)) print("C_SeedRandom(): " + hex(a.C_SeedRandom(session, seed))) print("C_GenerateRandom(): " + hex(a.C_GenerateRandom(session, rand))) print(' '.join("%02X" % i for i in rand)) print("C_CloseSession(): " + hex(a.C_CloseSession(session))) print("C_Finalize(): " + hex(a.C_Finalize())) print(a.Unload())
import {Component, OnInit} from '@angular/core'; import {take} from 'rxjs/operators'; import {Router} from '@angular/router'; import {AuthService} from '../auth.service'; import {MessageService} from 'primeng/api'; import {noop} from 'rxjs'; @Component({ selector: 'app-totp', templateUrl: './totp.component.html', styleUrls: ['./totp.component.css'] }) export class TotpComponent implements OnInit { constructor(private readonly router: Router, private readonly messageService: MessageService, private readonly authService: AuthService) { } ngOnInit(): void { // are we in the correct phase this.authService.authentication$.pipe(take(1)).subscribe(flow => { if (flow === 'AUTHENTICATED') { this.router.navigate(['home'], {replaceUrl: true}); } else if (flow !== 'TOTP') { this.router.navigate(['signin'], {replaceUrl: true}); } }); } async verifyTotp(code: string): Promise<void> { this.authService.verifyTotp(code).subscribe(noop, err => this.handleError(err)); } // eslint-disable-next-line @typescript-eslint/no-explicit-any async handleError(error: any): Promise<void> { let message: string; if (typeof error === 'string') { message = error; } else { message = `Unexpected error: ${error.statusText}`; } this.messageService.add({key: 'tst', severity: 'error', summary: 'Error', detail: message}); } }
/* ------------------------------------------------------------- application.h (part of The KDE Dictionary Client) Copyright (C) 2000-2001 Christian Gebauer <[email protected]> This file is distributed under the Artistic License. See LICENSE for details. ------------------------------------------------------------- */ #ifndef APPLICATION_H #define APPLICATION_H #include <kuniqueapplication.h> #include <qguardedptr.h> #define KDICT_VERSION "0.6" class TopLevel; class Application : public KUniqueApplication { Q_OBJECT public: Application(); ~Application(); /** Create new instance of Kdict. Make the existing main window active if Kdict is already running */ int newInstance(); private: QGuardedPtr<TopLevel> m_mainWindow; }; #endif
import os from aiodownload.util import clean_filename, make_dirs, default_url_transform def test_clean_filename(): sanitized_filename = clean_filename('français.txt') assert sanitized_filename == 'francais.txt' def test_make_dirs(tmpdir): test_path = os.path.sep.join([tmpdir.strpath, 'test', 'make', 'dir']) make_dirs(os.path.sep.join([test_path, 'mock.txt'])) make_dirs(os.path.sep.join([test_path, 'mock.txt'])) # induces OSError which is passed (for test coverage) assert os.path.isdir(test_path) def test_default_url_transformation_root_domain(): transformed_url = default_url_transform('https://httpbin.org') assert transformed_url == os.path.sep.join(['httpbin.org', 'index']) def test_default_url_transformation_with_query_string(): transformed_url = default_url_transform('https://httpbin.org/drip?duration=5&numbytes=5&code=200') assert transformed_url == os.path.sep.join(['httpbin.org', 'drip_duration_5-numbytes_5-code_200']) def test_default_url_transformation_with_params(): transformed_url = default_url_transform('http://test.example.com/admin/Servlet;jsessionid=01A2B3C4D5E6F7GH') assert transformed_url == os.path.sep.join(['test.example.com', 'admin', 'Servlet(jsessionid_01A2B3C4D5E6F7GH)'])
#! /usr/bin/env python3 """ Design a data structure that supports all following operations in average O(1) time. insert(val): Inserts an item val to the set if not already present. remove(val): Removes an item val from the set if present. random_element: Returns a random element from current set of elements. Each element must have the same probability of being returned. """ import random class RandomizedSet(): """ idea: shoot """ def __init__(self): self.elements = [] self.index_map = {} # element -> index def insert(self, new_one): if new_one in self.index_map: return self.index_map[new_one] = len(self.elements) self.elements.append(new_one) def remove(self, old_one): if not old_one in self.index_map: return index = self.index_map[old_one] last = self.elements.pop() self.index_map.pop(old_one) if index == len(self.elements): return self.elements[index] = last self.index_map[last] = index def random_element(self): return random.choice(self.elements) def __test(): rset = RandomizedSet() ground_truth = set() n = 64 for i in range(n): rset.insert(i) ground_truth.add(i) # Remove a half for i in random.sample(range(n), n // 2): rset.remove(i) ground_truth.remove(i) print(len(ground_truth), len(rset.elements), len(rset.index_map)) for i in ground_truth: assert(i == rset.elements[rset.index_map[i]]) for i in range(n): print(rset.random_element(), end=' ') print() if __name__ == "__main__": __test()
<?php global $shortname; if (is_front_page()) echo('<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jqueryui/1.7.1/jquery-ui.min.js"></script> <script type="text/javascript">jQuery("div#from-blog").tabs({ fx: { opacity: "toggle" } });</script>'); //on Homepage; Featured slider is activated if (is_front_page() && (get_option($shortname.'_featured')=='on')) { ?> <script type="text/javascript" src="<?php echo get_template_directory_uri(); ?>/js/jquery.cycle.all.min.js"></script> <script type="text/javascript" src="<?php echo get_template_directory_uri(); ?>/js/jquery.easing.1.3.js"></script> <?php }; ?> <script type="text/javascript" src="<?php echo get_template_directory_uri(); ?>/js/superfish.js"></script> <script type="text/javascript" src="<?php echo get_template_directory_uri(); ?>/js/init.js"></script> <script type="text/javascript"> //<![CDATA[ jQuery(".js div#from-blog div.entries").show(); //prevents a flash of unstyled content <?php //on Homepage; Featured slider is activated if (is_front_page() && (get_option($shortname.'_featured')=='on')) { ?> jQuery(window).load( function(){ jQuery('#feat-content').css( 'backgroundImage', 'none' ).cycle({ timeout: 0, speed: 300, cleartypeNoBg: true, fx: '<?php echo esc_js(get_option($shortname.'_slider_effect')); ?>' }); var $featured_area = jQuery('#featured-slider'); var $featured_item = jQuery('#featured-area div.featitem'); var $slider_control = jQuery('ul#slider-control'); //tabs var $slider_control_tab = jQuery('ul#slider-control li'); var ordernum; var pause_scroll = false; if ( $featured_item.length == 1 ){ $featured_item.css({'position':'absolute','top':'31px','left':'45px'}).show(); } <?php if (get_option($shortname.'_pause_hover') == 'on') { ?> $featured_area.mouseover(function(){ pause_scroll = true; }).mouseout(function(){ pause_scroll = false; }); <?php }; ?> function gonext(this_element){ $slider_control.children("li.active").removeClass('active'); this_element.addClass('active'); ordernum = this_element.prevAll().length+1; jQuery('#feat-content').cycle(ordernum - 1); } $slider_control_tab.click(function() { clearInterval(interval); gonext(jQuery(this)); return false; }); jQuery('#featured-area a#prevlink, #featured-area a#nextlink').click(function() { clearInterval(interval); if (jQuery(this).attr("id") === 'nextlink') { auto_number = $slider_control.children("li.active").prevAll().length+1; if (auto_number === $slider_control_tab.length) auto_number = 0; } else { auto_number = $slider_control.children("li.active").prevAll().length-1; if (auto_number === -1) auto_number = $slider_control_tab.length-1; }; gonext($slider_control_tab.eq(auto_number)); return false; }); var auto_number; var interval; $slider_control_tab.bind('autonext', function autonext(){ if (!(pause_scroll)) gonext(jQuery(this)); return false; }); <?php if (get_option($shortname.'_slider_auto') == 'on') { ?> interval = setInterval(function(){ auto_number = $slider_control.children("li.active").prevAll().length+1; if (auto_number === $slider_control_tab.length) auto_number = 0; $slider_control_tab.eq(auto_number).trigger('autonext'); }, <?php echo esc_js(get_option($shortname.'_slider_autospeed')); ?>); <?php }; ?> } ); <?php }; ?> <?php if (get_option($shortname.'_disable_toptier') == 'on') echo('jQuery("ul.nav > li > ul").prev("a").attr("href","#");'); ?> //]]> </script>
import { BrowserModule } from '@angular/platform-browser'; import { NgModule } from '@angular/core'; import { AppComponent } from './app.component'; import { BrainSocketService } from './services/brain-socket.service'; import { StatusComponent } from './status/status.component'; import { EegDataComponent } from './eeg-data/eeg-data.component'; import { ESenseDataComponent } from './e-sense-data/e-sense-data.component'; @NgModule({ declarations: [ AppComponent, StatusComponent, EegDataComponent, ESenseDataComponent, ], imports: [ BrowserModule, ], providers: [ BrainSocketService ], bootstrap: [AppComponent] }) export class AppModule { }
# -*- coding: utf-8 -*- from twisted.trial import unittest from twisted.web.error import Error import requests from scrapyrt.resources import CrawlResource from .servers import ScrapyrtTestServer, MockServer class TestCrawlResource(unittest.TestCase): def test_is_leaf(self): self.assertTrue(CrawlResource.isLeaf) class TestCrawlResourceGetRequiredArgument(unittest.TestCase): def setUp(self): self.resource = CrawlResource() self.url = 'http://localhost:1234' self.data = {'url': self.url} def test_get_argument(self): self.assertEqual( self.resource.get_required_argument(self.data, 'url'), self.url) def test_raise_error(self): exception = self.assertRaises( Error, self.resource.get_required_argument, self.data, 'key') self.assertEqual(exception.status, '400') def test_empty_argument(self): self.data['url'] = '' exception = self.assertRaises( Error, self.resource.get_required_argument, self.data, 'url') self.assertEqual(exception.status, '400') class TestCrawlResourceIntegration(unittest.TestCase): def setUp(self): self.server = ScrapyrtTestServer() self.server.start() self.crawl_url = self.server.url('crawl.json') self.site = MockServer() self.site.start() self.site_url = self.site.url('page1.html') self.spider_name = 'test' def tearDown(self): if not self._passed: print self.server._non_block_read(self.server.proc.stderr) self.server.stop() self.site.stop() def test_no_parameters(self): res = requests.get(self.crawl_url) assert res.status_code == 400 res_json = res.json() expected_result = {u'status': u'error', u'code': 400} self.assertDictContainsSubset(expected_result, res_json) assert 'url' in res_json['message'] def test_no_url(self): res = requests.get( self.crawl_url, params={ 'spider_name': self.spider_name } ) assert res.status_code == 400 expected_result = { u'status': u'error', u'code': 400 } res_json = res.json() self.assertDictContainsSubset(expected_result, res_json) assert 'url' in res_json['message'] def test_no_spider_name(self): res = requests.get( self.crawl_url, params={ 'url': self.site_url, } ) assert res.status_code == 400 res_json = res.json() expected_result = { u'status': u'error', u'code': 400 } self.assertDictContainsSubset(expected_result, res_json) assert 'spider_name' in res_json['message'] def test_crawl(self): res = requests.get( self.crawl_url, params={ 'url': self.site_url, 'spider_name': self.spider_name } ) expected_result = { u'status': u'ok', u'items_dropped': [] } expected_items = [{ u'name': ['Page 1'], }] res_json = res.json() self.assertDictContainsSubset(expected_result, res_json) assert res_json['items'] assert len(res_json['items']) == len(expected_items) for exp_item, res_item in zip(expected_items, res_json['items']): self.assertDictContainsSubset(exp_item, res_item)
#!/usr/bin/env python3

import sys
import os
from pprint import pprint

# parse_msg lives one directory above this script; make it importable.
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
import parse_msg

if len(sys.argv) > 1:
    EML_DIR = sys.argv[1]
else:
    print('Usage: {} <eml-directory>'.format(sys.argv[0]))
    sys.exit(1)

eml_res = {}
exp_res = {}

if not os.path.exists(EML_DIR):
    print(EML_DIR, 'could not be accessed!')
    sys.exit(1)

# total files:
cnt = 0
fp = open('farklar', 'w')
for dirpath, dirnames, files in os.walk(EML_DIR):
    for name in files:
        # print("dir:", dirpath, "---", name)
        eml_file = os.path.join(dirpath, name)
        filename, file_extension = os.path.splitext(eml_file)
        # if file_extension == '.meta' or file_extension == '.tgz':
        if file_extension != '.eml':
            continue
        # print(eml_file, "is being processed...")
        if os.path.isfile(eml_file):
            print(eml_file, "is being processed...")
            ret = parse_msg.main(os.path.join(dirpath, name))
            if 'dlp' in name or 'ho_' in name:
                exp_res[name] = 100
            else:
                exp_res[name] = 0
            eml_res[name] = ret
            if eml_res[name] != exp_res[name]:
                print(name, ": eml_res and exp_res values differ!")
                fp.write(name + ": eml_res and exp_res values differ!\n")
            cnt = cnt + 1
        else:
            print(name, "is not a file!")

fp.close()
print('Total number of files: ', cnt)
with open('eml_res_sonuclar.txt', 'w') as out:
    pprint(eml_res, stream=out)
with open('exp_res_sonuclar.txt', 'w') as out:
    pprint(exp_res, stream=out)
using System; using System.IO; namespace TailBlazer.Domain.FileHandling.Recent { public class RecentFile : IEquatable<RecentFile> { public DateTime Timestamp { get; } public string Name { get; } public RecentFile(FileInfo fileInfo) { Name = fileInfo.FullName; Timestamp = DateTime.UtcNow; } public RecentFile(DateTime timestamp, string name) { Timestamp = timestamp; Name = name; } #region Equality public bool Equals(RecentFile other) { if (ReferenceEquals(null, other)) return false; if (ReferenceEquals(this, other)) return true; return Timestamp.Equals(other.Timestamp) && string.Equals(Name, other.Name); } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != this.GetType()) return false; return Equals((RecentFile) obj); } public override int GetHashCode() { unchecked { return (Timestamp.GetHashCode()*397) ^ (Name?.GetHashCode() ?? 0); } } public static bool operator ==(RecentFile left, RecentFile right) { return Equals(left, right); } public static bool operator !=(RecentFile left, RecentFile right) { return !Equals(left, right); } #endregion public override string ToString() { return $"{Name} ({Timestamp})"; } } }
'''OpenGL extension NV.parameter_buffer_object This module customises the behaviour of the OpenGL.raw.GL.NV.parameter_buffer_object to provide a more Python-friendly API Overview (from the spec) This extension, in conjunction with NV_gpu_program4, provides a new type of program parameter than can be used as a constant during vertex, fragment, or geometry program execution. Each program target has a set of parameter buffer binding points to which buffer objects can be attached. A vertex, fragment, or geometry program can read data from the attached buffer objects using a binding of the form "program.buffer[a][b]". This binding reads data from the buffer object attached to binding point <a>. The buffer object attached is treated either as an array of 32-bit words or an array of four-component vectors, and the binding above reads the array element numbered <b>. The use of buffer objects allows applications to change large blocks of program parameters at once, simply by binding a new buffer object. It also provides a number of new ways to load parameter values, including readback from the frame buffer (EXT_pixel_buffer_object), transform feedback (NV_transform_feedback), buffer object loading functions such as MapBuffer and BufferData, as well as dedicated parameter buffer update functions provided by this extension. The official definition of this extension is available here: http://www.opengl.org/registry/specs/NV/parameter_buffer_object.txt ''' from OpenGL import platform, constant, arrays from OpenGL import extensions, wrapper import ctypes from OpenGL.raw.GL import _types, _glgets from OpenGL.raw.GL.NV.parameter_buffer_object import * from OpenGL.raw.GL.NV.parameter_buffer_object import _EXTENSION_NAME def glInitParameterBufferObjectNV(): '''Return boolean indicating whether this extension is available''' from OpenGL import extensions return extensions.hasGLExtension( _EXTENSION_NAME ) # INPUT glProgramBufferParametersfvNV.params size not checked against count glProgramBufferParametersfvNV=wrapper.wrapper(glProgramBufferParametersfvNV).setInputArraySize( 'params', None ) # INPUT glProgramBufferParametersIivNV.params size not checked against count glProgramBufferParametersIivNV=wrapper.wrapper(glProgramBufferParametersIivNV).setInputArraySize( 'params', None ) # INPUT glProgramBufferParametersIuivNV.params size not checked against count glProgramBufferParametersIuivNV=wrapper.wrapper(glProgramBufferParametersIuivNV).setInputArraySize( 'params', None ) ### END AUTOGENERATED SECTION
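A tiny availability check using the init helper defined above; it assumes a current OpenGL context has already been created elsewhere (for example with GLUT or pygame), since extension queries need a live context.

# Only meaningful once a GL context exists; glInitParameterBufferObjectNV()
# simply reports whether the NV_parameter_buffer_object extension is present.
if glInitParameterBufferObjectNV():
    print('NV_parameter_buffer_object is available')
else:
    print('NV_parameter_buffer_object is NOT available')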
# Advent of Code - http://adventofcode.com/day/5
# From http://adventofcode.com/day/5
# Coder : Ginny C Ghezzo
# What I learned:

import sys

if len(sys.argv) > 1:
    filename = sys.argv[1]
else:
    filename = 'day5data.txt'
print(filename)


def checkForDoubleChar(myStr):
    judgement = False
    i = 1    # last one is broken
    while i < len(myStr):
        if myStr[i-1] == myStr[i]:
            return(True)
        i += 1
    return judgement


def checkForVowel(myStr):
    judgement = True
    count = 0
    count = myStr.count('a') + myStr.count('e') + myStr.count('i') + myStr.count('o') + myStr.count('u')
    if count < 3:
        judgement = False
    return judgement


def checkSantaList(myStr):
    judgement = True
    if ('ab' in myStr):
        return(False)
    if ('cd' in myStr):
        return(False)
    if ('pq' in myStr):
        return(False)
    if ('xy' in myStr):
        return(False)
    if (not checkForVowel(myStr)):
        return(False)
    if (not checkForDoubleChar(myStr)):
        return(False)
    return judgement


naughty = 0
nice = 0
f = open(filename, 'r')
line = f.readline()
while line:
    if checkSantaList(line):
        nice += 1
    else:
        naughty += 1
    line = f.readline()

print('Nice = ', nice, " Naughty = ", naughty)
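A few quick sanity checks for checkSantaList(), derived directly from the rules implemented above rather than from the puzzle's official examples.

# Derived from the implemented rules, not from the official puzzle examples.
assert checkSantaList('aaa')          # three vowels, doubled 'aa', no banned pair
assert not checkSantaList('abcdefg')  # contains the banned pairs 'ab' and 'cd'
assert not checkSantaList('aeiou')    # enough vowels but no doubled letter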
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Simple built-in backend. """ __author__ = "Lluís Vilanova <[email protected]>" __copyright__ = "Copyright 2012, Lluís Vilanova <[email protected]>" __license__ = "GPL version 2 or (at your option) any later version" __maintainer__ = "Stefan Hajnoczi" __email__ = "[email protected]" from tracetool import out PUBLIC = True def is_string(arg): strtype = ('const char*', 'char*', 'const char *', 'char *') if arg.lstrip().startswith(strtype): return True else: return False def c(events): out('#include "trace.h"', '#include "trace/control.h"', '#include "trace/simple.h"', '', ) for num, event in enumerate(events): out('void trace_%(name)s(%(args)s)', '{', ' TraceBufferRecord rec;', name = event.name, args = event.args, ) sizes = [] for type_, name in event.args: if is_string(type_): out(' size_t arg%(name)s_len = %(name)s ? MIN(strlen(%(name)s), MAX_TRACE_STRLEN) : 0;', name = name, ) strsizeinfo = "4 + arg%s_len" % name sizes.append(strsizeinfo) else: sizes.append("8") sizestr = " + ".join(sizes) if len(event.args) == 0: sizestr = '0' out('', ' TraceEvent *eventp = trace_event_id(%(event_id)s);', ' bool _state = trace_event_get_state_dynamic(eventp);', ' if (!_state) {', ' return;', ' }', '', ' if (trace_record_start(&rec, %(event_id)s, %(size_str)s)) {', ' return; /* Trace Buffer Full, Event Dropped ! */', ' }', event_id = num, size_str = sizestr, ) if len(event.args) > 0: for type_, name in event.args: # string if is_string(type_): out(' trace_record_write_str(&rec, %(name)s, arg%(name)s_len);', name = name, ) # pointer var (not string) elif type_.endswith('*'): out(' trace_record_write_u64(&rec, (uintptr_t)(uint64_t *)%(name)s);', name = name, ) # primitive data type else: out(' trace_record_write_u64(&rec, (uint64_t)%(name)s);', name = name, ) out(' trace_record_finish(&rec);', '}', '') def h(events): out('#include "trace/simple.h"', '') for event in events: out('void trace_%(name)s(%(args)s);', name = event.name, args = event.args, )
from distutils.core import setup, Command import os import django_geckoboard os.environ['DJANGO_SETTINGS_MODULE'] = 'django_geckoboard.tests.settings' cmdclass = {} class TestCommand(Command): description = "run package tests" user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): from django_geckoboard.tests.utils import run_tests run_tests() cmdclass['test'] = TestCommand def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() def build_long_description(): return "\n\n".join([ django_geckoboard.__doc__, #@UndefinedVariable read('CHANGELOG.rst'), ]) setup( name = 'django-geckoboard', version = django_geckoboard.__version__, license = django_geckoboard.__license__, description = 'Geckoboard custom widgets for Django projects', long_description = build_long_description(), author = django_geckoboard.__author__, author_email = django_geckoboard.__email__, packages = [ 'django_geckoboard', 'django_geckoboard.tests', ], keywords = ['django', 'geckoboard'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], platforms = ['any'], url = 'http://github.com/jcassee/django-geckoboard', download_url = 'http://github.com/jcassee/django-geckoboard/archives/master', cmdclass = cmdclass, )
############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class Libgeotiff(AutotoolsPackage): """GeoTIFF represents an effort by over 160 different remote sensing, GIS, cartographic, and surveying related companies and organizations to establish a TIFF based interchange format for georeferenced raster imagery. """ homepage = "https://trac.osgeo.org/geotiff/" url = "http://download.osgeo.org/geotiff/libgeotiff/libgeotiff-1.4.2.tar.gz" version('1.4.2', '96ab80e0d4eff7820579957245d844f8') variant('zlib', default=True, description='Include zlib support') variant('jpeg', default=True, description='Include jpeg support') variant('proj', default=True, description='Use PROJ.4 library') depends_on('zlib', when='+zlib') depends_on('jpeg', when='+jpeg') depends_on('libtiff') depends_on('proj', when='+proj') def configure_args(self): spec = self.spec args = [ '--with-libtiff={0}'.format(spec['libtiff'].prefix), ] if '+zlib' in spec: args.append('--with-zlib={0}'.format(spec['zlib'].prefix)) else: args.append('--with-zlib=no') if '+jpeg' in spec: args.append('--with-jpeg={0}'.format(spec['jpeg'].prefix)) else: args.append('--with-jpeg=no') if '+proj' in spec: args.append('--with-proj={0}'.format(spec['proj'].prefix)) else: args.append('--with-proj=no') return args
/* * Copyright (c) 2012, Texas Instruments Incorporated - http://www.ti.com/ * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ /*---------------------------------------------------------------------------*/ /** * \addtogroup cc2538-mqtt-demo * @{ * * \file * Project specific configuration defines for the MQTT demo */ /*---------------------------------------------------------------------------*/ #ifndef PROJECT_CONF_H_ #define PROJECT_CONF_H_ /*---------------------------------------------------------------------------*/ /* User configuration */ #define MQTT_DEMO_STATUS_LED LEDS_GREEN #define MQTT_DEMO_PUBLISH_TRIGGER &button_right_sensor #define MQTT_DEMO_BROKER_IP_ADDR "test.mosquitto.org" /* "mqtt.relayr.io" */ /*---------------------------------------------------------------------------*/ /* Default configuration values */ #define DEFAULT_EVENT_TYPE_ID "2826729f-d677-4425-88d6-78a62ae7061f" /* DEVICE ID same as MQTT USER */ #define DEFAULT_SUBSCRIBE_CMD_TYPE "leds" #define DEFAULT_AUTH_USER "TKCZyn9Z3RCWI1nimKucGHw" /* CLIENT ID */ #define DEFAULT_AUTH_TOKEN "Ei316V-YqznM" /* PASSWORD */ #define DEFAULT_BROKER_PORT 1883 #define DEFAULT_PUBLISH_INTERVAL (45 * CLOCK_SECOND) #define DEFAULT_KEEP_ALIVE_TIMER 60 /*---------------------------------------------------------------------------*/ /* Use either the cc1200_driver for sub-1GHz, or cc2538_rf_driver (default) * for 2.4GHz built-in radio interface */ #undef NETSTACK_CONF_RADIO #define NETSTACK_CONF_RADIO cc2538_rf_driver /* Alternate between ANTENNA_SW_SELECT_SUBGHZ or ANTENNA_SW_SELECT_2_4GHZ */ #define ANTENNA_SW_SELECT_DEF_CONF ANTENNA_SW_SELECT_2_4GHZ #define NETSTACK_CONF_RDC nullrdc_driver #endif /* PROJECT_CONF_H_ */ /*---------------------------------------------------------------------------*/ /** @} */
/* Software floating-point emulation: convert to fortran nearest. Copyright (C) 1997-2013 Free Software Foundation, Inc. This file is part of the GNU C Library. Contributed by Richard Henderson ([email protected]) and Jakub Jelinek ([email protected]). The GNU C Library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. The GNU C Library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with the GNU C Library. If not, see <http://www.gnu.org/licenses/>. */ #include "local-soft-fp.h" long _OtsNintXQ (long al, long ah, long _round) { FP_DECL_EX; FP_DECL_Q(A); FP_DECL_Q(B); FP_DECL_Q(C); unsigned long r; long s; /* If bit 3 is set, then integer overflow detection is requested. */ s = _round & 8 ? 1 : -1; _round = _round & 3; FP_INIT_ROUNDMODE; FP_UNPACK_SEMIRAW_Q(A, a); /* Build 0.5 * sign(A) */ B_e = _FP_EXPBIAS_Q; __FP_FRAC_SET_2 (B, 0, 0); B_s = A_s; FP_ADD_Q(C, A, B); _FP_FRAC_SRL_2(C, _FP_WORKBITS); _FP_FRAC_HIGH_RAW_Q(C) &= ~(_FP_W_TYPE)_FP_IMPLBIT_Q; FP_TO_INT_Q(r, C, 64, s); if (s > 0 && (_fex &= FP_EX_INVALID)) FP_HANDLE_EXCEPTIONS; return r; }
# -*- coding: utf-8 -*- """ test_env ~~~~~~~~ Test the BuildEnvironment class. :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """ import sys from util import TestApp, remove_unicode_literals, path from sphinx.builders.html import StandaloneHTMLBuilder from sphinx.builders.latex import LaTeXBuilder app = env = None warnings = [] def setup_module(): global app, env app = TestApp(srcdir='(temp)', freshenv=True) env = app.env env.set_warnfunc(lambda *args: warnings.append(args)) def teardown_module(): app.cleanup() def warning_emitted(file, text): for warning in warnings: if len(warning) == 2 and file in warning[1] and text in warning[0]: return True return False # Tests are run in the order they appear in the file, therefore we can # afford to not run update() in the setup but in its own test def test_first_update(): msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app) assert msg.endswith('%d added, 0 changed, 0 removed' % len(env.found_docs)) docnames = set() for docname in it: # the generator does all the work docnames.add(docname) assert docnames == env.found_docs == set(env.all_docs) # test if exclude_patterns works ok assert 'subdir/excluded' not in env.found_docs def test_images(): assert warning_emitted('images', 'image file not readable: foo.png') assert warning_emitted('images', 'nonlocal image URI found: ' 'http://www.python.org/logo.png') tree = env.get_doctree('images') app._warning.reset() htmlbuilder = StandaloneHTMLBuilder(app) htmlbuilder.imgpath = 'dummy' htmlbuilder.post_process_images(tree) image_uri_message = "no matching candidate for image URI u'foo.*'" if sys.version_info >= (3, 0): image_uri_message = remove_unicode_literals(image_uri_message) assert image_uri_message in app._warning.content[-1] assert set(htmlbuilder.images.keys()) == \ set(['subdir/img.png', 'img.png', 'subdir/simg.png', 'svgimg.svg']) assert set(htmlbuilder.images.values()) == \ set(['img.png', 'img1.png', 'simg.png', 'svgimg.svg']) app._warning.reset() latexbuilder = LaTeXBuilder(app) latexbuilder.post_process_images(tree) assert image_uri_message in app._warning.content[-1] assert set(latexbuilder.images.keys()) == \ set(['subdir/img.png', 'subdir/simg.png', 'img.png', 'img.pdf', 'svgimg.pdf']) assert set(latexbuilder.images.values()) == \ set(['img.pdf', 'img.png', 'img1.png', 'simg.png', 'svgimg.pdf']) def test_second_update(): # delete, add and "edit" (change saved mtime) some files and update again env.all_docs['contents'] = 0 root = path(app.srcdir) # important: using "autodoc" because it is the last one to be included in # the contents.txt toctree; otherwise section numbers would shift (root / 'autodoc.txt').unlink() (root / 'new.txt').write_text('New file\n========\n') msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app) assert '1 added, 3 changed, 1 removed' in msg docnames = set() for docname in it: docnames.add(docname) # "includes" and "images" are in there because they contain references # to nonexisting downloadable or image files, which are given another # chance to exist assert docnames == set(['contents', 'new', 'includes', 'images']) assert 'autodoc' not in env.all_docs assert 'autodoc' not in env.found_docs def test_object_inventory(): refs = env.domaindata['py']['objects'] assert 'func_without_module' in refs assert refs['func_without_module'] == ('objects', 'function') assert 'func_without_module2' in refs assert 'mod.func_in_module' in refs assert 'mod.Cls' in refs assert 
'mod.Cls.meth1' in refs assert 'mod.Cls.meth2' in refs assert 'mod.Cls.meths' in refs assert 'mod.Error' not in refs assert 'errmod.Error' in refs assert 'func_in_module' not in refs assert 'func_noindex' not in refs assert env.domaindata['py']['modules']['mod'] == \ ('objects', 'Module synopsis.', 'UNIX', False) assert env.domains['py'].data is env.domaindata['py'] assert env.domains['c'].data is env.domaindata['c']
/* * Copyright 2013 NGDATA nv * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ngdata.hbaseindexer.parse.extract; import static com.ngdata.hbaseindexer.parse.extract.ExtractTestUtil.assertByteArraysEquals; import static org.junit.Assert.assertTrue; import com.google.common.collect.Lists; import com.ngdata.hbaseindexer.parse.ByteArrayExtractor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Before; import org.junit.Test; import static com.ngdata.sep.impl.HBaseShims.newResultFromObject; public class PrefixMatchingQualifierExtractorTest { private static final byte[] ROW = new byte[] { 1, 2, 3 }; private static final byte[] COLFAM_A = Bytes.toBytes("A"); private static final byte[] COLFAM_B = Bytes.toBytes("B"); private static final byte[] QUALIFIER_A1 = Bytes.toBytes("A1"); private static final byte[] QUALIFIER_A2 = Bytes.toBytes("A2"); private static final byte[] QUALIFIER_B1 = Bytes.toBytes("B1"); private static final byte[] VALUE_A1 = Bytes.toBytes("value a1"); private static final byte[] VALUE_A2 = Bytes.toBytes("value a2"); private static final byte[] VALUE_B1 = Bytes.toBytes("value b1"); private Result result; @Before public void setUp() { KeyValue kvA1 = new KeyValue(ROW, COLFAM_A, QUALIFIER_A1, VALUE_A1); KeyValue kvA2 = new KeyValue(ROW, COLFAM_A, QUALIFIER_A2, VALUE_A2); KeyValue kvB1 = new KeyValue(ROW, COLFAM_B, QUALIFIER_B1, VALUE_B1); result = newResultFromObject(Lists.newArrayList((Object)kvA1, (Object)kvA2, (Object)kvB1)); } @Test public void testExtract() { ByteArrayExtractor extractor = new PrefixMatchingQualifierExtractor(COLFAM_A, Bytes.toBytes("A")); assertByteArraysEquals(Lists.newArrayList(QUALIFIER_A1, QUALIFIER_A2), extractor.extract(result)); } @Test public void testExtract_EmptyPrefix() { ByteArrayExtractor extractor = new PrefixMatchingQualifierExtractor(COLFAM_A, new byte[0]); assertByteArraysEquals(Lists.newArrayList(QUALIFIER_A1, QUALIFIER_A2), extractor.extract(result)); } @Test public void testExtract_FullCellName() { ByteArrayExtractor extractor = new PrefixMatchingQualifierExtractor(COLFAM_A, Bytes.toBytes("A1")); assertByteArraysEquals(Lists.newArrayList(QUALIFIER_A1), extractor.extract(result)); } @Test public void testExtract_NoMatches() { ByteArrayExtractor extractor = new PrefixMatchingQualifierExtractor(COLFAM_A, Bytes.toBytes("doesnt exist")); assertTrue(extractor.extract(result).isEmpty()); } }
/** * JBoss, Home of Professional Open Source. * Copyright 2014-2020 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.pnc.spi.repositorymanager; import java.util.Map; import org.jboss.pnc.model.BuildRecord; import org.jboss.pnc.enums.BuildType; import org.jboss.pnc.enums.RepositoryType; import org.jboss.pnc.spi.repositorymanager.model.RepositorySession; import org.jboss.pnc.spi.repositorymanager.model.RunningRepositoryDeletion; import org.jboss.pnc.spi.repositorymanager.model.RunningRepositoryPromotion; /** * Created by <a href="mailto:[email protected]">Matej Lazar</a> on 2014-11-23. */ public interface RepositoryManager { /** * Create a new repository session tuned to the parameters of that build collection and the build that will use this * repository session. * * @param buildExecution The build execution currently running * @param accessToken The access token to use * @param serviceAccountToken The access token for service account to use for repo creation, promotion and cleanup * @param repositoryType the created repositories' type (npm, maven, etc.) * @param genericParameters Generic parameters specified in the BuildConfiguration * @return The new repository session * @throws RepositoryManagerException If there is a problem creating the repository */ RepositorySession createBuildRepository( BuildExecution buildExecution, String accessToken, String serviceAccountToken, RepositoryType repositoryType, Map<String, String> genericParameters) throws RepositoryManagerException; /** * Collects processed repository manager result for a previously finished build for any repair work needed. This * reads the tracking report and collects the downloads and uploads the same way as they are collected at the end of * a successful build. * * @param buildContentId string identifier of the build * @param tempBuild flag if this is a temporary build * @return repository manager result * @throws RepositoryManagerException in case of an error when collecting the build artifacts and dependencies */ RepositoryManagerResult collectRepoManagerResult(Integer id) throws RepositoryManagerException; /** * Add the repository containing output associated with the specified {@link BuildRecord} to the membership of the * repository group with the given ID. Note that the operation won't start until monitoring starts for the returned * {@link RunningRepositoryPromotion} instance. * * @param buildRecord The build output to promote * @param pakageType package type key used by repository manager * @param toGroup The ID of the repository group where the build output should be promoted * @param accessToken The access token to use * @return An object representing the running promotion process, with callbacks for result and error. 
* * @throws RepositoryManagerException If there is a problem promoting the build */ RunningRepositoryPromotion promoteBuild( BuildRecord buildRecord, String pakageType, String toGroup, String accessToken) throws RepositoryManagerException; /** * Used to purge the artifacts that were output from a given build (including the specific hosted repository which * was used for that build). Note that the operation won't start until monitoring starts for the returned * {@link RunningRepositoryDeletion} instance. * * @param buildRecord The build whose artifacts/repositories should be removed * @param pakageType package type key used by repository manager * @param accessToken The access token to use * @return An object representing the running deletion, with callbacks for result and error. * * @throws RepositoryManagerException If there is a problem deleting the build */ RunningRepositoryDeletion deleteBuild(BuildRecord buildRecord, String pakageType, String accessToken) throws RepositoryManagerException; boolean canManage(RepositoryType managerType); }
using System; using System.Threading.Tasks; using Microsoft.Extensions.Localization; using OrchardCore.Localization.Drivers; using OrchardCore.Navigation; namespace OrchardCore.Localization { /// <summary> /// Represents a localization menu in the admin site. /// </summary> public class AdminMenu : INavigationProvider { private readonly IStringLocalizer S; /// <summary> /// Creates a new instance of the <see cref="AdminMenu"/>. /// </summary> /// <param name="localizer">The <see cref="IStringLocalizer"/>.</param> public AdminMenu(IStringLocalizer<AdminMenu> localizer) { S = localizer; } ///<inheritdocs /> public Task BuildNavigationAsync(string name, NavigationBuilder builder) { if (String.Equals(name, "admin", StringComparison.OrdinalIgnoreCase)) { builder .Add(S["Configuration"], NavigationConstants.AdminMenuConfigurationPosition, localization => localization .Add(S["Settings"], settings => settings .Add(S["Localization"], localization => localization .AddClass("localization").Id("localization") .Add(S["Cultures"], S["Cultures"].PrefixPosition(), entry => entry .AddClass("cultures").Id("cultures") .Action("Index", "Admin", new { area = "OrchardCore.Settings", groupId = LocalizationSettingsDisplayDriver.GroupId }) .Permission(Permissions.ManageCultures) .LocalNav() ) ) ) ); } return Task.CompletedTask; } } }
import pydot
from sklearn import tree
from sklearn.ensemble import RandomForestClassifier
from sklearn.externals.six import StringIO

from db_operation import db_exec
from decisiontree_extracting import tree_to_code, tree_to_code_db


def DSTree(X, y, feature_names, class_names):
    # package_name is hard-coded for now; arguably it should be an input parameter
    package_name = 'EPP-14-225'

    print 'Generating Random Forest Classifier...'
    clf = RandomForestClassifier(n_estimators = 4)
    clf = clf.fit(X, y)
    # print 'Feature importances: %s' % clf.feature_importances_

    print 'Removing the old package result...'
    sql = 'delete from dsresult where package=\'{}\''.format(package_name)
    db_exec('catl', sql)

    print 'Generating Tree plot...'
    for i in xrange(len(clf.estimators_)):
        dot_data = StringIO()
        # result_filename = 'classifying_result/classifying-{}.txt'.format(i)
        # tree_to_code(clf.estimators_[i], feature_names = feature_names, class_names = class_names, result_filename = result_filename)
        tree_to_code_db(clf.estimators_[i], feature_names = feature_names, class_names = class_names, package_name = package_name)

        print 'Generating %d plot...' % i
        tree.export_graphviz(clf.estimators_[i], out_file = dot_data, feature_names = feature_names, class_names = class_names, filled = True, rounded = True, special_characters = True, leaves_parallel = True)

        print 'Converting %d plot...' % i
        graph = pydot.graph_from_dot_data(dot_data.getvalue())
        graph[0].write_png('classifying_result/classifying-%d.png' % i)


if __name__ == '__main__':
    # DSTree() needs training data and label names prepared by the caller,
    # e.g. DSTree(X, y, feature_names, class_names); this module has no
    # standalone entry point.
    pass
import xml.etree.ElementTree as ET import xmltodict class Processor: def process_withAWBNumber(self, awb_number, piece_enabled): response_path = './UnitTestPlan/Tracking/Response/SingleknownTrackResponse-no-data-found.xml' tree = ET.parse(response_path) root = tree.getroot() st = None with open('./UnitTestPlan/Tracking/Response/SingleknownTrackResponse-no-data-found.xml') as fd: doc = xmltodict.parse(fd.read()) root = doc['res:TrackingResponse'] if type(root['AWBInfo']) == list: l = root['AWBInfo'] for AWBInfo_element in l: cur_AWBInfo_element = AWBInfo_element if int(AWBInfo_element['AWBNumber']) == awb_number: st = AWBInfo_element['Status'] break if st == None: print("Tracking request failed\n" + "Please check your AWBNumber") return else: cur_AWBInfo_element = root['AWBInfo'] st = root['AWBInfo']['Status'] if st['ActionStatus'] != "Success": print("Tracking request failed\n"+st['Condition']['ConditionData']) return else: print(("Tracking request is Successful for ")+str(awb_number)) if piece_enabled == 'p': self.show_pieces(cur_AWBInfo_element) elif piece_enabled == 's': self.show_shipment(cur_AWBInfo_element) else: self.show_pieces(cur_AWBInfo_element) self.show_shipment(cur_AWBInfo_element) def show_pieces(self, cur_AWBInfo_element): pieces = cur_AWBInfo_element['Pieces'] if type(pieces['PieceInfo']) == list: l = pieces['PieceInfo'] for pieces_Info in l: piece_details = pieces_Info['PieceDetails'] print("Piece details for pieces are as follows,\n") print("Depth of your package is "+piece_details['ActualDepth']+"\nWidth of the package is "+ piece_details['ActualWidth']+"\nHeight of your package is "+piece_details['ActualHeight'] + "\nWeight of your package is "+piece_details['ActualWeight']+" "+piece_details['WeightUnit']) else: pieces_Info = pieces['PieceInfo'] piece_details = pieces_Info['PieceDetails'] print("Piece details for pieces are as follows,\n") print("Depth of your package is " + piece_details['ActualDepth'] + "\nWidth of the package is " + piece_details['ActualWidth'] + "\nHeight of your package is " + piece_details['ActualHeight'] + "\nWeight of your package is " + piece_details['ActualWeight'] + " " + piece_details['WeightUnit']) def show_shipment(self, cur_AWBInfo_element): shipmentinfo = cur_AWBInfo_element['ShipmentInfo'] print("Shipment information is given below for your package\n Origin Service area :"+ shipmentinfo["OriginServiceArea"]["Description"]+"\nDestination Service area :"+ shipmentinfo["DestinationServiceArea"]["Description"]+"\nShipper name is :"+shipmentinfo["ShipperName"]+ "\nDate of shipment is "+shipmentinfo["ShipmentDate"]) if 'EstDlvyDate' in shipmentinfo: print("\nEstimated date of delivery is "+shipmentinfo['EstDlvyDate']) def process_withLPNumber(self, lp_number): response_path = './UnitTestPlan/Tracking/Response/TrackingResponse_SingleLP_PieceEnabled_B_1.xml' tree = ET.parse(response_path) root = tree.getroot() st = None with open('./UnitTestPlan/Tracking/Response/SingleknownTrackResponse-no-data-found.xml') as fd: doc = xmltodict.parse(fd.read()) root = doc['res:TrackingResponse'] if type(root['AWBInfo']) == list: l = root['AWBInfo'] for AWBInfo_element in l: if AWBInfo_element['TrackedBy'] != None: if int(AWBInfo_element['TrackedBy']['LPNumber']) == lp_number: st = AWBInfo_element['Status'] break else: print("LPNumber does not exist") if st == None: print("Tracking request failed\n" + "Please check your AWBNumber") return else: st = root['AWBInfo']['Status'] if st['ActionStatus'] != "Success": print("Tracking request failed\n" + st['Condition']['ConditionData']) return else: print(("Tracking request is Successful for ") + str(lp_number)) def __init__(self, response_path): self.response_path = response_path processor = Processor('./UnitTestPlan/Tracking/Response/SingleknownTrackResponse-no-data-found.xml') processor.process_withAWBNumber(123444444, 'p')
from typing import TYPE_CHECKING from eth_utils import to_normalized_address from web3.exceptions import BadFunctionCallOutput from raiden.exceptions import AddressWrongContract, ContractVersionMismatch from raiden.network.rpc.smartcontract_proxy import ContractProxy from raiden.transfer.identifiers import CanonicalIdentifier from raiden.utils.typing import Address, BlockSpecification, Locksroot, Tuple if TYPE_CHECKING: # pylint: disable=unused-import from raiden.network.blockchain_service import BlockChainService def compare_contract_versions( proxy: ContractProxy, expected_version: str, contract_name: str, address: Address ) -> None: """Compare version strings of a contract. If not matching raise ContractVersionMismatch. Also may raise AddressWrongContract if the contract contains no code.""" assert isinstance(expected_version, str) try: deployed_version = proxy.contract.functions.contract_version().call() except BadFunctionCallOutput: raise AddressWrongContract("") deployed_version = deployed_version.replace("_", "0") expected_version = expected_version.replace("_", "0") deployed = [int(x) for x in deployed_version.split(".")] expected = [int(x) for x in expected_version.split(".")] if deployed != expected: raise ContractVersionMismatch( f"Provided {contract_name} contract ({to_normalized_address(address)}) " f"version mismatch. Expected: {expected_version} Got: {deployed_version}" ) def get_onchain_locksroots( chain: "BlockChainService", canonical_identifier: CanonicalIdentifier, participant1: Address, participant2: Address, block_identifier: BlockSpecification, ) -> Tuple[Locksroot, Locksroot]: """Return the locksroot for `participant1` and `participant2` at `block_identifier`. This is resolving a corner case where the current node view of the channel state does not reflect what the blockchain contains. E.g. for a channel A->B: - A sends a LockedTransfer to B - B sends a Refund to A - B goes offline - A sends a LockExpired to B Here: (1) the lock is removed from A's state (2) B never received the message - A closes the channel with B's refund - Here a few things may happen: (1) B never comes back online, and updateTransfer is never called. (2) B is using monitoring services, which use the known LockExpired balance proof. (3) B comes back online and calls updateTransfer with the LockExpired message (For some transports B will never receive the LockExpired message because the channel is closed already, and message retries may be disabled). - When the channel is settled A must query the blockchain to figure out which locksroot was used. """ payment_channel = chain.payment_channel(canonical_identifier=canonical_identifier) token_network = payment_channel.token_network participants_details = token_network.detail_participants( participant1=participant1, participant2=participant2, channel_identifier=canonical_identifier.channel_identifier, block_identifier=block_identifier, ) our_details = participants_details.our_details our_locksroot = our_details.locksroot partner_details = participants_details.partner_details partner_locksroot = partner_details.locksroot return our_locksroot, partner_locksroot
import unittest import numpy as np from numpy import sqrt import pyqg class LinearStabilityTester(unittest.TestCase): def setUp(self): self.atol=1.e-16 def test_two_layer_stability(self): """ Make sure growth rates calculated numerically agree with the exact dispersion relationship for nz = 2 """ m = pyqg.LayeredModel(L=1.e6,rd = 15.e3,nx=256,U=np.array([.1,0.]),V=np.array([0.,0.]), H=np.array([2000,2000.]),delta=1.,nz=2,f=1.) # numerical results evals, evecs = m.stability_analysis() # analytical results kb = sqrt(m.beta/(m.Us/2.)) wv4 = m.wv2**2 kd2 = m.rd**-2 kd4 = kd2**2 kb4 = kb**4 omg_ana = np.zeros_like(m.wv2) + 0.j D = 1. + (4.*wv4*(wv4 - kd4))/(kb4*kd4) fneg = D<0. omg_ana[fneg] = 1j*m.k[fneg]*(m.beta/(m.wv2[fneg] + kd2))*( (kd2/(2.*m.wv2[fneg]))*sqrt(-D[fneg]) ) res = np.abs(omg_ana.imag-evals.imag).max() np.testing.assert_allclose(res,0.,atol=self.atol, err_msg=' Residual of growth rate is larger than %1.1e' %res) if __name__ == "__main__": unittest.main()
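For reference, the analytic growth rate that the test above compares against can be written out directly from the expressions in the code (this is only a transcription, assuming \kappa^2 denotes m.wv2, k_d = 1/rd and k_b^2 = \beta/(U_s/2)):

D = 1 + \frac{4\kappa^4(\kappa^4 - k_d^4)}{k_b^4\,k_d^4}, \qquad \omega = \frac{i\,k\,\beta}{\kappa^2 + k_d^2}\,\frac{k_d^2}{2\kappa^2}\,\sqrt{-D} \quad (D < 0),

so the imaginary part of \omega (the growth rate) is non-zero only where D < 0, which is exactly the region the test fills in before comparing against the numerical eigenvalues.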
import { MockBoot } from "v2/DevTools" import { mount } from "enzyme" import React from "react" import { ArtistConsignMeta } from "../ArtistConsignMeta" jest.mock("v2/Utils/getENV", () => ({ getENV: () => "https://artsy.net", })) describe("ArtistConsignMeta", () => { const props = { artist: { name: "Alex Katz", href: "/artist/alex-katz", targetSupply: { microfunnel: { artworks: [ { artwork: { image: { imageURL: "path/to/image.jpg", }, }, }, ], }, }, }, } const getWrapper = (passedProps = {}) => { return mount( <MockBoot> <ArtistConsignMeta {...(props as any)} {...passedProps} /> </MockBoot> ) } it("outputs correct title tags", () => { const wrapper = getWrapper() expect(wrapper.find("Title").debug()).toContain(props.artist.name) // use `debug` to assert on component tree, vs html tree expect( wrapper .find("Meta") .findWhere(c => c.props().property === "og:title") .debug() ).toContain(props.artist.name) expect( wrapper .find("Meta") .findWhere(c => c.props().property === "og:title") .debug() ).toContain(props.artist.name) }) it("outputs correct description tags", () => { const wrapper = getWrapper() expect( wrapper .find("Meta") .findWhere(c => c.props().name === "description") .debug() ).toContain(props.artist.name) expect( wrapper .find("Meta") .findWhere(c => c.props().property === "twitter:description") .debug() ).toContain(props.artist.name) expect( wrapper .find("Meta") .findWhere(c => c.props().property === "og:description") .debug() ).toContain(props.artist.name) }) it("outputs correct URL tags", () => { const consignHref = `https://artsy.net${props.artist.href}/consign` const wrapper = getWrapper() expect( wrapper .find("Link") .findWhere(c => c.props().rel === "canonical") .debug() ).toContain(consignHref) expect( wrapper .find("Meta") .findWhere(c => c.props().property === "og:url") .debug() ).toContain(consignHref) }) describe("image tags", () => { it("doesn't blow up if no images", () => { const wrapper = getWrapper({ artist: { targetSupply: { microfunnel: { artworks: null, }, }, }, }) expect( wrapper.find("Meta").findWhere(c => c.props().name === "thumbnail") .length ).toEqual(0) }) it("does not output image tag if image not available", () => { const wrapper = getWrapper({ artist: { targetSupply: { microfunnel: { artworks: [ { artwork: { image: { imageURL: null, }, }, }, ], }, }, }, }) expect( wrapper.find("Meta").findWhere(c => c.props().name === "thumbnail") .length ).toEqual(0) }) it("outputs correct image tags if available", () => { const wrapper = getWrapper() expect( wrapper .find("Meta") .findWhere(c => c.props().name === "thumbnail") .debug() ).toContain("path/to/image.jpg") }) }) })
import os import sys from setuptools import setup, find_packages from tethys_apps.app_installation import custom_develop_command, custom_install_command ### Apps Definition ### app_package = 'canned_gssha' release_package = 'tethysapp-' + app_package app_class = 'canned_gssha.app:CannedGSSHA' app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package) ### Python Dependencies ### dependencies = ['django',] setup( name=release_package, version='0.1.0', description='Access GSSHA model results that have been put away for a rainy day.', long_description='', keywords='', author='Nathan Swain, Herman Dolder', author_email='[email protected]', url='tethys.ci-water.org', license='BSD 2-Clause', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), namespace_packages=['tethysapp', 'tethysapp.' + app_package], include_package_data=True, zip_safe=False, install_requires=dependencies, cmdclass={ 'install': custom_install_command(app_package, app_package_dir, dependencies), 'develop': custom_develop_command(app_package, app_package_dir, dependencies) } )
# -*- coding: utf-8 -*- """ Created on Sun Apr 30 01:24:05 2017 @author: AmatVictoriaCuramIII """ def DefModADXControlledStrategyOptimizer(ranger2, s): import numpy as np import pandas as pd import random as rand empty = [] winners = pd.DataFrame() for r in ranger2: a = rand.randint(2,15) b = rand.randint(2,15) c = rand.random() * 3 d = rand.random() * 3 e = rand.random() * 3 f = rand.random() * 3 s['LogRet'] = np.log(s['Adj Close']/s['Adj Close'].shift(1)) s['LogRet'] = s['LogRet'].fillna(0) s['Regime'] = np.where(s['Advice'] > -1.874201, 1, 0) s['Regime'] = np.where(s['Advice'] < -.328022, -1, s['Regime']) s['Strategy'] = (s['Regime']).shift(1)*s['LogRet'] s['Strategy'] = s['Strategy'].fillna(0) s['NewStrategy'] = s['Strategy'] s['Width'] = (s['High'] - s['Low'])/s['Open'] s['OverNight'] = (s['Open'] - s['Adj Close'].shift(1))/s['Adj Close'].shift(1) s['RollingWidth'] = s['Width'].rolling(center = False, window=a).mean() s['RollingOverNight'] = abs(s['OverNight']).rolling(center=False, window=b).mean() s['DayUp'] = (s['High'] - s['Adj Close'].shift(1))/s['Open'] s['DayUp'] = s['DayUp'][s['DayUp']> 0] s['DayUp'] = s['DayUp'].fillna(0) s['DayDown'] = (s['Adj Close'].shift(1) - s['Low'])/s['Open'] s['DayDown'] = s['DayDown'][s['DayDown']> 0] s['DayDown'] = s['DayDown'].fillna(0) s['sharpe'] = (s['Strategy'].mean()-abs(s['LogRet'].mean()))/s['Strategy'].std() s['LongGains'] = np.where(s['DayUp'] >= (s['RollingWidth']/c),s['RollingWidth']/c,0) s['ShortGains'] = np.where(s['DayDown'] >= (s['RollingWidth']/d),s['RollingWidth']/d,0) s['LongStop'] = np.where(s['OverNight'] <= (s['RollingWidth'].shift(1)/e * -1), s['OverNight'] ,0) s['ShortStop'] = np.where(s['OverNight'] >= s['RollingWidth'].shift(1)/f, (s['OverNight']*-1) ,0) s['NewStrategy'] = np.where(s['Regime'].shift(1) == 1,s['LongGains'],0) s['NewStrategy'] = np.where(s['Regime'].shift(1) == -1,s['ShortGains'],s['NewStrategy']) s['NewStrategy'] = np.where(s['NewStrategy'] == 0, s['Strategy'], s['NewStrategy']) s['NewStrategy'] = np.where(s['LongStop'] < 0, s['LongStop'], s['NewStrategy']) s['NewStrategy'] = np.where(s['ShortStop'] < 0, s['ShortStop'], s['NewStrategy']) s['newsharpe'] = (s['NewStrategy'].mean()-abs(s['LogRet'].mean()))/s['NewStrategy'].std() if s['newsharpe'][-1] < -400: continue empty.append(a) empty.append(b) empty.append(c) empty.append(d) empty.append(e) empty.append(f) empty.append(s['sharpe'][-1]) empty.append(s['newsharpe'][-1]) emptyseries = pd.Series(empty) winners[r] = emptyseries.values empty[:] = [] z = winners.iloc[7] w = np.percentile(z, 80) v = [] #this variable stores the Nth percentile of top performers DS1W = pd.DataFrame() #this variable stores your financial advisors for specific dataset for h in z: if h > w: v.append(h) for j in v: r = winners.columns[(winners == j).iloc[7]] DS1W = pd.concat([DS1W,winners[r]], axis = 1) y = max(z) x = winners.columns[(winners == y).iloc[7]] #this is the column number return winners[x] #this is the dataframe index based on column number
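A minimal usage sketch for the optimizer above. The DataFrame name `prices` and the file 'prices.csv' are hypothetical placeholders; the data is assumed to already carry the columns the function reads ('Open', 'High', 'Low', 'Adj Close', 'Advice').

import pandas as pd

# Hypothetical daily OHLC data with an 'Advice' column, indexed by date.
prices = pd.read_csv('prices.csv', index_col=0, parse_dates=True)

# Run 200 random parameter draws and keep the best-scoring column of results.
best = DefModADXControlledStrategyOptimizer(range(200), prices)
print(best)  # rows 0-5 are the sampled parameters a-f, rows 6-7 the two Sharpe-like scores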
import logging log = logging.getLogger("androguard.gui") class Signature: def __init__(self, cls, method=None, descriptor=None): self.cls = cls self.class_components = str(self.cls.name).strip('L').strip(';').split('/') self.class_path = self.class_components[:-1] self.class_name = self.class_components[-1] self.full_class_name = self.cls.name self.method = method self.descriptor = descriptor def class2func(path): """ Convert a path such as 'Landroid/support/v4/app/ActivityCompat;' into a method string 'CLASS_Landroid_support_v4_app_ActivityCompat' so we can call d.CLASS_Landroid_support_v4_app_ActivityCompat.get_source() """ func = "CLASS_" + path.replace("/", "_").replace("$", "_").replace(";", "") return func def method2func(method): return "METHOD_" + method.replace("/", "_").replace("[", "").replace( "(", "").replace(")", "").replace(";", "") def classmethod2func(class_, method_): """Convert two strings such as "Lcom/mwr/example/sieve/AddEntryActivity;" and "onCreate" into a string "CLASS_Lcom_example_sieve_AddEntryActivity.METHOD_onCreate" so we can access d.CLASS_Lcom_example_sieve_AddEntryActivity.METHOD_onCreate.XREFfrom """ return "{}.{}".format(class2func(class_), method2func(method_)) def classmethod2display(class_, method_, descriptor_): """Convert two strings such as "Lcom/mwr/example/sieve/AddEntryActivity;" and "onCreate" into a beautiful :) string to display Xrefs: "Lcom/mwr/example/sieve/AddEntryActivity; -> onCreate" """ return "{} -> {} ( {} )".format(class_, method_, descriptor_) def display2classmethod(display): """Opposite of classmethod2display. """ L = display.split(" -> ") return L[0], L[1] def classdot2func(path): """ Convert a path such as 'android.support.v4.app.ActivityCompat' into a method string 'CLASS_Landroid_support_v4_app_ActivityCompat' so we can call d.CLASS_Landroid_support_v4_app_ActivityCompat.get_source() """ func = "CLASS_L" + path.replace(".", "_").replace("$", "_") return func def classdot2class(path): """ Convert a path such as 'android.support.v4.app.ActivityCompat' into a string 'Landroid/support/v4/app/ActivityCompat' so we can change name of a class by d.CLASS_Landroid_support_v4_app_ActivityCompat.set_name(new_name) """ if path[0] == 'L' and path[-1] == ';': log.debug("WARNING: %s already a Lclass; name" % path) return path new_name = 'L' + path.replace('.', '/') + ';' return new_name def proto2methodprotofunc(proto): """Convert a prototype such as 'Ljava/lang/String;' into a string 'Ljava_lang_String" so we can append that to the 'METHOD_myMethod' if its export python name contains the prototype """ return proto.replace(' ', '').replace('(', '').replace('[', '').replace( ')', '').replace('/', '_').replace(';', '')
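A few illustrative checks of the name-mangling helpers above, reusing the example names from their docstrings (the method descriptor "()V" is just a placeholder, and the asserts assume they run in the same module as the helpers).

# These asserts simply restate the docstring examples given above.
assert class2func("Landroid/support/v4/app/ActivityCompat;") == "CLASS_Landroid_support_v4_app_ActivityCompat"
assert method2func("onCreate") == "METHOD_onCreate"
assert classdot2class("android.support.v4.app.ActivityCompat") == "Landroid/support/v4/app/ActivityCompat;"
assert classmethod2display("Lcom/mwr/example/sieve/AddEntryActivity;", "onCreate", "()V") == "Lcom/mwr/example/sieve/AddEntryActivity; -> onCreate ( ()V )"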
# Generated from sdoc/antlr/sdoc1Parser.g4 by ANTLR 4.5.3 from antlr4 import * if __name__ is not None and "." in __name__: from .sdoc1Parser import sdoc1Parser else: from sdoc1Parser import sdoc1Parser # This class defines a complete generic visitor for a parse tree produced by sdoc1Parser. class sdoc1ParserVisitor(ParseTreeVisitor): # Visit a parse tree produced by sdoc1Parser#sdoc. def visitSdoc(self, ctx:sdoc1Parser.SdocContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#text. def visitText(self, ctx:sdoc1Parser.TextContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#command. def visitCommand(self, ctx:sdoc1Parser.CommandContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_comment. def visitCmd_comment(self, ctx:sdoc1Parser.Cmd_commentContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_debug. def visitCmd_debug(self, ctx:sdoc1Parser.Cmd_debugContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_expression. def visitCmd_expression(self, ctx:sdoc1Parser.Cmd_expressionContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_error. def visitCmd_error(self, ctx:sdoc1Parser.Cmd_errorContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_if. def visitCmd_if(self, ctx:sdoc1Parser.Cmd_ifContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_include. def visitCmd_include(self, ctx:sdoc1Parser.Cmd_includeContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_notice. def visitCmd_notice(self, ctx:sdoc1Parser.Cmd_noticeContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_substitute. def visitCmd_substitute(self, ctx:sdoc1Parser.Cmd_substituteContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#cmd_sdoc2. def visitCmd_sdoc2(self, ctx:sdoc1Parser.Cmd_sdoc2Context): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#primaryExpressionIdentifier. def visitPrimaryExpressionIdentifier(self, ctx:sdoc1Parser.PrimaryExpressionIdentifierContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#primaryExpressionIntegerConstant. def visitPrimaryExpressionIntegerConstant(self, ctx:sdoc1Parser.PrimaryExpressionIntegerConstantContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#primaryExpressionStringConstant. def visitPrimaryExpressionStringConstant(self, ctx:sdoc1Parser.PrimaryExpressionStringConstantContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#primaryExpressionSubExpression. def visitPrimaryExpressionSubExpression(self, ctx:sdoc1Parser.PrimaryExpressionSubExpressionContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#primaryExpressionParent. def visitPrimaryExpressionParent(self, ctx:sdoc1Parser.PrimaryExpressionParentContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#postfixExpressionExpression. def visitPostfixExpressionExpression(self, ctx:sdoc1Parser.PostfixExpressionExpressionContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#multiplicativeExpression. 
def visitMultiplicativeExpression(self, ctx:sdoc1Parser.MultiplicativeExpressionContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#additiveExpression. def visitAdditiveExpression(self, ctx:sdoc1Parser.AdditiveExpressionContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#relationalExpression. def visitRelationalExpression(self, ctx:sdoc1Parser.RelationalExpressionContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#equalityExpression. def visitEqualityExpression(self, ctx:sdoc1Parser.EqualityExpressionContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#logicalAndExpressionParent. def visitLogicalAndExpressionParent(self, ctx:sdoc1Parser.LogicalAndExpressionParentContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#logicalAndExpressionAnd. def visitLogicalAndExpressionAnd(self, ctx:sdoc1Parser.LogicalAndExpressionAndContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#logicalOrExpressionParent. def visitLogicalOrExpressionParent(self, ctx:sdoc1Parser.LogicalOrExpressionParentContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#logicalOrExpressionLogicalOr. def visitLogicalOrExpressionLogicalOr(self, ctx:sdoc1Parser.LogicalOrExpressionLogicalOrContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#assignmentExpressionParent. def visitAssignmentExpressionParent(self, ctx:sdoc1Parser.AssignmentExpressionParentContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#assignmentExpressionAssignment. def visitAssignmentExpressionAssignment(self, ctx:sdoc1Parser.AssignmentExpressionAssignmentContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#assignmentOperator. def visitAssignmentOperator(self, ctx:sdoc1Parser.AssignmentOperatorContext): return self.visitChildren(ctx) # Visit a parse tree produced by sdoc1Parser#expression. def visitExpression(self, ctx:sdoc1Parser.ExpressionContext): return self.visitChildren(ctx) del sdoc1Parser
import struct import pytest from pybind11_tests import buffers as m from pybind11_tests import ConstructorStats pytestmark = pytest.requires_numpy with pytest.suppress(ImportError): import numpy as np def test_from_python(): with pytest.raises(RuntimeError) as excinfo: m.Matrix(np.array([1, 2, 3])) # trying to assign a 1D array assert str(excinfo.value) == "Incompatible buffer format!" m3 = np.array([[1, 2, 3], [4, 5, 6]]).astype(np.float32) m4 = m.Matrix(m3) for i in range(m4.rows()): for j in range(m4.cols()): assert m3[i, j] == m4[i, j] cstats = ConstructorStats.get(m.Matrix) assert cstats.alive() == 1 del m3, m4 assert cstats.alive() == 0 assert cstats.values() == ["2x3 matrix"] assert cstats.copy_constructions == 0 # assert cstats.move_constructions >= 0 # Don't invoke any assert cstats.copy_assignments == 0 assert cstats.move_assignments == 0 # PyPy: Memory leak in the "np.array(m, copy=False)" call # https://bitbucket.org/pypy/pypy/issues/2444 @pytest.unsupported_on_pypy def test_to_python(): mat = m.Matrix(5, 4) assert memoryview(mat).shape == (5, 4) assert mat[2, 3] == 0 mat[2, 3] = 4.0 mat[3, 2] = 7.0 assert mat[2, 3] == 4 assert mat[3, 2] == 7 assert struct.unpack_from('f', mat, (3 * 4 + 2) * 4) == (7, ) assert struct.unpack_from('f', mat, (2 * 4 + 3) * 4) == (4, ) mat2 = np.array(mat, copy=False) assert mat2.shape == (5, 4) assert abs(mat2).sum() == 11 assert mat2[2, 3] == 4 and mat2[3, 2] == 7 mat2[2, 3] = 5 assert mat2[2, 3] == 5 cstats = ConstructorStats.get(m.Matrix) assert cstats.alive() == 1 del mat pytest.gc_collect() assert cstats.alive() == 1 del mat2 # holds a mat reference pytest.gc_collect() assert cstats.alive() == 0 assert cstats.values() == ["5x4 matrix"] assert cstats.copy_constructions == 0 # assert cstats.move_constructions >= 0 # Don't invoke any assert cstats.copy_assignments == 0 assert cstats.move_assignments == 0 @pytest.unsupported_on_pypy def test_inherited_protocol(): """SquareMatrix is derived from Matrix and inherits the buffer protocol""" matrix = m.SquareMatrix(5) assert memoryview(matrix).shape == (5, 5) assert np.asarray(matrix).shape == (5, 5) @pytest.unsupported_on_pypy def test_pointer_to_member_fn(): for cls in [m.Buffer, m.ConstBuffer, m.DerivedBuffer]: buf = cls() buf.value = 0x12345678 value = struct.unpack('i', bytearray(buf))[0] assert value == 0x12345678
#!/usr/bin/python3 import logging import os import re import subprocess import sys from copy import copy ESLINT = 'hunt/node_modules/eslint/bin/eslint.js' logging.basicConfig(level=logging.INFO) def es_check(filename, content): backup = filename + '.backup' os.rename(filename, backup) f = open(filename, 'a') f.write(content + '\n') f.close() logging.debug('running: %s -c .eslintrc --max-warnings 0 %s 2>&1 >/dev/null' % (ESLINT, filename)) r = subprocess.call('%s -c .eslintrc --max-warnings 0 %s 2>&1 >/dev/null' % (ESLINT, filename), shell=True) # noqa: DUO116 logging.debug('eslint rc %d' % r) os.unlink(filename) os.rename(backup, filename) return r != 0 def show_error(f, line_no, check=None): line_no += 1 if check: print('%s:%i Useless check %s' % (f, line_no, check)) else: print('%s:%i Useless global check' % (f, line_no)) def check_file(filename): f = open(filename) original_content = f.read() lines = original_content.splitlines() for line_no, line in enumerate(lines, 0): line = line.strip() if 'eslint-disable' in line: eslint_kw = re.search(r'(eslint-disable[^ ]*)', line) eslint_kw = eslint_kw.group(0) terms = line.split(eslint_kw, 1) logging.debug('%s:%i terms %s' % (filename, line_no, terms)) exceptions = re.split('[ ,]', terms[1]) exceptions = list(filter(lambda x: x != '', exceptions)) post_terms = '' for j, exc in enumerate(exceptions): if not re.match(r'[a-zA-Z0-9/_-]+$', exc): logging.debug('end of except "%s"' % exc) post_terms = ' ' + ' '.join(exceptions[j:]) exceptions = exceptions[:j] break logging.debug('post terms %s' % post_terms) logging.debug('checking exceptions %s' % exceptions) if len(exceptions) < 2: content = copy(lines) content.pop(line_no) if not es_check(filename, '\n'.join(content)): if len(exceptions): show_error(filename, line_no, exceptions[0]) else: show_error(filename, line_no) continue for j, exc in enumerate(exceptions): new_exceptions = copy(exceptions) new_exceptions.pop(j) _terms = terms[0] + eslint_kw + ' ' + ','.join(new_exceptions) + post_terms content = '\n'.join(lines[:line_no] + [_terms] + lines[line_no + 1:]) if not es_check(filename, content): show_error(filename, line_no, exc) for arg in sys.argv[1:]: check_file(arg)
/* * Copyright 2018 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.plugin.ning.asynchttpclient; import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.*; import java.util.concurrent.Future; import com.navercorp.pinpoint.pluginit.utils.AgentPath; import com.navercorp.pinpoint.pluginit.utils.PluginITConstants; import com.navercorp.pinpoint.pluginit.utils.WebServer; import com.navercorp.pinpoint.test.plugin.ImportPlugin; import com.navercorp.pinpoint.test.plugin.PinpointAgent; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifier; import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifierHolder; import com.navercorp.pinpoint.test.plugin.Dependency; import com.navercorp.pinpoint.test.plugin.JvmVersion; import com.navercorp.pinpoint.test.plugin.PinpointPluginTestSuite; import com.ning.http.client.AsyncHandler; import com.ning.http.client.AsyncHttpClient; import com.ning.http.client.Request; import com.ning.http.client.Response; /** * @author netspider */ @RunWith(PinpointPluginTestSuite.class) @PinpointAgent(AgentPath.PATH) @Dependency({ "com.ning:async-http-client:[1.7.24],[1.8.16,1.8.999)", WebServer.VERSION, PluginITConstants.VERSION}) @JvmVersion(7) @ImportPlugin({"com.navercorp.pinpoint:pinpoint-ning-asynchttpclient-plugin"}) public class NingAsyncHttpClientIT { private static WebServer webServer; @BeforeClass public static void BeforeClass() throws Exception { webServer = WebServer.newTestWebServer(); } @AfterClass public static void AfterClass() { webServer = WebServer.cleanup(webServer); } @Test public void test() throws Exception { AsyncHttpClient client = new AsyncHttpClient(); try { Future<Response> f = client.preparePost(webServer.getCallHttpUrl()).addParameter("param1", "value1").execute(); Response response = f.get(); } finally { client.close(); } PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance(); verifier.printCache(); String destinationId = webServer.getHostAndPort(); String httpUrl = webServer.getCallHttpUrl(); verifier.verifyTrace(event("ASYNC_HTTP_CLIENT", AsyncHttpClient.class.getMethod("executeRequest", Request.class, AsyncHandler.class), null, null, destinationId, annotation("http.url", httpUrl))); verifier.verifyTraceCount(0); } }
// // Copyright (c) 2016, Bianco Veigel // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. // namespace DiscUtils.Xfs { internal enum ExtentFlag : byte { Normal, Unwritten, Invalid } }
<html><body>Warrior's Grave:<br> <Button ALIGN=LEFT ICON="NORMAL" action="bypass -h Quest Q00126_TheNameOfEvil2 32122-5.html">You have entered the first verse of the warrior song correctly. Please sing the second verse.</Button> </body></html>
package cn.dunn.vertx; import io.vertx.core.Vertx; import io.vertx.core.VertxOptions; import io.vertx.core.spi.cluster.ClusterManager; import io.vertx.spi.cluster.zookeeper.ZookeeperClusterManager; import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.ExponentialBackoffRetry; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; @Component public class VertxBean implements FactoryBean<Vertx> { @Value("${zk.url}") private String zkUrl; @Value("${vertx.namespace}") private String namespace; private class Result { Vertx vertx = null; } @Override public Vertx getObject() throws Exception { Result result = new Result(); ReentrantLock lock = new ReentrantLock(); Condition condition = lock.newCondition(); RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 1); CuratorFramework curatorFramework = CuratorFrameworkFactory.builder().connectString(zkUrl).namespace(namespace).sessionTimeoutMs(100).connectionTimeoutMs(100) .retryPolicy(retryPolicy).build(); curatorFramework.start(); ClusterManager mgr = new ZookeeperClusterManager(curatorFramework); VertxOptions options = new VertxOptions().setClusterManager(mgr); lock.lock(); try { Vertx.clusteredVertx(options, res -> { lock.lock(); try { if (res.succeeded()) { Vertx vertx = res.result(); result.vertx = vertx; } else { res.cause().printStackTrace(); } condition.signalAll(); } finally { lock.unlock(); } }); condition.await(); } finally { lock.unlock(); } return result.vertx; } @Override public Class<?> getObjectType() { return Vertx.class; } @Override public boolean isSingleton() { return true; } }
import * as MetaProviderPlugins from './meta'; import * as StreamProviderPlugins from './stream'; import * as LyricsProviderPlugins from './lyrics'; export const config = { plugins: { metaProviders: MetaProviderPlugins, streamProviders: StreamProviderPlugins, lyricsProviders: LyricsProviderPlugins } };
import { Action } from "redux" import { Observable, Subject } from "rxjs" import { Store } from "../../store/store" type DontDispatchConfig = { dispatch: false } type DispatchConfig = { dispatch?: true } type Config = DispatchConfig | DontDispatchConfig /** Simple placeholder for NgRx's createEffect. The goal is to provide the * same api as NgRx, so that we can switch to NgRx in the long run. * Please note that its functionality is very minimal so far. */ export function createEffect<T>(source: () => Observable<T>, config?: Config) { const subjectOfEffect = new Subject<T>() source().subscribe(output => { if (!config || config.dispatch !== false) { if (!isAction(output)) { throw new Error("output must be an action") } Store.dispatch(output) } subjectOfEffect.next(output) }) return subjectOfEffect as Observable<T> } function isAction(something: unknown): something is Action { return something && Object.prototype.hasOwnProperty.call(something, "type") }
/**************************************************************************************** Copyright (C) 2015 Autodesk, Inc. All rights reserved. Use of this software is subject to the terms of the Autodesk license agreement provided at the time of installation or download, or which otherwise accompanies this software in either electronic or hard copy form. ****************************************************************************************/ //! \file fbxgeometryweightedmap.h #ifndef _FBXSDK_SCENE_GEOMETRY_WEIGHTED_MAP_H_ #define _FBXSDK_SCENE_GEOMETRY_WEIGHTED_MAP_H_ #include <fbxsdk/fbxsdk_def.h> #include <fbxsdk/core/fbxobject.h> #include <fbxsdk/scene/geometry/fbxweightedmapping.h> #include <fbxsdk/fbxsdk_nsbegin.h> class FbxGeometry; /** \brief This class provides the structure to build a correspondence between 2 geometries. * * This correspondence is done at the vertex level. Which means that for each vertex in the * source geometry, you can have from 0 to N corresponding vertices in the destination * geometry. Each corresponding vertex is weighted. * * For example, if the source geometry is a NURB and the destination geometry is a mesh, * the correspondence object will express the correspondence between the NURB's control vertices * and the mesh's vertices. * * If the mesh corresponds to a tesselation of the NURB, the correspondence object can be used * to transfer any deformation that affect the NURB's control vertices to the mesh's vertices. * * See FbxWeightedMapping for more details. */ class FBXSDK_DLL FbxGeometryWeightedMap : public FbxObject { FBXSDK_OBJECT_DECLARE(FbxGeometryWeightedMap, FbxObject); public: /** Set correspondence values. * \param pWeightedMappingTable Pointer to the table containing values * \remark \e pWeightedMappingTable becomes owned by this object and will be destroyed by it * when the object goes out of scope or on the next call to SetValues(). The deletion * uses FbxDelete() so the content of the pointer must have been allocated with FbxNew<>() */ void SetValues(const FbxWeightedMapping* pWeightedMappingTable); /** Return correspondence values. * \return Pointer to the correspondence values table. */ FbxWeightedMapping* GetValues() const; /** Return source geometry. * \return Pointer to the source geometry, or \c NULL if there is no connected source geometry */ FbxGeometry* GetSourceGeometry(); /** Return destination geometry. * \return Pointer to the destination geometry, or \c NULL if there is no connected destination geometry */ FbxGeometry* GetDestinationGeometry(); /***************************************************************************************************************************** ** WARNING! Anything beyond these lines is for internal use, may not be documented and is subject to change without notice! ** *****************************************************************************************************************************/ #ifndef DOXYGEN_SHOULD_SKIP_THIS FbxObject& Copy(const FbxObject& pObject) override; protected: void Construct(const FbxObject* pFrom) override; void Destruct(bool pRecursive) override; // Real weigths table FbxWeightedMapping* mWeightedMapping; #endif /* !DOXYGEN_SHOULD_SKIP_THIS *****************************************************************************************/ }; #include <fbxsdk/fbxsdk_nsend.h> #endif /* _FBXSDK_SCENE_GEOMETRY_WEIGHTED_MAP_H_ */
# Copyright © 2020, Joseph Berry, Rico Tabor ([email protected]) # OpenDrop is released under the GNU GPL License. You are free to # modify and distribute the code, but always under the same license # (i.e. you cannot make commercial derivatives). # # If you use this software in your research, please cite the following # journal articles: # # J. D. Berry, M. J. Neeson, R. R. Dagastine, D. Y. C. Chan and # R. F. Tabor, Measurement of surface and interfacial tension using # pendant drop tensiometry. Journal of Colloid and Interface Science 454 # (2015) 226–237. https://doi.org/10.1016/j.jcis.2015.05.012 # # E. Huang, T. Denning, A. Skoufis, J. Qi, R. R. Dagastine, R. F. Tabor # and J. D. Berry, OpenDrop: Open-source software for pendant drop # tensiometry & contact angle measurements, submitted to the Journal of # Open Source Software # # These citations help us not only to understand who is using and # developing OpenDrop, and for what purpose, but also to justify # continued development of this code and other open source resources. # # OpenDrop is distributed WITHOUT ANY WARRANTY; without even the # implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR # PURPOSE. See the GNU General Public License for more details. You # should have received a copy of the GNU General Public License along # with this software. If not, see <https://www.gnu.org/licenses/>. from . import image_acquisition, footer
package pages; import static org.fest.assertions.Assertions.assertThat; import org.fluentlenium.core.FluentPage; import org.openqa.selenium.WebDriver; public class BookCreatePage extends FluentPage { private String url; public BookCreatePage(WebDriver webDriver, int port) { super(webDriver); this.url = "http://localhost:" + port + "/books/create"; } @Override public String getUrl() { return this.url; } @Override public void isAt() { assertThat(title()).isEqualTo("Student Book Exchange | Create Book"); } public void createNewBook(String bookId) { fill("#bookId").with(bookId); fill("#title").with("Title"); fill("#price").with("30"); fill("#isbn").with("1234567890"); submit("#create"); } }
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { "name" : "Marketing", "version" : "1.1", "depends" : ["base", "base_setup"], "author" : "OpenERP SA", "category": 'Hidden/Dependency', 'complexity': "expert", "description": """ Menu for Marketing. =================== Contains the installer for marketing-related modules. """, 'website': 'http://www.openerp.com', 'init_xml': [], 'update_xml': [ 'security/marketing_security.xml', 'security/ir.model.access.csv', 'marketing_view.xml' ], 'demo_xml': ['marketing_demo.xml'], 'installable': True, 'auto_install': False, 'certificate' : '00598574977629228189', 'images': ['images/config_marketing.jpeg'], } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class WebhookReceiver(Model): """A webhook receiver. :param name: The name of the webhook receiver. Names must be unique across all receivers within an action group. :type name: str :param service_uri: The URI where webhooks should be sent. :type service_uri: str """ _validation = { 'name': {'required': True}, 'service_uri': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'service_uri': {'key': 'serviceUri', 'type': 'str'}, } def __init__(self, name, service_uri): self.name = name self.service_uri = service_uri
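A small illustrative construction of the model above; the receiver name and URI are made-up placeholders.

# Both fields are required (see _validation above).
receiver = WebhookReceiver(name="on-call-webhook", service_uri="https://example.invalid/alerts")
print(receiver.name, receiver.service_uri)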
from django.contrib import admin from django_summernote.admin import SummernoteModelAdmin from django.utils.translation import ugettext_lazy as _ from django.template import Context from django.conf import settings from core.models import Post, Author, Tag, Category, Rating, NewsLetter from core.utils import read_template, send_gearman_mail from core.forms import NewsLetterForm class InlineRating(admin.TabularInline): model = Rating extra = 0 class PostAdmin(SummernoteModelAdmin): inlines = (InlineRating,) admin.site.register(Post, PostAdmin) class AuthorAdmin(admin.ModelAdmin): list_display = ['email', '__str__', 'username', 'is_active', 'receive_update'] filter_display = ['is_active'] admin.site.register(Author, AuthorAdmin) class TagAdmin(admin.ModelAdmin): pass admin.site.register(Tag, TagAdmin) class CategoryAdmin(admin.ModelAdmin): pass admin.site.register(Category, CategoryAdmin) def send_newsletters(modeladmin, request, queryset): for letter in queryset: context = { 'subject': letter.subject, 'content': letter.content, } template = read_template("/home/django/projects/fun/core/templates/core/post_newsletter_email.txt") for author in Author.objects.filter(newsletter=True): send_gearman_mail(letter.subject, template.render(Context(context)), '[email protected]', [author.email], fail_silently=False, auth_user=settings.MANDRILL_USER, auth_password=settings.MANDRILL_API_KEY, host=settings.MANDRILL_HOST) queryset.update(sent=True) send_newsletters.short_description = _("Send selected Newsletters") class NewsLetterAdmin(admin.ModelAdmin): form = NewsLetterForm list_display = ['subject', 'sent', 'date_updated', 'date_created', 'date_sent'] filter_display = ['sent'] actions = ['send_newsletters'] admin.site.register(NewsLetter, NewsLetterAdmin)
#!/usr/bin/env python # # A library that provides a Python interface to the Telegram Bot API # Copyright (C) 2015-2018 # Leandro Toledo de Souza <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser Public License for more details. # # You should have received a copy of the GNU Lesser Public License # along with this program. If not, see [http://www.gnu.org/licenses/]. """This module contains an object that represents a Telegram Contact.""" from telegram import TelegramObject class Contact(TelegramObject): """This object represents a phone contact. Attributes: phone_number (:obj:`str`): Contact's phone number. first_name (:obj:`str`): Contact's first name. last_name (:obj:`str`): Optional. Contact's last name. user_id (:obj:`int`): Optional. Contact's user identifier in Telegram. Args: phone_number (:obj:`str`): Contact's phone number. first_name (:obj:`str`): Contact's first name. last_name (:obj:`str`, optional): Contact's last name. user_id (:obj:`int`, optional): Contact's user identifier in Telegram. **kwargs (:obj:`dict`): Arbitrary keyword arguments. """ def __init__(self, phone_number, first_name, last_name=None, user_id=None, **kwargs): # Required self.phone_number = str(phone_number) self.first_name = first_name # Optionals self.last_name = last_name self.user_id = user_id self._id_attrs = (self.phone_number,) @classmethod def de_json(cls, data, bot): if not data: return None return cls(**data)
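A brief illustrative sketch of using the Contact class above, with made-up contact data.

# Construct a Contact directly, then rebuild one from an already-decoded API payload.
contact = Contact(phone_number="+15550100", first_name="Ada", last_name="Lovelace")
payload = {"phone_number": "+15550100", "first_name": "Ada"}
same_contact = Contact.de_json(payload, bot=None)  # de_json returns None for an empty payload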
/*! * Copyright (c) 2015-2020 Cisco Systems, Inc. See LICENSE file. */ import Events from 'ampersand-events'; import {oneFlight} from '@webex/common'; const bindings = new WeakMap(); /** * Makes a WebexStore for the specified type bound to the specified webex instance * @param {string} type * @param {ProxyWebex} webex * @private * @returns {WebexStore} */ export default function makeWebexStore(type, webex) { /** * Lazy Key-Value Store Interface */ class WebexStore { /** * @param {Object} attrs * @param {Object} options * @returns {Store} */ constructor() { webex.logger.debug(`webex-store: constructing ${type}Storage`); bindings.set(this, new Map()); } /** * Provides easy access to the storage adapter identified in config. * @returns {Object} */ get adapter() { return webex.config.storage[`${type}Adapter`]; } /** * @returns {WeakMap} */ get bindings() { return bindings.get(this); } /** * Clears the store * @returns {Promise} */ clear() { const promises = []; this.bindings.forEach((binding) => { promises.push(binding.clear()); }); return Promise.all(promises); } /** * Deletes the specified key from the store * @param {string} namespace * @param {string} key * @returns {[type]} */ del(namespace, key) { webex.logger.debug(`webex-store: removing ${namespace}:${key}`); return this._getBinding(namespace) .then((binding) => binding.del(key)); } /** * Retrieves the value specified by key from the store. Rejects with * NotFoundError if no value can be found * @param {string} namespace * @param {string} key * @returns {Promise} */ get(namespace, key) { webex.logger.debug(`webex-store: retrieving ${namespace}:${key}`); return this._getBinding(namespace) .then((binding) => binding.get(key)); } /** * Writes a value to the store. Deletes the specified key from the store * if passed `undefined` * @param {string} namespace * @param {string} key * @param {any} value * @returns {Promise} Resolves with value (to simplify write-through caching) */ put(namespace, key, value) { if (typeof value === 'undefined') { return this.del(namespace, key); } webex.logger.debug(`webex-store: setting ${namespace}:${key}`); return this._getBinding(namespace) .then((binding) => binding.put(key, value.serialize ? value.serialize() : value)) .then(() => value); } @oneFlight({keyFactory: (namespace) => namespace}) /** * Creates an interface bound to the specified namespace * @param {string} namespace * @private * @returns {Promise} */ // suppress doc warning because decorators confuse eslint // eslint-disable-next-line require-jsdoc _getBinding(namespace) { return new Promise((resolve) => { webex.logger.debug(`storage: getting binding for \`${namespace}\``); const binding = this.bindings.get(namespace); if (binding) { webex.logger.debug(`storage: found binding for \`${namespace}\``); return resolve(binding); } return resolve(this.adapter.bind(namespace, {logger: webex.logger}) .then((_binding) => { webex.logger.debug(`storage: made binding for \`${namespace}\``); this.bindings.set(namespace, _binding); return _binding; })); }); } } Object.assign(WebexStore.prototype, Events); return new WebexStore(); }
""" Copyright (c) 2015 Michael Bright and Bamboo HR LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. test_history_date_created Revision ID: ce9be6e8354c Revises: bf363c3a9ef0 Create Date: 2018-04-30 18:44:54.258839 """ # revision identifiers, used by Alembic. import datetime from sqlalchemy import func revision = 'ce9be6e8354c' down_revision = 'bf363c3a9ef0' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### if 'sqlite' == op.get_context().dialect.name: op.add_column('qa_test_histories', sa.Column('date_created', sa.DateTime(), default=datetime.datetime.utcnow())) else: op.add_column('qa_test_histories', sa.Column('date_created', sa.DateTime(), nullable=False, server_default=func.now(), default=datetime.datetime.utcnow())) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('qa_test_histories', 'date_created') ### end Alembic commands ###
import datetime import os from django.db import models from django.core.files.base import ContentFile from django.utils.translation import ugettext as _ from django.utils.hashcompat import md5_constructor from django.utils.encoding import smart_str from django.db.models import signals from django.contrib.auth.models import User try: from cStringIO import StringIO dir(StringIO) # Placate PyFlakes except ImportError: from StringIO import StringIO try: from PIL import Image dir(Image) # Placate PyFlakes except ImportError: import Image from avatar.util import invalidate_cache from avatar.settings import (AVATAR_STORAGE_DIR, AVATAR_RESIZE_METHOD, AVATAR_MAX_AVATARS_PER_USER, AVATAR_THUMB_FORMAT, AVATAR_HASH_USERDIRNAMES, AVATAR_HASH_FILENAMES, AVATAR_THUMB_QUALITY, AUTO_GENERATE_AVATAR_SIZES) def avatar_file_path(instance=None, filename=None, size=None, ext=None): tmppath = [AVATAR_STORAGE_DIR] if AVATAR_HASH_USERDIRNAMES: tmp = md5_constructor(instance.user.username).hexdigest() tmppath.extend([tmp[0], tmp[1], instance.user.username]) else: tmppath.append(instance.user.username) if not filename: # Filename already stored in database filename = instance.avatar.name if ext and AVATAR_HASH_FILENAMES: # An extension was provided, probably because the thumbnail # is in a different format than the file. Use it. Because it's # only enabled if AVATAR_HASH_FILENAMES is true, we can trust # it won't conflict with another filename (root, oldext) = os.path.splitext(filename) filename = root + "." + ext else: # File doesn't exist yet if AVATAR_HASH_FILENAMES: (root, ext) = os.path.splitext(filename) filename = md5_constructor(smart_str(filename)).hexdigest() filename = filename + ext if size: tmppath.extend(['resized', str(size)]) tmppath.append(os.path.basename(filename)) return os.path.join(*tmppath) def find_extension(format): format = format.lower() if format == 'jpeg': format = 'jpg' return format class Avatar(models.Model): user = models.ForeignKey(User) primary = models.BooleanField(default=False) avatar = models.ImageField(max_length=1024, upload_to=avatar_file_path, blank=True) date_uploaded = models.DateTimeField(default=datetime.datetime.now) def __unicode__(self): return _(u'Avatar for %s') % self.user def save(self, *args, **kwargs): avatars = Avatar.objects.filter(user=self.user) if self.pk: avatars = avatars.exclude(pk=self.pk) if AVATAR_MAX_AVATARS_PER_USER > 1: if self.primary: avatars = avatars.filter(primary=True) avatars.update(primary=False) else: avatars.delete() invalidate_cache(self.user) super(Avatar, self).save(*args, **kwargs) def delete(self, *args, **kwargs): invalidate_cache(self.user) super(Avatar, self).delete(*args, **kwargs) def thumbnail_exists(self, size): return self.avatar.storage.exists(self.avatar_name(size)) def create_thumbnail(self, size, quality=None): # invalidate the cache of the thumbnail with the given size first invalidate_cache(self.user, size) try: orig = self.avatar.storage.open(self.avatar.name, 'rb').read() image = Image.open(StringIO(orig)) except IOError: return # What should we do here? Render a "sorry, didn't work" img? 
quality = quality or AVATAR_THUMB_QUALITY (w, h) = image.size if w != size or h != size: if w > h: diff = (w - h) / 2 image = image.crop((diff, 0, w - diff, h)) else: diff = (h - w) / 2 image = image.crop((0, diff, w, h - diff)) if image.mode != "RGB": image = image.convert("RGB") image = image.resize((size, size), AVATAR_RESIZE_METHOD) thumb = StringIO() image.save(thumb, AVATAR_THUMB_FORMAT, quality=quality) thumb_file = ContentFile(thumb.getvalue()) else: thumb_file = ContentFile(orig) thumb = self.avatar.storage.save(self.avatar_name(size), thumb_file) def avatar_url(self, size): return self.avatar.storage.url(self.avatar_name(size)) def avatar_name(self, size): ext = find_extension(AVATAR_THUMB_FORMAT) return avatar_file_path( instance=self, size=size, ext=ext ) def create_default_thumbnails(instance=None, created=False, **kwargs): if created: for size in AUTO_GENERATE_AVATAR_SIZES: instance.create_thumbnail(size) signals.post_save.connect(create_default_thumbnails, sender=Avatar)
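A worked sketch of the centre-crop arithmetic in create_thumbnail above, for a hypothetical 300x200 source image and an 80-pixel thumbnail.

# Landscape source (w > h): trim equal strips from the left and right to get a square,
# which is then resized down to (size, size) with AVATAR_RESIZE_METHOD.
w, h = 300, 200
size = 80
diff = (w - h) // 2                  # 50 pixels trimmed from each side
crop_box = (diff, 0, w - diff, h)    # (50, 0, 250, 200) -> a 200x200 square
# image.crop(crop_box).resize((size, size), AVATAR_RESIZE_METHOD) would then produce the thumbnail.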
# -*- coding: utf-8 -*- from django.db import models from cms.models import CMSPlugin # sorry for the cryptic names. But we were hitting max lengths on Django 1.6 # and 1.7 with the too long names otherwise. class UnalteredPM(CMSPlugin): title = models.CharField(max_length=50) search_fields = ['title'] class NoRelNmePM(CMSPlugin): cmsplugin_ptr = models.OneToOneField(CMSPlugin, related_name='+', parent_link=True) title = models.CharField(max_length=50) search_fields = ['title'] class NoRelQNmePM(CMSPlugin): cmsplugin_ptr = models.OneToOneField(CMSPlugin, related_query_name='+', parent_link=True) title = models.CharField(max_length=50) search_fields = ['title'] class CustomRelQNmePM(CMSPlugin): cmsplugin_ptr = models.OneToOneField(CMSPlugin, related_query_name='reldesc_custom_relqn', parent_link=True) title = models.CharField(max_length=50) search_fields = ['title'] class CustomRelNmePM(CMSPlugin): cmsplugin_ptr = models.OneToOneField(CMSPlugin, related_name='reldesc_custom_reln', parent_link=True) title = models.CharField(max_length=50) search_fields = ['title'] class CustomRelNmeAndRelQNmePM(CMSPlugin): cmsplugin_ptr = models.OneToOneField(CMSPlugin, related_name='reldesc_custom_reln2', related_query_name='reldesc_custom_relqn2', parent_link=True) title = models.CharField(max_length=50) search_fields = ['title']
import * as React from 'react';
import { Card, Box, Small, Text, Button } from 'rebass';

import { Combat, Values } from '@battles/models';

type CombatInfoProps = {
  combat: Combat;
};

const CombatInfo: React.StatelessComponent<CombatInfoProps> = ({ combat }) => (
  <Card width={256}>
    <Box p={2}>
      <Text>Combat {combat.location.data.id} </Text>
      {combat.combatants.map((combatant, i) => (
        <Small key={i}>
          <Text color={Values.ColourStrings[combatant.player ? combatant.player.data.colour : 'black']}>
            {combatant.player ? `Player ${combatant.player.data.id}` : 'No Player'}{' '}
            <Small color="black">{combatant.combatRating} points</Small>
          </Text>
        </Small>
      ))}
    </Box>
  </Card>
);

export default CombatInfo;
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import unittest

from myBinaryStruct.ttypes import *
from myBoolStruct.ttypes import *
from myByteStruct.ttypes import *
from myComplexStruct.ttypes import *
from myDoubleStruct.ttypes import *
from myI16Struct.ttypes import *
from myI32Struct.ttypes import *
from myMixedStruct.ttypes import *
from mySetStruct.ttypes import *
from myMapStruct.ttypes import *
from myNestedMapStruct.ttypes import *
from mySimpleStruct.ttypes import *

#import logging
#log = logging.getLogger()
#log.setLevel(logging.DEBUG)
#ch = logging.StreamHandler()
#ch.setLevel(logging.DEBUG)
#log.addHandler(ch)

from thrift.util.TValidator import TValidator


class ValidationTest(unittest.TestCase):

    def setUp(self):
        self.v = TValidator()

    def valid(self, msg):
        self.assertTrue(self.v.validate(msg))

    def wrong(self, msg):
        self.assertFalse(self.v.validate(msg))

    def testBinary(self):
        self.valid(myBinaryStruct(a='xyzzy'))
        self.wrong(myBinaryStruct(a=3))

    def testBool(self):
        self.valid(myBoolStruct(a=True))
        self.valid(myBoolStruct(a=False))
        self.wrong(myBoolStruct(a=1))
        self.wrong(myBoolStruct(a='a'))

    def testByte(self):
        self.valid(myByteStruct(a=0))
        self.valid(myByteStruct(a=127))
        self.valid(myByteStruct(a=-128))
        self.wrong(myByteStruct(a=1.1))
        self.wrong(myByteStruct(a=128))
        self.wrong(myByteStruct(a=-129))

    def testI16(self):
        self.valid(myI16Struct(a=4567))
        self.wrong(myI16Struct(a=0xFEDCBA987))

    def testI32(self):
        self.valid(myI32Struct(a=12131415))
        self.wrong(myI32Struct(a=0xFFFFFFFFEDCBA))

    def testDouble(self):
        self.valid(myDoubleStruct(a=-2.192))
        self.valid(myDoubleStruct(a=float('inf')))
        self.valid(myDoubleStruct(a=float('-inf')))
        self.wrong(myDoubleStruct(a=2))

    def testMixed(self):
        self.valid(myMixedStruct(
            a=[],
            b=[mySuperSimpleStruct(a=5)],
            c={'flame': -8, 'fire': -191},
            d={},
            e=set([1, 2, 3, 4])
        ))

    def testStruct(self):
        self.valid(mySetStruct(a=set([4, 8, 15, 16])))
        self.valid(mySetStruct(a=set([])))
        self.wrong(mySetStruct(a=set([1, 0xFFFFFFFFFF, 2])))

    def testMap(self):
        self.valid(myMapStruct(
            stringMap={"a": "A", "b": "B"},
            boolMap={True: "True", False: "False"},
            byteMap={1: "one", 2: "two"},
            doubleMap={float("0.1"): "0.one", float("0.2"): "0.two"},
            enumMap={1: "male", 2: "female"}
        ))
        self.valid(mySimpleStruct(
            a=False,
            b=87,
            c=7880,
            d=-7880,
            e=-1,
            f=-0.1,
            g='T-bone'
        ))
        self.wrong(mySimpleStruct(a=1))
        self.valid(myComplexStruct(
            mySimpleStruct(
                a=True,
                b=92,
                c=902,
                d=65536,
                e=123456789,
                f=3.1415,
                g='Whan that Aprille'
            ),
            b=[314, 15, 9, 26535],
            c={"qwerty": mySimpleStruct(c=1),
               "slippy": mySimpleStruct(a=False, b=-4, c=5)},
            e=EnumTest.EnumTwo,
            x=ExceptionTest("test")
        ))

    def testCustomValidator(self):
        def a_must_be_true(v):
            return v.a
        self.v.addClassValidator('mySimpleStruct', a_must_be_true)
        self.valid(myComplexStruct(
            mySimpleStruct(a=True),
        ))
        self.wrong(myComplexStruct(
            mySimpleStruct(a=False),
        ))

    def testNestedMap(self):
        self.valid(myNestedMapStruct(
            maps={"1": {"1": mySimpleStruct(c=1)},
                  "2": {"2": mySimpleStruct(a=False, c=2)}}
        ))
        self.wrong(myNestedMapStruct(
            maps={"1": {"1": mySimpleStruct(c=1)},
                  "2": {"2": mySimpleStruct(a=0, c=2)}}
        ))


if __name__ == "__main__":
    unittest.main()
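A standalone sketch of the validator outside the unittest harness, reusing the addClassValidator hook exercised above; the custom rule and the field values are invented for illustration.

# Hypothetical standalone use of TValidator (same generated ttypes modules as
# above; the custom rule and the field values are invented for illustration).
from thrift.util.TValidator import TValidator
from mySimpleStruct.ttypes import mySimpleStruct

v = TValidator()
# Reject any mySimpleStruct whose integer field `b` is set and negative.
v.addClassValidator('mySimpleStruct', lambda s: s.b is None or s.b >= 0)

ok = v.validate(mySimpleStruct(a=True, b=7))     # type checks and rule pass
bad = v.validate(mySimpleStruct(a=True, b=-1))   # types are fine, rule fails
print(ok, bad)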
<?php

use yii\helpers\Html;
use yii\widgets\DetailView;

/* @var $this yii\web\View */
/* @var $model common\models\Post */

$this->title = $model->title;
$this->params['breadcrumbs'][] = ['label' => Yii::t('backend', 'Posts'), 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="post-view">

    <h1><?= Html::encode($this->title) ?></h1>

    <p>
        <?= Html::a(Yii::t('backend', 'Update'), ['update', 'id' => $model->id], ['class' => 'btn btn-primary']) ?>
        <?= Html::a(Yii::t('backend', 'Delete'), ['delete', 'id' => $model->id], [
            'class' => 'btn btn-danger',
            'data' => [
                'confirm' => Yii::t('backend', 'Are you sure you want to delete this item?'),
                'method' => 'post',
            ],
        ]) ?>
    </p>

    <?= DetailView::widget([
        'model' => $model,
        'attributes' => [
            'id',
            'category_id',
            'title',
            'slug',
            'image',
            'content:ntext',
            'meta_title',
            'meta_keywords',
            'meta_description',
            'status',
            'created_by',
            'updated_by',
            'created_at',
            'updated_at',
        ],
    ]) ?>

</div>
//
//  UIViewController_vc.h
//  copyTemp
//
//  Created by Arthur.yu on 16/4/11.
//  Copyright © 2016年 Arthur.yu. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface UIViewController ()

- (void)hahahahahahaha;

@end
#ifndef __ASM_STRING_H__
#define __ASM_STRING_H__

/*
 * We don't do inline string functions, since the
 * optimised inline asm versions are not small.
 */
#define __HAVE_ARCH_STRRCHR
char * strrchr(const char * s, int c);

#define __HAVE_ARCH_STRCHR
char * strchr(const char * s, int c);

#define __HAVE_ARCH_MEMCPY
void * memcpy(void *, const void *, size_t);

#define __HAVE_ARCH_MEMMOVE
void * memmove(void *, const void *, size_t);

#define __HAVE_ARCH_MEMCHR
void * memchr(const void *, int, size_t);

#define __HAVE_ARCH_MEMZERO
#define __HAVE_ARCH_MEMSET
void * memset(void *, int, size_t);

#define __HAVE_ARCH_BCOPY
void __memzero(void *ptr, size_t n);

#define memset(p,v,n)                                           \
    ({                                                          \
        if ((n) != 0) {                                         \
            if (__builtin_constant_p((v)) && (v) == 0)          \
                __memzero((p),(n));                             \
            else                                                \
                memset((p),(v),(n));                            \
        }                                                       \
        (p);                                                    \
    })

#define memzero(p,n) ({ if ((n) != 0) __memzero((p),(n)); (p); })

#endif
import numpy as np


def removeOverlap(starts, ends, index):
    # Work on a single array with one row per segment: [start, end, encoding].
    res = np.column_stack((starts, ends, index))

    # First check if a segment is completely inside another segment.
    # If it is we remove it: when end[n] >= end[n+1] the segment n+1 lies
    # entirely inside segment n, e.g.
    #   starts: [1, 3, 6]
    #   ends:   [5, 4, 8]   -> segment (3, 4) is inside (1, 5)
    totalOverlapIndex = np.where(res[:-1, 1] >= res[1:, 1])
    if len(totalOverlapIndex[0]) > 0:
        # As there can be more than one segment inside another segment
        # we need to iterate over it until we have no more total overlap.
        while len(totalOverlapIndex[0]) != 0:
            keepIndex = totalOverlapIndex[0]
            # The surviving (outer) segment is flagged with encoding -1 ...
            res[keepIndex, -1] = -1
            # ... and the contained segment that follows it is dropped.
            res = np.delete(res, keepIndex + 1, 0)
            totalOverlapIndex = np.where(res[:-1, 1] >= res[1:, 1])

    # Find partially overlapping segments: end[n] > start[n+1]
    partialOverlapIndex = np.where(res[:-1, 1] > res[1:, 0])
    if len(partialOverlapIndex[0]) > 0:
        # Creating masks to merge the overlapping segments
        overlapStartMask = np.ones(len(res), dtype=bool)
        overlapStartMask[partialOverlapIndex[0] + 1] = False

        overlapEndMask = np.ones(len(res), dtype=bool)
        overlapEndMask[partialOverlapIndex[0]] = False

        # Saving the updated ends (the end of the second segment of each pair)
        ends = res[:, 1]
        ends = ends[overlapEndMask]

        # Merged segments are flagged with encoding -1
        encodingMask = np.invert(overlapEndMask)
        encoding = res[:, -1]
        encoding[encodingMask] = -1
        encoding = encoding[overlapStartMask]

        res = res[overlapStartMask]
        res[:, 1] = ends
        res[:, -1] = encoding

    return res
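A small driver with invented segments to sanity-check the routine above: the second segment is fully contained in the first, and the last two overlap partially; the expected output follows the reconstruction given here, not a documented reference.

# Illustrative driver with invented segments: segment (2, 5) lies inside
# (0, 8) and should disappear, while (10, 15) and (14, 20) should merge.
if __name__ == "__main__":
    starts = np.array([0, 2, 10, 14])
    ends = np.array([8, 5, 15, 20])
    index = np.array([1, 2, 3, 4])
    print(removeOverlap(starts, ends, index))
    # Expected (given the reconstruction above): two rows, [0, 8, -1] and
    # [10, 20, -1], with -1 marking segments altered by removal or merging.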
//===----------------------------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

// UNSUPPORTED: c++98, c++03, c++11, c++14

// asan and msan will not call the new handler.
// UNSUPPORTED: sanitizer-new-delete

// FIXME turn this into an XFAIL
// UNSUPPORTED: no-aligned-allocation

// test operator new (nothrow)

#include <new>
#include <cstddef>
#include <cstdint>
#include <cassert>
#include <limits>

#include "test_macros.h"

constexpr auto OverAligned = alignof(std::max_align_t) * 2;

int new_handler_called = 0;

void new_handler()
{
    ++new_handler_called;
    std::set_new_handler(0);
}

bool A_constructed = false;

struct alignas(OverAligned) A
{
    A() { A_constructed = true; }
    ~A() { A_constructed = false; }
};

void test_max_alloc()
{
    std::set_new_handler(new_handler);
    auto do_test = []() {
        void* vp = operator new (std::numeric_limits<std::size_t>::max(),
                                 std::align_val_t(OverAligned),
                                 std::nothrow);
        assert(new_handler_called == 1);
        assert(vp == 0);
    };
#ifndef TEST_HAS_NO_EXCEPTIONS
    try
    {
        do_test();
    }
    catch (...)
    {
        assert(false);
    }
#else
    do_test();
#endif
}

int main()
{
    {
        A* ap = new(std::nothrow) A;
        assert(ap);
        assert(reinterpret_cast<std::uintptr_t>(ap) % OverAligned == 0);
        assert(A_constructed);
        delete ap;
        assert(!A_constructed);
    }
    {
        test_max_alloc();
    }
}
/*
 * @(#)PointAdditionLayerUI.java   2012.11.26 at 08:48:26 PST
 *
 * Copyright 2011 MBARI
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package vars.annotation.ui.imagepanel;

import org.jdesktop.jxlayer.JXLayer;
import mbarix4j.awt.AwtUtilities;
import mbarix4j.swing.JImageUrlCanvas;

import javax.swing.SwingUtilities;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.event.MouseEvent;
import java.awt.geom.GeneralPath;
import java.util.Collection;
import java.util.Vector;

/**
 * Class for testing out resizing images
 * @author brian
 *
 * @param <T>
 */
public class PointAdditionLayerUI<T extends JImageUrlCanvas> extends MultiLayerUI<T> {

    private Point coordinatePoint = null;
    private String coordinateString = null;
    final Collection<Point> sourcePoints = new Vector<Point>();
    private final Font font = new Font("Sans Serif", Font.PLAIN, 12);
    private JXCrossHairPainter<T> crossHairPainter = new JXCrossHairPainter<>();

    /** */
    @Override
    public void clearPainters() {
        super.clearPainters();
        addPainter(crossHairPainter);
    }

    @Override
    protected void paintLayer(Graphics2D g2, JXLayer<? extends T> jxl) {
        super.paintLayer(g2, jxl);

        if (coordinateString != null) {
            g2.setXORMode(Color.WHITE);
            g2.setFont(font);
            g2.drawString(coordinateString, coordinatePoint.x, coordinatePoint.y);
        }

        // Draw points
        g2.setPaintMode();    // Make sure XOR is turned off
        g2.setPaint(new Color(255, 0, 0, 180));
        g2.setStroke(new BasicStroke(3));
        JImageUrlCanvas imageCanvas = jxl.getView();
        for (Point point : sourcePoints) {
            point = AwtUtilities.toPoint(imageCanvas.convertToComponent(point));
            int x = point.x;
            int y = point.y;

            // Draw the annotation
            int armLength = 7;
            GeneralPath gp = new GeneralPath();
            gp.moveTo(x - armLength, y - armLength);
            gp.lineTo(x + armLength, y + armLength);
            gp.moveTo(x + armLength, y - armLength);
            gp.lineTo(x - armLength, y + armLength);
            g2.draw(gp);
        }

        g2.setPaintMode();
    }

    @Override
    protected void processMouseEvent(MouseEvent me, JXLayer<? extends T> jxl) {
        super.processMouseEvent(me, jxl);
        if (me.getID() == MouseEvent.MOUSE_RELEASED) {
            Point point = SwingUtilities.convertPoint(me.getComponent(), me.getPoint(), jxl);
            JImageUrlCanvas imageCanvas = jxl.getView();
            Point imagePoint = AwtUtilities.toPoint(imageCanvas.convertToImage(point));
            sourcePoints.add(imagePoint);
            setDirty(true);
        }
    }

    @Override
    protected void processMouseMotionEvent(MouseEvent me, JXLayer<? extends T> jxl) {
        super.processMouseMotionEvent(me, jxl);
        if ((me.getID() == MouseEvent.MOUSE_MOVED) || (me.getID() == MouseEvent.MOUSE_DRAGGED)) {
            Point point = SwingUtilities.convertPoint(me.getComponent(), me.getPoint(), jxl);
            JImageUrlCanvas imageCanvas = jxl.getView();
            if (imageCanvas.getImageRectangle().contains(point)) {
                Point imagePoint = AwtUtilities.toPoint(imageCanvas.convertToImage(point));
                Point componentPoint = AwtUtilities.toPoint(imageCanvas.convertToComponent(imagePoint));
                coordinateString = "(SRC[" + point.x + ", " + point.y + "] Image[" +
                        imagePoint.x + ", " + imagePoint.y + "] DST[" +
                        componentPoint.x + ", " + componentPoint.y + "])";
                coordinatePoint = point;
            }
            else {
                coordinateString = null;
                coordinatePoint = null;
            }

            // mark the ui as dirty and needed to be repainted
            setDirty(true);
        }
    }
}
/**
 * Copyright 2013 Ordina
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package nl.ordina.bag.etl.validation;

import nl.kadaster.schemas.bag_verstrekkingen.extract_levering.v20090901.BAGExtractLevering;
import nl.ordina.bag.etl.util.Utils.FileType;

import org.apache.commons.lang.StringUtils;

public class BAGExtractLeveringValidator {

	private String klantnummer;
	private String productcode;
	private String gebiedType;
	private String gegevensvariant;
	private String formaat;
	private String producttype;
	private String productversie;

	public BAGExtractLeveringValidator() {
	}

	public BAGExtractLeveringValidator(String klantnummer, String productcode, String gebiedType,
			String gegevensvariant, String formaat, String producttype, String productversie) {
		this.klantnummer = klantnummer;
		this.productcode = productcode;
		this.gebiedType = gebiedType;
		this.gegevensvariant = gegevensvariant;
		this.formaat = formaat;
		this.producttype = producttype;
		this.productversie = productversie;
	}

	public void validate(FileType fileType, BAGExtractLevering bagExtractLevering) {
		if (bagExtractLevering == null)
			throw new ValidationException(fileType.filename.concat(" not found!"));
		validate("Klantnummer", klantnummer, bagExtractLevering.getMetadata().getKlantgegevens().getKlantnummer());
		validate("Productcode", productcode, bagExtractLevering.getMetadata().getProductgegevens().getProductcode());
		validate("GebiedType", gebiedType, bagExtractLevering.getMetadata().getProductgegevens().getGebiedType());
		validate("Gegevensvariant", gegevensvariant, bagExtractLevering.getMetadata().getProductgegevens().getGegevensvariant());
		validate("Formaat", formaat, bagExtractLevering.getMetadata().getProductgegevens().getFormaat());
		validate("Producttype", producttype, bagExtractLevering.getMetadata().getProductgegevens().getProducttype());
		validate("Productversie", productversie, bagExtractLevering.getMetadata().getProductgegevens().getProductversie());
	}

	private void validate(String name, String expectedValue, String actualValue) {
		if (!StringUtils.isEmpty(expectedValue) && !expectedValue.equals(actualValue)) {
			throw new ValidationException(name + " " + actualValue + " found. " + name + " " + expectedValue + " expected.");
		}
	}

	public String getKlantnummer() {
		return klantnummer;
	}

	public void setKlantnummer(String klantnummer) {
		this.klantnummer = klantnummer;
	}

	public String getProductcode() {
		return productcode;
	}

	public void setProductcode(String productcode) {
		this.productcode = productcode;
	}

	public String getGebiedType() {
		return gebiedType;
	}

	public void setGebiedType(String gebiedType) {
		this.gebiedType = gebiedType;
	}

	public String getGegevensvariant() {
		return gegevensvariant;
	}

	public void setGegevensvariant(String gegevensvariant) {
		this.gegevensvariant = gegevensvariant;
	}

	public String getFormaat() {
		return formaat;
	}

	public void setFormaat(String formaat) {
		this.formaat = formaat;
	}

	public String getProducttype() {
		return producttype;
	}

	public void setProducttype(String producttype) {
		this.producttype = producttype;
	}

	public String getProductversie() {
		return productversie;
	}

	public void setProductversie(String productversie) {
		this.productversie = productversie;
	}
}
#This file is part of Tryton.  The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from trytond.backend import fields


class Boolean(fields.Boolean):
    @staticmethod
    def sql_type(field):
        return ('bool', 'bool')

    @staticmethod
    def sql_format(value):
        return value


class Integer(fields.Integer):
    @staticmethod
    def sql_type(field):
        return ('bigint', 'bigint')


class BigInteger(fields.BigInteger):
    @staticmethod
    def sql_type(field):
        return ('bigint', 'bigint')


class Char(fields.Char):
    @staticmethod
    def sql_type(field):
        if isinstance(field.size, int):
            return ('varchar', 'varchar(%d)' % (field.size,))
        return ('varchar', 'varchar(255)')


class Sha(fields.Sha):
    @staticmethod
    def sql_type(field):
        return ('varchar', 'varchar(40)')


class Text(fields.Text):
    @staticmethod
    def sql_type(field):
        return ('text', 'text')


class Float(fields.Float):
    @staticmethod
    def sql_type(field):
        return ('double', 'double(255, 15)')


class Numeric(fields.Numeric):
    @staticmethod
    def sql_type(field):
        return ('decimal', 'decimal(65, 30)')


class Date(fields.Date):
    @staticmethod
    def sql_type(field):
        return ('date', 'date')


class DateTime(fields.DateTime):
    @staticmethod
    def sql_type(field):
        return ('timestamp', 'timestamp null')


class Timestamp(fields.Timestamp):
    @staticmethod
    def sql_type(field):
        return ('timestamp', 'timestamp null')


class Time(fields.Time):
    @staticmethod
    def sql_type(field):
        return ('time', 'time')


class Binary(fields.Binary):
    @staticmethod
    def sql_format(value):
        return value or None

    @staticmethod
    def sql_type(field):
        return ('longblob', 'longblob')


class Selection(fields.Selection):
    @staticmethod
    def sql_type(field):
        return ('varchar', 'varchar(255)')


class Reference(fields.Reference):
    @staticmethod
    def sql_type(field):
        return ('varchar', 'varchar(255)')


class Many2One(fields.Many2One):
    @staticmethod
    def sql_type(field):
        return ('bigint', 'bigint')


class Dict(fields.Dict):
    @staticmethod
    def sql_type(field):
        return ('text', 'text')


FIELDS = {
    'boolean': Boolean,
    'integer': Integer,
    'biginteger': BigInteger,
    'char': Char,
    'sha': Sha,
    'text': Text,
    'float': Float,
    'numeric': Numeric,
    'date': Date,
    'datetime': DateTime,
    'timestamp': Timestamp,
    'time': Time,
    'binary': Binary,
    'selection': Selection,
    'reference': Reference,
    'many2one': Many2One,
    'one2many': fields.One2Many,
    'many2many': fields.Many2Many,
    'function': fields.Function,
    'property': fields.Property,
    'dict': Dict,
}
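A quick sketch of how the FIELDS registry above resolves a column definition; the field labels and the size value are illustrative and assume the usual trytond field constructors.

# Hypothetical lookup against the FIELDS registry above (the field labels and
# size are illustrative; assumes the usual trytond field constructors).
char_field = fields.Char('Name', size=64)
Column = FIELDS['char']
print(Column.sql_type(char_field))           # expected: ('varchar', 'varchar(64)')

text_field = fields.Text('Notes')
print(FIELDS['text'].sql_type(text_field))   # expected: ('text', 'text')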
goog.provide('al.view.MobileIntroScreen');

goog.require('al.view.AbstractView');
goog.require('goog.dom');
goog.require('goog.events');

al.view.MobileIntroScreen = function() {
    al.view.AbstractView.call(this);

    goog.events.listenOnce(this, this.PAGE_LOADED, this.pageLoadedHandler, false, this);
    this.loadPage('mobile-initial.html');
}
goog.inherits(al.view.MobileIntroScreen, al.view.AbstractView);

al.view.MobileIntroScreen.prototype.pageLoadedHandler = function(event) {
    var that = this;

    this.domElement = goog.dom.getElementByClass('pageContent', this.document);
    goog.dom.append(goog.dom.getDocument().body, this.domElement);

    goog.events.listen(goog.dom.getElementByClass('submitBtn', this.domElement), goog.events.EventType.CLICK, function(event) {
        this.dispatchEvent({type: this.REQUEST_SOCKET_CONNECTION, code: goog.dom.getElementByClass('codeInput', this.domElement).value});
        event.preventDefault();
        return false;
    }, false, this);
}

al.view.MobileIntroScreen.prototype.displayError = function() {
    goog.dom.getElement('error', this.domElement).innerText = 'Unable to find a client with that code';
    goog.dom.getElement('codeInput', this.domElement).value = '';
}
<?php
// Website: WWW.OpenCartArab.com
// E-Mail : [email protected]

// Heading
$_['heading_title'] = 'الاحصائيات حسب خريطة العالم';  // "Statistics by world map"

// Text
$_['text_extension'] = 'الموديولات';   // "Modules"
$_['text_success'] = 'تم التعديل !';   // "Modified successfully!"
$_['text_edit'] = 'تحرير';             // "Edit"
$_['text_order'] = 'الطلبات';          // "Orders"
$_['text_sale'] = 'المبيعات';          // "Sales"

// Entry
$_['entry_status'] = 'الحالة';             // "Status"
$_['entry_sort_order'] = 'ترتيب الفرز';    // "Sort order"
$_['entry_width'] = 'العرض';               // "Width"

// Error
$_['error_permission'] = 'تحذير: انت لا تمتلك صلاحيات التعديل !';  // "Warning: you do not have permission to modify!"
using System;
using System.Collections.Generic;
using System.Linq;

namespace Kingdom.Collections
{
    using Xunit;
    using static String;

    /// <summary>
    /// Reporter extension methods provided for shorthand.
    /// </summary>
    public static class ReporterExtensionMethods
    {
        internal static void VerifyValuesCoverage(this IEnumerable<Enumeration> values,
            IDictionary<string, int> coverage)
        {
            /* This is a hard exception. If this occurs, we have other problems to contend with.
             * Think it through, there need to be at least One item in the Values array for this
             * to be useful. */

            // ReSharper disable once InconsistentNaming
            var __values = values.AssertNotNull().AssertNotEmpty().ToArray();

            try
            {
                // Then, we expect each of the Values to be Evaluated.
                __values.AssertEqual(coverage.Count, x => x.Length);

                // Each of the Values shall be Evaluated at least Once.
                coverage.Values.AssertTrue(x => x.All(count => count > 0));
            }
            catch (Exception ex)
            {
                // TODO: TBD: Assert inconclusive how? i.e. NUnit provides Assert.Inconclusive(...).
                var incomplete = __values.Select(x => x.Name).Except(coverage.Keys)
                    .Aggregate(Empty, (g, x) => IsNullOrEmpty(g) ? $"'{x}'" : $"{g}, '{x}'");

                // TODO: TBD: for lack of a better way of signaling, just throw the IOEX here...
                throw new InvalidOperationException($"Incomplete test coverage: [ {incomplete} ]", ex);
            }
        }
    }
}
//
//     Generated by class-dump 3.5 (64 bit).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//

#import "WCBaseTextFieldItem.h"

#import "UIPickerViewDataSource.h"
#import "UIPickerViewDelegate.h"

@class MMUIViewController, NSString, UIButton, UIPickerView;

@interface WCPayValidDateItem : WCBaseTextFieldItem <UIPickerViewDataSource, UIPickerViewDelegate>
{
    UIPickerView *m_pickerView;
    int m_iStartYear;
    NSString *m_value;
    UIButton *m_tipBtn;
    MMUIViewController *m_viewController;
    NSString *m_nsTipTitle;
    NSString *m_nsTipContent;
}

- (void).cxx_destruct;
- (void)setTipContent:(id)arg1 Title:(id)arg2;
- (void)setTipBtnShowed:(_Bool)arg1;
- (void)showValidTip;
- (void)setViewController:(id)arg1;
- (void)addTipBtnTarget:(id)arg1 sel:(SEL)arg2;
- (void)UIPickerViewDidFinish;
- (void)UIPickerViewDidCancel;
- (id)pickerView:(id)arg1 viewForRow:(long long)arg2 forComponent:(long long)arg3 reusingView:(id)arg4;
- (id)pickerView:(id)arg1 titleForRow:(long long)arg2 forComponent:(long long)arg3;
- (double)pickerView:(id)arg1 widthForComponent:(long long)arg2;
- (long long)pickerView:(id)arg1 numberOfRowsInComponent:(long long)arg2;
- (long long)numberOfComponentsInPickerView:(id)arg1;
- (void)dealloc;
- (void)initView:(struct CGRect)arg1;
- (id)getValue;
- (id)initWithTitle:(id)arg1 tip:(id)arg2 key:(id)arg3;

// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;

@end