content
stringlengths
10
4.9M
/**
 * Argument handler for flags that consume a single string parameter.
 */
public abstract class ArgHandlerString extends ArgHandler {

  @Override
  public int handle(String[] args, int startIndex) {
    // The flag must be followed by a value on the command line.
    if (startIndex + 1 >= args.length) {
      System.err.println(getTag() + " must be followed by an argument for " + getTagArgs()[0]);
      return -1;
    }
    // Hand the value to the subclass; a rejected value aborts processing,
    // otherwise report that one extra token was consumed.
    return setString(args[startIndex + 1]) ? 1 : -1;
  }

  /**
   * Receives the flag's string value.
   *
   * @return false to reject the value and abort argument processing
   */
  public abstract boolean setString(String str);
}
<reponame>sophiebits/ent
// Generated by github.com/lolopinto/ent/ent, DO NOT EDIT.

import {
  GraphQLEnumType,
  GraphQLFieldConfig,
  GraphQLFieldConfigMap,
  GraphQLID,
  GraphQLInputFieldConfigMap,
  GraphQLInputObjectType,
  GraphQLNonNull,
  GraphQLObjectType,
  GraphQLResolveInfo,
} from "graphql";
import { RequestContext } from "@lolopinto/ent";
import {
  convertFromGQLEnum,
  mustDecodeIDFromGQLID,
} from "@lolopinto/ent/graphql";
import { Event } from "src/ent/";
import EditEventRsvpStatusAction, {
  EditEventRsvpStatusInput,
} from "src/ent/event/actions/edit_event_rsvp_status_action";
import {
  EventRsvpStatusInput,
  getEventRsvpStatusInputValues,
} from "src/ent/event/actions/generated/edit_event_rsvp_status_action_base";
import { EventType } from "src/graphql/resolvers/";

// Mutation input as received over GraphQL: opaque encoded node ids plus
// the fields of EditEventRsvpStatusInput.
interface customEventRsvpStatusEditInput extends EditEventRsvpStatusInput {
  eventID: string;
  userID: string;
}

// Payload returned by the eventRsvpStatusEdit mutation.
interface EventRsvpStatusEditPayload {
  event: Event;
}

// GraphQL enum listing the accepted RSVP status values.
export const EventRsvpStatusInputType = new GraphQLEnumType({
  name: "EventRsvpStatusInput",
  values: {
    ATTENDING: {
      value: "ATTENDING",
    },
    DECLINED: {
      value: "DECLINED",
    },
    MAYBE: {
      value: "MAYBE",
    },
  },
});

// Input object type for the mutation; all fields are non-null.
export const EventRsvpStatusEditInputType = new GraphQLInputObjectType({
  name: "EventRsvpStatusEditInput",
  fields: (): GraphQLInputFieldConfigMap => ({
    eventID: {
      type: GraphQLNonNull(GraphQLID),
    },
    rsvpStatus: {
      type: GraphQLNonNull(EventRsvpStatusInputType),
    },
    userID: {
      type: GraphQLNonNull(GraphQLID),
    },
  }),
});

// Object type wrapping the edited Event in the mutation payload.
export const EventRsvpStatusEditPayloadType = new GraphQLObjectType({
  name: "EventRsvpStatusEditPayload",
  fields: (): GraphQLFieldConfigMap<
    EventRsvpStatusEditPayload,
    RequestContext
  > => ({
    event: {
      type: GraphQLNonNull(EventType),
    },
  }),
});

// Field config for the eventRsvpStatusEdit mutation: decodes the GraphQL
// ids, converts the enum value, and saves the edit via the generated action.
export const EventRsvpStatusEditType: GraphQLFieldConfig<
  undefined,
  RequestContext,
  { [input: string]: customEventRsvpStatusEditInput }
> = {
  type: GraphQLNonNull(EventRsvpStatusEditPayloadType),
  args: {
    input: {
      description: "",
      type: GraphQLNonNull(EventRsvpStatusEditInputType),
    },
  },
  resolve: async (
    _source,
    { input },
    context: RequestContext,
    _info: GraphQLResolveInfo,
  ): Promise<EventRsvpStatusEditPayload> => {
    // Decode the opaque GraphQL id to the underlying event id and save the
    // new RSVP status on behalf of the current viewer.
    let event = await EditEventRsvpStatusAction.saveXFromID(
      context.getViewer(),
      mustDecodeIDFromGQLID(input.eventID),
      {
        // Map the GraphQL enum value back to the action's enum type.
        rsvpStatus: convertFromGQLEnum(
          input.rsvpStatus,
          getEventRsvpStatusInputValues(),
          EventRsvpStatusInputType.getValues(),
        ) as EventRsvpStatusInput,
        userID: mustDecodeIDFromGQLID(input.userID),
      },
    );
    return { event: event };
  },
};
use super::*;

/// An object that can be notified after the state is updated.
///
/// Implementors must be `Send + Sync`, so a subscriber can be shared and
/// invoked across threads.
pub trait Subscriber: Send + Sync {
    /// Called after an update; receives the current `state` and an
    /// `EventHandler`.
    // NOTE(review): the exact relationship between `handler` and the
    // update (e.g. the handler that produced it) is not visible from this
    // declaration -- confirm against the caller before documenting it.
    fn notify(&self, state: &State, handler: &dyn EventHandler);
}
package spark.examples;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import scala.Tuple2;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import java.io.Serializable;

/**
 * Spark Streaming example that watches a directory of text files and
 * converts each (offset, line) record into native (Long, Integer) pairs.
 */
public class ReadTransferStats implements Serializable {

  /**
   * Builds a DStream of (file offset, parsed integer line) pairs from text
   * files appearing in {@code inputDirectory}.
   *
   * @param jssc           streaming context used to create the file stream
   * @param inputDirectory directory monitored for new text files; each line
   *                       is expected to contain a single integer
   * @return stream of (offset, value) pairs with Writables unwrapped
   */
  public JavaPairDStream<Long, Integer> readStats(JavaStreamingContext jssc,
                                                  String inputDirectory) {
    // Note: This example doesn't work until Spark 1.2
    JavaPairDStream<LongWritable, Text> input =
        jssc.fileStream(inputDirectory, LongWritable.class, Text.class,
            TextInputFormat.class);

    // Convert the input from Hadoop Writables to native Java types.
    JavaPairDStream<Long, Integer> usefulInput = input.mapToPair(
        new PairFunction<Tuple2<LongWritable, Text>, Long, Integer>() {
          @Override
          public Tuple2<Long, Integer> call(Tuple2<LongWritable, Text> input) {
            // FIX: use the typed constructor; the original raw
            // "new Tuple2(...)" compiled with an unchecked warning and
            // defeated the method's generic signature.
            return new Tuple2<>(input._1().get(),
                Integer.parseInt(input._2().toString()));
          }
        });
    return usefulInput;
  }
}
<filename>jobotz/jb2_201811/src/test/Test17Octree2.java /* * * Vear 2017-2018 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
package test;

import java.util.Random;
import jb2.math.BoundingBox;
import jb2.util.FastList;
import jb2.util.Octree;
import jb2.util.LocalContext;

/**
 * Correctness/benchmark test for {@link Octree}: fills a tree with random
 * bounding boxes, queries it with random and guaranteed-hit query boxes,
 * and compares every query result against a brute-force intersection scan.
 *
 * @author vear
 */
public class Test17Octree2 {

    /**
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        // Initialize the thread-local context used by the jb2 utilities.
        LocalContext.getContext();
        double creationavg = 0;   // summed tree-construction time (ms), converted to sec at the end
        double testavg = 0;       // summed query time (ms), converted to sec at the end
        int treenodes = 0;
        Random r = new Random(12345678);  // fixed seed for reproducible runs
        int sumgood = 0;      // matches found by both tree and brute force
        int sumbad = 0;       // boxes the tree missed
        int summistake = 0;   // boxes the tree returned that don't intersect
        int numvectors = 1000;
        int guaranteedhittest = 200;
        int randomtest = 500;
        int tests = 1;
        for (int count = 0; count < tests; count++) {
            long start = System.currentTimeMillis();
            // Random world extents; y (height) range is much flatter than x/z.
            float xmin = (r.nextFloat() - 0.5f) * 2000f;
            float xrange = 500 + r.nextFloat() * 2000f;
            float ymin = (r.nextFloat() - 0.5f) * 10f;
            float yrange = 150 + (r.nextFloat() * 100f);
            float zmin = (r.nextFloat() - 0.5f) * 2000f;
            float zrange = 500 + r.nextFloat() * 2000f;
            Octree tree = new Octree();
            tree.clear();
            //IntMap vecMap = new IntMap();
            BoundingBox[] vectors = new BoundingBox[numvectors];
            // Populate the tree with numvectors random boxes.
            // NOTE(review): the original "add 450 objects, then 50 object
            // bounds" comment does not match numvectors=1000 -- stale?
            for (int i = 0; i < numvectors; i++) {
                vectors[i] = new BoundingBox();
                vectors[i].center.x = xmin + r.nextFloat() * xrange;
                if (i % 50 == 0) {
                    // only every 50'th object is in the air
                    vectors[i].center.y = ymin + r.nextFloat() * yrange;
                } else {
                    // typically objects are on the ground
                    vectors[i].center.y = ymin + r.nextFloat() * (yrange / 5);
                }
                vectors[i].center.z = zmin + r.nextFloat() * zrange;
                vectors[i].extents.x = r.nextFloat() * 10f;
                vectors[i].extents.y = r.nextFloat() * 2f;
                vectors[i].extents.z = r.nextFloat() * 10f;
                tree.add(vectors[i], vectors[i]);
            }
            long creation = System.currentTimeMillis() - start;
            treenodes = tree.countNodes();
            // skip test data preparation, and precompute the correct results
            FastList<FastList<BoundingBox>> correct = new FastList<>();
            FastList<FastList<BoundingBox>> results = new FastList<>();
            //FastList<FastList<Vector3f>> entResults = new FastList<>();
            FastList<BoundingBox> boxes = new FastList<BoundingBox>();
            // create random testing bounding boxes
            for (int i = 0; i < randomtest; i++) {
                // generate random bb, only every 5'th has height, the others are full y range
                BoundingBox bb = new BoundingBox();
                bb.extents.x = 10 + r.nextFloat() * 50;
                if (i % 5 == 0) {
                    bb.extents.y = 10 + r.nextFloat() * 50;
                    bb.center.y = ymin + bb.extents.y + r.nextFloat() * (yrange - 2 * bb.extents.y);
                } else {
                    // full height box, for horizontal selection
                    bb.extents.y = yrange / 2;
                    bb.center.y = ymin + yrange / 2;
                }
                bb.extents.z = 10 + r.nextFloat() * 50;
                // Keep the box fully inside the world extents on x and z.
                bb.center.x = xmin + bb.extents.x + r.nextFloat() * (xrange - 2 * bb.extents.x);
                bb.center.z = zmin + bb.extents.z + r.nextFloat() * (zrange - 2 * bb.extents.z);
                boxes.add(bb);
            }
            // create some boxes around known points, so hits are guaranteed
            for (int i = 0; i < guaranteedhittest; i++) {
                int veci = r.nextInt(numvectors);
                BoundingBox bb = new BoundingBox(vectors[veci].center, 10, 10, 10);
                boxes.add(bb);
            }
            // precompute the correct results by brute-force O(boxes*vectors) scan
            // NOTE(review): set(i, ...) is used on an empty FastList --
            // assumes FastList.set grows the list as needed; confirm.
            for (int i = 0; i < boxes.size(); i++) {
                BoundingBox bb = boxes.get(i);
                FastList<BoundingBox> result = new FastList<>();
                for (int j = 0; j < numvectors; j++) {
                    if (bb.intersects(vectors[j])) {
                        result.add(vectors[j]);
                    }
                }
                correct.set(i, result);
            }
            results.clear();
            start = System.currentTimeMillis();
            // Timed section: the same queries answered by the octree.
            for (int i = 0; i < boxes.size(); i++) {
                BoundingBox bb = boxes.get(i);
                FastList<BoundingBox> result = new FastList<>();
                // find indices from the tree
                result = tree.getIntersecting(bb, result);
                results.set(i, result);
            }
            long testime = System.currentTimeMillis() - start;
            // Compare tree results with the brute-force ground truth.
            for (int i = 0; i < boxes.size(); i++) {
                FastList<BoundingBox> result = results.get(i);
                //FastList<Vector3f> entResult=entResults.get(i);
                int good = 0;
                int bad = 0;
                int mistake = 0;
                // check against correct results
                FastList<BoundingBox> goods = correct.get(i);
                for (int j = 0; j < goods.size(); j++) {
                    BoundingBox ent = goods.get(j);
                    int found = result.indexOf(ent);
                    if (found > -1) {
                        // present in both: remove so leftovers can be counted
                        good++;
                        result.remove(found);
                        //entResult.remove(found);
                    } else {
                        bad++;
                    }
                }
                // if something remains in results, it's a mistake (false positive)
                mistake += result.size();
                if (mistake > 0 || bad > 0) {
                    System.out.printf("Good %d, mistake %d, bad %d\n", good, mistake, bad);
                    // get the result again, for debugging (breakpoint target)
                    BoundingBox bb = boxes.get(i);
                    result = tree.getIntersecting(bb, result);
                }
                sumgood += good;
                summistake += mistake;
                sumbad += bad;
            }
            creationavg += creation;
            testavg += testime;
        }
        // Convert accumulated milliseconds to seconds.
        // NOTE(review): this divides by 1000 (ms->sec) but not by `tests`,
        // so with tests > 1 these are totals, not averages.
        creationavg /= 1000d;
        testavg /= 1000d;
        System.out.printf("Good %f, mistake %f, bad %f\n", (float)sumgood / tests, (float)summistake / tests, (float)sumbad / tests);
        // NOTE(review): "time time" is a typo in the two messages below
        // (left untouched here; output strings are program behavior).
        System.out.printf("Construction time time %f sec\n", creationavg);
        System.out.printf("Test time time %f sec\n", testavg);
        System.out.printf("Tree nodes %d\n", treenodes);
    }
}
""" Pascal VOC dataset parser Copyright 2020 Ross Wightman """ import os import xml.etree.ElementTree as ET from collections import defaultdict import numpy as np from .parser import Parser from .parser_config import VocParserCfg class VocParser(Parser): DEFAULT_CLASSES = ( 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor') def __init__(self, cfg: VocParserCfg): super().__init__( bbox_yxyx=cfg.bbox_yxyx, has_labels=cfg.has_labels, include_masks=False, # FIXME to support someday include_bboxes_ignore=False, ignore_empty_gt=cfg.has_labels and cfg.ignore_empty_gt, min_img_size=cfg.min_img_size ) self.correct_bbox = 1 self.keep_difficult = cfg.keep_difficult self.anns = None self.img_id_to_idx = {} self._load_annotations( split_filename=cfg.split_filename, img_filename=cfg.img_filename, ann_filename=cfg.ann_filename, classes=cfg.classes, ) def _load_annotations( self, split_filename: str, img_filename: str, ann_filename: str, classes=None, ): classes = classes or self.DEFAULT_CLASSES self.cat_names = list(classes) self.cat_ids = self.cat_names self.cat_id_to_label = {cat: i + self.label_offset for i, cat in enumerate(self.cat_ids)} self.anns = [] with open(split_filename) as f: ids = f.readlines() for img_id in ids: img_id = img_id.strip("\n") filename = img_filename % img_id xml_path = ann_filename % img_id tree = ET.parse(xml_path) root = tree.getroot() size = root.find('size') width = int(size.find('width').text) height = int(size.find('height').text) if min(width, height) < self.min_img_size: continue anns = [] for obj_idx, obj in enumerate(root.findall('object')): name = obj.find('name').text label = self.cat_id_to_label[name] difficult = int(obj.find('difficult').text) bnd_box = obj.find('bndbox') bbox = [ int(bnd_box.find('xmin').text), int(bnd_box.find('ymin').text), int(bnd_box.find('xmax').text), 
int(bnd_box.find('ymax').text) ] anns.append(dict(label=label, bbox=bbox, difficult=difficult)) if not self.ignore_empty_gt or len(anns): self.anns.append(anns) self.img_infos.append(dict(id=img_id, file_name=filename, width=width, height=height)) self.img_ids.append(img_id) else: self.img_ids_invalid.append(img_id) def merge(self, other): assert len(self.cat_ids) == len(other.cat_ids) self.img_ids.extend(other.img_ids) self.img_infos.extend(other.img_infos) self.anns.extend(other.anns) def get_ann_info(self, idx): return self._parse_ann_info(self.anns[idx]) def _parse_ann_info(self, ann_info): bboxes = [] labels = [] bboxes_ignore = [] labels_ignore = [] for ann in ann_info: ignore = False x1, y1, x2, y2 = ann['bbox'] label = ann['label'] w = x2 - x1 h = y2 - y1 if w < 1 or h < 1: ignore = True if self.yxyx: bbox = [y1, x1, y2, x2] else: bbox = ann['bbox'] if ignore or (ann['difficult'] and not self.keep_difficult): bboxes_ignore.append(bbox) labels_ignore.append(label) else: bboxes.append(bbox) labels.append(label) if not bboxes: bboxes = np.zeros((0, 4), dtype=np.float32) labels = np.zeros((0, ), dtype=np.float32) else: bboxes = np.array(bboxes, ndmin=2, dtype=np.float32) - self.correct_bbox labels = np.array(labels, dtype=np.float32) if self.include_bboxes_ignore: if not bboxes_ignore: bboxes_ignore = np.zeros((0, 4), dtype=np.float32) labels_ignore = np.zeros((0, ), dtype=np.float32) else: bboxes_ignore = np.array(bboxes_ignore, ndmin=2, dtype=np.float32) - self.correct_bbox labels_ignore = np.array(labels_ignore, dtype=np.float32) ann = dict( bbox=bboxes.astype(np.float32), cls=labels.astype(np.int64)) if self.include_bboxes_ignore: ann.update(dict( bbox_ignore=bboxes_ignore.astype(np.float32), cls_ignore=labels_ignore.astype(np.int64))) return ann
#include<iostream>
#include<vector>
#include<algorithm>
#include<set>
using namespace std;

/*
 * For each test case: given n floors, a start floor s, and k occupied
 * floors, print the distance from s to the nearest unoccupied floor,
 * searching upward (s..n) and downward (s..1).
 */
int main() {
    ios_base::sync_with_stdio(false);
    cin.tie(NULL);
    int t;
    cin >> t;
    while (t--) {
        long long int n, s;
        int k;
        cin >> n >> s >> k;
        // FIX: store occupied floors as long long; the original set<int>
        // silently narrowed floor numbers larger than INT_MAX.
        set<long long int> occupied;
        for (int i = 1; i <= k; i++) {
            long long int c;
            cin >> c;
            occupied.insert(c);
        }
        // Sentinel distances, used when a direction has no free floor.
        long long int up = 10000000, down = 10000000;
        for (long long int q = s; q <= n; q++) {
            if (occupied.find(q) == occupied.end()) {
                up = q - s;
                break;
            }
        }
        for (long long int q = s; q >= 1; q--) {
            if (occupied.find(q) == occupied.end()) {
                down = s - q;
                break;
            }
        }
        // FIX: result was stored in an int, truncating distances beyond
        // INT_MAX for large n; keep the full 64-bit value.
        long long int res = min(up, down);
        cout << res << "\n";
    }
    return 0;
}
Throughout my travels I’m constantly getting asked about Personal Trainer certifications: Which one is best? – Which do I recommend?, etc: In today’s post I’m going to share with you my answers to these questions along with my general outlook on Personal Training certifications. I have no affiliation with any fitness training certification organization. So you can rest assured that what you’re about to read is 100% unbiased. Additionally, I don’t know how to communicate without being truthful, straight-forward and to-the-point. The readers digest version of my TRUTH about Personal Training certifications! This is what I’ve found to be true about certifications from being a fitness professional for 12yrs + and from working/ communicating with 100’s of other fitness professionals over the years: – Fitness certifications do NOT help you make more $! Getting results helps you make more $! – Most people, even doctors and PTs could care less about who you hold a cert under. They care more about what you know and what you can do with what you know. I’ve never once been asked by any other health professional (aside from other trainers) who I’m certified under. My clients are all happy and getting the results they’re after. That’s what people care about! – Yes you should get a certification if you don’t have a college degree. But it’s only for legal purposes (and, I’m not sure how much that even helps if you are taken to court). – It doesn’t matter which certification you’ve got because most of the stuff you’ll actually use “in the trenches” will come from non-certification based education anyway. – If you plan on working for a fitness club, they may require a specific type of personal trainer certification. If so, you’re stuck with what they require. – Go with a certification that’s cheap and well established like: ACE or NSCA. – Don’t assume that since you’re “certified” that you’re “qualified” (I stole that little saying from Dave Parise). 
If you’ve passed a test from an organization, it simply means that you’ve memorized what that specific organization feels is important based on their system. – There are lots of great fitness trainer certification courses out there. Some are weekend courses, and others take several months of intense learning to complete. In the end, you’re still just certified as a trainer. And ANYONE can be certified. So don’t put too much juice behind it because the folks paying you certainly won’t. – If you are shy, don’t communicate well or you’re simply an ass-hole who people don’t seem to like; all the certifications in the world won’t make you successful. In this business, you need “personality.” And, you don’t get that from any personal training certification course! – You’ll probably end up getting more clients from how you look and how you carry yourself than you will from the letters behind your name. – People in the fitness setting will rarely pay more $ for a more heavily certified trainer since 1) they’ve never even heard of your “special” certs in the first place to put a $ value on them, and 2) most people still don’t consider personal training to be a “real job” anyway. My Top 7 Recommendations to upcoming Personal trainers! 1. Find a cheap, quick and easy certification and go with that one. Then, with the $ and time you’ve saved, buy DVDs & books, attend live workshops & conferences, and do lots of mentorships and internships. 2. Certification courses are just memorizing someone else’s system. Good trainers develop their own philosophies, and training systems based on those philosophies. So take the shortest/ easiest course you can find since you won’t use much of it in your actual training anyway. 3. Clients (for the most part) don’t care who you’re certified by, they just care about results. So don’t focus on having the best certs. Instead, focus on being the most able to get results each individual client is after. 4.
Clients who do care about what personal trainer certification you have probably saw something about it on TV or in the newspaper. So they just want to “feel” like an informed consumer. Those folks are also the ones who may become a pain in your ass because every time they see a new article or TV show on fitness training, they’ll ask you about that too. 5. You can never go wrong with getting certified through NASM, ACE or NSCA since those are fairly well known and established personal training certifications. 6. The best learning will come from reading research and combining that with practical experience, as personal training is the art of expressing the science. 7. If you’re just getting into this industry, focus on just getting certified vs. what certification to get. As I’ve said, it really doesn’t matter anyway since none will help you make more $, get more clients or give you more legal coverage. Final Thoughts I’m well aware that many people will disagree with what I’ve said above due to their personal biases toward certain personal trainer certifications, professional affiliation, personal opinion or elitism! And, that’s cool with me, as my only bias is to the evidence I have. And, with the very large number of individuals I’ve helped break into and advance the fitness field, not one of them has ever come back to me saying “I owe all my success to the certification I got from __________ organization.” Their success has come from having a passion for helping others get fitter, an infectious energy, great communication and teaching ability, along with a mastery of basic exercise techniques, progressions and regressions, and the principles of program design.
Progressing from Recurring Tissue Injury to Genomic Instability: A New Mechanism of Neutrophil Pathogenesis. Aberrant neutrophil (PMN) infiltration of the intestinal mucosa is a hallmark of inflammatory bowel diseases, including Crohn's disease and ulcerative colitis. While the genotoxic function of PMNs and its implications in carcinogenesis have been primarily associated with oxidative stress, recent work by Butin-Israeli and colleagues has defined a novel mechanism where PMN-derived microparticles through the delivery and activity of specific miRNAs promoted formation of double-strand breaks (DSBs), and in parallel, suppressed DSB repair through the downregulation of lamin B1 and Rad51. Respective downregulation of these two proteins compromised the nuclear envelope and high-fidelity repair by homologous recombination, increasing DSB accumulation and aneuploidy. This discovery defined a novel mode of action where PMN-mediated suppression of DSB repair leading to genomic instability in the injured mucosa may facilitate progression toward colorectal cancer.
/**
 * Wires up the listeners for one of the speed {@link EditTextPreference}s.
 *
 * @param keyId the key id selecting which preference (slow or medium) to configure
 */
private void configureSpeedEditTextPreference(final int keyId) {
    // Pick the preference instance this key id refers to.
    final EditTextPreference target;
    if (keyId == R.string.track_color_mode_slow_key) {
        target = slowEditTextPreference;
    } else {
        target = mediumEditTextPreference;
    }
    // Persist the new value and refresh the summary when the user commits an edit.
    target.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
        @Override
        public boolean onPreferenceChange(Preference preference, Object newValue) {
            storeSpeedValue(keyId, (String) newValue);
            updateSpeedSummary(keyId);
            return true;
        }
    });
    // Pre-fill the edit dialog with the currently stored display value.
    target.setOnPreferenceClickListener(new OnPreferenceClickListener() {
        @Override
        public boolean onPreferenceClick(Preference preference) {
            int value = getSpeedDisplayValue(keyId);
            ((EditTextPreference) preference).getEditText().setText(String.valueOf(value));
            return true;
        }
    });
}
Today marks the one-year anniversary of death by flower arrangement’s release on itch.io, and I felt that it was super necessary to do a postmortem - mostly as a way to chronicle the ups and downs of making the game. It’s also the first time I’ve sat down to do this, so pardon the rambling and wordiness which might ensue. It might also become a light “making of” feature. Who the Hell is Pippin Barr? My involvement in the Pippin Barr GAME IDEA Game Jam was a bit of a lucky happening. To be completely honest, I didn’t know much about the titular human behind the jam. Besides the occasional tweets of wacky game ideas that would float by my feed (retweeted by other people), that was it. Barr is a developer whose work is by all means the pinnacle of absolute weirdness in interactive media, from games to words to photos. The jam honors that by pushing the participants to take one of his game ideas and create a game. Whether it’s “YOU ARE LITERALLY A BRIDGE OVER TROUBLED WATER” or “IT’S JENGA BUT YOU SEE DEAD PEOPLE”, the end result of what it means to you should be expressed in the game you create. Honestly, if you’d like to use his ideas, I highly suggest looking at the game idea archive on his website. The game idea which seemingly spoke to me the most was “THERE IS NOTHING BUT YOU AND YOUR AK-47 AND MOUNTAINS OF FRESHLY CUT FLOWERS” - almost immediately, you can understand where dbfa takes its inspiration from in that line. In fact, I loved that line so much that it became the game’s tagline whenever I needed an easy description or way to pitch the game to people. In a sense, the game went for a literal exposition of the idea - there is nothing but you (the player) and your AK-47 (the weapon of choice), and mountains of freshly-cut flowers (the enemies). Everything that I wanted the game to be almost immediately clicked into place, but the bulk of the work wasn’t done yet. And I didn’t have enough time. 
Time is Never On Your Side #GameIdeaJam started on the 7th of October. The game’s development started on the 11th. #GameIdeaJam ends on the 16th of October. No pressure, right? If there was anything that was a constant with myself and game jams, it would be time management. I would almost always be at odds with the time because I had other things to do, or I was simply goofing off. Or sleeping. Despite those setbacks, a game would be made. For this project, a lot of its DNA can be attributed to a past project of mine - a game called Writer’s Block, which I made for my Creative Media Portfolio II class during the summer of 2015. It’s not the best game in the world, but it does capture a snapshot of how I felt at the time. It wasn’t the best summer I could’ve hoped for (at least initially), and I was in a pretty washed up state as a creator. Any other projects were torpedoed because I had no inspiration to see them through, or I felt that they weren’t really gonna be good projects. WB was a weird kind of shooter where controls were restricted to the home keys - y’know, that thing Mavis Beacon taught you in that program about how to be a better typist? That was a deliberate choice, to mimic the author character who was writing a novel. The player presses one of the home keys to position a turret and fire at bad influences (represented by skulls), while you let through ideas and such. The game’s ending would depend on how well the player performed, and that was basically it. The framework of the game was a crash course in Construct 2′s pathfinding behaviors which could be attached to objects - making a sort of rudimentary AI which only needed a beacon to attach itself to. death by flower arrangement was an evolution of that, expanding the playing from narrow hallways to a wide open area and designating the player character as a moving beacon that they would consistently pursue as they defended themselves. 
This video from my Facebook page showed how the base of the game ran - everything was all squares, and ran quite smoothly. I was also testing movement with ESDF, rather than WASD - which would be an issue later on. The spawn objects were placed off-screen, generating enemies every so often. In fact, the spawning of enemies would prove to be one of the more difficult things about developing the game. It needed constant tweaking, and sometimes, I got really frustrated with how janky it was. At this point in time, I was glad to have a solid base to work with, and making a “look” for the game followed suit. Pixel Art for Novices If there is one thing that I do want to teach myself, it’s pixel art. I love it a lot, and I’ve always been fascinated with artists who are masters of the craft. Admittedly, things take time and most times when I’ve tried my hand at it, I didn’t really get very far. That being said, it wasn’t very different with this game as I would have to apply whatever bits and piece I had learned in order to get by. The first thing I designed were the flowers. Rather than make them tiny little nuggets, they were a bit larger than the player character. You can also see that they’re not particularly menacing - it was an initial idea that I had, but decided against since I would rather keep it simple than complicate the design any more than it should’ve been. This video was the first time the original red flower asset was implemented - along with a little tiled background to represent the grass. Also introduced were after effects of killing the flowers - a patch of petals which they drop after being defeated. It was a lot larger (and noticeably square) in this video, but that would also be tweaked in order to fit in naturally. There still wasn’t a character in place of the controlled square which fired square bullets at the now-flowers. 
The little story of the florist began around the time when I made the art for the flower, as I felt it would need a bit of story to explain why such a game was even happening. It easily became the adventure of a florist named Flanigan who took a trip to the mountains to gather flowers for her shop. Her AK-47 was a tool for both harvesting these special flowers and defending herself, so the game acted as a peek into what she does for a living. Flanigan’s sprite took ages. I couldn’t for the life of me animate a top-down sprite of a person, despite many attempts to ape games like Contra and their style. With time not being on my side, I opted for a simple avatar of how I imagined her in my head - black girl with blue hair adorned with a red flower, up in a ponytail, wearing a pink shirt and blue skirt, holding an AK-47. I kinda look back at the design as a clusterfuck of whatever was swirling around in my head, but it is distinctively her look. It All Comes Together Project #dbfa, as it was called, was getting closer and closer to being feature complete or at least, as feature complete as it could be. The last gameplay video I released online was as close to version 1.0 as possible - the purple square became Flanigan, the flowers were slightly redesigned and had palette swaps, the petals were brought in line, and for a dash of visual flair, killing them would activate a particle effect of petals spurting out. Flanigan’s AK-47 had actual AK-47 sounds (thanks to royalty-free SFX floating around the web) and bullets. And at the end, there was a screencap of the battlefield with petals of different colors strewn all about, a nice tease of the screenshot function I added in. Though the object of the game was to survive the waves of endlessly spawning flowers, I added in the screenshot function as a way to appreciate the aftermath of the game rather than giving players the choice to immediately restart or booting them back to the main menu. 
Further tweaks were made over the next two days since the video was out - pacing the spawn times for the flowers, finding appropriate music, working on the non-game bits like menus, crushing bugs that cropped up (like flowers being stuck and not moving at all), and also working on a logo for the game. Finally, a mere 39 minutes and 23 seconds before the deadline on October 16th, I put the game out there for all to play. Playable in a browser, and also with desktop versions for Windows, Mac, and Linux through NW.js exports. A day later, I uploaded a little trailer I put together with my gameplay. From there, it was waiting, watching, playing the other #GameIdeaJam entries, and being somewhat worried that it wouldn’t be played at all. I decided to share the game on Reddit - most notably on the /r/WebGames and /r/playmygame subreddits in hopes of getting more eyes on the game. The responses soon after would prove to be rather valuable and eye-opening. Nothing’s Ever Right on the First Try Way too easy. The character can be practically invincible if you do this. Flowers don’t spawn here, so it’s basically a safe haven. Why ESDF? No scoring system? Controls can be frustrating. Too much blinking. Can’t tell where the character is facing. Maybe add in customizable controls? If you’re wondering, that’s not all from the same person. Thankfully, people played the game. But as expected, they had their own take on how it worked and what could be done to make it better. Then you’ll probably wonder “didn’t anyone else besides you test the game?” The answer is a flat no. Not because I didn’t want to, but rather, I was in isolation at that point in my life - I had friends, yes, but I didn’t have a chance to hang out and do much. And before you ask about online friends, I didn’t really speak much to them either. I had no time for testing, and so the game was basically pushed out the door with barely any QA besides what I could see. 
The game was cute to some, a bore to others, and an unpolished product to basically everyone. I wasn’t going to be defensive of the game, or hope for cushy comments that don’t help with the game getting better. Not to say that I didn’t accept any praises, but I did want the game to be a nice experience for players. I played other entries in the #GameIdeaJam - some of which I felt were beyond amazing. The game idea I took on was done by another developer, and it was wildly different from my own: no enemies, no interactions. I thought it was really cool, and in contrast to dbfa, a lot calmer. The ratings didn’t really matter (releasing a game was an automatic win), though with whatever ratings I could muster up, the game was 38th overall, with scores hovering around a 3 out of 5 range. Indie games gal Jupiter Hadley covered it in her ALL THE INDIE GAMES series on the jam (Part 2, to be exact) - which was honestly a nice goal for me, since I missed out on being featured in a past vid for a different game in a different jam due to a faulty executable. And so, another game was made and it was on to the next one… or so I thought. A Real Fixer-Upper It is true that I basically ignored the game for the rest of the year, but version 1.1 was going to happen regardless. I just needed a definitive list of what went wrong, and what needed to be right. Reddit user Qu4Z basically mapped out all of what version 1.1 would become in a comment that was sent to me in the /r/NintendoSwitchIndie subreddit. They were very polite with bringing up their findings, and I had a decent chat with them about the things which went wrong overall. For ease of access, this was mostly my response to their questions and concerns. ESDF was not a great move for me. I think it stayed due to something I read about WASD vs. ESDF, but I do want to revert to WASD in version 1.1 of the game.
I actually meant to use clickable buttons for better menu navigation, but to be completely honest, I lazed around with the non-game parts. Normally, I do take my time, but I rushed through that part to make the deadline. I thought there were particle effects for the energy drinks, but I will alter that to be more visible. I think I stuck to just four for time’s sake. I think my initial idea was for three types of flowers (a set of four), with four color variations. There was only one set for the jam version. Someone had brought up the idea of objectives, though you were right about it trying to be meditative. The petal scatter for each flower was actually supposed to spawn at a certain position adjacent to it (north, south, west, east). Once players would pick up on that, maybe they would try to conserve certain flowers, while getting rid of others. I considered the idea of removing flowers without their petals being sprayed, but it wasn’t properly fleshed out. Yeah, there is no real reason to stop firing, is there. I definitely wanted to get a health system going, though I ran into logic problems. Bear with me, I hadn’t touched Construct 2 in a few months, and I momentarily felt like a noob. I also wanted to have varying health among different flower types. Spawners were such a pain during development. I normally position them off-screen, but for some reason, flowers would get stuck in certain positions, which is why I brought them on-screen though they are invisible to the player. The energy drinks used the same spawner, which is why some flowers would spawn over it, and the player would definitely collide with the hidden spawners. I really like the idea for the animation. There’s definitely a lot more juice that I can probably add to the game. Again, I read up on how to improve feedback to players as they play, and it is very handy. Those kind of improvements would definitely help. 
I think there’s a small wait period before flowers de-spawn after they’re hit, which may lead to that pop-in effect (which is actually a flashing effect to signify that they took a hit). Okay, that was my fault for making it 99MB: the Windows ZIP file contains both the 32-bit and 64-bit versions. I don’t think the NW.js export should add that much bulk to it, but then again, the Mac version is just one file and that’s 116MB. I’ll blame the audio for now. Yeah, I wanted to alter that text color - I wanted to opt for a white text with black border, since that would be easily legible for some. I will consider the idea you put forward. It felt great to have a head-on clash with the issues plaguing version 1.0, and making version 1.1 was gonna be a breeze! Then… I didn’t do anything for two months. Having that much time away for gamedev was good, but I did wish that I had a chance to get started on a few minor things right away. New Year, New Game+ One of my resolutions for the New Year was to get started on version 1.1 when I had the free time. It probably wouldn’t be a spectacle like it was during the #GameIdeaJam (barely any in-progress vids or devlogs), but I already knew what had to be done. Between January and April, I worked on the game on and off, until I felt it was ready for release. With Qu4Z′s list on a separate window (in checklist form now), I began chipping away at the game. Some of the notable fixes were as follows: Movement is now done with WASD. I really regret the bad first impression I got with using ESDF to move. Never again, I say. I completely revamped how flowers spawned by removing the static spawn areas, and instead using random numbers to generate X and Y positions for the spawners to appear (invisible to players) and spawn flowers. I also added a marker to show where they would spawn. This was a great fix, as it fixed the bugged-out flowers being stuck and it would make the flower spawns a lot less predictable. 
The markers would seem to defeat that purpose, but it was mostly so that players would see it and avoid overlapping it. A nice fix for those who pointed out how flowers spawned on the player character. Flowers now had health, and would no longer die instantly. Hello, game difficulty! Goodbye easy mode! I didn’t actually display the health of the flowers, but they were working fine under the hood - just to give the flowers a bit of a buff to their strength. Think of it like how Monster Hunter never reveals the health bar of monsters you fight. The only difference here is that you can’t tell when it’s close to death, like in MH. Flanigan’s AK-47 would no longer have an infinite cache of ammo. They were limited to 30-round clips, along with a generous surplus, with ammo pickups being dropped over the course of gameplay. Again, another good fix which allowed for the game difficulty to actually exist. In the beginning, you really could just hold down the left mouse button and mow down everything. Now, you have to manage how much ammo you had, and reduce wildly firing at the flowers like before. This was also THE hardest fix to implement, solely based on the fact that I wanted to reflect this in the UI like how guns have their current load vs. how many bullets you had. The math was insufferable, and I had to break it down on paper, then bake it into the events system and tweak, tweak, tweak until I got it right. It was perhaps one of my favorite victories as a gamedev, and I felt really proud of pulling it together. More music in the playlist, as well as a way to randomize what track played. While not an exact complaint, having only one song in the game which didn’t loop naturally was awful. I fixed this by adding new songs which fit the “flower” motif. A bunch of these were future funk tracks sourced from YouTube and Soundcloud from artists who were pretty great to listen to. 
My only criteria was that it had to feel like the kind of music you would want to hear while playing the game, and surely enough, they fit the bill. The inclusion of music was also a callback to an older game, Battery-Powered Drift, which I made as a “mixtape game” - putting in great eurobeat songs to play along with in a game that reminded me heavily of Initial D. One of the artists I featured liked the game, and I got to meet them in 2015 at a convention. Back to the gamedev, the randomization was an offshoot from the new spawn behaviors - only that they were scaled back considerably to work with audio tracks. It took a while to work correctly, but it was definitely worth it. Revamping the menu, and adding a how-to-play pre-game screen. One of the better quality-of-life upgrades to the game was adding a section before the game that taught players the controls and what certain things did. Initially, the control scheme of the game was all over the place and it was really sloppy. Thankfully, I was able to scale back on the amount of buttons needed, and kept it way more simple. — Of course, that’s only the really major changes. The full version 1.1 changelog is on the game’s itch.io page, which was a blast to write since I rarely ever did something like that. So, with an extra 4 months of work (or rather certain weeks of work spread out within 4 months) compared to just 6 days for the initial version 1.0 game jam version, version 1.1 was finished and ready to go. Version 1.1 Version 1.1 of death by flower arrangement landed on April 19, 2017 to as much fanfare as I could’ve hoped to attain. In addition to its updated release on itch.io and the NW.js desktop versions for Windows/Mac/Linux, the game also saw release for the first time on Game Jolt. I tried again to post it on Reddit, but my 15 minutes with the game had already passed. I was able to contact Qu4Z, and send them the updated version as promised. 
They responded (in great detail), though I sadly haven’t had the time to chat. Hopefully, I can open up dialogue again with this postmortem if I’m lucky. What I do remember from their message was basically how the game went from an unpolished dud to an addictive little endeavor. Nitpicks aside, it was a great update and I couldn’t be any happier with the progress made on it. Over time, I expanded the game’s release to two more platforms: Newgrounds (which was a fun time for me as a former NG junkie who never uploaded content), and the Scirra Arcade (which was a pain because I had to abide by a file size limit). Post-dbfa So, now what? I don’t know. I missed two game jams that I really wanted to participate in, and I’ve been out of the game for a while now. That isn’t to say that I can only make games for jams, but I have been meaning to hash out a new concept as soon as I can and actively work on small-scale games in order to improve my portfolio. If there’s anything that I’ve learned from my time with version 1.1, it’s that there’s always time for gamedev. Not just during Ludum Dare, or some other jam. I’ve also learned a lot about using constructive criticism to my advantage, the need to reach out to people to help me test my game, and also realizing that the things I’ve worked on in the past will always return in a new form - whether it’s applying similar mechanics, reusing code, or causing an unintentional throwback to happen. But perhaps the best thing I learned in this cycle is finding closure with a game. There are some games/projects which I’ve hung on to for way too long, and others which I’ve straight up abandoned. I think that dbfa has gone as far as it possibly could with my current skill set, and I’ve had a ton of fun exploring the possibilities of a potential version 1.2, but I really want to move on to the next project and leave Flanigan to her floral work. 
Special Thanks Thanks to everyone who helped me out with the game over the last year, whether directly or indirectly: Pippin Barr - the namesake of the jam, and the game idea which led to the game’s creation - Conor McCann - creator of the game jam - Qu4Z - probably the main reason why version 1.1 even exists - Bela Rios - my girlfriend who continually supports me on ventures like these - Jupiter Hadley - for showing dbfa in her vid, made my entire day back then - all the artists of the music I used - y’all are awesome, and your music is dope - Construct 2 - the best gamedev tool ever which has allowed me to make games as a designer/artist - my dad - I don’t really know if he knew about the game, but technically, if I wasn’t into games, this game wouldn’t be made – also, I miss you so much - myself - for not being stubborn with critiques and making a kickass game in like 6 days - friends - for being cool people and supporting my work in whatever ways possible Until next time, friends.
Proponents of the old-school taxi business insist that the Uber ride-sharing service can’t be trusted. But trust issues also extend to taxi drivers, if a shakedown that happened to two women who tried to use a Beck Taxi chit to get from downtown Toronto to Brampton is any indication. Which of these cabs is not like the others? That was the dilemma for two women who got into an orange cab lined up with Beck taxis and were told by the driver he'd accept a Beck cab chit for a ride to Brampton - and then refused to honour it. ( TODD KOROL / TORONTO STAR ) The driver told the women he’d accept the chit, but when they arrived at the Brampton GO station, he demanded cash for the $110 ride. When their boss, who gave them the chit, complained to Beck, he was told it wasn’t their problem because the driver was no longer associated with the company. Beck advised him to tell it to Municipal Licensing and Standards, which regulates taxis. It investigated and closed the file, saying there wasn’t enough evidence to take action against the driver. Article Continued Below Mike McCaw, who manages a large company for which the women work, sent us a series of emails outlining the incident. He also sent us responses he got from Beck, which he found disappointing. McCaw’s firm hosted a large party in March for employees and guests at a downtown brewery. The company arranged for Beck to provide them with rides home, and had chits printed up to give to the drivers. It was a nice bit of business for Beck. McCaw figures that about 150 cab chits were handed out that night. When the women left, they approached a line of taxis painted with Beck’s familiar orange-and-green colours and “ask(ed) the cab driver if he is Beck and will accept a Beck taxi chit,” said McCaw in his note to us. 
“The driver confirms he is Beck and will accept the chit,” he said, but when they got to Brampton, “the driver advises he will not accept the chit.” They tried to pay by credit card, but the driver told them his card processing device was broken, he said. When it became clear they didn’t have enough cash, his device miraculously started working, said McCaw. McCaw later traded emails about it with Sarah Hussaini, a Beck customer service representative, asking to be reimbursed the $110. Article Continued Below “Unfortunately, your client did not hail a Beck Taxi,” said Hussaini. “The car number that was provided does not belong to the Beck Fleet. We have no contact or affiliation with this driver or car and cannot get in touch with him. “If you would like to pursue with a complaint, it is best to contact municipal licensing and standards. I am sorry I could not be more of an assistance.” Beck spokesperson Gail Souter told us the driver was affiliated with Beck until December, when he severed the relationship. But he continued to drive a taxi painted orange, similar to Beck’s colours. When we talked to her, she was dismissive of McCaw’s complaint, saying it is not responsible for drivers operating taxis that look like Beck’s but not associated with the company. But something changed. McCaw sent us a copy of a groveling email he got from Souter the week after we talked to her, saying his complaint had landed on the wrong desk. She wrote that two staffers who had first dealt with his complaint “failed to communicate that this was the course of action being taken with you,” and that she took care of it once it came to her attention. 
“I can only imagine the distaste this has left you, along with an unsavoury perception of our commitment to you as a valued customer,” said Souter, adding, “I was very saddened to hear that your clients were inconvenienced.” Souter stressed that the $110 would indeed be reimbursed, and that “to say that I value your loyalty is an understatement.” McCaw told us he believes Beck decided to reimburse him only after we talked to Souter, adding, “that was not an oversight by any means,” and that he will never again do business with Beck. When we explained the situation to John DeCourcey, who’s in charge of Municipal Licensing and Standards, he said an investigation would be started right away. We got a note from MLS several weeks later, saying, “Bylaw Enforcement created an Investigation Request (IR). As a result of this investigation, the reluctance of the complainant to pursue any further action, and the quality of evidence collected, the investigation has been completed with no charges pending.” Even though MLS knows the name and cab number of the driver, it decided not to do anything about it. So, who do you trust? Beck, Uber or MLS? What's broken in your neighbourhood? Wherever you are in Greater Toronto, we want to know. To contact us, go to thestar.com/yourtoronto/the_fixer or call us at 416-869-4823 email [email protected] . To read our blog, go to thestar.com/news/the_fixer . Report problems and follow us on Twitter @TOStarFixer. Read more about:
Lasing properties of Ce:LiCaAlF6 single crystal on effects of the distribution of Ce Ion We found heterogeneity in a Ce:LiCaAlF6 single crystal grown by the Czochralski (Cz) method. The heterogeneity was revealed by measurements of multi-photon luminescence, and LA-ICPMS analysis showed that it is due to the distribution of the Ce3+ ion. We then investigated the lasing properties of Ce:LiCaAlF6 at three areas with different Ce-ion concentrations and distributions. As a result, the slope efficiency is highest and the lasing threshold is smallest in the area where the Ce ions are uniformly distributed. Scattering centers therefore have a greater effect on the lasing properties than the Ce-ion concentration does.
<reponame>928799934/wingedsnake // +build dragonfly freebsd linux netbsd openbsd solaris package wingedsnake import ( "os/user" "strconv" "syscall" ) // getConfigUser 修改进程uid gid func getConfigUser(conf *config) (int, int, error) { // 获取 user 的 uid ui, err := user.Lookup(conf.Base.User) if err != nil { logf("user.Lookup(%v) error(%v)", conf.Base.User, err) return 0, 0, err } // 获取 group 的 gid gi, err := user.LookupGroup(conf.Base.Group) if err != nil { logf("user.LookupGroup(%v) error(%v)", conf.Base.Group, err) return 0, 0, err } uid, _ := strconv.Atoi(ui.Uid) gid, _ := strconv.Atoi(gi.Gid) return uid, gid, nil } func exchangeOwner(uid, gid int) error { // 修改 进程 uid if err := syscall.Setregid(gid, gid); err != nil { logf("syscall.Setregid(%v,%v) error(%v)", gid, gid, err) return err } // 修改 进程 gid if err := syscall.Setreuid(uid, uid); err != nil { logf("syscall.Setreuid(%v,%v) error(%v)", uid, uid, err) return err } return nil }
<filename>Kunii-1991/ut_Kunnii1991_a.py<gh_stars>1-10 # Calculate terminal velocity, ut, from ut* and dp* for different particle sphericities # from Kunii1991 book, pg 80-83, Eqs 31-33 # applicable for sphericity from 0.5 to 1.0 # use Python 3 print function and division from __future__ import print_function from __future__ import division #--- Parameters # air properties at T = 300K, P = 1 atm rhog = 1.17 # density (kg/m^3) ug = 1.85e-5 # dynamic viscosity (kg/ms) g = 9.81 # gravity (m/s^2) # particle properties dp = 0.000207 # diameter of particle (m) rhos = 2500 # density of particle (kg/m^3) sp = 0.8 # sphericity of the particle, perfect sphere = 1.0 #--- Calculations # dimensionless particle diameter, Kunii1991 Eq 31 dps = dp*(( (rhog*(rhos-rhog)*g) / (ug**2) )**(1/3)) # dimensionless terminal velocity, Kunii1991 Eq 33 uts = ( 18/(dps**2) + (2.335-1.744*sp)/(dps**0.5) )**-1 # terminal velocity, ut (m/s), Kunii1991 Eq 32 ut = uts*( (ug*(rhos-rhog)*g) / (rhog**2) )**(1/3) # Reynolds number for particle, Re (-) Re = rhog*ut*dp/ug print('--- Kunii1991 ut*, dp* ---') print('ut =', ut) print('Re =', Re)
import { grey } from "@mui/material/colors";
import { alpha, createTheme } from "@mui/material/styles";

// Module augmentation: allow <Button color="grey"> alongside the built-in
// palette colors.
declare module "@mui/material/Button" {
    interface ButtonPropsColorOverrides {
        grey: true;
    }
}

// Module augmentation: add `main`/`dark` to the stock `Color` shape so the
// `grey` palette entry below type-checks when used like a palette color.
declare module "@mui/material" {
    interface Color {
        main: string;
        dark: string;
    }
}

// Module augmentation: project-specific theme additions — a custom height
// breakpoint and the widget-header shadow used across the app.
declare module "@mui/material/styles/createTheme" {
    interface Theme {
        customBreakPoints: {
            height: {
                sm: number;
            };
        };
        customShadows: {
            widgetHeader: string;
        };
    }
}

// First pass: base theme (breakpoints, palette, typography, component
// defaults). Declared with `let` because it is extended in a second pass.
let theme = createTheme({
    breakpoints: {
        values: {
            xs: 0,
            sm: 767,
            md: 1100,
            lg: 1280,
            xl: 1920,
        },
    },
    palette: {
        primary: {
            // light: will be calculated from palette.primary.main,
            main: "#D61E5C",
            // dark: will be calculated from palette.primary.main,
            // contrastText: will be calculated to contrast with palette.primary.main
        },
        secondary: {
            // light: '#0066ff',
            main: "#253746",
            // dark: will be calculated from palette.secondary.main,
            // contrastText: 'white',
        },
        text: {
            primary: "#253746",
        },
        info: {
            main: "#007DFF",
        },
        grey: {
            main: grey[300],
            dark: grey[400],
        },
        // Used by `getContrastText()` to maximize the contrast between
        // the background and the text.
        contrastThreshold: 3,
        // Used by the functions below to shift a color's luminance by approximately
        // two indexes within its tonal palette.
        // E.g., shift from Red 500 to Red 300 or Red 700.
        tonalOffset: 0.2,
    },
    typography: {
        fontFamily: ["Open Sans", "Roboto", "Helvetica", "sans-serif"].join(","),
        h6: {
            fontSize: "1.125rem",
            fontWeight: 400,
            lineHeight: 1,
        },
        body2: {
            fontWeight: 600,
        },
    },
    customShadows: {
        widgetHeader: "0px 5px 5px rgba(0, 0, 0, 0.05)",
    },
    customBreakPoints: {
        height: {
            sm: 849.95,
        },
    },
    components: {
        MuiCssBaseline: {
            styleOverrides: `
                body {
                    overscroll-behavior: contain;
                    overflow: hidden;
                    background: transparent;
                }
            `,
        },
        MuiButton: {
            styleOverrides: {
                root: {
                    textTransform: "none",
                },
            },
        },
        MuiListItem: {
            defaultProps: {
                // @ts-ignore
                component: "li",
            },
        },
        MuiListItemButton: {
            defaultProps: {
                // @ts-ignore
                component: "li",
            },
        },
    },
});

// Second pass: `grey` button variants. A separate createTheme(theme, ...)
// call so the variant styles can read the palette values and action
// opacities resolved by the first pass above.
theme = createTheme(theme, {
    components: {
        MuiButton: {
            variants: [
                {
                    props: { variant: "contained", color: "grey" },
                    style: {
                        color: theme.palette.getContrastText(theme.palette.grey[300]),
                    },
                },
                {
                    props: { variant: "outlined", color: "grey" },
                    style: {
                        color: theme.palette.text.primary,
                        borderColor:
                            theme.palette.mode === "light" ? "rgba(0, 0, 0, 0.23)" : "rgba(255, 255, 255, 0.23)",
                        "&.Mui-disabled": {
                            border: `1px solid ${theme.palette.action.disabledBackground}`,
                        },
                        "&:hover": {
                            borderColor:
                                theme.palette.mode === "light" ? "rgba(0, 0, 0, 0.23)" : "rgba(255, 255, 255, 0.23)",
                            backgroundColor: alpha(theme.palette.text.primary, theme.palette.action.hoverOpacity),
                        },
                    },
                },
                {
                    props: { color: "grey", variant: "text" },
                    style: {
                        color: theme.palette.text.primary,
                        "&:hover": {
                            backgroundColor: alpha(theme.palette.text.primary, theme.palette.action.hoverOpacity),
                        },
                    },
                },
            ],
        },
    },
});

export { theme };
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference
// Implementation, v2.3.0
// See <a href="https://javaee.github.io/jaxb-v2/">https://javaee.github.io/jaxb-v2/</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2021.08.06 at 04:43:18 PM BST
//

package org.treblereel.gwt.xml.mapper.client.tests.pmml.model.impl.v4_4;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.IExpression;
import org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.ITable;

/**
 * Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType&gt;
 *   &lt;complexContent&gt;
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
 *       &lt;sequence&gt;
 *         &lt;element ref="{http://www.dmg.org/PMML-4_4}Extension" maxOccurs="unbounded" minOccurs="0"/&gt;
 *         &lt;element ref="{http://www.dmg.org/PMML-4_4}FieldColumnPair" maxOccurs="unbounded" minOccurs="0"/&gt;
 *         &lt;choice minOccurs="0"&gt;
 *           &lt;element ref="{http://www.dmg.org/PMML-4_4}TableLocator"/&gt;
 *           &lt;element ref="{http://www.dmg.org/PMML-4_4}InlineTable"/&gt;
 *         &lt;/choice&gt;
 *       &lt;/sequence&gt;
 *       &lt;attribute name="mapMissingTo" type="{http://www.w3.org/2001/XMLSchema}string" /&gt;
 *       &lt;attribute name="defaultValue" type="{http://www.w3.org/2001/XMLSchema}string" /&gt;
 *       &lt;attribute name="outputColumn" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /&gt;
 *       &lt;attribute name="dataType" type="{http://www.dmg.org/PMML-4_4}DATATYPE" /&gt;
 *     &lt;/restriction&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(
    name = "",
    propOrder = {"extension", "fieldColumnPair", "table"})
@XmlRootElement(name = "MapValues")
public class MapValues
    implements IExpression, org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.MapValues {

  // Repeating "Extension" elements (schema: maxOccurs="unbounded", minOccurs="0").
  @XmlElement(name = "Extension", type = Extension.class)
  protected List<org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.Extension> extension;

  // Repeating "FieldColumnPair" elements (schema: maxOccurs="unbounded", minOccurs="0").
  @XmlElement(name = "FieldColumnPair", type = FieldColumnPair.class)
  protected List<org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.FieldColumnPair>
      fieldColumnPair;

  // Holds the optional schema <choice>: either a TableLocator or an InlineTable, or null.
  @XmlElementRefs({
    @XmlElementRef(
        name = "TableLocator",
        namespace = "http://www.dmg.org/PMML-4_4",
        type = TableLocator.class,
        required = false),
    @XmlElementRef(
        name = "InlineTable",
        namespace = "http://www.dmg.org/PMML-4_4",
        type = InlineTable.class,
        required = false)
  })
  protected ITable table;

  // Optional "mapMissingTo" attribute.
  @XmlAttribute(name = "mapMissingTo")
  protected String mapMissingTo;

  // Optional "defaultValue" attribute.
  @XmlAttribute(name = "defaultValue")
  protected String defaultValue;

  // Required "outputColumn" attribute (schema: use="required").
  @XmlAttribute(name = "outputColumn", required = true)
  protected String outputColumn;

  // Optional "dataType" attribute of schema type DATATYPE.
  @XmlAttribute(name = "dataType")
  protected org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.DATATYPE dataType;

  /**
   * Gets the value of the extension property.
   *
   * <p>This accessor method returns a reference to the live list, not a snapshot. Therefore any
   * modification you make to the returned list will be present inside the JAXB object. This is why
   * there is not a <CODE>set</CODE> method for the extension property.
   *
   * <p>For example, to add a new item, do as follows:
   *
   * <pre>
   *    getExtension().add(newItem);
   * </pre>
   *
   * <p>Objects of the following type(s) are allowed in the list {@link Extension }
   */
  public List<org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.Extension> getExtension() {
    // Lazily created so the getter never returns null (standard JAXB pattern).
    if (extension == null) {
      extension =
          new ArrayList<org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.Extension>();
    }
    return this.extension;
  }

  /**
   * Gets the value of the fieldColumnPair property.
   *
   * <p>This accessor method returns a reference to the live list, not a snapshot. Therefore any
   * modification you make to the returned list will be present inside the JAXB object. This is why
   * there is not a <CODE>set</CODE> method for the fieldColumnPair property.
   *
   * <p>For example, to add a new item, do as follows:
   *
   * <pre>
   *    getFieldColumnPair().add(newItem);
   * </pre>
   *
   * <p>Objects of the following type(s) are allowed in the list {@link FieldColumnPair }
   */
  public List<org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.FieldColumnPair>
      getFieldColumnPair() {
    // Lazily created so the getter never returns null (standard JAXB pattern).
    if (fieldColumnPair == null) {
      fieldColumnPair =
          new ArrayList<
              org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.FieldColumnPair>();
    }
    return this.fieldColumnPair;
  }

  /**
   * Gets the value of the table property.
   *
   * @return possible object is {@link TableLocator } {@link InlineTable }
   */
  public ITable getTable() {
    return table;
  }

  /**
   * Sets the value of the table property.
   *
   * @param value allowed object is {@link TableLocator } {@link InlineTable }
   */
  public void setTable(ITable value) {
    this.table = value;
  }

  /**
   * Gets the value of the mapMissingTo property.
   *
   * @return possible object is {@link String }
   */
  public String getMapMissingTo() {
    return mapMissingTo;
  }

  /**
   * Sets the value of the mapMissingTo property.
   *
   * @param value allowed object is {@link String }
   */
  public void setMapMissingTo(String value) {
    this.mapMissingTo = value;
  }

  /**
   * Gets the value of the defaultValue property.
   *
   * @return possible object is {@link String }
   */
  public String getDefaultValue() {
    return defaultValue;
  }

  /**
   * Sets the value of the defaultValue property.
   *
   * @param value allowed object is {@link String }
   */
  public void setDefaultValue(String value) {
    this.defaultValue = value;
  }

  /**
   * Gets the value of the outputColumn property.
   *
   * @return possible object is {@link String }
   */
  public String getOutputColumn() {
    return outputColumn;
  }

  /**
   * Sets the value of the outputColumn property.
   *
   * @param value allowed object is {@link String }
   */
  public void setOutputColumn(String value) {
    this.outputColumn = value;
  }

  /**
   * Gets the value of the dataType property.
   *
   * @return possible object is {@link DATATYPE }
   */
  public org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.DATATYPE getDataType() {
    return dataType;
  }

  /**
   * Sets the value of the dataType property.
   *
   * @param value allowed object is {@link DATATYPE }
   */
  public void setDataType(
      org.treblereel.gwt.xml.mapper.client.tests.pmml.model.api.DATATYPE value) {
    this.dataType = value;
  }
}
import * as t from 'io-ts'
import { FhirCode } from './FhirCode'
import { FhirDecimal } from './FhirDecimal'
import { FhirElement } from './FhirElement'
import { FhirExtension } from './FhirExtension'
import { FhirString } from './FhirString'

/**
 * An amount of economic utility in some recognized currency.
 *
 * Runtime codec (io-ts) for the FHIR `Money` datatype. Built with
 * `t.recursion` so it can take part in mutually recursive FHIR type
 * definitions, and `t.partial` because every field is optional.
 */
export const FhirMoney: t.Type<FhirMoney, FhirMoneyOutput> = t.recursion('FhirMoney', () =>
  t.partial({
    /** Element-level companion of `currency` (id/extensions for the primitive). */
    _currency: t.readonly(FhirElement),
    /** Element-level companion of `value` (id/extensions for the primitive). */
    _value: t.readonly(FhirElement),
    /** Currency code of the amount (FHIR `code`; presumably ISO 4217 — confirm against the FHIR spec). */
    currency: t.readonly(FhirCode),
    /** Extensions attached to this element. */
    extension: t.readonlyArray(FhirExtension),
    /** Element id (FHIR `string`). */
    id: t.readonly(FhirString),
    /** Numerical value of the amount (FHIR `decimal`). */
    value: t.readonly(FhirDecimal),
  })
)

/** Static (decoded) TypeScript type for the FHIR `Money` datatype. */
export interface FhirMoney {
  /** Element-level companion of `currency` (id/extensions for the primitive). */
  _currency?: Readonly<t.TypeOf<typeof FhirElement>>
  /** Element-level companion of `value` (id/extensions for the primitive). */
  _value?: Readonly<t.TypeOf<typeof FhirElement>>
  /** Currency code of the amount (FHIR `code`; presumably ISO 4217 — confirm against the FHIR spec). */
  currency?: Readonly<t.TypeOf<typeof FhirCode>>
  /** Extensions attached to this element. */
  extension?: ReadonlyArray<t.TypeOf<typeof FhirExtension>>
  /** Element id (FHIR `string`). */
  id?: Readonly<t.TypeOf<typeof FhirString>>
  /** Numerical value of the amount (FHIR `decimal`). */
  value?: Readonly<t.TypeOf<typeof FhirDecimal>>
}

/** Encoded (output) form of the FHIR `Money` datatype. */
export interface FhirMoneyOutput {
  /** Element-level companion of `currency` (id/extensions for the primitive). */
  _currency?: Readonly<t.OutputOf<typeof FhirElement>>
  /** Element-level companion of `value` (id/extensions for the primitive). */
  _value?: Readonly<t.OutputOf<typeof FhirElement>>
  /** Currency code of the amount (FHIR `code`; presumably ISO 4217 — confirm against the FHIR spec). */
  currency?: Readonly<t.OutputOf<typeof FhirCode>>
  /** Extensions attached to this element. */
  extension?: ReadonlyArray<t.OutputOf<typeof FhirExtension>>
  /** Element id (FHIR `string`). */
  id?: Readonly<t.OutputOf<typeof FhirString>>
  /** Numerical value of the amount (FHIR `decimal`). */
  value?: Readonly<t.OutputOf<typeof FhirDecimal>>
}
def gradient_phrase(self, interp):
    """Compute per-sample gradients of the interpolated phrase objective.

    For each sample ``(probs_a, probs_b)`` returned by
    ``self.get_interpolated_phrase_probs`` the gradient row is
    ``sum(log(probs_a), axis=0) - sum(log(probs_b), axis=0)``.

    Args:
        interp: interpolation parameter, forwarded unchanged to
            ``self.get_interpolated_phrase_probs`` -- TODO confirm semantics
            against that method's definition (not visible here).

    Returns:
        numpy.ndarray: the per-sample gradient rows stacked vertically
        (one row per interpolated sample).
    """
    interpolated = self.get_interpolated_phrase_probs(interp)
    gradients = []
    # The original `enumerate` index was never used; iterate directly.
    for sample in interpolated:
        log_sum_a = np.sum(np.log(sample[0]), axis=0)
        log_sum_b = np.sum(np.log(sample[1]), axis=0)
        gradients.append(log_sum_a - log_sum_b)
    return np.vstack(gradients)
/**
 * AbstractOsiamService provides all basic methods necessary to manipulate the Entities registered in the given OSIAM
 * installation.
 *
 * <p>Subclasses bind the generic type to a concrete SCIM resource (e.g. User, Group) and supply the
 * current and legacy schema URNs via {@link #getSchema()} / {@link #getLegacySchema()}.</p>
 */
abstract class AbstractOsiamService<T extends Resource> {

    static final String CONNECTION_SETUP_ERROR_STRING = "Cannot connect to OSIAM";
    private static final String AUTHORIZATION = "Authorization";
    static final String BEARER = "Bearer ";

    /** Shared JAX-RS target pointing at the configured OSIAM endpoint. */
    final WebTarget targetEndpoint;
    /** Concrete resource class handled by this service instance. */
    private final Class<T> type;
    /** Simple name of {@code type}; used to build resource paths of the form "&lt;Type&gt;s". */
    private final String typeName;
    /** Connect timeout handed to Jersey via {@code ClientProperties.CONNECT_TIMEOUT} (milliseconds). */
    private final int connectTimeout;
    /** Read timeout handed to Jersey via {@code ClientProperties.READ_TIMEOUT} (milliseconds). */
    private final int readTimeout;
    /** Targeted OSIAM version; {@code OSIAM_2_LEGACY_SCHEMAS} enables legacy SCIM schema handling. */
    private final Version version;

    /**
     * Creates a service bound to the given endpoint and resource type. Also registers a
     * {@link UserDeserializer} module on the shared {@code objectMapper}; the legacy-schema
     * deserializer variant is chosen for {@code OSIAM_2_LEGACY_SCHEMAS}.
     */
    AbstractOsiamService(String endpoint, Class<T> type, int connectTimeout, int readTimeout, Version version) {
        this.type = type;
        this.typeName = type.getSimpleName();
        this.connectTimeout = connectTimeout;
        this.readTimeout = readTimeout;
        this.version = version;

        UserDeserializer userDeserializer = this.version == Version.OSIAM_2_LEGACY_SCHEMAS
                ? new UserDeserializer(OsiamUserService.LEGACY_SCHEMA)
                : new UserDeserializer();
        SimpleModule userDeserializerModule = new SimpleModule("userDeserializerModule",
                com.fasterxml.jackson.core.Version.unknownVersion())
                .addDeserializer(User.class, userDeserializer);
        objectMapper.registerModule(userDeserializerModule);

        targetEndpoint = OsiamConnector.getClient().target(endpoint);
    }

    /** Fails fast with a NullPointerException if the given access token is {@code null}. */
    static void checkAccessTokenIsNotNull(AccessToken accessToken) {
        checkNotNull(accessToken, "The given accessToken must not be null.");
    }

    /**
     * Retrieves a single resource by id.
     *
     * @param id          id of the resource; must not be null nor empty
     * @param accessToken token used to authorize the request; must not be null
     * @param attributes  optional attribute names to restrict the returned fields; empty means all
     * @return the deserialized resource
     */
    T getResource(String id, AccessToken accessToken, String... attributes) {
        checkArgument(!Strings.isNullOrEmpty(id), "The given id must not be null nor empty.");
        checkAccessTokenIsNotNull(accessToken);
        StatusType status;
        String content;
        WebTarget target;
        if (attributes == null || attributes.length == 0) {
            target = targetEndpoint;
        } else {
            target = targetEndpoint.queryParam("attributes", Joiner.on(",").join(attributes));
        }
        try {
            Response response = target.path(typeName + "s").path(id).request(MediaType.APPLICATION_JSON)
                    .header(AUTHORIZATION, BEARER + accessToken.getToken())
                    .property(ClientProperties.CONNECT_TIMEOUT, connectTimeout)
                    .property(ClientProperties.READ_TIMEOUT, readTimeout)
                    .get();
            status = response.getStatusInfo();
            content = response.readEntity(String.class);
        } catch (ProcessingException e) {
            throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, e);
        }
        checkAndHandleResponse(content, status, accessToken);
        return mapToResource(content);
    }

    /** Retrieves all resources of this service's type by searching with the maximum count. */
    List<T> getAllResources(AccessToken accessToken, String... attributes) {
        QueryBuilder qBuilder = new QueryBuilder().count(Integer.MAX_VALUE);
        if (attributes != null && attributes.length > 0) {
            qBuilder.attributes(Joiner.on(',').join(attributes));
        }
        Query query = qBuilder.build();
        return searchResources(query, accessToken).getResources();
    }

    /**
     * Executes a SCIM search for the given query. Parameters equal to the builder defaults
     * (startIndex, count) are passed as {@code null} so they are omitted from the request URL
     * (Jersey removes a query parameter when its value is null).
     */
    SCIMSearchResult<T> searchResources(Query query, AccessToken accessToken) {
        checkNotNull(query, "The given query must not be null.");
        checkAccessTokenIsNotNull(accessToken);
        StatusType status;
        String content;
        try {
            Response response = targetEndpoint.path(typeName + "s")
                    .queryParam("attributes", query.getAttributes())
                    .queryParam("filter", query.getFilter())
                    .queryParam("sortBy", query.getSortBy())
                    .queryParam("sortOrder", query.getSortOrder())
                    .queryParam("startIndex",
                            query.getStartIndex() != QueryBuilder.DEFAULT_START_INDEX ? query.getStartIndex() : null)
                    .queryParam("count",
                            query.getCount() != QueryBuilder.DEFAULT_COUNT ? query.getCount() : null)
                    .request(MediaType.APPLICATION_JSON)
                    .header(AUTHORIZATION, BEARER + accessToken.getToken())
                    .property(ClientProperties.CONNECT_TIMEOUT, connectTimeout)
                    .property(ClientProperties.READ_TIMEOUT, readTimeout)
                    .get();
            status = response.getStatusInfo();
            content = response.readEntity(String.class);
        } catch (ProcessingException e) {
            throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, e);
        }
        checkAndHandleResponse(content, status, accessToken);
        try {
            // NOTE(review): constructParametrizedType is the old (misspelled) Jackson name,
            // deprecated in favor of constructParametricType -- candidate for a later upgrade.
            JavaType queryResultType = TypeFactory.defaultInstance().constructParametrizedType(
                    SCIMSearchResult.class, SCIMSearchResult.class, type);
            return objectMapper.readValue(content, queryResultType);
        } catch (IOException e) {
            throw new OsiamClientException(String.format("Unable to deserialize search result: %s", content), e);
        }
    }

    /** Deletes the resource with the given id; throws on any non-successful HTTP status. */
    void deleteResource(String id, AccessToken accessToken) {
        checkArgument(!Strings.isNullOrEmpty(id), "The given id must not be null nor empty.");
        checkAccessTokenIsNotNull(accessToken);
        StatusType status;
        String content;
        try {
            Response response = targetEndpoint.path(typeName + "s").path(id).request(MediaType.APPLICATION_JSON)
                    .header(AUTHORIZATION, BEARER + accessToken.getToken())
                    .property(ClientProperties.CONNECT_TIMEOUT, connectTimeout)
                    .property(ClientProperties.READ_TIMEOUT, readTimeout)
                    .delete();
            status = response.getStatusInfo();
            content = response.readEntity(String.class);
        } catch (ProcessingException e) {
            throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, e);
        }
        checkAndHandleResponse(content, status, accessToken);
    }

    /**
     * Creates the given resource via POST and returns the server's representation of it
     * (which may include server-assigned fields such as the id).
     */
    T createResource(T resource, AccessToken accessToken) {
        checkNotNull(resource, "The given %s must not be null nor empty.", typeName);
        checkAccessTokenIsNotNull(accessToken);
        String resourceAsString;
        try {
            resourceAsString = mapToString(resource);
        } catch (JsonProcessingException | ClassCastException e) {
            throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, e);
        }
        StatusType status;
        String content;
        try {
            Response response = targetEndpoint.path(typeName + "s").request(MediaType.APPLICATION_JSON)
                    .header(AUTHORIZATION, BEARER + accessToken.getToken())
                    .property(ClientProperties.CONNECT_TIMEOUT, connectTimeout)
                    .property(ClientProperties.READ_TIMEOUT, readTimeout)
                    .post(Entity.json(resourceAsString));
            status = response.getStatusInfo();
            content = response.readEntity(String.class);
        } catch (ProcessingException e) {
            throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, e);
        }
        checkAndHandleResponse(content, status, accessToken);
        return mapToResource(content);
    }

    /**
     * @deprecated Updating with PATCH has been removed in OSIAM 3.0. This method is going to go away
     *             with version 1.12 or 2.0. Use {@link #replaceResource(String, Resource, AccessToken)} instead.
     */
    @Deprecated
    T updateResource(String id, T resource, AccessToken accessToken) {
        return modifyResource(id, resource, "PATCH", accessToken);
    }

    /** Replaces the resource with the given id via PUT and returns the server's representation. */
    T replaceResource(String id, T resource, AccessToken accessToken) {
        return modifyResource(id, resource, "PUT", accessToken);
    }

    /** Shared implementation for PATCH/PUT modifications of a single resource. */
    private T modifyResource(String id, T resource, String method, AccessToken accessToken) {
        checkArgument(!Strings.isNullOrEmpty(id), "The given id must not be null nor empty.");
        checkNotNull(resource, "The given %s must not be null nor empty.", typeName);
        checkAccessTokenIsNotNull(accessToken);
        String resourceAsString;
        try {
            resourceAsString = mapToString(resource);
        } catch (JsonProcessingException e) {
            throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, e);
        }
        StatusType status;
        String content;
        try {
            Response response = targetEndpoint.path(typeName + "s").path(id).request(MediaType.APPLICATION_JSON)
                    .header(AUTHORIZATION, BEARER + accessToken.getToken())
                    .property(ClientProperties.CONNECT_TIMEOUT, connectTimeout)
                    .property(ClientProperties.READ_TIMEOUT, readTimeout)
                    .method(method, Entity.json(resourceAsString));
            status = response.getStatusInfo();
            content = response.readEntity(String.class);
        } catch (ProcessingException e) {
            throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, e);
        }
        checkAndHandleResponse(content, status, accessToken);
        return mapToResource(content);
    }

    /** Deserializes a response body into this service's resource type. */
    private T mapToResource(String content) {
        return mapToType(content, type);
    }

    /**
     * Deserializes JSON into the given type. For legacy OSIAM 2 installations the schema URN inside
     * User/Group payloads is rewritten to the legacy URN before deserialization.
     */
    <U> U mapToType(String content, Class<U> type) {
        try {
            if (version == Version.OSIAM_2_LEGACY_SCHEMAS && (type == User.class || type == Group.class)) {
                ObjectNode resourceNode = (ObjectNode) objectMapper.readTree(content);
                switchToLegacySchema(resourceNode);
                return objectMapper.readValue(objectMapper.treeAsTokens(resourceNode), type);
            } else {
                return objectMapper.readValue(content, type);
            }
        } catch (IOException | ClassCastException e) {
            throw new OsiamClientException(String.format("Unable to parse %s: %s", typeName, content), e);
        }
    }

    /** Serializes a resource, rewriting the schema URN for legacy OSIAM 2 installations. */
    private String mapToString(T resource) throws JsonProcessingException {
        if (version == Version.OSIAM_2_LEGACY_SCHEMAS) {
            ObjectNode resourceNode = objectMapper.valueToTree(resource);
            switchToLegacySchema(resourceNode);
            return resourceNode.toString();
        } else {
            return objectMapper.writeValueAsString(resource);
        }
    }

    /**
     * Replaces the current schema URN in the node's "schemas" array with the legacy URN.
     * NOTE(review): removing by index while incrementing {@code i} skips the element that
     * shifts into the removed slot; harmless if the schema occurs at most once -- confirm.
     */
    private void switchToLegacySchema(ObjectNode resourceNode) {
        ArrayNode schemas = (ArrayNode) resourceNode.get("schemas");
        for (int i = 0; i < schemas.size(); i++) {
            if (getSchema().equals(schemas.get(i).textValue())) {
                schemas.remove(i);
            }
        }
        schemas.add(getLegacySchema());
    }

    /** Current schema URN of the resource type handled by the concrete subclass. */
    protected abstract String getSchema();

    /** Legacy (OSIAM 2) schema URN of the resource type handled by the concrete subclass. */
    protected abstract String getLegacySchema();

    /**
     * Maps a non-successful HTTP status to the matching client exception:
     * 401 → UnauthorizedException, 400 → BadRequestException, 404 → NoResultException,
     * 403 → ForbiddenException, 409 → ConflictException, anything else → OsiamRequestException.
     * Returns normally for any 2xx status.
     */
    void checkAndHandleResponse(String content, StatusType status, AccessToken accessToken) {
        if (status.getFamily() == Family.SUCCESSFUL) {
            return;
        }
        if (status.getStatusCode() == Status.UNAUTHORIZED.getStatusCode()) {
            String errorMessage = extractErrorMessageUnauthorized(content, status);
            throw new UnauthorizedException(errorMessage);
        } else if (status.getStatusCode() == Status.BAD_REQUEST.getStatusCode()) {
            String errorMessage = extractErrorMessage(content, status);
            throw new BadRequestException(errorMessage);
        } else if (status.getStatusCode() == Status.NOT_FOUND.getStatusCode()) {
            String errorMessage = extractErrorMessage(content, status);
            throw new NoResultException(errorMessage);
        } else if (status.getStatusCode() == Status.FORBIDDEN.getStatusCode()) {
            String errorMessage = extractErrorMessageForbidden(accessToken);
            throw new ForbiddenException(errorMessage);
        } else if (status.getStatusCode() == Status.CONFLICT.getStatusCode()) {
            String errorMessage = extractErrorMessage(content, status);
            throw new ConflictException(errorMessage);
        } else {
            String errorMessage = extractErrorMessageDefault(content, status);
            throw new OsiamRequestException(status.getStatusCode(), errorMessage);
        }
    }

    /** 403 message: reports the scopes carried by the rejected token. */
    private String extractErrorMessageForbidden(AccessToken accessToken) {
        return "Insufficient scopes: " + accessToken.getScopes();
    }

    private String extractErrorMessageUnauthorized(String content, StatusType status) {
        return extractErrorMessage(content, status);
    }

    private String extractErrorMessageDefault(String content, StatusType status) {
        return extractErrorMessage(content, status);
    }

    /**
     * Best-effort extraction of a human-readable error message: tries the SCIM error format
     * (legacy or current, depending on {@code version}), then the OAuth error format, and finally
     * falls back to a generic message that echoes the raw response body.
     */
    private String extractErrorMessage(String content, StatusType status) {
        String message;
        if (version == Version.OSIAM_2_LEGACY_SCHEMAS) {
            message = getScimErrorMessageLegacy(content);
        } else {
            message = getScimErrorMessage(content);
        }
        if (message == null) {
            message = getOAuthErrorMessage(content);
        }
        if (message == null) {
            message = String.format("Could not deserialize the error response for the HTTP status '%s'.",
                    status.getReasonPhrase());
            if (content != null) {
                message += String.format(" Original response: %s", content);
            }
        }
        return message;
    }

    /** Parses the current SCIM error format; returns null if the body does not match. */
    private String getScimErrorMessage(String content) {
        try {
            ErrorResponse error = objectMapper.readValue(content, ErrorResponse.class);
            return error.getDetail();
        } catch (ProcessingException | IOException e) {
            return null;
        }
    }

    /** Parses the legacy SCIM error format (flat string map); returns null if it does not match. */
    private String getScimErrorMessageLegacy(String content) {
        try {
            Map<String, String> error = objectMapper.readValue(content, new TypeReference<Map<String, String>>() {
            });
            return error.get("description");
        } catch (ProcessingException | IOException e) {
            return null;
        }
    }

    /** Parses the OAuth error format; returns null if the body does not match. */
    private String getOAuthErrorMessage(String content) {
        try {
            OAuthErrorMessage error = objectMapper.readValue(content, OAuthErrorMessage.class);
            return error.getDescription();
        } catch (ProcessingException | IOException e) {
            return null;
        }
    }

    int getConnectTimeout() {
        return connectTimeout;
    }

    int getReadTimeout() {
        return readTimeout;
    }

    Version getVersion() {
        return version;
    }
}
// String implements Stringer for Spacecraft displaying its panels. func (ship Spacecraft) String() string { min, max := PointOfOrigin(), PointOfOrigin() for p := range ship.panels { if p.x < min.x { min.x = p.x } else if p.x > max.x { max.x = p.x } if p.y < min.y { min.y = p.y } else if p.y > max.y { max.y = p.y } } var buf bytes.Buffer for y := min.y; y <= max.y; y++ { for x := min.x; x <= max.x; x++ { c := ship.panels[Point{x: x, y: y}] if c == White { buf.WriteString("#") } else { buf.WriteString(".") } } buf.WriteString("\n") } return buf.String() }
#include <iostream>

#include "libg3logger/g3logger.h"

#include "SerdpRecorder.h"

namespace serdp_recorder {

using namespace std;
using namespace libblackmagic;

using std::placeholders::_1;

// Wires up the BlackMagic input/output client, camera state, preview display and
// the worker thread. Incoming images are dispatched to receiveImages().
SerdpRecorder::SerdpRecorder( libg3logger::G3Logger &logger )
  : _logger(logger),
    _keepGoing( true ),
    _bmClient( new InputOutputClient() ),
    _camState( new CameraState( _bmClient->output().sdiProtocolBuffer() ) ),
    _sonar( nullptr ),
    _display( new OpenCVDisplay( std::bind( &SerdpRecorder::handleKey, this, _1 ) ) ),
    _recorder( nullptr ),
    _displayed(0),
    _pingCount(0),
    _thread( active_object::Active::createActive() )
{
  _bmClient->input().setNewImagesCallback( std::bind( &SerdpRecorder::receiveImages, this, std::placeholders::_1 ));
}

SerdpRecorder::~SerdpRecorder() {;}

// Parses the command line, configures logging/display/input/output (and
// optionally the Oculus sonar), then spins until _keepGoing is cleared by the
// 'q' key. Returns 0 on normal shutdown.
int SerdpRecorder::run( int argc, char **argv ) {
  CLI::App app{"Simple BlackMagic camera recorder"};

  int verbosity = 0;
  app.add_flag("-v", verbosity, "Additional output (use -vv for even more!)");

  bool do3D = false;
  app.add_flag("--do-3d",do3D, "Enable 3D modes");

  bool noDisplay = false;
  app.add_flag("--no-display,-x", noDisplay, "Disable display");

  string desiredModeString = "1080p2997";
  app.add_option("--mode,-m", desiredModeString, "Desired mode");

  // NOTE(review): doConfigCamera is parsed but never read below -- dead flag, confirm intent.
  bool doConfigCamera = false;
  app.add_flag("--config-camera,-c", doConfigCamera, "If enabled, send initialization info to the cameras");

  bool doListCards = false;
  app.add_flag("--list-cards", doListCards, "List Decklink cards in the system then exit");

  bool doListInputModes = false;
  app.add_flag("--list-input-modes", doListInputModes, "List Input modes then exit");

  // NOTE(review): stopAfter is parsed but the frame-count check in the main loop is
  // commented out, so this option currently has no effect -- confirm intent.
  int stopAfter = -1;
  app.add_option("--stop-after", stopAfter, "Stop after N frames");

  app.add_flag("-s,--sonar", _doSonar, "Record Oculus sonar");

  string sonarIp("auto");
  app.add_option("--sonar-ip", sonarIp, "IP address of sonar or \"auto\" to automatically detect.");

  app.add_option("--output,-o", _outputDir, "Output dir");

  float previewScale = 0.5;
  app.add_option("--preview-scale", previewScale, "Scale of preview window");

  CLI11_PARSE(app, argc, argv);

  // -v raises the stderr log threshold to INFO, -vv to DEBUG.
  switch(verbosity) {
    case 1:
      _logger.stderrHandle->call( &ColorStderrSink::setThreshold, INFO );
      break;
    case 2:
      _logger.stderrHandle->call( &ColorStderrSink::setThreshold, DEBUG );
      break;
  }

  // Help string (key bindings are handled in handleKey()).
  cout << "Commands" << endl;
  cout << " q quit" << endl;
  cout << " [ ] Adjust focus" << endl;
  cout << " f Set autofocus" << endl;
  cout << " ; ' Adjust aperture" << endl;
  cout << " . / Adjust shutter speed" << endl;
  cout << " z x Adjust sensor gain" << endl;
  cout << " s Cycle through reference sources" << endl;

  // Handle the one-off commands, then exit without starting streams.
  if( doListCards || doListInputModes ) {
    if(doListCards) DeckLink::ListCards();

    if(doListInputModes) {
      DeckLink dl;
      dl.listInputModes();
    }
    return 0;
  }

  BMDDisplayMode mode = stringToDisplayMode( desiredModeString );
  if( (mode == bmdModeUnknown) || ( mode == bmdModeDetect) ) {
    LOG(WARNING) << "Card will always attempt automatic detection, starting in HD1080p2997 mode";
    mode = bmdModeHD1080p2997;
  } else {
    LOG(WARNING) << "Starting in mode " << desiredModeString;
  }

  _display->setEnabled( !noDisplay );
  _display->setPreviewScale( previewScale );

  // Input should always auto-detect
  _bmClient->input().enable( mode, true, do3D );
  _bmClient->output().enable( mode );

  if( _doSonar ) {
    LOG(INFO) << "Enabling sonar";
    liboculus::SonarConfiguration sonarConfig;
    _sonar.reset( new SonarClient( sonarConfig, sonarIp ) );
    _sonar->setDataRxCallback( std::bind( &SerdpRecorder::receivePing, this, std::placeholders::_1 ));
    _sonar->start();
  }

  LOG(DEBUG) << "Starting streams";
  if( !_bmClient->startStreams() ) {
    LOG(WARNING) << "Unable to start streams";
    exit(-1);
  }

  // All real work happens in callbacks on the worker thread; this loop only idles.
  while( _keepGoing ) {
    // \TODO. Replace with something blocking
    usleep( 100000 );
  }

  // Flush and close the recorder (if any) before tearing down the streams.
  if( _recorder ) _recorder.reset();

  LOG(INFO) << "End of main loop, stopping streams...";

  _bmClient->stopStreams();

  if( _sonar ) _sonar->stop();

  return 0;
}

//====

// Callback from the BlackMagic input thread: timestamps the frame set and hands
// it to the worker thread so the capture thread is never blocked.
void SerdpRecorder::receiveImages( const libblackmagic::InputHandler::MatVector &rawImages ) {
  _thread->send( std::bind( &SerdpRecorder::receiveImagesImpl, this, rawImages, std::chrono::system_clock::now() ) );
}

// Worker-thread side of receiveImages: forwards the frames to the recorder and
// the preview display, logging every 50th frame.
void SerdpRecorder::receiveImagesImpl( const libblackmagic::InputHandler::MatVector &rawImages,
                                       const std::chrono::time_point< std::chrono::system_clock > time ) {
  if( _recorder ) _recorder->addMats( rawImages );

  if( _display ) _display->showVideo( rawImages );

  LOG_IF(INFO, (_displayed % 50) == 0) << "Frame #" << _displayed;
  ++_displayed;
}

//=====

// Callback from the sonar client thread: timestamps the ping and hands it to
// the worker thread.
void SerdpRecorder::receivePing( const SimplePingResult &ping ) {
  _thread->send( std::bind( &SerdpRecorder::receivePingImpl, this, ping, std::chrono::system_clock::now() ) );
}

// Worker-thread side of receivePing: forwards the ping to the recorder and display.
void SerdpRecorder::receivePingImpl( const SimplePingResult &ping,
                                     const std::chrono::time_point< std::chrono::system_clock > time ) {
  ++_pingCount;

  auto valid = ping.valid();
  LOG_IF(DEBUG, (bool)_recorder) << "Recording " << (valid ? "valid" : "invalid") << " ping";

  // Send to recorder
  if( _recorder ) _recorder->addSonar( ping, time );

  if( _display ) _display->showSonar( ping );
}

//=====

// Keyboard dispatcher for the preview window. Camera commands are queued into
// the shared SDI protocol buffer (sent to camera 1); '\\' toggles recording and
// 'q' requests shutdown.
void SerdpRecorder::handleKey( const char c ) {

  std::shared_ptr<SharedBMSDIBuffer> sdiBuffer( _bmClient->output().sdiProtocolBuffer() );

  const int CamNum = 1;

  SDIBufferGuard guard( sdiBuffer );

  switch(c) {
    case 'f':
      // Send absolute focus value
      LOG(INFO) << "Sending instantaneous autofocus to camera";
      guard( []( BMSDIBuffer *buffer ){ bmAddInstantaneousAutofocus( buffer, CamNum ); });
      break;

    case '[':
      // Send positive focus increment
      LOG(INFO) << "Sending focus increment to camera";
      guard( []( BMSDIBuffer *buffer ){ bmAddFocusOffset( buffer, CamNum, 0.05 ); });
      break;

    case ']':
      // Send negative focus increment
      LOG(INFO) << "Sending focus decrement to camera";
      guard( []( BMSDIBuffer *buffer ){ bmAddFocusOffset( buffer, CamNum, -0.05 ); });
      break;

    //=== Aperture increment/decrement ===
    case '\'': {
      // Track the aperture change locally; the actual send to the camera is disabled:
      // guard( []( BMSDIBuffer *buffer ){ bmAddOrdinalApertureOffset( buffer, CamNum, 1 ); });
      auto val = _camState->apertureInc();
      LOG(INFO) << "Set aperture to " << val;
    }
      break;

    case ';': {
      // As above, the send is disabled:
      // guard( []( BMSDIBuffer *buffer ){ bmAddOrdinalApertureOffset( buffer, CamNum, -1 ); });
      auto val = _camState->apertureDec();
      LOG(INFO) << "Set aperture to " << val;
    }
      break;

    //=== Shutter increment/decrement ===
    case '.':
      LOG(INFO) << "Sending shutter increment to camera";
      _camState->exposureInc();
      break;

    case '/':
      LOG(INFO) << "Sending shutter decrement to camera";
      _camState->exposureDec();
      break;

    //=== Gain increment/decrement ===
    case 'z':
      LOG(INFO) << "Sending gain increment to camera";
      _camState->gainInc();
      break;

    case 'x':
      LOG(INFO) << "Sending gain decrement to camera";
      _camState->gainDec();
      break;

    //== Increment/decrement white balance
    case 'w':
      LOG(INFO) << "Auto white balance";
      guard( []( BMSDIBuffer *buffer ){ bmAddAutoWhiteBalance( buffer, CamNum ); });
      break;

    case 'e':
      LOG(INFO) << "Restore white balance";
      guard( []( BMSDIBuffer *buffer ){ bmAddRestoreWhiteBalance( buffer, CamNum ); });
      break;

    case 'r':
      LOG(INFO) << "Sending decrement to white balance";
      guard( []( BMSDIBuffer *buffer ){ bmAddWhiteBalanceOffset( buffer, CamNum, -500, 0 ); });
      break;

    case 't':
      LOG(INFO) << "Sending increment to white balance";
      guard( []( BMSDIBuffer *buffer ){ bmAddWhiteBalanceOffset( buffer, CamNum, 500, 0 ); });
      break;

    // '1'..'3' switch the camera video mode and reset reference source / auto-exposure.
    case '1':
      LOG(INFO) << "Setting camera to 1080p2997";
      guard( [](BMSDIBuffer *buffer ){
        bmAddVideoMode( buffer, CamNum,bmdModeHD1080p2997 );
        bmAddReferenceSource( buffer, CamNum, BM_REF_SOURCE_PROGRAM );
        bmAddAutoExposureMode( buffer, CamNum, BM_AUTOEXPOSURE_SHUTTER );
      });
      break;

    case '2':
      LOG(INFO) << "Setting camera to 1080p30";
      guard( [](BMSDIBuffer *buffer ){
        bmAddVideoMode( buffer, CamNum,bmdModeHD1080p30 );
        bmAddReferenceSource( buffer, CamNum, BM_REF_SOURCE_PROGRAM );
        bmAddAutoExposureMode( buffer, CamNum, BM_AUTOEXPOSURE_SHUTTER );
      });
      break;

    case '3':
      LOG(INFO) << "Setting camera to 1080p60";
      guard( [](BMSDIBuffer *buffer ){
        bmAddVideoMode( buffer, CamNum,bmdModeHD1080p6000 );
        bmAddReferenceSource( buffer, CamNum, BM_REF_SOURCE_PROGRAM );
        bmAddAutoExposureMode( buffer, CamNum, BM_AUTOEXPOSURE_SHUTTER );
      });
      break;

    case '`':
      LOG(INFO) << "Updating camera";
      _camState->updateCamera();
      break;

    // Toggle recording: stop if running, otherwise open a new VideoRecorder
    // sized from the decklink's current input configuration.
    case '\\':
      if( _recorder ) {
        LOG(INFO) << "Stopping recording";
        _recorder.reset();
      } else {
        LOG(INFO) << "Starting recording";

        const libblackmagic::ModeConfig config( _bmClient->input().currentConfig() );
        const ModeParams params( config.params() );

        if( params.valid() ) {
          const int numStreams = config.do3D() ? 2 : 1;

          LOG(INFO) << "Opening video " << params.width << " x " << params.height << " with " << numStreams << " streams";
          _recorder.reset( new VideoRecorder( VideoRecorder::MakeFilename( _outputDir ).string(),
                                              params.width, params.height,
                                              params.frameRate, numStreams, _doSonar ));
        } else {
          LOG(WARNING) << "Bad configuration from the decklink";
        }
      }
      break;

    // '9' shows the camera overlay, '0' hides it.
    case '9':
      LOG(INFO) << "Enabling overlay";
      guard( [](BMSDIBuffer *buffer ){ bmAddOverlayEnable( buffer, CamNum, 0x3 );});
      break;

    case '0':
      LOG(INFO) << "Enabling overlay";
      guard( [](BMSDIBuffer *buffer ){ bmAddOverlayEnable( buffer, CamNum, 0x0 );});
      break;

    case 'q':
      _keepGoing = false;
      break;
  }

}

}
package jsonrpc

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestValidateRequest checks that a fully well-formed request passes validation.
func TestValidateRequest(t *testing.T) {
	assert := assert.New(t)

	params := make(map[string]interface{})
	params["id"] = 1

	assert.Nil(validateRequest(&Request{
		Version: "2.0",
		Method:  "/resource/get",
		Params:  params,
		ID:      "1",
	}))
}

// TestValidateRequestVersion checks that a version other than "2.0" is rejected.
func TestValidateRequestVersion(t *testing.T) {
	assert := assert.New(t)

	params := make(map[string]interface{})
	params["id"] = 1

	assert.Nil(validateRequest(&Request{
		Version: "1.0",
		Method:  "/resource/get",
		Params:  params,
		ID:      "1",
	}))
}

// TestValidateRequestMethod checks that an empty method is rejected while any
// non-empty method string is accepted (its content is not inspected further,
// per these expectations).
func TestValidateRequestMethod(t *testing.T) {
	assert := assert.New(t)

	params := make(map[string]interface{})
	params["id"] = 1

	assert.NotNil(validateRequest(&Request{
		Version: "2.0",
		Method:  "",
		Params:  params,
		ID:      "1",
	}))

	assert.Nil(validateRequest(&Request{
		Version: "2.0",
		Method:  "/reousrce/get",
		Params:  params,
		ID:      "1",
	}))
}

// TestValidateRequestHttpMethod checks that only "", "GET" and "POST" are
// accepted as HTTP methods; anything else (e.g. DELETE) is rejected.
func TestValidateRequestHttpMethod(t *testing.T) {
	assert := assert.New(t)

	params := make(map[string]interface{})
	params["id"] = 1

	assert.NotNil(validateRequest(&Request{
		Version:    "2.0",
		Method:     "/resource/get",
		HttpMethod: "DELETE",
		Params:     params,
		ID:         "1",
	}))

	assert.Nil(validateRequest(&Request{
		Version:    "2.0",
		Method:     "/resource/get",
		HttpMethod: "",
		Params:     params,
		ID:         "1",
	}))

	assert.Nil(validateRequest(&Request{
		Version:    "2.0",
		Method:     "/resource/get",
		HttpMethod: "GET",
		Params:     params,
		ID:         "1",
	}))

	assert.Nil(validateRequest(&Request{
		Version:    "2.0",
		Method:     "/resource/update",
		HttpMethod: "POST",
		Params:     params,
		ID:         "1",
	}))
}

// TestValidateRequestID checks that an empty request ID is rejected.
func TestValidateRequestID(t *testing.T) {
	assert := assert.New(t)

	params := make(map[string]interface{})
	params["id"] = 1

	assert.NotNil(validateRequest(&Request{
		Version: "2.0",
		Method:  "/resource/get",
		Params:  params,
		ID:      "",
	}))

	assert.Nil(validateRequest(&Request{
		Version: "2.0",
		Method:  "/reousrce/get",
		Params:  params,
		ID:      "1",
	}))
}

// TestValidateRequests checks that a batch with distinct IDs validates.
func TestValidateRequests(t *testing.T) {
	assert := assert.New(t)

	params := make(map[string]interface{})
	params["id"] = 1

	reqs := make([]Request, 0)
	reqs = append(reqs, Request{
		Version: "2.0",
		Method:  "/resource/get1",
		Params:  params,
		ID:      "1",
	})
	reqs = append(reqs, Request{
		Version: "2.0",
		Method:  "/resource/get2",
		Params:  params,
		ID:      "2",
	})

	assert.Nil(ValidateRequests(&reqs))
}

// TestValidateRequestsIDDup checks that a batch with duplicate IDs is rejected.
func TestValidateRequestsIDDup(t *testing.T) {
	assert := assert.New(t)

	params := make(map[string]interface{})
	params["id"] = 1

	reqs := make([]Request, 0)
	reqs = append(reqs, Request{
		Version: "2.0",
		Method:  "/resource/get1",
		Params:  params,
		ID:      "1",
	})
	reqs = append(reqs, Request{
		Version: "2.0",
		Method:  "/resource/get2",
		Params:  params,
		ID:      "1",
	})

	assert.NotNil(ValidateRequests(&reqs))
}
/**
 * Grants ADMIN permission on the given location to each subject that does not
 * already hold it. Already-authorized subjects are skipped; when every subject
 * is already authorized, no grant call is made at all.
 *
 * @param location    the location on which to check / grant
 * @param subjectType the type of the subjects to grant
 * @param subjects    the subjects to process
 */
public void grantAuthorizationOnLocationIfNecessary(Location location, Subject subjectType, String... subjects) {
    Set<String> missingAdmin = Arrays.stream(subjects)
            .filter(subject -> !location.getPermissions(subjectType, subject).contains(Permission.ADMIN))
            .collect(Collectors.toSet());
    if (!missingAdmin.isEmpty()) {
        resourcePermissionService.grantPermission(location, subjectType, missingAdmin.toArray(new String[0]));
    }
}
/**
 * Disables monitoring of this input. While monitoring is disabled, the audio
 * coming through the input is not audible on the output, but the samples in
 * the left, right, and mix buffers remain fully accessible. This is the
 * default state of an AudioInput and is what you will want if your input is a
 * microphone and your output is speakers. Otherwise: feedback.
 *
 * @shortdesc When monitoring is disabled, you will not hear
 * the audio that is coming through the input.
 *
 * @example Basics/MonitorInput
 *
 * @related enableMonitoring ( )
 * @related isMonitoring ( )
 * @related AudioInput
 *
 */
public void disableMonitoring()
{
	m_isMonitoring = false;

	// Mute through whichever level control the underlying line exposes:
	// a VOLUME control is zeroed; otherwise GAIN is pushed down to -64
	// (presumably the control's floor -- confirm against the mixer).
	if ( hasControl(VOLUME) )
	{
		setVolume( 0 );
		return;
	}

	if ( hasControl(GAIN) )
	{
		setGain( -64 );
	}
}
/**
 * A polygon which is a rectangle that may be rotated.
 *
 * @author H. Irtel
 * @version 0.1.0
 */
public class RotatedRectangle extends Polygon {
	/**
	 * Create a polygon containing the four corners of a rectangle that is
	 * centered on (x, y) and rotated counter-clockwise by the given angle.
	 *
	 * @param x
	 *            horizontal center position.
	 * @param y
	 *            vertical center position.
	 * @param width
	 *            rectangle width.
	 * @param height
	 *            rectangle height.
	 * @param angle
	 *            the angle of counter clockwise rotation in degrees.
	 */
	public RotatedRectangle(int x, int y, int width, int height, int angle) {
		// Corner coordinates relative to the rectangle's center.
		int left = -width / 2;
		int top = -height / 2;
		int[] cornerX = { left, width + left, width + left, left };
		int[] cornerY = { top, top, height + top, height + top };
		npoints = 4;
		if (angle == 0) {
			// No rotation: adopt the corner arrays directly.
			xpoints = cornerX;
			ypoints = cornerY;
		} else {
			// Negated angle gives counter-clockwise rotation in screen coordinates.
			double rad = -Math.PI * angle / 180.0;
			double c = Math.cos(rad);
			double s = Math.sin(rad);
			xpoints = new int[4];
			ypoints = new int[4];
			for (int i = 0; i < 4; i++) {
				xpoints[i] = (int) (c * cornerX[i] + s * cornerY[i]);
				ypoints[i] = (int) (-s * cornerX[i] + c * cornerY[i]);
			}
		}
		// Shift the centered polygon to its final position.
		if (x != 0 || y != 0) {
			translate(x, y);
		}
	}
}
package ftn.ktsnvt.culturalofferings.e2e.tests;

import ftn.ktsnvt.culturalofferings.e2e.pages.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.support.PageFactory;

import static org.junit.Assert.assertEquals;

/**
 * Selenium end-to-end tests for the super-admin "manage admins" screen.
 * Each test drives a real Chrome browser against a frontend assumed to be
 * running at http://localhost:4200 (with its backend available), signing in
 * as the super admin and exercising the add/delete admin flows.
 */
public class AdminE2ETest {

    private WebDriver driver;

    // Page objects wrapping the sign-in form and the admin management screen.
    private SigninPage signinPage;
    private AdminPage adminPage;

    @Before
    public void setUp() {
        // Requires the chromedriver.exe binary in test resources (Windows path).
        System.setProperty("webdriver.chrome.driver", "src/test/resources/chromedriver.exe");
        driver = new ChromeDriver();
        driver.manage().window().maximize();
        signinPage = PageFactory.initElements(driver, SigninPage.class);
        adminPage = PageFactory.initElements(driver, AdminPage.class);
    }

    @After
    public void tearDown() {
        driver.quit();
    }

    /**
     * Blocks the current thread for roughly the given number of seconds.
     * NOTE(review): this uses Object.wait() on the driver as a sleep
     * substitute; Thread.sleep() or explicit WebDriver waits would be the
     * conventional choice — confirm before changing, the tests are timing-sensitive.
     */
    private void justWait(int seconds) throws InterruptedException {
        synchronized (driver) {
            driver.wait(seconds * 1000);
        }
    }

    /**
     * Happy path: sign in as super admin, create a new admin account, then
     * page to the end of the admin list and delete the last entry (cleanup).
     */
    @Test
    public void AddAdminSuccess() throws InterruptedException {
        //sign in as super admin
        driver.get("http://localhost:4200/sign-in");
        justWait(2);
        signinPage.ensureIsDisplayedEmail();
        signinPage.getEmail().sendKeys("<EMAIL>");
        signinPage.getPassword().sendKeys("<PASSWORD>");
        signinPage.getSigninBtn().click();
        justWait(2);
        // Successful super-admin sign-in must land on the admins page.
        assertEquals("http://localhost:4200/super-admin/admins", driver.getCurrentUrl());
        justWait(2);

        //add new admin
        adminPage.getAddCancelBtn().click();
        adminPage.ensureIsDisplayedInput();
        adminPage.getEmail().sendKeys("<EMAIL>");
        adminPage.getPassword().sendKeys("<PASSWORD>");
        adminPage.getFirstName().sendKeys("Zika");
        adminPage.getLastName().sendKeys("Zikic");
        justWait(3);
        adminPage.getAddAdminBtn().click();
        justWait(5);

        //delete admin
        //get to the last page (100 iterations is a safety cap, not an expected count)
        for (int i = 0; i < 100; i++) {
            justWait(1);
            if (adminPage.getNextPageButton().isEnabled()) {
                adminPage.getNextPageButton().click();
            } else {
                break;
            }
        }
        //get button from last tr — assumes the newly added admin is listed last;
        // TODO confirm list ordering before relying on this for cleanup.
        justWait(2);
        adminPage.getDeleteBtn().click();
        justWait(5);
    }

    //typed email exists
    /**
     * Failure path: submits the add-admin form with an e-mail that already
     * exists. NOTE(review): no assertion is made on the expected error state —
     * consider asserting the error message shown by the form.
     */
    @Test
    public void AddAdminFailed() throws InterruptedException {
        //sign in as super admin
        driver.get("http://localhost:4200/sign-in");
        justWait(2);
        signinPage.ensureIsDisplayedEmail();
        signinPage.getEmail().sendKeys("<EMAIL>");
        signinPage.getPassword().sendKeys("<PASSWORD>");
        signinPage.getSigninBtn().click();
        justWait(2);
        assertEquals("http://localhost:4200/super-admin/admins", driver.getCurrentUrl());
        justWait(2);

        //add new admin
        adminPage.getAddCancelBtn().click();
        adminPage.ensureIsDisplayedInput();
        adminPage.getEmail().sendKeys("<EMAIL>");
        adminPage.getPassword().sendKeys("<PASSWORD>");
        adminPage.getFirstName().sendKeys("Zika");
        adminPage.getLastName().sendKeys("Zikic");
        justWait(2);
        adminPage.getAddAdminBtn().click();
        justWait(5);
    }
}
Small-Scale Spatial Variability of Plant Nutrients and Soil Organic Matter: An Arable Cropping Case Study ABSTRACT Soil properties can vary spatially due to differences in topography, parent material, and land management practices. For site-specific management within the field, information on spatial variation of soil properties is essential. This case study was conducted to quantify the spatial variability of available plant nutrients and organic matter under arable cropping in New Zealand. Within a 10.4-ha paddock that had a long-term history of arable cropping, 91 samples (0–7.5 cm) were collected in a grid pattern for determination of mineral nitrogen (Min N), anaerobically mineralizable N (AMN), Olsen P, total carbon (TC), and total N (TN). The data were evaluated using geostatistical and classical statistical methods. Although the paddock had a flat topography and had been managed uniformly for many years, nutrient concentrations exhibited substantial variability. All measured variables except mineral N showed moderate positional dependence. Autocorrelation distances were 400 m for Olsen P, 293 m for AMN, and 347 m for TC. Soil C showed a strong, positive correlation with the amount of clay plus fine silt (<5 µm fraction) and a negative correlation with sand content. These results suggest that textural variation was a major factor influencing within-field variability in soil organic matter. Using the spatial data, zones with different plant nutrient requirements were identified (four for N; two for P). An application strategy that accounts for fertility differences between zones may improve fertilizer use efficiency and contribute to better environmental outcomes; P inputs could be reduced by 50% by avoiding application to the high Olsen P zone.
# <filename>nicos_mlz/kws3/setups/mobile.py
# -*- coding: utf-8 -*-

# NICOS setup: motorized "Mobile Sample" stages for the KWS-3 instrument.
# All axes are Tango motors served under the `s7_motor` server (presumably an
# S7 PLC backend, judging by the name — confirm against the Tango database).

description = 'Mobile Sample devices'
group = 'optional'
# Ordering hint for the setup-selection display.
display_order = 40
# This real-hardware setup replaces the simulated sample stage.
excludes = ['virtual_sample']

# Base URI of the instrument's Tango database and the motor server below it.
tango_base = 'tango://phys.kws3.frm2:10000/kws3/'
s7_motor = tango_base + 's7_motor/'

devices = dict(
    # Rotation table inside the 10 m vacuum chamber.
    sam_rot = device('nicos.devices.tango.Motor',
        description = 'rot-table inside vacuum chamber 10m',
        tangodevice = s7_motor + 'sam_rot',
        unit = 'deg',
        precision = 0.01,  # positioning tolerance in degrees
    ),
    # Tilt table, phi axis, in the 10 m vacuum chamber.
    sam_phi = device('nicos.devices.tango.Motor',
        description = 'tilt-table-phi in vacuum chamber 10m',
        tangodevice = s7_motor + 'sam_phi',
        unit = 'deg',
        precision = 0.01,
    ),
    # Tilt table, chi axis, in the 10 m vacuum chamber.
    sam_chi = device('nicos.devices.tango.Motor',
        description = 'tilt-table-chi in vacuum chamber 10m',
        tangodevice = s7_motor + 'sam_chi',
        unit = 'deg',
        precision = 0.01,
    ),
)
Holonomy orbits of the snake charmer algorithm The snake charmer algorithm permits us to deform a piecewise smooth curve starting from the origin in R^d, so that its end follows a given path. When this path is a loop, a holonomy phenomenon occurs. We prove that the holonomy orbits are closed manifolds diffeomorphic to real Stiefel manifolds. A survey of the snake charmer algorithm is given in the paper. Preliminaries 1.1 Let L be a positive real number and let P = {0 = s_0 < s_1 < · · · < s_{N−1} < s_N = L} be a finite partition of [0, L]. If (X, d) is a metric space, a map z : [0, L] → X is said piecewise continuous for P if, for every i = 0, . . . , N − 1, the restriction of z to the semi-open interval [s_i, s_{i+1}) is continuous. In particular, z is continuous on the right and the discontinuities, only possible at points of P, are just jumps. We denote by C^0_P([0, L], X) the space of maps from [0, L] to X which are piecewise continuous for P; this space is endowed with the uniform convergence distance d(z_1, z_2) = sup_{s ∈ [0, L]} d(z_1(s), z_2(s)). When P is empty, the map z is just continuous and C^0_∅([0, L], X) = C^0([0, L], X). The map z → (z_1, . . . , z_N) provides a homeomorphism (1). If X is a Riemannian manifold, the space C^0([0, L], X) naturally inherits a Banach manifold structure (see the references), so C^0_P([0, L], X) is a Banach manifold using (1). The tangent space T_z C^0_P([0, L], X) to C^0_P([0, L], X) at z is the space of those v ∈ C^0_P([0, L], T X) satisfying p•v = z, where p : T X → X is the natural projection. 1.2 The unit sphere in R^d centered at the origin is denoted by S^{d−1}. Let Möb(d − 1) be the group of Möbius transformations of S^{d−1}. It is a Lie group of dimension d(d + 1)/2, with SO(d) as a compact maximal subgroup. Its Lie algebra is denoted by möb(d − 1). For 0 ≠ v ∈ R^d, we define a 1-parameter subgroup Γ^v_t of Möb(d − 1) by • ϕ_v : S^{d−1} → R^d is the stereographic projection sending v/|v| to ∞ and −v/|v| to 0; • ρ^v_t : R^d → R^d is the homothety ρ^v_t(x) = e^{t|v|} x.
Thus, Γ v t is a purely hyperbolic flow with stable fixed point v/|v| and unstable fixed point −v/|v|. We agree that Γ 0 t = id. Let C v ∈ möb(d − 1) such that Γ v t = exp(tC v ) (with C 0 = 0). The correspondence v → C v gives an injective linear map χ : R d → möb(d − 1); its image is a d-dimensional vector subspace H of möb(d − 1), supplementary to so(d), and H generates möb(d − 1) as a Lie algebra. Let ∆ H be the right invariant distribution on Möb(d − 1) which is equal to H at the unit element. The 1-parameter subgroups Γ v t may be used to built up a diffeomorphism Ψ : The algorithm In this section, we give a survey of the snake charmer algorithm and some of its properties. For details, see . 2.1 Fix a positive real number L and a finite set P ⊂ as in 1.1. Let Conf = C 0 P ( , S d−1 ), with its Banach manifold structure coming from the standard Riemannian structure on S d−1 . The inclusion of S d−1 into R d makes Conf a submanifold of the Banach space The space Conf is the space of configurations for the snakes of length L which are continuous and "piecewise C 1 for P". The snake S z associated to z ∈ Conf is the map S z (s) = s 0 z(τ )dτ . Taking its "snout" S z (L) provides a map f : Conf → R d , defined by which is proven to be smooth. The image of f is the closed ball of radius L centered at the origin. The snakes corresponding to piecewise constant configurations, z(s) = z i for s ∈ . If all the (s i − s i−1 ) are equal, the snake is called an isosceles polygonal snake. The critical points of f are the lined configurations, where {z(s) | s ∈ } ⊂ {±p} for a point p ∈ S d−1 (for polygonal snakes, this is ). The snake associated to such a configuration is then contained in the line through p. The set of critical values is thus a finite collection of (d − 1)spheres centered at the origin, which depends on P. 
Charming snakes will now be a path-lifting ability for the map f : given an initial configuration z ∈ Conf and a C 1 -curve γ : → R d such that γ(0) = f (z), we are looking for a curve t → z t ∈ Conf such that z 0 = z and f (z t ) = γ(t). In Ehresmann's spirit, we are looking for a connection for the map f . The tangent space T z Conf to Conf at z is the vector space of those maps v ∈ C 0 P ( , R d ) such that v(s), z(s) = 0 for all s ∈ , where , denotes the usual scalar product in R d . We endow T z Conf with the scalar product v, w = L 0 v(s), w(s) ds. For each smooth map ϕ : R d → R, one gets a vector field Grad (ϕ•f ) on Conf defined by This vector field plays the role of the gradient of ϕ• f , that is for each v ∈ T z Conf (as the metric induced by our scalar product is not complete, gradients do not exist in general). For z ∈ Conf , the set of all Grad z (ϕ• f ) for ϕ ∈ C 1 (R d , R) is a vector subspace ∆ z of T z Conf , of dimension d − 1 if z is a lined configuration and d otherwise. The correspondence z → ∆ z is a distribution ∆ (of non-constant dimension). For a pair (z, γ) ∈ Conf × C 1 ( , R d ) such that f (z) = γ(0), the snake charmer algorithm takes for z t the horizontal lifting of γ for the connection ∆. As the map f is not proper and the dimension of ∆ z is not constant, the existence of horizontal liftings has to be established. We use the C ∞ -action of the Möbius group Möb(d − 1) on Conf by post-composition: g · z = g • z. For z 0 ∈ Conf , let A(z 0 ) be the subspace of those z ∈ Conf which can be joined to z 0 by a succession of ∆-horizontal curves. One of the main results ( , proven in for isosceles polygonal snakes) says that A(z 0 ) coincides with the orbit of z 0 under the action of Möb(d − 1): The proof of Theorem 2.2 uses the following fact about the action of the 1parameter subgroup Γ v t of Möb(d − 1) introduced in 1.2: The flow φ t on Conf is therefore ∆-horizontal. 
If z ∈ Conf , denote by β z : Möb(d − 1) → Conf the smooth map β z (g) = g · z. Lemma 2.3 implies that the map β z sends the distribution ∆ H onto the distribution ∆. If z is not a lined configuration, 2.4 The differential equation. As a consequence of Theorem 2.2, a ∆-horizontal curve z t starting at z 0 may be written as g(t) · z 0 where g(t) ∈ Möb(d − 1) and g(0) = id. For a given C 1 -curve γ(t) ∈ R d , the ∆-horizontal lifting g(t)·z 0 of γ(t) is obtained by taking for g(t) the solution of an ordinary differential equation in the Möbius group Möb(d − 1) that we describe here below (more details can be found in ). (Observe that the second term is the Gram matrix of the vectors (z 1 , . . . , z d ) for the scalar product). It can be proved that . Using the linear injective map χ of 1.2, with image H, and the right translation diffeomorphism R g in Möb(d − 1), we get a linear map It turns out that the matrix of the linear map (2) is M (g · z 0 ). If g · z 0 is not a lined configuration, T g F is bijective and M (g · z 0 ) is invertible. One way to insure that g · z 0 is not lined for all g ∈ Möb(d − 1) is to assume that z 0 takes at least 3 distinct values. The following result is proven in : Proposition 2.5 Let z 0 ∈ Conf be a configuration that takes at least 3 distinct values. Let γ : Suppose that the C 1 -curve g : → Möb(d − 1) satisfies the following differential equation: Then, the curve g(t) · z 0 is the unique ∆-horizontal lifting of γ starting at z 0 . As the Möbius group is not compact and the map F is not proper, Differential equation (3) may not have a solution for all t ∈ . The notion of "sedentariness", described in 2.7, is the main tool to study the global existence of ∆-horizontal liftings. This maximum exists since the set {p ∈ S d−1 | µ(z −1 (p)) > r} is finite for all r > 0. By Theorem 2.2, sed(z) is an invariant of A(z). Observe that, if sed(z) = L/2, then z takes at least 3 distinct values. 
The sedentariness of z is used in to get global existence results for horizontal liftings of a path starting at f (z). The one we need is the following Proposition 2.8 Let z ∈ Conf and let γ : As the sedentariness of a configuration is preserved along horizontal curves, Proposition 2.8 is also true for continuous piecewise C 1 -paths. A configuration z is called nomadic if sed(z) = 0. Proposition 2.8 guarantees that, if z is nomadic, any C 1 -path starting at f (z) admits a horizontal lifting, provided its image stays in the open ball of radius L. More general results may be found in . 2.9 Continuity of the algorithm. We now describe how the snake charmer algorithm behaves as we vary the initial configuration z 0 and the C 1 -curve γ. Our goal is not to present the most general statements but only those needed in Section 3. Let it is a metric space with the induced metric from Conf . Consider the map This is a vector field on Möb(d−1) depending on the time t and on the parameter z ∈ E σ,b . Differential equation (3) becomeṡ For any given z ∈ E σ,b , the solution g z of Equation (5) exists for all t ∈ by Proposition 2.8. Since X is continuous and its derivative in g is a continuous map on the variables z and t, classical results on the dependence of parameters for the solution (see for example ) imply that the map (z, t) → g z (t) is continuous. This yields: Let M ⊂ E σ,b be a smooth submanifold of Conf and suppose that γ is of class C 2 . The map X restricted to M is therefore C 1 (in all variables) and using once again we get: We have an analog result when we fix the configuration z 0 and vary γ. that is an open subset of the affine space of C 1 -paths starting at b. Consider the open sets W 0 ⊂ W and U 0 ⊂ R d defined by As W 0 contains no lined configurations, the mapf restricts to a submersion f 0 = f 0 : W 0 → U 0 for which ∆ is an ordinary connection. As f 0 extends tof : W → R d and as W is compact, the map f 0 is proper. 
The original Ehresmann's construction of horizontal liftings then apply: if z 0 ∈ W 0 and γ : → U 0 is a C 1 -curve γ(0) = z 0 (L), then γ admits a ∆-horizontal lifting γ : → W 0 of f 0 withγ(0) = z 0 . Let us specialize to planar snakes (d = 2). Each fiber of f 0 then consists of two points; as f 0 is proper, it is thus a 2-fold cover of U 0 . Therefore,γ(t) is determined by f (γ(t)) = γ(t). We deduce that the unique lifting of γ into Conf is horizontal. If, for t = t 0 , γ(t) crosses the sphere of radius L p − L q , it may happen thatγ(t) tends to a lined configuration when t → t 0 (see Example 4.2 below). To understand when the unique liftingγ(t) is horizontal at t 0 , we must study horizontal liftings around a lined configuration, which, after changing notations, we call again z 0 , corresponding to (p 0 , q 0 ) ∈ W with p 0 = −q 0 . The vector space ∆ z0 is then of dimension 1 and it turns out that T z0 f (∆ z0 ) is the line orthogonal to p 0 , see . Let γ : → R d be a C 1 -curve with γ(0) = z 0 (L) andγ(0) = 0. A necessary condition for γ to admit a horizontal lifting is then γ(0), p 0 = 0. If we orient the plane with the basis (p 0 ,γ(0)), the curve γ has a signed curvature κ(0) at t = 0. In , it is proven that, around t = 0, the unique lifting of γ into Conf is horizontal if and only ifγ (0) is orthogonal to p 0 and Condition (6) has been detected by the numerician Ernst Hairer. Such a secondorder condition for the existence of a horizontal lifting for a non-constant rank distribution is worth being studied. As far as we know, no such a phenomenon is mentioned in the literature. 2.14 Miscellaneous. We finish this section by listing a few more properties of the snake charmer algorithm. Let (S, γ) be an input for the algorithm, with S a snake of length L. Let S t : → R d be the output. The following two results follow directly from Theorem 2.2. The following proposition follows either from Theorem 2.2 or simply from the fact that S t is a horizontal lifting. 
Proposition 2.17 (Reparameterization) Let ϕ : → be an orientation preserving C 1 -diffeomorphism. Then, the deformation of S following the curve γ(ϕ(u)) is S ϕ(u) Finally, by construction, the distribution ∆ is orthogonal to fibers of f : Conf → R d . This can be rephrased in the following Proposition 2.18 z t is the unique lifting of γ which, for all t, minimizes the infinitesimal kinetic energy of the hodograph, that is 1 Again, the existence of such minimizer S t is only guaranteed by an analysis like in 2.7. Holonomy orbits , the space of all configurations with associated snake ending at b. Define the holonomy orbit horb(z 0 ) ⊂ Conf b of z 0 ∈ Conf b by horb(z 0 ) = A(z 0 ) ∩ Conf b ; it is thus the subspace of those z ∈ Conf which are the result of the holonomy of the snake charmer algorithm for a pair (z 0 , γ) with γ a piecewise C 1 -loop at b. Even if z 0 is not lined, the point b might not be a regular value of f , so f −1 (b) might not be a submanifold of Conf . But Lemma 3.1 below tells us that this is the case for horb(z 0 ). For z ∈ Conf , define the spherical dimension spdim(z) of z to be the minimal dimension of a sub-sphere of S d−1 containing the set z( ). By Theorem 2.2, spdim(z) is an invariant of A(z). Notice that spdim(z) = 0 if and only if the configuration z takes one or two values. Since spdim(z 0 ) > 0, the configuration z 0 takes at least three values in S d−1 and so does g · z 0 for all g ∈ Möb(d − 1). Therefore, A(z 0 ) contains no lined configurations and the tangent map T g F : The manifold K contains the stabilizer A of z 0 and K · A = K. Let V k be the smallest sub-sphere of S d−1 containing the image of z 0 . The group A is then the stabilizer of the points of V k ; since k > 0, A is conjugate in Möb(d − 1) to SO(d − k − 1). Therefore, the quotient space K/A is of dimension By , the map β induces an embedding of Möb(d − 1)/A into Conf , hence an embedding of K/A into Conf with image horb(z 0 ). This proves Lemma 3.1. 
In general, horb(z) is not closed; see examples in → R d at f (z 0 ) which is of class C ∞ and which admits a horizontal lifting joining z 0 to z. Proof of Proposition 3.4: A nomadic configuration is not lined. Therefore, k > 0 and the condition |f (z 0 )| < L − 2 sed(z 0 ) = L is automatic. By Theorem 3.2, it is then enough to prove that horb(z 0 ) is connected. is a C ∞ -curve in Möb(d − 1), joining the unit element to g. By Lemma 2.3, the curve g(t) is ∆ H -horizontal. Then g(t) · z 0 is the horizontal lifting of the C ∞ -path γ(t) = f (g(t) · z 0 ), which is a loop of class C ∞ at f (z 0 ). Examples All our examples consist of planar snakes (d = 2). We identify R 2 with the complex plane C. Configurations z ∈ Conf (1) are expressed under the form of s → e iθ(s) with θ(s) ∈ R. 4.1 In our first example, the snake is a half circle with configuration z : → S 1 given by z(s) = ie −is , thus S = S z is given by S(s) = 1 − e −is . We have f (z) = S(π) = 2. The curve γ is a small circle, centered at (2.1875, 0) and of radius 0.1875, followed in the trigonometric direction. The snake charmer algorithm has been solved using the method of 2.6. After one turn, the snake slightly leans to the left. Figure A below shows the snake after various numbers of turns of γ. One sees that after 326 turns, the snake seems being back in its initial position. Figure A The curve t → g(t) ∈ Möb(1) obtained from Equation (3) (such that z t = g(t) · z 0 ), may be visualized using the diffeomorphism R 2 × S 1 ≈ −→ Möb(1) described at the end of 1.2. The two fold covering SU (1, 1) → R 2 × S 1 thus obtained is given in formula by with θ = 2 arg(a) and v = 2 arccosh(|a|)e i arg(ab) . Figure B below illustrates the subset {(v(n · 2π), θ(n · 2π)) | n = 1, . . . , 326} ⊂ R 2 × S 1 . 
The picture on the left hand side shows the 326 points of the set {v(n · 2π)} (v = (v 1 , v 2 )) and the one on the right hand side is the graph of the function n → θ(n · 2π) (also formed by 326 points, but hardly distinguishable). That the points look more concentrated between 80 and 250 seems to be related to the fact, seen in Figure A, that the snake's shape changes less drasticly in this range. As in Figure A, we observe that θ(t) and v(t) seem to have returned to their original position after 326 turns. Thus, f (z) = 2. Let γ : → C be the piecewise C ∞ -loop at 2 defined by
/** * @author Daniela Perez [email protected] on 4/17/18. */ public class FrgEmailValidation extends BaseFragment implements EventsEmailValidation { private FrgEmailvalidationBinding binding; private ViewModelEmailValidation viewModel; public static FrgEmailValidation newInstance() { Bundle args = new Bundle(); FrgEmailValidation fragment = new FrgEmailValidation(); fragment.setArguments(args); return fragment; } @Override public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); this.viewModel = ViewModelProviders.of(this).get(ViewModelEmailValidation.class); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { this.binding = DataBindingUtil.inflate(inflater, R.layout.frg_emailvalidation, container, false); this.binding.setEventHandler(this); return this.binding.getRoot(); } @Override protected void initVars() { } @Override protected void initViews() { } @Override protected void initListeners() { } @Override public void onClickContinue() { switch (PreferencesUtils.getInstance().getUserInvitedStatus()) { case PreferencesUtils.INVITED_NOT_REGISTERED: addDisposable(viewModel.checkEmail(binding.email.getText().toString()) .subscribe(new Consumer<Response<ResponseBody>>() { @Override public void accept(Response<ResponseBody> responseBodyResponse) throws Exception { Timber.d("Code: " + responseBodyResponse.code()); } }, new Consumer<Throwable>() { @Override public void accept(Throwable throwable) throws Exception { Timber.d("Error: " + throwable.getMessage()); } }) ); break; case PreferencesUtils.INVITED_REGISTERED: goToLogin(); break; default: break; } } private void goToLogin() { ((ActivityInit) getActivity()).navigateToSecondFragment(); } private void goToRegister() { Intent i = new Intent(new Intent(getActivity(), ActivityRegister.class)); i.setFlags(i.getFlags() | Intent.FLAG_ACTIVITY_CLEAR_TOP); startActivity(i); getActivity().finish(); } }
// for processing incoming update from Telegram func processUpdate(b *bot.Bot, update bot.Update) bool { var userId string if update.Message.From.Username == nil { log.Printf("*** Not allowed (no user name): %s", update.Message.From.FirstName) return false } userId = *update.Message.From.Username if !isAvailableId(userId) { log.Printf("*** Id not allowed: %s", userId) return false } if update.HasMessage() { txt := *update.Message.Text if isVerbose { log.Printf("received telegram message: %s", txt) } if strings.HasPrefix(txt, "/") { var options map[string]interface{} = map[string]interface{}{ "reply_markup": bot.ReplyKeyboardMarkup{ Keyboard: allKeyboards, ResizeKeyboard: true, }, } if strings.HasPrefix(txt, "/"+lib.CommandStart) { message := lib.MessageStart if sent := b.SendMessage(update.Message.Chat.ID, message, options); !sent.Ok { log.Printf("*** Failed to send message: %s", *sent.Description) } } else if strings.HasPrefix(txt, "/"+lib.CommandTime) { time := lib.GetTimeString() queue <- time if sent := b.SendMessage(update.Message.Chat.ID, time, options); !sent.Ok { log.Printf("*** Failed to send message: %s", *sent.Description) } } else if strings.HasPrefix(txt, "/"+lib.CommandIP) { ip := strings.Join(status.IpAddresses(), ", ") queue <- ip if sent := b.SendMessage(update.Message.Chat.ID, ip, options); !sent.Ok { log.Printf("*** Failed to send message: %s", *sent.Description) } } else if strings.HasPrefix(txt, "/"+lib.CommandHelp) { message := lib.MessageHelp if sent := b.SendMessage(update.Message.Chat.ID, message, options); !sent.Ok { log.Printf("*** Failed to send message: %s", *sent.Description) } } else { log.Printf("*** No such command: %s", txt) message := fmt.Sprintf("No such command: %s", txt) if sent := b.SendMessage(update.Message.Chat.ID, message, options); !sent.Ok { log.Printf("*** Failed to send message: %s", *sent.Description) } } } else { queue <- txt } } return false }
/* Reads the FW info structure for the specified Storm from the chip,
 * and writes it to the specified fw_info pointer.
 */
static void ecore_read_fw_info(struct ecore_hwfn *p_hwfn,
			       struct ecore_ptt *p_ptt,
			       u8 storm_id,
			       struct fw_info *fw_info)
{
	struct storm_defs *storm = &s_storm_defs[storm_id];
	struct fw_info_location fw_info_location;
	u32 addr, i, *dest;

	/* Zero both structs so the caller gets well-defined output even if the
	 * location descriptor turns out to be invalid below.
	 */
	OSAL_MEMSET(&fw_info_location, 0, sizeof(fw_info_location));
	OSAL_MEMSET(fw_info, 0, sizeof(*fw_info));

	/* The fw_info_location descriptor sits at the very end of the Storm's
	 * fast-memory internal RAM; the RAM size differs by chip generation
	 * (E5 vs. BB/K2).
	 */
	addr = storm->sem_fast_mem_addr + SEM_FAST_REG_INT_RAM +
	       (ECORE_IS_E5(p_hwfn->p_dev) ?
		DWORDS_TO_BYTES(SEM_FAST_REG_INT_RAM_SIZE_E5) :
		DWORDS_TO_BYTES(SEM_FAST_REG_INT_RAM_SIZE_BB_K2)) -
	       sizeof(fw_info_location);

	/* Read the location descriptor from the chip, one dword at a time. */
	dest = (u32 *)&fw_info_location;
	for (i = 0; i < BYTES_TO_DWORDS(sizeof(fw_info_location)); i++,
	     addr += BYTES_IN_DWORD)
		dest[i] = ecore_rd(p_hwfn, p_ptt, addr);

	/* Only copy the FW info if the advertised size is sane; otherwise the
	 * zeroed fw_info from above is returned.
	 */
	if (fw_info_location.size > 0 &&
	    fw_info_location.size <= sizeof(*fw_info)) {
		addr = fw_info_location.grc_addr;
		dest = (u32 *)fw_info;
		for (i = 0; i < BYTES_TO_DWORDS(fw_info_location.size); i++,
		     addr += BYTES_IN_DWORD)
			dest[i] = ecore_rd(p_hwfn, p_ptt, addr);
	}
}
<gh_stars>10-100 package common import ( "errors" "net/http" "net/url" "strings" "github.com/cloudfoundry-incubator/notifications/uaa" ) type UAAUserNotFoundError struct { Err error } func (e UAAUserNotFoundError) Error() string { return e.Err.Error() } type UAADownError struct { Err error } func (e UAADownError) Error() string { return e.Err.Error() } type UAAGenericError struct { Err error } func (e UAAGenericError) Error() string { return e.Err.Error() } func UAAErrorFor(err error) error { switch err.(type) { case *url.Error: return UAADownError{errors.New("UAA is unavailable")} case uaa.Failure: failure := err.(uaa.Failure) if failure.Code() == http.StatusNotFound { if strings.Contains(failure.Message(), "Requested route") { return UAADownError{errors.New("UAA is unavailable")} } else { return UAAGenericError{errors.New("UAA Unknown 404 error message: " + failure.Message())} } } return UAADownError{errors.New(failure.Message())} default: return UAAGenericError{errors.New("UAA Unknown Error: " + err.Error())} } }
// <reponame>yunbaoi/swoole-src
#include "swoole_server.h"

#include <assert.h>

using swoole::network::Address;
using swoole::network::Socket;

namespace swoole {

// Returns a view of the current packet held in buffer_. Depending on the
// flags, the payload is either an out-of-line pointer (SW_EVENT_DATA_PTR),
// an assembled String object (SW_EVENT_DATA_OBJ_PTR, see prepare_packet()),
// or stored inline in the buffer itself.
PacketPtr MessageBus::get_packet() {
    PacketPtr pkt;
    if (buffer_->info.flags & SW_EVENT_DATA_PTR) {
        memcpy(&pkt, buffer_->data, sizeof(pkt));
    } else if (buffer_->info.flags & SW_EVENT_DATA_OBJ_PTR) {
        String *object;
        memcpy(&object, buffer_->data, sizeof(object));
        pkt.data = object->str;
        pkt.length = object->length;
    } else {
        pkt.data = buffer_->data;
        pkt.length = buffer_->info.len;
    }
    return pkt;
}

// Looks up (or lazily creates) the reassembly buffer for the message id of
// the chunk currently in buffer_. Returns nullptr when a non-initial chunk
// arrives for an unknown message id — that indicates an abnormal stream.
String *MessageBus::get_packet_buffer() {
    String *packet_buffer = nullptr;
    auto iter = packet_pool_.find(buffer_->info.msg_id);
    if (iter == packet_pool_.end()) {
        // Only the first chunk of a message may create a new pool entry.
        if (!buffer_->is_begin()) {
            return nullptr;
        }
        packet_buffer = make_string(buffer_->info.len, allocator_);
        packet_pool_.emplace(buffer_->info.msg_id, std::shared_ptr<String>(packet_buffer));
    } else {
        packet_buffer = iter->second.get();
    }
    return packet_buffer;
}

// Decides what to do after receiving one chunk: keep reading (SW_CONTINUE),
// yield back to the event loop (SW_WAIT), or finalize the packet (SW_READY).
ReturnCode MessageBus::prepare_packet(uint16_t &recv_chunk_count, String *packet_buffer) {
    recv_chunk_count++;
    if (!buffer_->is_end()) {
        /**
         * if the reactor thread sends too many chunks to the worker process,
         * the worker process may receive chunks all the time,
         * resulting in the worker process being unable to handle other tasks.
         * in order to make the worker process handle tasks fairly,
         * the maximum number of consecutive chunks received by the worker is limited.
         */
        if (recv_chunk_count >= SW_WORKER_MAX_RECV_CHUNK_COUNT) {
            swoole_trace_log(SW_TRACE_WORKER,
                             "worker process[%u] receives the chunk data to the maximum[%d], return to event loop",
                             SwooleG.process_id,
                             recv_chunk_count);
            return SW_WAIT;
        }
        return SW_CONTINUE;
    } else {
        /**
         * Because we don't want to split the EventData parameters into DataHead and data,
         * we store the value of the worker_buffer pointer in EventData.data.
         * The value of this pointer will be fetched in the Server::get_pipe_packet() function.
         */
        buffer_->info.flags |= SW_EVENT_DATA_OBJ_PTR;
        memcpy(buffer_->data, &packet_buffer, sizeof(packet_buffer));
        swoole_trace("msg_id=%ld, len=%u", buffer_->info.msg_id, buffer_->info.len);
        return SW_READY;
    }
}

// Reads one message (possibly spread over several chunks) from a stream-type
// pipe socket. Peeks the header first so non-chunked messages can be read in
// a single call; chunked messages are reassembled via get_packet_buffer().
// Returns the bytes read, SW_OK (0) to retry later, or SW_ERR on failure.
ssize_t MessageBus::read(Socket *sock) {
    ssize_t recv_n = 0;
    uint16_t recv_chunk_count = 0;
    DataHead *info = &buffer_->info;
    struct iovec buffers[2];

_read_from_pipe:
    // MSG_PEEK: inspect the header without consuming it, so the full
    // header+payload can be pulled in one read below.
    recv_n = recv(sock->get_fd(), info, sizeof(buffer_->info), MSG_PEEK);
    if (recv_n < 0) {
        if (sock->catch_error(errno) == SW_WAIT) {
            return SW_OK;
        }
        return SW_ERR;
    } else if (recv_n == 0) {
        swoole_warning("receive data from socket#%d returns 0", sock->get_fd());
        return SW_ERR;
    }

    if (!buffer_->is_chunked()) {
        // Single-chunk message: consume header and payload together.
        return sock->read(buffer_, sizeof(buffer_->info) + buffer_->info.len);
    }

    auto packet_buffer = get_packet_buffer();
    if (packet_buffer == nullptr) {
        swoole_error_log(SW_LOG_WARNING,
                         SW_ERROR_SERVER_WORKER_ABNORMAL_PIPE_DATA,
                         "abnormal pipeline data, msg_id=%ld, pipe_fd=%d, reactor_id=%d",
                         info->msg_id,
                         sock->get_fd(),
                         info->reactor_id);
        return SW_OK;
    }
    size_t remain_len = buffer_->info.len - packet_buffer->length;
    // Scatter read: header into buffer_, payload directly into the
    // reassembly buffer at its current write position.
    buffers[0].iov_base = info;
    buffers[0].iov_len = sizeof(buffer_->info);
    buffers[1].iov_base = packet_buffer->str + packet_buffer->length;
    buffers[1].iov_len = SW_MIN(buffer_size_ - sizeof(buffer_->info), remain_len);

    recv_n = readv(sock->get_fd(), buffers, 2);
    if (recv_n == 0) {
        swoole_warning("receive pipeline data error, pipe_fd=%d, reactor_id=%d", sock->get_fd(), info->reactor_id);
        return SW_ERR;
    }
    if (recv_n < 0 && sock->catch_error(errno) == SW_WAIT) {
        return SW_OK;
    }
    if (recv_n > 0) {
        packet_buffer->length += (recv_n - sizeof(buffer_->info));
        swoole_trace("append msgid=%ld, buffer=%p, n=%ld", buffer_->info.msg_id, packet_buffer, recv_n);
    }

    switch (prepare_packet(recv_chunk_count, packet_buffer)) {
    case SW_READY:
        return recv_n;
    case SW_CONTINUE:
        goto _read_from_pipe;
    case SW_WAIT:
        return SW_OK;
    default:
        assert(0);
        return SW_ERR;
    }
}

/**
 * Notice: only supports dgram type socket
 */
// Datagram variant of read(): each recv() delivers a whole chunk (header +
// payload), so no MSG_PEEK / readv scatter step is needed; chunk payloads
// are appended to the reassembly buffer until the message is complete.
ssize_t MessageBus::read_with_buffer(network::Socket *sock) {
    ssize_t recv_n;
    uint16_t recv_chunk_count = 0;

_read_from_pipe:
    recv_n = sock->read(buffer_, buffer_size_);
    if (recv_n < 0) {
        if (sock->catch_error(errno) == SW_WAIT) {
            return SW_OK;
        }
        return SW_ERR;
    } else if (recv_n == 0) {
        swoole_warning("receive data from socket#%d returns 0", sock->get_fd());
        return SW_ERR;
    }

    recv_chunk_count++;

    if (!buffer_->is_chunked()) {
        return recv_n;
    }

    String *packet_buffer = get_packet_buffer();
    if (packet_buffer == nullptr) {
        swoole_error_log(SW_LOG_WARNING,
                         SW_ERROR_SERVER_WORKER_ABNORMAL_PIPE_DATA,
                         "abnormal pipeline data, msg_id=%ld, pipe_fd=%d, reactor_id=%d",
                         buffer_->info.msg_id,
                         sock->get_fd(),
                         buffer_->info.reactor_id);
        return SW_ERR;
    }
    packet_buffer->append(buffer_->data, recv_n - sizeof(buffer_->info));

    switch (prepare_packet(recv_chunk_count, packet_buffer)) {
    case SW_READY:
        return recv_n;
    case SW_CONTINUE:
        goto _read_from_pipe;
    case SW_WAIT:
        return SW_OK;
    default:
        assert(0);
        return SW_ERR;
    }
}

// Sends a message over the bus. Small payloads go out as a single
// header+payload writev; larger ones are split into header-prefixed chunks
// marked with BEGIN/CHUNK/END flags for reassembly on the receiving side.
// Returns true on full delivery, false on failure.
bool MessageBus::write(Socket *sock, SendData *resp) {
    const char *payload = resp->data;
    uint32_t l_payload = resp->info.len;
    off_t offset = 0;
    uint32_t copy_n;

    struct iovec iov[2];

    uint64_t msg_id = id_generator_();
    uint32_t max_length = buffer_size_ - sizeof(resp->info);
    resp->info.msg_id = msg_id;

    // Use the event loop's non-blocking writev when a reactor is running;
    // otherwise fall back to a blocking write.
    auto send_fn = [](Socket *sock, const iovec *iov, size_t iovcnt) {
        if (swoole_event_is_available()) {
            return swoole_event_writev(sock, iov, iovcnt);
        } else {
            return sock->writev_blocking(iov, iovcnt);
        }
    };

    // Empty payload: send the header alone.
    if (l_payload == 0 || payload == nullptr) {
        resp->info.flags = 0;
        resp->info.len = 0;
        iov[0].iov_base = &resp->info;
        iov[0].iov_len = sizeof(resp->info);
        return send_fn(sock, iov, 1) == (ssize_t) iov[0].iov_len;
    }

    // Payload fits in one buffer: single writev of header + payload.
    if (!always_chunked_transfer_ && l_payload <= max_length) {
        resp->info.flags = 0;
        resp->info.len = l_payload;
        iov[0].iov_base = &resp->info;
        iov[0].iov_len = sizeof(resp->info);
        iov[1].iov_base = (void *) payload;
        iov[1].iov_len = l_payload;
        return send_fn(sock, iov, 2) == (ssize_t)(sizeof(resp->info) + l_payload);
    }

    // Chunked transfer: every chunk repeats the header; flags track position.
    resp->info.flags = SW_EVENT_DATA_CHUNK | SW_EVENT_DATA_BEGIN;
    resp->info.len = l_payload;

    while (l_payload > 0) {
        if (l_payload > max_length) {
            copy_n = max_length;
        } else {
            resp->info.flags |= SW_EVENT_DATA_END;
            copy_n = l_payload;
        }
        iov[0].iov_base = &resp->info;
        iov[0].iov_len = sizeof(resp->info);
        iov[1].iov_base = (void *) (payload + offset);
        iov[1].iov_len = copy_n;
        swoole_trace("finish, type=%d|len=%u", resp->info.type, copy_n);
        if (send_fn(sock, iov, 2) < 0) {
#ifdef __linux__
            // ENOBUFS: shrink the chunk size and retry the same offset; the
            // END flag must be cleared since this chunk is being re-split.
            if (errno == ENOBUFS && max_length > SW_BUFFER_SIZE_STD) {
                max_length = SW_IPC_BUFFER_SIZE;
                if (resp->info.flags & SW_EVENT_DATA_END) {
                    resp->info.flags &= ~SW_EVENT_DATA_END;
                }
                continue;
            }
#endif
            return false;
        }
        // Only the first successfully sent chunk carries the BEGIN flag.
        if (resp->info.flags & SW_EVENT_DATA_BEGIN) {
            resp->info.flags &= ~SW_EVENT_DATA_BEGIN;
        }
        l_payload -= copy_n;
        offset += copy_n;
    }
    return true;
}

}  // namespace swoole
/* * This file is part of the GROMACS molecular simulation package. * * Copyright 1991- The GROMACS Authors * and the project initiators Erik Lindahl, Berk Hess and David van der Spoel. * Consult the AUTHORS/COPYING files and https://www.gromacs.org for details. * * GROMACS is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2.1 * of the License, or (at your option) any later version. * * GROMACS is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with GROMACS; if not, see * https://www.gnu.org/licenses, or write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * If you want to redistribute modifications to GROMACS, please * consider that scientific software is very special. Version * control is crucial - bugs must be traceable. We will be happy to * consider code for inclusion in the official distribution, but * derived work must not be called official GROMACS. Details are found * in the README & COPYING files - if they are missing, get the * official version at https://www.gromacs.org. * * To help us fund GROMACS development, we humbly ask that you cite * the research papers on the package. Check out https://www.gromacs.org. */ /*! \libinternal \file * * * \brief * This file contains datatypes and function declarations necessary for mdrun to interface with the pull code. 
* * \author Berk Hess * * \inlibraryapi */ #ifndef GMX_PULLING_PULL_H #define GMX_PULLING_PULL_H #include <cstdio> #include <optional> #include "gromacs/math/vectypes.h" #include "gromacs/mdtypes/pull_params.h" #include "gromacs/utility/arrayref.h" #include "gromacs/utility/basedefinitions.h" #include "gromacs/utility/real.h" struct gmx_mtop_t; struct gmx_output_env_t; struct pull_coord_work_t; struct pull_params_t; struct pull_t; struct t_commrec; struct t_filenm; struct t_inputrec; struct t_pbc; class t_state; enum class PbcType; namespace gmx { template<typename> class ArrayRef; class ForceWithVirial; class LocalAtomSetManager; } // namespace gmx /*! \brief Returns the units of the pull coordinate. * * \param[in] pcrd The pull coordinate to query the units for. * \returns a string with the units of the coordinate. */ const char* pull_coordinate_units(const t_pull_coord& pcrd); /*! \brief Returns the conversion factor from the pull coord init/rate unit to internal value unit. * * \param[in] pcrd The pull coordinate to get the conversion factor for. * \returns the conversion factor. */ double pull_conversion_factor_userinput2internal(const t_pull_coord& pcrd); /*! \brief Returns the conversion factor from the pull coord internal value unit to the init/rate unit. * * \param[in] pcrd The pull coordinate to get the conversion factor for. * \returns the conversion factor. */ double pull_conversion_factor_internal2userinput(const t_pull_coord& pcrd); /*! \brief Get the value for pull coord coord_ind. * * \param[in,out] pull The pull struct. * \param[in] coordIndex Index of the pull coordinate in the list of coordinates * \param[in] pbc Information structure about periodicity. * \returns the value of the pull coordinate. */ double get_pull_coord_value(pull_t* pull, int coordIndex, const t_pbc& pbc); /*! \brief Registers the provider of an external potential for a coordinate. * * This function is only used for checking the consistency of the pull setup. 
 * For each pull coordinate of type external-potential, selected by the user
 * in the mdp file, there has to be a module that provides this potential.
 * The module registers itself as the provider by calling this function.
 * The passed \p provider string has to match the string that the user
 * passed with the potential-provider pull coordinate mdp option.
 * This function should be called after init_pull has been called and before
 * pull_potential is called for the first time.
 * This function does many consistency checks and when it returns and the
 * first call to do_potential passes, the pull setup is guaranteed to be
 * correct (unless the module doesn't call apply_external_pull_coord_force
 * every step or calls it with incorrect forces). This registering function
 * will exit with a (release) assertion failure when used incorrectly or
 * with a fatal error when the user (mdp) input is inconsistent.
 *
 * Thread-safe for simultaneous registration from multiple threads.
 *
 * \param[in,out] pull         The pull struct.
 * \param[in]     coord_index  The pull coordinate index to register the external potential for.
 * \param[in]     provider     Provider string, should match the potential-provider pull coordinate mdp option.
 */
void register_external_pull_potential(struct pull_t* pull, int coord_index, const char* provider);


/*! \brief Apply forces of an external potential to a pull coordinate.
 *
 * This function applies the external scalar force \p coord_force to
 * the pull coordinate. The corresponding potential energy
 * value should be added to the pull or the module's potential energy term
 * separately by the module itself.
 * This function should be called after pull_potential() has been called and
 * before calling pull_apply_forces().
 *
 * \param[in,out] pull         The pull struct.
 * \param[in]     coord_index  The pull coordinate index to set the force for.
 * \param[in]     coord_force  The scalar force for the pull coordinate.
*/ void apply_external_pull_coord_force(pull_t* pull, int coord_index, double coord_force); /*! \brief Set the all the pull forces to zero. * * \param pull The pull group. */ void clear_pull_forces(pull_t* pull); /*! \brief Computes the COM pull forces, returns the potential * * The function computes the COMs of the pull groups and the potentials and forces * acting on the pull groups, except for external potential coordinates, which forces * are set by calls to \p apply_external_pull_coord_force() after calling this function. * To finalize the pull application, a call to \p pull_apply_forces() is required to * distribute the forces on the COMs to the atoms. * * Note: performance global MPI communication, potentially on a subset of the MPI ranks. * * \param[in,out] pull The pull struct. * \param[in] masses Atoms masses. * \param[in] pbc Information struct about periodicity. * \param[in] cr Struct for communication info. * \param[in] t Time. * \param[in] lambda The value of lambda in FEP calculations. * \param[in] x Positions. * \param[out] dvdlambda Pull contribution to dV/d(lambda). * * \returns The pull potential energy. */ real pull_potential(pull_t* pull, gmx::ArrayRef<const real> masses, const t_pbc& pbc, const t_commrec* cr, double t, real lambda, gmx::ArrayRef<const gmx::RVec> x, real* dvdlambda); /*! \brief Applies the computed COM pull forces to the atoms and accumulates the virial * * When \p force!=nullptr, distributes the pull force on the COM of each normal pull * group to the atoms in the group (using mass weighting). * * Also performs the recursion for transformation pull coordinates, when present, * distributing the force on transformation coordinates to the COM of groups involved. * * This function should be called after calling \p pull_potential() and also after * other modules, e.g. AWH, have called \p apply_external_pull_coord_force(). * * Note: this function is fully local and does not perform MPI communication. 
* * \param[in,out] pull The pull struct. * \param[in] masses Atoms masses. * \param[in] cr Struct for communication info. * \param[in,out] force Forces and virial. */ void pull_apply_forces(struct pull_t* pull, gmx::ArrayRef<const real> masses, const t_commrec* cr, gmx::ForceWithVirial* force); /*! \brief Constrain the coordinates xp in the directions in x * and also constrain v when v != NULL. * * \param[in,out] pull The pull data. * \param[in] masses Atoms masses. * \param[in] pbc Information struct about periodicity. * \param[in] cr Struct for communication info. * \param[in] dt The time step length. * \param[in] t The time. * \param[in] x Positions. * \param[in,out] xp Updated x, can be NULL. * \param[in,out] v Velocities, which may get a pull correction. * \param[in,out] vir The virial, which, if != NULL, gets a pull correction. */ void pull_constraint(struct pull_t* pull, gmx::ArrayRef<const real> masses, const t_pbc& pbc, const t_commrec* cr, double dt, double t, gmx::ArrayRef<gmx::RVec> x, gmx::ArrayRef<gmx::RVec> xp, gmx::ArrayRef<gmx::RVec> v, tensor vir); /*! \brief Make a selection of the home atoms for all pull groups. * Should be called at every domain decomposition. * * \param cr Structure for communication info. * \param pull The pull group. */ void dd_make_local_pull_groups(const t_commrec* cr, pull_t* pull); /*! \brief Allocate, initialize and return a pull work struct. * * \param fplog General output file, normally md.log. * \param pull_params The pull input parameters containing all pull settings. * \param ir The inputrec. * \param mtop The topology of the whole system. * \param cr Struct for communication info. * \param atomSets The manager that handles the pull atom sets * \param lambda FEP lambda. */ struct pull_t* init_pull(FILE* fplog, const pull_params_t* pull_params, const t_inputrec* ir, const gmx_mtop_t& mtop, const t_commrec* cr, gmx::LocalAtomSetManager* atomSets, real lambda); /*! \brief Close the pull output files and delete pull. 
* * \param pull The pull data structure. */ void finish_pull(struct pull_t* pull); /*! \brief Calculates centers of mass all pull groups. * * \param[in] cr Struct for communication info. * \param[in] pull The pull data structure. * \param[in] masses Atoms masses. * \param[in] pbc Information struct about periodicity. * \param[in] t Time, only used for cylinder ref. * \param[in] x The local positions. * \param[in,out] xp Updated x, can be NULL. * */ void pull_calc_coms(const t_commrec* cr, pull_t* pull, gmx::ArrayRef<const real> masses, const t_pbc& pbc, double t, gmx::ArrayRef<const gmx::RVec> x, gmx::ArrayRef<gmx::RVec> xp); /*! \brief Margin for checking pull group PBC distances compared to half the box size */ static constexpr real c_pullGroupPbcMargin = 0.9; /*! \brief Threshold (as a factor of half the box size) for accepting pull groups without explicitly set refatom */ static constexpr real c_pullGroupSmallGroupThreshold = 0.5; /*! \brief Checks whether all groups that use a reference atom are within PBC restrictions * * Groups that use a reference atom for determining PBC should have all their * atoms within half the box size from the PBC atom. The box size is used * per dimension for rectangular boxes, but can be a combination of * dimensions for triclinic boxes, depending on which dimensions are * involved in the pull coordinates a group is involved in. A margin is specified * to ensure that atoms are not too close to the maximum distance. * * Should be called without MPI parallelization and after pull_calc_coms() * has been called at least once. * * \param[in] pull The pull data structure * \param[in] x The coordinates * \param[in] pbc Information struct about periodicity * \param[in] pbcMargin The minimum margin (as a fraction) to half the box size * \returns -1 when all groups obey PBC or the first group index that fails PBC */ int pullCheckPbcWithinGroups(const pull_t& pull, gmx::ArrayRef<const gmx::RVec> x, const t_pbc& pbc, real pbcMargin); /*! 
\brief Checks whether a specific group that uses a reference atom is within PBC restrictions * * Groups that use a reference atom for determining PBC should have all their * atoms within half the box size from the PBC atom. The box size is used * per dimension for rectangular boxes, but can be a combination of * dimensions for triclinic boxes, depending on which dimensions are * involved in the pull coordinates a group is involved in. A margin is specified * to ensure that atoms are not too close to the maximum distance. Only one group is * checked. * * Should be called without MPI parallelization and after pull_calc_coms() * has been called at least once. * * \param[in] pull The pull data structure * \param[in] x The coordinates * \param[in] pbc Information struct about periodicity * \param[in] groupNr The index of the group (in pull.group[]) to check * \param[in] pbcMargin The minimum margin (as a fraction) to half the box size * \returns true if the group obeys PBC otherwise false */ bool pullCheckPbcWithinGroup(const pull_t& pull, gmx::ArrayRef<const gmx::RVec> x, const t_pbc& pbc, int groupNr, real pbcMargin); /*! \brief Returns if we have pull coordinates with potential pulling. * * \param[in] pull The pull data structure. */ bool pull_have_potential(const pull_t& pull); /*! \brief Returns if we have pull coordinates with constraint pulling. * * \param[in] pull The pull data structure. */ bool pull_have_constraint(const pull_t& pull); /*! \brief Returns if inputrec has pull coordinates with constraint pulling. * * \param[in] pullParameters Pulling input parameters from input record. */ bool pull_have_constraint(const pull_params_t& pullParameters); /*! \brief Returns the maxing distance for pulling * * For distance geometries, only dimensions with pcrd->params[dim]=1 * are included in the distance calculation. * For directional geometries, only dimensions with pcrd->vec[dim]!=0 * are included in the distance calculation. 
* * \param[in] pcrd Pulling data structure * \param[in] pbc Information on periodic boundary conditions * \returns The maximume distance */ real max_pull_distance2(const pull_coord_work_t& pcrd, const t_pbc& pbc); /*! \brief Sets the previous step COM in pull to the current COM, and optionally * updates it in the provided ArrayRef * * \param[in] pull The COM pull force calculation data structure * \param[in] comPreviousStep The COM of the previous step of each pull group */ void updatePrevStepPullCom(pull_t* pull, std::optional<gmx::ArrayRef<double>> comPreviousStep); /*! \brief Returns a copy of the previous step pull COM as flat vector * * Used for modular simulator checkpointing. Allows to keep the * implementation details of pull_t hidden from its users. * * \param[in] pull The COM pull force calculation data structure * \return A copy of the previous step COM */ std::vector<double> prevStepPullCom(const pull_t* pull); /*! \brief Set the previous step pull COM from a flat vector * * Used to restore modular simulator checkpoints. Allows to keep the * implementation details of pull_t hidden from its users. * * \param[in] pull The COM pull force calculation data structure * \param[in] prevStepPullCom The previous step COM to set */ void setPrevStepPullCom(pull_t* pull, gmx::ArrayRef<const double> prevStepPullCom); /*! \brief Allocates, initializes and communicates the previous step pull COM (if that option is set to true). * * If ir->pull->bSetPbcRefToPrevStepCOM is not true nothing is done. * * \param[in] ir The input options/settings of the simulation. * \param[in] pull_work The COM pull force calculation data structure * \param[in] masses Atoms masses. * \param[in] state The local (to this rank) state. * \param[in] state_global The global state. * \param[in] cr Struct for communication info. * \param[in] startingFromCheckpoint Is the simulation starting from a checkpoint? 
*/ void preparePrevStepPullCom(const t_inputrec* ir, pull_t* pull_work, gmx::ArrayRef<const real> masses, t_state* state, const t_state* state_global, const t_commrec* cr, bool startingFromCheckpoint); /*! \brief Initializes the COM of the previous step (set to initial COM) * * \param[in] cr Struct for communication info. * \param[in] pull The pull data structure. * \param[in] masses Atoms masses. * \param[in] pbc Information struct about periodicity. * \param[in] x The local positions. */ void initPullComFromPrevStep(const t_commrec* cr, pull_t* pull, gmx::ArrayRef<const real> masses, const t_pbc& pbc, gmx::ArrayRef<const gmx::RVec> x); /*! \brief Initializes the previous step pull COM for new simulations (no reading from checkpoint). * * \param[in] cr Struct for communication info. * \param[in] pull_work The COM pull force calculation data structure. * \param[in] masses Atoms masses. * \param[in] x The local positions. * \param[in] box The current box matrix. * \param[in] pbcType The type of periodic boundary conditions. * \param[in] comPreviousStep The COM of the previous step of each pull group. */ void preparePrevStepPullComNewSimulation(const t_commrec* cr, pull_t* pull_work, gmx::ArrayRef<const real> masses, gmx::ArrayRef<const gmx::RVec> x, const matrix box, PbcType pbcType, std::optional<gmx::ArrayRef<double>>&& comPreviousStep); #endif
<reponame>wivlaro/newton-dynamics ///////////////////////////////////////////////////////////////////////////// // Name: src/cocoa/data.cpp // Purpose: Various data // Author: AUTHOR // Modified by: // Created: ??/??/98 // RCS-ID: $Id$ // Copyright: (c) AUTHOR // Licence: wxWindows licence /////////////////////////////////////////////////////////////////////////////
/**
 * Provider information including identifiers, name, and contact information.
 *
 * <p>Plain serializable bean intended for JSON (de)serialization; unknown
 * JSON properties are ignored on input.
 *
 * @author Ioana
 * @version 1.0
 * @updated 23-Nov-2015 6:29:30 PM
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class Provider implements Serializable {

    // Explicit serial version UID: the class is Serializable, so pin the
    // serialized form instead of relying on the compiler-generated default.
    private static final long serialVersionUID = 1L;

    /**
     * Name of individual provider to delivered the services, formatted for display.
     * Note that the treating provider's name may be optional if the sending system
     * incorrectly asserts that the software used may be an author.
     */
    private String providerName = "Individual Provider Name";

    /**
     * Provider organization name.
     */
    private String organizationName = "Provider Organization Name";

    /**
     * This element may be used incorrectly to specify the author. It should not be
     * used but was included here for discussion.
     */
    private String softwareUse = "Software used";

    /**
     * This optional identifier represents the National Provider Identifer (NPI).
     * Since CDA documents are not guaranteed to support it, this id reserved for
     * future uses of this structure.
     */
    private String nationalProviderId;

    /** Contact information; bound to the JSON property "contactInfo". */
    @JsonProperty("contactInfo")
    private ContactInfo m_ContactInfo;

    /** Default constructor (required for JSON deserialization). */
    public Provider(){

    }

    /**
     * @deprecated Empty finalizer kept only for source/binary compatibility;
     * it performs no cleanup and finalization is deprecated in the JDK.
     */
    @Deprecated
    public void finalize() throws Throwable {

    }

    /** @return the display-formatted individual provider name. */
    public String getProviderName() {
        return providerName;
    }

    /** @param providerName the display-formatted individual provider name. */
    public void setProviderName(String providerName) {
        this.providerName = providerName;
    }

    /** @return the provider organization name. */
    public String getOrganizationName() {
        return organizationName;
    }

    /** @param organizationName the provider organization name. */
    public void setOrganizationName(String organizationName) {
        this.organizationName = organizationName;
    }

    /** @return the (discouraged) software-used value. */
    public String getSoftwareUse() {
        return softwareUse;
    }

    /** @param softwareUse the (discouraged) software-used value. */
    public void setSoftwareUse(String softwareUse) {
        this.softwareUse = softwareUse;
    }

    /** @return the National Provider Identifier (NPI), may be null. */
    public String getNationalProviderId() {
        return nationalProviderId;
    }

    /** @param nationalProviderId the National Provider Identifier (NPI). */
    public void setNationalProviderId(String nationalProviderId) {
        this.nationalProviderId = nationalProviderId;
    }

    /** @return the contact information, may be null. */
    public ContactInfo getM_ContactInfo() {
        return m_ContactInfo;
    }

    /** @param m_ContactInfo the contact information. */
    public void setM_ContactInfo(ContactInfo m_ContactInfo) {
        this.m_ContactInfo = m_ContactInfo;
    }

}
A very bizarre New Zealand cat named Brigit is stealing men's underwear from god knows where and bringing them back to her owner's house, New Zealand Herald reports. Brigit's owner Sarah Nathan posted a photo of "Brigit's haul from the last two months" on Facebook Friday, saying, "Now it's getting silly" and that "every morning we wake up to more. I've put notes in every letterbox in the street. Someone must be missing this stuff. Please share if you know someone who lives in the George St area." Nathan, who later posted a callout on Reddit to help the owners find their underwear, told the Herald that all the underwear her cat has been bringing home for two of the six months they've lived at their current home is men's underwear, which she finds "really, really weird. She's got really specific taste." Nathan also added that at their previous home, the cat would bring home more of a potpourri of items like "men's undies, women's undies, togs, she even brought home a hockey shin pad and a jumper," but that now she's very into bringing men's underwear onto Nathan's bed (or sometimes they're "stuck in the fence" outside). Whoever's underwear this is, hopefully they'll find them again and also realize this is hilarious. Follow Lane on Twitter and Instagram.
import scipy.integrate as spint
import numpy as np

from astropy.cosmology import FlatLambdaCDM
import astropy.units as u

# Astropy cosmology instance, used below only to compute ages; distances are
# computed by direct integration using the module-level constants. The
# parameters (H0=70, Om0=0.3) are kept consistent between the two.
astropy_cosmo = FlatLambdaCDM(H0=70 * u.km / u.s / u.Mpc, Tcmb0=2.725 * u.K, Om0=0.3)

# -------- Define cosmology -------- #
# Flat Lambda CDM
H0 = 70.0  # km/s/Mpc
omega_m0 = 0.3  # matter density parameter today
omega_lam0 = 1.0 - omega_m0  # flat universe: dark energy makes up the rest
speed_of_light_kms = 299792.458  # km per s


def gen_lookup_table():
    """Write a redshift lookup table to 'dl_lookup_table.txt'.

    Columns: z, luminosity distance [cm], proper distance [cm], age [Gyr],
    for z from 0.0001 to 10.0 in steps of 0.0001.
    """

    zrange = np.arange(0.0001, 10.0001, 0.0001)

    # Open a txt file for saving
    with open('dl_lookup_table.txt', 'w') as fh:

        fh.write('# z dl_cm dp_cm age_gyr' + '\n')

        for j in range(len(zrange)):

            z = zrange[j]
            print("Redshift:", z, end='\r')  # single-line progress indicator

            # Get both distances
            dp_mpc = proper_distance(z)  # in Mpc
            dp_cm = dp_mpc * 3.086e24  # convert Mpc to cm
            dl_cm = dp_cm * (1+z)  # convert to lum dist

            # now get age
            age_at_z = astropy_cosmo.age(z).value  # in Gyr

            # Adjacent string literals concatenate, so each value is
            # separated by a single space in the output line.
            fh.write('{:.4f}'.format(z) + ' '
                     '{:.8e}'.format(dl_cm) + ' '
                     '{:.8e}'.format(dp_cm) + ' '
                     '{:.5e}'.format(age_at_z) + '\n')

    print("Lookup table saved.")

    return None


def print_info():
    """Print the assumed cosmological parameters to stdout."""

    print("Flat Lambda CDM cosmology assumed.")
    print("H0: ", H0, "km/s/Mpc")
    print("Omega_m:", omega_m0)
    print("Omega_lambda:", "{:.3f}".format(omega_lam0))

    return None


def proper_distance(redshift):
    """
    This function will integrate 1/(a*a*H)
    between scale factor at emission to scale factor of 1.0.

    Will return proper distance in megaparsecs.

    NOTE(review): scipy.integrate.quadrature is deprecated and removed in
    SciPy >= 1.12; migrating to scipy.integrate.quad should be considered.
    """

    ae = 1 / (1 + redshift)  # scale factor at emission

    # Integrand 1/(a^2 H(a)); the curvature term is zero here because
    # omega_m0 + omega_lam0 == 1, but it is kept for generality.
    p = lambda a: 1/(a*a*H0*np.sqrt((omega_m0/a**3) + omega_lam0 + ((1 - omega_m0 - omega_lam0)/a**2)))
    dp = spint.quadrature(p, ae, 1.0)

    # quadrature returns (value, error); c/H0 carries the Mpc units
    # given H0 in km/s/Mpc.
    dp = dp[0] * speed_of_light_kms

    return dp


def luminosity_distance(redshift):
    """
    Returns luminosity distance in megaparsecs for a given redshift.
    """

    # Get proper distance and multiply by (1+z)
    dp = proper_distance(redshift)  # returns answer in Mpc
    dl = dp * (1+redshift)  # dl also in Mpc

    return dl


def apply_redshift(restframe_wav, restframe_lum, redshift):
    """Redshift a rest-frame spectrum.

    Wavelengths are stretched by (1+z) and luminosity is converted to
    observed flux via the luminosity distance. The extra 1/(1+z) factor
    presumably accounts for per-wavelength flux density — verify against
    the caller's conventions.
    """

    dl = luminosity_distance(redshift)  # returns dl in Mpc
    dl = dl * 3.086e24  # convert to cm

    redshifted_wav = restframe_wav * (1 + redshift)
    redshifted_flux = restframe_lum / (4 * np.pi * dl * dl * (1 + redshift))

    return redshifted_wav, redshifted_flux


if __name__ == '__main__':
    gen_lookup_table()
<gh_stars>1-10 /* This program is distributed under the terms of the 'MIT license'. The text of this licence follows... Copyright (c) 2004 J.D.Medhurst (a.k.a. Tixy) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /** @file @brief Implementation of ITU-T (formerly CCITT) Recomendation %G726 */ #include "common.h" #include "G726.h" #include "G711.h" /** @defgroup g726_section4 Internal - Individual functions from Section 4 of G726 @ingroup g726 @{ */ /** @defgroup g726_section4m Internal - Range checking macros @ingroup g726_section4 Macros for checking the range of variables used within the codec algorithm. They are also useful as they indicate the type of the variable being checked. 
@{ */ /** Check that a signed magnitude value lies entirely withing the given number of bits @param x The value @param bits Number of bits */ #define CHECK_SM(x,bits) ASSERT_DEBUG(((x)>>bits)==0) /** Check that a unsigned magnitude value lies entirely withing the given number of bits @param x The value @param bits Number of bits */ #define CHECK_UM(x,bits) ASSERT_DEBUG(((x)>>bits)==0) /** Check that a twos compliment value lies entirely withing the given number of bits @param x The value @param bits Number of bits */ #define CHECK_TC(x,bits) ASSERT_DEBUG(((x)>>(bits-1))==((x)<0?-1:0)) /** Check that a float value lies entirely withing the given number of bits @param x The value @param bits Number of bits */ #define CHECK_FL(x,bits) ASSERT_DEBUG(((x)>>bits)==0) /** Check that an unsigned integer value lies entirely withing the given number of bits @param x The value @param bits Number of bits */ #define CHECK_UNSIGNED(x,bits) ASSERT_DEBUG(((x)>>bits)==0) /** @} */ // End of group /** @brief EXPAND function from %G726 Section 4.2.1 - Input PCM format conversion and difference signal computation */ static void EXPAND(unsigned S,unsigned LAW,int& SL) { CHECK_UNSIGNED(S,8); CHECK_UNSIGNED(LAW,1); int linear; if(LAW) linear = G711::ALawDecode(S); else linear = G711::ULawDecode(S); SL = linear>>2; CHECK_TC(SL,14); } /** @brief SUBTA function from %G726 Section 4.2.1 - Input PCM format conversion and difference signal computation */ inline static void SUBTA(int SL,int SE,int& D) { CHECK_TC(SL,14); CHECK_TC(SE,15); D = SL-SE; CHECK_TC(D,16); } /** @brief LOG function from %G726 Section 4.2.2 - Adaptive quantizer */ static void LOG(int D,unsigned& DL,int& DS) { CHECK_TC(D,16); DS = D>>15; unsigned DQM = (D<0) ? 
-D : D; DQM &= 0x7fff; unsigned EXP = 0; unsigned x = DQM; if(x>=(1<<8)) { EXP |= 8; x >>= 8; } if(x>=(1<<4)) { EXP |= 4; x >>= 4; } if(x>=(1<<2)) { EXP |= 2; x >>= 2; } EXP |= x>>1; unsigned MANT = ((DQM<<7)>>EXP)&0x7f; DL = (EXP<<7) + MANT; CHECK_UM(DL,11); CHECK_TC(DS,1); } /** @brief QUAN function from %G726 Section 4.2.2 - Adaptive quantizer */ static void QUAN(unsigned RATE,int DLN,int DS,unsigned& I) { CHECK_TC(DLN,12); CHECK_TC(DS,1); int x; if(RATE==2) x = (DLN>=261); else { static const int16_t quan3[4] = {8,218,331,0x7fff}; static const int16_t quan4[8] = {3972-0x1000,80,178,246,300,349,400,0x7fff}; static const int16_t quan5[16] = {3974-0x1000,4080-0x1000,68,139,198,250,298,339,378,413,445,475,502,528,553,0x7fff}; static const int16_t* const quan[3] = {quan3,quan4,quan5}; const int16_t* levels = quan[RATE-3]; const int16_t* levels0 = levels; while(DLN>=*levels++) {} x = levels-levels0-1; if(!x) x = ~DS; } int mask = (1<<RATE)-1; I = (x^DS)&mask; CHECK_UNSIGNED(I,RATE); } /** @brief SUBTB function from %G726 Section 4.2.2 - Adaptive quantizer */ inline static void SUBTB(unsigned DL,unsigned Y,int& DLN) { CHECK_UM(DL,11); CHECK_UM(Y,13); DLN = DL-(Y>>2); CHECK_TC(DLN,12); } /** @brief ADDA function from %G726 Section 4.2.3 - Inverse adaptive quantizer */ inline static void ADDA(int DQLN,unsigned Y,int& DQL) { CHECK_TC(DQLN,12); CHECK_UM(Y,13); DQL = DQLN+(Y>>2); CHECK_TC(DQL,12); } /** @brief ANTILOG function from %G726 Section 4.2.3 - Inverse adaptive quantizer */ inline static void ANTILOG(int DQL,int DQS,unsigned& DQ) { CHECK_TC(DQL,12); CHECK_TC(DQS,1); unsigned DEX = (DQL >> 7) & 15; unsigned DMN = DQL & 127; unsigned DQT = (1 << 7) + DMN; unsigned DQMAG; if(DQL>=0) DQMAG = (DQT << 7) >> (14 - DEX); else DQMAG = 0; DQ = DQS ? 
DQMAG+(1<<15) : DQMAG; CHECK_SM(DQ,16); } /** @brief RECONST function from %G726 Section 4.2.3 - Inverse adaptive quantizer */ inline static void RECONST(unsigned RATE,unsigned I,int& DQLN,int& DQS) { CHECK_UNSIGNED(I,RATE); // Tables 11-14 static const int16_t reconst2[2] = {116,365}; static const int16_t reconst3[4] = {2048-4096,135,273,373}; static const int16_t reconst4[8] = {2048-4096,4,135,213,273,323,373,425}; static const int16_t reconst5[16] = {2048-4096,4030-4096,28,104,169,224,274,318,358,395,429,459,488,514,539,566}; static const int16_t* const reconst[4] = {reconst2,reconst3,reconst4,reconst5}; int x = I; int m = 1<<(RATE-1); if(x&m) { DQS = -1; x = ~x; } else DQS = 0; DQLN = reconst[RATE-2][x&(m-1)]; CHECK_TC(DQLN,12); CHECK_TC(DQS,1); } /** @brief FILTD function from %G726 Section 4.2.4 - Quantizer scale factor adaptation */ inline static void FILTD(int WI,unsigned Y,unsigned& YUT) { CHECK_TC(WI,12); CHECK_UM(Y,13); int DIF = (WI<<5)-Y; int DIFSX = DIF>>5; YUT = (Y+DIFSX); // & 8191 CHECK_UM(YUT,13); } /** @brief FILTE function from %G726 Section 4.2.4 - Quantizer scale factor adaptation */ inline static void FILTE(unsigned YUP,unsigned YL,unsigned& YLP) { CHECK_UM(YUP,13); CHECK_UM(YL,19); int DIF = (YUP<<6)-YL; int DIFSX = DIF>>6; YLP = (YL+DIFSX); // & 524287 CHECK_UM(YLP,19); } /** @brief FUNCTW function from %G726 Section 4.2.4 - Quantizer scale factor adaptation */ inline static void FUNCTW(unsigned RATE,unsigned I,int& WI) { CHECK_UNSIGNED(I,RATE); static const int16_t functw2[2] = {4074-4096,439}; static const int16_t functw3[4] = {4092-4096,30,137,582}; static const int16_t functw4[8] = {4084-4096,18,41,64,112,198,355,1122}; static const int16_t functw5[16] = {14,14,24,39,40,41,58,100,141,179,219,280,358,440,529,696}; static const int16_t* const functw[4] = {functw2,functw3,functw4,functw5}; unsigned signMask = 1<<(RATE-1); unsigned n = (I&signMask) ? 
(2*signMask-1)-I : I; WI = functw[RATE-2][n]; CHECK_TC(WI,12); } /** @brief LIMB function from %G726 Section 4.2.4 - Quantizer scale factor adaptation */ inline static void LIMB(unsigned YUT,unsigned& YUP) { CHECK_UM(YUT,13); unsigned GEUL = (YUT+11264)&(1<<13); unsigned GELL = (YUT+15840)&(1<<13); if(GELL) YUP = 544; else if (!GEUL) YUP = 5120; else YUP = YUT; CHECK_UM(YUP,13); } /** @brief MIX function from %G726 Section 4.2.4 - Quantizer scale factor adaptation */ inline static void MIX(unsigned AL,unsigned YU,unsigned YL,unsigned& Y) { CHECK_UM(AL,7); CHECK_UM(YU,13); CHECK_UM(YL,19); int DIF = YU-(YL>>6); int PROD = DIF*AL; if(DIF<0) PROD += (1<<6)-1; // Force round towards zero for following shift PROD >>= 6; Y = ((YL>>6)+PROD); // & 8191; CHECK_UM(Y,13); } /** @brief FILTA function from %G726 Section 4.2.5 - Adaptation speed control */ inline static void FILTA(unsigned FI,unsigned DMS,unsigned& DMSP) { CHECK_UM(FI,3); CHECK_UM(DMS,12); int DIF = (FI<<9)-DMS; int DIFSX = (DIF>>5); DMSP = (DIFSX+DMS); // & 4095; CHECK_UM(DMSP,12); } /** @brief FILTB function from %G726 Section 4.2.5 - Adaptation speed control */ inline static void FILTB(unsigned FI,unsigned DML,unsigned& DMLP) { CHECK_UM(FI,3); CHECK_UM(DML,14); int DIF = (FI<<11)-DML; int DIFSX = (DIF>>7); DMLP = (DIFSX+DML); // & 16383; CHECK_UM(DMLP,14); } /** @brief FILTC function from %G726 Section 4.2.5 - Adaptation speed control */ inline static void FILTC(unsigned AX,unsigned AP,unsigned& APP) { CHECK_UM(AX,1); CHECK_UM(AP,10); int DIF = (AX<<9)-AP; int DIFSX = (DIF>>4); APP = (DIFSX+AP); // & 1023; CHECK_UM(APP,10); } /** @brief FUNCTF function from %G726 Section 4.2.5 - Adaptation speed control */ inline static void FUNCTF(unsigned RATE,unsigned I,unsigned& FI) { CHECK_UNSIGNED(I,RATE); static const int16_t functf2[2] = {0,7}; static const int16_t functf3[4] = {0,1,2,7}; static const int16_t functf4[8] = {0,0,0,1,1,1,3,7}; static const int16_t functf5[16] = {0,0,0,0,0,1,1,1,1,1,2,3,4,5,6,6}; static 
const int16_t* const functf[4] = {functf2,functf3,functf4,functf5}; unsigned x = I; int mask=(1<<(RATE-1)); if(x&mask) x = ~x; x &= mask-1; FI = functf[RATE-2][x]; CHECK_UM(FI,3); } /** @brief LIMA function from %G726 Section 4.2.5 - Adaptation speed control */ inline static void LIMA(unsigned AP,unsigned& AL) { CHECK_UM(AP,10); AL = (AP>256) ? 64 : AP>>2; CHECK_UM(AL,7); } /** @brief SUBTC function from %G726 Section 4.2.5 - Adaptation speed control */ inline static void SUBTC(unsigned DMSP,unsigned DMLP,unsigned TDP,unsigned Y,unsigned& AX) { CHECK_UM(DMSP,12); CHECK_UM(DMLP,14); CHECK_UNSIGNED(TDP,1); CHECK_UM(Y,13); int DIF = (DMSP<<2)-DMLP; unsigned DIFM; if(DIF<0) DIFM = -DIF; else DIFM = DIF; unsigned DTHR = DMLP >> 3; AX = (Y>=1536 && DIFM<DTHR) ? TDP : 1; CHECK_UM(AX,1); } /** @brief TRIGA function from %G726 Section 4.2.5 - Adaptation speed control */ inline static void TRIGA(unsigned TR,unsigned APP,unsigned& APR) { CHECK_UNSIGNED(TR,1); CHECK_UM(APP,10); APR = TR ? 256 : APP; CHECK_UM(APR,10); } /** @brief ACCUM function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void ACCUM(int WAn[2],int WBn[6],int& SE,int& SEZ) { CHECK_TC(WAn[0],16); CHECK_TC(WAn[1],16); CHECK_TC(WBn[0],16); CHECK_TC(WBn[1],16); CHECK_TC(WBn[2],16); CHECK_TC(WBn[3],16); CHECK_TC(WBn[4],16); CHECK_TC(WBn[5],16); int16_t SEZI = (int16_t)(WBn[0]+WBn[1]+WBn[2]+WBn[3]+WBn[4]+WBn[5]); int16_t SEI = (int16_t)(SEZI+WAn[0]+WAn[1]); SEZ = SEZI >> 1; SE = SEI >> 1; CHECK_TC(SE,15); CHECK_TC(SEZ,15); } /** @brief ACCUM function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void ADDB(unsigned DQ,int SE,int& SR) { CHECK_SM(DQ,16); CHECK_TC(SE,15); int DQI; if(DQ&(1<<15)) DQI = (1<<15)-DQ; else DQI = DQ; SR = (int16_t)(DQI+SE); CHECK_TC(SR,16); } /** @brief ADDC function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void ADDC(unsigned 
DQ,int SEZ,int& PK0,unsigned& SIGPK) { CHECK_SM(DQ,16); CHECK_TC(SEZ,15); int DQI; if(DQ&(1<<15)) DQI = (1<<15)-DQ; else DQI = DQ; int DQSEZ = (int16_t)(DQI+SEZ); PK0 = DQSEZ>>15; SIGPK = DQSEZ ? 0 : 1; CHECK_TC(PK0,1); CHECK_UNSIGNED(SIGPK,1); } static void MagToFloat(unsigned mag,unsigned& exp,unsigned& mant) { unsigned e = 0; unsigned m = mag<<1; if(m>=(1<<8)) { e |= 8; m >>= 8; } if(m>=(1<<4)) { e |= 4; m >>= 4; } if(m>=(1<<2)) { e |= 2; m >>= 2; } e |= m>>1; exp = e; mant = mag ? (mag<<6)>>e : 1<<5; } /** @brief FLOATA function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void FLOATA(unsigned DQ, unsigned& DQ0) { CHECK_SM(DQ,16); unsigned DQS = (DQ>>15); unsigned MAG = DQ&32767; unsigned EXP; unsigned MANT; MagToFloat(MAG,EXP,MANT); DQ0 = (DQS<<10) + (EXP<<6) + MANT; CHECK_FL(DQ0,11); } /** @brief FLOATB function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void FLOATB(int SR, unsigned& SR0) { CHECK_TC(SR,16); unsigned SRS = (SR>>15)&1; unsigned MAG = SRS ? (-SR)&32767 : SR; unsigned EXP; unsigned MANT; MagToFloat(MAG,EXP,MANT); SR0 = (SRS<<10) + (EXP<<6) + MANT; CHECK_FL(SR0,11); } /** @brief FMULT function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ static void FMULT(int An,unsigned SRn,int& WAn) { CHECK_TC(An,16); CHECK_FL(SRn,11); unsigned AnS = (An>>15)&1; unsigned AnMAG = AnS ? (-(An>>2))&8191 : An>>2; unsigned AnEXP; unsigned AnMANT; MagToFloat(AnMAG,AnEXP,AnMANT); unsigned SRnS = SRn>>10; unsigned SRnEXP = (SRn>>6) & 15; unsigned SRnMANT = SRn&63; unsigned WAnS = SRnS^AnS; unsigned WAnEXP = SRnEXP+AnEXP; unsigned WAnMANT = ((SRnMANT*AnMANT)+48)>>4; unsigned WAnMAG; if(WAnEXP<=26) WAnMAG = (WAnMANT<<7) >> (26-WAnEXP); else WAnMAG = ((WAnMANT<<7) << (WAnEXP-26)) & 32767; WAn = WAnS ? 
-(int)WAnMAG : WAnMAG; CHECK_TC(WAn,16); } /** @brief LIMC function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void LIMC(int A2T,int& A2P) { CHECK_TC(A2T,16); const int A2UL = 12288; const int A2LL = 53248-65536; if(A2T<=A2LL) A2P = A2LL; else if(A2T>=A2UL) A2P = A2UL; else A2P = A2T; CHECK_TC(A2P,16); } /** @brief LIMD function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void LIMD(int A1T,int A2P,int& A1P) { CHECK_TC(A1T,16); CHECK_TC(A2P,16); const int OME = 15360; int A1UL = (int16_t)(OME-A2P); int A1LL = (int16_t)(A2P-OME); if(A1T<=A1LL) A1P = A1LL; else if(A1T>=A1UL) A1P = A1UL; else A1P = A1T; CHECK_TC(A1P,16); } /** @brief TRIGB function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void TRIGB(unsigned TR,int AnP,int& AnR) { CHECK_UNSIGNED(TR,1); CHECK_TC(AnP,16); AnR = TR ? 0 : AnP; CHECK_TC(AnR,16); } /** @brief UPA1 function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void UPA1(int PK0,int PK1,int A1,unsigned SIGPK,int& A1T) { CHECK_TC(PK0,1); CHECK_TC(PK1,1); CHECK_TC(A1,16); CHECK_UNSIGNED(SIGPK,1); int UGA1; if(SIGPK==0) { if(PK0^PK1) UGA1 = -192; else UGA1 = 192; } else UGA1 = 0; A1T = (int16_t)(A1+UGA1-(A1>>8)); CHECK_TC(A1T,16); } /** @brief UPA2 function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void UPA2(int PK0,int PK1,int PK2,int A1,int A2,unsigned SIGPK,int& A2T) { CHECK_TC(PK0,1); CHECK_TC(PK1,1); CHECK_TC(PK2,1); CHECK_TC(A1,16); CHECK_TC(A2,16); CHECK_UNSIGNED(SIGPK,1); unsigned UGA2; if(SIGPK==0) { int UGA2A = (PK0^PK2) ? -16384 : 16384; int FA1; if(A1<-8191) FA1 = -8191<<2; else if(A1>8191) FA1 = 8191<<2; else FA1 = A1<<2; int FA = (PK0^PK1) ? 
FA1 : -FA1; UGA2 = (UGA2A+FA) >> 7; } else UGA2 = 0; A2T = (int16_t)(A2+UGA2-(A2>>7)); CHECK_TC(A2T,16); } /** @brief UPB function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void UPB(unsigned RATE,int Un,int Bn,unsigned DQ,int& BnP) { CHECK_TC(Un,1); CHECK_TC(Bn,16); CHECK_SM(DQ,16); int UGBn; if(DQ&32767) UGBn = Un ? -128 : 128; else UGBn = 0; int ULBn = (RATE==5) ? Bn>>9 : Bn>>8; BnP = (int16_t)(Bn+UGBn-ULBn); CHECK_TC(BnP,16); } /** @brief XOR function from %G726 Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline static void XOR(unsigned DQn,unsigned DQ,int& Un) { CHECK_FL(DQn,11); CHECK_SM(DQ,16); Un = -(int)((DQn>>10)^(DQ>>15)); CHECK_TC(Un,1); } /** @brief TONE function from %G726 Section 4.2.7 - Tone and transition detector */ inline static void TONE(int A2P,unsigned& TDP) { CHECK_TC(A2P,16); TDP = A2P<(53760-65536); CHECK_UNSIGNED(TDP,1); } /** @brief TRANS function from %G726 Section 4.2.7 - Tone and transition detector */ inline static void TRANS(unsigned TD,unsigned YL,unsigned DQ,unsigned& TR) { CHECK_UNSIGNED(TD,1); CHECK_UM(YL,19); CHECK_SM(DQ,16); unsigned DQMAG = DQ&32767; unsigned YLINT = YL>>15; unsigned YLFRAC = (YL>>10) & 31; unsigned THR1 = (32+YLFRAC)<<YLINT; unsigned THR2; if(YLINT>9) THR2 = 31<<10; else THR2 = THR1; unsigned DQTHR = (THR2+(THR2>>1)) >> 1; TR = DQMAG>DQTHR && TD; CHECK_UNSIGNED(TR,1); } /** @brief COMPRESS function from %G726 Section 4.2.8 - Output PCM format conversion and synchronous coding adjustment */ inline static void COMPRESS(int SR,unsigned LAW,unsigned& SP) { CHECK_TC(SR,16); CHECK_UNSIGNED(LAW,1); int x = SR; #ifdef IMPLEMENT_G191_BUGS // reproduce bugs in G191 reference implementation... if(x==-0x8000) x=-1; else if(!LAW && x<0) x--; #endif // Clamp to 14 bits... 
if(x>=(1<<13)) x = (1<<13)-1; else if(x<-(1<<13)) x = -(1<<13); if(LAW) SP = G711::ALawEncode(x<<2); else SP = G711::ULawEncode(x<<2); CHECK_UNSIGNED(SP,8); } /** @brief SYNC function from %G726 Section 4.2.8 - Output PCM format conversion and synchronous coding adjustment */ inline static void SYNC(unsigned RATE,unsigned I,unsigned SP,int DLNX,int DSX,unsigned LAW,unsigned& SD) { CHECK_UNSIGNED(SP,8); CHECK_TC(DLNX,12); CHECK_TC(DSX,1); CHECK_UNSIGNED(LAW,1); unsigned ID; unsigned IM; QUAN(RATE,DLNX,DSX,ID); unsigned signMask = 1<<(RATE-1); ID = ID^signMask; IM = I^signMask; unsigned s; if(LAW) { s = SP^0x55; if(!(s&0x80)) s = s^0x7f; } else { s = SP; if(s&0x80) s = s^0x7f; } // s = ALAW/uLAW code converted to range 0x00..0xff // where 0x00 is most-negative and 0xff is most-positive if(ID<IM) { if(s<0xff) ++s; } else if(ID>IM) { if(s>0x00) { --s; if(s==0x7f && !LAW) --s; // TABLE 20/G.726 says uLaw 0xFF decrements to 0x7e and not 0x7f (!?) } } // convert s back to ALAW/uLAW code if(LAW) { if(!(s&0x80)) s = s^0x7f; s = s^0x55; } else { if(s&0x80) s = s^0x7f; } SD = s; CHECK_UNSIGNED(SD,8); } /** @brief LIMO function from %G726 Section A.3.5 - Output limiting (decoder only) */ inline static void LIMO(int SR,int& SO) { CHECK_TC(SR,16); if(SR>=(1<<13)) SO = (1<<13)-1; else if(SR<-(1<<13)) SO = -(1<<13); else SO = SR; CHECK_TC(SO,14); } /** @} */ // End of group /** @defgroup g726_section4B Internal - Functional blocks from Section 4 of G726 @ingroup g726 Some of these have been broken into two parts, the first of which can generate it's outputs using only the saved internal state from the previous itteration of the algorithm. 
@{ */ /** @brief FIGURE 4/G.726 from Section 4.2.1 - Input PCM format conversion and difference signal computation */ inline void G726::InputPCMFormatConversionAndDifferenceSignalComputation(unsigned S,int SE,int& D) { int SL; EXPAND(S,LAW,SL); SUBTA(SL,SE,D); } /** @brief FIGURE 5/G.726 from Section 4.2.2 - Adaptive quantizer */ inline void G726::AdaptiveQuantizer(int D,unsigned Y,unsigned& I) { unsigned DL; int DS; LOG(D,DL,DS); int DLN; SUBTB(DL,Y,DLN); QUAN(RATE,DLN,DS,I); } /** @brief FIGURE 6/G.726 from Section 4.2.3 - Inverse adaptive quantizer */ inline void G726::InverseAdaptiveQuantizer(unsigned I,unsigned Y,unsigned& DQ) { int DQLN; int DQS; RECONST(RATE,I,DQLN,DQS); int DQL; ADDA(DQLN,Y,DQL); ANTILOG(DQL,DQS,DQ); } /** @brief FIGURE 7/G.726 (Part 1) from Section 4.2.4 - Quantizer scale factor adaptation */ inline void G726::QuantizerScaleFactorAdaptation1(unsigned AL,unsigned& Y) { MIX(AL,YU,YL,Y); } /** @brief FIGURE 7/G.726 (Part 2) from Section 4.2.4 - Quantizer scale factor adaptation */ inline void G726::QuantizerScaleFactorAdaptation2(unsigned I,unsigned Y) { int WI; FUNCTW(RATE,I,WI); unsigned YUT; FILTD(WI,Y,YUT); unsigned YUP; LIMB(YUT,YUP); unsigned YLP; FILTE(YUP,YL,YLP); YU = YUP; // Delay YL = YLP; // Delay } /** @brief FIGURE 8/G.726 (Part 1) from Section 4.2.5 - Adaptation speed control */ inline void G726::AdaptationSpeedControl1(unsigned& AL) { LIMA(AP,AL); } /** @brief FIGURE 8/G.726 (Part 2) from Section 4.2.5 - Adaptation speed control */ inline void G726::AdaptationSpeedControl2(unsigned I,unsigned Y,unsigned TDP,unsigned TR) { unsigned FI; FUNCTF(RATE,I,FI); FILTA(FI,DMS,DMS); // Result 'DMSP' straight to delay storage 'DMS' FILTB(FI,DML,DML); // Result 'DMSP' straight to delay storage 'DMS' unsigned AX; SUBTC(DMS,DML,TDP,Y,AX); // DMSP and DMLP are read from delay storage 'DMS' and 'DML' unsigned APP; FILTC(AX,AP,APP); TRIGA(TR,APP,AP); // Result 'APR' straight to delay storage 'AP' } /** @brief FIGURE 9/G.726 (Part1) from Section 
4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline void G726::AdaptativePredictorAndReconstructedSignalCalculator1(int& SE,int& SEZ) { int WBn[6]; for(int i=0; i<6; i++) FMULT(Bn[i],DQn[i],WBn[i]); int WAn[2]; FMULT(A1,SR1,WAn[0]); FMULT(A2,SR2,WAn[1]); ACCUM(WAn,WBn,SE,SEZ); } /** @brief FIGURE 9/G.726 (Part2) from Section 4.2.6 - Adaptative predictor and reconstructed signal calculator */ inline void G726::AdaptativePredictorAndReconstructedSignalCalculator2(unsigned DQ,unsigned TR,int SE,int SEZ,int& SR,int& A2P) { int PK0; unsigned SIGPK; ADDC(DQ,SEZ,PK0,SIGPK); ADDB(DQ,SE,SR); SR2 = SR1; // Delay FLOATB(SR,SR1); // Result 'SR0' straight to delay storage 'SR1' unsigned DQ0; FLOATA(DQ,DQ0); int i; for(i=0; i<6; i++) { int Un; XOR(DQn[i],DQ,Un); int BnP; UPB(RATE,Un,Bn[i],DQ,BnP); TRIGB(TR,BnP,Bn[i]); // Result 'BnR' straight to delay storage 'Bn' } int A2T; UPA2(PK0,PK1,PK2,A1,A2,SIGPK,A2T); LIMC(A2T,A2P); TRIGB(TR,A2P,A2); // Result 'A2R' straight to delay storage 'A2' int A1T; UPA1(PK0,PK1,A1,SIGPK,A1T); int A1P; LIMD(A1T,A2P,A1P); TRIGB(TR,A1P,A1); // Result 'A1R' straight to delay storage 'A1' PK2 = PK1; // Delay PK1 = PK0; // Delay for(i=5; i>0; i--) DQn[i] = DQn[i-1]; // Delay DQn[0] = DQ0; // Delay } /** @brief FIGURE 10/G.726 (Part 1) from Section 4.2.7 - Tone and transition detector */ inline void G726::ToneAndTransitionDetector1(unsigned DQ,unsigned& TR) { TRANS(TD,YL,DQ,TR); } /** @brief FIGURE 10/G.726 (Part 2) from Section 4.2.7 - Tone and transition detector */ inline void G726::ToneAndTransitionDetector2(int A2P,unsigned TR,unsigned& TDP) { TONE(A2P,TDP); TRIGB(TR,TDP,(int&)TD); // Result 'TDR' straight to delay storage 'TD' } /** @brief FIGURE 11/G.726 from Section 4.2.8 - Output PCM format conversion and synchronous coding adjustment */ inline void G726::OutputPCMFormatConversionAndSynchronousCodingAdjustment(int SR,int SE,unsigned Y,unsigned I,unsigned& SD) { unsigned SP; COMPRESS(SR,LAW,SP); int SLX; EXPAND(SP,LAW,SLX); 
int DX; SUBTA(SLX,SE,DX); unsigned DLX; int DSX; LOG(DX,DLX,DSX); int DLNX; SUBTB(DLX,Y,DLNX); SYNC(RATE,I,SP,DLNX,DSX,LAW,SD); } /** @brief FIGURE A.4/G.726 from Section A.3.3 - Difference signal computation */ inline void G726::DifferenceSignalComputation(int SL,int SE,int& D) { SUBTA(SL,SE,D); } /** @brief FIGURE A.5/G.726 from Section A.3.5 - Output limiting (decoder only) */ inline void G726::OutputLimiting(int SR,int& SO) { LIMO(SR,SO); } /** @} */ // End of group /** @brief The top level method which implements the complete algorithm for both encoding and decoding. @param input Either the PCM input to the encoder or the ADPCM input to the decoder. @param encode A flag which if true makes this method perform the encode function. If the flag is false then the decode function is performed. @return Either the ADPCM output to the encoder or the PCM output to the decoder. */ unsigned G726::EncodeDecode(unsigned input,bool encode) { unsigned AL; AdaptationSpeedControl1(AL); unsigned Y; QuantizerScaleFactorAdaptation1(AL,Y); int SE; int SEZ; AdaptativePredictorAndReconstructedSignalCalculator1(SE,SEZ); unsigned I; if(encode) { int D; if(LAW==G726::PCM16) { int SL = (int16_t)input; SL >>= 2; // Convert input from 16bit to 14bit DifferenceSignalComputation(SL,SE,D); } else InputPCMFormatConversionAndDifferenceSignalComputation(input,SE,D); AdaptiveQuantizer(D,Y,I); } else I = input; unsigned DQ; InverseAdaptiveQuantizer(I,Y,DQ); unsigned TR; ToneAndTransitionDetector1(DQ,TR); int SR; int A2P; AdaptativePredictorAndReconstructedSignalCalculator2(DQ,TR,SE,SEZ,SR,A2P); unsigned TDP; ToneAndTransitionDetector2(A2P,TR,TDP); AdaptationSpeedControl2(I,Y,TDP,TR); QuantizerScaleFactorAdaptation2(I,Y); if(encode) return I; if(LAW==G726::PCM16) { int SO; OutputLimiting(SR,SO); return SO<<2; // Convert result from 14bit to 16 bit } else { unsigned SD; OutputPCMFormatConversionAndSynchronousCodingAdjustment(SR,SE,Y,I,SD); return SD; } } /* Public members of class G726 */ void 
G726::Reset() { int i; for(i=0; i<6; i++) { Bn[i] = 0; DQn[i] = 32; } A1 = 0; A2 = 0; AP = 0; DML = 0; DMS = 0; PK1 = 0; PK2 = 0; SR1 = 32; SR2 = 32; TD = 0; YL = 34816; YU = 544; } void G726::SetLaw(Law law) { ASSERT_DEBUG((unsigned)law<=PCM16); LAW = law; } void G726::SetRate(Rate rate) { ASSERT_DEBUG((unsigned)(rate-Rate16kBits)<=(unsigned)(Rate40kBits-Rate16kBits)); RATE = rate; } unsigned G726::Encode(unsigned S) { return EncodeDecode(S,true); } unsigned G726::Decode(unsigned I) { I &= (1<<RATE)-1; // Mask off un-needed bits return EncodeDecode(I,false); } unsigned G726::Encode(void* dst, int dstOffset, const void* src, size_t srcSize) { // convert pointers into more useful types uint8_t* out = (uint8_t*)dst; union { const uint8_t* ptr8; const uint16_t* ptr16; } in; in.ptr8 = (const uint8_t*)src; // use given bit offset out += dstOffset>>3; unsigned bitOffset = dstOffset&7; unsigned bits = RATE; // bits per adpcm sample unsigned mask = (1<<bits)-1; // bitmask for an adpcm sample // calculate number of bits to be written unsigned outBits; if(LAW!=PCM16) outBits = bits*srcSize; else { outBits = bits*(srcSize>>1); srcSize &= ~1; // make sure srcSize represents a whole number of samples } // calculate end of input buffer const uint8_t* end = in.ptr8+srcSize; while(in.ptr8<end) { // read a single PCM value from input unsigned pcm; if(LAW==PCM16) pcm = *in.ptr16++; else pcm = *in.ptr8++; // encode the pcm value as an adpcm value unsigned adpcm = Encode(pcm); // shift it to the required output position adpcm <<= bitOffset; // write adpcm value to buffer... 
unsigned b = *out; // get byte from ouput b &= ~(mask<<bitOffset); // clear bits which we want to write to b |= adpcm; // or in adpcm value *out = (uint8_t)b; // write value back to output // update bitOffset for next adpcm value bitOffset += bits; // loop if not moved on to next byte if(bitOffset<8) continue; // move pointers on to next byte ++out; bitOffset &= 7; // write any left-over bits from the last adpcm value if(bitOffset) *out = (uint8_t)(adpcm>>8); } // return number bits written to dst return outBits; } unsigned G726::Decode(void* dst, const void* src, int srcOffset, unsigned srcSize) { // convert pointers into more useful types union { uint8_t* ptr8; uint16_t* ptr16; } out; out.ptr8 = (uint8_t*)dst; const uint8_t* in = (const uint8_t*)src; // use given bit offset in += srcOffset>>3; unsigned bitOffset = srcOffset&7; unsigned bits = RATE; // bits per adpcm sample while(srcSize>=bits) { // read adpcm value from input unsigned adpcm = *in; if(bitOffset+bits>8) adpcm |= in[1]<<8; // need bits from next byte as well // allign adpcm value to bit 0 adpcm >>= bitOffset; // decode the adpcm value into a pcm value unsigned pcm = Decode(adpcm); // write pcm value to output if(LAW==PCM16) *out.ptr16++ = (int16_t)pcm; else *out.ptr8++ = (uint8_t)pcm; // update bit values for next adpcm value bitOffset += bits; srcSize -= bits; // move on to next byte of input if required if(bitOffset>=8) { bitOffset &= 7; ++in; } } // return number of bytes written to dst return out.ptr8-(uint8_t*)dst; }
/*
 * post
 * = procedure 1
 */
public static class PostParams extends RECORD {
    // Authentication credentials identifying the posting client.
    public final AuthPair authPair = mkRECORD(AuthPair::make);
    // Names of the intended recipients of the posted message.
    public final NameList recipients = mkMember(NameList::make);
    //public final Name returnToName = mkRECORD(Name::make);
    // If true, post even when some recipient names fail validation.
    public final BOOLEAN postIfInvalidNames = mkBOOLEAN();
    // If true, distribution-list recipients are permitted.
    public final BOOLEAN allowDLRecipients = mkBOOLEAN();
    // Numeric identifier for the type of the message contents.
    public final LONG_CARDINAL contentsType = mkLONG_CARDINAL();
    // Encoded envelope options accompanying the message.
    public final EncodedList envOptions = mkMember(EncodedList::make);
    // Bulk-data source from which the message content is read.
    public final BulkData1.Source content = mkRECORD(BulkData1.Source::make);

    // Instances are created only through the make() factory.
    private PostParams() { }
    public static PostParams make() { return new PostParams(); }
}
async def async_get_snapshot_uri(self, profile: Profile) -> "str | None":
    """Return the snapshot URI for ``profile``.

    Returns ``None`` when the device does not advertise the snapshot
    capability.  (The original annotation claimed ``-> str`` although this
    branch returns ``None``; the annotation now reflects actual behavior.)

    Args:
        profile: The media profile whose token is used for the request.

    Returns:
        The snapshot URI string, or ``None`` if snapshots are unsupported.
    """
    if not self.capabilities.snapshot:
        return None
    media_service = self.device.create_media_service()
    # Build the ONVIF GetSnapshotUri request for the selected profile.
    req = media_service.create_type("GetSnapshotUri")
    req.ProfileToken = profile.token
    result = await media_service.GetSnapshotUri(req)
    return result.Uri
// List shows the list of pages. A parameter of path or user is required. func (s *PagesService) List(ctx context.Context, path, user string, opt *PagesListOptions) (*Pages, error) { var pages Pages params := url.Values{} params.Set("access_token", s.client.config.Token) params.Set("path", path) params.Set("user", user) err := s.client.newRequest(ctx, http.MethodGet, "/_api/pages.list", params, &pages) if err != nil { return nil, err } if opt != nil && opt.ListOptions.Pagenation { offset := 0 var p []PageInfo for { params.Set("offset", fmt.Sprintf("%d", offset)) err := s.client.newRequest(ctx, http.MethodGet, "/_api/pages.list", params, &pages) if err != nil { break } p = append(p, pages.Pages...) offset += 50 } pages.Pages = p } return &pages, nil }
extern crate stdweb;

use stdweb::traits::*;
use stdweb::unstable::TryInto;
use stdweb::web::{document, CanvasRenderingContext2d};
use stdweb::web::html_element::CanvasElement;

/// Entry point: creates a 200x100 canvas, attaches it to the document
/// body, and draws the text "Hello cargo-node" on it.
fn main() {
    // Initialize the stdweb runtime before using any web APIs.
    stdweb::initialize();

    // Create a <canvas> element; try_into() downcasts the generic
    // Element to a CanvasElement.
    let canvas: CanvasElement = document()
        .create_element("canvas")
        .unwrap()
        .try_into()
        .unwrap();

    canvas.set_width(200);
    canvas.set_height(100);

    // Attach the canvas to the body so it becomes visible in the page.
    document().body().unwrap().append_child(&canvas);

    // Obtain the 2D rendering context used for text drawing.
    let context: CanvasRenderingContext2d = canvas.get_context().unwrap();

    context.set_font("bold 16px serif");
    // fill_text(text, x, y, max_width); None means no width constraint.
    context.fill_text("Hello cargo-node", 20.0, 20.0, None);
}
<reponame>randomman552/Abacws-Data-Vis export { ModelView } from "./ModelView"
// Scan scans ports from 1 to numPorts (included) // using numWorkers in parallel. // It returns the sorted list of open ports. func Scan(host string, numPorts, numWorkers int) []int { dial := func(host string, port int) bool { address := host + fmt.Sprintf(":%d", port) conn, err := net.Dial("tcp", address) if err != nil { return false } conn.Close() return true } return scan(dial, host, numPorts, numWorkers) }
/**
 * Does the auto completion for categories, matching with any category already present in the knowledge base.
 *
 * <p>Matching is a case-insensitive prefix match: a known category is offered
 * as a candidate when it starts with the text typed so far.
 *
 * @param value the input value (the prefix typed by the user so far).
 * @return the AutoCompletionCandidates, possibly empty; {@code null} when the
 *         knowledge base could not be queried. NOTE(review): returning
 *         {@code null} here is inconsistent with the empty-candidates path
 *         below — confirm callers tolerate a null return.
 */
public AutoCompletionCandidates doAutoCompleteCategories(@QueryParameter String value) {
    List<String> categories;
    try {
        categories = PluginImpl.getInstance().getKnowledgeBase().getCategories();
    } catch (Exception e) {
        // Best-effort feature: log and give up rather than failing the form.
        logger.log(Level.WARNING, "Could not get the categories for autocompletion", e);
        return null;
    }
    AutoCompletionCandidates candidates = new AutoCompletionCandidates();
    if (categories == null) {
        // Knowledge base reachable but holds no category data yet.
        return candidates;
    }
    for (String category : categories) {
        // Case-insensitive prefix match against the typed value.
        if (category.toLowerCase().startsWith(value.toLowerCase())) {
            candidates.add(category);
        }
    }
    return candidates;
}
Systemic sclerosis complicated by ovarian cancer Dear Editor, Systemic sclerosis (SSc) is a chronic, multisystem, autoimmune disease causing significant morbidity and mortality. There are many reports describing the risk of malignancy with SSc. While a previous study indicated a higher incidence of cancer, especially lung cancer, a recent study with 538 patients showed no overall increase in the incidence of cancer, except for liver cancer. So, it is difficult to precisely determine the risk of malignancy in SSc. Herein, we report a case of SSc in a patient with ovarian cancer. Interestingly, the sclerosis of the skin improved after the subsequent treatment of ovarian cancer. A 54-year-old Japanese woman presented in May 2004 with a 5-month history of swelling and tightening of her fingers and forearms with Raynaud’s phenomenon. She had no personal or family history of autoimmune disease and denied any chemical exposure, which has been reported to induce sclerosis of the skin. Physical examination disclosed nonpitting swelling and sclerosis of the skin proximal to the digits affecting her forearms and sclerodactylia (Fig. 1). Abnormal laboratory findings provided the following values: elevated C-reactive protein, 0.7 mg/dL; erythrocyte sedimentation rate, 51 mm/h; and positive antinuclear antibody (ANA) at a titer of ×640 with a speckled pattern. Anti-ds-DNA, ss-DNA, topoisomerase-I, U1-RNP and centromere antibodies were negative. Upper gastro-intestinal radiography showed mild to moderate hypomotility of her esophagus. A chest X-ray and pulmonary function tests revealed no significant abnormalities. A skin biopsy obtained from her right forearm showed hyperproliferation of swollen, homogenous collagen fibers extending throughout the dermis, and surrounding sweat glands (Fig. 2). According to the American Rheumatism Association (ARA) diagnostic criteria, she was diagnosed with SSc. 
During systemic investigation with computed tomography, bilateral ovarian masses, each approximately 50–70 mm in size, multiple enlarged retroperitoneal lymph nodes, and ascites were found. There was also an increased serum level of 638 U/mL CA-125 (reference range, 0–35 U/mL). Because the abdominal findings were severe, their treatment took priority over the SSc, for which 100 mg of tocopherol acetate p.o. daily had been given with little improvement of the skin lesions. When she underwent a laparotomy in July 2004, there was severe peritonitis carcinomatosa,
/************************************************************************** Copyright 2019 Vietnamese-German-University Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. @author: ngpbh ***************************************************************************/ package com.vgu.se.jocl.utils; import org.json.simple.JSONArray; import org.json.simple.JSONObject; /** * The Class UMLContextUtils contains various methods for querying over * UML context */ public class UMLContextUtils { /** * Validate UML Context. * * @param context the UML context * @return true, if valid */ public static boolean validateContext(JSONArray context) { // TODO: Implement. return false; } /** * Checks if {@code className} is an actual class in this context. * * @param context the UML context * @param className the class name * @return true, if this class is valid */ public static boolean isClass(JSONArray context, String className) { if (context != null && className != null) { for (Object entity : context) { if (entity instanceof JSONObject) { JSONObject obj = (JSONObject) entity; if (className.equals(obj.get("class"))) { return true; } } } } return false; } /** * Checks if {@code associationName} is an actual association in * this context. 
* * @param context the UML context * @param associationName the association name * @return true, if the association is valid */ public static boolean isAssociationClass(JSONArray context, String associationName) { if (context != null && associationName != null) { for (Object entity : context) { if (entity instanceof JSONObject) { JSONObject jObject = (JSONObject) entity; if (associationName .equals(jObject.get("association"))) { return true; } } } } return false; } /** * Checks if {@code propertyName} is an actual property of * {@code className} in this UML context. * * @param context the UML context * @param className the class name * @param propertyName the property name * @return true, if is property belongs to the class and the class * also belongs to the UML context */ public static boolean isPropertyOfClass(JSONArray context, String className, String propertyName) { if (context != null && className != null && propertyName != null) { for (Object entity : context) { if (entity instanceof JSONObject) { JSONObject jObject = (JSONObject) entity; if (className.equals(jObject.get("class"))) { Object attributes = jObject.get("attributes"); if (attributes instanceof JSONArray) { JSONArray jAtts = (JSONArray) attributes; for (Object att : jAtts) { if (att instanceof JSONObject) { JSONObject jAtt = (JSONObject) att; if (propertyName .equals(jAtt.get("name"))) { return true; } } } } } } } } return false; } /** * Checks if {@code associationName} is an actual association of * {@code className} in this UML context. 
* * @param context the UML context * @param className the class name * @param associationName the association name * @return true, if is association is valid (and the class is in the * context) */ public static boolean isAssociationOfClass(JSONArray context, String className, String associationName) { if (context != null && className != null && associationName != null) { for (Object entity : context) { if (entity instanceof JSONObject) { JSONObject jObject = (JSONObject) entity; if (jObject.containsKey("association")) { Object ends = jObject.get("ends"); Object classes = jObject.get("classes"); if (ends instanceof JSONArray && classes instanceof JSONArray) { JSONArray jEnds = (JSONArray) ends; JSONArray jClasses = (JSONArray) classes; if (className.equals(jClasses.get(0)) && associationName .equals(jEnds.get(1)) || className.equals(jClasses.get(1)) && associationName.equals( jEnds.get(0))) { return true; } } } } } } return false; } public static String getAssociationOppositeClassName( JSONArray context, String assocName, String className) { for (Object object : context) { if (((JSONObject) object).containsKey("association") && ((JSONObject) object).get("association") .equals(assocName)) { JSONArray classes = (JSONArray) ((JSONObject) object) .get("classes"); if (classes.get(0).equals(className)) return (String) classes.get(1); else return (String) classes.get(0); } } return null; } public static String getAttributeType(JSONArray plainUMLContext, String propertyClass, String propertyName) { for (Object entity : plainUMLContext) { if (((JSONObject) entity).containsKey("class")) { if (((JSONObject) entity).get("class") .equals(propertyClass)) { if (((JSONObject) entity) .containsKey("attributes")) { for (Object association : (JSONArray) ((JSONObject) entity) .get("attributes")) { if (((JSONObject) association).get("name") .equals(propertyName)) { return (String) ((JSONObject) association) .get("type"); } } } } } } return null; } public static boolean 
isSuperClassOf(JSONArray plainUMLContext, String expectedSuperType, String targetType) { if (targetType == null) return true; if (expectedSuperType.equals(targetType)) return true; while (true) { String superClazz = findDirectSuperClass(plainUMLContext, targetType); if (superClazz == null) return false; if (expectedSuperType.equals(superClazz)) return true; targetType = superClazz; } } private static String findDirectSuperClass( JSONArray plainUMLContext, String targetType) { for (Object entity : plainUMLContext) { JSONObject clazz = (JSONObject) entity; if (clazz.containsKey("class") && targetType.equals(clazz.get("class")) && clazz.containsKey("super")) { return (String) clazz.get("super"); } } return null; } public static String getAssociation(JSONArray context, String className, String endName) { String association = null; for (Object object : context) { if (((JSONObject) object).containsKey("association")) { JSONArray classes = (JSONArray) ((JSONObject) object) .get("classes"); JSONArray ends = (JSONArray) ((JSONObject) object) .get("ends"); for (int index_end = 0; index_end < classes .size(); index_end++) { if (classes.get(index_end).equals(className) && ends.get(index_end).equals(endName)) { association = (String) ((JSONObject) object) .get("association"); break; } ; } } } return association; } public static String getOppositeAssociationEnd(JSONArray context, String assocName, String endName) { String opposAssoc = null; for (int i = 0; i< context.size(); i++) { JSONObject o = (JSONObject) context.get(i); if (o.containsKey("association") && o.get("association").equals(assocName)) { JSONArray ends = (JSONArray) o.get("ends"); opposAssoc = ends.get(0).equals(endName) ? 
(String) ends.get(1) : (String) ends.get(0); } } return opposAssoc; } public static boolean isAssociationEndOfClass(JSONArray context, String className, String assoc) { for (int i = 0; i < context.size(); i++) { JSONObject o = (JSONObject) context.get(i); if (o.containsKey("association")) { JSONArray classes = (JSONArray) o.get("classes"); JSONArray ends = (JSONArray) o.get("ends"); if (classes.get(0).equals(className) && ends.get(0).equals(assoc)) { return true; } if (classes.get(1).equals(className) && ends.get(1).equals(assoc)) { return true; } } } return false; } }
// Re-export the stock product image gallery theme from @vtex/store-ui as
// this module's default, so the store can later override it in one place.
import { productImageGalleryTheme } from '@vtex/store-ui'

export default productImageGalleryTheme
def gost(lis,ky,r,l): lens=l-r+1 if lens <= 5: if ky in lis[r:l+1]: return True else: return False if ky <= lis[r+(lens//2)-1]: return gost(lis,ky,r,r+(lens//2)-1) else: return gost(lis,ky,r+(lens//2),l) n = int(input()) a = input() s=list(map(int,a.split())) q = int(input()) a = input() t=list(map(int,a.split())) i = 0 for it in t: if gost(s,it,0,n-1): i = i+1 print(i)
<gh_stars>0
/* Copyright (c) 2012, 2020, Oracle and/or its affiliates.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License, version 2.0,
   as published by the Free Software Foundation.

   This program is also distributed with certain software (including
   but not limited to OpenSSL) that is licensed under separate terms,
   as designated in a particular file or component or in included license
   documentation.  The authors of MySQL hereby grant you an additional
   permission to link the program and your derivative works with the
   separately licensed software that they have included with MySQL.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License, version 2.0, for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301  USA */

#include "my_config.h"

#include <gtest/gtest.h>

#include <algorithm>
#include <random>
#include <vector>

#include "my_byteorder.h"
#include "my_compiler.h"
#include "my_inttypes.h"

namespace alignment_unittest {

/*
  Testing performance penalty of accessing un-aligned data.
  Seems to about 2% on my desktop machine.
*/
// Fixture: builds one int array at natural alignment and a byte-shifted
// copy (offset by 1 byte) so the same sort can be benchmarked on aligned
// vs. misaligned keys.
class AlignmentTest : public ::testing::Test {
 protected:
  // Increase num_iterations for actual benchmarking!
  static const int num_iterations = 1;
  static const int num_records = 100 * 1000;

  static int *aligned_data;
  static uchar *unaligned_data;

  // Shared, per-suite setup: fill, shuffle, then copy the data to an
  // intentionally misaligned buffer (offset +1 byte).
  static void SetUpTestCase() {
    aligned_data = new int[num_records];
    unaligned_data = new uchar[(num_records + 1) * sizeof(int)];
    for (int ix = 0; ix < num_records; ++ix) {
      aligned_data[ix] = ix / 10;
    }
    std::random_device rng;
    std::mt19937 urng(rng());
    std::shuffle(aligned_data, aligned_data + num_records, urng);
    memcpy(unaligned_data + 1, aligned_data, num_records * sizeof(int));
  }

  static void TearDownTestCase() {
    delete[] aligned_data;
    delete[] unaligned_data;
  }

  // Per-test setup: build key-pointer arrays into the two buffers.
  void SetUp() override {
    aligned_keys = new uchar *[num_records];
    unaligned_keys = new uchar *[num_records];
    for (int ix = 0; ix < num_records; ++ix) {
      aligned_keys[ix] =
          static_cast<uchar *>(static_cast<void *>(&aligned_data[ix]));
      unaligned_keys[ix] = &unaligned_data[1 + (ix * sizeof(int))];
    }
  }

  void TearDown() override {
    delete[] aligned_keys;
    delete[] unaligned_keys;
  }

  uchar **aligned_keys;
  uchar **unaligned_keys;
};

int *AlignmentTest::aligned_data;
uchar *AlignmentTest::unaligned_data;

// A copy of the generic, byte-by-byte getter.
// (The (int16) cast on byte 3 sign-extends the most significant byte.)
#define sint4korrgeneric(A) (int32)(((int32)((uchar)(A)[0])) + (((int32)((uchar)(A)[1]) << 8)) + (((int32)((uchar)(A)[2]) << 16)) + (((int32)((int16)(A)[3]) << 24)))

// Comparator that reads keys with a direct (possibly misaligned) int load.
class Mem_compare_uchar_int {
 public:
  // SUPPRESS_UBSAN: only executed on intel, misaligned read works OK.
  bool operator()(const uchar *s1, const uchar *s2) SUPPRESS_UBSAN {
    return *pointer_cast<const int *>(s1) < *pointer_cast<const int *>(s2);
  }
};

// Comparator using the platform-optimized sint4korr getter.
class Mem_compare_sint4 {
 public:
  bool operator()(const uchar *s1, const uchar *s2) {
    return sint4korr(s1) < sint4korr(s2);
  }
};

// Comparator using the generic byte-by-byte getter defined above.
class Mem_compare_sint4_generic {
 public:
  bool operator()(const uchar *s1, const uchar *s2) {
    return sint4korrgeneric(s1) < sint4korrgeneric(s2);
  }
};

// Misaligned int loads are only safe on x86/x64; skip elsewhere.
#if defined(__i386__) || defined(__x86_64__) || defined(_WIN32)

TEST_F(AlignmentTest, AlignedSort) {
  for (int ix = 0; ix < num_iterations; ++ix) {
    std::vector<uchar *> keys(aligned_keys, aligned_keys + num_records);
    std::sort(keys.begin(), keys.end(), Mem_compare_uchar_int());
  }
}

TEST_F(AlignmentTest, UnAlignedSort) {
  for (int ix = 0; ix < num_iterations; ++ix) {
    std::vector<uchar *> keys(unaligned_keys, unaligned_keys + num_records);
    std::sort(keys.begin(), keys.end(), Mem_compare_uchar_int());
  }
}

TEST_F(AlignmentTest, Sint4Sort) {
  for (int ix = 0; ix < num_iterations; ++ix) {
    std::vector<uchar *> keys(unaligned_keys, unaligned_keys + num_records);
    std::sort(keys.begin(), keys.end(), Mem_compare_sint4());
  }
}

TEST_F(AlignmentTest, Sint4SortGeneric) {
  for (int ix = 0; ix < num_iterations; ++ix) {
    std::vector<uchar *> keys(unaligned_keys, unaligned_keys + num_records);
    std::sort(keys.begin(), keys.end(), Mem_compare_sint4_generic());
  }
}
#endif

}  // namespace alignment_unittest
// Fetch attempts to fetch arbitrary bytes from a given URL func Fetch(ctx context.Context, fetchURL *url.URL, jsonErrors bool) (io.Reader, error) { if jsonErrors { q := fetchURL.Query() q.Add("json_errors", "true") fetchURL.RawQuery = q.Encode() } fetchPath := fetchURL.String() log.Trace("[network] Beginning fetch from ", fetchPath) start := time.Now() c, cancel := context.WithTimeout(ctx, timeout) defer cancel() req, err := http.NewRequestWithContext(c, http.MethodGet, fetchPath, nil) if err != nil { return nil, fmt.Errorf("Failed to create HTTP request: %+v", err) } req.Header.Set("User-Agent", "eRegs for "+os.Getenv("NAME")) log.Trace("[network] Fetching from ", fetchPath) resp, err := client.Do(req) if err != nil { return nil, fmt.Errorf("Fetch failed to complete: %+v", err) } defer resp.Body.Close() if resp.StatusCode >= 400 { fetchError := &Error{} err = json.NewDecoder(resp.Body).Decode(fetchError) if err != nil { return nil, fmt.Errorf("Received error code %d while fetching from %s, unable to extract error message: %+v", resp.StatusCode, fetchPath, err) } return nil, fmt.Errorf("Received error code %d while fetching from %s: %s", resp.StatusCode, fetchPath, fetchError.Exception) } b, err := io.ReadAll(resp.Body) if err != nil { return nil, fmt.Errorf("Failed to read response body: %+v", err) } body := bytes.NewBuffer(b) log.Trace("[network] Received ", len(b), " bytes from ", fetchPath, " in ", time.Since(start)) return body, nil }
/** * Create a daily date range for a given date for the month to which * the date belongs. So if the date is "2010-05-28 16:14:08" then the * returned range would be (2010-05-28 00:00:00 -> 2010-05-28 23:59:59). */ static DateRange createDaily(Date date) { Calendar start = new GregorianCalendar(); start.setTimeZone(TimeZone.getTimeZone("UTC")); start.setTime(date); start.set(start.get(Calendar.YEAR), start.get(Calendar.MONTH), start.get(Calendar.DATE), start.getActualMinimum(Calendar.HOUR_OF_DAY), start.getActualMinimum(Calendar.MINUTE), start.getActualMinimum(Calendar.SECOND)); start.set(Calendar.MILLISECOND, 0); Calendar end = new GregorianCalendar(); end.setTimeZone(TimeZone.getTimeZone("UTC")); end.setTime(date); end.set(end.get(Calendar.YEAR), end.get(Calendar.MONTH), end.get(Calendar.DATE), end.getActualMaximum(Calendar.HOUR_OF_DAY), end.getActualMaximum(Calendar.MINUTE), end.getActualMaximum(Calendar.SECOND)); end.set(Calendar.MILLISECOND, 999); return new DateRange(start.getTime(), end.getTime()); }
/** * Copyright (c) 2018 <NAME> * This file is licensed under the terms of the MIT license. */ #pragma once #include <reg/ClassRegistrar.h> #include <gamebase/impl/engine/RelativeValue.h> namespace gamebase { namespace editor { struct SimpleRelType { enum Enum { Pixels, Percents }; }; class SimpleRelativeValue : public impl::ISerializable { public: SimpleRelativeValue() : m_type(SimpleRelType::Pixels) , m_value(0) {} SimpleRelativeValue(SimpleRelType::Enum type, double value) { set(type, value); } SimpleRelativeValue(impl::RelType::Enum type, double value) { set(type, value); } explicit SimpleRelativeValue(const impl::RelativeValue& value) { set(value.type(), value.value()); } void set(impl::RelType::Enum type, double value); void set(SimpleRelType::Enum type, double value); impl::RelativeValue toRelativeValue() const; SimpleRelType::Enum type() const { return m_type; } double value() const; virtual void serialize(impl::Serializer&) const override; private: SimpleRelType::Enum m_type; double m_value; }; double round(double value, SimpleRelType::Enum type); } }
export * from './button'; export * from './circle'; export * from './image'; export * from './line'; export * from './mouse'; export * from './point'; export * from './rect'; export * from './rounded-rect'; export * from './text';
// DFS perform a depth-first-traversal on the given attribute and invokes callback func (attr *Attribute) DFS(callback func(attr *Attribute)) { callback(attr) for _, each := range attr.subAttributes { each.DFS(callback) } }
// %%Function: QuerySublinePointPcpCore
// %%Contact: victork
//
/*
 * Returns dim-info of the cp in the line, that a) contains given point or
 * b) is closest to it from the left or
 * c) is just closest to it
 */

LSERR QuerySublinePointPcpCore(PLSSUBL plssubl,			/* subline to query */
	PCPOINTUV pptIn,									/* query point (u,v), subline-relative */
	DWORD cDepthQueryMax,								/* room available in plsqsubinfoResults */
	PLSQSUBINFO plsqsubinfoResults,						/* out: one entry per nesting level */
	DWORD* pcActualDepth,								/* out: levels actually filled in */
	PLSTEXTCELL plstextcell)							/* out: text cell info at deepest level */
{
	PLSC plsc;
	LSERR lserr = lserrNone;
	PLSDNODE pdn, pdnPrev = NULL;
	POINTUV pt, ptInside, ptInsideLocal;
	LSCP cpLim;
	LSQIN lsqin;
	LSQOUT lsqout;
	PLSSUBL plssublLowerLevels;
	POINTUV ptStartLowerLevels;
	PLSQSUBINFO plsqsubinfoLowerLevels;
	DWORD cDepthQueryMaxLowerLevels;
	DWORD cActualDepthLowerLevels;
	long upQuery;

	Assert(FIsLSSUBL(plssubl));
	Assert(!plssubl->fDupInvalid);

	/* Caller gave us no room for even one level of results. */
	if (cDepthQueryMax == 0)
	{
		return lserrInsufficientQueryDepth;
	}

	plsc = plssubl->plsc;
	cpLim = plssubl->cpLimDisplay;

	/* Start at the subline origin and skip leading dnodes that can never
	   answer a point query (out-of-content, non-real, or zero-width). */
	pt.u = 0;
	pt.v = 0;
	pdn = plssubl->plsdnFirst;

	while (FDnodeBeforeCpLim(pdn, cpLim) &&
		   (FIsNotInContent(pdn) || !(FIsDnodeReal(pdn)) || FIsZeroWidth(pdn)))
	{
		pdn = AdvanceToNextDnodeQuery(pdn, &pt);
	}

	/* Nothing queryable on this subline at all. */
	if (!FDnodeBeforeCpLim(pdn, cpLim))
	{
		*pcActualDepth = 0;
		return lserrNone;
	}

	upQuery = pptIn->u;

	if (pt.u <= upQuery)
	{
		/* Advance until we step past the query u-position; pdnPrev is then
		   the candidate dnode containing (or left-closest to) the point. */
		while (FDnodeBeforeCpLim(pdn, cpLim) && pt.u <= upQuery)
		{
			pdnPrev = pdn;
			pdn = AdvanceToNextDnodeQuery(pdn, &pt);
		}

		if (FIsDnodeBorder(pdnPrev))
		{
			if (pdnPrev->fOpenBorder)
			{
				/* Opening border: the real content follows it, keep pdn. */
				Assert(FDnodeBeforeCpLim(pdn, cpLim));
			}
			else
			{
				/* Closing border: back up over the border to the real
				   dnode that precedes it. */
				pdn = pdnPrev;
				Assert(pdn != NULL && !FIsNotInContent(pdn));
				pdnPrev = BacktrackToPreviousDnode(pdnPrev, &pt);
				pdn = pdnPrev;
				Assert(pdn != NULL && !FIsNotInContent(pdn));
				pdnPrev = BacktrackToPreviousDnode(pdnPrev, &pt);
			}
		}
		else
		{
			/* Back up to the last real, non-zero-width dnode at or before
			   the query position. */
			pdn = pdnPrev;
			pdnPrev = BacktrackToPreviousDnode(pdnPrev, &pt);

			while (pdn != NULL && (!(FIsDnodeReal(pdn)) || FIsZeroWidth(pdn)))
			{
				pdn = pdnPrev;
				pdnPrev = BacktrackToPreviousDnode(pdnPrev, &pt);
			}
			Assert(pdn != NULL && !FIsNotInContent(pdn));
		}
	}

	/* Account for the dnode's own vertical shift before the object query. */
	pt.v += pdn->u.real.lschp.dvpPos;

	PrepareQueryCall(plssubl, pdn, &lsqin);

	/* Translate the query point into the dnode's local coordinates. */
	ptInside.u = pptIn->u - pt.u;
	ptInside.v = pptIn->v - pt.v;

	/* Dispatch to the installed object's point-to-pcp handler. */
	lserr = (*plsc->lsiobjcontext.rgobj[pdn->u.real.lschp.idObj].lsim.pfnQueryPointPcp)
		(pdn->u.real.pdobj, &ptInside, &lsqin, &lsqout);
	if (lserr != lserrNone)
		return lserr;

	lserr = FillInQueryResults(plsc, plssubl, plsqsubinfoResults, pdn, &pt, &lsqout);
	if (lserr != lserrNone)
		return lserr;

	if (lsqout.plssubl == NULL)
	{
		/* Leaf level: this dnode has no nested subline, report the cell. */
		*pcActualDepth = 1;
		FillInTextCellInfo(plsc, pdn, &pt, &lsqout, plstextcell);
	}
	else
	{
		/* The object exposes a nested subline: recurse one level deeper,
		   writing into the next result slot with one less depth budget. */
		plssublLowerLevels = lsqout.plssubl;
		plsqsubinfoLowerLevels = plsqsubinfoResults + 1;
		cDepthQueryMaxLowerLevels = cDepthQueryMax - 1;

		/* Convert the local point into the nested subline's text flow. */
		lserr = LsPointUV2FromPointUV1(plssubl->lstflow, &(lsqout.pointUvStartSubline),
			&ptInside, plssublLowerLevels->lstflow, &ptInsideLocal);
		if (lserr != lserrNone)
			return lserr;

		lserr = QuerySublinePointPcpCore(plssublLowerLevels, &ptInsideLocal,
			cDepthQueryMaxLowerLevels, plsqsubinfoLowerLevels,
			&cActualDepthLowerLevels, plstextcell);
		if (lserr != lserrNone)
			return lserr;

		*pcActualDepth = cActualDepthLowerLevels + 1;

		/* Re-express the lower-level results in this subline's coordinates. */
		ptStartLowerLevels.u = pt.u + lsqout.pointUvStartSubline.u;
		ptStartLowerLevels.v = pt.v + lsqout.pointUvStartSubline.v;

		TransformPointsOnLowerLevels(plsqsubinfoLowerLevels, cActualDepthLowerLevels,
			plstextcell, &ptStartLowerLevels, plssubl->lstflow, plssublLowerLevels->lstflow);
	}

	return lserrNone;
}
def factorial(n): if n==0: return 0 elif n==1: return 1 else: return n+factorial(n-1) friend1=int(input()) friend2=int(input()) if friend1>friend2: friend1,friend2=friend2,friend1 mid=(friend2-friend1)/2 if not mid.is_integer(): tiredness=(factorial((friend2-friend1)//2))+(factorial(((friend2-friend1)//2)+1)) else: tiredness=2*(factorial((friend2-friend1)//2)) print(tiredness)
def gait(strikes, data, duration, distance=None): import numpy as np step_durations = [] for i in range(1, np.size(strikes)): step_durations.append(strikes[i] - strikes[i-1]) avg_step_duration = np.mean(step_durations) sd_step_durations = np.std(step_durations) number_of_steps = np.size(strikes) cadence = number_of_steps / duration strides1 = strikes[0::2] strides2 = strikes[1::2] stride_durations1 = [] for i in range(1, np.size(strides1)): stride_durations1.append(strides1[i] - strides1[i-1]) stride_durations2 = [] for i in range(1, np.size(strides2)): stride_durations2.append(strides2[i] - strides2[i-1]) strides = [strides1, strides2] stride_durations = [stride_durations1, stride_durations2] avg_number_of_strides = np.mean([np.size(strides1), np.size(strides2)]) avg_stride_duration = np.mean((np.mean(stride_durations1), np.mean(stride_durations2))) sd_stride_durations = np.mean((np.std(stride_durations1), np.std(stride_durations2))) step_period = 1 / avg_step_duration stride_period = 1 / avg_stride_duration step_regularity, stride_regularity, symmetry = \ gait_regularity_symmetry(data, step_period, stride_period) if distance: velocity = distance / duration avg_step_length = number_of_steps / distance avg_stride_length = avg_number_of_strides / distance else: velocity = None avg_step_length = None avg_stride_length = None return number_of_steps, cadence, velocity, \ avg_step_length, avg_stride_length, step_durations, \ avg_step_duration, sd_step_durations, strides, stride_durations, \ avg_number_of_strides, avg_stride_duration, sd_stride_durations, \ step_regularity, stride_regularity, symmetry
package util import ( "fmt" "log" "os" "strconv" "strings" "time" gops "github.com/mitchellh/go-ps" "github.com/shirou/gopsutil/v3/process" ) // ProcEntry a process entry of a process list type ProcEntry struct { Name string `json:"name"` Cmdline string `json:"cmdline"` Token string `json:"token"` PID int `json:"pid"` PPID int `json:"ppid"` } // ProcessList a list of current processes func ProcessList() (list []ProcEntry) { var ( err error p ProcEntry ) procs, err := process.Processes() if err != nil { log.Printf("ProcessList: %v", err) return nil } // loop through processes for _, proc := range procs { p.Cmdline, err = proc.Cmdline() if err != nil { log.Printf("proc cmdline: %v", err) p.Cmdline = "unknown_cmdline" } p.Name, err = proc.Name() if err != nil { log.Printf("proc name: %v", err) p.Name = "unknown_proc" } p.PID = int(proc.Pid) i, err := proc.Ppid() p.PPID = int(i) if err != nil { log.Printf("proc ppid: %v", err) p.PPID = 0 } p.Token, err = proc.Username() if err != nil { log.Printf("proc token: %v", err) uids, err := proc.Uids() if err != nil { p.Token = "<PASSWORD>" } for i, uid := range uids { p.Token += strconv.Itoa(int(uid)) if i != len(uids)-1 { p.Token += ", " } } } list = append(list, p) } return } // ProcExe read exe path of a process func ProcExe(pid int) string { proc, err := process.NewProcess(int32(pid)) if err != nil || proc == nil { log.Printf("No such process (%d): %v", pid, err) return "dead_process" } exe, err := proc.Exe() if err != nil { return fmt.Sprintf("err_%v", err) } exe = strings.Fields(exe)[0] // get rid of other stuff return exe } // ProcCmdline read cmdline data of a process func ProcCmdline(pid int) string { proc, err := process.NewProcess(int32(pid)) if err != nil || proc == nil { log.Printf("No such process (%d): %v", pid, err) return "dead_process" } cmdline, err := proc.Cmdline() if err != nil { return fmt.Sprintf("err_%v", err) } return cmdline } // IsProcAlive check if a process name exists, returns its process(es) 
func IsProcAlive(procName string) (alive bool, procs []*os.Process) { allprocs, err := gops.Processes() if err != nil { log.Println(err) return } for _, p := range allprocs { if p.Executable() == procName { alive = true proc, err := os.FindProcess(p.Pid()) if err != nil { log.Println(err) } procs = append(procs, proc) } } return } // PidOf PID of a process name func PidOf(name string) []int { pids := make([]int, 1) allprocs, err := gops.Processes() if err != nil { log.Println(err) return pids } for _, p := range allprocs { if p.Executable() == name { proc, err := os.FindProcess(p.Pid()) if err != nil { log.Println(err) } pids = append(pids, proc.Pid) } } return pids } // sleep for a random interval func TakeASnap() { interval := time.Duration(RandInt(100, 2000)) time.Sleep(interval * time.Millisecond) }
def single_dir_expand(matches): if len(matches) == 1 and os.path.isdir(matches[0]): d = matches[0] if d[-1] in ['/','\\']: d = d[:-1] subdirs = [p for p in os.listdir(d) if os.path.isdir( d + '/' + p) and not p.startswith('.')] if subdirs: matches = [ (d + '/' + p) for p in subdirs ] return single_dir_expand(matches) else: return matches else: return matches
<reponame>michaelbonadio/jesterj<gh_stars>0
/*
 * Copyright 2016 Needham Software LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jesterj.ingest.model.impl;

/*
 * Created with IntelliJ IDEA.
 * User: gus
 * Date: 3/17/16
 */

import com.copyright.easiertest.Mock;
import com.copyright.easiertest.ObjectUnderTest;
import org.jesterj.ingest.model.ConfiguredBuildable;
import org.jesterj.ingest.model.DocumentProcessor;
import org.jesterj.ingest.model.Plan;
import org.jesterj.ingest.model.Step;
import org.jesterj.ingest.processors.CopyField;
import org.jesterj.ingest.processors.ElasticSender;
import org.jesterj.ingest.processors.LogAndDrop;
import org.jesterj.ingest.processors.SendToSolrCloudProcessor;
import org.jesterj.ingest.processors.SimpleDateTimeReformatter;
import org.jesterj.ingest.processors.TikaProcessor;
import org.jesterj.ingest.routers.DuplicateToAll;
import org.jesterj.ingest.scanners.SimpleFileWatchScanner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;

import static com.copyright.easiertest.EasierMocks.*;
import static org.junit.Assert.*;

/**
 * Unit tests for {@link StepImpl}: builder behaviour and side-effect
 * discovery, both for isolated steps and within a full ingest plan.
 */
public class StepImplTest {
  // Step names reused when wiring the Shakespeare test plan below.
  private static final String ACCESSED = "format_accessed_date";
  private static final String CREATED = "format_created_date";
  private static final String MODIFIED = "format_modified_date";
  private static final String SIZE_TO_INT = "size_to_int_step";
  private static final String TIKA = "tika_step";
  private static final String SHAKESPEARE = "Shakespeare_scanner";

  @ObjectUnderTest StepImpl step;
  Step testStep;
  @Mock private ConfiguredBuildable<? extends DocumentProcessor> mockProcessorBuilder;
  @Mock private DocumentProcessor mockProcessor;

  public StepImplTest() {
    prepareMocks(this);
  }

  @Before
  public void setUp() {
    reset();
  }

  @After
  public void tearDown() {
    verify();
  }

  @Test
  public void testBuildAStep() {
    replay();
    StepImpl.Builder builder = new StepImpl.Builder();
    builder.batchSize(5);
    // NOTE(review): no assertions -- the test only checks build() does not
    // throw for a minimal configuration.
    StepImpl built = builder.build();
  }

  /**
   * Test the oddball case where we have a final step with no side effects
   * (i.e. a waste of time!) Bad design, but should not throw an error since
   * it could be a custom step that's actually got side effects but perhaps is
   * idempotent and doesn't need to be tracked anyway, or is optional or
   * best effort
   */
  @Test
  public void testSideEffectsNoneLastStep() {
    replay();
    testStep = new StepImpl.Builder().withProcessor(new LogAndDrop.Builder().named("foo")).build();
    Step[] possibleSideEffects = testStep.getPossibleSideEffects();
    assertEquals(0, possibleSideEffects.length);
  }

  // A terminal sender step counts as its own (single) side effect.
  @Test
  public void testSideEffectsLastStep() {
    replay();
    testStep = new StepImpl.Builder().withProcessor(new ElasticSender.Builder().named("foo")).build();
    Step[] possibleSideEffects = testStep.getPossibleSideEffects();
    assertEquals(1, possibleSideEffects.length);
  }

  // From the scanner's point of view the plan fans out to two sinks
  // (Solr and Elastic), hence two possible side effects.
  @Test
  public void testShakespearePlan() {
    replay();
    testStep = getPlan().findStep(SHAKESPEARE);
    Step[] possibleSideEffects = testStep.getPossibleSideEffects();
    assertEquals(2, possibleSideEffects.length);
  }

  /**
   * Builds the reference test plan: a file scanner feeding a linear chain of
   * date/size normalization steps into Tika, which then duplicates each
   * document to both a Solr sender and an Elastic sender.
   */
  public Plan getPlan() {
    PlanImpl.Builder planBuilder = new PlanImpl.Builder();
    SimpleFileWatchScanner.Builder scanner = new SimpleFileWatchScanner.Builder();
    StepImpl.Builder formatCreated = new StepImpl.Builder();
    StepImpl.Builder formatModified = new StepImpl.Builder();
    StepImpl.Builder formatAccessed = new StepImpl.Builder();
    StepImpl.Builder renameFileszieToInteger = new StepImpl.Builder();
    StepImpl.Builder tikaBuilder = new StepImpl.Builder();
    StepImpl.Builder sendToSolrBuilder = new StepImpl.Builder();
    StepImpl.Builder sendToElasticBuilder = new StepImpl.Builder();
    File testDocs = new File("data");

    scanner
        .named(SHAKESPEARE)
        .withRoot(testDocs)
        .scanFreqMS(100);
    formatCreated
        .named(CREATED)
        .withProcessor(
            new SimpleDateTimeReformatter.Builder()
                .named("format_created")
                .from("created")
                .into("created_dt")
        );
    formatModified
        .named(MODIFIED)
        .withProcessor(
            new SimpleDateTimeReformatter.Builder()
                .named("format_modified")
                .from("modified")
                .into("modified_dt")
        );
    formatAccessed
        .named(ACCESSED)
        .withProcessor(
            new SimpleDateTimeReformatter.Builder()
                .named("format_accessed")
                .from("accessed")
                .into("accessed_dt")
        );
    renameFileszieToInteger
        .named(SIZE_TO_INT)
        .withProcessor(
            new CopyField.Builder()
                .named("copy_size_to_int")
                .from("file_size")
                .into("file_size_i")
                .retainingOriginal(false)
        );
    tikaBuilder
        .named(TIKA)
        // DuplicateToAll routes every document to BOTH downstream senders.
        .routingBy(new DuplicateToAll.Builder()
            .named("duplicator"))
        .withProcessor(new TikaProcessor.Builder()
            .named("tika")
        );
    sendToSolrBuilder
        .named("solr_sender")
        .withProcessor(
            new SendToSolrCloudProcessor.Builder()
                .withZookeeper("localhost:9983")
                .usingCollection("jjtest")
                .placingTextContentIn("_text_")
                .withDocFieldsIn(".fields")
        );
//    String home = Main.JJ_DIR + System.getProperty("file.separator") + "jj_elastic_client_node";
    sendToElasticBuilder
        .named("elastic_sender")
//        .withProcessor(
//            new ElasticNodeSender.Builder()
//                .named("elastic_node_processor")
//                .usingCluster("elasticsearch")
//                .nodeName("jj_elastic_client_node")
//                .locatedInDir(home)
//                .forIndex("shakespeare")
//                .forObjectType("work")
        .withProcessor(
            new ElasticSender.Builder()
                .named("elastic_node_processor")
                .forIndex("shakespeare")
                .forObjectType("work")
                .withServer("localhost", 9300)
            //.withServer("es.example.com", "9300")  // can have multiple servers
        );
    planBuilder
        .named("myPlan")
        .withIdField("id")
        .addStep(scanner)
        .addStep(formatCreated, SHAKESPEARE)
        .addStep(formatModified, CREATED)
        .addStep(formatAccessed, MODIFIED)
        .addStep(renameFileszieToInteger, ACCESSED)
        .addStep(tikaBuilder, SIZE_TO_INT);
    planBuilder.addStep(sendToSolrBuilder, TIKA);
    planBuilder.addStep(sendToElasticBuilder, TIKA);
    return planBuilder.build();
  }
}
<filename>aoj/5/AOJ0555.cpp // // AOJ0555.cpp // // // Created by knuu on 2014/06/11. // // #include <iostream> #include <cstring> using namespace std; int main() { int N,ans=0; char s[11],ring[11]; cin>>s>>N; for (int i=0; i<N; i++) { cin>>ring; for (int j=0; j<strlen(ring); j++) { int k; for (k=0; k<strlen(s); k++) { if (s[k]!=ring[(j+k)%strlen(ring)]) break; } if (k==strlen(s)) { ans++; break; } } } cout<<ans<<endl; }
<reponame>gabrieldtc/CursoEmVideoPython<filename>PyCharm/Desafios/Mundo1/desafio29.py # escreva um programa que leia a velocidade de um carro se ultrapassar de 80Km/h mostar uma mensagem dizendo # que ele foi multado a multa vai custar R$ 7,00 a cada quilimetro ultrapassado velo = int(input('Qual a velocidade que você passou no radar? ')) multa = 0 if velo > 80: multa = float(velo - 80) * 7 print('Você passou a 80km/h e o valor da multa é R${:.2f}'.format(multa)) print('Tenha um bom dia! Dirija com segurança.')
<gh_stars>1-10 package org.usfirst.frc.team5104.robot; public class LogFile { }
/**
 * Within a search window around the subimages detect most likely match and
 * thus motion.
 *
 * @author Sina Samangooei ([email protected])
 *
 */
public static class TEMPLATE_MATCH extends MotionEstimatorAlgorithm {
	// Fraction of the template size used as the search-window border.
	private float searchProp;
	// Template-matching score mode used to compare candidate positions.
	private Mode mode;

	/**
	 * Defaults to allowing a maximum of templatesize/2 movement using
	 * {@link Mode#NORM_SUM_SQUARED_DIFFERENCE}.
	 * (Doc fix: the original javadoc said CORRELATION, but the constructor
	 * actually installs NORM_SUM_SQUARED_DIFFERENCE.)
	 */
	public TEMPLATE_MATCH() {
		this.searchProp = .5f;
		this.mode = TemplateMatcher.Mode.NORM_SUM_SQUARED_DIFFERENCE;
	}

	/**
	 * Given the template's size, search around a border of size
	 * template*searchWindowBorderProp
	 *
	 * @param searchWindowBorderProp
	 * @param mode
	 *            the matching mode
	 */
	public TEMPLATE_MATCH(float searchWindowBorderProp, Mode mode) {
		this.searchProp = searchWindowBorderProp;
		this.mode = mode;
	}

	/**
	 * Matches the current sub-frame template inside a proportional search
	 * window of the previous frame and returns the displacement from the
	 * window's centre to the best response (zero vector when no valid
	 * response is found).
	 */
	@Override
	Point2d estimateMotion(VideoSubFrame<FImage> img1sub,
			VideoSubFrame<FImage>... imagesSub)
	{
		final VideoFrame<FImage> current = img1sub.extract();
		final VideoFrame<FImage> prev = imagesSub[0];
		final Rectangle prevSearchRect = imagesSub[0].roi;

		// Shrink the previous ROI by searchProp and centre it, giving the
		// window in which the template is allowed to move.
		final int sw = (int) (prevSearchRect.width * this.searchProp);
		final int sh = (int) (prevSearchRect.height * this.searchProp);
		final int sx = (int) (prevSearchRect.x + ((prevSearchRect.width - sw) / 2.f));
		final int sy = (int) (prevSearchRect.y + ((prevSearchRect.height - sh) / 2.f));

		final Rectangle searchRect = new Rectangle(sx, sy, sw, sh);
		// System.out.println("Search window: " + searchRect);
		// MBFImage searchRectDraw = new
		// MBFImage(img1sub.frame.clone(),img1sub.frame.clone(),img1sub.frame.clone());
		// searchRectDraw.drawShape(searchRect, RGBColour.RED);
		// searchRectDraw.drawPoint(img1sub.roi.getCOG(), RGBColour.GREEN,
		// 3);
		final TemplateMatcher matcher = new TemplateMatcher(current.frame, mode, searchRect);
		matcher.analyseImage(prev.frame);
		// Only the single best response is used.
		final FValuePixel[] responses = matcher.getBestResponses(1);
		final FValuePixel firstBest = responses[0];
		// for (FValuePixel bestRespose : responses) {
		// if(bestRespose == null) continue;
		// if(firstBest == null) firstBest = bestRespose;
		// bestRespose.translate(current.frame.width/2,
		// current.frame.height/2);
		//
		// searchRectDraw.drawPoint(bestRespose, RGBColour.BLUE, 3);
		// }
		final Point2d centerOfGrid = img1sub.roi.calculateCentroid();
		// System.out.println("First reponse: " + firstBest );
		// System.out.println("Center of template: " + centerOfGrid);

		// DisplayUtilities.displayName(searchRectDraw, "searchWindow");
		// No usable match: report zero motion rather than failing.
		if (firstBest == null || Float.isNaN(firstBest.value))
			return new Point2dImpl(0, 0);
		// firstBest.translate(current.frame.width/2,
		// current.frame.height/2);
		// System.out.println("First reponse (corrected): " + firstBest );
		// System.out.println("Diff: " + centerOfGrid.minus(firstBest));
		return centerOfGrid.minus(firstBest);
	}
}
import {Injectable, Logger, UnauthorizedException} from '@nestjs/common'; import {InjectModel} from "@nestjs/sequelize"; import {User} from "./user.model"; import {CreateUserDto} from "./dto/create-user.dto"; import * as bcrypt from 'bcryptjs' import {JwtService} from "@nestjs/jwt"; @Injectable() export class UserService { constructor(@InjectModel(User) private userRepository: typeof User, private jwtService: JwtService) { } async createUser(dto: CreateUserDto) { try { const candidate = await User.findOne({where: {email: dto.email}}) if (!candidate) { const hashPassword = await bcrypt.hash(dto.password, 5) const user = await User.create({...dto, password: hashPassword}) return user } else { return {message: 'Пользователь с таким email уже существует'} } } catch (e) { return e } } async login(dto: CreateUserDto,res) { try { const user = await this.validateUser(dto) const token = this.generateToken(user) return token } catch (e) { throw new UnauthorizedException({message: 'Ошибка авторизации, проверьте введенные данные'}) } } private async generateToken(user: User) { const payload = {id: user.id, email: user.email} return { token: this.jwtService.sign(payload), refreshToken: this.jwtService.sign(payload,{ expiresIn: '1m'}) } } private async validateUser(dto: CreateUserDto) { const user = await User.findOne({where: {email: dto.email}}) const password = await bcrypt.compare(dto.password, user.password) if (user && password) { return user } throw new UnauthorizedException({message: 'Некоректный логин или пароль'}) } }
// Parses the prefix descriptions file at path, clears and fills the output // prefixes phone number prefix to description mapping. // Returns true on success. bool ParsePrefixes(const string& path, absl::btree_map<int32, string>* prefixes) { prefixes->clear(); FILE* input = fopen(path.c_str(), "r"); if (!input) { return false; } AutoCloser<FILE> input_closer(&input, fclose); const int kMaxLineLength = 2*1024; vector<char> buffer(kMaxLineLength); vector<char>::iterator begin, end, sep; string prefix, description; int32 prefix_code; while (fgets(&buffer[0], buffer.size(), input)) { begin = buffer.begin(); end = std::find(begin, buffer.end(), '\0'); if (end == begin) { continue; } --end; if (*end != '\n' && !feof(input)) { return false; } for (; begin != end && std::isspace(*begin); ++begin) {} for (; end != begin && std::isspace(*(end - 1)); --end) {} if (begin == end || *begin == '#') { continue; } sep = std::find(begin, end, '|'); if (sep == end) { continue; } prefix = string(begin, sep); if (!StrToInt(prefix, &prefix_code)) { return false; } (*prefixes)[prefix_code] = string(sep + 1, end); } return ferror(input) == 0; }
Trajectory estimation for a hybrid rocket This paper presents research work to develop a navigation technique for a class of nano/micro space rockets. Onboard MEMS inertial sensors are used with a view to post-flight trajectory estimation. Combined with synchronized measurements from the combustion engine, which is of the hybrid type and uses N2O and PE, we show how the relative redundancy of the data can be used to calibrate a thrust model. Experimental results are presented and highlight the numerous difficulties that had to be resolved. Quantitative results are provided along with estimates of the achieved accuracy.
<filename>string/z.hpp #pragma region str_z #ifndef STR_Z_HPP #define STR_Z_HPP namespace str { vector<int> z(const string &s) { int n = (int)s.size(); vector<int> _z(n); for (int i = 1, l = 0, r = 0; i < n; i++) { if (i <= r) _z[i] = min(_z[i - l], r - i + 1); while (i + _z[i] < n && s[_z[i]] == s[i + _z[i]]) _z[i]++; if (i + _z[i] - 1 > r) l = i, r = i + _z[i] - 1; } return _z; } } #endif #pragma endregion str_z
package rtmprelay

import (
	"errors"
	"fmt"
	"github.com/livego/av"
	log "github.com/livego/logging"
	"github.com/livego/protocol/httpflvclient"
	"github.com/livego/protocol/rtmp/core"
)

// FlvPull pulls an HTTP-FLV stream and republishes it to an RTMP URL.
// FLV tags arriving from the HTTP client are parsed into RTMP chunk streams
// and handed to a publisher goroutine through csChan.
type FlvPull struct {
	FlvUrl     string
	RtmpUrl    string
	flvclient  *httpflvclient.HttpFlvClient
	rtmpclient *core.ConnClient
	isStart    bool
	// csChan carries parsed tags from HandleFlvData (HTTP reader side) to
	// sendPublishChunkStream (RTMP publisher side).
	csChan        chan *core.ChunkStream
	isFlvHdrReady bool
	databuffer    []byte
	dataNeedLen   int
	testFlag      bool
}

// FLV_HEADER_LENGTH is the size of the FLV file header (9 bytes) plus the
// first, always-zero PreviousTagSize field (4 bytes).
const FLV_HEADER_LENGTH = 13

// NewFlvPull builds a relay for the given source FLV URL and target RTMP URL.
func NewFlvPull(flvurl *string, rtmpurl *string) *FlvPull {
	return &FlvPull{
		FlvUrl:  *flvurl,
		RtmpUrl: *rtmpurl,
		isStart: false,
		csChan:  make(chan *core.ChunkStream, 1000),
	}
}

// HandleFlvData parses one complete FLV tag (11-byte tag header followed by
// the payload) into a core.ChunkStream and queues it for publishing.
// The recover guards against sends on a csChan that Stop() already closed.
func (self *FlvPull) HandleFlvData(packet []byte) error {
	defer func() {
		if e := recover(); e != nil {
			log.Errorf("HandleFlvData cs channel has already been closed:%v", e)
			return
		}
	}()
	var cs *core.ChunkStream

	cs = &core.ChunkStream{}

	// FLV tag header layout: [0] type, [1..3] payload size (24-bit BE),
	// [4..6]+[7] timestamp (24-bit BE + extended byte), [8..10] stream id.
	messagetype := packet[0]
	payloadLen := int(packet[1])<<16 + int(packet[2])<<8 + int(packet[3])
	timestamp := int(packet[4])<<16 + int(packet[5])<<8 + int(packet[6]) + int(packet[7])<<24
	streamid := int(packet[8])<<16 + int(packet[9])<<8 + int(packet[10])

	if messagetype == 0x09 {
		// Video tag: first payload byte 0x17 = AVC keyframe, 0x27 = interframe;
		// second byte 0x00 = sequence header (SPS/PPS), 0x01 = NALU.
		if packet[11] == 0x17 && packet[12] == 0x00 {
			//log.Printf("it's pps and sps: messagetype=%d, payloadlen=%d, timestamp=%d, streamid=%d", messagetype, payloadLen, timestamp, streamid)
			cs.TypeID = av.TAG_VIDEO
		} else if packet[11] == 0x17 && packet[12] == 0x01 {
			//log.Printf("it's I frame: messagetype=%d, payloadlen=%d, timestamp=%d, streamid=%d", messagetype, payloadLen, timestamp, streamid)
			cs.TypeID = av.TAG_VIDEO
		} else if packet[11] == 0x27 {
			cs.TypeID = av.TAG_VIDEO
			//log.Printf("it's P frame: messagetype=%d, payloadlen=%d, timestamp=%d, streamid=%d", messagetype, payloadLen, timestamp, streamid)
		}
	} else if messagetype == 0x08 {
		cs.TypeID = av.TAG_AUDIO
		//log.Printf("it's audio: messagetype=%d, payloadlen=%d, timestamp=%d, streamid=%d", messagetype, payloadLen, timestamp, streamid)
	} else if messagetype == 0x12 {
		cs.TypeID = av.MetadatAMF0
		//log.Printf("it's metadata: messagetype=%d, payloadlen=%d, timestamp=%d, streamid=%d", messagetype, payloadLen, timestamp, streamid)
	} else if messagetype == 0xff {
		cs.TypeID = av.MetadataAMF3
	}

	cs.Data = packet[11:]
	cs.Length = uint32(payloadLen)
	cs.StreamID = uint32(streamid)
	cs.Timestamp = uint32(timestamp)

	// NOTE(review): this check can never fire -- cs.Length was just set from
	// payloadLen above.  Presumably it was meant to compare against
	// len(cs.Data); confirm before changing.
	if uint32(payloadLen) != cs.Length {
		errString := fmt.Sprintf("payload length(%d) is not equal to data length(%d)", payloadLen, cs.Length)
		return errors.New(errString)
	}

	self.csChan <- cs
	return nil
}

// sendPublishChunkStream drains csChan and writes each chunk to the RTMP
// connection until the channel is closed by Stop().
func (self *FlvPull) sendPublishChunkStream() {
	for {
		csPacket, ok := <-self.csChan
		if ok {
			self.rtmpclient.Write(*csPacket)
			//log.Printf("type=%d, length=%d, timestamp=%d, error=%v",
			//	csPacket.TypeID, csPacket.Length, csPacket.Timestamp, err)
		} else {
			break
		}
	}
	log.Info("sendPublishChunkStream is ended.")
}

// Start connects both ends (HTTP-FLV pull, RTMP publish) and launches the
// publisher goroutine.  Errors tear down whatever was already set up.
func (self *FlvPull) Start() error {
	if self.isStart {
		errString := fmt.Sprintf("FlvPull(%s->%s) has already started.", self.FlvUrl, self.RtmpUrl)
		return errors.New(errString)
	}
	self.flvclient = httpflvclient.NewHttpFlvClient(self.FlvUrl)
	if self.flvclient == nil {
		errString := fmt.Sprintf("FlvPull(%s) error", self.FlvUrl)
		return errors.New(errString)
	}
	self.rtmpclient = core.NewConnClient()

	// NOTE(review): this replaces the buffered channel created in NewFlvPull
	// with an unbuffered one -- presumably intentional for restarts, but it
	// changes the producer/consumer coupling; confirm.
	self.csChan = make(chan *core.ChunkStream)

	self.isFlvHdrReady = false
	self.databuffer = nil

	err := self.flvclient.Start(self)
	if err != nil {
		log.Errorf("flvclient start error:%v", err)
		close(self.csChan)
		return err
	}

	err = self.rtmpclient.Start(self.RtmpUrl, "publish")
	if err != nil {
		log.Errorf("rtmpclient.Start url=%v error", self.RtmpUrl)
		self.flvclient.Stop()
		close(self.csChan)
		return err
	}

	self.isStart = true
	go self.sendPublishChunkStream()

	return nil
}

// Stop shuts down both clients and closes csChan, which also terminates the
// publisher goroutine.
func (self *FlvPull) Stop() {
	if !self.isStart {
		log.Errorf("FlvPull(%s->%s) has already stoped.", self.FlvUrl, self.RtmpUrl)
		return
	}

	self.flvclient.Stop()
	self.rtmpclient.Close(nil)
	self.isStart = false
	close(self.csChan)
	log.Infof("FlvPull(%s->%s) stoped.", self.FlvUrl, self.RtmpUrl)
}
Paul Pogba 196 mins per goal 2749 mins played 14 Goals scored 10 Assists Shots on target Total 70% 52 74 Anthony Martial 156 mins per goal 1716 mins played 11 Goals scored 2 Assists Shots on target Total 72% 21 29 Marcus Rashford 209 mins per goal 2089 mins played 10 Goals scored 6 Assists Shots on target Total 59% 38 64 Romelu Lukaku 194 mins per goal 1749 mins played 9 Goals scored 2 Assists Shots on target Total 73% 24 33 Jesse Lingard 275 mins per goal 1377 mins played 5 Goals scored 2 Assists Shots on target Total 60% 12 20 Juan Mata 297 mins per goal 1487 mins played 5 Goals scored 2 Assists Shots on target Total 45% 9 20 Ander Herrera 465 mins per goal 1394 mins played 3 Goals scored 3 Assists Shots on target Total 53% 9 17 Marouane Fellaini 165 mins per goal 329 mins played 2 Goals scored 0 Assists Shots on target Total 40% 2 5 Alexis Sánchez 408 mins per goal 815 mins played 2 Goals scored 4 Assists Shots on target Total 77% 10 13 Victor Lindelöf 1889 mins per goal 1889 mins played 1 Goals scored 0 Assists Shots on target Total 20% 1 5 Nemanja Matic 2037 mins per goal 2037 mins played 1 Goals scored 0 Assists Shots on target Total 50% 3 6 Fred 565 mins per goal 565 mins played 1 Goals scored 0 Assists Shots on target Total 22% 2 9 Luke Shaw 1872 mins per goal 1872 mins played 1 Goals scored 1 Assists Shots on target Total 58% 7 12 Chris Smalling 1318 mins per goal 1318 mins played 1 Goals scored 0 Assists Shots on target Total 33% 2 6 Ashley Young 1747 mins per goal 1747 mins played 1 Goals scored 2 Assists Shots on target Total 29% 2 7 The goals shown for each player include any goals they may have scored in the following competitions in the current season: Premier League, The Emirates FA Cup, UEFA Champions League, Carabao Cup
/**
 * Collects the set of column names referenced by the given transform expressions.
 *
 * @param expressions transform expression trees to scan
 * @return the union of all column names used by the expressions
 */
public static Set<String> extractColumnsFromExpressions(Set<TransformExpressionTree> expressions) {
  Set<String> columns = new HashSet<>();
  expressions.forEach(expression -> expression.getColumns(columns));
  return columns;
}
import { Component, Output, EventEmitter } from '@angular/core';

/**
 * Form component that collects the fields of a new task and emits the task to
 * the parent component via the `addTask` output.
 */
@Component({
  selector: 'app-new-task',
  templateUrl: './new-task.component.html',
  styleUrls: ['./new-task.component.css'],
})
export class NewTaskComponent {
  // Monotonically increasing id shared by all component instances.
  private static unique_id: number = 0;

  // Initialize the form fields so validation below sees '' / null instead of
  // undefined (the original left them undefined, so the `== ''` checks never
  // fired for an untouched form).
  title: string = '';
  user: string = '';
  date: Date = null;
  status: string = 'started';
  // True when the last submit attempt had a missing field (drives the UI hint).
  isEmpty: boolean = false;

  @Output() addTask: EventEmitter<{
    id: string;
    title: string;
    user: string;
    date: Date;
    status: string;
    select: boolean;
  }> = new EventEmitter();

  // Bumps the shared id counter (`this` in a static method is the class).
  private static newID() {
    this.unique_id += 1;
  }

  /**
   * Validates the form and emits the task. Uses falsy checks so missing
   * (undefined/null/empty) fields are all rejected; the original compared
   * with `== ''`, which let undefined fields pass validation.
   * Returns false when validation fails.
   */
  newTask() {
    if (!this.title || !this.user || !this.date || !this.status) {
      this.isEmpty = true;
      return false;
    }
    this.addTask.emit({
      id: NewTaskComponent.unique_id.toString(),
      title: this.title,
      user: this.user,
      date: this.date,
      status: this.status,
      select: false,
    });
    NewTaskComponent.newID();
    this.clear();
    this.isEmpty = false;
  }

  /** Resets the form to its initial state. */
  clear() {
    this.title = '';
    this.user = '';
    this.date = null;
    this.status = 'started';
  }
}
package org.idreambox.pageModel;

import java.sql.Timestamp;

/**
 * Page model (DTO) for a user-uploaded file record.
 *
 * <p>Plain JavaBean: one private field per attribute plus getter/setter pairs,
 * no behavior. Field semantics below are inferred from their names — confirm
 * against the persistence mapping.
 */
public class Pfile {
    private String id;                // record identifier (primary key)
    private User user;                // owning user
    private String fname;             // file name
    private String url;               // storage URL/path of the file
    private Integer isdelete;         // soft-delete flag — presumably 0/1; TODO confirm
    private String type;              // file type/category
    private Timestamp createdatetime; // creation timestamp
    private Timestamp modifydatetime; // last-modification timestamp

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public User getUser() {
        return user;
    }

    public void setUser(User user) {
        this.user = user;
    }

    public String getFname() {
        return fname;
    }

    public void setFname(String fname) {
        this.fname = fname;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public Integer getIsdelete() {
        return isdelete;
    }

    public void setIsdelete(Integer isdelete) {
        this.isdelete = isdelete;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public Timestamp getCreatedatetime() {
        return createdatetime;
    }

    public void setCreatedatetime(Timestamp createdatetime) {
        this.createdatetime = createdatetime;
    }

    public Timestamp getModifydatetime() {
        return modifydatetime;
    }

    public void setModifydatetime(Timestamp modifydatetime) {
        this.modifydatetime = modifydatetime;
    }
}
/* * Copyright 2017 WebAssembly Community Group participants * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "src/binary-reader-logging.h" #include <cinttypes> #include "src/stream.h" namespace wabt { #define INDENT_SIZE 2 #define LOGF_NOINDENT(...) stream_->Writef(__VA_ARGS__) #define LOGF(...) \ do { \ WriteIndent(); \ LOGF_NOINDENT(__VA_ARGS__); \ } while (0) namespace { void SPrintLimits(char* dst, size_t size, const Limits* limits) { int result; if (limits->has_max) { result = wabt_snprintf(dst, size, "initial: %" PRIu64 ", max: %" PRIu64, limits->initial, limits->max); } else { result = wabt_snprintf(dst, size, "initial: %" PRIu64, limits->initial); } WABT_USE(result); assert(static_cast<size_t>(result) < size); } } // end anonymous namespace BinaryReaderLogging::BinaryReaderLogging(Stream* stream, BinaryReaderDelegate* forward) : stream_(stream), reader_(forward), indent_(0) {} void BinaryReaderLogging::Indent() { indent_ += INDENT_SIZE; } void BinaryReaderLogging::Dedent() { indent_ -= INDENT_SIZE; assert(indent_ >= 0); } void BinaryReaderLogging::WriteIndent() { static char s_indent[] = " " " "; static const size_t s_indent_len = sizeof(s_indent) - 1; size_t i = indent_; while (i > s_indent_len) { stream_->WriteData(s_indent, s_indent_len); i -= s_indent_len; } if (i > 0) { stream_->WriteData(s_indent, indent_); } } void BinaryReaderLogging::LogType(Type type) { if (IsTypeIndex(type)) { LOGF_NOINDENT("funcidx[%d]", static_cast<int>(type)); } else { 
LOGF_NOINDENT("%s", GetTypeName(type)); } } void BinaryReaderLogging::LogTypes(Index type_count, Type* types) { LOGF_NOINDENT("["); for (Index i = 0; i < type_count; ++i) { LogType(types[i]); if (i != type_count - 1) { LOGF_NOINDENT(", "); } } LOGF_NOINDENT("]"); } void BinaryReaderLogging::LogTypes(TypeVector& types) { LogTypes(types.size(), types.data()); } bool BinaryReaderLogging::OnError(const Error& error) { return reader_->OnError(error); } void BinaryReaderLogging::OnSetState(const State* s) { BinaryReaderDelegate::OnSetState(s); reader_->OnSetState(s); } Result BinaryReaderLogging::BeginModule(uint32_t version) { LOGF("BeginModule(version: %u)\n", version); Indent(); return reader_->BeginModule(version); } Result BinaryReaderLogging::BeginSection(BinarySection section_type, Offset size) { return reader_->BeginSection(section_type, size); } Result BinaryReaderLogging::BeginCustomSection(Offset size, string_view section_name) { LOGF("BeginCustomSection('" PRIstringview "', size: %" PRIzd ")\n", WABT_PRINTF_STRING_VIEW_ARG(section_name), size); Indent(); return reader_->BeginCustomSection(size, section_name); } Result BinaryReaderLogging::OnType(Index index, Index param_count, Type* param_types, Index result_count, Type* result_types) { LOGF("OnType(index: %" PRIindex ", params: ", index); LogTypes(param_count, param_types); LOGF_NOINDENT(", results: "); LogTypes(result_count, result_types); LOGF_NOINDENT(")\n"); return reader_->OnType(index, param_count, param_types, result_count, result_types); } Result BinaryReaderLogging::OnImport(Index index, string_view module_name, string_view field_name) { LOGF("OnImport(index: %" PRIindex ", module: \"" PRIstringview "\", field: \"" PRIstringview "\")\n", index, WABT_PRINTF_STRING_VIEW_ARG(module_name), WABT_PRINTF_STRING_VIEW_ARG(field_name)); return reader_->OnImport(index, module_name, field_name); } Result BinaryReaderLogging::OnImportFunc(Index import_index, string_view module_name, string_view field_name, Index 
func_index, Index sig_index) { LOGF("OnImportFunc(import_index: %" PRIindex ", func_index: %" PRIindex ", sig_index: %" PRIindex ")\n", import_index, func_index, sig_index); return reader_->OnImportFunc(import_index, module_name, field_name, func_index, sig_index); } Result BinaryReaderLogging::OnImportTable(Index import_index, string_view module_name, string_view field_name, Index table_index, Type elem_type, const Limits* elem_limits) { char buf[100]; SPrintLimits(buf, sizeof(buf), elem_limits); LOGF("OnImportTable(import_index: %" PRIindex ", table_index: %" PRIindex ", elem_type: %s, %s)\n", import_index, table_index, GetTypeName(elem_type), buf); return reader_->OnImportTable(import_index, module_name, field_name, table_index, elem_type, elem_limits); } Result BinaryReaderLogging::OnImportMemory(Index import_index, string_view module_name, string_view field_name, Index memory_index, const Limits* page_limits) { char buf[100]; SPrintLimits(buf, sizeof(buf), page_limits); LOGF("OnImportMemory(import_index: %" PRIindex ", memory_index: %" PRIindex ", %s)\n", import_index, memory_index, buf); return reader_->OnImportMemory(import_index, module_name, field_name, memory_index, page_limits); } Result BinaryReaderLogging::OnImportGlobal(Index import_index, string_view module_name, string_view field_name, Index global_index, Type type, bool mutable_) { LOGF("OnImportGlobal(import_index: %" PRIindex ", global_index: %" PRIindex ", type: %s, mutable: " "%s)\n", import_index, global_index, GetTypeName(type), mutable_ ? 
"true" : "false"); return reader_->OnImportGlobal(import_index, module_name, field_name, global_index, type, mutable_); } Result BinaryReaderLogging::OnImportEvent(Index import_index, string_view module_name, string_view field_name, Index event_index, Index sig_index) { LOGF("OnImportEvent(import_index: %" PRIindex ", event_index: %" PRIindex ", sig_index: %" PRIindex ")\n", import_index, event_index, sig_index); return reader_->OnImportEvent(import_index, module_name, field_name, event_index, sig_index); } Result BinaryReaderLogging::OnTable(Index index, Type elem_type, const Limits* elem_limits) { char buf[100]; SPrintLimits(buf, sizeof(buf), elem_limits); LOGF("OnTable(index: %" PRIindex ", elem_type: %s, %s)\n", index, GetTypeName(elem_type), buf); return reader_->OnTable(index, elem_type, elem_limits); } Result BinaryReaderLogging::OnMemory(Index index, const Limits* page_limits) { char buf[100]; SPrintLimits(buf, sizeof(buf), page_limits); LOGF("OnMemory(index: %" PRIindex ", %s)\n", index, buf); return reader_->OnMemory(index, page_limits); } Result BinaryReaderLogging::BeginGlobal(Index index, Type type, bool mutable_) { LOGF("BeginGlobal(index: %" PRIindex ", type: %s, mutable: %s)\n", index, GetTypeName(type), mutable_ ? 
"true" : "false"); return reader_->BeginGlobal(index, type, mutable_); } Result BinaryReaderLogging::OnExport(Index index, ExternalKind kind, Index item_index, string_view name) { LOGF("OnExport(index: %" PRIindex ", kind: %s, item_index: %" PRIindex ", name: \"" PRIstringview "\")\n", index, GetKindName(kind), item_index, WABT_PRINTF_STRING_VIEW_ARG(name)); return reader_->OnExport(index, kind, item_index, name); } Result BinaryReaderLogging::BeginFunctionBody(Index value, Offset size) { LOGF("BeginFunctionBody(%" PRIindex ", size:%" PRIzd ")\n", value, size); return reader_->BeginFunctionBody(value, size); } Result BinaryReaderLogging::OnLocalDecl(Index decl_index, Index count, Type type) { LOGF("OnLocalDecl(index: %" PRIindex ", count: %" PRIindex ", type: %s)\n", decl_index, count, GetTypeName(type)); return reader_->OnLocalDecl(decl_index, count, type); } Result BinaryReaderLogging::OnBlockExpr(Type sig_type) { LOGF("OnBlockExpr(sig: "); LogType(sig_type); LOGF_NOINDENT(")\n"); return reader_->OnBlockExpr(sig_type); } Result BinaryReaderLogging::OnBrExpr(Index depth) { LOGF("OnBrExpr(depth: %" PRIindex ")\n", depth); return reader_->OnBrExpr(depth); } Result BinaryReaderLogging::OnBrIfExpr(Index depth) { LOGF("OnBrIfExpr(depth: %" PRIindex ")\n", depth); return reader_->OnBrIfExpr(depth); } Result BinaryReaderLogging::OnBrTableExpr(Index num_targets, Index* target_depths, Index default_target_depth) { LOGF("OnBrTableExpr(num_targets: %" PRIindex ", depths: [", num_targets); for (Index i = 0; i < num_targets; ++i) { LOGF_NOINDENT("%" PRIindex, target_depths[i]); if (i != num_targets - 1) { LOGF_NOINDENT(", "); } } LOGF_NOINDENT("], default: %" PRIindex ")\n", default_target_depth); return reader_->OnBrTableExpr(num_targets, target_depths, default_target_depth); } Result BinaryReaderLogging::OnF32ConstExpr(uint32_t value_bits) { float value; memcpy(&value, &value_bits, sizeof(value)); LOGF("OnF32ConstExpr(%g (0x04%x))\n", value, value_bits); return 
reader_->OnF32ConstExpr(value_bits); } Result BinaryReaderLogging::OnF64ConstExpr(uint64_t value_bits) { double value; memcpy(&value, &value_bits, sizeof(value)); LOGF("OnF64ConstExpr(%g (0x08%" PRIx64 "))\n", value, value_bits); return reader_->OnF64ConstExpr(value_bits); } Result BinaryReaderLogging::OnV128ConstExpr(v128 value_bits) { LOGF("OnV128ConstExpr(0x%08x 0x%08x 0x%08x 0x%08x)\n", value_bits.v[0], value_bits.v[1], value_bits.v[2], value_bits.v[3]); return reader_->OnV128ConstExpr(value_bits); } Result BinaryReaderLogging::OnI32ConstExpr(uint32_t value) { LOGF("OnI32ConstExpr(%u (0x%x))\n", value, value); return reader_->OnI32ConstExpr(value); } Result BinaryReaderLogging::OnI64ConstExpr(uint64_t value) { LOGF("OnI64ConstExpr(%" PRIu64 " (0x%" PRIx64 "))\n", value, value); return reader_->OnI64ConstExpr(value); } Result BinaryReaderLogging::OnIfExpr(Type sig_type) { LOGF("OnIfExpr(sig: "); LogType(sig_type); LOGF_NOINDENT(")\n"); return reader_->OnIfExpr(sig_type); } Result BinaryReaderLogging::OnLoopExpr(Type sig_type) { LOGF("OnLoopExpr(sig: "); LogType(sig_type); LOGF_NOINDENT(")\n"); return reader_->OnLoopExpr(sig_type); } Result BinaryReaderLogging::OnTryExpr(Type sig_type) { LOGF("OnTryExpr(sig: "); LogType(sig_type); LOGF_NOINDENT(")\n"); return reader_->OnTryExpr(sig_type); } Result BinaryReaderLogging::OnSimdLaneOpExpr(Opcode opcode, uint64_t value) { LOGF("OnSimdLaneOpExpr (lane: %" PRIu64 ")\n", value); return reader_->OnSimdLaneOpExpr(opcode, value); } Result BinaryReaderLogging::OnSimdShuffleOpExpr(Opcode opcode, v128 value) { LOGF("OnSimdShuffleOpExpr (lane: 0x%08x %08x %08x %08x)\n", value.v[0], value.v[1], value.v[2], value.v[3]); return reader_->OnSimdShuffleOpExpr(opcode, value); } Result BinaryReaderLogging::BeginElemSegment(Index index, Index table_index, bool passive, Type elem_type) { LOGF("BeginElemSegment(index: %" PRIindex ", table_index: %" PRIindex ", passive: %s, elem_type: %s)\n", index, table_index, passive ? 
"true" : "false", GetTypeName(elem_type)); return reader_->BeginElemSegment(index, table_index, passive, elem_type); } Result BinaryReaderLogging::OnDataSegmentData(Index index, const void* data, Address size) { LOGF("OnDataSegmentData(index:%" PRIindex ", size:%" PRIaddress ")\n", index, size); return reader_->OnDataSegmentData(index, data, size); } Result BinaryReaderLogging::OnModuleNameSubsection(Index index, uint32_t name_type, Offset subsection_size) { LOGF("OnModuleNameSubsection(index:%" PRIindex ", nametype:%u, size:%" PRIzd ")\n", index, name_type, subsection_size); return reader_->OnModuleNameSubsection(index, name_type, subsection_size); } Result BinaryReaderLogging::OnModuleName(string_view name) { LOGF("OnModuleName(name: \"" PRIstringview "\")\n", WABT_PRINTF_STRING_VIEW_ARG(name)); return reader_->OnModuleName(name); } Result BinaryReaderLogging::OnFunctionNameSubsection(Index index, uint32_t name_type, Offset subsection_size) { LOGF("OnFunctionNameSubsection(index:%" PRIindex ", nametype:%u, size:%" PRIzd ")\n", index, name_type, subsection_size); return reader_->OnFunctionNameSubsection(index, name_type, subsection_size); } Result BinaryReaderLogging::OnFunctionName(Index index, string_view name) { LOGF("OnFunctionName(index: %" PRIindex ", name: \"" PRIstringview "\")\n", index, WABT_PRINTF_STRING_VIEW_ARG(name)); return reader_->OnFunctionName(index, name); } Result BinaryReaderLogging::OnLocalNameSubsection(Index index, uint32_t name_type, Offset subsection_size) { LOGF("OnLocalNameSubsection(index:%" PRIindex ", nametype:%u, size:%" PRIzd ")\n", index, name_type, subsection_size); return reader_->OnLocalNameSubsection(index, name_type, subsection_size); } Result BinaryReaderLogging::OnLocalName(Index func_index, Index local_index, string_view name) { LOGF("OnLocalName(func_index: %" PRIindex ", local_index: %" PRIindex ", name: \"" PRIstringview "\")\n", func_index, local_index, WABT_PRINTF_STRING_VIEW_ARG(name)); return 
reader_->OnLocalName(func_index, local_index, name); } Result BinaryReaderLogging::OnInitExprF32ConstExpr(Index index, uint32_t value_bits) { float value; memcpy(&value, &value_bits, sizeof(value)); LOGF("OnInitExprF32ConstExpr(index: %" PRIindex ", value: %g (0x04%x))\n", index, value, value_bits); return reader_->OnInitExprF32ConstExpr(index, value_bits); } Result BinaryReaderLogging::OnInitExprF64ConstExpr(Index index, uint64_t value_bits) { double value; memcpy(&value, &value_bits, sizeof(value)); LOGF("OnInitExprF64ConstExpr(index: %" PRIindex " value: %g (0x08%" PRIx64 "))\n", index, value, value_bits); return reader_->OnInitExprF64ConstExpr(index, value_bits); } Result BinaryReaderLogging::OnInitExprV128ConstExpr(Index index, v128 value_bits) { LOGF("OnInitExprV128ConstExpr(index: %" PRIindex " value: ( 0x%08x 0x%08x 0x%08x 0x%08x))\n", index, value_bits.v[0], value_bits.v[1], value_bits.v[2], value_bits.v[3]); return reader_->OnInitExprV128ConstExpr(index, value_bits); } Result BinaryReaderLogging::OnInitExprI32ConstExpr(Index index, uint32_t value) { LOGF("OnInitExprI32ConstExpr(index: %" PRIindex ", value: %u)\n", index, value); return reader_->OnInitExprI32ConstExpr(index, value); } Result BinaryReaderLogging::OnInitExprI64ConstExpr(Index index, uint64_t value) { LOGF("OnInitExprI64ConstExpr(index: %" PRIindex ", value: %" PRIu64 ")\n", index, value); return reader_->OnInitExprI64ConstExpr(index, value); } Result BinaryReaderLogging::OnDylinkInfo(uint32_t mem_size, uint32_t mem_align, uint32_t table_size, uint32_t table_align) { LOGF( "OnDylinkInfo(mem_size: %u, mem_align: %u, table_size: %u, table_align: " "%u)\n", mem_size, mem_align, table_size, table_align); return reader_->OnDylinkInfo(mem_size, mem_align, table_size, table_align); } Result BinaryReaderLogging::OnDylinkNeeded(string_view so_name) { LOGF("OnDylinkNeeded(name: " PRIstringview ")\n", WABT_PRINTF_STRING_VIEW_ARG(so_name)); return reader_->OnDylinkNeeded(so_name); } Result 
BinaryReaderLogging::OnRelocCount(Index count, Index section_index) { LOGF("OnRelocCount(count: %" PRIindex ", section: %" PRIindex ")\n", count, section_index); return reader_->OnRelocCount(count, section_index); } Result BinaryReaderLogging::OnReloc(RelocType type, Offset offset, Index index, uint32_t addend) { int32_t signed_addend = static_cast<int32_t>(addend); LOGF("OnReloc(type: %s, offset: %" PRIzd ", index: %" PRIindex ", addend: %d)\n", GetRelocTypeName(type), offset, index, signed_addend); return reader_->OnReloc(type, offset, index, addend); } Result BinaryReaderLogging::OnSymbol(Index symbol_index, SymbolType type, uint32_t flags) { LOGF("OnSymbol(type: %s flags: 0x%x)\n", GetSymbolTypeName(type), flags); return reader_->OnSymbol(symbol_index, type, flags); } Result BinaryReaderLogging::OnDataSymbol(Index index, uint32_t flags, string_view name, Index segment, uint32_t offset, uint32_t size) { LOGF("OnDataSymbol(name: " PRIstringview " flags: 0x%x)\n", WABT_PRINTF_STRING_VIEW_ARG(name), flags); return reader_->OnDataSymbol(index, flags, name, segment, offset, size); } Result BinaryReaderLogging::OnFunctionSymbol(Index index, uint32_t flags, string_view name, Index func_index) { LOGF("OnFunctionSymbol(name: " PRIstringview " flags: 0x%x index: %" PRIindex ")\n", WABT_PRINTF_STRING_VIEW_ARG(name), flags, func_index); return reader_->OnFunctionSymbol(index, flags, name, func_index); } Result BinaryReaderLogging::OnGlobalSymbol(Index index, uint32_t flags, string_view name, Index global_index) { LOGF("OnGlobalSymbol(name: " PRIstringview " flags: 0x%x index: %" PRIindex ")\n", WABT_PRINTF_STRING_VIEW_ARG(name), flags, global_index); return reader_->OnGlobalSymbol(index, flags, name, global_index); } Result BinaryReaderLogging::OnSectionSymbol(Index index, uint32_t flags, Index section_index) { LOGF("OnSectionSymbol(flags: 0x%x index: %" PRIindex ")\n", flags, section_index); return reader_->OnSectionSymbol(index, flags, section_index); } Result 
BinaryReaderLogging::OnEventSymbol(Index index, uint32_t flags, string_view name, Index event_index) { LOGF("OnEventSymbol(name: " PRIstringview " flags: 0x%x index: %" PRIindex ")\n", WABT_PRINTF_STRING_VIEW_ARG(name), flags, event_index); return reader_->OnEventSymbol(index, flags, name, event_index); } Result BinaryReaderLogging::OnSegmentInfo(Index index, string_view name, uint32_t alignment, uint32_t flags) { LOGF("OnSegmentInfo(%d name: " PRIstringview ", alignment: %d, flags: 0x%x)\n", index, WABT_PRINTF_STRING_VIEW_ARG(name), alignment, flags); return reader_->OnSegmentInfo(index, name, alignment, flags); } Result BinaryReaderLogging::OnInitFunction(uint32_t priority, Index func_index) { LOGF("OnInitFunction(%d priority: %d)\n", func_index, priority); return reader_->OnInitFunction(priority, func_index); } Result BinaryReaderLogging::OnComdatBegin(string_view name, uint32_t flags, Index count) { LOGF("OnComdatBegin(" PRIstringview ", flags: %d, count: %" PRIindex ")\n", WABT_PRINTF_STRING_VIEW_ARG(name), flags, count); return reader_->OnComdatBegin(name, flags, count); } Result BinaryReaderLogging::OnComdatEntry(ComdatType kind, Index index) { LOGF("OnComdatEntry(kind: %d, index: %" PRIindex ")\n", kind, index); return reader_->OnComdatEntry(kind, index); } #define DEFINE_BEGIN(name) \ Result BinaryReaderLogging::name(Offset size) { \ LOGF(#name "(%" PRIzd ")\n", size); \ Indent(); \ return reader_->name(size); \ } #define DEFINE_END(name) \ Result BinaryReaderLogging::name() { \ Dedent(); \ LOGF(#name "\n"); \ return reader_->name(); \ } #define DEFINE_INDEX(name) \ Result BinaryReaderLogging::name(Index value) { \ LOGF(#name "(%" PRIindex ")\n", value); \ return reader_->name(value); \ } #define DEFINE_INDEX_DESC(name, desc) \ Result BinaryReaderLogging::name(Index value) { \ LOGF(#name "(" desc ": %" PRIindex ")\n", value); \ return reader_->name(value); \ } #define DEFINE_INDEX_INDEX(name, desc0, desc1) \ Result BinaryReaderLogging::name(Index value0, 
Index value1) { \ LOGF(#name "(" desc0 ": %" PRIindex ", " desc1 ": %" PRIindex ")\n", \ value0, value1); \ return reader_->name(value0, value1); \ } #define DEFINE_INDEX_INDEX_BOOL(name, desc0, desc1, desc2) \ Result BinaryReaderLogging::name(Index value0, Index value1, bool value2) { \ LOGF(#name "(" desc0 ": %" PRIindex ", " desc1 ": %" PRIindex \ ", " desc2 ": %s)\n", \ value0, value1, value2 ? "true" : "false"); \ return reader_->name(value0, value1, value2); \ } #define DEFINE_OPCODE(name) \ Result BinaryReaderLogging::name(Opcode opcode) { \ LOGF(#name "(\"%s\" (%u))\n", opcode.GetName(), opcode.GetCode()); \ return reader_->name(opcode); \ } #define DEFINE_LOAD_STORE_OPCODE(name) \ Result BinaryReaderLogging::name(Opcode opcode, uint32_t alignment_log2, \ Address offset) { \ LOGF(#name "(opcode: \"%s\" (%u), align log2: %u, offset: %" PRIaddress \ ")\n", \ opcode.GetName(), opcode.GetCode(), alignment_log2, offset); \ return reader_->name(opcode, alignment_log2, offset); \ } #define DEFINE0(name) \ Result BinaryReaderLogging::name() { \ LOGF(#name "\n"); \ return reader_->name(); \ } DEFINE_END(EndModule) DEFINE_END(EndCustomSection) DEFINE_BEGIN(BeginTypeSection) DEFINE_INDEX(OnTypeCount) DEFINE_END(EndTypeSection) DEFINE_BEGIN(BeginImportSection) DEFINE_INDEX(OnImportCount) DEFINE_END(EndImportSection) DEFINE_BEGIN(BeginFunctionSection) DEFINE_INDEX(OnFunctionCount) DEFINE_INDEX_INDEX(OnFunction, "index", "sig_index") DEFINE_END(EndFunctionSection) DEFINE_BEGIN(BeginTableSection) DEFINE_INDEX(OnTableCount) DEFINE_END(EndTableSection) DEFINE_BEGIN(BeginMemorySection) DEFINE_INDEX(OnMemoryCount) DEFINE_END(EndMemorySection) DEFINE_BEGIN(BeginGlobalSection) DEFINE_INDEX(OnGlobalCount) DEFINE_INDEX(BeginGlobalInitExpr) DEFINE_INDEX(EndGlobalInitExpr) DEFINE_INDEX(EndGlobal) DEFINE_END(EndGlobalSection) DEFINE_BEGIN(BeginExportSection) DEFINE_INDEX(OnExportCount) DEFINE_END(EndExportSection) DEFINE_BEGIN(BeginStartSection) DEFINE_INDEX(OnStartFunction) 
DEFINE_END(EndStartSection) DEFINE_BEGIN(BeginCodeSection) DEFINE_INDEX(OnFunctionBodyCount) DEFINE_INDEX(EndFunctionBody) DEFINE_INDEX(OnLocalDeclCount) DEFINE_LOAD_STORE_OPCODE(OnAtomicLoadExpr); DEFINE_LOAD_STORE_OPCODE(OnAtomicRmwExpr); DEFINE_LOAD_STORE_OPCODE(OnAtomicRmwCmpxchgExpr); DEFINE_LOAD_STORE_OPCODE(OnAtomicStoreExpr); DEFINE_LOAD_STORE_OPCODE(OnAtomicWaitExpr); DEFINE_LOAD_STORE_OPCODE(OnAtomicNotifyExpr); DEFINE_INDEX_INDEX(OnBrOnExnExpr, "depth", "event_index"); DEFINE_OPCODE(OnBinaryExpr) DEFINE_INDEX_DESC(OnCallExpr, "func_index") DEFINE_INDEX_INDEX(OnCallIndirectExpr, "sig_index", "table_index") DEFINE0(OnCatchExpr); DEFINE_OPCODE(OnCompareExpr) DEFINE_OPCODE(OnConvertExpr) DEFINE0(OnDropExpr) DEFINE0(OnElseExpr) DEFINE0(OnEndExpr) DEFINE_INDEX_DESC(OnGlobalGetExpr, "index") DEFINE_INDEX_DESC(OnGlobalSetExpr, "index") DEFINE_LOAD_STORE_OPCODE(OnLoadExpr); DEFINE_INDEX_DESC(OnLocalGetExpr, "index") DEFINE_INDEX_DESC(OnLocalSetExpr, "index") DEFINE_INDEX_DESC(OnLocalTeeExpr, "index") DEFINE0(OnMemoryCopyExpr) DEFINE_INDEX(OnDataDropExpr) DEFINE0(OnMemoryFillExpr) DEFINE0(OnMemoryGrowExpr) DEFINE_INDEX(OnMemoryInitExpr) DEFINE0(OnMemorySizeExpr) DEFINE0(OnTableCopyExpr) DEFINE_INDEX(OnElemDropExpr) DEFINE_INDEX(OnTableInitExpr) DEFINE_INDEX(OnTableSetExpr) DEFINE_INDEX(OnTableGetExpr) DEFINE_INDEX(OnTableGrowExpr) DEFINE_INDEX(OnTableSizeExpr) DEFINE0(OnRefNullExpr) DEFINE0(OnRefIsNullExpr) DEFINE0(OnNopExpr) DEFINE0(OnRethrowExpr); DEFINE_INDEX_DESC(OnReturnCallExpr, "func_index") DEFINE_INDEX_INDEX(OnReturnCallIndirectExpr, "sig_index", "table_index") DEFINE0(OnReturnExpr) DEFINE0(OnSelectExpr) DEFINE_LOAD_STORE_OPCODE(OnStoreExpr); DEFINE_INDEX_DESC(OnThrowExpr, "event_index") DEFINE0(OnUnreachableExpr) DEFINE_OPCODE(OnUnaryExpr) DEFINE_OPCODE(OnTernaryExpr) DEFINE_END(EndCodeSection) DEFINE_BEGIN(BeginElemSection) DEFINE_INDEX(OnElemSegmentCount) DEFINE_INDEX(BeginElemSegmentInitExpr) DEFINE_INDEX(EndElemSegmentInitExpr) 
DEFINE_INDEX_INDEX(OnElemSegmentElemExprCount, "index", "count") DEFINE_INDEX(OnElemSegmentElemExpr_RefNull) DEFINE_INDEX_INDEX(OnElemSegmentElemExpr_RefFunc, "index", "func_index") DEFINE_INDEX(EndElemSegment) DEFINE_END(EndElemSection) DEFINE_BEGIN(BeginDataSection) DEFINE_INDEX(OnDataSegmentCount) DEFINE_INDEX_INDEX_BOOL(BeginDataSegment, "index", "memory_index", "passive") DEFINE_INDEX(BeginDataSegmentInitExpr) DEFINE_INDEX(EndDataSegmentInitExpr) DEFINE_INDEX(EndDataSegment) DEFINE_END(EndDataSection) DEFINE_BEGIN(BeginDataCountSection) DEFINE_INDEX(OnDataCount) DEFINE_END(EndDataCountSection) DEFINE_BEGIN(BeginNamesSection) DEFINE_INDEX(OnFunctionNamesCount) DEFINE_INDEX(OnLocalNameFunctionCount) DEFINE_INDEX_INDEX(OnLocalNameLocalCount, "index", "count") DEFINE_END(EndNamesSection) DEFINE_BEGIN(BeginRelocSection) DEFINE_END(EndRelocSection) DEFINE_INDEX_INDEX(OnInitExprGlobalGetExpr, "index", "global_index") DEFINE_BEGIN(BeginDylinkSection) DEFINE_INDEX(OnDylinkNeededCount) DEFINE_END(EndDylinkSection) DEFINE_BEGIN(BeginLinkingSection) DEFINE_INDEX(OnSymbolCount) DEFINE_INDEX(OnSegmentInfoCount) DEFINE_INDEX(OnInitFunctionCount) DEFINE_INDEX(OnComdatCount) DEFINE_END(EndLinkingSection) DEFINE_BEGIN(BeginEventSection); DEFINE_INDEX(OnEventCount); DEFINE_INDEX_INDEX(OnEventType, "index", "sig_index") DEFINE_END(EndEventSection); // We don't need to log these (the individual opcodes are logged instead), but // we still need to forward the calls. 
Result BinaryReaderLogging::OnOpcode(Opcode opcode) { return reader_->OnOpcode(opcode); } Result BinaryReaderLogging::OnOpcodeBare() { return reader_->OnOpcodeBare(); } Result BinaryReaderLogging::OnOpcodeIndex(Index value) { return reader_->OnOpcodeIndex(value); } Result BinaryReaderLogging::OnOpcodeIndexIndex(Index value, Index value2) { return reader_->OnOpcodeIndexIndex(value, value2); } Result BinaryReaderLogging::OnOpcodeUint32(uint32_t value) { return reader_->OnOpcodeUint32(value); } Result BinaryReaderLogging::OnOpcodeUint32Uint32(uint32_t value, uint32_t value2) { return reader_->OnOpcodeUint32Uint32(value, value2); } Result BinaryReaderLogging::OnOpcodeUint64(uint64_t value) { return reader_->OnOpcodeUint64(value); } Result BinaryReaderLogging::OnOpcodeF32(uint32_t value) { return reader_->OnOpcodeF32(value); } Result BinaryReaderLogging::OnOpcodeF64(uint64_t value) { return reader_->OnOpcodeF64(value); } Result BinaryReaderLogging::OnOpcodeV128(v128 value) { return reader_->OnOpcodeV128(value); } Result BinaryReaderLogging::OnOpcodeBlockSig(Type sig_type) { return reader_->OnOpcodeBlockSig(sig_type); } Result BinaryReaderLogging::OnEndFunc() { return reader_->OnEndFunc(); } } // namespace wabt
package cn.ittiger.player.demo; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v7.app.AppCompatActivity; import cn.ittiger.player.FullScreenVideoPlayerView; import cn.ittiger.player.PlayerManager; /** * @author: ylhu * @time: 2017/12/5 */ public class FullScreenActivity extends AppCompatActivity { FullScreenVideoPlayerView mVideoPlayerView; String mVideoUrl = "http://www.eywedu.com.cn/sanzijing/UploadFiles_2038/szj-01.mp4"; String mVideoTitle = "三字经"; @Override protected void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_fullscreen_video); mVideoPlayerView = (FullScreenVideoPlayerView) findViewById(R.id.video_player_view); mVideoPlayerView.bind(mVideoUrl, mVideoTitle); mVideoPlayerView.startPlayVideo(); } @Override protected void onResume() { super.onResume(); PlayerManager.getInstance().resume(); } @Override protected void onPause() { super.onPause(); PlayerManager.getInstance().pause(); } @Override protected void onDestroy() { super.onDestroy(); PlayerManager.getInstance().release(); } }
Share. Hopefully without the Rampancy. Microsoft is reportedly prepping voice command software called Cortana to compete with Apple’s Siri and Android’s Google Now. According to ZDNet, Cortana “will be able to learn and adapt, relying on machine-learning technology and the "Satori" knowledge repository powering Bing.” ZDNet’s sources report that Cortana (presumably named after Halo’s AI companion) will be “core” to future versions of Windows Phone, Windows and Xbox One. Back in July, Microsoft CEO Steve Ballmer referenced “a family of devices powered by a service-enabled shell,” which now appears to be in reference to Cortana’s capabilities. According to ZDNet, that shell “won't simply surface information stored on users' phones, PCs and consoles like a search engine can do today.” In Ballmer’s words, it “will support the experiences layer and broker information among our services to bring them together on our devices in ways that will enable richer and deeper app experiences.” Windows Phone currently includes basic voice commands, and Microsoft has already talked up the voice capabilities of Kinect for its upcoming Xbox One. For now, Cortana has yet to be officially announced or confirmed, but we’ve reached out to Microsoft for more information and will update this story with any comment we receive. Andrew Goldfarb is IGN’s news editor. Keep up with pictures of the latest food he’s been eating by following @garfep on Twitter or garfep on IGN.
package vproxy.component.proxy;

import vproxy.connection.Connection;
import vproxy.connection.Connector;

import java.util.function.Consumer;

/**
 * Strategy for resolving the backend {@link Connector} to use for an accepted
 * frontend connection.
 */
public interface ConnectorProvider {
    /**
     * Asynchronously chooses a backend connector for the given accepted connection.
     *
     * @param accepted         the frontend connection that was just accepted
     * @param address          the requested target address
     * @param port             the requested target port
     * @param providedCallback invoked with the chosen {@link Connector} once
     *                         resolution completes (implementations decide how a
     *                         failure is signalled — can't tell from this interface;
     *                         callers should check for a null argument)
     */
    void provide(Connection accepted, String address, int port, Consumer<Connector> providedCallback);
}
def check_response(func):
    """Decorator for API-call methods that return a ``requests``-style response.

    Wraps ``func`` so that:
      * any non-2xx status raises ``AuthenticationError`` (401) or
        ``RegularError`` (everything else),
      * a successful response body is decoded and parsed as JSON,
      * an empty body yields ``{}``.

    Note: the success check is ``200 <= status < 300``. The previous
    ``range(200, 299)`` wrongly treated status 299 (a valid 2xx) as an error.
    """
    def new_f(self, *args, **kwargs) -> dict:
        r = func(self, *args, **kwargs)
        # Any 2xx status (inclusive of 299) counts as success.
        if not 200 <= r.status_code < 300:
            if r.status_code == 401:
                raise AuthenticationError(r)
            raise RegularError(r)
        content = r.content.decode()
        if content:
            return json.loads(content)
        return {}
    return new_f
def tile_coords_to_latlong(zoom, tile_col, tile_row):
    """Convert XYZ/slippy-map tile coordinates to (latitude, longitude).

    Returns the geographic coordinates of the tile's north-west corner in
    degrees, using the standard Web Mercator inverse projection.

    :param zoom: zoom level (the map is 2**zoom tiles per axis)
    :param tile_col: tile column (x), 0 at the 180°W edge
    :param tile_row: tile row (y), 0 at the top (north) edge
    :return: (latitude, longitude) tuple in degrees
    """
    tiles_per_axis = 2 ** zoom
    longitude = tile_col / tiles_per_axis * 360.0 - 180.0
    # Inverse Mercator: y runs from +pi (north) to -pi (south) across the map.
    mercator_y = math.pi * (1 - 2 * tile_row / tiles_per_axis)
    latitude = math.degrees(math.atan(math.sinh(mercator_y)))
    return latitude, longitude
/**
 * Adds an (intersecting) {@link QueryCriteria} that limits the results to results that the user
 * is allowed to see.
 *
 * The user/group restriction becomes the first criteria; any pre-existing criteria are wrapped
 * in a single group so the restriction is intersected with the original query as a whole.
 *
 * @param queryWhere The {@link QueryWhere} instance that defines the query criteria
 * @param userId The user id
 * @param groupIds The user's group ids
 */
private void addUserRolesLimitCriteria( QueryWhere queryWhere, String userId, List<String> groupIds ) {
    // Build the user/group limiting criteria and mark it as the leading criteria.
    QueryCriteria limitCriteria = new QueryCriteria(
            QueryParameterIdentifiers.TASK_USER_ROLES_LIMIT_LIST,
            false,
            QueryCriteriaType.NORMAL,
            2);
    limitCriteria.setFirst(true);
    limitCriteria.getValues().add(userId);
    limitCriteria.getValues().add(groupIds);

    List<QueryCriteria> rebuiltCriteria = new ArrayList<QueryCriteria>(2);
    rebuiltCriteria.add(limitCriteria);

    // Preserve the original criteria (if any) as one grouped criteria so the
    // limit applies to the whole original query, not just its first clause.
    List<QueryCriteria> existingCriteria = queryWhere.getCriteria();
    if( ! existingCriteria.isEmpty() ) {
        QueryCriteria wrappedOriginalCriteria = new QueryCriteria(false);
        wrappedOriginalCriteria.setCriteria(existingCriteria);
        rebuiltCriteria.add(wrappedOriginalCriteria);
    }
    queryWhere.setCriteria(rebuiltCriteria);
}
package server

import (
	"context"
	"io"
	"net/url"

	"github.com/golang/protobuf/jsonpb"
	"github.com/gorilla/websocket"
	"github.com/mattn/go-colorable"
	"github.com/pkg/errors"

	"github.com/tilt-dev/tilt/internal/hud"
	"github.com/tilt-dev/tilt/internal/hud/webview"
	"github.com/tilt-dev/tilt/pkg/logger"
	"github.com/tilt-dev/tilt/pkg/model"
	"github.com/tilt-dev/tilt/pkg/model/logstore"
	proto_webview "github.com/tilt-dev/tilt/pkg/webview"
)

// This file defines machinery to connect to the HUD server websocket and
// read logs from a running Tilt instance.
// In future, we can use WebsocketReader more generically to read state
// from a running Tilt, and do different things with that state depending
// on the handler provided (if we ever implement e.g. `tilt status`).
// (If we never use the WebsocketReader elsewhere, we might want to collapse
// it and the LogStreamer handler into a single struct.)

// WebsocketReader dials the Tilt HUD server websocket and passes every
// received View to its ViewHandler.
type WebsocketReader struct {
	url          url.URL
	conn         WebsocketConn
	marshaller   jsonpb.Marshaler
	unmarshaller jsonpb.Unmarshaler
	handler      ViewHandler
}

// ProvideWebsockerReader constructs a WebsocketReader wired to the local Tilt
// HUD server with a log-streaming handler.
func ProvideWebsockerReader() *WebsocketReader {
	return &WebsocketReader{
		// TODO(maia): pass this URL instead of hardcoding / wire this
		url:          url.URL{Scheme: "ws", Host: "localhost:10350", Path: "/ws/view"},
		handler:      NewLogStreamer(),
		marshaller:   jsonpb.Marshaler{OrigName: false, EmitDefaults: true},
		unmarshaller: jsonpb.Unmarshaler{},
	}
}

// ViewHandler processes each View snapshot received over the websocket.
type ViewHandler interface {
	Handle(v proto_webview.View) error
}

// LogStreamer accumulates log segments from successive Views and prints only
// the lines not yet seen, tracked by `checkpoint`.
type LogStreamer struct {
	logstore   *logstore.LogStore
	printer    *hud.IncrementalPrinter
	checkpoint logstore.Checkpoint
}

func NewLogStreamer() *LogStreamer {
	// TODO(maia): wire this (/ maybe this isn't the thing that needs to be wired, but
	// should be created after we have a conn to pass it?)
	printer := hud.NewIncrementalPrinter(hud.Stdout(colorable.NewColorableStdout()))
	return &LogStreamer{
		logstore: logstore.NewLogStore(),
		printer:  printer,
	}
}

// Handle appends the View's new log segments to the local store and prints
// any lines past the last checkpoint.
func (ls *LogStreamer) Handle(v proto_webview.View) error {
	fromCheckpoint := logstore.Checkpoint(v.LogList.FromCheckpoint)
	toCheckpoint := logstore.Checkpoint(v.LogList.ToCheckpoint)

	if fromCheckpoint == -1 {
		// Server has no new logs to send
		return nil
	}

	segments := v.LogList.Segments
	if fromCheckpoint < ls.checkpoint {
		// The server is re-sending some logs we already have, so slice them off.
		deleteCount := ls.checkpoint - fromCheckpoint
		segments = segments[deleteCount:]
	}

	// TODO(maia): filter for the resources that we care about (`tilt logs resourceA resourceC`)
	// --> and if there's only one resource, don't prefix logs with resource name?
	for _, seg := range segments {
		// TODO(maia): secrets???
		ls.logstore.Append(webview.LogSegmentToEvent(seg, v.LogList.Spans), model.SecretSet{})
	}
	ls.printer.Print(ls.logstore.ContinuingLines(ls.checkpoint))

	if toCheckpoint > ls.checkpoint {
		ls.checkpoint = toCheckpoint
	}

	return nil
}

// Listen dials the websocket and processes incoming messages until either the
// connection closes or ctx is cancelled.
func (wsr *WebsocketReader) Listen(ctx context.Context) error {
	logger.Get(ctx).Debugf("connecting to %s", wsr.url.String())

	var err error
	wsr.conn, _, err = websocket.DefaultDialer.Dial(wsr.url.String(), nil)
	if err != nil {
		return errors.Wrapf(err, "dialing websocket %s", wsr.url.String())
	}
	defer wsr.conn.Close()

	done := make(chan struct{})

	go func() {
		defer close(done)
		for {
			messageType, reader, err := wsr.conn.NextReader()
			if err != nil {
				// NOTE(review): a NextReader error is treated as "connection
				// closed" and ends the read loop silently — confirm no
				// logging/propagation is wanted here.
				return
			}

			if messageType == websocket.TextMessage {
				err = wsr.handleTextMessage(ctx, reader)
				if err != nil {
					// NOTE(review): handler failures are logged at Verbose and
					// the loop keeps going — confirm this shouldn't sometimes
					// be surfaced at Info level.
					logger.Get(ctx).Verbosef("Error handling websocket message: %v", err)
				}
			}
		}
	}()

	for {
		select {
		case <-done:
			// Read loop exited (socket closed by the server).
			return nil
		case <-ctx.Done():
			err := ctx.Err()
			if err != context.Canceled {
				return err
			}
			return wsr.conn.Close()
		}
	}
}

// handleTextMessage unmarshals one View from the websocket, hands it to the
// handler, and (for the incremental-logs protocol) ACKs the received range.
func (wsr *WebsocketReader) handleTextMessage(ctx context.Context, reader io.Reader) error {
	v := proto_webview.View{}
	err := wsr.unmarshaller.Unmarshal(reader, &v)
	if err != nil {
		return errors.Wrap(err, "Unmarshalling websocket message")
	}

	err = wsr.handler.Handle(v)
	if err != nil {
		return errors.Wrap(err, "Handling Tilt state from websocket")
	}

	// If server is using the incremental logs protocol, send back an ACK
	if v.LogList.ToCheckpoint > 0 {
		err = wsr.sendIncrementalLogResp(ctx, &v)
		if err != nil {
			return errors.Wrap(err, "sending websocket ack")
		}
	}

	return nil
}

// Ack a websocket message so the next time the websocket sends data, it only
// sends logs from here on forward
func (wsr *WebsocketReader) sendIncrementalLogResp(ctx context.Context, v *proto_webview.View) error {
	resp := proto_webview.AckWebsocketRequest{
		ToCheckpoint:  v.LogList.ToCheckpoint,
		TiltStartTime: v.TiltStartTime,
	}

	w, err := wsr.conn.NextWriter(websocket.TextMessage)
	if err != nil {
		return errors.Wrap(err, "getting writer")
	}
	defer func() {
		err := w.Close()
		if err != nil {
			logger.Get(ctx).Verbosef("closing writer: %v", err)
		}
	}()

	err = wsr.marshaller.Marshal(w, &resp)
	if err != nil {
		return errors.Wrap(err, "sending response")
	}

	return nil
}
/**
 * Created by Mohammed Aouf ZOUAG on 13/05/2016.
 *
 * One line of an order: a product reference together with the ordered
 * quantity and its price (tax included).
 */
public class OrderDetail {
    private int productID;
    private int quantity;
    private double priceTTC;
    private String productName;
    private String productImage;

    /**
     * Builds an order detail from its JSON representation. A malformed payload
     * is logged and leaves the remaining fields at their defaults.
     */
    public OrderDetail(JSONObject object) {
        try {
            productID = object.getInt("productID");
            quantity = object.getInt("quantity");
            priceTTC = object.getDouble("priceTTC");
            productName = object.getString("productName");
            productImage = object.getString("productImage");
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }

    /**
     * Parses a JSON array into a list of order details, skipping entries that
     * cannot be read.
     */
    public static List<OrderDetail> parseSuppliers(JSONArray array) {
        List<OrderDetail> details = new ArrayList<>();
        for (int index = 0; index < array.length(); index++) {
            try {
                details.add(new OrderDetail(array.getJSONObject(index)));
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        return details;
    }

    public int getProductID() {
        return productID;
    }

    public void setProductID(int productID) {
        this.productID = productID;
    }

    public int getQuantity() {
        return quantity;
    }

    public void setQuantity(int quantity) {
        this.quantity = quantity;
    }

    public double getPriceTTC() {
        return priceTTC;
    }

    public void setPriceTTC(double priceTTC) {
        this.priceTTC = priceTTC;
    }

    public String getProductName() {
        return productName;
    }

    public void setProductName(String productName) {
        this.productName = productName;
    }

    public String getProductImage() {
        return productImage;
    }

    public void setProductImage(String productImage) {
        this.productImage = productImage;
    }
}
<filename>src/sequenceEqual.ts<gh_stars>0 import { reporter, ReporterClass, toStateHolder, Reporter, ISArray, IStateHolder } from "soboku"; import { ISObservable } from "../index.d"; import { SObservable } from "./observable"; function isEqual(x: any, y: any): boolean { return x === y; } class SequenceEqualClass<T> extends SObservable<T, true, Reporter<T>> { private readonly compare: (x: any, y: any) => boolean; private readonly sequence: IStateHolder<T[]>; private i = 0; constructor(sequence: T[] | ISArray<T>, compare = isEqual) { super(reporter<T>()); this.compare = compare; this.sequence = toStateHolder(sequence); } protected onInput(val: T): void { const sequence = this.sequence.s(); if (this.compare(sequence[this.i], val) === false) { this.i = 0; return; } if (++this.i === sequence.length) { this.i = 0; this.output.next(true); } } protected onReset() { this.i = 0; } } export function sequenceEqual<T>(sequence: T[] | ISArray<T>, compareFunc?: (x: T, y: T) => boolean): ISObservable<T, true, Reporter<T>> { return new SequenceEqualClass(sequence, compareFunc); }
def auto_mounter(shares):
    """Mount every discovered NFS/SMB share and return the mountpoints to crawl.

    Exits the process when no open shares were found or none could be mounted.
    Relies on module-level `args`, `logger` and `mounter`.
    """
    if not (len(shares['nfsshares']) > 0 or len(shares['smbshares']) > 0):
        logger.warning('No open shares found, exiting')
        sys.exit(1)

    mounts = mounter(shares, mountdir=args.mountpoint, nfsmntopt=args.nfsmntopt,
                     smbmntopt=args.smbmntopt, smbtype=args.smbtype,
                     smbuser=args.smbuser, smbpass=args.smbpass)

    # Clean slate: drop anything left over from a previous run before mounting.
    logger.info('Unmounting any existing mountpoints...')
    mounts.umount_shares()
    logger.info('Trying to mount shares...')
    mounts_status = mounts.mount_shares()

    # Distinct hosts across all mount attempts (reported in the summary below).
    hostcount = len({status['host'] for status in mounts_status})

    mountstocrawl = []
    mounted = 0
    for mount in mounts_status:
        if mount['mounted']:
            logger.info('Mounted \'%s\' %s share at %s'
                        % (mount['sharename'], mount['sharetype'], mount['mountpoint']))
            mountstocrawl.append(mount['mountpoint'])
            mounted += 1
        else:
            logger.warning(
                'Failed mounting \'%s\' %s share (%s)'
                % (mount['sharename'], mount['sharetype'], mount['output']))

    if mounted > 0:
        logger.info('Mounted %s shares (%s hosts)', mounted, hostcount)
    else:
        logger.warning('No shares could be mounted, exiting')
        sys.exit(1)

    return mountstocrawl
<gh_stars>1-10 #include <stdlib.h> #include <time.h> #ifdef _WIN32 #include <Windows.h> #else #include <unistd.h> #endif #include "progress.h" // Sleep for n milliseconds, OS independant void cross_sleep(unsigned int delay) { #ifdef _WIN32 Sleep(delay); #else usleep(delay * 1000); #endif } void p() { // Print progress from 0 to 100% for (unsigned int i = 0; i <= 100; i++) { // Use default progress bar progress_print(i, 100); // Sleep for an amount of time from 0 to 100ms between each iteration cross_sleep(rand() % 100); } } void pf(char format[12], unsigned char length, unsigned char show_percentage) { // Print progress from 0 to 100% for (unsigned int i = 0; i <= 100; i++) { // Use customized progress bar progress_printf(i, 100, format, length, show_percentage); // Sleep for an amount of time from 0 to 100ms between each iteration cross_sleep(rand() % 100); } } int main() { // Initialize random number generation time_t t; srand((unsigned) time(&t)); // Default progress bar p(); fprintf(stderr, "\n"); // No delimiters, no percentage, custom characters and length pf(":;-", 29, 0); fprintf(stderr, "\n"); // Delimiters, percentage, custom characters and length pf("(=> )", 24, 1); fprintf(stderr, "\n"); return 0; }
/**
 * OSGi (single) service importer. This implementation creates a managed OSGi service proxy that handles the OSGi
 * service dynamics. The returned proxy will select only the best matching OSGi service for the configuration criteria.
 * If the select service goes away (at any point in time), the proxy will automatically search for a replacement without
 * the user intervention.
 *
 * <p/> Note that the proxy instance remains the same and only the backing OSGi service changes. Due to the dynamic
 * nature of OSGi, the backing object can change during method invocations.
 *
 * @author Costin Leau
 * @author Adrian Colyer
 * @author Hal Hildebrand
 *
 */
public final class OsgiServiceProxyFactoryBean extends AbstractServiceImporterProxyFactoryBean implements
		ApplicationEventPublisherAware {

	/**
	 * Wrapper around internal commands. Exposes this importer's internal state
	 * (listener registration, satisfaction) without widening the public API.
	 *
	 * @author Costin Leau
	 *
	 */
	private class Executor implements ImporterInternalActions {

		public void addStateListener(ImporterStateListener stateListener) {
			stateListeners.add(stateListener);
		}

		public void removeStateListener(ImporterStateListener stateListener) {
			stateListeners.remove(stateListener);
		}

		public boolean isSatisfied() {
			// An optional import is always satisfied; a mandatory one is satisfied
			// only while its backing service's bundle is still installed.
			return !mandatory || (proxy == null || proxy.getServiceReference().getBundle() != null);
		}
	}

	private static final Log log = LogFactory.getLog(OsgiServiceProxyFactoryBean.class);

	// Guarded by `monitor` (see setTimeout/getTimeout).
	private long retryTimeout;
	private RetryTemplate retryTemplate;

	/** proxy cast to a specific interface to allow specific method calls */
	private ImportedOsgiServiceProxy proxy;

	/** proxy infrastructure hook exposed to allow clean up */
	private Runnable destructionCallback;
	private Runnable initializationCallback;

	/** application publisher */
	private ApplicationEventPublisher applicationEventPublisher;

	/** internal listeners */
	private final List<ImporterStateListener> stateListeners =
			Collections.synchronizedList(new ArrayList<ImporterStateListener>(4));

	private final ImporterInternalActions controller;

	/** convenience field * */
	private volatile boolean mandatory = true;

	private volatile boolean sticky = true;

	// Lock protecting retryTimeout/retryTemplate/proxy/destructionCallback/publisher.
	private final Object monitor = new Object();

	public OsgiServiceProxyFactoryBean() {
		controller = new ImporterController(new Executor());
	}

	@Override
	public void afterPropertiesSet() {
		super.afterPropertiesSet();
		// Cache the availability flag so isSatisfied() doesn't re-evaluate it.
		mandatory = Availability.MANDATORY.equals(getAvailability());
	}

	/**
	 * {@inheritDoc}
	 *
	 * Returns a managed proxy to the best matching OSGi service.
	 */
	public Object getObject() {
		return super.getObject();
	}

	Object createProxy(boolean lazyProxy) {
		if (log.isDebugEnabled())
			log.debug("Creating a single service proxy ...");

		// first create the TCCL interceptor to register its listener with the
		// dynamic interceptor
		boolean serviceTccl = ImportContextClassLoaderEnum.SERVICE_PROVIDER.equals(getImportContextClassLoader());
		final ServiceProviderTCCLInterceptor tcclAdvice =
				(serviceTccl ? new ServiceProviderTCCLInterceptor() : null);
		final OsgiServiceLifecycleListener tcclListener =
				(serviceTccl ? tcclAdvice.new ServiceProviderTCCLListener() : null);

		Class<?> filterClass = ClassUtils.getParticularClass(getInterfaces());
		String filterClassName = (filterClass != null ? filterClass.getName() : null);

		// The dynamic interceptor performs the actual service lookup/rebinding.
		final ServiceDynamicInterceptor lookupAdvice =
				new ServiceDynamicInterceptor(getBundleContext(), filterClassName, getUnifiedFilter(),
						getAopClassLoader());

		lookupAdvice.setMandatoryService(Availability.MANDATORY.equals(getAvailability()));
		lookupAdvice.setUseBlueprintExceptions(isUseBlueprintExceptions());
		lookupAdvice.setSticky(sticky);

		// When the service-provider TCCL is used, its listener is prepended to
		// the user-configured listeners.
		OsgiServiceLifecycleListener[] listeners =
				(serviceTccl ? ObjectUtils.addObjectToArray(getListeners(), tcclListener) : getListeners());

		lookupAdvice.setListeners(listeners);
		synchronized (monitor) {
			lookupAdvice.setRetryTimeout(retryTimeout);
			retryTemplate = lookupAdvice.getRetryTemplate();
		}
		lookupAdvice.setApplicationEventPublisher(applicationEventPublisher);

		// add the listeners as a list since it might be updated after the proxy
		// has been created
		lookupAdvice.setStateListeners(stateListeners);
		lookupAdvice.setServiceImporter(this);
		lookupAdvice.setServiceImporterName(getBeanName());

		// create a proxy creator using the existing context
		ServiceProxyCreator creator =
				new AbstractServiceProxyCreator(getInterfaces(), getAopClassLoader(), getBeanClassLoader(),
						getBundleContext(), getImportContextClassLoader()) {

					ServiceInvoker createDispatcherInterceptor(ServiceReference reference) {
						return lookupAdvice;
					}

					Advice createServiceProviderTCCLAdvice(ServiceReference reference) {
						return tcclAdvice;
					}
				};

		ProxyPlusCallback proxyPlusCallback = creator.createServiceProxy(lookupAdvice.getServiceReference());

		synchronized (monitor) {
			proxy = proxyPlusCallback.proxy;
			destructionCallback = new DisposableBeanRunnableAdapter(proxyPlusCallback.destructionCallback);
		}

		lookupAdvice.setProxy(proxy);
		// start the lookup only after the proxy has been assembled
		if (!lazyProxy) {
			lookupAdvice.afterPropertiesSet();
		} else {
			initializationCallback = new Runnable() {

				public void run() {
					lookupAdvice.afterPropertiesSet();
				}
			};
		}

		return proxy;
	}

	@Override
	Runnable getProxyInitializer() {
		return initializationCallback;
	}

	@Override
	Runnable getProxyDestructionCallback() {
		synchronized (monitor) {
			return destructionCallback;
		}
	}

	/**
	 * Add the given listener to the array but in the first position.
	 *
	 * @param listeners existing listeners (may be null)
	 * @param listener listener to prepend
	 * @return a new array with <code>listener</code> first, followed by the existing ones
	 */
	private OsgiServiceLifecycleListener[] addListener(OsgiServiceLifecycleListener[] listeners,
			OsgiServiceLifecycleListener listener) {

		int size = (listeners == null ? 1 : listeners.length + 1);
		OsgiServiceLifecycleListener[] list = new OsgiServiceLifecycleListener[size];
		list[0] = listener;
		if (listeners != null)
			System.arraycopy(listeners, 0, list, 1, listeners.length);
		return list;
	}

	/**
	 * Sets how long (in milliseconds) should this importer wait between failed attempts at rebinding to a service that
	 * has been unregistered.
	 *
	 * <p/> It is possible to change this value after initialization (while the proxy is in place). The new values will
	 * be used immediately by the proxy. Any in-flight waiting will be restarted using the new values. Note that if both
	 * values are the same, no restart will be applied.
	 *
	 * @param timeoutInMillis Timeout to set, in milliseconds
	 */
	public void setTimeout(long timeoutInMillis) {
		RetryTemplate rt;

		synchronized (monitor) {
			this.retryTimeout = timeoutInMillis;
			rt = retryTemplate;
		}

		// Resetting the template outside the lock avoids holding `monitor`
		// while any in-flight wait is being restarted.
		if (rt != null) {
			rt.reset(timeoutInMillis);
		}
	}

	/**
	 * Returns the timeout (in milliseconds) this importer waits while trying to find a backing service.
	 *
	 * @return timeout in milliseconds
	 */
	public long getTimeout() {
		synchronized (monitor) {
			return retryTimeout;
		}
	}

	/**
	 * Sets the stickiness of this proxy. If 'true' (default), the proxy will rebind only if the backing service is no
	 * longer available. If 'false', the rebind will occur every time a 'better' candidate appears. A better service is
	 * defined by having either a higher ranking or the same ranking and a lower service id.
	 *
	 * @param sticky sticky flag
	 */
	public void setSticky(boolean sticky) {
		this.sticky = sticky;
	}

	public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher) {
		synchronized (monitor) {
			this.applicationEventPublisher = applicationEventPublisher;
		}
	}
}
/**
 * @author Shalena Omapersad <[email protected]>
 *
 * UI tests for the circle overview page of the WebApp, driven through a real
 * browser against the locally running application.
 */
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT)
public class CircleOverviewUITest {

    private WebDriver driver;

    @BeforeEach
    void setUp() {
        // Every test starts from a fresh browser session that is already logged in.
        driver = new WebDriverFactory().getWebDriver();
        driver.findElement(By.id("username")).sendKeys("[email protected]");
        driver.findElement(By.id("password")).sendKeys("a");
        driver.findElement(By.id("login-submit")).click();
    }

    @AfterEach
    void tearDown() {
        driver.quit();
    }

    @Test
    void toCareCircleTest() {
        // Open a circle from the overview and verify its dashboard loads.
        driver.findElement(By.linkText("Precise Care")).click();

        assertEquals("http://localhost:8080/circles/45", driver.getCurrentUrl());
        assertEquals("Circle dashboard", driver.getTitle());

        // The breadcrumb navigates back to the circle overview.
        driver.findElement(By.linkText("Care Circles")).click();
        assertEquals("My circles", driver.getTitle());
    }

    @Test
    void createNewCircleTest() {
        // Open the creation form and verify its URL.
        driver.findElement(By.linkText("Create new Care Circle")).click();
        assertEquals("http://localhost:8080/circles/new", driver.getCurrentUrl());

        // Filling in a name and saving should land on the new circle's dashboard.
        driver.findElement(By.id("circleName")).sendKeys("test_group");
        driver.findElement(By.id("save_circle")).click();

        assertEquals("Circle dashboard", driver.getTitle());
    }
}
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import absolute_import, division, print_function, unicode_literals

from collections import namedtuple
from textwrap import dedent

from pants.base.exceptions import TaskError
from pants.java.distribution.distribution import DistributionLocator
from pants.option.custom_types import target_option


class JvmToolMixin(object):
  """A mixin for registering and accessing JVM-based tools.

  Must be mixed in to something that can register and use options, e.g., a Task or a Subsystem.

  :API: public
  """

  class InvalidToolClasspath(TaskError):
    """Indicates an invalid jvm tool classpath."""

  class JvmTool(namedtuple('JvmTool', ['scope', 'key', 'classpath', 'main', 'custom_rules'])):
    """Represents a jvm tool classpath request."""

    def dep_spec(self, options):
      """Returns the target address spec that points to this JVM tool's classpath dependencies.

      :rtype: string
      """
      option = self.key.replace('-', '_')
      dep_spec = options.for_scope(self.scope)[option]
      # A leading '[' means the option still holds the legacy list-of-jars
      # format, which is no longer supported.
      if dep_spec.startswith('['):
        raise ValueError(dedent("""\
          JVM tool configuration now expects a single target address, use the following in
          pants.ini:

          [{scope}]
          {key}: //tool/classpath:address
          """.format(scope=self.scope, key=self.key)))
      return dep_spec

    def is_default(self, options):
      """Return `True` if this option was not set by the user.

      :rtype: bool
      """
      return options.for_scope(self.scope).is_default(self.key.replace('-', '_'))

  # Class-level registry shared by all JvmToolMixin subclasses.
  _jvm_tools = []  # List of JvmTool objects.

  @classmethod
  def subsystem_dependencies(cls):
    return super(JvmToolMixin, cls).subsystem_dependencies() + (DistributionLocator,)

  @classmethod
  def get_jvm_options_default(cls, bootstrap_option_values):
    """Subclasses may override to provide different defaults for their JVM options.

    :param bootstrap_option_values: The values of the "bootstrap options" (e.g., pants_workdir).
                                    Implementations can use these when generating the default.
                                    See src/python/pants/options/options_bootstrapper.py for
                                    details.
    """
    return ['-Xmx256m']

  @classmethod
  def register_options(cls, register):
    super(JvmToolMixin, cls).register_options(register)
    register('--jvm-options', type=list, advanced=True, metavar='<option>...',
             default=cls.get_jvm_options_default(register.bootstrap),
             help='Run with these JVM options.')

  @classmethod
  def register_jvm_tool(cls,
                        register,
                        key,
                        classpath_spec=None,
                        main=None,
                        custom_rules=None,
                        fingerprint=True,
                        classpath=None,
                        help=None,
                        removal_version=None,
                        removal_hint=None):
    """Registers a jvm tool under `key` for lazy classpath resolution.

    Classpaths can be retrieved in `execute` scope via `tool_classpath_from_products`.

    NB: If the tool's `main` class name is supplied the tool classpath will be shaded.

    :param register: A function that can register options with the option system.
    :param unicode key: The key the tool configuration should be registered under.
    :param unicode classpath_spec: The tool classpath target address spec that can be used to
                                   override this tool's classpath; by default, `//:[key]`.
    :param unicode main: The fully qualified class name of the tool's main class if shading of the
                         tool classpath is desired.
    :param list custom_rules: An optional list of `Shader.Rule`s to apply before the automatically
                              generated binary jar shading rules.

                              This is useful for excluding classes shared between the tool and the
                              code it runs over. The canonical example is the `org.junit.Test`
                              annotation read by junit runner tools from user code. In this sort
                              of case the shared code must have a uniform name between the tool
                              and the user code and so the shared code must be excluded from
                              shading.
    :param bool fingerprint: Indicates whether to include the jvm tool in the task's fingerprint.
                             Note that unlike for other options, fingerprinting is enabled for
                             tools by default.
    :param list classpath: A list of one or more `JarDependency` objects that form this tool's
                           default classpath. If the classpath is optional, supply an empty list;
                           otherwise the default classpath of `None` indicates the
                           `classpath_spec` must point to a target defined in a BUILD file that
                           provides the tool classpath.
    :param unicode help: An optional custom help string; otherwise a reasonable one is generated.
    :param string removal_version: A semver at which this tool will be removed.
    :param unicode removal_hint: A hint on how to migrate away from this tool.
    """
    def formulate_help():
      if classpath:
        return ('Target address spec for overriding the classpath of the {} jvm tool which is, '
                'by default: {}'.format(key, classpath))
      else:
        return 'Target address spec for specifying the classpath of the {} jvm tool.'.format(key)
    help = help or formulate_help()

    register('--{}'.format(key),
             advanced=True,
             type=target_option,
             default='//:{}'.format(key) if classpath_spec is None else classpath_spec,
             help=help,
             fingerprint=fingerprint,
             removal_version=removal_version,
             removal_hint=removal_hint)

    # TODO(<NAME>): Move towards requiring tool specs point to jvm_binary targets.
    # These already have a main and are a natural place to house any custom shading rules. That
    # would eliminate the need to pass main and custom_rules here.
    # It is awkward that jars can no longer be inlined as dependencies - this will require 2
    # targets for every tool - the jvm_binary, and a jar_library for its dependencies to point to.
    # It may be worth creating a JarLibrary subclass - say JarBinary, or else mixing in a Binary
    # interface to JarLibrary to endow it with main and shade_rules attributes to allow for
    # single-target definition of resolvable jvm binaries.
    jvm_tool = cls.JvmTool(register.scope, key, classpath, main, custom_rules)
    JvmToolMixin._jvm_tools.append(jvm_tool)

  @classmethod
  def prepare_tools(cls, round_manager):
    """Subclasses must call this method to ensure jvm tool products are available."""
    round_manager.require_data('jvm_build_tools_classpath_callbacks')

  @staticmethod
  def get_registered_tools():
    """Returns all registered jvm tools.

    :rtype: list of :class:`JvmToolMixin.JvmTool`
    """
    return JvmToolMixin._jvm_tools

  @staticmethod
  def reset_registered_tools():
    """Needed only for test isolation."""
    JvmToolMixin._jvm_tools = []

  def set_distribution(self, minimum_version=None, maximum_version=None, jdk=False):
    # Locates (and caches) a java distribution matching the constraints;
    # surfaced as a TaskError so the engine reports it as a task failure.
    try:
      self._dist = DistributionLocator.cached(minimum_version=minimum_version,
                                              maximum_version=maximum_version, jdk=jdk)
    except DistributionLocator.Error as e:
      raise TaskError(e)

  @property
  def dist(self):
    if getattr(self, '_dist', None) is None:
      # Use default until told otherwise.
      self.set_distribution()
    return self._dist

  @classmethod
  def tool_jar_from_products(cls, products, key, scope):
    """Get the jar for the tool previously registered under key in the given scope.

    :param products: The products of the current pants run.
    :type products: :class:`pants.goal.products.Products`
    :param string key: The key the tool configuration was registered under.
    :param string scope: The scope the tool configuration was registered under.
    :returns: A single jar path.
    :rtype: string
    :raises: `JvmToolMixin.InvalidToolClasspath` when the tool classpath is not composed of
             exactly one jar.
    """
    classpath = cls.tool_classpath_from_products(products, key, scope)
    if len(classpath) != 1:
      params = dict(tool=key, scope=scope, count=len(classpath),
                    classpath='\n\t'.join(classpath))
      raise cls.InvalidToolClasspath('Expected tool {tool} in scope {scope} to resolve to one '
                                     'jar, instead found {count}:\n\t{classpath}'.format(**params))
    return classpath[0]

  @staticmethod
  def tool_classpath_from_products(products, key, scope):
    """Get a classpath for the tool previously registered under key in the given scope.

    :param products: The products of the current pants run.
    :type products: :class:`pants.goal.products.Products`
    :param string key: The key the tool configuration was registered under.
    :param string scope: The scope the tool configuration was registered under.
    :returns: A list of paths.
    :rtype: list
    """
    callback_product_map = products.get_data('jvm_build_tools_classpath_callbacks') or {}
    callback = callback_product_map.get(scope, {}).get(key)
    if not callback:
      raise TaskError('No bootstrap callback registered for {key} in {scope}'
                      .format(key=key, scope=scope))
    return callback()
/** * Adds all the headers as reference parameters. */ public EPRRecipe addReferenceParameters(Iterable<? extends Header> headers) { for (Header h : headers) addReferenceParameter(h); return this; }
import { FastifyRequest } from 'fastify';

/**
 * The generic payload Hasura POSTs to an action webhook.
 *
 * `Input` is the shape of the action's input arguments as declared in the
 * Hasura action definition.
 */
interface HasuraRequestBody<Input> {
	/**
	 * Name of the action
	 */
	action: string;
	/**
	 * Input parameters to the GraphQL mutation request (the action)
	 */
	input: Input;
	/**
	 * Session variables forwarded from the original GraphQL request's headers
	 */
	session_variables: SessionVariables;
}

/**
 * Hasura session variables. Both fields are optional because unauthenticated
 * or role-less requests omit them.
 */
interface SessionVariables {
	/**
	 * The ID of the user making the Hasura request
	 */
	['x-hasura-user-id']?: string;
	/**
	 * The role present in the headers of the Hasura request
	 */
	['x-hasura-role']?: string;
}
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: transactions/transactions.proto

// NOTE(review): this file is machine-generated from transactions.proto.
// Do not hand-edit; make changes in the .proto file and regenerate.
// The comments added below are review annotations only and will be
// discarded on the next regeneration.

package transactions

import (
	fmt "fmt"
	proto "github.com/gogo/protobuf/proto"
	gossip "github.com/quorumcontrol/messages/v2/build/go/gossip"
	signatures "github.com/quorumcontrol/messages/v2/build/go/signatures"
	io "io"
	math "math"
	math_bits "math/bits"
)

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package

// Transaction_Type enumerates the kind of operation a Transaction carries;
// it selects which of the Transaction's payload fields is meaningful.
type Transaction_Type int32

const (
	Transaction_UNKNOWN        Transaction_Type = 0
	Transaction_SETDATA        Transaction_Type = 1
	Transaction_SETOWNERSHIP   Transaction_Type = 2
	Transaction_ESTABLISHTOKEN Transaction_Type = 3
	Transaction_MINTTOKEN      Transaction_Type = 4
	Transaction_SENDTOKEN      Transaction_Type = 5
	Transaction_RECEIVETOKEN   Transaction_Type = 6
	Transaction_STAKE          Transaction_Type = 7
)

// Forward lookup: enum number -> proto name.
var Transaction_Type_name = map[int32]string{
	0: "UNKNOWN",
	1: "SETDATA",
	2: "SETOWNERSHIP",
	3: "ESTABLISHTOKEN",
	4: "MINTTOKEN",
	5: "SENDTOKEN",
	6: "RECEIVETOKEN",
	7: "STAKE",
}

// Reverse lookup: proto name -> enum number.
var Transaction_Type_value = map[string]int32{
	"UNKNOWN":        0,
	"SETDATA":        1,
	"SETOWNERSHIP":   2,
	"ESTABLISHTOKEN": 3,
	"MINTTOKEN":      4,
	"SENDTOKEN":      5,
	"RECEIVETOKEN":   6,
	"STAKE":          7,
}

func (x Transaction_Type) String() string {
	return proto.EnumName(Transaction_Type_name, int32(x))
}

func (Transaction_Type) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_8af93f5a77b9779a, []int{9, 0}
}

// SetDataPayload is the payload for a SETDATA transaction: write Value at Path.
type SetDataPayload struct {
	Path  string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
	Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}

func (m *SetDataPayload) Reset() { *m = 
SetDataPayload{} } func (m *SetDataPayload) String() string { return proto.CompactTextString(m) } func (*SetDataPayload) ProtoMessage() {} func (*SetDataPayload) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{0} } func (m *SetDataPayload) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *SetDataPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_SetDataPayload.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *SetDataPayload) XXX_Merge(src proto.Message) { xxx_messageInfo_SetDataPayload.Merge(m, src) } func (m *SetDataPayload) XXX_Size() int { return m.Size() } func (m *SetDataPayload) XXX_DiscardUnknown() { xxx_messageInfo_SetDataPayload.DiscardUnknown(m) } var xxx_messageInfo_SetDataPayload proto.InternalMessageInfo func (m *SetDataPayload) GetPath() string { if m != nil { return m.Path } return "" } func (m *SetDataPayload) GetValue() []byte { if m != nil { return m.Value } return nil } type SetOwnershipPayload struct { Authentication []string `protobuf:"bytes,1,rep,name=authentication,proto3" json:"authentication,omitempty"` } func (m *SetOwnershipPayload) Reset() { *m = SetOwnershipPayload{} } func (m *SetOwnershipPayload) String() string { return proto.CompactTextString(m) } func (*SetOwnershipPayload) ProtoMessage() {} func (*SetOwnershipPayload) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{1} } func (m *SetOwnershipPayload) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *SetOwnershipPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_SetOwnershipPayload.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *SetOwnershipPayload) XXX_Merge(src 
proto.Message) { xxx_messageInfo_SetOwnershipPayload.Merge(m, src) } func (m *SetOwnershipPayload) XXX_Size() int { return m.Size() } func (m *SetOwnershipPayload) XXX_DiscardUnknown() { xxx_messageInfo_SetOwnershipPayload.DiscardUnknown(m) } var xxx_messageInfo_SetOwnershipPayload proto.InternalMessageInfo func (m *SetOwnershipPayload) GetAuthentication() []string { if m != nil { return m.Authentication } return nil } type TokenMonetaryPolicy struct { Maximum uint64 `protobuf:"varint,1,opt,name=maximum,proto3" json:"maximum,omitempty"` } func (m *TokenMonetaryPolicy) Reset() { *m = TokenMonetaryPolicy{} } func (m *TokenMonetaryPolicy) String() string { return proto.CompactTextString(m) } func (*TokenMonetaryPolicy) ProtoMessage() {} func (*TokenMonetaryPolicy) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{2} } func (m *TokenMonetaryPolicy) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *TokenMonetaryPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_TokenMonetaryPolicy.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *TokenMonetaryPolicy) XXX_Merge(src proto.Message) { xxx_messageInfo_TokenMonetaryPolicy.Merge(m, src) } func (m *TokenMonetaryPolicy) XXX_Size() int { return m.Size() } func (m *TokenMonetaryPolicy) XXX_DiscardUnknown() { xxx_messageInfo_TokenMonetaryPolicy.DiscardUnknown(m) } var xxx_messageInfo_TokenMonetaryPolicy proto.InternalMessageInfo func (m *TokenMonetaryPolicy) GetMaximum() uint64 { if m != nil { return m.Maximum } return 0 } type EstablishTokenPayload struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` MonetaryPolicy *TokenMonetaryPolicy `protobuf:"bytes,2,opt,name=monetary_policy,json=monetaryPolicy,proto3" json:"monetary_policy,omitempty"` } func (m *EstablishTokenPayload) Reset() { *m = 
EstablishTokenPayload{} } func (m *EstablishTokenPayload) String() string { return proto.CompactTextString(m) } func (*EstablishTokenPayload) ProtoMessage() {} func (*EstablishTokenPayload) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{3} } func (m *EstablishTokenPayload) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *EstablishTokenPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_EstablishTokenPayload.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *EstablishTokenPayload) XXX_Merge(src proto.Message) { xxx_messageInfo_EstablishTokenPayload.Merge(m, src) } func (m *EstablishTokenPayload) XXX_Size() int { return m.Size() } func (m *EstablishTokenPayload) XXX_DiscardUnknown() { xxx_messageInfo_EstablishTokenPayload.DiscardUnknown(m) } var xxx_messageInfo_EstablishTokenPayload proto.InternalMessageInfo func (m *EstablishTokenPayload) GetName() string { if m != nil { return m.Name } return "" } func (m *EstablishTokenPayload) GetMonetaryPolicy() *TokenMonetaryPolicy { if m != nil { return m.MonetaryPolicy } return nil } type MintTokenPayload struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Amount uint64 `protobuf:"varint,2,opt,name=amount,proto3" json:"amount,omitempty"` } func (m *MintTokenPayload) Reset() { *m = MintTokenPayload{} } func (m *MintTokenPayload) String() string { return proto.CompactTextString(m) } func (*MintTokenPayload) ProtoMessage() {} func (*MintTokenPayload) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{4} } func (m *MintTokenPayload) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *MintTokenPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_MintTokenPayload.Marshal(b, m, deterministic) } else { 
b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *MintTokenPayload) XXX_Merge(src proto.Message) { xxx_messageInfo_MintTokenPayload.Merge(m, src) } func (m *MintTokenPayload) XXX_Size() int { return m.Size() } func (m *MintTokenPayload) XXX_DiscardUnknown() { xxx_messageInfo_MintTokenPayload.DiscardUnknown(m) } var xxx_messageInfo_MintTokenPayload proto.InternalMessageInfo func (m *MintTokenPayload) GetName() string { if m != nil { return m.Name } return "" } func (m *MintTokenPayload) GetAmount() uint64 { if m != nil { return m.Amount } return 0 } type SendTokenPayload struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` Amount uint64 `protobuf:"varint,3,opt,name=amount,proto3" json:"amount,omitempty"` Destination string `protobuf:"bytes,4,opt,name=destination,proto3" json:"destination,omitempty"` } func (m *SendTokenPayload) Reset() { *m = SendTokenPayload{} } func (m *SendTokenPayload) String() string { return proto.CompactTextString(m) } func (*SendTokenPayload) ProtoMessage() {} func (*SendTokenPayload) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{5} } func (m *SendTokenPayload) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *SendTokenPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_SendTokenPayload.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *SendTokenPayload) XXX_Merge(src proto.Message) { xxx_messageInfo_SendTokenPayload.Merge(m, src) } func (m *SendTokenPayload) XXX_Size() int { return m.Size() } func (m *SendTokenPayload) XXX_DiscardUnknown() { xxx_messageInfo_SendTokenPayload.DiscardUnknown(m) } var xxx_messageInfo_SendTokenPayload proto.InternalMessageInfo func (m 
*SendTokenPayload) GetId() string { if m != nil { return m.Id } return "" } func (m *SendTokenPayload) GetName() string { if m != nil { return m.Name } return "" } func (m *SendTokenPayload) GetAmount() uint64 { if m != nil { return m.Amount } return 0 } func (m *SendTokenPayload) GetDestination() string { if m != nil { return m.Destination } return "" } type ReceiveTokenPayload struct { SendTokenTransactionId string `protobuf:"bytes,1,opt,name=send_token_transaction_id,json=sendTokenTransactionId,proto3" json:"send_token_transaction_id,omitempty"` Tip []byte `protobuf:"bytes,2,opt,name=tip,proto3" json:"tip,omitempty"` TreeState *signatures.TreeState `protobuf:"bytes,3,opt,name=tree_state,json=treeState,proto3" json:"tree_state,omitempty"` Leaves [][]byte `protobuf:"bytes,4,rep,name=Leaves,proto3" json:"Leaves,omitempty"` Proof *gossip.Proof `protobuf:"bytes,5,opt,name=proof,proto3" json:"proof,omitempty"` } func (m *ReceiveTokenPayload) Reset() { *m = ReceiveTokenPayload{} } func (m *ReceiveTokenPayload) String() string { return proto.CompactTextString(m) } func (*ReceiveTokenPayload) ProtoMessage() {} func (*ReceiveTokenPayload) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{6} } func (m *ReceiveTokenPayload) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *ReceiveTokenPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_ReceiveTokenPayload.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *ReceiveTokenPayload) XXX_Merge(src proto.Message) { xxx_messageInfo_ReceiveTokenPayload.Merge(m, src) } func (m *ReceiveTokenPayload) XXX_Size() int { return m.Size() } func (m *ReceiveTokenPayload) XXX_DiscardUnknown() { xxx_messageInfo_ReceiveTokenPayload.DiscardUnknown(m) } var xxx_messageInfo_ReceiveTokenPayload proto.InternalMessageInfo func (m *ReceiveTokenPayload) 
GetSendTokenTransactionId() string { if m != nil { return m.SendTokenTransactionId } return "" } func (m *ReceiveTokenPayload) GetTip() []byte { if m != nil { return m.Tip } return nil } func (m *ReceiveTokenPayload) GetTreeState() *signatures.TreeState { if m != nil { return m.TreeState } return nil } func (m *ReceiveTokenPayload) GetLeaves() [][]byte { if m != nil { return m.Leaves } return nil } func (m *ReceiveTokenPayload) GetProof() *gossip.Proof { if m != nil { return m.Proof } return nil } type TokenPayload struct { TransactionId string `protobuf:"bytes,1,opt,name=transaction_id,json=transactionId,proto3" json:"transaction_id,omitempty"` Tip string `protobuf:"bytes,2,opt,name=tip,proto3" json:"tip,omitempty"` TreeState *signatures.TreeState `protobuf:"bytes,3,opt,name=tree_state,json=treeState,proto3" json:"tree_state,omitempty"` Leaves [][]byte `protobuf:"bytes,4,rep,name=leaves,proto3" json:"leaves,omitempty"` Proof *gossip.Proof `protobuf:"bytes,5,opt,name=proof,proto3" json:"proof,omitempty"` } func (m *TokenPayload) Reset() { *m = TokenPayload{} } func (m *TokenPayload) String() string { return proto.CompactTextString(m) } func (*TokenPayload) ProtoMessage() {} func (*TokenPayload) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{7} } func (m *TokenPayload) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *TokenPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_TokenPayload.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *TokenPayload) XXX_Merge(src proto.Message) { xxx_messageInfo_TokenPayload.Merge(m, src) } func (m *TokenPayload) XXX_Size() int { return m.Size() } func (m *TokenPayload) XXX_DiscardUnknown() { xxx_messageInfo_TokenPayload.DiscardUnknown(m) } var xxx_messageInfo_TokenPayload proto.InternalMessageInfo func (m *TokenPayload) 
GetTransactionId() string { if m != nil { return m.TransactionId } return "" } func (m *TokenPayload) GetTip() string { if m != nil { return m.Tip } return "" } func (m *TokenPayload) GetTreeState() *signatures.TreeState { if m != nil { return m.TreeState } return nil } func (m *TokenPayload) GetLeaves() [][]byte { if m != nil { return m.Leaves } return nil } func (m *TokenPayload) GetProof() *gossip.Proof { if m != nil { return m.Proof } return nil } type StakePayload struct { GroupId string `protobuf:"bytes,1,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` Amount uint64 `protobuf:"varint,2,opt,name=amount,proto3" json:"amount,omitempty"` DstKey *signatures.PublicKey `protobuf:"bytes,3,opt,name=dst_key,json=dstKey,proto3" json:"dst_key,omitempty"` VerKey *signatures.PublicKey `protobuf:"bytes,4,opt,name=ver_key,json=verKey,proto3" json:"ver_key,omitempty"` } func (m *StakePayload) Reset() { *m = StakePayload{} } func (m *StakePayload) String() string { return proto.CompactTextString(m) } func (*StakePayload) ProtoMessage() {} func (*StakePayload) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{8} } func (m *StakePayload) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *StakePayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_StakePayload.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *StakePayload) XXX_Merge(src proto.Message) { xxx_messageInfo_StakePayload.Merge(m, src) } func (m *StakePayload) XXX_Size() int { return m.Size() } func (m *StakePayload) XXX_DiscardUnknown() { xxx_messageInfo_StakePayload.DiscardUnknown(m) } var xxx_messageInfo_StakePayload proto.InternalMessageInfo func (m *StakePayload) GetGroupId() string { if m != nil { return m.GroupId } return "" } func (m *StakePayload) GetAmount() uint64 { if m != nil { return 
m.Amount } return 0 } func (m *StakePayload) GetDstKey() *signatures.PublicKey { if m != nil { return m.DstKey } return nil } func (m *StakePayload) GetVerKey() *signatures.PublicKey { if m != nil { return m.VerKey } return nil } type Transaction struct { Type Transaction_Type `protobuf:"varint,1,opt,name=type,proto3,enum=v2transactions.Transaction_Type" json:"type,omitempty"` SetDataPayload *SetDataPayload `protobuf:"bytes,2,opt,name=set_data_payload,json=setDataPayload,proto3" json:"set_data_payload,omitempty"` SetOwnershipPayload *SetOwnershipPayload `protobuf:"bytes,3,opt,name=set_ownership_payload,json=setOwnershipPayload,proto3" json:"set_ownership_payload,omitempty"` EstablishTokenPayload *EstablishTokenPayload `protobuf:"bytes,4,opt,name=establish_token_payload,json=establishTokenPayload,proto3" json:"establish_token_payload,omitempty"` MintTokenPayload *MintTokenPayload `protobuf:"bytes,5,opt,name=mint_token_payload,json=mintTokenPayload,proto3" json:"mint_token_payload,omitempty"` SendTokenPayload *SendTokenPayload `protobuf:"bytes,6,opt,name=send_token_payload,json=sendTokenPayload,proto3" json:"send_token_payload,omitempty"` ReceiveTokenPayload *ReceiveTokenPayload `protobuf:"bytes,7,opt,name=receive_token_payload,json=receiveTokenPayload,proto3" json:"receive_token_payload,omitempty"` StakePayload *StakePayload `protobuf:"bytes,8,opt,name=stake_payload,json=stakePayload,proto3" json:"stake_payload,omitempty"` } func (m *Transaction) Reset() { *m = Transaction{} } func (m *Transaction) String() string { return proto.CompactTextString(m) } func (*Transaction) ProtoMessage() {} func (*Transaction) Descriptor() ([]byte, []int) { return fileDescriptor_8af93f5a77b9779a, []int{9} } func (m *Transaction) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Transaction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Transaction.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := 
m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Transaction) XXX_Merge(src proto.Message) { xxx_messageInfo_Transaction.Merge(m, src) } func (m *Transaction) XXX_Size() int { return m.Size() } func (m *Transaction) XXX_DiscardUnknown() { xxx_messageInfo_Transaction.DiscardUnknown(m) } var xxx_messageInfo_Transaction proto.InternalMessageInfo func (m *Transaction) GetType() Transaction_Type { if m != nil { return m.Type } return Transaction_UNKNOWN } func (m *Transaction) GetSetDataPayload() *SetDataPayload { if m != nil { return m.SetDataPayload } return nil } func (m *Transaction) GetSetOwnershipPayload() *SetOwnershipPayload { if m != nil { return m.SetOwnershipPayload } return nil } func (m *Transaction) GetEstablishTokenPayload() *EstablishTokenPayload { if m != nil { return m.EstablishTokenPayload } return nil } func (m *Transaction) GetMintTokenPayload() *MintTokenPayload { if m != nil { return m.MintTokenPayload } return nil } func (m *Transaction) GetSendTokenPayload() *SendTokenPayload { if m != nil { return m.SendTokenPayload } return nil } func (m *Transaction) GetReceiveTokenPayload() *ReceiveTokenPayload { if m != nil { return m.ReceiveTokenPayload } return nil } func (m *Transaction) GetStakePayload() *StakePayload { if m != nil { return m.StakePayload } return nil } func init() { proto.RegisterEnum("v2transactions.Transaction_Type", Transaction_Type_name, Transaction_Type_value) proto.RegisterType((*SetDataPayload)(nil), "v2transactions.SetDataPayload") proto.RegisterType((*SetOwnershipPayload)(nil), "v2transactions.SetOwnershipPayload") proto.RegisterType((*TokenMonetaryPolicy)(nil), "v2transactions.TokenMonetaryPolicy") proto.RegisterType((*EstablishTokenPayload)(nil), "v2transactions.EstablishTokenPayload") proto.RegisterType((*MintTokenPayload)(nil), "v2transactions.MintTokenPayload") proto.RegisterType((*SendTokenPayload)(nil), "v2transactions.SendTokenPayload") 
proto.RegisterType((*ReceiveTokenPayload)(nil), "v2transactions.ReceiveTokenPayload") proto.RegisterType((*TokenPayload)(nil), "v2transactions.TokenPayload") proto.RegisterType((*StakePayload)(nil), "v2transactions.StakePayload") proto.RegisterType((*Transaction)(nil), "v2transactions.Transaction") } func init() { proto.RegisterFile("transactions/transactions.proto", fileDescriptor_8af93f5a77b9779a) } var fileDescriptor_8af93f5a77b9779a = []byte{ // 877 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xdd, 0x6e, 0xe3, 0x44, 0x14, 0xae, 0xf3, 0xbb, 0x39, 0x49, 0x83, 0x35, 0xa1, 0xbb, 0xd9, 0x05, 0x85, 0xc8, 0xab, 0x45, 0xbd, 0x4a, 0x50, 0x40, 0x48, 0xac, 0x04, 0x52, 0x96, 0x5a, 0x6a, 0x94, 0x36, 0x8d, 0xc6, 0x86, 0x4a, 0x48, 0x28, 0x9a, 0xc4, 0x43, 0x32, 0x6a, 0xec, 0x31, 0x9e, 0x71, 0xc0, 0x97, 0x5c, 0x70, 0xcf, 0x4b, 0xf0, 0x16, 0x3c, 0x00, 0x97, 0x7b, 0x07, 0x97, 0xa8, 0x7d, 0x11, 0x64, 0x3b, 0x6e, 0x6c, 0xd7, 0xab, 0x0a, 0x71, 0x95, 0xf3, 0x9d, 0xf1, 0xf9, 0xfc, 0xcd, 0xc9, 0x39, 0x9f, 0xe1, 0x23, 0xe9, 0x11, 0x47, 0x90, 0x95, 0x64, 0xdc, 0x11, 0xc3, 0x34, 0x18, 0xb8, 0x1e, 0x97, 0x1c, 0xb5, 0x77, 0xa3, 0x74, 0xf6, 0x45, 0x67, 0xcd, 0x85, 0x60, 0xee, 0x30, 0xfe, 0x89, 0x1f, 0x7a, 0xf1, 0x81, 0x60, 0x6b, 0x87, 0x48, 0xdf, 0xa3, 0x62, 0x78, 0x08, 0xe3, 0x43, 0xed, 0x35, 0xb4, 0x0d, 0x2a, 0xcf, 0x88, 0x24, 0x73, 0x12, 0x6c, 0x39, 0xb1, 0x10, 0x82, 0x8a, 0x4b, 0xe4, 0xa6, 0xab, 0xf4, 0x95, 0xd3, 0x06, 0x8e, 0x62, 0xf4, 0x3e, 0x54, 0x77, 0x64, 0xeb, 0xd3, 0x6e, 0xa9, 0xaf, 0x9c, 0xb6, 0x70, 0x0c, 0xb4, 0x2f, 0xa1, 0x63, 0x50, 0x79, 0xf5, 0x93, 0x43, 0x3d, 0xb1, 0x61, 0x6e, 0x42, 0xf0, 0x31, 0xb4, 0x89, 0x2f, 0x37, 0xd4, 0x91, 0x6c, 0x45, 0x42, 0x5d, 0x5d, 0xa5, 0x5f, 0x3e, 0x6d, 0xe0, 0x5c, 0x56, 0x1b, 0x42, 0xc7, 0xe4, 0x37, 0xd4, 0xb9, 0xe4, 0x0e, 0x95, 0xc4, 0x0b, 0xe6, 0x7c, 0xcb, 0x56, 0x01, 0xea, 0x42, 0xdd, 0x26, 0x3f, 0x33, 0xdb, 0xb7, 0x23, 0x09, 0x15, 0x9c, 0x40, 0x2d, 0x80, 0x13, 0x5d, 0x48, 
0xb2, 0xdc, 0x32, 0xb1, 0x89, 0x2a, 0x53, 0x92, 0x1d, 0x62, 0xd3, 0x44, 0x72, 0x18, 0xa3, 0x0b, 0x78, 0xcf, 0xde, 0x13, 0x2f, 0xdc, 0x88, 0x39, 0x12, 0xdf, 0x1c, 0xbd, 0x1c, 0x64, 0x9b, 0x36, 0x28, 0x10, 0x81, 0xdb, 0x76, 0x06, 0x6b, 0x5f, 0x81, 0x7a, 0xc9, 0x1c, 0xf9, 0xe8, 0x5b, 0x9f, 0x42, 0x8d, 0xd8, 0xdc, 0x77, 0x64, 0xf4, 0xb2, 0x0a, 0xde, 0x23, 0xcd, 0x05, 0xd5, 0xa0, 0x8e, 0x95, 0xa9, 0x6f, 0x43, 0x89, 0x59, 0xfb, 0xea, 0x12, 0x3b, 0xf0, 0x95, 0x0a, 0xf9, 0xca, 0x69, 0x3e, 0xd4, 0x87, 0xa6, 0x45, 0x85, 0x64, 0x4e, 0xdc, 0xe0, 0x4a, 0x54, 0x92, 0x4e, 0x69, 0x7f, 0x29, 0xd0, 0xc1, 0x74, 0x45, 0xd9, 0x8e, 0x66, 0xde, 0xfa, 0x05, 0x3c, 0x17, 0xd4, 0xb1, 0x16, 0x32, 0x4c, 0x2e, 0x52, 0x8d, 0x58, 0xdc, 0x8b, 0x79, 0x2a, 0x12, 0xa9, 0xe6, 0xe1, 0x78, 0x62, 0x21, 0x15, 0xca, 0x92, 0xb9, 0xfb, 0x19, 0x08, 0x43, 0xf4, 0x39, 0x80, 0xf4, 0x28, 0x5d, 0x08, 0x49, 0x24, 0x8d, 0x24, 0x36, 0x47, 0xcf, 0x06, 0xbb, 0x51, 0x6a, 0xcc, 0x4c, 0x8f, 0x52, 0x23, 0x3c, 0xc6, 0x0d, 0x99, 0x84, 0xe1, 0xb5, 0x2e, 0x28, 0xd9, 0x51, 0xd1, 0xad, 0xf4, 0xcb, 0xa7, 0x2d, 0xbc, 0x47, 0xe8, 0x25, 0x54, 0x5d, 0x8f, 0xf3, 0x1f, 0xba, 0xd5, 0x88, 0xea, 0x78, 0xb0, 0x1f, 0xe4, 0x79, 0x98, 0xc4, 0xf1, 0x99, 0xf6, 0x87, 0x02, 0xad, 0xcc, 0x95, 0x5e, 0x41, 0xbb, 0xf0, 0x1e, 0xc7, 0xf2, 0x5d, 0xf2, 0x1b, 0xff, 0x5b, 0xfe, 0x36, 0x23, 0x7f, 0xfb, 0x1f, 0xe4, 0xff, 0xae, 0x40, 0xcb, 0x90, 0xe4, 0x86, 0x26, 0xf2, 0x9f, 0xc3, 0x93, 0xb5, 0xc7, 0x7d, 0xf7, 0x20, 0xbc, 0x1e, 0xe1, 0x89, 0xf5, 0xae, 0x71, 0x42, 0x9f, 0x40, 0xdd, 0x12, 0x72, 0x71, 0x43, 0x83, 0x62, 0xd5, 0x73, 0x7f, 0xb9, 0x65, 0xab, 0x29, 0x0d, 0x70, 0xcd, 0x12, 0x72, 0x4a, 0x83, 0xb0, 0x62, 0x47, 0xbd, 0xa8, 0xa2, 0xf2, 0x48, 0xc5, 0x8e, 0x7a, 0x53, 0x1a, 0x68, 0xbf, 0xd6, 0xa0, 0x99, 0xfa, 0xff, 0xd1, 0x67, 0x50, 0x91, 0x81, 0x1b, 0x8f, 0x7b, 0x7b, 0xd4, 0x7f, 0xb0, 0x45, 0x07, 0x30, 0x30, 0x03, 0x97, 0xe2, 0xe8, 0x69, 0x74, 0x0e, 0xaa, 0xa0, 0x72, 0x61, 0x11, 0x49, 0x16, 0x6e, 0x7c, 0xe1, 0xfd, 0x1e, 0xf6, 0xf2, 0x0c, 0x59, 0x1f, 0xc2, 
0x6d, 0x91, 0xf5, 0xa5, 0x6b, 0x38, 0x09, 0x99, 0x78, 0x62, 0x37, 0xf7, 0x74, 0xe5, 0xe2, 0xb5, 0x2e, 0xb0, 0x26, 0xdc, 0x11, 0x05, 0x7e, 0xf5, 0x3d, 0x3c, 0xa3, 0x89, 0xad, 0xec, 0xd7, 0x22, 0xa1, 0x8e, 0x5b, 0xf5, 0x2a, 0x4f, 0x5d, 0xe8, 0x42, 0xf8, 0x84, 0x16, 0x9a, 0xd3, 0x0c, 0x90, 0xcd, 0x1c, 0x99, 0x63, 0x8e, 0x27, 0xe4, 0x41, 0x17, 0xf3, 0x26, 0x83, 0x55, 0x3b, 0x6f, 0x3b, 0x33, 0x40, 0xa9, 0x05, 0x4e, 0xf8, 0x6a, 0xc5, 0x7c, 0x79, 0xd3, 0xc1, 0xaa, 0xc8, 0xdb, 0xd0, 0x35, 0x9c, 0x78, 0xb1, 0x4f, 0xe4, 0x28, 0xeb, 0xc5, 0x7d, 0x2d, 0x30, 0x15, 0xdc, 0xf1, 0x0a, 0x9c, 0x66, 0x0c, 0xc7, 0x22, 0x9c, 0xf3, 0x7b, 0xc2, 0x27, 0x11, 0xe1, 0x87, 0x0f, 0x34, 0xa6, 0x96, 0x01, 0xb7, 0x44, 0x0a, 0x69, 0xbf, 0x28, 0x50, 0x09, 0x87, 0x09, 0x35, 0xa1, 0xfe, 0xcd, 0x6c, 0x3a, 0xbb, 0xba, 0x9e, 0xa9, 0x47, 0x21, 0x30, 0x74, 0xf3, 0x6c, 0x6c, 0x8e, 0x55, 0x05, 0xa9, 0xd0, 0x32, 0x74, 0xf3, 0xea, 0x7a, 0xa6, 0x63, 0xe3, 0x7c, 0x32, 0x57, 0x4b, 0x08, 0x41, 0x5b, 0x37, 0xcc, 0xf1, 0x9b, 0x8b, 0x89, 0x71, 0x6e, 0x5e, 0x4d, 0xf5, 0x99, 0x5a, 0x46, 0xc7, 0xd0, 0xb8, 0x9c, 0xcc, 0xcc, 0x18, 0x56, 0x42, 0x68, 0xe8, 0xb3, 0xb3, 0x18, 0x56, 0x43, 0x0e, 0xac, 0x7f, 0xad, 0x4f, 0xbe, 0xd5, 0xe3, 0x4c, 0x0d, 0x35, 0xa0, 0x6a, 0x98, 0xe3, 0xa9, 0xae, 0xd6, 0xdf, 0x98, 0x7f, 0xde, 0xf6, 0x94, 0xb7, 0xb7, 0x3d, 0xe5, 0x9f, 0xdb, 0x9e, 0xf2, 0xdb, 0x5d, 0xef, 0xe8, 0xed, 0x5d, 0xef, 0xe8, 0xef, 0xbb, 0xde, 0xd1, 0x77, 0xaf, 0xd7, 0x4c, 0x6e, 0xfc, 0xe5, 0x60, 0xc5, 0xed, 0xe1, 0x8f, 0x3e, 0xf7, 0x7c, 0x7b, 0xc5, 0x1d, 0xe9, 0xf1, 0xed, 0xd0, 0xa6, 0x42, 0x90, 0x35, 0x15, 0xc3, 0xdd, 0x68, 0xb8, 0xf4, 0xd9, 0xd6, 0x1a, 0xae, 0x79, 0xe6, 0xfb, 0xbd, 0xac, 0x45, 0x9f, 0xdf, 0x4f, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x33, 0xcf, 0x06, 0xc3, 0xe3, 0x07, 0x00, 0x00, } func (m *SetDataPayload) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SetDataPayload) 
MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SetDataPayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Value) > 0 { i -= len(m.Value) copy(dAtA[i:], m.Value) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Value))) i-- dAtA[i] = 0x12 } if len(m.Path) > 0 { i -= len(m.Path) copy(dAtA[i:], m.Path) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Path))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *SetOwnershipPayload) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SetOwnershipPayload) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SetOwnershipPayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Authentication) > 0 { for iNdEx := len(m.Authentication) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Authentication[iNdEx]) copy(dAtA[i:], m.Authentication[iNdEx]) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Authentication[iNdEx]))) i-- dAtA[i] = 0xa } } return len(dAtA) - i, nil } func (m *TokenMonetaryPolicy) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *TokenMonetaryPolicy) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *TokenMonetaryPolicy) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Maximum != 0 { i = encodeVarintTransactions(dAtA, i, uint64(m.Maximum)) i-- dAtA[i] = 0x8 } return len(dAtA) - i, nil } func (m *EstablishTokenPayload) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := 
m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *EstablishTokenPayload) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *EstablishTokenPayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.MonetaryPolicy != nil { { size, err := m.MonetaryPolicy.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x12 } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *MintTokenPayload) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *MintTokenPayload) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *MintTokenPayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Amount != 0 { i = encodeVarintTransactions(dAtA, i, uint64(m.Amount)) i-- dAtA[i] = 0x10 } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *SendTokenPayload) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *SendTokenPayload) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *SendTokenPayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Destination) > 0 { i -= len(m.Destination) copy(dAtA[i:], m.Destination) i = 
encodeVarintTransactions(dAtA, i, uint64(len(m.Destination))) i-- dAtA[i] = 0x22 } if m.Amount != 0 { i = encodeVarintTransactions(dAtA, i, uint64(m.Amount)) i-- dAtA[i] = 0x18 } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0x12 } if len(m.Id) > 0 { i -= len(m.Id) copy(dAtA[i:], m.Id) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Id))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *ReceiveTokenPayload) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *ReceiveTokenPayload) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *ReceiveTokenPayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Proof != nil { { size, err := m.Proof.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x2a } if len(m.Leaves) > 0 { for iNdEx := len(m.Leaves) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Leaves[iNdEx]) copy(dAtA[i:], m.Leaves[iNdEx]) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Leaves[iNdEx]))) i-- dAtA[i] = 0x22 } } if m.TreeState != nil { { size, err := m.TreeState.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x1a } if len(m.Tip) > 0 { i -= len(m.Tip) copy(dAtA[i:], m.Tip) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Tip))) i-- dAtA[i] = 0x12 } if len(m.SendTokenTransactionId) > 0 { i -= len(m.SendTokenTransactionId) copy(dAtA[i:], m.SendTokenTransactionId) i = encodeVarintTransactions(dAtA, i, uint64(len(m.SendTokenTransactionId))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *TokenPayload) Marshal() (dAtA []byte, err error) { size := 
m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *TokenPayload) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *TokenPayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Proof != nil { { size, err := m.Proof.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x2a } if len(m.Leaves) > 0 { for iNdEx := len(m.Leaves) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Leaves[iNdEx]) copy(dAtA[i:], m.Leaves[iNdEx]) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Leaves[iNdEx]))) i-- dAtA[i] = 0x22 } } if m.TreeState != nil { { size, err := m.TreeState.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x1a } if len(m.Tip) > 0 { i -= len(m.Tip) copy(dAtA[i:], m.Tip) i = encodeVarintTransactions(dAtA, i, uint64(len(m.Tip))) i-- dAtA[i] = 0x12 } if len(m.TransactionId) > 0 { i -= len(m.TransactionId) copy(dAtA[i:], m.TransactionId) i = encodeVarintTransactions(dAtA, i, uint64(len(m.TransactionId))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *StakePayload) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *StakePayload) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *StakePayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.VerKey != nil { { size, err := m.VerKey.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x22 } if m.DstKey != nil { { size, err := 
m.DstKey.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x1a } if m.Amount != 0 { i = encodeVarintTransactions(dAtA, i, uint64(m.Amount)) i-- dAtA[i] = 0x10 } if len(m.GroupId) > 0 { i -= len(m.GroupId) copy(dAtA[i:], m.GroupId) i = encodeVarintTransactions(dAtA, i, uint64(len(m.GroupId))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *Transaction) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *Transaction) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Transaction) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.StakePayload != nil { { size, err := m.StakePayload.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x42 } if m.ReceiveTokenPayload != nil { { size, err := m.ReceiveTokenPayload.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x3a } if m.SendTokenPayload != nil { { size, err := m.SendTokenPayload.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x32 } if m.MintTokenPayload != nil { { size, err := m.MintTokenPayload.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x2a } if m.EstablishTokenPayload != nil { { size, err := m.EstablishTokenPayload.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintTransactions(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x22 } if m.SetOwnershipPayload != nil { { size, err := 
m.SetOwnershipPayload.MarshalToSizedBuffer(dAtA[:i])
// NOTE(review): auto-generated gogo/protobuf code — do not hand-edit logic;
// regenerate from the .proto instead. Marshaling fills dAtA from the END
// toward the front (i is the write cursor, moving down), so fields appear
// in descending field-number order here but ascending order on the wire.
// The lines above/below the function boundaries in this chunk are
// continuations of (*Transaction).MarshalToSizedBuffer and
// (*SendTokenPayload).Size, which begin/end outside this view.
			if err != nil {
				return 0, err
			}
			i -= size
			// Length-prefix the embedded message just written.
			i = encodeVarintTransactions(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x1a // tag 0x1a = field 3 (SetOwnershipPayload), wire type 2 (length-delimited)
	}
	if m.SetDataPayload != nil {
		{
			size, err := m.SetDataPayload.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintTransactions(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x12 // tag 0x12 = field 2 (SetDataPayload), wire type 2 (length-delimited)
	}
	if m.Type != 0 {
		i = encodeVarintTransactions(dAtA, i, uint64(m.Type))
		i--
		dAtA[i] = 0x8 // tag 0x8 = field 1 (Type), wire type 0 (varint)
	}
	// Number of bytes written into the tail of dAtA.
	return len(dAtA) - i, nil
}

// encodeVarintTransactions writes v as a base-128 varint ending just before
// `offset` in dAtA (matching the tail-first marshaling scheme) and returns the
// new offset, i.e. the index of the varint's first byte.
func encodeVarintTransactions(dAtA []byte, offset int, v uint64) int {
	offset -= sovTransactions(v) // back up by the varint's encoded width
	base := offset
	for v >= 1<<7 {
		dAtA[offset] = uint8(v&0x7f | 0x80) // low 7 bits, continuation bit set
		v >>= 7
		offset++
	}
	dAtA[offset] = uint8(v) // final byte, continuation bit clear
	return base
}

// Size returns the encoded protobuf size of m in bytes; nil messages encode to 0.
// Throughout these Size methods, `1 + l + sovTransactions(uint64(l))` is
// 1 tag byte + payload length + the varint length prefix.
func (m *SetDataPayload) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Path)
	if l > 0 {
		n += 1 + l + sovTransactions(uint64(l))
	}
	l = len(m.Value)
	if l > 0 {
		n += 1 + l + sovTransactions(uint64(l))
	}
	return n
}

// Size returns the encoded protobuf size of m in bytes; nil messages encode to 0.
func (m *SetOwnershipPayload) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if len(m.Authentication) > 0 {
		// Repeated string: each element carries its own tag + length prefix.
		for _, s := range m.Authentication {
			l = len(s)
			n += 1 + l + sovTransactions(uint64(l))
		}
	}
	return n
}

// Size returns the encoded protobuf size of m in bytes; nil messages encode to 0.
func (m *TokenMonetaryPolicy) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.Maximum != 0 {
		// Varint field: 1 tag byte + varint-encoded value (zero values are omitted).
		n += 1 + sovTransactions(uint64(m.Maximum))
	}
	return n
}

// Size returns the encoded protobuf size of m in bytes; nil messages encode to 0.
func (m *EstablishTokenPayload) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovTransactions(uint64(l))
	}
	if m.MonetaryPolicy != nil {
		// Embedded message: tag + length prefix + nested encoding.
		l = m.MonetaryPolicy.Size()
		n += 1 + l + sovTransactions(uint64(l))
	}
	return n
}

// Size returns the encoded protobuf size of m in bytes; nil messages encode to 0.
func (m *MintTokenPayload) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovTransactions(uint64(l))
	}
	if m.Amount != 0 {
		n += 1 + sovTransactions(uint64(m.Amount))
	}
	return n
}

// Size returns the encoded protobuf size of m in bytes; nil messages encode to 0.
// (Continues in the next chunk.)
func (m *SendTokenPayload) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Id)
	if l > 0 {
		n += 1 + l + sovTransactions(uint64(l))
	}
	l =
len(m.Name) if l > 0 { n += 1 + l + sovTransactions(uint64(l)) } if m.Amount != 0 { n += 1 + sovTransactions(uint64(m.Amount)) } l = len(m.Destination) if l > 0 { n += 1 + l + sovTransactions(uint64(l)) } return n } func (m *ReceiveTokenPayload) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.SendTokenTransactionId) if l > 0 { n += 1 + l + sovTransactions(uint64(l)) } l = len(m.Tip) if l > 0 { n += 1 + l + sovTransactions(uint64(l)) } if m.TreeState != nil { l = m.TreeState.Size() n += 1 + l + sovTransactions(uint64(l)) } if len(m.Leaves) > 0 { for _, b := range m.Leaves { l = len(b) n += 1 + l + sovTransactions(uint64(l)) } } if m.Proof != nil { l = m.Proof.Size() n += 1 + l + sovTransactions(uint64(l)) } return n } func (m *TokenPayload) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.TransactionId) if l > 0 { n += 1 + l + sovTransactions(uint64(l)) } l = len(m.Tip) if l > 0 { n += 1 + l + sovTransactions(uint64(l)) } if m.TreeState != nil { l = m.TreeState.Size() n += 1 + l + sovTransactions(uint64(l)) } if len(m.Leaves) > 0 { for _, b := range m.Leaves { l = len(b) n += 1 + l + sovTransactions(uint64(l)) } } if m.Proof != nil { l = m.Proof.Size() n += 1 + l + sovTransactions(uint64(l)) } return n } func (m *StakePayload) Size() (n int) { if m == nil { return 0 } var l int _ = l l = len(m.GroupId) if l > 0 { n += 1 + l + sovTransactions(uint64(l)) } if m.Amount != 0 { n += 1 + sovTransactions(uint64(m.Amount)) } if m.DstKey != nil { l = m.DstKey.Size() n += 1 + l + sovTransactions(uint64(l)) } if m.VerKey != nil { l = m.VerKey.Size() n += 1 + l + sovTransactions(uint64(l)) } return n } func (m *Transaction) Size() (n int) { if m == nil { return 0 } var l int _ = l if m.Type != 0 { n += 1 + sovTransactions(uint64(m.Type)) } if m.SetDataPayload != nil { l = m.SetDataPayload.Size() n += 1 + l + sovTransactions(uint64(l)) } if m.SetOwnershipPayload != nil { l = m.SetOwnershipPayload.Size() n += 1 + l + 
sovTransactions(uint64(l)) } if m.EstablishTokenPayload != nil { l = m.EstablishTokenPayload.Size() n += 1 + l + sovTransactions(uint64(l)) } if m.MintTokenPayload != nil { l = m.MintTokenPayload.Size() n += 1 + l + sovTransactions(uint64(l)) } if m.SendTokenPayload != nil { l = m.SendTokenPayload.Size() n += 1 + l + sovTransactions(uint64(l)) } if m.ReceiveTokenPayload != nil { l = m.ReceiveTokenPayload.Size() n += 1 + l + sovTransactions(uint64(l)) } if m.StakePayload != nil { l = m.StakePayload.Size() n += 1 + l + sovTransactions(uint64(l)) } return n } func sovTransactions(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } func sozTransactions(x uint64) (n int) { return sovTransactions(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } func (m *SetDataPayload) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SetDataPayload: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SetDataPayload: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Path", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Path = string(dAtA[iNdEx:postIndex]) 
iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Value = append(m.Value[:0], dAtA[iNdEx:postIndex]...) if m.Value == nil { m.Value = []byte{} } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *SetOwnershipPayload) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SetOwnershipPayload: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SetOwnershipPayload: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Authentication", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { 
break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Authentication = append(m.Authentication, string(dAtA[iNdEx:postIndex])) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *TokenMonetaryPolicy) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: TokenMonetaryPolicy: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: TokenMonetaryPolicy: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Maximum", wireType) } m.Maximum = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.Maximum |= uint64(b&0x7F) << shift if b < 0x80 { break } } default: iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return 
io.ErrUnexpectedEOF } return nil } func (m *EstablishTokenPayload) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: EstablishTokenPayload: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: EstablishTokenPayload: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Name = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field MonetaryPolicy", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } if m.MonetaryPolicy == nil { m.MonetaryPolicy = &TokenMonetaryPolicy{} } if err := m.MonetaryPolicy.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { 
return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *MintTokenPayload) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: MintTokenPayload: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: MintTokenPayload: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Name = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Amount", wireType) } m.Amount = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.Amount |= uint64(b&0x7F) << shift if b < 0x80 { break } } default: 
iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *SendTokenPayload) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: SendTokenPayload: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: SendTokenPayload: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Id", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Id = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 
0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Name = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Amount", wireType) } m.Amount = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.Amount |= uint64(b&0x7F) << shift if b < 0x80 { break } } case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Destination", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Destination = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *ReceiveTokenPayload) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { 
return fmt.Errorf("proto: ReceiveTokenPayload: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: ReceiveTokenPayload: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field SendTokenTransactionId", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.SendTokenTransactionId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Tip", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Tip = append(m.Tip[:0], dAtA[iNdEx:postIndex]...) 
if m.Tip == nil { m.Tip = []byte{} } iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field TreeState", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } if m.TreeState == nil { m.TreeState = &signatures.TreeState{} } if err := m.TreeState.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Leaves", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Leaves = append(m.Leaves, make([]byte, postIndex-iNdEx)) copy(m.Leaves[len(m.Leaves)-1], dAtA[iNdEx:postIndex]) iNdEx = postIndex case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Proof", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } if m.Proof == nil { m.Proof = &gossip.Proof{} } if err := 
m.Proof.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *TokenPayload) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: TokenPayload: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: TokenPayload: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field TransactionId", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.TransactionId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Tip", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ 
stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Tip = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field TreeState", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } if m.TreeState == nil { m.TreeState = &signatures.TreeState{} } if err := m.TreeState.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Leaves", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ byteLen |= int(b&0x7F) << shift if b < 0x80 { break } } if byteLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + byteLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.Leaves = append(m.Leaves, make([]byte, postIndex-iNdEx)) copy(m.Leaves[len(m.Leaves)-1], dAtA[iNdEx:postIndex]) iNdEx = postIndex case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Proof", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << 
shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } if m.Proof == nil { m.Proof = &gossip.Proof{} } if err := m.Proof.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *StakePayload) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: StakePayload: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: StakePayload: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field GroupId", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } m.GroupId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 0 
{ return fmt.Errorf("proto: wrong wireType = %d for field Amount", wireType) } m.Amount = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.Amount |= uint64(b&0x7F) << shift if b < 0x80 { break } } case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field DstKey", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } if m.DstKey == nil { m.DstKey = &signatures.PublicKey{} } if err := m.DstKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field VerKey", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTransactions } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthTransactions } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthTransactions } if postIndex > l { return io.ErrUnexpectedEOF } if m.VerKey == nil { m.VerKey = &signatures.PublicKey{} } if err := m.VerKey.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipTransactions(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) < 0 { return ErrInvalidLengthTransactions } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF 
}
	return nil
}

// NOTE(review): the two lines above close the preceding generated Unmarshal
// method, whose head lies before this chunk.
//
// Everything below is gogo/protobuf machine-generated decoding code; comments
// were added for review only. Do not hand-edit the logic — regenerate from the
// .proto definition instead.

// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// It repeatedly reads a field key varint (fieldNum<<3 | wireType), dispatches
// on the field number, and skips unknown fields via skipTransactions.
func (m *Transaction) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the field key as a base-128 varint (at most 10 bytes for 64 bits).
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowTransactions
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			// Wire type 4 is "end group"; it may never start a field.
			return fmt.Errorf("proto: Transaction: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Transaction: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: Type — varint-encoded enum.
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType)
			}
			m.Type = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Type |= Transaction_Type(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 2:
			// Field 2: SetDataPayload — length-delimited embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field SetDataPayload", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTransactions
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				// Guards against integer overflow of iNdEx + msglen.
				return ErrInvalidLengthTransactions
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.SetDataPayload == nil {
				m.SetDataPayload = &SetDataPayload{}
			}
			if err := m.SetDataPayload.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// Field 3: SetOwnershipPayload — length-delimited embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field SetOwnershipPayload", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTransactions
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTransactions
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.SetOwnershipPayload == nil {
				m.SetOwnershipPayload = &SetOwnershipPayload{}
			}
			if err := m.SetOwnershipPayload.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 4:
			// Field 4: EstablishTokenPayload — length-delimited embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field EstablishTokenPayload", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTransactions
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTransactions
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.EstablishTokenPayload == nil {
				m.EstablishTokenPayload = &EstablishTokenPayload{}
			}
			if err := m.EstablishTokenPayload.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 5:
			// Field 5: MintTokenPayload — length-delimited embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field MintTokenPayload", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTransactions
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTransactions
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.MintTokenPayload == nil {
				m.MintTokenPayload = &MintTokenPayload{}
			}
			if err := m.MintTokenPayload.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 6:
			// Field 6: SendTokenPayload — length-delimited embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field SendTokenPayload", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTransactions
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTransactions
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.SendTokenPayload == nil {
				m.SendTokenPayload = &SendTokenPayload{}
			}
			if err := m.SendTokenPayload.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 7:
			// Field 7: ReceiveTokenPayload — length-delimited embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ReceiveTokenPayload", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTransactions
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTransactions
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.ReceiveTokenPayload == nil {
				m.ReceiveTokenPayload = &ReceiveTokenPayload{}
			}
			if err := m.ReceiveTokenPayload.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 8:
			// Field 8: StakePayload — length-delimited embedded message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field StakePayload", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthTransactions
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthTransactions
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.StakePayload == nil {
				m.StakePayload = &StakePayload{}
			}
			if err := m.StakePayload.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the field key and skip the whole field,
			// preserving the overflow/bounds checks.
			iNdEx = preIndex
			skippy, err := skipTransactions(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthTransactions
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthTransactions
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}

// skipTransactions returns the number of bytes occupied by the wire-format
// field starting at dAtA[0] (key plus value), tracking group nesting via
// depth. It returns an error on truncated input, varint overflow, negative
// lengths, or an unbalanced end-group marker.
func skipTransactions(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	depth := 0
	for iNdEx < l {
		var wire uint64
		// Decode the field key varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowTransactions
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint value: advance past bytes until the continuation bit clears.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
		case 1:
			// Fixed 64-bit value.
			iNdEx += 8
		case 2:
			// Length-delimited value: read the length varint, then skip that many bytes.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowTransactions
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLengthTransactions
			}
			iNdEx += length
		case 3:
			// Start group (deprecated proto2 feature): increase nesting.
			depth++
		case 4:
			// End group: must match a prior start group.
			if depth == 0 {
				return 0, ErrUnexpectedEndOfGroupTransactions
			}
			depth--
		case 5:
			// Fixed 32-bit value.
			iNdEx += 4
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
		if iNdEx < 0 {
			// iNdEx can overflow negative from a huge skipped length.
			return 0, ErrInvalidLengthTransactions
		}
		if depth == 0 {
			// Back at top level: the field (including any groups) is fully skipped.
			return iNdEx, nil
		}
	}
	return 0, io.ErrUnexpectedEOF
}

// Sentinel errors shared by all generated Unmarshal/skip functions in this file.
var (
	ErrInvalidLengthTransactions        = fmt.Errorf("proto: negative length found during unmarshaling")
	ErrIntOverflowTransactions          = fmt.Errorf("proto: integer overflow")
	ErrUnexpectedEndOfGroupTransactions = fmt.Errorf("proto: unexpected end of group")
)
SOPA/PIPA, the controversial anti-piracy bill that would have allowed the U.S. government to monitor and remove "rogue" websites, was shelved in January after millions of Americans protested what essentially boiled down to online censorship. But while this was taking place, SOPA author Rep. Lamar Smith (R-TX, pictured above) was quietly pushing another, even more invasive online surveillance bill through Congress. Submitted in May 2011, HR 1981 (PDF) aka "Protecting Children From Internet Pornographers Act of 2011," a proposed amendment to Chapter 18 USC 2703, shifts the focus from piracy to an irrefutably heinous crime: child pornography. Law enforcement has identified data retention requirements for ISPs as the number one tool they need to identify and prosecute sexual predators on the Internet. Without this tool, sexual predators can surf the Internet preying on our children without fear of being caught. To be fair, there are many good intentions in HR 1981. Some sections enhance the punishment for child exploitation. Others expand the protection of targeted minors. But the part you freedom- and Internet-loving advocates might care about is a new section requiring "electronic communication service or remote computing service" providers to store both temporary and static IP addresses for at least 18 months (currently ISPs are required to store this for three months). Law enforcers would then be able to subpoena companies for this information during investigations of unregistered sex offenders. Or as Kevin Fogarty at IT World put it, the bill essentially assumes 99.76 percent of Americans are guilty of child exploitation until proven innocent. "Since it is empowering U.S. Marshals to investigate people who have not yet been convicted, under PCFIPA, the only thing required to get a valid subpoena to examine all the online activity [of] 99.762 percent of the U.S.
population, is an investigating officer willing to say the subpoena has something to do with investigation of online child porn." Even if many of you are willing to give up your anonymity for the sake of protecting children, you should be aware of two major implications. First of all, if your ISP is forced to store such data, hopefully with air-tight encryption, for 18 months rather than three, guess who's going to pay for that expensive storage? You. Me. 90-year-old Me-ma. Second of all, although IP addresses alone won't say much more than your geolocation, an ISP can mine a lot more: browsing history, IMs, emails, social networking activity, etc. Another section in this bill promises not to bring any cause of action against an ISP disclosing information to a law enforcer. The Future of HR 1981 and What You Can Do NOW According to The Next Web, the bill sailed through the House last year and is in the Union Calendar for "expedited consideration." OpenCongress and the Electronic Frontier Foundation provide easy links to contact your Congressman. As we saw with SOPA, when millions of Americans take a stand, it works. For more from Sara, follow her on Twitter @sarapyin. For the top stories in tech, follow us on Twitter at @PCMag.
<reponame>ecoqba/cayenne-particle // not used
Frontline Science: Tumor necrosis factor‐α stimulation and priming of human neutrophil granule exocytosis Neutrophil granule exocytosis plays an important role in innate and adaptive immune responses. The present study examined TNF‐α stimulation or priming of exocytosis of the 4 neutrophil granule subsets. TNF‐α stimulated exocytosis of secretory vesicles and gelatinase granules and primed specific and azurophilic granule exocytosis to fMLF stimulation. Both stimulation and priming of exocytosis by TNF‐α were dependent on p38 MAPK activity. Bioinformatic analysis of 1115 neutrophil proteins identified by mass spectrometry as being phosphorylated by TNF‐α exposure found that actin cytoskeleton regulation was a major biologic function. A role for p38 MAPK regulation of the actin cytoskeleton was confirmed experimentally. Thirteen phosphoproteins regulated secretory vesicle quantity, formation, or release, 4 of which—Raf1, myristoylated alanine‐rich protein kinase C (PKC) substrate (MARCKS), Abelson murine leukemia interactor 1 (ABI1), and myosin VI—were targets of the p38 MAPK pathway. Pharmacologic inhibition of Raf1 reduced stimulated exocytosis of gelatinase granules and priming of specific granule exocytosis. We conclude that differential regulation of exocytosis by TNF‐α involves the actin cytoskeleton and is a necessary component for priming of the 2 major neutrophil antimicrobial defense mechanisms: oxygen radical generation and release of toxic granule contents.
By Mary Brown, I was sipping a soy latte after doing yoga on my recent eco-vacation and I started pondering a question: With the world turning “green”, wouldn’t it make sense to invest in green energy and divest from those evil, sinful companies? I gave it some serious thought and when I got home and, after I paid for my carbon offsets, I ran some numbers with some help from my hedge fund friends. First, we identified publicly traded companies that operate in the renewable energy sector. I’ll call them the “Green Group”. We came up with these stocks… Ascent Solar Technologies Inc Ballard Power Systems Inc Brookfield Renewable Partners Canadian Solar Inc Enphase Energy Inc First Solar Inc Gevo Inc Green Plains Inc JA Solar Holdings Co Ocean Power Technologies Inc Renesola Ltd Renewable Energy Group Inc SunPower Corp Sunrun Inc Vivint Solar Inc Yingli Green Energy Holdings Co Ltd Our second group we called “Sin Stocks”. These companies make chemicals and alcohol and guns and promote gambling. The list also includes defense stocks, “Big Pharma” and of course, the dreaded “Big Oil”. This list was bigger …. Alliance One International Inc Altria Group Inc American Outdoor Brands Corporation Anadarko Petroleum Corp Anheuser-Busch InBev SA/NV Archer-Daniels-Midland Co Basic Energy Services Inc Boston Beer Co Inc. (The) Boyd Gaming Corp BP PLC British American Tobacco PLC Caesars Entertainment Corp Chevron Corp Church & Dwight Co. Inc. Coca-Cola Co (The) Constellation Brands Inc CoreCivic Inc Craft Brew Alliance Inc CV Sciences Inc Diageo PLC Dow Chemical Co (The) E. I. 
du Pont de Nemours and Co Eli Lilly and Co Encore Capital Group Inc Exxon Mobil Corp FirstCash Inc General Dynamics Corp General Electric Co Halliburton Co Healthier Choices Management Corp Las Vegas Sands Corp Leucadia National Corp Lockheed Martin Corp McDonald’s Corp MGM Resorts International Molson Coors Brewing Co Monsanto Co Northrop Grumman Corp Occidental Petroleum Corp Oshkosh Corp PepsiCo Inc Pfizer Inc Philip Morris International Inc Phillips 66 Raytheon Co. RCI Hospitality Holdings Inc Schweitzer-Mauduit Intl Inc Sturm Ruger & Co Inc. Surna Inc Syngenta AG, Basel United Technologies Corp Universal Corp Valero Energy Corp Vector Group Ltd Western Alliance Bancorporation World Acceptance Corp Wynn Resorts Ltd To compare the two groups, we ran stock performance back to Jan 1, 2008. That was our starting point because that is when there were enough Green stocks in the list to be significant. Also, it included the Great Recession. The graph below compares the Greens versus the Sinners with the S&P 500 plotted for good measure. Starting with $100 in each group, the Sinners finished with $245, the S&P with $168 and the Greens had $20. Being a sinner was 12 times more profitable. Conclusion: People love to “sin”. It’s a good idea to invest in the companies that service them. Notes… Not all stocks were included due to liquidity screens. Past performance is not indicative of future results. Airplanes may or may not be moved before 20 feet of sea level occurs. Graph represents the value of $100 invested in each of three groups of stocks Advertisements Share this: Print Email Twitter Facebook Pinterest LinkedIn Reddit
<gh_stars>0 package pmel.sdig.las.shared.autobean; public class SuggestQuery { String name; String query; public String getQuery() { return query; } public void setQuery(String query) { this.query = query; } public String getName() { return name; } public void setName(String name) { this.name = name; } }