content
stringlengths
10
4.9M
package org.middlepath.mcapi.nbt;

import java.nio.charset.StandardCharsets;

import org.middlepath.mcapi.utils.BinaryUtils;

/**
 * An NBT tag holding a UTF-8 string value.
 *
 * Wire format of the value: a big-endian unsigned short byte-length prefix
 * followed by that many UTF-8 encoded bytes.
 */
public class StringNBTTag extends NBTTag<String> {

	public StringNBTTag(String name, String value, NBTTagType type) {
		super(name, value, type);
	}

	public StringNBTTag(byte[] bytes, int startIndex) {
		super(bytes, startIndex, NBTTagType.TAG_STRING);
	}

	/**
	 * Parses the string value from raw tag bytes.
	 *
	 * @param bytes      raw NBT bytes containing this tag
	 * @param startIndex index of the tag's first byte (the type byte)
	 * @return the decoded string value
	 */
	@Override
	protected String parseNBTTagValue(byte[] bytes, int startIndex) {
		// Skip the 1 type byte plus the tag name to reach the value.
		int index = 1 + startIndex + this.getNameLength();
		// The value is prefixed with a big-endian short byte count.
		short valueLength = BinaryUtils.bytesToShortBigEndian(bytes[index++], bytes[index++]);
		byte[] value = new byte[valueLength];
		System.arraycopy(bytes, index, value, 0, valueLength);
		// BUG FIX: decode with an explicit Charset constant. StandardCharsets.UTF_8
		// cannot throw UnsupportedEncodingException, so the old try/catch (which
		// could silently return null) is no longer needed.
		return new String(value, StandardCharsets.UTF_8);
	}

	/**
	 * @return serialized length of the value: 2 prefix bytes + encoded bytes.
	 */
	@Override
	public int getValueLength() {
		// BUG FIX: the length prefix counts UTF-8 encoded bytes, not Java chars;
		// value.length() under-counted for any non-ASCII character.
		return 2 + getValueBytes().length;
	}

	@Override
	public byte[] getValueBytes() {
		// BUG FIX: encode explicitly as UTF-8 to match parseNBTTagValue();
		// the previous call used the platform default charset.
		return getValue().getBytes(StandardCharsets.UTF_8);
	}

	@Override
	public byte[] getValueBytesLength() {
		return BinaryUtils.convertShortToBytesBigEndian((short) getValueBytes().length);
	}
}
/**
 * Compares two identifiers, giving priority to a preferred "first" identifier.
 *
 * If identifier1 and identifier2 are equal, returns 0. Otherwise, whichever
 * external identifier equals firstIdentifier sorts first; if neither matches
 * the first identifier, the natural order identifier1.compareTo(identifier2)
 * is returned.
 *
 * @param identifier1 first identifier to compare
 * @param identifier2 second identifier to compare
 * @param firstIdentifier the default identifier that we want to have first (may be null)
 * @return a negative, zero or positive int per the rules above.
 */
public static int compareIdentifiersWithDefaultIdentifier(String identifier1, String identifier2, String firstIdentifier) {
    int naturalOrder = identifier1.compareTo(identifier2);
    if (naturalOrder == 0) {
        return 0;
    }
    if (firstIdentifier == null) {
        return naturalOrder;
    }
    if (firstIdentifier.equals(identifier1)) {
        return -1;
    }
    if (firstIdentifier.equals(identifier2)) {
        return 1;
    }
    return naturalOrder;
}
"""Full size image dataset with images resized to a specified input."""
import logging
import re
from glob import glob

import numpy as np
from beartype import beartype
from PIL import Image

from cuticulus.core.datasets.imutils import autocrop
from cuticulus.core.datasets.splitter import DatasetSplitter
from cuticulus.messages import not_considered

log = logging.getLogger('rich')


class FullDataset(DatasetSplitter):
    """Full size image dataset."""

    @beartype
    def __init__(
        self,
        size: tuple[int, int],
        name: str = '',
        rebuild: bool = False,
        save: bool = True,
    ):
        """Initialize the dataset.

        Args:
            size (tuple): Tuple with (rows, cols).
            name (str): The name of the dataset.
            rebuild (bool): Whether to rebuild the dataset.
            save(bool): Whether to save the generated files for the dataset.
        """
        name = '{0}_full'.format(name)
        super().__init__(
            size=size,
            name=name,
            rebuild=rebuild,
            save=save,
        )

    @beartype
    def preprocess(self, img: np.ndarray) -> np.ndarray:
        """Preprocess the image.

        Automatically crop the images to squares centered on the middle of
        the image, where the ant head is typically located. Since this process
        is automatic, some images may not be cropped correctly.

        Args:
            img (np.ndarray): The image to preprocess.

        Returns:
            np.ndarray: The preprocessed image.
        """
        arr = autocrop(img)
        img = Image.fromarray(arr)
        # NOTE(review): Image.ANTIALIAS is deprecated in newer Pillow
        # (use Image.LANCZOS); kept as-is to match the pinned dependency.
        img = img.resize(self.size, Image.ANTIALIAS)
        return super().preprocess(np.array(img))

    @beartype
    def build_dataset(self) -> tuple:
        """Process images.

        Returns:
            tuple: The ids and images.

        Raises:
            ValueError: Failed to create dataset.
        """
        images = []
        labels = []
        ids = []
        for fin in glob(str(self.base_path / 'data' / '*.jpg')):
            filename = re.search(r'[\d]+\.jpg', fin).group()
            iid = int(filename.split('.')[0])
            try:
                label = self.get_label(iid)
            except Exception:
                log.info(not_considered(iid))
                # BUG FIX: skip images without a label. Previously execution
                # fell through and appended a stale label from the previous
                # iteration (or raised NameError on the first one).
                continue

            # get image and apply preprocessing
            img = self.preprocess(self.get_image(iid))

            images.append(img)
            labels.append(label)
            ids.append(iid)

        # BUG FIX: the old chained comparison
        # `len(images) != len(labels) != len(ids)` did not check
        # len(images) against len(ids); compare all three explicitly.
        if not (len(images) == len(labels) == len(ids)):
            raise ValueError('Number of images and labels do not match.')

        return (
            np.array(images),
            np.array(labels),
            np.array(ids),
        )
package com.example.weekthree.controller.response; import lombok.*; @Getter @Setter @Builder @NoArgsConstructor @AllArgsConstructor public class MemberDeleteResponse { private Long memberId; public static MemberDeleteResponse convertToMemberDeleteResponse(Long id) { return MemberDeleteResponse.builder() .memberId(id) .build(); } }
def simulate(self, challenge=None):
    """Produce a simulated (zero-knowledge) transcript for this statement.

    Args:
        challenge: Optional pre-chosen challenge to simulate against;
            forwarded unchanged to ``simulate_proof``.

    Returns:
        The simulated transcript, with ``stmt_hash`` set to this
        statement's digest so it can be tied back to the statement.
    """
    # Mark the statement as simulated before generating anything.
    self.set_simulated()
    # NOTE(review): set_simulated/prepare_simulate_proof/simulate_proof are
    # defined elsewhere on this class — presumably they set up randomness
    # for the simulator; confirm against the class definition.
    self.prepare_simulate_proof()
    transcript = self.simulate_proof(challenge=challenge)
    # Bind the transcript to this exact statement via its hash digest.
    transcript.stmt_hash = self.prehash_statement().digest()
    return transcript
package project

import (
	"context"
	"fmt"
	"log"
	"time"

	"cloud.google.com/go/bigquery"
	"github.com/emicklei/moneypenny/model"
	"github.com/google/uuid"
)

// appendEventsForAnomalies converts each anomaly report into an AnomalyEvent
// row and streams the batch into the BigQuery table configured by p.
// Returns an error if the BigQuery client cannot be created or the insert fails.
func appendEventsForAnomalies(anomalies []ProjectStatsReport, detector AnomalyDetector, p model.Params) error {
	ctx := context.Background()
	// The client is scoped to the project that owns the target events table.
	client, err := bigquery.NewClient(ctx, p.TargetProjectID())
	if err != nil {
		return err
	}
	defer client.Close()
	inserter := client.Dataset(p.TargetDatasetID()).Table(p.TargetTableID()).Inserter()
	// build events
	events := []AnomalyEvent{}
	for _, each := range anomalies {
		event := AnomalyEvent{
			// Fresh UUID + creation timestamp uniquely identify this detection.
			EventID:           uuid.New().String(),
			EventCreationTime: time.Now(),
			ProjectID:         each.LastDay.ProjectID,
			ProjectName:       each.LastDay.ProjectName,
			// Float fields are serialized to strings via fs(); see note below.
			Charges:           fs(each.LastDay.Charges),
			ChargesPercentage: fs(each.ChargesPercentage),
			Credits:           fs(each.LastDay.Credits),
			Mean:              fs(each.Mean),
			StandardDeviation: fs(each.StandardDeviation),
			DetectionDay:      each.LastDay.Day,
			Detector:          detector.String(),
		}
		events = append(events, event)
	}
	log.Printf("appending %d events to %s\n", len(events), p.TargetTableFQN)
	return inserter.Put(ctx, events)
}

// this exists because the pkg cannot handle float64 fields directly. TODO
// fs formats a float64 as its decimal string form (fmt "%f").
func fs(f float64) string {
	return fmt.Sprintf("%f", f)
}
OFFICIALS at Belfast City Council tipped off councillors that a journalist was probing their failure to declare property and business interests. Councillors were encouraged "as a matter of urgency" to complete their declaration forms – just hours after an Irish News reporter looked through the public register at City Hall. In an email, a senior staff member in the chief executive's department wrote: "A journalist has recently called with us to inspect the register of members' interests. "We do not have any record of you having made a return for the register and there must be a possibility that the journalist will be reporting on the matter in the local newspapers." He added: "I would encourage you to complete the attached form and return it to ... the members' support office as a matter of urgency." The unusual intervention emerges just weeks after The Irish News published an analysis of hundreds of council and company records. The investigation revealed almost two thirds of the north's councillors have not declared any land or property interests, while many more gave vague or incomplete disclosures. Some councillors had still not submitted their declaration forms – a year after the 11 new 'supercouncils' were formed. Councillors are required under their code of conduct to complete the forms to help assure the public that decisions are not being made for their own personal or financial benefit. Local authorities are only required to maintain a register for public inspection, with breaches of the code investigated by the new public services ombudsman. Jonathan Isaby, chief executive of the TaxPayers' Alliance, said councillors should be declaring their interests routinely. "It shouldn't need the threat of embarrassment to prod them into action, and it looks remarkably cosy that council staff appear to have been tipping off councillors about journalists' enquiries on this subject," he said. 
"Ratepayers deserve full and proper transparency so that they can make their own minds up as to whether or not their local representatives are acting in their best interests." Meanwhile, it has emerged that 14 Belfast councillors did not submit their forms in 2014-15, and eight did not submit their 2015-16 forms until earlier this year. One councillor, independent unionist Ruth Patterson, has still not submitted her declaration form. Belfast City Council had previously not recorded when councillors submitted their forms, but has reviewed its procedures after concerns were raised. Councillors also agreed in April to have the declaration forms published online for the first time following a proposal by Alliance. Alliance's Nuala McAllister said: "Having had two years to fill in the relevant forms, it is worrying that so many waited until they couldn't get away with it any longer. "Elected representatives should be doing everything they can to increase levels of openness and transparency, rather than allowing their failure to adhere to basic rules overshadow decisions made." The staff email warning councillors about a journalist was sent in January this year. In a statement a Belfast City Council spokesman said: "To ensure fair and balanced reporting, it is not only in the interests of the subject of a media query but also the media outlet itself that the individuals be advised of any query in relation to them to give them the opportunity to respond if they wish and review the accuracy of any published stories. There are no procedural reasons why they shouldn't be told."
class OutputFormat:
    """Output Format base class.

    Subclasses are registered automatically: ``__init_subclass__`` adds every
    subclass to the module-level ``output_formats`` set (defined elsewhere in
    this module).
    """

    def __init_subclass__(cls, *args, **kwargs):
        super().__init_subclass__(*args, **kwargs)
        # Auto-register each subclass so callers can discover available formats.
        output_formats.add(cls)

    def __init__(self, args: argparse.Namespace):
        """Store parsed CLI args; only ``args.output_file`` is read here."""
        self.args = args
        # Destination path for the formatted output.
        self.output_path = self.args.output_file

    @property
    def log(self) -> logging.Logger:
        # Per-subclass logger, named "<module>.<SubclassName>".
        return logging.getLogger(f"{__name__}.{self.__class__.__name__}")

    def format(self, shapes: Iterator[Shape]) -> None:
        """Render the given shapes to the output; subclasses must override."""
        raise NotImplementedError
// return options are string, error; error; string? // if this function is meant to consolidate all outputting functionality, then this thing should definitely handle errors itself func (o *Outputter) Output(data interface{}) error { var msg string var err error switch o.Format { case "json": msg, err = jsonProcessor(data) default: msg, err = jsonProcessor(data) } if err != nil { fmt.Printf("Error producing output: %v", err) return err } else { _, err := o.Destination.Write([]byte(msg)) return err } }
def validate_minibatch_size_str(minibatch_size_str):
    """Validate a minibatch-size specification string.

    Accepted forms:
      * a bare range string, e.g. ``"64"`` or ``"64:128"`` (validated by
        ``validate_range_str``), or
      * one or more ``size=range`` rules joined by ``/``, e.g.
        ``"128=64/256=32"``, where each ``size`` is a positive integer and
        each range passes ``validate_range_str``.

    Args:
        minibatch_size_str: candidate specification; non-strings are invalid.

    Returns:
        bool: True if the string is a valid minibatch-size specification.
    """
    if not isinstance(minibatch_size_str, str):
        return False
    parts = minibatch_size_str.split("/")
    # str.split always returns at least one element.
    assert len(parts) != 0
    for part in parts:
        fields = part.split('=')
        if len(fields) != 2:
            # A bare range is only legal when it is the entire specification.
            if len(parts) == 1 and len(fields) == 1:
                return validate_range_str(part)
            return False
        # BUG FIX: the original did `i = b[0]` (a string) and then `i <= 0`,
        # which raised TypeError, was swallowed by the bare `except`, and
        # rejected every well-formed "size=range" specification. Convert to
        # int explicitly and only treat a non-numeric size as invalid.
        try:
            size = int(fields[0])
        except ValueError:
            return False
        if size <= 0:
            return False
        if not validate_range_str(fields[1]):
            return False
    return True
import { GroupsGroup, PhotosPhoto, UsersUser } from '@vkontakte/api-schema-typescript' import { getBiggestSize } from '@/utils/get-biggest-size' import { getName } from '@/utils/get-name' /** * Конвертирует фотографии из PhotosPhoto * в ViewerPhoto для передачи в просмотрщик */ export const photoToViewerPhoto = ( photo: PhotosPhoto, owner?: UsersUser | GroupsGroup, ): ViewerPhoto => { return { date: photo.date, url: getBiggestSize(photo.sizes!).url, owner: { name: getName(owner), photo: owner?.photo_50, }, } }
/**
 * Generic Cloud Storage trigger handler: logs the event metadata and the
 * affected file's details, then signals completion via the callback.
 */
function helloGCSGeneric(event, callback) {
  const { data: file, context } = event;

  console.log(`Event ${context.eventId}`);
  console.log(`  Event Type: ${context.eventType}`);
  console.log(`  Bucket: ${file.bucket}`);
  console.log(`  File: ${file.name}`);
  console.log(`  Metageneration: ${file.metageneration}`);
  console.log(`  Created: ${file.timeCreated}`);
  console.log(`  Updated: ${file.updated}`);

  callback();
}

export { helloGCSGeneric };
def mountedsamples(self):
    """Samples from this user's mounted shares that are visible to them.

    Returns:
        list: samples whose share belongs to the current user, which the
        current user can directly access, and which are not deleted.
    """
    # NOTE(review): `current_user` is resolved from enclosing module scope
    # (presumably the web framework's request-bound user) — confirm at import
    # site. `is_accessible_for(..., direct_only=True)` semantics are defined
    # on the sample model elsewhere.
    return [s.sample for s in self.mountedshares
            if s.user == current_user
            and s.sample.is_accessible_for(current_user, direct_only=True)
            and not s.sample.isdeleted]
<reponame>paulfd/alo<gh_stars>0 /* Copyright 2006-2012 <NAME> <<EMAIL>> Copyright 2006 <NAME> <<EMAIL>> Copyright 2018 Stevie <<EMAIL>> Copyright 2018 <NAME> <<EMAIL>> Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ /** Include standard C headers */ #include <math.h> #include <stdlib.h> #include <stdio.h> #include <time.h> #include <sys/time.h> #include "lv2/lv2plug.in/ns/ext/atom/atom.h" #include "lv2/lv2plug.in/ns/ext/atom/util.h" #include "lv2/lv2plug.in/ns/ext/time/time.h" #include "lv2/lv2plug.in/ns/ext/urid/urid.h" #include "lv2/lv2plug.in/ns/lv2core/lv2.h" #include <lv2/lv2plug.in/ns/ext/midi/midi.h> #define ALO_URI "http://devcurmudgeon.com/alo" typedef struct { LV2_URID atom_Blank; LV2_URID atom_Float; LV2_URID atom_Object; LV2_URID midi_MidiEvent; LV2_URID atom_Path; LV2_URID atom_Resource; LV2_URID atom_Sequence; LV2_URID time_Position; LV2_URID time_barBeat; LV2_URID time_beatsPerMinute; LV2_URID time_beatsPerBar; LV2_URID time_speed; } AloURIs; typedef enum { ALO_INPUT = 0, ALO_OUTPUT = 1, ALO_BARS = 2, ALO_CONTROL = 3, ALO_LOOP1 = 4, ALO_LOOP2 = 5, ALO_LOOP3 = 6, ALO_LOOP4 = 7, ALO_LOOP5 = 8, ALO_LOOP6 = 9, ALO_THRESHOLD = 10, ALO_MIDIIN = 11, ALO_MIDI_BASE = 12, ALO_PER_BEAT_LOOPS = 13, ALO_CLICK = 14, } PortIndex; typedef enum { // NB: for all states, we are always recording in the background STATE_RECORDING, 
// no loop is set, we are only recording STATE_LOOP_ON, // the loop is playing STATE_LOOP_OFF // the loop is not playing } State; typedef enum { STATE_OFF, // No click STATE_ATTACK, // Envelope rising STATE_DECAY, // Envelope lowering STATE_SILENT // Silent } ClickState; static const size_t STORAGE_MEMORY = 2880000; static const int NUM_LOOPS = 6; static const bool LOG_ENABLED = false; void log(const char *message, ...) { if (!LOG_ENABLED) { return; } FILE* f; f = fopen("/root/alo.log", "a+"); char buffer[2048]; va_list argumentList; va_start(argumentList, message); vsnprintf(&buffer[0], sizeof(buffer), message, argumentList); va_end(argumentList); fwrite(buffer, 1, strlen(buffer), f); fprintf(f, "\n"); fclose(f); } /// /// Convert an input parameter expressed as db into a linear float value /// static float dbToFloat(float db) { if (db <= -90.0f) return 0.0f; return powf(10.0f, db * 0.05f); } /** Every plugin defines a private structure for the plugin instance. All data associated with a plugin instance is stored here, and is available to every instance method. 
*/ typedef struct { LV2_URID_Map* map; // URID map feature AloURIs uris; // Cache of mapped URIDs // Port buffers struct { const float* input; float* loops[NUM_LOOPS]; float* bars; LV2_Atom_Sequence* control; float* threshold; float* output; float* midi_base; // start note for midi control of loops float* pb_loops; // number of loops in per-beat mode float* click; // click mode on/off LV2_Atom_Sequence* midiin; // midi input } ports; // Variables to keep track of the tempo information sent by the host double rate; // Sample rate float bpm; // Beats per minute (tempo) float bpb; // Beats per bar float speed; // Transport speed (usually 0=stop, 1=play) float threshold; // minimum level to trigger loop start uint32_t loop_beats; // loop length in beats uint32_t loop_samples; // loop length in samples uint32_t current_bb; // which beat of the bar we are on (1, 2, 3, 0) uint32_t current_lb; // which beat of the loop we are on (1, 2, ...) uint32_t pb_loops; // number of loops in per-beat mode State state[NUM_LOOPS]; // we're recording, playing or not playing bool button_state[NUM_LOOPS]; bool midi_control = false; uint32_t button_time[NUM_LOOPS]; // last time button was pressed float* loops[NUM_LOOPS]; // pointers to memory for playing loops uint32_t phrase_start[NUM_LOOPS]; // index into recording/loop float* recording; // pointer to memory for recording - for all loops uint32_t loop_index; // index into loop for current play point ClickState clickstate; uint32_t elapsed_len; // Frames since the start of the last click uint32_t wave_offset; // Current play offset in the wave // One cycle of a sine wave float* wave; uint32_t wave_len; // Envelope parameters uint32_t attack_len; uint32_t decay_len; } Alo; /** The `instantiate()` function is called by the host to create a new plugin instance. The host passes the plugin descriptor, sample rate, and bundle path for plugins that need to load additional resources (e.g. waveforms). 
The features parameter contains host-provided features defined in LV2 extensions, but this simple plugin does not use any. This function is in the ``instantiation'' threading class, so no other methods on this instance will be called concurrently with it. */ static LV2_Handle instantiate(const LV2_Descriptor* descriptor, double rate, const char* bundle_path, const LV2_Feature* const* features) { Alo* self = (Alo*)calloc(1, sizeof(Alo)); self->rate = rate; self->bpb = 4; self->loop_beats = 0; self->current_bb = 0; self->current_lb = 0; self->recording = (float *)calloc(STORAGE_MEMORY, sizeof(float)); for (int i = 0; i < NUM_LOOPS; i++) { self->loops[i] = (float *)calloc(STORAGE_MEMORY, sizeof(float)); self->phrase_start[i] = 0; self->state[i] = STATE_RECORDING; } self->loop_index = 0; self->threshold = 0.0; LV2_URID_Map* map = NULL; for (int i = 0; features[i]; ++i) { if (!strcmp(features[i]->URI, LV2_URID_URI "#map")) { map = (LV2_URID_Map*)features[i]->data; } } if (!map) { fprintf(stderr, "Host does not support urid:map.\n"); free(self); return NULL; } // Map URIS AloURIs* const uris = &self->uris; self->map = map; uris->atom_Blank = map->map(map->handle, LV2_ATOM__Blank); uris->atom_Float = map->map(map->handle, LV2_ATOM__Float); uris->atom_Object = map->map(map->handle, LV2_ATOM__Object); uris->atom_Path = map->map(map->handle, LV2_ATOM__Path); uris->atom_Resource = map->map(map->handle, LV2_ATOM__Resource); uris->atom_Sequence = map->map(map->handle, LV2_ATOM__Sequence); uris->time_Position = map->map(map->handle, LV2_TIME__Position); uris->time_barBeat = map->map(map->handle, LV2_TIME__barBeat); uris->time_beatsPerMinute = map->map(map->handle, LV2_TIME__beatsPerMinute); uris->time_speed = map->map(map->handle, LV2_TIME__speed); uris->time_beatsPerBar = map->map(map->handle, LV2_TIME__beatsPerBar); uris->midi_MidiEvent = map->map (map->handle, LV2_MIDI__MidiEvent); // Generate one cycle of a sine wave at the desired frequency const double freq = 440.0 * 2.0; 
const double amp = 0.5; self->wave_len = (uint32_t)(rate / freq); self->wave = (float*)malloc(self->wave_len * sizeof(float)); for (uint32_t i = 0; i < self->wave_len; ++i) { self->wave[i] = (float)(sin(i * 2 * M_PI * freq / rate) * amp); } return (LV2_Handle)self; } /** The `connect_port()` method is called by the host to connect a particular port to a buffer. The plugin must store the data location, but data may not be accessed except in run(). This method is in the ``audio'' threading class, and is called in the same context as run(). */ static void connect_port(LV2_Handle instance, uint32_t port, void* data) { Alo* self = (Alo*)instance; switch ((PortIndex)port) { case ALO_INPUT: self->ports.input = (const float*)data; log("Connect ALO_INPUT %d", port); break; case ALO_OUTPUT: self->ports.output = (float*)data; log("Connect ALO_OUTPUT %d", port); break; case ALO_BARS: self->ports.bars = (float*)data; log("Connect ALO_BEATS %d %d", port); break; case ALO_CONTROL: self->ports.control = (LV2_Atom_Sequence*)data; log("Connect ALO_CONTROL %d", port); break; case ALO_THRESHOLD: self->ports.threshold = (float*)data; log("Connect ALO_THRESHOLD %d %d", port); break; case ALO_MIDIIN: self->ports.midiin = (LV2_Atom_Sequence*)data; log("Connect ALO_MIDIIN %d %d", port); break; case ALO_MIDI_BASE: self->ports.midi_base = (float*)data; log("Connect ALO_MIDI_BASE %d %d", port); break; case ALO_PER_BEAT_LOOPS: self->ports.pb_loops = (float*)data; log("Connect ALO_PER_BEAT_LOOPS %d %d", port); break; case ALO_CLICK: self->ports.click = (float*)data; log("Connect ALO_CLICK %d %d", port); break; default: int loop = port - 4; self->ports.loops[loop] = (float*)data; log("Connect ALO_LOOP %d", loop); } } static void reset(Alo* self) { self->pb_loops = (uint32_t)floorf(*(self->ports.pb_loops)); self->loop_beats = (uint32_t)floorf(self->bpb) * (uint32_t)floorf(*(self->ports.bars)); self->loop_samples = self->loop_beats * self->rate * 60.0f / self->bpm; if (self->loop_samples > 
STORAGE_MEMORY) { self->loop_samples = STORAGE_MEMORY; } self->loop_index = 0; log("Loop beats: %d", self->loop_beats); log("BPM: %G", self->bpm); log("Loop_samples: %d", self->loop_samples); for (int i = 0; i < NUM_LOOPS; i++) { self->button_state[i] = (*self->ports.loops[i]) > 0.0f ? true : false; self->state[i] = STATE_RECORDING; self->phrase_start[i] = 0; log("STATE: RECORDING (reset) [%d]", i); } self->clickstate = STATE_OFF; uint32_t click = (uint32_t)floorf(*(self->ports.click)); if (click != 0) { self->clickstate = STATE_SILENT; } } /** The `activate()` method is called by the host to initialise and prepare the plugin instance for running. The plugin must reset all internal state except for buffer locations set by `connect_port()`. Since this plugin has no other internal state, this method does nothing. This method is in the ``instantiation'' threading class, so no other methods on this instance will be called concurrently with it. */ static void activate(LV2_Handle instance) { log("Activate"); } /** Update the current (midi) position based on a host message. This is called by run() when a time:Position is received. */ static void update_position(Alo* self, const LV2_Atom_Object* obj) { AloURIs* const uris = &self->uris; // Received new transport position/speed LV2_Atom *beat = NULL, *bpm = NULL, *bpb = NULL, *speed = NULL; lv2_atom_object_get(obj, uris->time_barBeat, &beat, uris->time_beatsPerMinute, &bpm, uris->time_speed, &speed, uris->time_beatsPerBar, &bpb, NULL); if (bpb && bpb->type == uris->atom_Float) { if (self->bpb != ((LV2_Atom_Float*)bpb)->body) { self->bpb = ((LV2_Atom_Float*)bpb)->body; reset(self); } } if (bpm && bpm->type == uris->atom_Float) { if (round(self->bpm) != round(((LV2_Atom_Float*)bpm)->body)) { // Tempo changed, update BPM self->bpm = ((LV2_Atom_Float*)bpm)->body; reset(self); } } if (speed && speed->type == uris->atom_Float) { if (self->speed != ((LV2_Atom_Float*)speed)->body) { // Speed changed, e.g. 
0 (stop) to 1 (play) // reset the loop start self->speed = ((LV2_Atom_Float*)speed)->body; reset(self); log("Speed change: %G", self->speed); log("Loop: [%d][%d]", self->loop_beats, self->loop_samples); }; } if (beat && beat->type == uris->atom_Float) { // Received a beat position, synchronise // const float frames_per_beat = 60.0f / self->bpm * self->rate; const float bar_beat = ((LV2_Atom_Float*)beat)->body; // const float beat_beats = bar_beats - floorf(bar_beats); if (self->current_bb != (uint32_t)bar_beat) { // we are onto the next beat self->current_bb = (uint32_t)bar_beat; if (self->current_lb == self->loop_beats) { self->current_lb = 0; } log("Beat:[%d][%d] index[%d] beat[%G]", self->current_bb, self->current_lb, self->loop_index, bar_beat); self->current_lb += 1; } } } /** Adjust self->state based on button presses. */ static void button_logic(LV2_Handle instance, bool new_button_state, int i) { Alo* self = (Alo*)instance; struct timeval te; gettimeofday(&te, NULL); // get current time long long milliseconds = te.tv_sec*1000LL + te.tv_usec/1000; log("Button logic [%d]", i); self->button_state[i] = new_button_state; int difference = milliseconds - self->button_time[i]; self->button_time[i] = milliseconds; if (new_button_state == true) { log("button ON for loop [%d]", i); } else { log("button OFF for loop [%d]", i); } if (difference < 1000) { // double press, user is resetting // so back to recording mode self->state[i] = STATE_RECORDING; self->phrase_start[i] = 0; log("STATE: RECORDING (button reset) [%d]", i); } } /** ** Taken directly from metro.c ** Play back audio for the range [begin..end) relative to this cycle. This is called by run() in-between events to output audio up until the current time. 
*/ static void click(Alo* self, uint32_t begin, uint32_t end) { float* const output = self->ports.output; const uint32_t frames_per_beat = 60.0f / self->bpm * self->rate; if (self->speed == 0.0f) { memset(output, 0, (end - begin) * sizeof(float)); return; } for (uint32_t i = begin; i < end; ++i) { switch (self->clickstate) { case STATE_ATTACK: // Amplitude increases from 0..1 until attack_len output[i] = self->wave[self->wave_offset] * self->elapsed_len / (float)self->attack_len; if (self->elapsed_len >= self->attack_len) { self->clickstate = STATE_DECAY; } break; case STATE_DECAY: // Amplitude decreases from 1..0 until attack_len + decay_len output[i] = 0.0f; output[i] = self->wave[self->wave_offset] * (1 - ((self->elapsed_len - self->attack_len) / (float)self->decay_len)); if (self->elapsed_len >= self->attack_len + self->decay_len) { self->clickstate = STATE_SILENT; } break; case STATE_SILENT: case STATE_OFF: output[i] = 0.0f; } // We continuously play the sine wave regardless of envelope self->wave_offset = (self->wave_offset + 1) % self->wave_len; // Update elapsed time and start attack if necessary if (++self->elapsed_len == frames_per_beat) { self->clickstate = STATE_ATTACK; self->elapsed_len = 0; } } } /** The `run()` method is the main process function of the plugin. It processes a block of audio in the audio context. Since this plugin is `lv2:hardRTCapable`, `run()` must be real-time safe, so blocking (e.g. with a mutex) or memory allocation are not allowed. 
*/ static void run(LV2_Handle instance, uint32_t n_samples) { Alo* self = (Alo*)instance; const float* const input = self->ports.input; float sample = 0.0; float* const output = self->ports.output; float* const recording = self->recording; self->threshold = dbToFloat(*self->ports.threshold); uint32_t last_t = 0; for (uint32_t pos = 0; pos < n_samples; pos++) { // recording always happens sample = input[pos]; output[pos] = 0; // log("Sample: %.9f", sample); recording[self->loop_index] = sample; for (int i = 0; i < NUM_LOOPS; i++) { if (self->phrase_start[i] && self->phrase_start[i] == self->loop_index) { if (self->button_state[i]) { self->state[i] = STATE_LOOP_ON; log("[%d]PHRASE: LOOP ON [%d]", i, self->loop_index); self->clickstate = STATE_OFF; } else { if (self->state[i] == STATE_RECORDING) { self->phrase_start[i] = 0; log("[%d]PHRASE: Abandon phrase [%d]", i, self->loop_index); } else { self->state[i] = STATE_LOOP_OFF; log("[%d]PHRASE: LOOP OFF [%d]", i, self->loop_index); } } } if (self->loop_index % (self->loop_samples / self->loop_beats) == 0) { if (self->pb_loops > i && self->state[i] != STATE_RECORDING) { if (self->button_state[i]) { self->state[i] = STATE_LOOP_ON; log("[%d]BEAT: LOOP ON [%d]", i, self->loop_index); } else { self->state[i] = STATE_LOOP_OFF; log("[%d]BEAT: LOOP OFF [%d]", i, self->loop_index); } } } float* const loop = self->loops[i]; if (self->state[i] == STATE_RECORDING && self->button_state[i]) { loop[self->loop_index] = sample; if (self->phrase_start[i] == 0 && self->speed != 0) { if (fabs(sample) > self->threshold) { self->phrase_start[i] = self->loop_index; log("[%d]>>> DETECTED PHRASE START [%d]<<<", i, self->loop_index); } } } if (self->state[i] == STATE_LOOP_ON && self->speed != 0) { output[pos] += loop[self->loop_index]; } } self->loop_index += 1; if (self->loop_index >= self->loop_samples) { self->loop_index = 0; } } const LV2_Atom_Sequence* midiin = self->ports.midiin; for (const LV2_Atom_Event* ev = 
lv2_atom_sequence_begin(&midiin->body); !lv2_atom_sequence_is_end(&midiin->body, midiin->atom.size, ev); ev = lv2_atom_sequence_next(ev)) { // Play the click for the time slice from last_t until now if (self->clickstate != STATE_OFF) { if (self->clickstate != STATE_SILENT) { click(self, last_t, ev->time.frames); } // Update time for next iteration and move to next event last_t = ev->time.frames; } if (ev->body.type == self->uris.midi_MidiEvent) { const uint8_t* const msg = (const uint8_t*)(ev + 1); int i = msg[1] - (uint32_t)floorf(*(self->ports.midi_base)); if (i >= 0 && i < NUM_LOOPS) { if (lv2_midi_message_type(msg) == LV2_MIDI_MSG_NOTE_ON) { button_logic(self, true, i); } if (lv2_midi_message_type(msg) == LV2_MIDI_MSG_NOTE_OFF) { button_logic(self, false, i); } self->midi_control = true; } } } if (self->clickstate != STATE_OFF) { // Play for remainder of cycle click(self, last_t, n_samples); } if (self->midi_control == false) { for (int i = 0; i < NUM_LOOPS; i++) { bool new_button_state = (*self->ports.loops[i]) > 0.0f ? true : false; if (new_button_state != self->button_state[i]) { button_logic(self, new_button_state, i); } } } const AloURIs* uris = &self->uris; // from metro.c // Work forwards in time frame by frame, handling events as we go const LV2_Atom_Sequence* in = self->ports.control; for (const LV2_Atom_Event* ev = lv2_atom_sequence_begin(&in->body); !lv2_atom_sequence_is_end(&in->body, in->atom.size, ev); ev = lv2_atom_sequence_next(ev)) { // Check if this event is an Object // (or deprecated Blank to tolerate old hosts) if (ev->body.type == uris->atom_Object || ev->body.type == uris->atom_Blank) { const LV2_Atom_Object* obj = (const LV2_Atom_Object*)&ev->body; if (obj->body.otype == uris->time_Position) { // Received position information, update update_position(self, obj); } } } } /** The `deactivate()` method is the counterpart to `activate()`, and is called by the host after running the plugin. 
It indicates that the host will not call `run()` again until another call to `activate()` and is mainly useful for more advanced plugins with ``live'' characteristics such as those with auxiliary processing threads. As with `activate()`, this plugin has no use for this information so this method does nothing. This method is in the ``instantiation'' threading class, so no other methods on this instance will be called concurrently with it. */ static void deactivate(LV2_Handle instance) { log("Deactivate"); } /** Destroy a plugin instance (counterpart to `instantiate()`). This method is in the ``instantiation'' threading class, so no other methods on this instance will be called concurrently with it. */ static void cleanup(LV2_Handle instance) { Alo* self = (Alo*)instance; for (int i = 0; i < NUM_LOOPS; i++) { free(self->loops[i]); } free(self->recording); free(self); } /** The `extension_data()` function returns any extension data supported by the plugin. Note that this is not an instance method, but a function on the plugin descriptor. It is usually used by plugins to implement additional interfaces. This plugin does not have any extension data, so this function returns NULL. This method is in the ``discovery'' threading class, so no other functions or methods in this plugin library will be called concurrently with it. */ static const void* extension_data(const char* uri) { return NULL; } /** Every plugin must define an `LV2_Descriptor`. It is best to define descriptors statically to avoid leaking memory and non-portable shared library constructors and destructors to clean up properly. */ static const LV2_Descriptor descriptor = { ALO_URI, instantiate, connect_port, activate, run, deactivate, cleanup, extension_data }; /** The `lv2_descriptor()` function is the entry point to the plugin library. The host will load the library and call this function repeatedly with increasing indices to find all the plugins defined in the library. 
The index is not an indentifier, the URI of the returned descriptor is used to determine the identify of the plugin. This method is in the ``discovery'' threading class, so no other functions or methods in this plugin library will be called concurrently with it. */ LV2_SYMBOL_EXPORT const LV2_Descriptor* lv2_descriptor(uint32_t index) { switch (index) { case 0: return &descriptor; default: return NULL; } }
import {Input} from 'antd';
import {InputProps} from 'antd/lib/input';
import autobind from "autobind-decorator";
import React from "react";

import {fioConverterWithoutTrim} from "../validator";

/**
 * Normalizes a FIO (full-name) string via the project converter.
 * Empty/undefined input is passed through unchanged (`value &&` keeps the
 * original falsy value rather than calling the converter).
 */
function convertValue(value?: string): string | null | undefined {
    return value && fioConverterWithoutTrim(value);
}

export type CustomFioInputProps = InputProps & {}

/**
 * Ant Design input that keeps its value FIO-normalized.
 *
 * Whenever the controlled `value` prop changes to something the converter
 * would alter, a synthetic change event carrying the normalized value is
 * emitted so the owner's state converges to the normalized form.
 */
export class CustomFioInput extends React.Component<CustomFioInputProps> {

    public componentDidUpdate(): void {
        // Re-normalize externally supplied values; emit a synthetic change
        // event only when normalization actually altered the value, to
        // avoid an infinite update loop.
        const value = this.props.value && convertValue(this.props.value as string);
        if (this.props.value !== value) {
            this.onChange({target: {value}} as any);
        }
    }

    public render(): React.ReactNode {
        return (
            <Input
                {...this.props}
                onChange={this.onChange}
            />
        );
    }

    @autobind
    private onChange(event: React.ChangeEvent<HTMLInputElement>): void {
        const value = event.target.value && convertValue(event.target.value);
        // Bug fix: `onChange` is optional in InputProps; calling it
        // unconditionally threw a TypeError when no handler was supplied.
        if (this.props.onChange) {
            this.props.onChange({
                ...event,
                target: {
                    ...event.target,
                    value
                }
            });
        }
    }
}
/*
 * NAME:	dbAllocCtl()
 *
 * FUNCTION:	attempt to allocate a specified number of contiguous
 *		blocks starting within a specific dmap.
 *
 *		this routine is called by higher level routines that search
 *		the dmap control pages above the actual dmaps for contiguous
 *		free space.  the result of successful searches by these
 *		routines are the starting block numbers within dmaps, with
 *		the dmaps themselves containing the desired contiguous free
 *		space or starting a contiguous free space of desired size
 *		that is made up of the blocks of one or more dmaps.  these
 *		calls should not fail due to insufficient resources.
 *
 *		this routine is called in some cases where it is not known
 *		whether it will fail due to insufficient resources.  more
 *		specifically, this occurs when allocating from an allocation
 *		group whose size is equal to the number of blocks per dmap.
 *		in this case, the dmap control pages are not examined prior
 *		to calling this routine (to save pathlength) and the call
 *		might fail.
 *
 *		for a request size that fits within a dmap, this routine relies
 *		upon the dmap's dmtree to find the requested contiguous free
 *		space.  for request sizes that are larger than a dmap, the
 *		requested free space will start at the first block of the
 *		first dmap (i.e. blkno).
 *
 * PARAMETERS:
 *	bmp	-  pointer to bmap descriptor
 *	nblocks	-  actual number of contiguous free blocks to allocate.
 *	l2nb	-  log2 number of contiguous free blocks to allocate.
 *	blkno	-  starting block number of the dmap to start the allocation
 *		   from.
 *	results	-  on successful return, set to the starting block number
 *		   of the newly allocated range.
 *
 * RETURN VALUES:
 *	0	- success
 *	-ENOSPC	- insufficient disk resources
 *	-EIO	- i/o error
 *
 * serialization: IWRITE_LOCK(ipbmap) held on entry/exit;
 */
static int dbAllocCtl(struct bmap * bmp, s64 nblocks, int l2nb, s64 blkno,
		      s64 * results)
{
	int rc, nb;
	s64 b, lblkno, n;
	struct metapage *mp;
	struct dmap *dp;

	/* the request fits within a single dmap: let the dmap's dmtree
	 * find the contiguous range.
	 */
	if (l2nb <= L2BPERDMAP) {
		/* read in the dmap covering the start block */
		lblkno = BLKTODMAP(blkno, bmp->db_l2nbperpage);
		mp = read_metapage(bmp->db_ipbmap, lblkno, PSIZE, 0);
		if (mp == NULL)
			return -EIO;
		dp = (struct dmap *) mp->data;

		/* attempt the allocation; only dirty the page on success */
		rc = dbAllocDmapLev(bmp, dp, (int) nblocks, l2nb, results);
		if (rc == 0)
			mark_metapage_dirty(mp);

		release_metapage(mp);

		return (rc);
	}

	/* a multi-dmap request must start on a dmap boundary */
	assert((blkno & (BPERDMAP - 1)) == 0);

	/* allocate dmap by dmap; every dmap but possibly the last must be
	 * entirely free (its dmtree root reports L2BPERDMAP free blocks).
	 */
	for (n = nblocks, b = blkno; n > 0; n -= nb, b += nb) {
		/* read in the dmap for this iteration's range */
		lblkno = BLKTODMAP(b, bmp->db_l2nbperpage);
		mp = read_metapage(bmp->db_ipbmap, lblkno, PSIZE, 0);
		if (mp == NULL) {
			rc = -EIO;
			goto backout;
		}
		dp = (struct dmap *) mp->data;

		/* the callers guaranteed contiguous free space, so a dmap
		 * that is not completely free indicates corruption.
		 */
		if (dp->tree.stree[ROOT] != L2BPERDMAP) {
			release_metapage(mp);
			jfs_error(bmp->db_ipbmap->i_sb,
				  "the dmap is not all free\n");
			rc = -EIO;
			goto backout;
		}

		/* take either the whole dmap or the remaining tail */
		nb = min_t(s64, n, BPERDMAP);

		if ((rc = dbAllocDmap(bmp, dp, b, nb))) {
			release_metapage(mp);
			goto backout;
		}

		/* write the dmap back to disk */
		write_metapage(mp);
	}

	/* the whole range was allocated starting at the requested block */
	*results = blkno;

	return (0);

      backout:
	/* undo the allocations performed so far: free every fully
	 * allocated dmap again (n still holds the unallocated remainder,
	 * so nblocks - n is the amount successfully allocated).
	 */
	for (n = nblocks - n, b = blkno; n > 0;
	     n -= BPERDMAP, b += BPERDMAP) {
		/* re-read the dmap; on failure the blocks leak, which is
		 * reported but not fatal here.
		 */
		lblkno = BLKTODMAP(b, bmp->db_l2nbperpage);
		mp = read_metapage(bmp->db_ipbmap, lblkno, PSIZE, 0);
		if (mp == NULL) {
			jfs_error(bmp->db_ipbmap->i_sb,
				  "I/O Error: Block Leakage\n");
			continue;
		}
		dp = (struct dmap *) mp->data;

		if (dbFreeDmap(bmp, dp, b, BPERDMAP)) {
			/* could not free the blocks: report the leak */
			release_metapage(mp);
			jfs_error(bmp->db_ipbmap->i_sb, "Block Leakage\n");
			continue;
		}

		/* write the updated dmap back to disk */
		write_metapage(mp);
	}

	return (rc);
}
/* Allocate (if not already allocated) all necessary memory pages to * access 'size' bytes at 'addr'. These two fields do not need to be * aligned to page boundaries. * If some page already exists, add permissions. */ void mem_map(struct mem_t *mem, unsigned int addr, int size, enum mem_access_t perm) { unsigned int tag1, tag2, tag; struct mem_page_t *page; tag1 = addr & ~(MEM_PAGE_SIZE-1); tag2 = (addr + size - 1) & ~(MEM_PAGE_SIZE-1); for (tag = tag1; tag <= tag2; tag += MEM_PAGE_SIZE) { page = mem_page_get(mem, tag); if (!page) page = mem_page_create(mem, tag, perm); page->perm |= perm; } }
package pl.gov.coi.pomocua.ads.dev;

import lombok.RequiredArgsConstructor;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import pl.gov.coi.pomocua.ads.Location;
import pl.gov.coi.pomocua.ads.accomodations.AccommodationOffer;
import pl.gov.coi.pomocua.ads.accomodations.AccommodationsRepository;
import pl.gov.coi.pomocua.ads.authentication.CurrentUser;
import pl.gov.coi.pomocua.ads.materialaid.MaterialAidCategory;
import pl.gov.coi.pomocua.ads.materialaid.MaterialAidOffer;
import pl.gov.coi.pomocua.ads.materialaid.MaterialAidOfferRepository;
import pl.gov.coi.pomocua.ads.transport.TransportOffer;
import pl.gov.coi.pomocua.ads.transport.TransportOfferRepository;

import javax.annotation.PostConstruct;
import java.time.LocalDate;
import java.util.List;

/**
 * Seeds the repositories with example offers for local development.
 * Only active under the {@code dev} Spring profile; each {@code @PostConstruct}
 * method inserts two fixture offers of one category at application startup.
 * All offers are attributed to the currently authenticated (fake) user.
 */
@Component
@Profile("dev")
@RequiredArgsConstructor
public class FakeOffersCreator {

    private final TransportOfferRepository transportOfferRepository;
    private final AccommodationsRepository accommodationsRepository;
    private final MaterialAidOfferRepository materialAidOfferRepository;
    private final CurrentUser currentUser;

    /** Inserts two example transport offers. */
    @PostConstruct
    public void transport() {
        TransportOffer o1 = new TransportOffer();
        o1.title = "Transport busem 8osobowy";
        o1.description = "Witam, mam busa 8 osobowego jestem wstanie pomóż w transporcie. " +
                "Mogę też przewieź rzeczy pod granice.";
        o1.userId = currentUser.getCurrentUserId();
        o1.userFirstName = "Marta";
        o1.origin = new Location("Pomorskie", "Gdynia");
        o1.destination = new Location("Pomorskie", "Gdynia");
        o1.capacity = 11;
        o1.transportDate = LocalDate.now();
        o1.phoneNumber = "+48123456789";

        TransportOffer o2 = new TransportOffer();
        o2.title = "Darmowy transport na granicę i z granicy z Ostrowa i okolic";
        o2.description = "Darmowy transport z Ostrowa i okolic na granicę z Ukraniną i z granicy " +
                "mam 4 miejsca mam foteliki dla dzieci najleipiej w weekend";
        o2.userId = currentUser.getCurrentUserId();
        o2.userFirstName = "Mariusz";
        o2.origin = new Location("Pomorskie", "Gdańsk");
        o2.destination = new Location("Mazowieckie", "Warszawa");
        o2.capacity = 10;
        o2.transportDate = LocalDate.now();
        o2.phoneNumber = "+48123456780";

        transportOfferRepository.save(o1);
        transportOfferRepository.save(o2);
    }

    /** Inserts two example accommodation offers. */
    @PostConstruct
    public void accommodation() {
        AccommodationOffer o1 = new AccommodationOffer();
        o1.title = "Mieszkanie w bloku, 2 osoby - Rzeszów, woj. podkarpackie";
        o1.description = "nocleg noclegmazowieckie transport Dolnośląskie, miejscowość Wrocław - ok. 5 km od Dworca głównego. Kawalerka na wyłączność pomieści 2 osoby + zwierzęta są mile widziane. Okres: 2 miesiące, Bezpłatnie....";
        o1.userId = currentUser.getCurrentUserId();
        o1.userFirstName = "Basia";
        o1.location = new Location("podkarpackie", "Rzeszów");
        o1.hostLanguage = List.of(AccommodationOffer.Language.PL, AccommodationOffer.Language.UA);
        o1.guests = 2;
        o1.lengthOfStay = AccommodationOffer.LengthOfStay.MONTH_2;
        o1.phoneNumber = "+48123456789";

        AccommodationOffer o2 = new AccommodationOffer();
        o2.title = "Mieszkanie w bloku, 4 osoby - Międzygórze, woj. podlaskie";
        o2.description = "Kawalerka na wyłączność pomieści 2 osoby + zwierzęta są mile widziane. Okres: 2 miesiące, Bezpłatnie....";
        o2.userId = currentUser.getCurrentUserId();
        o2.userFirstName = "Piotr";
        o2.location = new Location("podlaskie", "Międzygórze");
        o2.hostLanguage = List.of(AccommodationOffer.Language.PL, AccommodationOffer.Language.UA);
        o2.guests = 4;
        o2.lengthOfStay = AccommodationOffer.LengthOfStay.LONGER;
        o2.phoneNumber = "+48123456780";

        accommodationsRepository.save(o1);
        accommodationsRepository.save(o2);
    }

    /** Inserts two example material aid offers. */
    @PostConstruct
    public void materialAid() {
        MaterialAidOffer o1 = new MaterialAidOffer();
        o1.title = "Oddam materac dwuosobowy";
        o1.description = "Materac w bardzo dobrym stanie, do odbioru w Gdańsku";
        o1.userId = currentUser.getCurrentUserId();
        o1.userFirstName = "Krystyna";
        o1.category = MaterialAidCategory.HOUSEHOLD_GOODS;
        o1.location = new Location("Pomorskie", "Gdańsk");
        o1.phoneNumber = "+48123456789";

        MaterialAidOffer o2 = new MaterialAidOffer();
        o2.title = "Mam do oddania zabawki dziecięce";
        o2.description = "worek zabawek do oddania, wszystkie w dobrym stanie, dla dziecka w wieku 5-10 lat";
        o2.userId = currentUser.getCurrentUserId();
        o2.userFirstName = "Maria";
        o2.category = MaterialAidCategory.FOR_CHILDREN;
        o2.location = new Location("Mazowieckie", "Warszawa");
        o2.phoneNumber = "+48123456780";

        materialAidOfferRepository.save(o1);
        materialAidOfferRepository.save(o2);
    }
}
from django.contrib.auth import authenticate
from django.contrib.auth.models import Group, User
from django.db.models import Count
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.routers import APIRootView
from rest_framework.status import HTTP_201_CREATED
from rest_framework.views import APIView
from rest_framework.viewsets import ViewSet

from netbox.api.viewsets import NetBoxModelViewSet
from users import filtersets
from users.models import ObjectPermission, Token, UserConfig
from utilities.querysets import RestrictedQuerySet
from utilities.utils import deepmerge
from . import serializers


class UsersRootView(APIRootView):
    """
    Users API root view
    """
    def get_view_name(self):
        return 'Users'


#
# Users and groups
#

class UserViewSet(NetBoxModelViewSet):
    """CRUD API endpoint for Django User accounts."""
    queryset = RestrictedQuerySet(model=User).prefetch_related('groups').order_by('username')
    serializer_class = serializers.UserSerializer
    filterset_class = filtersets.UserFilterSet


class GroupViewSet(NetBoxModelViewSet):
    """CRUD API endpoint for Django Groups, annotated with their member count."""
    queryset = RestrictedQuerySet(model=Group).annotate(user_count=Count('user')).order_by('name')
    serializer_class = serializers.GroupSerializer
    filterset_class = filtersets.GroupFilterSet


#
# REST API tokens
#

class TokenViewSet(NetBoxModelViewSet):
    """CRUD API endpoint for REST API Tokens."""
    queryset = RestrictedQuerySet(model=Token).prefetch_related('user')
    serializer_class = serializers.TokenSerializer
    filterset_class = filtersets.TokenFilterSet

    def get_queryset(self):
        """
        Limit the non-superusers to their own Tokens.
        """
        queryset = super().get_queryset()
        # Workaround for schema generation (drf_yasg)
        if getattr(self, 'swagger_fake_view', False):
            return queryset.none()
        if self.request.user.is_superuser:
            return queryset
        return queryset.filter(user=self.request.user)


class TokenProvisionView(APIView):
    """
    Non-authenticated REST API endpoint via which a user may create a Token.
    """
    permission_classes = []

    def post(self, request):
        serializer = serializers.TokenProvisionSerializer(data=request.data)
        # Bug fix: the return value of is_valid() was previously ignored, so
        # a request missing username/password produced an unhandled exception
        # (HTTP 500) below instead of a clean HTTP 400 validation error.
        serializer.is_valid(raise_exception=True)

        # Authenticate the user account based on the provided credentials
        # NOTE(review): consider serializer.validated_data here if the
        # password field is ever marked write_only — confirm serializer config.
        user = authenticate(
            request=request,
            username=serializer.data['username'],
            password=serializer.data['password']
        )
        if user is None:
            raise AuthenticationFailed("Invalid username/password")

        # Create a new Token for the User
        token = Token(user=user)
        token.save()

        data = serializers.TokenSerializer(token, context={'request': request}).data

        return Response(data, status=HTTP_201_CREATED)


#
# ObjectPermissions
#

class ObjectPermissionViewSet(NetBoxModelViewSet):
    """CRUD API endpoint for object-level permissions."""
    queryset = ObjectPermission.objects.prefetch_related('object_types', 'groups', 'users')
    serializer_class = serializers.ObjectPermissionSerializer
    filterset_class = filtersets.ObjectPermissionFilterSet


#
# User preferences
#

class UserConfigViewSet(ViewSet):
    """
    An API endpoint via which a user can update his or her own UserConfig data
    (but no one else's).
    """
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        # Scope strictly to the requesting user's own config.
        return UserConfig.objects.filter(user=self.request.user)

    def list(self, request):
        """
        Return the UserConfig for the currently authenticated User.
        """
        userconfig = self.get_queryset().first()
        return Response(userconfig.data)

    def patch(self, request):
        """
        Update the UserConfig for the currently authenticated User.
        """
        # TODO: How can we validate this data?
        userconfig = self.get_queryset().first()
        # Deep-merge the incoming partial data over the stored preferences.
        userconfig.data = deepmerge(userconfig.data, request.data)
        userconfig.save()

        return Response(userconfig.data)
//method to authenticate user with firebase private void firebaseAuthWithGoogle(String idToken) { AuthCredential credential = GoogleAuthProvider.getCredential(idToken, null); firebaseAuth.signInWithCredential(credential) .addOnCompleteListener(this, new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { Intent intent =new Intent(SignUpScreen.this,HomePage.class); intent.putExtra("flag",false); startActivity(intent); } else { Toast.makeText(SignUpScreen.this, task.getException().toString(),Toast.LENGTH_LONG).show(); } } }); }
def mds(self, anchors_diff_RP):
    """Reconstruct 2-D anchor positions from pairwise distances via classical
    multidimensional scaling (MDS).

    Args:
        anchors_diff_RP: tensor of shape (batch, K) holding the flattened
            upper-triangular pairwise distances between the
            ``n_anchors + 2`` points, K = (n_anchors + 2)(n_anchors + 1)/2.
            NOTE(review): classical MDS double-centers *squared* distances;
            confirm the caller supplies them already squared.

    Returns:
        ComplexTensor of shape (batch, n_anchors + 1): reconstructed
        coordinates (real = y, imag = x), translated so the last point is at
        the origin, with the first point dropped.
    """
    start = time()
    m = self.n_anchors + 2  # total number of points per batch element

    # Scatter the flat upper-triangular values into symmetric matrices.
    upper_idx = torch.triu_indices(m, m, offset=1)
    distances = torch.zeros(anchors_diff_RP.shape[0], m, m)
    for row in range(anchors_diff_RP.shape[0]):
        distances[row, upper_idx[0, :], upper_idx[1, :]] = anchors_diff_RP[row, :]
        distances[row] = distances[row] + distances[row].T

    # Double centering: B = -1/2 * J D J (batched over the leading dim).
    centering_matrix = torch.eye(m) - 1. / m * torch.ones((m, m))
    normalized_distances = -0.5 * centering_matrix @ distances @ centering_matrix

    # Bug fix: torch.symeig was deprecated in 1.9 and removed in 1.13.
    # torch.linalg.eigh is the drop-in replacement and likewise returns
    # eigenvalues in ascending order, so the two largest are the last two.
    eigenvalues, eigenvectors = torch.linalg.eigh(normalized_distances)
    eigval = eigenvalues[:, -2:]
    eigvec = eigenvectors[:, :, -2:]

    eigval = eigval.reshape(-1)
    eigvec = eigvec.permute(1, 0, 2).reshape(m, -1)

    # Clamp tiny negative eigenvalues (numerical noise) so sqrt stays finite.
    rec_anchors = torch.sqrt(torch.clamp(eigval, min=0.0)) * eigvec
    # Translate so the last reconstructed point sits at the origin.
    rec_anchors = rec_anchors - rec_anchors[-1]
    rec_anchors = rec_anchors.reshape(m, distances.shape[0], 2).permute((1, 0, 2))
    anchor = ComplexTensor(real=rec_anchors[:, 1:, 1], imag=rec_anchors[:, 1:, 0])

    self.time_logger["mds"] += time() - start
    return anchor
/// Make sure the client can accept the provided media type. pub fn validate_content_type( headers: &HeaderMap, content_type: &'static str, ) -> Result<(), GraphError> { let header_value = match headers.get(header::ACCEPT) { None => return Ok(()), Some(v) => v, }; let full_type = header::HeaderValue::from_static(content_type); let wildcard = header::HeaderValue::from_static("*"); let double_wildcard = header::HeaderValue::from_static("*/*"); let top_type = content_type.split("/").next().unwrap_or(""); let top_type_wildcard = header::HeaderValue::from_str(&format!("{}/*", top_type)); assert!( top_type_wildcard.is_ok(), format!("could not form top-type wildcard from {}", top_type) ); let acceptable_content_types: Vec<actix_web::http::HeaderValue> = vec![ full_type, wildcard, double_wildcard, top_type_wildcard.unwrap(), ]; // FIXME: this is not a full-blown Accept parser if acceptable_content_types.iter().any(|c| c == header_value) { Ok(()) } else { Err(GraphError::InvalidContentType) } }
Should Patients Over 85 Years Old Be Operated on for Colorectal Cancer? Background: The aim of this study is to evaluate risk factors for mortality, morbidity, and long-term survival in very old patients with colorectal cancer compared with old patients. Methods: Patients aged 75 years or older who underwent surgery for colorectal cancer were divided into 2 groups: Group A (75–84 years, n = 93) and Group B (≥85 years, n = 21). Results: The serum albumin level, oxygen pressure in arterial blood gases, and forced expiratory volume in 1 second in Group B were significantly lower than in Group A (P = 0.0094, 0.0264, and 0.0363, respectively). Pulmonary complications developed significantly more frequently in Group B than in Group A (P = 0.0019). Group B had a significantly higher mortality rate than Group A (P = 0.0477). There was no significant difference between the 2 groups in the 2- and 5-year survival rates. Conclusions: Very old patients with colorectal cancer should not be denied surgery on account of chronological age alone, although the perioperative risks for the very old are very high.
/*
 * Copyright 2022 The Furiko Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package cmd_test

import (
	"regexp"
	"testing"

	"github.com/stretchr/testify/assert"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"

	execution "github.com/furiko-io/furiko/apis/execution/v1alpha1"
	"github.com/furiko-io/furiko/pkg/cli/cmd"
	"github.com/furiko-io/furiko/pkg/cli/console"
	runtimetesting "github.com/furiko-io/furiko/pkg/runtime/testing"
	"github.com/furiko-io/furiko/pkg/utils/testutils"
)

// JobConfig fixtures exercising job options: one with a default value
// (so --use-default-options can fill it in) and one whose required option
// has no default (so the CLI must prompt for it).
var (
	parameterizableJobConfig = &execution.JobConfig{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "parameterizable-jobconfig",
			Namespace: DefaultNamespace,
		},
		Spec: execution.JobConfigSpec{
			Concurrency: execution.ConcurrencySpec{
				Policy: execution.ConcurrencyPolicyAllow,
			},
			Option: &execution.OptionSpec{
				Options: []execution.Option{
					{
						Type:     execution.OptionTypeString,
						Name:     "name",
						Label:    "<NAME>",
						Required: true,
						String: &execution.StringOptionConfig{
							Default:    "Example User",
							TrimSpaces: true,
						},
					},
				},
			},
		},
		Status: execution.JobConfigStatus{
			State: execution.JobConfigReady,
		},
	}

	// Same JobConfig, but the required option has no default value.
	parameterizableJobConfigWithRequired = &execution.JobConfig{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "parameterizable-jobconfig",
			Namespace: DefaultNamespace,
		},
		Spec: execution.JobConfigSpec{
			Concurrency: execution.ConcurrencySpec{
				Policy: execution.ConcurrencyPolicyAllow,
			},
			Option: &execution.OptionSpec{
				Options: []execution.Option{
					{
						Type:     execution.OptionTypeString,
						Name:     "name",
						Label:    "<NAME>",
						Required: true,
					},
				},
			},
		},
		Status: execution.JobConfigStatus{
			State: execution.JobConfigReady,
		},
	}
)

// Expected Job objects that the `run` command should create for the various
// flag combinations under test.
var (
	adhocJobCreated = &execution.Job{
		ObjectMeta: metav1.ObjectMeta{
			GenerateName: "adhoc-jobconfig-",
			Namespace:    DefaultNamespace,
		},
		Spec: execution.JobSpec{
			ConfigName:  "adhoc-jobconfig",
			StartPolicy: &execution.StartPolicySpec{},
		},
	}

	adhocJobCreatedWithConcurrencyPolicy = &execution.Job{
		ObjectMeta: metav1.ObjectMeta{
			GenerateName: "adhoc-jobconfig-",
			Namespace:    DefaultNamespace,
		},
		Spec: execution.JobSpec{
			ConfigName: "adhoc-jobconfig",
			StartPolicy: &execution.StartPolicySpec{
				ConcurrencyPolicy: execution.ConcurrencyPolicyEnqueue,
			},
		},
	}

	adhocJobCreatedWithStartAfter = &execution.Job{
		ObjectMeta: metav1.ObjectMeta{
			GenerateName: "adhoc-jobconfig-",
			Namespace:    DefaultNamespace,
		},
		Spec: execution.JobSpec{
			ConfigName: "adhoc-jobconfig",
			StartPolicy: &execution.StartPolicySpec{
				ConcurrencyPolicy: execution.ConcurrencyPolicyEnqueue,
				StartAfter:        testutils.Mkmtimep(startTime),
			},
		},
	}

	adhocJobCreatedWithStartAfterAllow = &execution.Job{
		ObjectMeta: metav1.ObjectMeta{
			GenerateName: "adhoc-jobconfig-",
			Namespace:    DefaultNamespace,
		},
		Spec: execution.JobSpec{
			ConfigName: "adhoc-jobconfig",
			StartPolicy: &execution.StartPolicySpec{
				ConcurrencyPolicy: execution.ConcurrencyPolicyAllow,
				StartAfter:        testutils.Mkmtimep(startTime),
			},
		},
	}

	parameterizableJobCreatedWithDefaultOptionValues = &execution.Job{
		ObjectMeta: metav1.ObjectMeta{
			GenerateName: "parameterizable-jobconfig-",
			Namespace:    DefaultNamespace,
		},
		Spec: execution.JobSpec{
			ConfigName:  "parameterizable-jobconfig",
			StartPolicy: &execution.StartPolicySpec{},
		},
	}

	parameterizableJobCreatedWithCustomOptionValues = &execution.Job{
		ObjectMeta: metav1.ObjectMeta{
			GenerateName: "parameterizable-jobconfig-",
			Namespace:    DefaultNamespace,
		},
		Spec: execution.JobSpec{
			ConfigName:   "parameterizable-jobconfig",
			StartPolicy:  &execution.StartPolicySpec{},
			OptionValues: `{"name":"<NAME>"}`,
		},
	}
)

// TestRunCommand drives the `run` subcommand end-to-end through the shared
// command-test harness, asserting on created Jobs, stdout, and stdin prompts.
func TestRunCommand(t *testing.T) {
	runtimetesting.RunCommandTests(t, []runtimetesting.CommandTest{
		{
			Name: "display help",
			Args: []string{"run", "--help"},
			Stdout: runtimetesting.Output{
				Contains: cmd.RunExample,
			},
		},
		{
			Name:      "need an argument",
			Args:      []string{"run"},
			WantError: assert.Error,
		},
		{
			Name:      "jobconfig does not exist",
			Args:      []string{"run", "adhoc-jobconfig"},
			WantError: runtimetesting.AssertErrorIsNotFound(),
		},
		{
			Name:     "created job",
			Args:     []string{"run", "adhoc-jobconfig"},
			Fixtures: []runtime.Object{adhocJobConfig},
			WantActions: runtimetesting.CombinedActions{
				Furiko: runtimetesting.ActionTest{
					Actions: []runtimetesting.Action{
						runtimetesting.NewCreateJobAction(DefaultNamespace, adhocJobCreated),
					},
				},
			},
			Stdout: runtimetesting.Output{
				Matches: regexp.MustCompile(`^Job [^\s]+ created`),
			},
		},
		{
			Name:     "created job with concurrency policy",
			Args:     []string{"run", "adhoc-jobconfig", "--concurrency-policy", "Enqueue"},
			Fixtures: []runtime.Object{adhocJobConfig},
			WantActions: runtimetesting.CombinedActions{
				Furiko: runtimetesting.ActionTest{
					Actions: []runtimetesting.Action{
						runtimetesting.NewCreateJobAction(DefaultNamespace, adhocJobCreatedWithConcurrencyPolicy),
					},
				},
			},
			Stdout: runtimetesting.Output{
				Matches: regexp.MustCompile(`^Job [^\s]+ created`),
			},
		},
		{
			Name:     "created job with start after",
			Args:     []string{"run", "adhoc-jobconfig", "--at", startTime},
			Fixtures: []runtime.Object{adhocJobConfig},
			WantActions: runtimetesting.CombinedActions{
				Furiko: runtimetesting.ActionTest{
					Actions: []runtimetesting.Action{
						runtimetesting.NewCreateJobAction(DefaultNamespace, adhocJobCreatedWithStartAfter),
					},
				},
			},
			Stdout: runtimetesting.Output{
				Matches: regexp.MustCompile(`^Job [^\s]+ created`),
			},
		},
		{
			// A non-timestamp --at value must be rejected.
			Name:      "created job with invalid start after",
			Args:      []string{"run", "adhoc-jobconfig", "--at", "1234"},
			Fixtures:  []runtime.Object{adhocJobConfig},
			WantError: assert.Error,
		},
		{
			Name:     "created job with start after and concurrency policy",
			Args:     []string{"run", "adhoc-jobconfig", "--at", startTime, "--concurrency-policy", "Allow"},
			Fixtures: []runtime.Object{adhocJobConfig},
			WantActions: runtimetesting.CombinedActions{
				Furiko: runtimetesting.ActionTest{
					Actions: []runtimetesting.Action{
						runtimetesting.NewCreateJobAction(DefaultNamespace, adhocJobCreatedWithStartAfterAllow),
					},
				},
			},
			Stdout: runtimetesting.Output{
				Matches: regexp.MustCompile(`^Job [^\s]+ created`),
			},
		},
		{
			Name:     "created job with default option values",
			Args:     []string{"run", "parameterizable-jobconfig", "--use-default-options"},
			Fixtures: []runtime.Object{parameterizableJobConfig},
			WantActions: runtimetesting.CombinedActions{
				Furiko: runtimetesting.ActionTest{
					Actions: []runtimetesting.Action{
						runtimetesting.NewCreateJobAction(DefaultNamespace, parameterizableJobCreatedWithDefaultOptionValues),
					},
				},
			},
			Stdout: runtimetesting.Output{
				Matches: regexp.MustCompile(`^Job [^\s]+ created`),
			},
		},
		{
			// Without --use-default-options, the CLI prompts on stdin.
			Name:     "created job with prompt input",
			Args:     []string{"run", "parameterizable-jobconfig"},
			Fixtures: []runtime.Object{parameterizableJobConfig},
			WantActions: runtimetesting.CombinedActions{
				Furiko: runtimetesting.ActionTest{
					Actions: []runtimetesting.Action{
						runtimetesting.NewCreateJobAction(DefaultNamespace, parameterizableJobCreatedWithCustomOptionValues),
					},
				},
			},
			Stdin: runtimetesting.Input{
				Procedure: func(c *console.Console) {
					c.ExpectString("<NAME>")
					c.SendLine("<NAME>")
				},
			},
			Stdout: runtimetesting.Output{
				ContainsAll: []string{
					"Please input option values.",
					"<NAME>",
				},
				Matches: regexp.MustCompile(`Job [^\s]+ created`),
			},
		},
		{
			// A required option with no default still prompts, even with
			// --use-default-options.
			Name:     "prompt stdin input for required option",
			Args:     []string{"run", "parameterizable-jobconfig", "--use-default-options"},
			Fixtures: []runtime.Object{parameterizableJobConfigWithRequired},
			WantActions: runtimetesting.CombinedActions{
				Furiko: runtimetesting.ActionTest{
					Actions: []runtimetesting.Action{
						runtimetesting.NewCreateJobAction(DefaultNamespace, parameterizableJobCreatedWithCustomOptionValues),
					},
				},
			},
			Stdin: runtimetesting.Input{
				Procedure: func(c *console.Console) {
					c.ExpectString("Full Name")
					c.SendLine("<NAME>")
				},
			},
			Stdout: runtimetesting.Output{
				ContainsAll: []string{
					"Please input option values.",
					"Full Name",
				},
				Matches: regexp.MustCompile(`Job [^\s]+ created`),
			},
		},
	})
}
/**
 * Custom action that reads the local dataset and writes to the non-local dataset.
 *
 * The action signals that it has started by creating the file named by the
 * "wait.file" runtime argument, counts the entries of the "wordcount" dataset
 * inside a transaction, writes the count to the "result" dataset, and then
 * blocks until the file named by the "done.file" runtime argument appears.
 */
public static class LocalDatasetReader extends AbstractCustomAction {
  private static final Logger LOG = LoggerFactory.getLogger(LocalDatasetReader.class);

  // Injected by the CDAP runtime.
  private Metrics metrics;
  // Name assigned to this action in configure().
  private final String actionName;

  private LocalDatasetReader(String name) {
    this.actionName = name;
  }

  @Override
  protected void configure() {
    super.configure();
    setName(actionName);
  }

  // Datasets injected by the runtime via @UseDataSet.
  @UseDataSet("wordcount")
  private KeyValueTable wordCount;

  @UseDataSet("result")
  private KeyValueTable result;

  @Override
  public void run() {
    LOG.info("Read the local dataset");
    try {
      // Signal to the test/driver that the action has started.
      File waitFile = new File(getContext().getRuntimeArguments().get("wait.file"));
      waitFile.createNewFile();

      // Count all entries of "wordcount" and record the total, inside a
      // single transaction.
      getContext().execute(new TxRunnable() {
        @Override
        public void run(DatasetContext context) throws Exception {
          int uniqueWordCount = 0;
          // Full scan; each key is one unique word.
          try (CloseableIterator<KeyValue<byte[], byte[]>> scanner = wordCount.scan(null, null)) {
            while (scanner.hasNext()) {
              scanner.next();
              uniqueWordCount++;
            }
          }
          result.write("UniqueWordCount", String.valueOf(uniqueWordCount));
          metrics.gauge("unique.words", uniqueWordCount);
        }
      });

      // Block until the driver signals completion by creating the done file.
      File doneFile = new File(getContext().getRuntimeArguments().get("done.file"));
      while (!doneFile.exists()) {
        TimeUnit.MILLISECONDS.sleep(50);
      }
    } catch (Exception e) {
      // Best-effort action: log and swallow so the workflow can proceed.
      LOG.error("Exception occurred while running custom action ", e);
    }
  }
}
import { Tree } from '@nrwl/devkit';
import { removeCypressTsConfigPath } from './remove-cypress-ts-config-path';
import { CypressProject } from '../../shared/model/cypress-project.enum';
import {
  getCypressProjectName,
  isHavingCypressProject,
} from '../../shared/utils/cypress-project';
import { deleteInTree } from '../../shared/utils/tree';
import { removeProjectConfiguration } from '../../shared/utils/project-configuration';

/**
 * Removes a cypress project (E2E or Storybook) belonging to the given
 * application/domain: drops its workspace configuration, deletes its
 * integration specs and project-type-specific config, and — once neither
 * cypress project type remains — tears down the shared `.cypress` scaffolding
 * and its tsconfig path entry.
 */
export const removeCypressProject = (
  tree: Tree,
  application: string,
  domain: string,
  projectType: CypressProject
): void => {
  const domainRoot = `libs/${application}/${domain}`;
  const siblingProjectType =
    projectType === CypressProject.E2E
      ? CypressProject.Storybook
      : CypressProject.E2E;

  // Drop the workspace configuration entry for this cypress project.
  removeProjectConfiguration(
    tree,
    getCypressProjectName(application, domain, projectType)
  );

  // Delete the integration specs belonging to this project type.
  deleteInTree(tree, `${domainRoot}/.cypress/src/integration/${projectType}`);

  if (projectType === CypressProject.Storybook) {
    // Storybook projects additionally own a `.storybook` folder and a
    // dedicated cypress config file.
    deleteInTree(tree, `${domainRoot}/.storybook`);
    deleteInTree(tree, `${domainRoot}/.cypress/storybook-cypress.json`);
  } else {
    deleteInTree(tree, `${domainRoot}/.cypress/cypress.json`);
  }

  // If the sibling cypress project type is also absent, nothing shares the
  // `.cypress` folder any more: remove the tsconfig path and the folder itself.
  if (
    !isHavingCypressProject(application, domain, siblingProjectType, tree)
  ) {
    removeCypressTsConfigPath(tree, application, domain);
    deleteInTree(tree, `${domainRoot}/.cypress`);
  }
};
So DC published their new Vibe comic this week. The creator credit inside the book read as follows. A few people thought that was off and got in touch with Gerry Conway. @quest4earth2 @fotocub @dccomics Yes, Vibe was created by Chuck Patton and me; looks like I need to contact DC on Chuck’s behalf… — Gerry Conway (@gerryconway) February 21, 2013 So Gerry got in touch with DC. So I contacted DC about the mistaken “created by” credit on Vibe. Helpfully, they’re removing the “created by” credit entirely. — Gerry Conway (@gerryconway) February 22, 2013 Mark Waid added his take; @markwaid Classy.At least I got them to acknowledge creator participation for Vertigo on Arrow. — Gerry Conway (@gerryconway) February 22, 2013 But it looks like there may be another battle on Gerry’s hands over the Arrow series. @loveandcapes Help me out. When did Felicity first appear?(Pre- or post-76?) — Gerry Conway (@gerryconway) February 22, 2013 @loveandcapes Damn.I’m gettin’ on their case tomorrow.I haven’t followed Arrow closely.How is she being used? — Gerry Conway (@gerryconway) February 22, 2013 About Rich Johnston Chief writer and founder of Bleeding Cool. Father of two. Comic book clairvoyant. Political cartoonist. (Last Updated ) Related Posts None found
Stay in treatment: Predicting dropout from pediatric weight management study protocol Introduction Childhood obesity is a serious public health concern. Multidisciplinary pediatric weight management programs have been deemed effective. However, effectiveness of these programs is impacted by attrition, limiting health benefits to children, and inefficiently utilizing scarce resources. Methods We have developed a model (the Outcomes Forecasting System, OFS) that isolates variables associated with attrition from pediatric weight management, with the potential to forecast participant dropout. In Aim 1, we will increase the power and precision of the OFS and then validate the model through the consistent acquisition of key patient, family, and treatment data, from three different weight management sites. In Aim 2, external validity will be established through the application of the OFS at a fourth pediatric weight management program. Aim 3 will be a pilot clinical trial, incorporating an intervention built on the results of Aims 1 and 2 and utilizing the OFS to reduce attrition. Discussion A greater understanding of the patient, family, and disease-specific factors that predict dropout from pediatric weight management can be utilized to prevent attrition. The goal of the current study is to refine the OFS to a level of precision and efficiency to be a valuable tool to any weight management program. By identifying the most pertinent factors driving attrition across weight management sites, new avenues for treatment will be identified. This study will result in a valuable forecasting tool that will be applicable for diverse programs and populations, decrease program costs, and improve patient retention, adherence, and outcomes. Clinicaltrials.gov identifier NCT04364282. Introduction Despite the high prevalence of pediatric obesity, intensive treatment programs are scarce and typically concentrated within tertiary centers . 
Such programs have been deemed effective by the United States (U.S.) Preventive Services Task Force, which has strongly advocated for increased access; however, such programs are typically intensive (26+ contact hours over 6-12 months) . Unfortunately, their effectiveness has been hampered by attrition, which ranges from 27 to 73% . Attrition from pediatric obesity treatment is a challenge, and results in significant financial losses for treatment programs and reduces the benefits children receive from these interventions . Existing evidence of attrition is based on retrospective studies using varying definitions, variables, and outcomes . These studies have typically focused on sociodemographic differences between patients who drop out of or complete treatment . The heterogeneity between studies likely reflects the complex interplay between children, their families, and obesity as a disease, and the varying treatment modalities used to meet the needs of diverse families . Several factors can account for attrition in a program. For instance, psychosocial concerns such as stress and dysfunction in the family, or a child's experience with bullying can increase the chances of drop out . Higher levels of attrition have been reported in the presence of weight-related co-morbidities, who are often the children in greatest need of treatment . Finally, studies on engagement and retention have noted challenges related to social determinants of health: lack of transportation, insurance coverage, and busy work schedules can lead to attrition, even when families indicate they are highly satisfied with the program . To date, there is a paucity of interventions to prevent attrition from weight management . Patient retention efforts often involve frequent electronic queries, monitoring clinic schedules, and phone calls from clinical staff, all of which are time-and resource-intensive . Developing and implementing forecasting models to decrease attrition holds promise. 
By acquiring pertinent variables and prospectively following patients in different programs, settings, and locations, models can be developed, refined, and deployed to identify accurate profiles of those at the highest risk for attrition. From this, retention efforts can be focused on those most likely to cease attending treatment, and outcomes may be enhanced by addressing variables that contribute to attrition. As such, validation is key to the successful translation of tools and models into clinical practice. A comprehensive internal, external, and temporal validation process using diverse clinic populations is the next step to test and translate a forecasting model into clinical practice successfully . Existing evidence is clear in demonstrating the problem of high attrition rates from treatment programs. Still, findings are inconsistent across studies, limiting the usefulness of the results . We have developed a model that utilizes attrition-related variables to forecast participant dropout. We now seek to expand, refine, and validate this model to enable us to forecast with high precision the risk of a patient and family dropping out of pediatric obesity treatment. Methods Stay in Treatment (SIT): Predicting Dropout from Pediatric Weight Management is funded by the National Institute of Nursing Research (R01NR017639; originally titled War of Attrition) and is registered on clinicaltrials.gov (Identifier: NCT04364282). The Wake Forest University Health Sciences Institutional Review Board reviewed and approved the study protocol and all procedures as the single IRB of record in January 2020, and all other institutions entered into a reliance agreement. The full study protocol, informed consent document, and study results will be published on clinicaltrials.gov upon study completion. 
Study purpose and hypotheses The overall goals of this study are to increase the precision and power of our attrition prediction model by testing it in additional multidisciplinary pediatric weight management programs and to demonstrate its internal, external, and temporal validity. Aim 1 is to install an Outcomes Forecasting System (OFS) in three pediatric weight management programs, calibrate it, and build its precision using a conceptual model of adherence. Through this model and preliminary work, we have identified several plausible variables (described in section 2.4.1) that will be used to refine and validate the OFS. We hypothesize that the attrition forecasting model will accurately predict patient and family dropout from treatment, with an area under the curve (AUC) greater than 0.70 and with similar accuracy in predicting weight outcomes. We further hypothesize that an attrition forecasting model built across three different but similarly structured weight management programs will have both internal and temporal validity. Aim 2 is to install and externally validate the accuracy of an omnibus OFS in an additional weight management program. The OFS will be applied to a fourth pediatric weight management program not involved in the original data collection and calibration. We hypothesize that an omnibus OFS will be similarly accurate in predicting dropout and weight outcomes in a fourth site, as it was in the three that built its precision and calibration. Via a randomized pilot trial, Aim 3 will establish the feasibility and utility of an intervention using the OFS in three multidisciplinary pediatric weight management programs to identify patients and families at the highest risk of dropping out. We hypothesize the OFS will improve effectiveness of pediatric weight management by reducing attrition in high-risk patients and families. 
By identifying the most pertinent factors driving attrition across weight management sites, we can intervene to prevent families from dropping out and increase their exposure to necessary treatment. Our rigorous and reproducible tool will be made available for broad dissemination to improve adherence, decrease costs, and improve outcomes. Results will be designed for rapid uptake and could change practice through meaningfully addressing the critical need for more tailored pediatric weight management programs. We also have included weight-and behavior-related outcomes in the prediction model, adding richness to our findings. Sites and partners All involved sites house tertiary care multidisciplinary obesity treatment programs. The Brenner FIT® (Families in Training) program is located in Winston-Salem, North Carolina, and has 12 years of experience in clinical treatment and research. The Optimal Wellness for Life (OWL) program is located at Boston Children's Hospital in Boston, Massachusetts, and has over 20 years of experience in clinical treatment and research. The Promoting Health in Teens and Kids (PHIT Kids) program is located at Children's Mercy Kansas City, Missouri, and has 13 years of experience in clinical treatment and research. The fourth site, which will be used for external validation, is the Center for Healthy Weight and Nutrition at the Nationwide Children's Hospital at The Ohio State University, and also has extensive experience in clinical treatment and research (Table 1). Conceptual model Rapoff's Model of Adherence to Pediatric Medical Regimens is the conceptual model for the study . The Principal Investigator (JS) previously adapted the model for obesity treatment, which guided his preliminary research on attrition from weight management programs ( Fig. 1) . 
First, the model considers the child and their family in relation to sociodemographic factors, family function and structure, stress, and physical and mental health-all factors believed to play a role in attrition. Second, it focuses on obesity as a disease process and focuses on symptoms, weight severity, perceived severity, and comorbidities. Third, it takes into account the treatment program, including the cost, side-effects, efficacy, patient satisfaction, and approach. Study design In the initial phase (Aim 1), we will obtain key data consistently across three treatment sites to refine our model of attrition prediction. The second phase will establish temporal validation through continued use of the OFS within the three original sites (second part of Aim 1), and external validation, installing the OFS in a fourth weight management program (Aim 2). The OFS was originally developed on existing data from a clinical database of the Brenner FIT program (Wake Forest School of Medicine IRB#00007733), then applied to another existing data set from PHIT Kids program in Kansas City (University of Missouri Kansas City IRB#12070346). The OFS allows the development of an on-going dynamic model that is tailored to each site. Attrition predictors, based on our previous work and that of others, plus conceptual model, have been specified via the study measures and questionnaires. Study design for Aim 1 Aim 1 is a prospective, longitudinal observational study to collect comprehensive data on child and family, obesity, and treatment-related variables. All of these programs are similar enough to support the data elements collected but unique enough to add variability and strength to the model. We will recruit all eligible children ages 7-18 years who are referred for obesity treatment, and a parent or legal guardian. 
The inclusion criteria will be as follows: children must have obesity (body mass index ≥ 95th percentile for age and sex), assent to participate, have at least one parent/legal guardian (hereafter called "parent") consent to participate and consent to the child's participation, and speak either English or Spanish. The parent has to be the primary parent accompanying the child to treatment, and the child's primary residence must be with that parent. Children will be excluded if they cannot complete measures and study activities, or the parent or child refuses to participate, does not want to complete six months of treatment, or anticipates being unable to participate in follow-up data collection. Patients referred for weight management explicitly for the treatment of type 2 diabetes will be excluded. Patients diagnosed with diabetes at the initial evaluation will remain eligible. Patients with chronic health conditions impacting weight or genetic conditions associated with weight gain will be excluded from participation. Measures will be administered electronically by touch screen tablets or virtually with real-time verification by research staff. Paper questionnaires will be available. We will use the Research Electronic Data Capture (REDCap) system for secure, web-based data entry and management. Data collectors will verify complete data entry and data will undergo verification bi-weekly. All sites have access to and familiarity with REDCap. Children and parents will be recruited from three weight management sites initially. Data collectors will meet the children and parents at the clinic, research center, or virtually and obtain consent and assent, collect measures, and complete measurements within two weeks before or after starting the weight management program. As patients and families participate longitudinally, the data collector will collect appointment attendance, subsequent anthropometric data, and duration of treatment. 
Follow-up data will be collected on children and parents after six months. Those children and parents with delays in completing the measures within six months but still actively engaged in treatment will receive an extra month to complete follow-up data collection. Measures and variables are captured by our Conceptual Model (Fig. 1). Nearly all measures of complex psychosocial, behavioral, and family variables have established validity and reliability, with some adapted from existing measures. The domains and supporting evidence of our conceptual model are child and family variables, disease (obesity) variables, and treatment variables as follows. Child and family variables 2.5.1. Sociodemographics Age, race, ethnicity, sex, and socioeconomic status will be captured, including any food insecurity by the 2-item Hunger Vital Sign , and parent employment and education level. Child and parent weight and child height will be measured by direct measure using established protocols; we have developed protocols for home measurement, observed remotely by research staff, based on CDC guidelines . Family factors The Family Nutrition and Physical Activity Screening Tool will be used to assess eating, activity, and other habits within the family and home environment . It is a measure for capturing family health habits, including family meal patterns and eating habits, meal and beverage quality, media and electronic entertainment use, family activity, child activity, and sleep. The measure has established construct validity and internal consistency (α = 0.84). While primarily used as a screening tool, it has been used in longitudinal studies of childhood obesity , and its brief nature makes it a practical tool for clinical use. The Family Assessment Device (FAD) will be used to assess family functioning . The FAD is a seven scale measure, and the General Functioning Subscale (12-item) is an acceptable proxy for an overall picture of family function with minimal burden. 
This scale has been used previously in obesity research with excellent reliability (α = 0.92) . Parenting type will be determined by the Child Report of Parent Behavior Inventory, a valid and reliable self-report measure on parent behavior across three dimensions: psychological control vs autonomy, acceptance vs rejection, and firm vs lax control . We will determine the structure of the family using a self-report questionnaire to determine if it is a blended family and the number of adults and children living in the household. The Confusion, Hubbub, and Order Scale (CHAOS) will assess home environment organization, confusion, and hurriedness and is a distinct variable from the socioeconomic status of the home and family . The scale is reliable and consistent, having been validated against direct observation of household behaviors . Child perception of the family will be assessed by the PROMIS Pediatric Family Relationships measure, short-form (8 questions) . Stress The Parent-Perceived Stress Scale will be used to measure stress perception over the previous month; it has established validity (0.52-0.76) and reliability (α = 0.84-0.86) . The Child-PROMIS Psychological Stress Experience Short Form 4a will be used to capture psychological stress reactions, feeling overwhelmed, perceived lack of control of one's life, and cognitive-perceptual disruption . Physical and mental health The PROMIS Pediatric/Parent Proxy Profile 25 -Short Forms for children assesses anxiety, depression, fatigue, pain, physical function/ mobility, and peer relationships . We will use both the child self-report and the Parent Proxy Report. The PROMIS-29 Profile 2.0 -Short Forms will be used to examine parent general and psychosocial health; it assesses anxiety, depression, fatigue, pain, physical function, sleep, and ability to participate in social activities . 
The Importance, Confidence, and Readiness Measure will be used to measure motivation and self-efficacy in weight management with parents and children and has established reliability (α = 0.8) . Health literacy The Newest Vital Sign will be used to measure health literacy, capturing the parent's ability to understand words, numbers, and forms . This brief measure is reliable and valid in English and Spanish and only takes 3 min to administer. Given the age range of our study population and the significant role of parents and family in pediatric weight management, only the parent's health literacy will be assessed. Physical and emotional symptoms The nature and extent of weight-based victimization (teasing and bullying because of weight) will be assessed with a questionnaire adapted from Puhl et al., the Weight-based Victimization Questionnaire . Child and parent are provided with a detailed definition of bullying, followed by the questions determining duration and nature of weight-based victimization. Physical symptoms will be captured by the PROMIS Pediatric/Parent Proxy Profile 25 -Short Forms, as detailed earlier, which includes items on fatigue, pain, physical function, and ability to participate in social activities . Weight severity Children's weight will be obtained by direct measurement using established protocols. Body mass index (BMI) will be calculated, and given the anticipated weight status ranges, percent of the 95th percentile will be calculated and used as the primary determinant of obesity severity . Perceived weight severity Perceived severity of weight will be assessed using a single self-reported item with adaptation for the parent as has been used in previous obesity research . Comorbidities The presence of major and minor weight-related comorbidities, using a framework established by Skelton et al. 
, will be captured through the electronic health record (EHR), as will any weight-related laboratory studies commonly obtained by referring providers or within the programs (liver function tests, glucose, hemoglobin A1c, lipid profile). Also, it will be noted if the patient is receiving treatment for any weight-related comorbidities: anti-hypertension agents, lipid-lowering agents, diabetes medications, or treatment for sleep apnea (CPAP, BiPAP). Costs The cost of the program for the patient and its perceived impact on the family will be assessed. First, parents will be asked to self-report out-of-pocket clinic visit expenses (e.g., copays) at initial and follow-up visits to estimate per-visit cost. Impact of the cost of treatment will be evaluated through a measure of financial toxicity resulting from treatment. This will be captured using a measure developed by de Souza et al., with established validity and reliability, which has been minimally modified for use in pediatric weight management programs. Effectiveness and satisfaction We will use short items of overall satisfaction with treatment and an overall question of quality of care used in previous research. Also, we will use a measure adapted from the RAND Patient Satisfaction Questionnaire Short form (PSQ-18) , with additional questions to cover domains of satisfaction outlined by Skelton et al. . Effectiveness of the OFS on treatment outcomes will be assessed using BMI measurement at subsequent visits. Approach We will account for different treatment program sites in the analytic plan and include visit frequency and total visit number, and specialized treatment tracks (stated interest in future bariatric surgery, telemedicine, weight-loss medications). These will be included in the analysis of differential attrition rates between sites. Distance to treatment Given the current COVID-19 pandemic, all programs have incorporated or expanded telemedicine capabilities. 
The number or proportion of telemedicine visits will be captured. For in-person appointments, distance from home to treatment will be determined in two ways: by parent report of time in minutes spent driving to the visit, and miles from home zip code to clinic location, as determined by Google Maps. Data collection Baseline data collection will take approximately 45 min for the parents and 30 min for the children. The six-month follow-up data collection will take 30 min for each parent and child. At both data collection visits, the child and parent will each receive a $25 gift certificate. Data collectors will track children's and parents' program participation within the EHR system. Participation will be tracked by visit as missed appointments or attended appointments, and program completion as completed six months of treatment or dropped out. Outcomes Attrition from treatment will be the primary outcome, captured in three distinct ways: total appointments attended, overall proportion of the appointments attended, and the dichotomous treatment completion versus dropout. These will be defined as: • Total appointments attended: the number of clinic visits attended by participants in six month time period; • Proportion of appointments attended: total number of clinic visits attended/total number of clinic visits offered to participants; • Treatment completion versus dropout: participants still active in treatment after six months; inactive defined as a missed appointment with no rescheduled appointment by participants despite two telephone calls and a letter over a one month time period, or no clinic attendance in six weeks without a future scheduled appointment. The secondary outcomes will include a change in percent of the 95th percentile BMI, change in parent weight, and change in measures of health and health behaviors such as children's general and/or psychosocial health and family health habits. 
The OFS will also be formulated to build power and precision to predict the change in percent of the 95th percentile BMI. To address potential selection bias, missing BMI data will be multiply imputed using information from the prediction model (i. e., the predicted probability of attrition) and other demographic predictors with prognostic value for weight (e.g., baseline BMI, age, sex, height). Process measures will include a brief exit interview to determine the reasons for dropping out, factors that kept patients enrolled, general satisfaction or dissatisfaction, and overall experience. To not unduly influence the primary outcome of attrition, if, upon contact, dropouts elect to re-enroll or begin treatment again, they will still be considered dropped out and have data collected, but they can re-engage in treatment if they desire. Their decision to re-enroll will be noted in the data, and the family will be assisted in scheduling an appointment. Statistical analyses The development and refinement of the OFS, and the statistical methods involved will be finalized in a separate statistical analysis plan (SAP). This plan will conform to the recommendations provided in the Transparent Reporting of a Multivariable Prediction Model (TRIPOD) guidelines . In short, attrition prediction in this study will utilize Bayesian methods. Bayesian predictive modeling is similar to typical approaches, such as multivariable logistic regression, where predictors are incorporated into a model, and their associations (e.g., odds ratios) are estimated along with the uncertainty of those associations. In Bayesian modeling, prior probabilities can be considered as a 'best guess' to what the odds ratios will be. This will accommodate differences across sites, and allow the prediction model to be applied to future sites by using estimated associations from all of the other sites as the prior probability distribution of the new sites. 
This will allow the model to be flexible concerning site heterogeneity. This approach was developed by Houle et al. in the study of headaches . The OFS is coded in the R language using several different publicly available packages. The sample size considerations related to the model discrimination methods are reported below. Statistical power For Aim 1, each site will enroll n = 100 dyads over two years (N = 300 dyads). This sample size will allow us to evaluate the hypothesis that the predictive accuracy of the forecasting model is AUC ≥0.70. Assuming a 50% attrition rate (i.e., event rate), N = 300 provides power = .80 to reject a one-sided null hypothesis test, assuming that the overall performance of the model is AUC≥ 0.772. Furthermore, this sample size will allow the evaluation of individual parameters and parameter blocks within the model. For example, this sample size provides power = .80 to detect an OR≥ 1.5 for any single predictor of attrition, assuming that this predictor is moderately correlated with the other predictors in the model (R 2 = 0.40) and ignoring across-site heterogeneity in the estimates. Additional analyses will extend these models with the inclusion of covariates such as age and age group (7-12, 13-18 years), sex/gender, race/ethnicity, primary language spoken, etc. Combined with variables of race, ethnicity, and geography, a proxy variable of culture will be created. This is an important consideration given the family-based variables important to the conceptual model. Temporal validation We will conduct a temporal validation to examine if the performance of the system changes over time. Considering the issue of changing attrition over time strengthens the overall study design, findings, and potential clinical application, which involves evaluating the prediction model on subsequent patients from the same centers on which the prediction model was built. 
The same study design and processes will be used in the temporal validation, recruiting from the three weight management sites, enrolling patients, and prospectively following them in treatment. The model will be refined over years 1 and 2. Temporal validation will take place during year 3, in which each of the three sites is enrolling an additional 50 child-parent dyads (150 total). Multiple metrics, including predictive accuracy (sensitivity, specificity) and AUC, will be considered. Study design for Aim 2 For Aim 2, we will install and externally validate the accuracy of a powerful omnibus OFS in an additional weight management program. The OFS will be applied to a fourth pediatric weight management program not involved in the original data collection to build external validity. The study design will mirror that in Aim 1, prospectively enrolling all eligible children ages 7-18 years of age and a parent or guardian in a longitudinal observational study. Processes and procedures will be the same as in Aim 1. All variables included in Aim 1 will also be collected at the fourth site using the same processes for recruitment, enrollment, and tracking. External validation will occur at the Center for Healthy Weight and Nutrition at Nationwide Children's Hospital. As in Aim 1, we will recruit 100 child-parent dyads over two years. Eligibility, data management, study procedures, participant tracking, retention, and study measures will be the same as Aim 1. 2.8.6.1. Statistical Power. This hypothesis will be tested using a onesided non-inferiority test comparing the AUC from Aim 1 to that obtained from a novel site. A sample size of n = 200 in Aim 2 pooled with n = 300 from Aim1 provides 80% power to examine this hypothesis for a non-inferiority region of 0.037 in the AUC. Thus, assuming that the model performance in Aim 1 is AUC = 0.775, the lower bound of model performance in Aim 2 must be > 0.738 to be considered non-inferior. 
Differences of this magnitude or smaller are not clinically meaningful and support the global utility of the model. Study design for Aim 3 In Aim 3, we will operationalize the OFS to identify patients and families at the highest risk of dropping out of treatment, and institute an attrition-reduction intervention. Data collected in Aims 1 and 2 will inform the final design, with operations mirroring those of the first two aims. We will use a stepped-wedge cluster randomized trial, a pragmatic study design well suited for service delivery research . We will conduct the intervention at the three original sites. 2.8.7.1. Stepped wedge cluster randomization. This will be a naturalistic study, assessing influence of knowing dropout risk on treatment course. While not as powerful as a randomized controlled trial, a naturalistic design is appropriate for a complex disease (obesity) in a complex setting (multidisciplinary treatment programs) with diverse participants (children and families) . We will use the same Study Procedures as Aim 1, with three arms to study attrition reduction through use of the OFS: Control, Passive, and Active. Site activity will alternate every 3 months with the stepwise addition of passive and active interventions at sites: Control arm: the data collection activities conducted in Aims 1 and 2 will occur, but individual risk of patient attrition using the OFS will not provided to the clinical team. Passive: the same data will be collected as in the previous observational phases and Control arm, with the OFS generating an individual risk profile after beginning the weight management program and completing study measures. Study staff will provide the risk profile to clinical teams only on a monthly basis. For communicating probability/ risk of dropout, a single-page print out will be provided to clinicians shortly after their first visit to the weight management clinic. 
The purpose of a passive arm is to assess if clinician knowledge/awareness of dropout risk would modify behaviors and clinical interactions to an extent that participant attrition is influenced. Active: a risk profile for all enrolled patients will be provided to clinical teams as in the Passive intervention. Patients and families in the highest risk category (High Risk-defined as top quartile of dropout risk estimates) will be targeted for intervention. The Active intervention is based on evidence-based approaches : • Monitoring: Monthly query of high-risk patients and families to determine if active (visit to or scheduled appointment with program in upcoming 4 weeks) or potentially inactive (no appointment in past 4 weeks and no appointment presently scheduled). Queries will be prepared by study staff and provided to the clinical team. Goal is to provide additional active monitoring of High Risk group, with subsequent contact made by study staff and/or clinical team of patients and families without a recent clinic appointment. • Awareness: Weekly notification of High Risk patients and families with upcoming clinic visit. Personalized contact if patient cancels or does not arrive for appointment; brief phone call made during the scheduled clinic visit time, allowing patient/family to reschedule or discuss reasons for missed appointment . • Personalized mobile phone message (text, SMS): made by study staff the day before appointment . Since this method may incur costs for some patients and families, it will be optional ("opt-in"). • Establishing relationship: single follow-up phone call by clinic staff after initial visit to facilitate relationship building with family, show to improve continuity and follow-up visit adherence . The above will be done only for High Risk patients/families due to the added effort, which would be burdensome for all patients. 
Existing appointment reminder systems in place (similar between sites: automated reminder phone calls, mailed letters) will continue unchanged. We will use the same recruitment strategies as in Aims 1 and 2. We will recruit 50 child-parent dyads at each site (150 total dyads) over 2 years, with the same eligibility criteria. Primary and secondary outcomes will also be the same as Aim 1 (Primary: attrition, number of total visits, percentage of visits attended; Secondary: change in child weight status, change in parent weight, change in measures of health behaviors). Most importantly, we will compare intervention (passive and active) versus control attrition. 2.8.7.2. Statistical design. Exploratory analyses will be run which extend these models through the inclusion of covariates such as age, sex, race/ethnicity, etc. Pair-wise interactions with study arm will be included in the models to determine if the effect of the intervention differs depending on the level of the covariate (e.g., differential effects for males and females). These interactions will be removed if not significant. The covariate main effects will be retained in the models to determine if they are associated with changes in the outcomes (e.g., sex might be related to the change in percent of 95th percentile, but the intervention could be equally effective for both sexes) and to assess the intervention effect after adjustment for the participant covariates. Statistical Power. Participants will be followed for 6 months, to determine differences between active and passive interventions and control cohorts. A formal sample size will not be calculated, as this is an exploratory aim, designed to allow estimation of effect size to power a future, more definitive study. Discussion This manuscript provides a summary of the study design and methods of the Stay in Treatment (SIT) Study: Predicting Dropout from Pediatric Weight Management. 
Aim 1 is to install an Outcomes Forecasting System (OFS) in three pediatric weight management programs, calibrate it, and build its precision using a conceptual model of adherence. Aim 2 is to install and externally validate the accuracy of an omnibus OFS in an additional weight management program. The outputs from this study will then be used to develop a pilot intervention study to implement the OFS in pediatric weight management programs to decrease participant and family dropout. The outcomes of this study will identify the most pertinent factors driving attrition from pediatric weight management, which may lead to new avenues for treatment as well as improved adherence and program engagement. The OFS will be made available to pediatric weight management programs to improve adherence and potentially patient outcomes. With improvement in electronic health records' (EHR) ability to incorporate prediction models, the OFS could potentially improve existing EHR models, making it even more practical and feasible to use. Machine learning tools are increasingly utilized in healthcare; this study will guide its use in pediatric weight management through the use of theoretical models of adherence and extensive validation. Conclusion The goal of this study is to validate the OFS so it can be quickly disseminated to pediatric weight management clinics. If successful, it will be instrumental in forecasting attrition and improving the care of obesity in pediatrics. Funding This study was funded by a grant awarded to Dr. Skelton from the National Institute of Nursing Research (R01NR017639). Declaration of competing interest Dr. Rhodes is the Site Principal Investigator for a clinical trial sponsored by Astra Zeneca. Dr. Fleischman is the Site Principal Investigator for clinical trials sponsored by Soleno therapeutics, Millendo therapeutics, and is a Co-Investigator for a clinical trial sponsored by Levo therapeutics. Dr. 
Sweeney is a member of the Paediatric Obesity Global Advisory Board for Novo Nordisk. Dr. Hampl's institution receives royalties from a book she co-edited published by McGraw-Hill Education. Dr. Eneli has research funding for a Registry from Rhythm Pharmaceuticals. Dr. Houle is a research and statistical consultant to GlaxoSmithKline and Eli Lilly. He is also the Chief Scientist at StatReviewer.
<reponame>flooey/improved-initiative<gh_stars>0
import * as React from "react";

import { StatBlockComponent } from "../Components/StatBlock";
import { StatBlockHeader } from "../Components/StatBlockHeader";
import { TextEnricher } from "../TextEnricher/TextEnricher";
import { CombatantViewModel } from "./CombatantViewModel";
import { linkComponentToObservables } from "./linkComponentToObservables";

interface CombatantDetailsProps {
  combatantViewModel: CombatantViewModel;
  enricher: TextEnricher;
  displayMode: "default" | "active";
}

// No local state: re-renders are driven by the observables linked in the constructor.
interface CombatantDetailsState { }

/**
 * Read-only details panel for a single combatant: header (name/source/type/image),
 * current HP, active tags with remaining durations, the full stat block, and any
 * user notes run through the text enricher before display.
 */
export class CombatantDetails extends React.Component<CombatantDetailsProps, CombatantDetailsState> {
  constructor(props) {
    super(props);
    // Subscribe this component to the view model's observables so changes re-render.
    linkComponentToObservables(this);
  }

  public render() {
    // No combatant selected: render nothing.
    if (!this.props.combatantViewModel) {
      return null;
    }

    const currentHp = this.props.combatantViewModel.HP();

    // Tags with a duration are annotated with how many rounds they have left.
    const tags = this.props.combatantViewModel.Combatant.Tags().map(tag => {
      if (tag.HasDuration) {
        return `${tag.Text} (${tag.DurationRemaining()} more rounds)`;
      }
      return tag.Text;
    });

    const notes = this.props.combatantViewModel.Combatant.CurrentNotes();
    // Enrich only when notes exist; enricher output is rendered JSX/markup.
    const renderedNotes = notes ? this.props.enricher.EnrichText(notes) : null;

    const statBlock = this.props.combatantViewModel.Combatant.StatBlock();

    return <div className="c-combatant-details">
      <StatBlockHeader name={this.props.combatantViewModel.Name()} source={statBlock.Source} type={statBlock.Type} imageUrl={statBlock.ImageURL} />
      <div className="c-combatant-details__hp"><span className="stat-label">Current HP</span> {currentHp}</div>
      {tags.length > 0 && <div className="c-combatant-details__tags"><span className="stat-label">Tags</span> {tags.join("; ")}</div>}
      <StatBlockComponent statBlock={statBlock} displayMode={this.props.displayMode} enricher={this.props.enricher} hideName />
      {notes && notes.length > 0 && <div className="c-combatant-details__notes">{renderedNotes}</div>}
    </div>;
  }
}
def read_coordinates(file_path, md_rows, usecols=(1, 2)):
    """Read coordinate columns from a whitespace-delimited text file.

    Parameters
    ----------
    file_path : str or file-like
        Path (or open handle) passed straight through to ``np.loadtxt``.
    md_rows : int
        Number of leading metadata/header rows to skip.
    usecols : tuple of int, optional
        Column indices to extract. Defaults to ``(1, 2)``, preserving the
        original hard-coded behaviour while allowing other layouts.

    Returns
    -------
    numpy.ndarray
        The selected columns; shape ``(n_rows, len(usecols))`` for
        multi-row input (``np.loadtxt`` squeezes a single row to 1-D).
    """
    return np.loadtxt(file_path, skiprows=md_rows, usecols=usecols)
def delete_firebase_user(obj):
    """Best-effort deletion of the Firebase account for ``obj``.

    ``obj`` must expose a ``uid`` attribute (the Firebase user id).
    Failures are logged but never propagated: account cleanup must not
    break the caller. The original bare ``except: pass`` also swallowed
    ``KeyboardInterrupt``/``SystemExit`` and hid the failure reason;
    narrowing to ``Exception`` and logging fixes both.
    """
    try:
        print('Deleting uid from firebase - ', obj.uid)
        firebase_auth.delete_user(obj.uid)
    except Exception as exc:
        # Deliberately best-effort, but record why the deletion failed.
        print('Failed to delete firebase user - ', exc)
Our opinion: Albany County wants to buy the Family Court building that it’s been renting. It’s paid dearly for it already. Well, Albany County taxpayers, does your government have a deal for you. Turns out that you — as in, all 304,000 county residents — just might be able to own that spiffy Family Court building across from the Palace Theatre after all. What, you thought owned it already? Nope, it merely seems like it’s the county’s own building. Funny what some $2 million a year in rent payments can make some people think. Too bad that one condition of any sale agreement that the County Legislature might be able to reach with the real owner, Columbia Development, can’t stipulate a closing date of, oh, some time back in 2005. That’s when the county started leasing what then was a brand new facility to house a court system that was overwhelmed by the need for more space. How bad a deal did that turn out to be? Well, rent payments alone on the courthouse now have exceeded $13.5 million. Oh, and factor in taxes — yes, taxpayers, you’re on the hook for that bill, too — and it’s more like $15 million. Some lease agreement, isn’t it — with eight more years and $16.6 million in rent payments remaining? And all for a building that’s assessed at just $8.5 million. There’s good reason to suspect, meanwhile, that the $15 million the county has sunk into the courthouse already is quite close to what it cost to build it. Oh, the questions that need to be asked as County Executive Daniel McCoy and legislature Chairman Shawn Morse try to negotiate their way out of one really bad deal and into another with complications of its own. How much should the county be willing to pay — with borrowed money, alas — for a building that it would have been better off owning all along? That’s hard to say, maddeningly enough. The appraised value of the courthouse, $23.6 million, is misleading: It includes the steady payment of that staggering amount of annual rent. 
It would make sense to factor in such payments if the building were to be sold to a private party. But it makes no sense here. It’s not like the county, once it owned the courthouse, would collect $2 million a year in rent from itself. What happens if there’s no deal? Then the fiscal picture gets even murkier for a county government already steeped in fiscal gimmickry. This is the same County Legislature, remember, that barely cut a $565 million budget down to $557 million, yet slashed a proposed 19 percent property tax increase by more than half, to 8 percent. It did so, in large part, by betting on unrealistically high sales tax revenue. That same budget also assumes savings — prematurely, typically — of about $1 million a year by buying the courthouse rather than continuing to rent it. Talk about outstanding debts, as in seven years of woefully insufficient accountability. Family Court is where cases are heard involving seemingly every hardship that can afflict children and families. But where do taxpayers go to show their displeasure at paying so dearly for a courthouse with nothing to show for it except for bills and more bills?
/* eslint no-console: 0 */ // import Vue from 'vue' import Vue from 'vue'; import axios from 'axios'; //@ts-ignore import { csrfToken } from 'rails-ujs'; axios.defaults.headers.common['X-CSRF-Token'] = csrfToken(); new Vue({ el: '#salmon', data () { return { isOpen: false, openingEvent: {}, events: [], stages: [], weaponName: null, selectedStage: null, }; }, mounted () { axios.post('/graphql', { query: `{ events{ id startAt endAt hours stage { id name } eventsWeapons { weapon{ name imageUrl } } } opening{ id startAt endAt hours stage { name } eventsWeapons { weapon{ name imageUrl countText} sinceLastEventTimes } } stages{ id name } }`, variables: null }).then((res) => { this.stages = res.data.data.stages; this.events = res.data.data.events; if (res.data.data.opening != null){ this.isOpen = true; this.openingEvent = res.data.data.opening; } }); }, methods: { eventSearch () { axios.post('/graphql', { operationName: 'events', query: `query events ($weaponName: String $stageId: Int){ events(weaponName: $weaponName stageId: $stageId){ id startAt endAt hours stage { id name } eventsWeapons { weapon{ name imageUrl } } } }`, variables: { 'weaponName': this.weaponName, 'stageId': parseInt(this.selectedStage) } }).then((res) => { this.events = res.data.data.events; }); } } });
<reponame>mcaz/next.js-ssr-portfolio<filename>src/env/frontend/components/pageTemplates/Store/Store.const.ts
// Identifier for the Store page template, referenced where templates are looked up by name.
export const TEMPLATE_NAME = 'Store';
Audi has been the scrappy underdog fighting its way to the top of the premium market. With Audi having arrived as an elite luxury automaker, it faces Volvo as the new kid intent on forcing its way back into competition. The two automakers are positioned for a skirmish: Each is introducing an impressive, new large crossover aimed at roughly the same affluent buyers. The new Q7 from Audi is a revised version of its current popular crossover, featuring a new design language that will be evident on all new Audi models and a lighter structure due to expanded use of aluminum instead of steel. Q7 will weigh 716 pounds less than the model it replaces, with a distinctive grille that differs noticeably from its predecessor. Volvo, the Swedish automaker that has been owned since 2010 by Chinese automaking conglomerate Zhejiang Geely Holding Corp., has redesigned XC90 for the first time in a decade. Previously, Ford Motor Co. (F) owned Volvo. Since Geely’s acquisition, Volvo sales have withered in the U.S. while the brand has gained popularity in China. Volvo executives, taking an unconventional strategy, have decided to beef up Internet marketing and sales while reducing the number of auto shows where the new model will be shown. XC90 will be on display at the North American International Auto Show in Detroit in early January, in Geneva in March and in Beijing in April. Sales of both models will begin in the U.S. this spring. The stakes are quite high for U.S. Volvo dealers, who have been relatively starved for new products, while U.S. Audi dealers have enjoyed a prosperous run with a spate of new sedans and crossovers, large and small. Both of the big crossovers can carry seven passengers. Prior to its decision to limit auto show appearances, Volvo displayed an XC90 prototype at the Los Angeles show in November. 
In a review, Cars.com said the vehicle was full of “high-quality materials that would look right at home inside a Mercedes-Benz S-Class, combined with technology including a massive touch-screen that wouldn’t look out of place in a concept car—think Tesla-Model-S-touch-screen big.” Audi, based in Ingolstadt, Germany, gained attention for its current generation of cars with its “Nuvolari” design, featuring an exceptionally large grille, named for the legendary 1950s-era race car driver, Tazio Nuvolari. Its new styling theme also has a distinctive fascia, whose design resembles a trapezoid. Both companies, Volkswagen and Geely, have plans to manufacture their luxury crossovers in emerging markets, VW in Mexico and Geely in China. The strategy will make Q7 and XC90 more cost competitive than they had been as European-built vehicles. “Worldwide, every third Audi will be an SUV (or crossover) by 2020,” Audi CEO Rupert Stadler said in 2013 during a groundbreaking ceremony at the plant near Puebla, Mexico. “The new Audi plant in Mexico will make a major contribution to that.” Audi builds its smaller Q5 SUV—or crossover, if you like—in China as well. Volvo is taking a big chance by de-emphasizing auto shows, since they are major marketing events for dealers from the surrounding area. If dealers get the idea that Volvo is trying to sidestep them, the tactic could arise their ire to the automaker’s detriment. The German/Swedish/Chinese dustup should be lively and worth watching, in terms of proving which models consumers prefer and which marketing techniques prove more effective.
Narrating emotional events in schizophrenia. Research has indicated that schizophrenia patients report similar amounts of experienced emotion in response to emotional material compared with nonpatients. However, less is known about how schizophrenia patients describe and make sense of their emotional life events. We adopted a narrative approach to investigate schizophrenia patients' renderings of their emotional life experiences. In Study 1, patients' (n=42) positive and negative narratives were similarly personal, tellable, engaged, and appropriate. However, negative narratives were less grammatically clear than positive narratives, and positive narratives were more likely to involve other people than negative narratives. In Study 2, emotional (positive and negative) narratives were less tellable and detached, yet more linear and social compared with neutral narratives for both schizophrenia patients (n=24) and healthy controls (n=19). However, patients' narratives about emotional life events were less appropriate to context and less linear, and patients' narratives, whether emotional or not, were less tellable and more detached compared with controls' narratives. Although schizophrenia patients are capable of recounting life events that trigger different emotions, the telling of these life events is fraught with difficulty.
<reponame>uwblueprint/richmond-centre-for-disability<filename>lib/applications/field-resolvers.ts
import { ApolloError } from 'apollo-server-micro';
import { FieldResolver } from '@lib/graphql/resolvers'; // Resolver type
import { Applicant, Application, ApplicationProcessing } from '@lib/graphql/types'; // Application type

/**
 * Field resolver to return the GraphQL type of an application, discriminated
 * on the persisted `type` column.
 * @returns Type of application (NewApplication, RenewalApplication, ReplacementApplication)
 * @throws ApolloError when the stored type is not one of the known values
 */
export const __resolveApplicationType: FieldResolver<
  Application,
  'NewApplication' | 'RenewalApplication' | 'ReplacementApplication'
> = async parent => {
  switch (parent.type) {
    case 'NEW':
      return 'NewApplication';
    case 'RENEWAL':
      return 'RenewalApplication';
    case 'REPLACEMENT':
      return 'ReplacementApplication';
    default:
      throw new ApolloError('Application is of invalid type');
  }
};

/**
 * Field resolver to fetch the applicant that the application belongs to.
 * Uses Prisma's fluent API off the application record.
 * @returns Applicant object (relation-heavy fields omitted from the type)
 */
export const applicationApplicantResolver: FieldResolver<
  Application,
  Omit<
    Applicant,
    | 'mostRecentPermit'
    | 'activePermit'
    | 'permits'
    | 'completedApplications'
    | 'guardian'
    | 'medicalInformation'
  >
> = async (parent, _args, { prisma }) => {
  return await prisma.application.findUnique({ where: { id: parent.id } }).applicant();
};

/**
 * Fetch processing data of an application via the Prisma relation.
 * @returns Application processing object
 */
export const applicationProcessingResolver: FieldResolver<Application, ApplicationProcessing> = (
  parent,
  _args,
  { prisma }
) => {
  return prisma.application.findUnique({ where: { id: parent.id } }).applicationProcessing();
};
/// Validates an ISBN-10 string.
///
/// Hyphens and other separators are ignored; only ASCII digits and the
/// uppercase check character `X` count. The string must contain exactly
/// ten such characters, `X` may only appear as the final (check) position
/// where it stands for 10, and the weighted sum
/// `d1*10 + d2*9 + ... + d10*1` must be divisible by 11.
pub fn is_valid_isbn(isbn: &str) -> bool {
    let significant: Vec<char> = isbn
        .chars()
        .filter(|&c| c.is_digit(10) || c == 'X')
        .collect();

    // An ISBN-10 has exactly ten significant characters.
    if significant.len() != 10 {
        return false;
    }

    let mut checksum: u32 = 0;
    for (position, &c) in significant.iter().enumerate() {
        if c == 'X' {
            // 'X' is only legal as the check digit (last position), worth 10.
            if position != 9 {
                return false;
            }
            checksum += 10;
        } else {
            checksum += c.to_digit(10).unwrap() * (10 - position as u32);
        }
    }

    checksum % 11 == 0
}
<reponame>ibcom/mydigitalstructure-learn-xero<filename>node_modules/xero-node/dist/gen/model/files/fileObject.d.ts
import { User } from '././user';
/**
 * Generated declaration for a Xero Files API file object: metadata about an
 * uploaded file plus the serialization maps used by the generated client.
 */
export declare class FileObject {
    /**
     * File Name
     */
    'name'?: string;
    /**
     * MimeType of the file (image/png, image/jpeg, application/pdf, etc..)
     */
    'mimeType'?: string;
    /**
     * Numeric value in bytes
     */
    'size'?: number;
    /**
     * Created date in UTC
     */
    'createdDateUtc'?: string;
    /**
     * Updated date in UTC
     */
    'updatedDateUtc'?: string;
    'user'?: User;
    /**
     * File object\'s UUID
     */
    'id'?: string;
    /**
     * Folder relation object\'s UUID
     */
    'folderId'?: string;
    static discriminator: string | undefined;
    static attributeTypeMap: Array<{
        name: string;
        baseName: string;
        type: string;
    }>;
    static getAttributeTypeMap(): {
        name: string;
        baseName: string;
        type: string;
    }[];
}
package changelog

import (
	"github.com/tryfix/kstream/producer"
	"time"
)

// options holds the configurable parameters for a changelog.
type options struct {
	buffered      bool          // whether writes are buffered (set by Buffered)
	bufferSize    int           // buffer capacity (set by Buffered)
	flushInterval time.Duration // interval between flushes (set by FlushInterval)
	producer      producer.Producer
}

// Options is a functional option that mutates an options struct.
type Options func(config *options)

// apply seeds defaults for the given id, then applies each option in order.
func (c *options) apply(id string, options ...Options) error {
	if err := c.applyDefaults(id); err != nil {
		return err
	}
	for _, opt := range options {
		opt(c)
	}
	return nil
}

// applyDefaults sets default values for the given id.
// Currently a no-op stub that always succeeds.
func (c *options) applyDefaults(id string) error {
	return nil
}

// Producer returns an option that sets the producer used by the changelog.
func Producer(p producer.Producer) Options {
	return func(config *options) {
		config.producer = p
	}
}

// Buffered returns an option that enables buffering with the given size.
func Buffered(size int) Options {
	return func(config *options) {
		config.buffered = true
		config.bufferSize = size
	}
}

// FlushInterval returns an option that sets the buffer flush interval.
func FlushInterval(d time.Duration) Options {
	return func(config *options) {
		config.flushInterval = d
	}
}
<filename>201912/agc041/1.cpp
#include "base.hpp"
//#include "consts.hpp"

// Competitive-programming solution (appears to be AtCoder AGC041 A):
// two players stand on squares A < B of a row of N squares; each step both
// must move one square. Output the minimum steps until they can meet.
void solve() {
  int N,A,B;
  cin >>N>>A>>B;
  if((B - A) % 2 == 0) {
    // Same parity: they walk toward each other and meet in the middle.
    cout << (B - A) / 2 ln;
  } else {
    // Opposite parity: one player must first bounce off an end of the row.
    // B - (1 - A) = A + B - 1 covers bouncing off square 1;
    // (N + 1 - A) - (1 - (N + 1 - B)) = 2N + 1 - A - B covers bouncing off square N.
    // Halving the smaller round-trip distance gives the step count.
    cout << min(B - (1 - A), (N + 1 - A) - (1 - (N + 1 - B))) / 2 ln;
  }
}
def testOpen(self):
    """Round-trip test: bytes written via Open(mode='w') are read back intact."""
    handle = file_util.LocalFileHandle('file:///dir/file')
    # Write binary content through the handle's writable stream.
    with handle.Open(mode='w') as f:
        f.write(b'hello\nworld')
    # Re-open (no mode argument) and confirm the exact bytes come back.
    with handle.Open() as f:
        self.assertEqual(f.read(), b'hello\nworld')
<filename>src/main/java/com/appsflyer/donkey/server/ring/route/RingRouteCreator.java
/*
 * Copyright 2020-2021 AppsFlyer
 *
 * Licensed under the Apache License, Version 2.0 (the "License")
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.appsflyer.donkey.server.ring.route;

import com.appsflyer.donkey.server.handler.AdapterFactory;
import com.appsflyer.donkey.server.ring.handler.RingAdapterFactory;
import com.appsflyer.donkey.server.ring.handler.RingHandler;
import com.appsflyer.donkey.server.route.AbstractRouteCreator;
import com.appsflyer.donkey.server.route.RouteDefinition;
import com.appsflyer.donkey.server.route.RouteList;
import io.vertx.ext.web.Route;
import io.vertx.ext.web.Router;

/**
 * Route creator for Ring-style handlers: configures each Vert.x {@link Route}
 * from its {@link RouteDefinition}, and for {@link RingHandler}s wraps the
 * user handler between request/response adapters that translate between the
 * Vert.x and Ring representations.
 */
public final class RingRouteCreator extends AbstractRouteCreator {

  /** Factory method mirroring the parent type's creation convention. */
  public static AbstractRouteCreator create(Router router, RouteList routeList) {
    return new RingRouteCreator(router, routeList);
  }

  // Supplies the request/response adapter handlers used around Ring handlers.
  private final AdapterFactory adapterFactory;

  private RingRouteCreator(Router router, RouteList routeList) {
    super(router, routeList);
    adapterFactory = RingAdapterFactory.create();
  }

  /**
   * Applies path, methods, consumes and produces from the definition, then
   * installs the handler chain. Ring handlers additionally get a body handler
   * and are sandwiched between the request adapter (before) and response
   * adapter (after); other handlers are installed as-is.
   */
  @Override
  protected void buildRoute(Route route, RouteDefinition rd) {
    setPath(route, rd);
    setMethods(route, rd);
    setConsumes(route, rd);
    setProduces(route, rd);
    if (rd.handler() instanceof RingHandler) {
      addBodyHandler(route);
      route.handler(adapterFactory.requestAdapter());
      addHandler(route, rd.handler(), rd.handlerMode());
      route.handler(adapterFactory.responseAdapter());
    } else {
      addHandler(route, rd.handler(), rd.handlerMode());
    }
  }
}
def load_image(self, image_index):
    """Load the image at `image_index` by resolving its path and delegating
    to `read_image_bgr` (returns the decoded image in BGR channel order,
    per that helper's name — confirm against its implementation)."""
    return read_image_bgr(self.image_path(image_index))
package wusc.edu.pay.core.payrule.dao.impl;

import java.util.HashMap;
import java.util.Map;

import org.springframework.stereotype.Repository;

import wusc.edu.pay.common.core.dao.BaseDaoImpl;
import wusc.edu.pay.core.payrule.dao.UserPayRuleSettingDao;
import wusc.edu.pay.facade.payrule.entity.UserPayRuleSetting;

/**
 * ClassName: UserPayRuleSettingDaoImpl <br/>
 * Function: DAO implementation for user/pay-rule setting associations. <br/>
 * date: 2014-6-27 9:25:15 AM <br/>
 *
 * @author laich
 */
@Repository(value="userPayRuleSettingDao")
public class UserPayRuleSettingDaoImpl extends BaseDaoImpl<UserPayRuleSetting> implements UserPayRuleSettingDao {

	/***
	 * Looks up the user/pay-rule association record by pay rule ID and user number.
	 * (Translated from the original Chinese comment.)
	 *
	 * @param payRuleId pay rule ID
	 * @param userNo user number
	 * @return the matching setting; behaviour when absent depends on
	 *         {@code BaseDaoImpl.getBy} (presumably {@code null}) — verify
	 */
	public UserPayRuleSetting getRuleSetByRuleAndUserNo(Long payRuleId, String userNo) {
		Map<String, Object> paramMap = new HashMap<String, Object>();
		paramMap.put("payRuleId", payRuleId);
		paramMap.put("userNo", userNo);
		return super.getBy(paramMap);
	}
}
package sqlite.feature.many2many.err3;

import com.abubusoft.kripton.android.annotation.BindDao;
import com.abubusoft.kripton.android.annotation.BindDaoMany2Many;
import com.abubusoft.kripton.android.annotation.BindGeneratedDao;
import com.abubusoft.kripton.android.annotation.BindSqlDelete;
import com.abubusoft.kripton.android.annotation.BindSqlInsert;
import com.abubusoft.kripton.android.annotation.BindSqlParam;
import com.abubusoft.kripton.android.annotation.BindSqlSelect;
import java.util.List;
import sqlite.feature.many2many.City;
import sqlite.feature.many2many.Person;

/**
 * Kripton-generated DAO for the Person/City many-to-many join entity
 * ({@code PersonCityErr3}): select/delete by either foreign key, delete by id,
 * and insert — all declared via annotations, with no hand-written SQL.
 *
 * NOTE(review): this interface lives in the {@code err3} package and binds
 * {@code PersonCityErr3} while being named {@code ...Err1Dao} and extending
 * {@code PersonCityErr1Dao} — presumably an intentional error-case fixture
 * for annotation-processor tests; confirm before reusing.
 */
@BindDao(PersonCityErr3.class)
@BindGeneratedDao(
    dao = PersonCityErr1Dao.class
)
@BindDaoMany2Many(
    entity1 = Person.class,
    entity2 = City.class
)
public interface GeneratedPersonCityErr1Dao extends PersonCityErr1Dao {
  // All rows linked to a given person.
  @BindSqlSelect(
      where = "personId=:personId"
  )
  List<PersonCityErr3> selectByPersonId(@BindSqlParam("personId") long personId);

  // All rows linked to a given city.
  @BindSqlSelect(
      where = "cityId=:cityId"
  )
  List<PersonCityErr3> selectByCityId(@BindSqlParam("cityId") long cityId);

  @BindSqlDelete(
      where = "id=:id"
  )
  int deleteById(@BindSqlParam("id") long id);

  @BindSqlDelete(
      where = "personId=:personId"
  )
  int deleteByPersonId(@BindSqlParam("personId") long personId);

  @BindSqlDelete(
      where = "cityId=:cityId"
  )
  int deleteByCityId(@BindSqlParam("cityId") long cityId);

  @BindSqlInsert
  int insert(@BindSqlParam("bean") PersonCityErr3 bean);
}
// Camera setup, used if frames are loaded from camera.
// Builds the GStreamer pipeline description for an NVIDIA CSI camera
// (nvarguscamerasrc): captures NV12 frames at the requested size/framerate,
// flips via nvvidconv, converts to BGR, and exposes frames through an
// appsink that drops when the consumer falls behind.
// NOTE(review): display_width/display_height are accepted but never
// referenced in the pipeline string — confirm whether a scaling stage
// was intended.
std::string gstreamer_pipeline(int sensor_id, int capture_width, int capture_height,
                               int display_width, int display_height, int framerate,
                               int flip_method) {
    // Assemble the description incrementally, stage by stage.
    std::string pipeline = "nvarguscamerasrc sensor-id=" + std::to_string(sensor_id);
    pipeline += " ! video/x-raw(memory:NVMM), width=(int)" + std::to_string(capture_width);
    pipeline += ", height=(int)" + std::to_string(capture_height);
    pipeline += ", format=(string)NV12, framerate=(fraction)" + std::to_string(framerate) + "/1";
    pipeline += " ! nvvidconv flip-method=" + std::to_string(flip_method);
    pipeline += " ! video/x-raw, format=(string)BGRx ! videoconvert ! video/x-raw, format=(string)BGR ! appsink drop=true sync=false";
    return pipeline;
}
import { GangRow } from 'components';
import { gangService } from 'app/service';
import { Gang } from 'slate-rp-interfaces';
import { Card, Loading } from 'slate-frontend';
import React, { useEffect, useState } from 'react';
import { defaultGangContainerState, GangContainerState } from './';

/**
 * Container that fetches all gangs once on mount and renders them as rows
 * inside a card: a spinner while the request is in flight, an empty-state
 * message when no gangs exist, otherwise one GangRow per gang.
 */
export function GangContainer() {
    const [state, setState] = useState<GangContainerState>(defaultGangContainerState);

    // Fetch exactly once on mount (empty dependency array).
    useEffect(() => {
        async function fetchGangs(): Promise<void> {
            const gangs: Gang[] = await gangService.getAll();
            // Replace state wholesale: loaded gang list, spinner off.
            setState({
                gangs,
                showSpinner: false,
            });
        }

        fetchGangs();
    }, []);

    return (
        <Card>
            <Loading isLoading={state.showSpinner}>
                {state.gangs.length === 0 && !state.showSpinner && <p>There are no gangs yet!</p>}
                {state.gangs.map((gang) => (
                    <GangRow key={gang.id} gang={gang} />
                ))}
            </Loading>
        </Card>
    );
}
/**
 * Adds a set of alert statuses to ZooKeeper.
 * <p>
 * Rebuilds the in-memory {@code _alertStatusMap} from {@code statusSet}
 * (skipping alerts whose status map is {@code null}), then persists when
 * either the rebuilt map is non-empty or ZK already holds status entries
 * (the latter so stale entries get cleared).
 *
 * @param statusSet alert name to per-alert value/status map
 * @throws HelixException propagated from the underlying add/persist calls
 */
public void addAlertStatusSet(Map<String, Map<String, AlertValueAndStatus>> statusSet) throws HelixException {
  if (_alertStatusMap == null) {
    _alertStatusMap = new HashMap<String, Map<String, String>>();
  }
  _alertStatusMap.clear();
  // Iterate entries directly instead of keySet() + get() lookups.
  for (Map.Entry<String, Map<String, AlertValueAndStatus>> entry : statusSet.entrySet()) {
    Map<String, AlertValueAndStatus> currStatus = entry.getValue();
    if (currStatus != null) {
      addAlertStatus(entry.getKey(), currStatus);
    }
  }
  AlertStatus alertStatus = _accessor.getProperty(_keyBuilder.alertStatus());
  int alertStatusSize = 0;
  if (alertStatus != null) {
    alertStatusSize = alertStatus.getMapFields().size();
  }
  // Persist when there is something new to write or existing ZK state to overwrite.
  if (_alertStatusMap.size() > 0 || alertStatusSize > 0) {
    persistAlertStatus();
  }
}
def popmain_get_population_data_all_thailand_cities(city_list):
    """Collect area/population/density for each Thai city from Wikipedia.

    For every city in ``city_list``, builds its English-Wikipedia URL via
    ``gen_wiki_url_from_city_list`` and scrapes the figures with
    ``get_info_from_wiki_url``.

    Parameters
    ----------
    city_list : iterable of str
        City names as they appear in Wikipedia article titles.

    Returns
    -------
    pandas.DataFrame
        Columns: City, Area (km2), Population, Population Density (/km2);
        one row per city, in ``city_list`` order.
    """
    base_wiki_url = 'https://en.wikipedia.org/wiki/'
    wiki_url_dict = gen_wiki_url_from_city_list(base_wiki_url, city_list)
    df_cols = ['City', 'Area (km2)', 'Population', 'Population Density (/km2)']
    # Accumulate rows in a list and build the frame once: growing a DataFrame
    # row-by-row with .loc is quadratic, since each insert may copy the frame.
    rows = []
    for city, wiki_url in wiki_url_dict.items():
        area, pop, density = get_info_from_wiki_url(wiki_url)
        rows.append([city, area, pop, density])
    # dtype=object matches the original row-appended frame's column dtypes.
    return pd.DataFrame(rows, columns=df_cols, dtype=object)
package main

import (
	"bytes"
	"io"
	"testing"

	. "gopkg.in/check.v1"
)

// Test hooks gocheck into the standard `go test` runner.
func Test(t *testing.T) { TestingT(t) }

type EventReaderSuite struct{}

var _ = Suite(&EventReaderSuite{})

// TestParseFromReader verifies that EventReader splits the stream on '\r'
// delimiters, yielding each message in order and io.EOF (with an empty
// event) once the source is exhausted.
func (s *EventReaderSuite) TestParseFromReader(c *C) {
	source := bytes.NewBuffer([]byte("first-message\rsecond-message\r{\"third\":\"message\"}"))
	reader := NewEventReader(source)

	event, err := reader.ReadEvent()
	c.Assert(err, IsNil)
	c.Assert(string(event), Equals, "first-message")

	event, err = reader.ReadEvent()
	c.Assert(err, IsNil)
	c.Assert(string(event), Equals, "second-message")

	// The final event has no trailing '\r'; it must still be returned intact.
	event, err = reader.ReadEvent()
	c.Assert(err, IsNil)
	c.Assert(string(event), Equals, `{"third":"message"}`)

	event, err = reader.ReadEvent()
	c.Assert(err, Equals, io.EOF)
	c.Assert(string(event), Equals, "")
}
<reponame>isabella232/tower-wear package com.o3dr.android.dp.wear.lib.utils.unit.providers.length; import org.beyene.sius.operation.Operation; import org.beyene.sius.unit.UnitIdentifier; import org.beyene.sius.unit.length.Constants; import org.beyene.sius.unit.length.LengthUnit; import org.beyene.sius.unit.length.Meter; /** * Created by <NAME> on 1/20/15. */ public class MetricLengthUnitProvider extends LengthUnitProvider { @Override public LengthUnit fromBaseToTarget(Meter base) { double absBase = Math.abs(base.getValue()); if(absBase >= Constants.METER_PER_KILOMETER) return Operation.convert(base, UnitIdentifier.KILOMETER); else return base; } }
def hide_model_visuals(self, model_name):
    """Hide every visual element belonging to `model_name` by collecting the
    model's visuals and delegating to `hide_visuals`."""
    visuals = self.get_model_visuals(model_name)
    self.hide_visuals(visuals=visuals)
/**
 * Checks whether {@code item} is present in {@code parent}.
 * <p>
 * The original body logged two empty strings via {@code CoreLogger.log("", "")}
 * on every hit — leftover debug noise with no information content, removed.
 *
 * @param parent the array to search
 * @param item the value to look for
 * @return {@code true} if found, {@code false} otherwise (including for an empty array)
 */
public static final boolean contains(final int[] parent, final int item){
    for(int arrayItem : parent){
        if(arrayItem == item) {
            return true;
        }
    }
    return false;
}
def download_file(ftp, path):
    """Fetch ``path`` from an FTP connection into an in-memory buffer.

    Issues a binary ``RETR`` for ``path`` on ``ftp`` and returns a
    ``BytesIO`` rewound to the start, ready for reading.
    """
    buffer = BytesIO()
    ftp.retrbinary('RETR ' + path, buffer.write)
    # Rewind so callers can read from the beginning immediately.
    buffer.seek(0)
    return buffer
// sample of how many attempts before an event occurs long long int skip_sample(double p_event) { if(p_event == 0.0) { return LLONG_MAX; } double u = random_uniform(); double num = floor(log1p(-u)/log1p(-p_event)); if(num > (double)(LLONG_MAX-1)) { return LLONG_MAX; } return (long long int)num; }
def bezel_button_positions(self, st_bmp):
    """Compute top-left positions for the two bezel buttons around a bitmap.

    Returns ``[right_button_pos, bottom_button_pos]``: the first centered
    vertically on the bitmap's right edge, the second centered horizontally
    on its bottom edge, each offset by half the button size so the button
    straddles the edge.

    NOTE(review): assumes ``st_bmp`` is a widget exposing ``GetSize()`` /
    ``GetPosition()`` (wx-style) — confirm. The ``/`` divisions yield floats
    under Python 3; presumably downstream tolerates non-integer pixel
    positions — verify.
    """
    sz = st_bmp.GetSize()
    pos = st_bmp.GetPosition()
    bsz = self.bez_butt_sz  # (width, height) of a bezel button
    # Right-edge button: vertical midpoint of the bitmap's right side.
    pos_rb = (sz[0] + pos[0] - bsz[0]/2, sz[1]/2 + pos[1] - bsz[1]/2)
    # Bottom-edge button: horizontal midpoint of the bitmap's bottom side.
    pos_bb = (sz[0]/2 + pos[0] - bsz[0]/2, sz[1] + pos[1] - bsz[1]/2)
    return [pos_rb, pos_bb]
// Emoji returns an emoji object for the given guild and emoji IDs.
// RequestJSON decodes the response into emj through the passed pointer;
// if it never populates it (e.g. on request failure), the returned
// pointer stays nil alongside the error.
func (c *Client) Emoji(guildID discord.GuildID, emojiID discord.EmojiID) (*discord.Emoji, error) {
	var emj *discord.Emoji
	return emj, c.RequestJSON(&emj, "GET", EndpointGuilds+guildID.String()+"/emojis/"+emojiID.String())
}
<filename>packages/PIPS/validation/Semantics-New/NSAD_2011.sub/merchat_thesis_4-09.c
// <NAME>: Réduction du nombre de variables en analyse de relations
// linéaires
// (Reduction of the number of variables in linear relation analysis)
// figure 4.9
// <NAME>: Accélération abstraite pour l'amélioration de la précision en
// Analyse des Relations Linéaires
// (Abstract acceleration for improving precision in Linear Relation Analysis)
// figure 4.6
// $Id$

// Verification benchmark: a guarded-command transition system over state
// (s, d, v, t). BAD marks the error states; CHECK aborts when one is reached.

// parameters

#define DO_CONTROL 0
#define DO_CHECKING 1

#define BAD (s >= 3)

// tools

#include <stdlib.h>
#include <stdio.h>

// Nondeterministic choice helpers driven by rand().
int rand_b(void) {
  return rand() % 2;
}
int rand_z(void) {
  return rand() - rand();
}

#define OR(t1, t2) {if (rand_b()) {t1} else {t2}}
#define LOOP(t) {while (rand_b()) {t}}

// Blocks forever: models an infeasible path when an ASSUME fails.
void deadlock() {
  printf("deadlock\n");
  while (1);
}
#define ASSUME(c) {if (!(c)) deadlock();}

#if DO_CONTROL == 0
#define CONTROL(c)
#else
void control_error(void) {
  fprintf(stderr, "control error");
  exit(1);
}
#define CONTROL(c) {if (!(c)) control_error();}
#endif

#if DO_CHECKING == 0
#define CHECK
#else
void checking_error(void) {
  fprintf(stderr, "checking error");
  exit(2);
}
#ifdef GOOD
#define CHECK {if (!(GOOD)) checking_error();}
#else
#ifdef BAD
#define CHECK {if (BAD) checking_error();}
#endif
#endif
#endif

// A command = guard assumption + action, optionally followed by a CHECK.
#define COMMAND_NOCHECK(g, a) {ASSUME(g); a;}
#define COMMAND(g, a) {COMMAND_NOCHECK(g, a); CHECK;}

// control and commands

// S1..S9: control-point invariants used by the restructured system.
#define S1 CONTROL(s == 1 && d <= 8 && v <= 1 && t <= 2)
#define S2 CONTROL(s == 1 && d <= 8 && v <= 1 && t >= 3)
#define S3 CONTROL(s == 1 && d <= 8 && v == 2 && t <= 2)
#define S4 CONTROL(s == 1 && d == 9 && v <= 1 && t <= 2)
#define S5 CONTROL(s == 1 && d == 9 && v == 2 && t <= 2)
#define S6 CONTROL(s == 1 && d <= 8 && v == 2 && t >= 3)
#define S7 CONTROL(s == 1 && d == 9 && v == 2 && t >= 3)
#define S8 CONTROL(s == 1 && d == 9 && v <= 1 && t >= 3)
#define S9 CONTROL(s == 2)

// Guards (G*), actions (A*) and guarded commands (C*); lettered variants
// split each guard into disjoint sub-cases for the restructured system.
#define G1 (s == 1 && t <= 2)
#define G1a (s == 1 && t <= 1)
#define G1b (s == 1 && t == 2)
#define A1 {v = 0; t++;}
#define C1 COMMAND(G1, A1)
#define C1a COMMAND(G1a, A1)
#define C1b COMMAND(G1b, A1)

#define G2 (s == 1 && v <= 1 && d <= 8)
#define G2a (s == 1 && v <= 0 && d <= 7)
#define G2b (s == 1 && v <= 0 && d == 8)
#define G2c (s == 1 && v == 1 && d <= 7)
#define G2d (s == 1 && v == 1 && d == 8)
#define A2 {v++; d++;}
#define C2 COMMAND(G2, A2)
#define C2a COMMAND(G2a, A2)
#define C2b COMMAND(G2b, A2)
#define C2c COMMAND(G2c, A2)
#define C2d COMMAND(G2d, A2)

#define G3 (s == 1 && t >= 3)
#define A3 {s = 2;}
#define C3 COMMAND(G3, A3)

#define G4 (s == 1 && d >= 10)
#define A4 {s = 3;}
#define C4 COMMAND(G4, A4)

#define G5 (s == 1 && v >= 3)
#define A5 {s = 4;}
#define C5 COMMAND(G5, A5)

#define INI {s = 1; d = v = t = 0;}

// transition system

// Single-control-point version: any enabled command may fire each iteration.
void ts_singlestate(void) {
  int s, d, v, t;
  INI; CHECK;
  LOOP(OR(C1, OR(C2, OR(C3, OR(C4, C5)))))
}

// Restructured version: control points S1..S9 made explicit, with each
// command split into the sub-cases consistent with those points.
void ts_restructured(void) {
  int s, d, v, t;
  INI; CHECK; S1;
  LOOP(OR(C1a; S1, OR(C2a; S1, C2c; S3; C1a; S1)));
  OR(
    OR(C2c; S3; C1b; S2, C1b; S2);
    LOOP(C2a; S2);
    OR(C3; S9, OR(C2c; S6; C3; S9, OR(C2d; S7; C3; S9, C2b; S8; C3; S9))),
    OR(
      OR(C2b; S4, C2d; S5; C1a; S4);
      LOOP(C1a; S4);
      C1b; S8,
      C2d; S5; C1b; S8
    );
    C3; S9;
  )
}

int main(void) {
  ts_singlestate();
  ts_restructured();
  return 0;
}
// ======================== PROTECTED METHODS of sad::animations::Factory ======================== void sad::animations::Factory::copy(const sad::animations::Factory& f) { for(sad::PtrHash<sad::String, sad::animations::Factory::AbstractDelegate>::const_iterator it = f.m_delegates.const_begin(); it != f.m_delegates.const_end(); ++it) { m_delegates.insert(it.key(), it.value()->clone()); } }
<filename>src/components/page-question/page-question.ts import { Component, Input, Output, EventEmitter, OnInit, OnDestroy, OnChanges, SimpleChanges } from '@angular/core'; import { UtilsProvider } from '../../providers/utils/utils'; /** * Generated class for the PageQuestionComponent component. * * See https://angular.io/api/core/Component for more info on Angular * Components. */ @Component({ selector: 'page-question', templateUrl: 'page-question.html' }) // export class PageQuestionComponent { export class PageQuestionComponent implements OnInit { @Input() inputIndex ; @Input() data: any; @Input() isLast: boolean; @Input() isFirst: boolean; @Output() nextCallBack = new EventEmitter(); @Output() updateLocalData = new EventEmitter(); @Output() previousCallBack = new EventEmitter() @Input() evidenceId: string; @Input() hideButton: boolean; @Input() submissionId: any; @Input() imageLocalCopyId: string; @Input() generalQuestion: boolean; @Input() schoolId; notNumber: boolean; questionValid: boolean; text: string; constructor(private utils : UtilsProvider) { console.log('Hello PageQuestionComponent Component'); this.text = 'Hello World'; } ngOnInit() { this.data.startTime = this.data.startTime ? this.data.startTime : Date.now(); } // ngOnDestroy(){} // ngOnChanges(changes: SimpleChanges) { // for (let propName in changes) { // let chng = changes[propName]; // let cur = JSON.stringify(chng.currentValue); // let prev = JSON.stringify(chng.previousValue); // console.log("NG ON DESTORY") // this.data.isCompleted = this.utils.isPageQuestionComplete(this.data); // // this.data.isCompleted = true; // // this.data.pageQuestions.array.forEach(element => { // // if(!element.isCompleted){ // // this.data.isCompleted = false; // // } // // }); this.utils.isQuestionComplete(this.data.pageQuestions) // } // } // ngOnInit() { // console.log("hello"); // // this.isaNumber(); // this.data.startTime = this.data.startTime ? 
this.data.startTime : Date.now(); // // this.getErrorMsg(); // // this.checkForValidation(); // console.log(JSON.stringify(this.data)) // } updateLocalDataInPageQuestion(): void { this.updateLocalData.emit(); } }
Parent outrage grows over Tasmanian Government plan to lower school starting age Updated An online campaign is building in resistance to the Tasmanian Government's plans to lower the school starting age from the year children turn five to three-and-a-half. The changes to the Education Act will be debated by Parliament in a few months and implemented in 2020. There are claims the plan has not been well thought out and academic evidence is being ignored but the Government said the opposite was true. Education Minister Jeremy Rockliff said the change would combat high levels of disadvantage and low levels of educational attainment. But this was disputed by Jannette Armstrong, a union representative for childcare workers and the creator of a social media page campaigning against the reforms. Ms Armstrong believes increasing support for disadvantaged families would be more beneficial. "They'll be potentially labelled earlier as troublesome or failing and earlier they get this sense of you know what, school's not for me and that will stay with them forever," she said. But Tasmania's Children's Commissioner Mark Morrissey said the change could help disadvantaged children. "The reality is that Tasmania has far too many children and young people not receiving rich play-based developmental opportunities that will be critical to them living successful lives," he said. Mr Rockliff said overseas and national research showed quality early learning was key to improving educational outcomes. The academic evidence A New Zealand study between 2007 and 2012 found introducing formal literacy learning earlier does not improve children's reading development in the long run and instead, may be damaging. Research in the United Kingdom and United States has shown children who attend primary schools with a play-based curriculum perform better than children at schools focusing on formal learning. 
Other international studies have linked the early introduction of structured learning with increases in mental health problems. One of the leading academics in the field, David Whitebread from the University of Cambridge, said any formal learning under the age of six was likely to be counterproductive, particularly for disadvantaged kids. "All the evidence we have suggests what early schooling does is actually increase the divide," he said. Marie Hammer from Monash University in Melbourne said it depended on the quality of the curriculum, and added it must be play-based and that teachers must be trained in early education. "Pressure on children in Asian schools where they have to perform at a very early age has led to quite an increase in suicide rates and self-harm," she said. So is the curriculum play-based and if not will it be changed? The Education Minister said there were no plans for the Government to change the curriculum. "Kindergarten is play-based and to a certain extent as well prep is play-based," he said. But Ms Armstrong disagreed and said preparatory classes were academically focused and she believed pressure would only increase on young children. "Recently with the federal budget that came out in May there was talk around implementing more NAPLAN-style testing for children in that prep cohort so it's pretty scary," Ms Armstrong said. Concerns about class sizes, teacher training and ratios Parents have questioned if there will be a cap on class sizes for younger children, whether preparatory teachers will have aides because their students will be younger and if teachers will be retrained. The Education Minister would not commit to a cap on class sizes but he said extra support would be provided to teachers and infrastructure would be upgraded. "We have over three-and-a-half years to ensure that we have the resources in place, both human resource and of course capital requirements to ensure this is a smooth transition," Mr Rockliff said. 
Toileting concerns among parents Tillie Butterworth, who is expecting her fourth child later this year, is among a number of parents concerned about the change. "One of my children was barely toilet trained at three-and-a-half he was not at all ready, the other one possibly would have been ready but I think at three-and-a-half years old they need care more than they need schooling," Ms Butterworth said. Ms Armstrong said the education department has told her nappy change tables would be placed in kindergarten classrooms. "That would really contradict the current policies in the education department where teachers and teachers aides aren't really allowed to touch the children," she said. "I don't know how these children are going to be assisted to have their nappies changed or be assisted if they're still learning the toileting process and how children at this stage when they're upset and they just need a cuddle are not going to be able to get that nurturing." Mr Rockliff called for calm. "I understand the questions that people have and we will answer those questions in terms of any changes and how that may impact," he said. Topics: education, state-parliament, children---preschoolers, hobart-7000, launceston-7250, burnie-7320, tas First posted
/** * Iterates <code>{@link AccountingLine}</code> instances in a given <code>{@link FinancialDocument}</code> instance and * compares them to see if they are all in the same Sub-Fund Group. * @see org.kuali.ole.sys.document.validation.Validation#validate(org.kuali.ole.sys.document.validation.event.AttributedDocumentEvent) */ public boolean validate(AttributedDocumentEvent event) { boolean valid = true; String baseSubFundGroupCode = null; int index = 0; List<AccountingLine> lines = getAccountingDocumentForValidation().getSourceAccountingLines(); for (AccountingLine line : lines) { if (index == 0) { baseSubFundGroupCode = line.getAccount().getSubFundGroupCode(); } else { String currentSubFundGroup = line.getAccount().getSubFundGroupCode(); if (!currentSubFundGroup.equals(baseSubFundGroupCode)) { GlobalVariables.getMessageMap().putError(OLEConstants.ACCOUNTING_LINE_ERRORS, OLEKeyConstants.AuxiliaryVoucher.ERROR_DIFFERENT_SUB_FUND_GROUPS); return false; } } index++; } return true; }
<reponame>Giancarl021/Next-Level-Week-01 import { celebrate, Joi, Segments } from 'celebrate'; const options = { abortEarly: false }; class PointValidator { show() { return celebrate({ [Segments.PARAMS]: Joi.object().keys({ id: Joi.number().required() }) }, options); } index() { return celebrate({ [Segments.QUERY]: Joi.object().keys({ city: Joi.string().required(), uf: Joi.string().required(), items: Joi.string().required() }) }, options); } create() { return celebrate({ [Segments.BODY]: Joi.object().keys({ name: Joi.string().required(), email: Joi.string().pattern(/\S+@\S+\.\S+/).required(), whatsapp: Joi.string().pattern(/\+(9[976]\d|8[987530]\d|6[987]\d|5[90]\d|42\d|3[875]\d|2[98654321]\d|9[8543210]|8[6421]|6[6543210]|5[87654321]|4[987654310]|3[9643210]|2[70]|7|1)\d{1,14}$/).required(), latitude: Joi.number().required(), longitude: Joi.number().required(), city: Joi.string().required(), uf: Joi.string().length(2).required(), items: Joi.string().required() }) }, options); } } export default PointValidator;
import DatasetsGrid from "src/components/Collection/components/CollectionDatasetsGrid/components/DatasetsGrid"; import styled from "styled-components"; export const CollectionDatasetsGrid = styled(DatasetsGrid)` grid-template-columns: 12fr 5fr 4fr repeat(2, 3fr) 2fr auto; th, td { word-break: break-word; /* word break on columns; maintains grid fr allocation on small viewports */ } `;
// Copyright (c) 2003 Compaq Corporation. All rights reserved. // Portions Copyright (c) 2003 Microsoft Corporation. All rights reserved. package tlc2.tool.impl; import tla2sany.semantic.SemanticNode; import tlc2.TLCGlobals; import tlc2.tool.IActionItemList; import tlc2.tool.coverage.CostModel; import tlc2.util.Context; class ActionItemList implements IActionItemList { private static final boolean coverage = TLCGlobals.isCoverageEnabled(); /** * We assume that this.pred is null iff the list is empty. */ public final SemanticNode pred; // Expression of the action public final Context con; // Context of the action private final int kind; public final ActionItemList next; public final CostModel cm; public final static ActionItemList Empty = new ActionItemList(null, null, 0, null, null); /* Constructors */ private ActionItemList(SemanticNode pred, Context con, int kind, ActionItemList next, CostModel cm) { this.pred = pred; this.con = con; this.kind = kind; this.next = next; this.cm = cm; } public final SemanticNode carPred() { return this.pred; } public final Context carContext() { return this.con; } /** * The meaning of this.kind is given as follows: * kind > 0: pred of a conjunction * kind = -1: pred * kind = -2: UNCHANGED pred * kind = -3: pred' # pred */ public final int carKind() { return this.kind; } public final ActionItemList cdr() { return this.next; } public final IActionItemList cons(SemanticNode pred, Context con, CostModel cm, int kind) { return new ActionItemList(pred, con, kind, this, coverage ? cm.get(pred) : cm); } public final boolean isEmpty() { return this == Empty; } }
import sys from collections import deque h,w = map(int, sys.stdin.readline().split()) maze = [list(sys.stdin.readline()) for _ in range(h)] dis = [[-1 for _ in range(w)] for _ in range(h)] que = deque() que.append((0,0)) dis[0][0] = 1 while que: y,x = que.popleft() for dx, dy in ((1,0), (0,1), (-1, 0), (0, -1)): nx = x + dx ny = y + dy if (nx < 0) or (nx > w-1) or (ny < 0) or (ny > h-1): continue if (maze[ny][nx] == '.') and (dis[ny][nx] == -1): dis[ny][nx] = dis[y][x] + 1 que.append((ny, nx)) ans = 0 black_num = 0 for m in maze: black_num += m.count('#') if dis[h-1][w-1] == -1: ans = -1 else: ans = w*h - black_num - dis[h-1][w-1] print(ans)
package main import ( "encoding/base64" "fmt" "io/ioutil" "net/http" "os" "regexp" "strconv" "strings" "time" "github.com/bwmarrin/discordgo" ) /** Used to validate a user's permissions before moving forward with a command. Prevents command abuse. If the user has administrator permissions, just automatically allow them to perform any bot command. **/ func userHasValidPermissions(s *discordgo.Session, m *discordgo.MessageCreate, permission int64) bool { perms, err := s.UserChannelPermissions(m.Author.ID, m.ChannelID) if err != nil { attemptSendMsg(s, m, "Error occurred while validating your permissions.") logError("Failed to acquire user permissions! " + err.Error()) return false } if perms|permission == perms || perms|discordgo.PermissionAdministrator == perms { return true } return false } /** Given a userID, generates a DM if one does not already exist with the user and sends the specified message to them. **/ func dmUser(s *discordgo.Session, userID string, message string) { channel, err := s.UserChannelCreate(userID) if err != nil { logError("Failed to create DM with user. " + err.Error()) return } _, err = s.ChannelMessageSend(channel.ID, message) if err != nil { logError("Failed to send message! " + err.Error()) return } logSuccess("Sent DM to user") } /** A helper function for Handle_nick. Ensures the user targeted a user using @; if they did, attempt to rename the specified user. **/ func attemptRename(s *discordgo.Session, m *discordgo.MessageCreate, command []string) { logInfo(strings.Join(command, " ")) regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`) if regex.MatchString(command[1]) && len(command) > 2 { userID := stripUserID(command[1]) err := s.GuildMemberNickname(m.GuildID, userID, strings.Join(command[2:], " ")) if err == nil { attemptSendMsg(s, m, "Done!") logSuccess("Successfully renamed user") } else { attemptSendMsg(s, m, fmt.Sprintf("Failed to set nickname.\n```%s```", err.Error())) logError("Failed to set nickname! 
" + err.Error()) return } return } attemptSendMsg(s, m, "Usage: `~nick @<user> <new name>`") } /** A helper function for Handle_kick. Ensures the user targeted a user using @; if they did, attempt to kick the specified user. **/ func attemptKick(s *discordgo.Session, m *discordgo.MessageCreate, command []string) { logInfo(strings.Join(command, " ")) regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`) if len(command) >= 2 { if regex.MatchString(command[1]) { userID := stripUserID(command[1]) if len(command) > 2 { reason := strings.Join(command[2:], " ") // dm user why they were kicked guild, err := s.Guild(m.GuildID) if err != nil { logError("Unable to load guild! " + err.Error()) } guildName := "error: could not retrieve" if guild != nil { guildName = guild.Name } dmUser(s, userID, fmt.Sprintf("You have been kicked from **%s** by %s#%s because: %s\n", guildName, m.Author.Username, m.Author.Discriminator, reason)) // kick with reason err = s.GuildMemberDeleteWithReason(m.GuildID, userID, reason) if err != nil { attemptSendMsg(s, m, "Failed to kick the user.") logError("Failed to kick user! " + err.Error()) return } attemptSendMsg(s, m, fmt.Sprintf(":wave: Kicked %s for the following reason: '%s'.", command[1], reason)) logSuccess("Kicked user with reason") } else { // dm user they were kicked guild, err := s.Guild(m.GuildID) if err != nil { logError("Unable to load guild! " + err.Error()) } guildName := "error: could not retrieve" if guild != nil { guildName = guild.Name } dmUser(s, userID, fmt.Sprintf("You have been kicked from **%s** by %s#%s.\n", guildName, m.Author.Username, m.Author.Discriminator)) // kick without reason err = s.GuildMemberDelete(m.GuildID, userID) if err != nil { attemptSendMsg(s, m, "Failed to kick the user.") logError("Failed to kick user! 
" + err.Error()) return } attemptSendMsg(s, m, fmt.Sprintf(":wave: Kicked %s.", command[1])) logSuccess("Kicked user") } return } } attemptSendMsg(s, m, "Usage: `~kick @<user> (reason: optional)`") } /** A helper function for Handle_ban. Ensures the user targeted a user using @; if they did, attempt to ban the specified user. **/ func attemptBan(s *discordgo.Session, m *discordgo.MessageCreate, command []string) { logInfo(strings.Join(command, " ")) regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`) if len(command) >= 2 { if regex.MatchString(command[1]) { userID := stripUserID(command[1]) if len(command) > 2 { reason := strings.Join(command[2:], " ") // dm user why they were banned guild, err := s.Guild(m.GuildID) if err != nil { logError("Unable to load guild! " + err.Error()) } guildName := "error: could not retrieve" if guild != nil { guildName = guild.Name } dmUser(s, userID, fmt.Sprintf("You have been banned from **%s** by %s#%s because: %s\n", guildName, m.Author.Username, m.Author.Discriminator, reason)) // ban with reason err = s.GuildBanCreateWithReason(m.GuildID, userID, reason, 0) if err != nil { attemptSendMsg(s, m, "Failed to ban the user.") logError("Failed to ban user! " + err.Error()) if err != nil { logWarning("Failed to send failure message! " + err.Error()) } return } attemptSendMsg(s, m, fmt.Sprintf(":hammer: Banned %s for the following reason: '%s'.", command[1], reason)) logSuccess("Banned user with reason without issue") } else { // ban without reason err := s.GuildBanCreate(m.GuildID, userID, 0) if err != nil { attemptSendMsg(s, m, "Failed to ban the user.") logError("Failed to ban user! " + err.Error()) return } // dm user they were banned guild, err := s.Guild(m.GuildID) if err != nil { logError("Unable to load guild! 
" + err.Error()) } guildName := "error: could not retrieve" if guild != nil { guildName = guild.Name } dmUser(s, userID, fmt.Sprintf("You have been banned from **%s** by %s#%s.\n", guildName, m.Author.Username, m.Author.Discriminator)) attemptSendMsg(s, m, fmt.Sprintf(":hammer: Banned %s.", command[1])) logSuccess("Banned user with reason without issue") } return } } attemptSendMsg(s, m, "Usage: `~ban @<user> (reason: optional)`") } /** Attempts to purge the last <number> messages, then removes the purge command. */ func attemptPurge(s *discordgo.Session, m *discordgo.MessageCreate, command []string) { logInfo(strings.Join(command, " ")) if len(command) == 2 { messageCount, err := strconv.Atoi(command[1]) if err != nil { attemptSendMsg(s, m, "Usage: `~purge <number> (optional: @user)`") return } if messageCount < 1 { attemptSendMsg(s, m, ":frowning: Sorry, you must purge at least 1 message. Try again.") logWarning("User attempted to purge < 1 message.") return } for messageCount > 0 { messagesToPurge := 0 // can only purge 100 messages per invocation if messageCount > 100 { messagesToPurge = 100 } else { messagesToPurge = messageCount } // get the last (messagesToPurge) messages from the channel messages, err := s.ChannelMessages(m.ChannelID, messagesToPurge, m.ID, "", "") if err != nil { attemptSendMsg(s, m, ":frowning: I couldn't pull messages from the channel. Try again.") logError("Failed to pull messages from channel! " + err.Error()) return } // stop purging if there is nothing left to purge if len(messages) < messagesToPurge { messageCount = 0 } // get the message IDs var messageIDs []string for _, message := range messages { messageIDs = append(messageIDs, message.ID) } // delete all the marked messages err = s.ChannelMessagesBulkDelete(m.ChannelID, messageIDs) if err != nil { logWarning("Failed to bulk delete messages! Attempting to continue... 
" + err.Error()) } messageCount -= messagesToPurge } time.Sleep(time.Second) err = s.ChannelMessageDelete(m.ChannelID, m.ID) if err != nil { logError("Failed to delete invoked command! " + err.Error()) return } logSuccess("Purged all messages, including command invoked") } else { attemptSendMsg(s, m, "Usage: `~purge <number>`") } } /** Attempts to copy over the last <number> messages to the given channel, then outputs its success */ func attemptCopy(s *discordgo.Session, m *discordgo.MessageCreate, command []string, preserveMessages bool) { logInfo(strings.Join(command, " ")) var commandInvoked string if preserveMessages { commandInvoked = "cp" } else { commandInvoked = "mv" } if len(command) != 3 { attemptSendMsg(s, m, fmt.Sprintf("Usage: `~%s <number <= 100> <#channel>`", commandInvoked)) return } messageCount, err := strconv.Atoi(command[1]) if err != nil { attemptSendMsg(s, m, "Failed to read the message count.") return } // verify correctly invoking channel if !strings.HasPrefix(command[2], "<#") || !strings.HasSuffix(command[2], ">") { attemptSendMsg(s, m, fmt.Sprintf("Usage: `~%s <number <= 100> <#channel>`", commandInvoked)) return } channel := strings.ReplaceAll(command[2], "<#", "") channel = strings.ReplaceAll(channel, ">", "") // retrieve messages from current invoked channel messages, err := s.ChannelMessages(m.ChannelID, messageCount, m.ID, "", "") if err != nil { attemptSendMsg(s, m, "Ran into an error retrieving messages. :slight_frown:") return } // construct an embed for each message for index := range messages { var embed discordgo.MessageEmbed embed.Type = "rich" message := messages[len(messages)-1-index] // remove messages if calling mv command if !preserveMessages { err := s.ChannelMessageDelete(m.ChannelID, message.ID) if err != nil { logWarning("Failed to delete a message. Attempting to continue... 
" + err.Error()) } } // populating author information in the embed var embedAuthor discordgo.MessageEmbedAuthor if message.Author != nil { member, err := s.GuildMember(m.GuildID, message.Author.ID) nickname := "" if err == nil { nickname = member.Nick } else { logWarning("Could not find a nickname for the user! " + err.Error()) } embedAuthor.Name = "" if nickname != "" { embedAuthor.Name += nickname + " (" } embedAuthor.Name += message.Author.Username + "#" + message.Author.Discriminator if nickname != "" { embedAuthor.Name += ")" } embedAuthor.IconURL = message.Author.AvatarURL("") } embed.Author = &embedAuthor // preserve message timestamp embed.Timestamp = string(message.Timestamp) var contents []*discordgo.MessageEmbedField // output message text logInfo("Message Content: " + message.Content) if message.Content != "" { embed.Description = message.Content } // output attachments logInfo(fmt.Sprintf("Attachments: %d\n", len(message.Attachments))) if len(message.Attachments) > 0 { for _, attachment := range message.Attachments { contents = append(contents, createField("Attachment: "+attachment.Filename, attachment.ProxyURL, false)) } } // output embed contents (up to 10... jesus christ...) 
logInfo(fmt.Sprintf("Embeds: %d\n", len(message.Embeds))) if len(message.Embeds) > 0 { for _, embed := range message.Embeds { contents = append(contents, createField("Embed Title", embed.Title, false)) contents = append(contents, createField("Embed Text", embed.Description, false)) if embed.Image != nil { contents = append(contents, createField("Embed Image", embed.Image.ProxyURL, false)) } if embed.Thumbnail != nil { contents = append(contents, createField("Embed Thumbnail", embed.Thumbnail.ProxyURL, false)) } if embed.Video != nil { contents = append(contents, createField("Embed Video", embed.Video.URL, false)) } if embed.Footer != nil { contents = append(contents, createField("Embed Footer", embed.Footer.Text, false)) } } } // ouput reactions on a message if len(message.Reactions) > 0 { reactionText := "" for index, reactionSet := range message.Reactions { reactionText += reactionSet.Emoji.Name + " x" + strconv.Itoa(reactionSet.Count) if index < len(message.Reactions)-1 { reactionText += ", " } } contents = append(contents, createField("Reactions", reactionText, false)) } embed.Fields = contents // send response _, err := s.ChannelMessageSendEmbed(channel, &embed) if err != nil { logError("Failed to send result message! " + err.Error()) return } } _, err = s.ChannelMessageSend(m.ChannelID, "Copied "+strconv.Itoa(messageCount)+" messages from <#"+m.ChannelID+"> to <#"+channel+">! :smile:") if err != nil { logError("Failed to send success message! " + err.Error()) return } logSuccess("Copied messages and sent success message") } /** Helper function for handleProfile. Attempts to retrieve a user's avatar and return it in an embed. 
*/ func attemptProfile(s *discordgo.Session, m *discordgo.MessageCreate, command []string) { logInfo(strings.Join(command, " ")) if len(command) == 2 { regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`) if regex.MatchString(command[1]) { userID := strings.TrimSuffix(command[1], ">") userID = strings.TrimPrefix(userID, "<@") userID = strings.TrimPrefix(userID, "!") // this means the user has a nickname var embed discordgo.MessageEmbed embed.Type = "rich" // get user user, err := s.User(userID) if err != nil { attemptSendMsg(s, m, "Error retrieving the user. :frowning:") logError("Could not retrieve user from session! " + err.Error()) return } // get member data from the user member, err := s.GuildMember(m.GuildID, userID) nickname := "" if err == nil { nickname = member.Nick } else { fmt.Println(err) } // title the embed embed.Title = "Profile Picture for " if nickname != "" { embed.Title += nickname + " (" } embed.Title += user.Username + "#" + user.Discriminator if nickname != "" { embed.Title += ")" } // attach the user's avatar as 512x512 image var image discordgo.MessageEmbedImage image.URL = user.AvatarURL("512") embed.Image = &image _, err = s.ChannelMessageSendEmbed(m.ChannelID, &embed) if err != nil { logError("Failed to send result message! " + err.Error()) return } logSuccess("Returned user profile picture") return } } attemptSendMsg(s, m, "Usage: `~profile @user`") } func attemptAbout(s *discordgo.Session, m *discordgo.MessageCreate, command []string) { logInfo(strings.Join(command, " ")) if len(command) == 2 { regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`) if regex.MatchString(command[1]) { userID := stripUserID(command[1]) logInfo(strings.Join(command, " ")) member, err := s.GuildMember(m.GuildID, userID) if err != nil { logError("Could not retrieve user from the session! " + err.Error()) attemptSendMsg(s, m, "Error retrieving the user. 
:frowning:") return } var embed discordgo.MessageEmbed embed.Type = "rich" // title the embed embed.Title = "About " + member.User.Username + "#" + member.User.Discriminator var contents []*discordgo.MessageEmbedField joinDate, err := member.JoinedAt.Parse() if err != nil { logError("Failed to parse Discord dates! " + err.Error()) attemptSendMsg(s, m, "Error parsing Discord's dates. :frowning:") return } nickname := "N/A" if member.Nick != "" { nickname = member.Nick } contents = append(contents, createField("Server Join Date", joinDate.Format("01/02/2006"), false)) contents = append(contents, createField("Nickname", nickname, false)) // get user's roles in readable form guildRoles, err := s.GuildRoles(m.GuildID) if err != nil { logError("Failed to retrieve guild roles! " + err.Error()) attemptSendMsg(s, m, "Error retrieving the guild's roles. :frowning:") return } var rolesAttached []string for _, role := range guildRoles { for _, roleID := range member.Roles { if role.ID == roleID { rolesAttached = append(rolesAttached, role.Name) } } } contents = append(contents, createField("Roles", strings.Join(rolesAttached, ", "), false)) embed.Fields = contents // send response _, err = s.ChannelMessageSendEmbed(m.ChannelID, &embed) if err != nil { logError("Couldn't send the message... " + err.Error()) return } logSuccess("Returned user information") return } } attemptSendMsg(s, m, "Usage: `~about @user`") } /** Outputs the bot's current uptime. **/ func handleUptime(s *discordgo.Session, m *discordgo.MessageCreate, start []string) { logInfo(start[0]) start_time, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", start[0]) if err != nil { logError("Could not parse start time! " + err.Error()) attemptSendMsg(s, m, "Error parsing the date... :frowning:") } attemptSendMsg(s, m, fmt.Sprintf(":robot: Uptime: %s", time.Since(start_time).Truncate(time.Second/10).String())) logSuccess("Reported uptime") } /** Toggles "deafened" state of the specified user. 
**/
func vcDeaf(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	// Requires the "Deafen Members" permission.
	if !userHasValidPermissions(s, m, discordgo.PermissionVoiceDeafenMembers) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to deafen/undeafen other members.")
		return
	}
	if len(command) != 2 {
		attemptSendMsg(s, m, "Usage: `~vcdeaf @user`")
		return
	}
	// 1. get user ID from the mention (<@id> or <@!id>)
	regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`)
	if !regex.MatchString(command[1]) {
		attemptSendMsg(s, m, "Usage: `~vcdeaf @user`")
		return
	}
	userID := stripUserID(command[1])
	logInfo(strings.Join(command, " "))
	member, err := s.GuildMember(m.GuildID, userID)
	if err != nil {
		logError("Could not retrieve user from the session! " + err.Error())
		attemptSendMsg(s, m, "Error retrieving the user. :frowning:")
		return
	}
	// 2. toggle deafened state
	err = s.GuildMemberDeafen(m.GuildID, userID, !member.Deaf)
	if err != nil {
		logError("Failed to toggle deafened state of the user! " + err.Error())
		attemptSendMsg(s, m, "Failed to toggle deafened state of the user.")
		return
	}
	attemptSendMsg(s, m, "Toggled 'deafened' state of the user.")
}

/**
Toggles "VC muted" state of the specified user.
**/
func vcMute(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	// Requires the "Mute Members" permission.
	if !userHasValidPermissions(s, m, discordgo.PermissionVoiceMuteMembers) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to mute/unmute other members.")
		return
	}
	if len(command) != 2 {
		attemptSendMsg(s, m, "Usage: `~vcmute @user`")
		return
	}
	// 1. get user ID from the mention
	regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`)
	if !regex.MatchString(command[1]) {
		// FIX: usage message previously said `~vcdeaf @user` (copy-paste error).
		attemptSendMsg(s, m, "Usage: `~vcmute @user`")
		return
	}
	userID := stripUserID(command[1])
	logInfo(strings.Join(command, " "))
	member, err := s.GuildMember(m.GuildID, userID)
	if err != nil {
		logError("Could not retrieve user from the session! " + err.Error())
		attemptSendMsg(s, m, "Error retrieving the user. :frowning:")
		return
	}
	// 2. toggle vc muted state
	err = s.GuildMemberMute(m.GuildID, userID, !member.Mute)
	if err != nil {
		logError("Failed to toggle muted state of the user! " + err.Error())
		attemptSendMsg(s, m, "Failed to toggle muted state of the user.")
		return
	}
	attemptSendMsg(s, m, "Toggled 'muted' state of the user from the channel.")
}

/**
Moves the user to the specified voice channel.
**/
func vcMove(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	// FIX: previously checked PermissionVoiceDeafenMembers; moving members
	// across voice channels is governed by the "Move Members" permission.
	if !userHasValidPermissions(s, m, discordgo.PermissionVoiceMoveMembers) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to move other members across voice channels.")
		return
	}
	if len(command) != 3 {
		attemptSendMsg(s, m, "Usage: `~vcmove @user #!<voice channel>`")
		return
	}
	// 1. get user ID from the mention
	regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`)
	if !regex.MatchString(command[1]) {
		// FIX: usage message previously said `~vcdeaf @user` (copy-paste error).
		attemptSendMsg(s, m, "Usage: `~vcmove @user #!<voice channel>`")
		return
	}
	userID := stripUserID(command[1])
	logInfo(strings.Join(command, " "))
	// 2. get voice channel ID from the channel mention
	if !strings.HasPrefix(command[2], "<#") || !strings.HasSuffix(command[2], ">") {
		attemptSendMsg(s, m, "Usage: `~vcmove @user #!<voice channel>`")
		return
	}
	channel := strings.ReplaceAll(command[2], "<#", "")
	channel = strings.ReplaceAll(channel, ">", "")
	// 3. move user to voice channel
	err := s.GuildMemberMove(m.GuildID, userID, &channel)
	if err != nil {
		logError("Failed to move the user to that channel! " + err.Error())
		attemptSendMsg(s, m, "Failed to move the user to that channel. Is it a voice channel?")
		return
	}
	attemptSendMsg(s, m, "Moved the user.")
}

/**
Kicks the specified user from the voice channel they are in, if any.
**/
func vcKick(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	// FIX: previously checked PermissionVoiceMuteMembers with a "mute/unmute"
	// error message; disconnecting a member requires "Move Members".
	if !userHasValidPermissions(s, m, discordgo.PermissionVoiceMoveMembers) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to kick members from voice channels.")
		return
	}
	if len(command) != 2 {
		attemptSendMsg(s, m, "Usage: `~vckick @user`")
		return
	}
	// 1. get user ID from the mention
	regex := regexp.MustCompile(`^\<\@\!?[0-9]+\>$`)
	if !regex.MatchString(command[1]) {
		attemptSendMsg(s, m, "Usage: `~vckick @user`")
		return
	}
	userID := stripUserID(command[1])
	// Log the invocation for consistency with the other vc* commands.
	logInfo(strings.Join(command, " "))
	// 2. remove user from voice channel (nil target channel = disconnect)
	err := s.GuildMemberMove(m.GuildID, userID, nil)
	if err != nil {
		logError("Failed to remove the user from that channel! " + err.Error())
		attemptSendMsg(s, m, "Failed to remove the user from that channel. Is it a voice channel?")
		return
	}
	attemptSendMsg(s, m, "Kicked the user from the channel.")
}

/**
Forces the bot to exit with code 0. Note that in Heroku the bot will restart automatically.
**/
func handleShutdown(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	logInfo(strings.Join(command, " "))
	// Only the hard-coded owner ID may shut the bot down.
	if m.Author.ID == "172311520045170688" {
		attemptSendMsg(s, m, "Shutting Down.")
		s.Close()
		os.Exit(0)
	} else {
		attemptSendMsg(s, m, "You dare try and go against the wishes of <@172311520045170688> ..? ")
		time.Sleep(10 * time.Second)
		attemptSendMsg(s, m, "Bruh this gonna be you when sage and his boys get here... I just pinged him so you better be afraid :slight_smile:")
		time.Sleep(2 * time.Second)
		attemptSendMsg(s, m, "https://media4.giphy.com/media/3o6Ztm3eJNDBy4NfiM/giphy.gif")
	}
}

/**
Generates an invite code to the channel in which ~invite was invoked if the user has
the permission to create instant invites.
**/
func handleInvite(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	logInfo(strings.Join(command, " "))
	if !userHasValidPermissions(s, m, discordgo.PermissionCreateInstantInvite) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to create an instant invite.")
		return
	}
	var invite discordgo.Invite
	invite.Temporary = false
	invite.MaxAge = 21600 // 6 hours
	invite.MaxUses = 0    // infinite uses
	inviteResult, err := s.ChannelInviteCreate(m.ChannelID, invite)
	if err != nil {
		attemptSendMsg(s, m, "Error creating invite. Try again in a moment.")
		logError("Failed to generate invite! " + err.Error())
		return
	} else {
		attemptSendMsg(s, m, fmt.Sprintf(":mailbox_with_mail: Here's your invitation! https://discord.gg/%s", inviteResult.Code))
	}
	logSuccess("Generated and sent invite")
}

/**
Nicknames the user if they target themselves, or nicknames a target user if the user who
invoked ~nick has the permission to change nicknames.
**/
func handleNickname(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	// FIX: guard against a bare `~nick` invocation; command[1] was previously
	// indexed without a length check, which panicked with index out of range.
	if len(command) < 2 {
		attemptSendMsg(s, m, "Usage: `~nick @user <new nickname>`")
		return
	}
	if !(userHasValidPermissions(s, m, discordgo.PermissionChangeNickname) && strings.Contains(command[1], m.Author.ID)) && !(userHasValidPermissions(s, m, discordgo.PermissionManageNicknames)) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to change nicknames.")
		return
	}
	attemptRename(s, m, command)
}

/**
Kicks a user from the server if the invoking user has the permission to kick users.
**/
func handleKick(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	if !userHasValidPermissions(s, m, discordgo.PermissionKickMembers) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to kick users.")
		return
	}
	attemptKick(s, m, command)
}

/**
Bans a user from the server if the invoking user has the permission to ban users.
**/
func handleBan(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	if !userHasValidPermissions(s, m, discordgo.PermissionBanMembers) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to ban users.")
		return
	}
	attemptBan(s, m, command)
}

/**
Removes the <number> most recent messages from the channel where the command was called.
**/
func handlePurge(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	if !userHasValidPermissions(s, m, discordgo.PermissionManageMessages) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to remove messages.")
		return
	}
	attemptPurge(s, m, command)
}

/**
Copies the <number> most recent messages from the channel where the command was called
and pastes it in the requested channel.
**/
func handleCopy(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	if !userHasValidPermissions(s, m, discordgo.PermissionManageMessages) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to manage messages.")
		return
	}
	attemptCopy(s, m, command, true)
}

/**
Same as above, but purges each message it copies
**/
func handleMove(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	if !userHasValidPermissions(s, m, discordgo.PermissionManageMessages) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to manage messages.")
		return
	}
	attemptCopy(s, m, command, false)
}

/**
Allows user to create, remove, or edit emojis associated with the server.
**/
func emoji(s *discordgo.Session, m *discordgo.MessageCreate, command []string) {
	logInfo(strings.Join(command, " "))
	// validate calling user has permission to manage emojis
	if !userHasValidPermissions(s, m, discordgo.PermissionManageEmojis) {
		attemptSendMsg(s, m, "Sorry, you aren't allowed to manage emojis.")
		return
	}
	if len(command) == 1 {
		// send usage information
		var embed discordgo.MessageEmbed
		embed.Type = "rich"
		embed.Title = "Emoji Commands"
		embed.Description = "Create, delete, or edit your server emojis quickly and easily."
		var contents []*discordgo.MessageEmbedField
		contents = append(contents, createField("emoji create <name> <url>", "Create a new server emoji with the given name using the image from the provided URL.", false))
		contents = append(contents, createField("emoji rename <emoji> <new name>", "Set an existing emoji's name to the name passed in <new name>.", false))
		contents = append(contents, createField("emoji delete <emoji>", "Remove the selected emoji from the server.", false))
		embed.Fields = contents
		_, err := s.ChannelMessageSendEmbed(m.ChannelID, &embed)
		if err != nil {
			logError("Failed to send instructions message embed! " + err.Error())
			return
		}
		logSuccess("Sent user help embed for emoji")
		return
	}
	// which command was invoked?
	switch command[1] {
	case "create":
		logInfo("User invoked create for emoji command")
		// verify correct number of arguments
		if len(command) != 4 {
			attemptSendMsg(s, m, "Usage:\n`emoji create <name> <url>`")
			return
		}
		// verify the requested NAME is alphanumeric/underscores.
		// FIX: previously validated command[1] (the literal word "create",
		// which always matches), so the emoji name was never checked.
		matched, err := regexp.MatchString(`^[a-zA-Z0-9_]*$`, command[2])
		if err != nil {
			attemptSendMsg(s, m, "Failed to determine whether the name provided was valid.")
			logError("Failed to match regex! " + err.Error())
			return
		}
		if !matched {
			attemptSendMsg(s, m, "Invalid name. Please provide a name using alphanumeric characters or underscores only.")
			return
		}
		// download the image and convert it to a base64 data URI
		resp, err := http.Get(command[3])
		if err != nil {
			attemptSendMsg(s, m, "Failed to get a response from the provided URL.")
			logError("No response from URL!" + err.Error())
			return
		}
		defer resp.Body.Close()
		bytes, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			attemptSendMsg(s, m, "Failed to read the response from the provided URL.")
			logError("Couldn't read response from URL!" + err.Error())
			return
		}
		var base64Image string
		mimeType := http.DetectContentType(bytes)
		switch mimeType {
		case "image/jpeg":
			base64Image += "data:image/jpeg;base64,"
		case "image/png":
			base64Image += "data:image/png;base64,"
		case "image/gif":
			base64Image += "data:image/gif;base64,"
		default:
			attemptSendMsg(s, m, "Invalid URL provided. Please provide a jp(e)g, png, or gif image URL.")
			return
		}
		// Discord rejects emoji images larger than 256KB.
		size, err := strconv.Atoi(resp.Header.Get("Content-Length"))
		if err != nil {
			attemptSendMsg(s, m, "Unable to detect file size from provided image URL.")
			return
		}
		downloadSize := int64(size)
		if downloadSize > 262144 {
			attemptSendMsg(s, m, fmt.Sprintf("Please choose an image with smaller file size. Image has a size %.1fKB, which is > 256KB.", float64(downloadSize)/float64(1024.0)))
			logError("Failed to create new emoji due to size constraints!")
			return
		}
		base64Image += base64.StdEncoding.EncodeToString(bytes)
		created, err := s.GuildEmojiCreate(m.GuildID, command[2], base64Image, nil)
		if err != nil {
			attemptSendMsg(s, m, fmt.Sprintf("Failed to create new emoji.\n```%s```", err.Error()))
			logError("Failed to create new emoji!" + err.Error())
			return
		}
		attemptSendMsg(s, m, fmt.Sprintf("Created emoji successfully! %s", created.MessageFormat()))
		logSuccess("Created new emoji successfully")
	case "delete":
		logInfo("User invoked delete for emoji command")
		// verify correct number of arguments
		if len(command) != 3 {
			attemptSendMsg(s, m, "Usage:\n`emoji delete <emoji>`")
			return
		}
		// validate emoji string formatting.
		// FIX: snowflake IDs are 17-19 digits, not exactly 18.
		matched, err := regexp.MatchString(`^(<a?)?:\w+:(\d{17,19}>)$`, command[2])
		if err != nil {
			attemptSendMsg(s, m, "Failed to determine whether the emoji was valid.")
			logError("Failed to match regex! " + err.Error())
			return
		}
		if !matched {
			attemptSendMsg(s, m, "Invalid argument. Please provide a valid server emoji.")
			logError("Regex did not match!")
			return
		}
		// the emoji ID is the digits between the final ':' and the closing '>'
		emojiID := strings.TrimSuffix(strings.Split(command[2], ":")[len(strings.Split(command[2], ":"))-1], ">")
		err = s.GuildEmojiDelete(m.GuildID, emojiID)
		if err != nil {
			attemptSendMsg(s, m, fmt.Sprintf("Failed to remove emoji from the server.\n```%s```", err.Error()))
			logError("Failed to remove emoji from the server! " + err.Error())
			return
		}
		attemptSendMsg(s, m, "Removed the emoji from the server.")
		logSuccess("Successfully deleted emoji")
	case "rename":
		logInfo("User invoked rename for emoji command")
		// verify correct number of arguments
		if len(command) != 4 {
			attemptSendMsg(s, m, "Usage:\n`emoji rename <emoji> <new name>`")
			return
		}
		// verify valid emoji formatting provided for 2 (see FIX above re: 17-19 digits)
		matched, err := regexp.MatchString(`^(<a?)?:\w+:(\d{17,19}>)$`, command[2])
		if err != nil {
			attemptSendMsg(s, m, "Failed to determine whether the emoji provided was valid.")
			logError("Failed to match regex! " + err.Error())
			return
		}
		if !matched {
			attemptSendMsg(s, m, "Invalid argument. Please provide a valid server emoji.")
			return
		}
		// verify name is alphanumeric for 3
		matched, err = regexp.MatchString(`^[a-zA-Z0-9_]*$`, command[3])
		if err != nil {
			attemptSendMsg(s, m, "Failed to determine whether the name provided was valid.")
			logError("Failed to match regex! " + err.Error())
			return
		}
		if !matched {
			attemptSendMsg(s, m, "Invalid name. Please provide a name using alphanumeric characters or underscores only.")
			return
		}
		emojiID := strings.TrimSuffix(strings.Split(command[2], ":")[len(strings.Split(command[2], ":"))-1], ">")
		// set new name
		_, err = s.GuildEmojiEdit(m.GuildID, emojiID, command[3], nil)
		if err != nil {
			attemptSendMsg(s, m, fmt.Sprintf("Failed to rename the emoji.\n```%s```", command[3]))
			logError("Failed to rename emoji! " + err.Error())
			return
		}
		attemptSendMsg(s, m, fmt.Sprintf("Renamed the emoji to %s.", command[3]))
		logSuccess("Successfully renamed emoji")
		return
	}
}
def elftype(self): if not self._elftype: Ehdr = {32: elf.Elf32_Ehdr, 64: elf.Elf64_Ehdr}[self.elfclass] elftype = self.leak.field(self.libbase, Ehdr.e_type) self._elftype = {constants.ET_NONE: 'NONE', constants.ET_REL: 'REL', constants.ET_EXEC: 'EXEC', constants.ET_DYN: 'DYN', constants.ET_CORE: 'CORE'}[elftype] return self._elftype
#include<stdio.h> int f[100005]={0}; int high[100005]={0}; int main(){ int n,m,u,v; scanf("%d%d",&n,&m); for(int i=1;i<=n;i++) scanf("%d",&high[i]); for(int i=1;i<=m;i++){ scanf("%d%d",&u,&v); if(high[u]>high[v]) f[v]=1; else if(high[v]>high[u]) f[u]=1; else{ f[v]=1; f[u]=1; } } int ans=0; for(int i=1;i<=n;i++) if(!f[i]) ans++; printf("%d",ans); return 0; }
<reponame>dozack/canopen-stack /****************************************************************************** Copyright 2020 Embedded Office GmbH & Co. KG Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ******************************************************************************/ #ifndef TS_VERSION_H_ #define TS_VERSION_H_ #ifdef __cplusplus /* for compatibility with C++ environments */ extern "C" { #endif /****************************************************************************** * INCLUDES ******************************************************************************/ #include "ts_env.h" /****************************************************************************** * PUBLIC DEFINES ******************************************************************************/ #define TS_NAME "Test: Framework" #define TS_VER_BASE 10 #define TS_VER_FORMAT "VVMMBB" #define TS_VERSION (10000) #if defined(__CC_ARM) # define TS_ENV_NAME "ENV : ARM Compiler" # define TS_ENV_VER_BASE 10 # define TS_ENV_VER_FORMAT "VRPBBB" # define TS_ENV_VER (__ARMCC_VERSION) #endif /* defined(__CC_ARM) */ #if defined(__DCC__) # define TS_ENV_NAME "ENV : Diab C/C++" # define TS_ENV_VER_BASE 10 # define TS_ENV_VER_FORMAT "VRPP" # define TS_ENV_VER (__VERSION_NUMBER__) #endif /* defined(__DCC__) */ #if defined(__ghs__) # define TS_ENV_NAME "ENV : Green Hill C/C++" # define TS_ENV_VER_BASE 10 # define TS_ENV_VER_FORMAT "VRP" # define TS_ENV_VER (__GHS_VERSION_NUMBER__) #endif /* defined(__ghs__) */ #if defined(__GNUC__) # define 
TS_ENV_NAME "ENV : GCC C/C++" # define TS_ENV_VER_BASE 10 # if defined(__GNUC_PATCHLEVEL__) # define TS_ENV_VER_FORMAT "RRBBPP" # define TS_ENV_VER (__GNUC__ * 10000 \ + __GNUC_MINOR__ * 100 \ + __GNUC_PATCHLEVEL__) # else # define TS_ENV_VER_FORMAT "RRBBXX" # define TS_ENV_VER (__GNUC__ * 10000 \ + __GNUC_MINOR__ * 100) # endif #endif /* defined(__GNUC__) */ #if defined(__ICCARM__) # define TS_ENV_NAME "ENV : IAR C/C++" # define TS_ENV_VER_BASE 10 # define TS_ENV_VER_FORMAT "VRR" # define TS_ENV_VER (__VER__) #endif /* defined(__ICCARM__) */ #if defined(__KEIL__) # define TS_ENV_NAME "ENV : KEIL CARM" # define TS_ENV_VER_BASE 10 # define TS_ENV_VER_FORMAT "VRR" # define TS_ENV_VER (__CA__) #endif /* defined(__KEIL__) */ #if defined(__CWCC__) # define TS_ENV_NAME "ENV : Metrowerks CodeWarrior" # define TS_ENV_VER_BASE 16 # define TS_ENV_VER_FORMAT "VRPP" # define TS_ENV_VER (__CWCC__) #endif /* defined(__CWCC__) */ #if defined(_MSC_VER) # define TS_ENV_NAME "ENV : Microsoft Visual C++" # define TS_ENV_VER_BASE 10 # define TS_ENV_VER_FORMAT "VVRR" # define TS_ENV_VER (_MSC_VER) #endif /* defined(_MSC_VER) */ #if defined(__TMS470__) # define TS_ENV_NAME "ENV : TI CCS Compiler" # define TS_ENV_VER_BASE 10 # define TS_ENV_VER_FORMAT "VVVRRRPPP" # define TS_ENV_VER (__TI_COMPILER_VERSION__) #endif /* defined(__TMS470__) */ #if !defined(TS_ENV_NAME) #error "Compiler is unknown" #endif #if defined(__CORTEX_M) # if (__CORTEX_M == 0x00) # define TS_BSP_NAME "BSP : Cortex M0 CMSIS" # define TS_BSP_VER_BASE 16 # define TS_BSP_VER_FORMAT "VVVVMMMM" # define TS_BSP_VER (__CM0_CMSIS_VERSION) # endif /* #if (__CORTEX_M == 0x00) */ # if (__CORTEX_M == 0x03) # define TS_BSP_NAME "BSP : Cortex M3 CMSIS" # define TS_BSP_VER_BASE 16 # define TS_BSP_VER_FORMAT "VVVVMMMM" # define TS_BSP_VER (__CM3_CMSIS_VERSION) # endif /* #if (__CORTEX_M == 0x03) */ # if (__CORTEX_M == 0x04) # if defined(__FPU_PRESENT) # define TS_BSP_NAME "BSP : Cortex M4F CMSIS" # else # define TS_BSP_NAME "BSP : 
Cortex M4 CMSIS" # endif # define TS_BSP_VER_BASE 16 # define TS_BSP_VER_FORMAT "VVVVMMMM" # define TS_BSP_VER (__CM4_CMSIS_VERSION) # endif /* #if (__CORTEX_M == 0x04) */ #endif /* #if defined(__CORTEX_M) */ /****************************************************************************** * PUBLIC FUNCTIONS ******************************************************************************/ /*------------------------------------------------------------------------------------------------*/ /*! * \details This function prints the given value as a version string with a given format definition * string. The format definition accepts the following digit characters: * - any letter (A-Z): print as version number * - character '-': supress the corresponding value(s) * - character 'x': print as variant 'x' * Different version/variant parts are separated with a '.' * * The number of format digits defines the multiplier of this part of the version * information. e.g. value 123 with format "vvr" is 12.3, whereas value 123 with * format "vrr" is 1.23. * * To support hex-values, the given base must be 16. This allows to print the value 0x123 * with format "vrr" as 1.23 * * \param version * Single version value, encoded as specified with the format definition * * \param format * The format specification * * \param base * Base of a single digit in the format specifier (base must be 10 or 16) */ /*------------------------------------------------------------------------------------------------*/ void TS_PrintVersion(const uint32_t version, const char * format, const uint8_t base); #ifdef __cplusplus /* for compatibility with C++ environments */ } #endif #endif /* TS_VERSION_H_ */
import React, { useState } from "react"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; import { IconProp } from "@fortawesome/fontawesome-svg-core"; import Link from "next/link"; import { motion } from "framer-motion"; interface IDropDownItems { icon: string; title: string; target: string; } interface ISidebarDropDown { dropDownItems: IDropDownItems[]; title: string; icon: IconProp; handleClick: any; } const SidebarDropDown: React.FC<ISidebarDropDown> = ({ dropDownItems, title, icon, handleClick, }) => { const [isHover, toggleHover] = useState(false); const toggleHoverMenu = () => { toggleHover(!isHover); }; const subMenuAnimate = { enter: { opacity: 1, rotateX: 0, transition: { duration: 0.2, }, display: "block", }, exit: { opacity: 0, rotateX: -15, transition: { duration: 0.2, delay: 0.3, }, transitionEnd: { display: "none", }, }, }; return ( <> <motion.div onClick={toggleHoverMenu} className="relative flex rounded-md cursor-pointer items-center hover:bg-gray-200 transition-all mx-auto justify-between" style={{ padding: "8px 35px 8px 20px" }} > <div className="text-2xl lg:text-base flex items-center justify-center"> <span className="w-5 lg:mr-2 mr-5"> <FontAwesomeIcon icon={icon as IconProp} className="text-primary-sideBarIconColor" /> </span> <span className="text-primary-sideBarIconColor">{title}</span> </div> <div className="absolute right-1"> <FontAwesomeIcon icon="chevron-down" className={`lg:text-sm text-xl text-gray-400 transition-all transform ${ isHover && "rotate-180" }`} /> </div> </motion.div> <div className="pl-6 bg-transparent"> {dropDownItems.map((item, index: number) => { return ( <motion.div initial="exit" animate={isHover ? 
"enter" : "exit"} variants={subMenuAnimate} className="w-full" onClick={() => toggleHover(false)} key={index} > <Link href={item.target}> <a onClick={handleClick} className="w-full py-2 px-4 lg:text-base text-xl flex items-center hover:bg-gray-200 rounded-md cursor-pointer transition-all" > <span className="w-5 mr-2"> <FontAwesomeIcon icon={item.icon as IconProp} className="text-primary-sideBarIconColor" /> </span> <span className="text-primary-sideBarIconColor"> {item.title} </span> </a> </Link> </motion.div> ); })} </div> </> ); }; export default SidebarDropDown;
def _precompute(self): N = self.N N[0] = sum(N[r] * r for r in range(1, self.max_r + 1)) self.Z = Z = averaging_transform.transform(N, self.max_r) self.b, self.a = self._regress(Z) assert self.b < -1, ("Log-linear slope > -1 (%f); SGT not applicable" % self.b) self.linear_cutoff = self._find_cutoff() self.norm_constant = self._find_norm_constant()
Age at menarche: risk factor for gestational diabetes Abstract This study examines the relationship between the age at menarche and gestational diabetes mellitus (GDM). This retrospective study included subjects who were diagnosed with GDM at a pregnancy polyclinic in Kocaeli, Turkey between 2014 and 2018. The mean ages at menarche were 12.6 and 13.03 years in the GDM group and control group, respectively. The analysis results showed that pre-pregnancy cycle duration, age at menarche and pre-pregnancy BMI are statistically significant in terms of the development of gestational diabetes (p < .05). Our study showed that the risk of GDM was 2.3 times higher in pregnant women with a menarche age of <12 years. If the pre-pregnancy BMI value was more than 25 kg/m2, the risk of GDM was found to be approximately 2 times higher. The study indicated that age at menarche, cycle duration, and BMI were risk factors for GDM. IMPACT STATEMENT What is already known on this subject? GDM has a lasting health impact on both the mother and the foetus. While several risk factors have previously been identified for GDM, such as family history, obesity, and advanced maternal age, significant gaps remain in our understanding of the risk factors and pathogenesis. Recent studies suggested that earlier menarche was significantly associated with an increased risk of GDM. What do the results of this study add? There might be ethnic differences in the relationship between GDM and menarche age. There is no study examining the relationship between the age of menarche and GDM in Turkey. In the presented study, we determined the risk factors of GDM, including the age of menarche, cycle duration and BMI. What are the implications of these findings for clinical practice and/or further research? A comprehensive evaluation of the menstrual history by healthcare professionals is therefore important for future pregnancy risks. It is important to understand risk factors for GDM and to establish preventive strategies among high-risk populations. In addition, this study will shed light on future epidemiological and cohort studies.
def save_data(data, file_path): output_dir = os.path.dirname(file_path) if not os.path.exists(output_dir): os.makedirs(output_dir) with open(file_path, 'wb') as data_file: np.savez(data_file, num_steps=data.num_steps, noise_free_motion=data.filter.motion_commands, real_observations=data.filter.observations, noise_free_observations=data.debug.noise_free_observations, real_robot_path=data.debug.real_robot_path, noise_free_robot_path=data.debug.noise_free_robot_path)
<reponame>AnthonyNg404/Parallel-Computing<filename>hw1-knl/dgemm-blas.c #include <cblas.h> const char* dgemm_desc = "Reference dgemm."; /* * This routine performs a dgemm operation * C := C + A * B * where A, B, and C are lda-by-lda matrices stored in column-major format. * On exit, A and B maintain their input values. * This function wraps a call to the BLAS-3 routine DGEMM, * via the standard FORTRAN interface - hence the reference semantics. */ void square_dgemm(int n, double* A, double* B, double* C) { cblas_dgemm(CblasColMajor, CblasNoTrans, CblasNoTrans, n, n, n, 1., A, n, B, n, 1., C, n); }
It is known that there is a great deal of job dissatisfaction within some professional groups of the health service system. Especially in the hospital sector, many "struggles for power" take place. Unfortunately, these struggles are often only examined with regard to individual points of view, without considering the system-oriented background. The following text reflects the discussion in the social sciences on the distribution of power within the health service system. The double meaning of the distribution of power will be explained and, by means of decisive phenomena, the development of cost and efficiency structures on the one hand, as well as the development of health-policy decision-making processes on the other, will be described. In the section "The Hospital in the 20th Century", the structures that can still be widely found today will be described and examined from a behavioural-science perspective. Finally, as a logical consequence, an urgently needed reorientation from administration towards management will be pointed out.
// protobuf_goos matches os representations. func protobufGoos(s string) string { switch s { case "darwin": return "osx" } return s }
<gh_stars>0 package files // io.go - Responsible for performing input and output operations. import ( "bytes" "config" "github.com/opalmer/lzma" "io" "io/ioutil" "os" "path/filepath" "strings" ) // File - The main object used for storing and processing a single file. type File struct { sourcepath string source *os.File tempout *os.File readsize int64 outpath string shouldCompress bool shouldEncrypt bool } func outpath(file *File) string { // Setup the output path outpath := filepath.Join(config.Destination, file.source.Name()) if file.shouldCompress { outpath += ".lzma" } if file.shouldEncrypt { outpath += ".aes" } return outpath } func readsize(file *File) int64 { stat, err := file.source.Stat() if err != nil { log.Fatal("Failed to start %s (err: %s)", file.source.Name(), err) } size := stat.Size() if size > maxReadSize { size = maxReadSize } return size } func shouldCompress(file *File) bool { name := strings.ToLower(file.source.Name()) if strings.HasSuffix(name, ".iso") { return false } return true } func shouldEncrypt(file *File) bool { name := strings.ToLower(file.source.Name()) if strings.HasSuffix(name, ".iso") { return false } return true } func compress(file *File, data []byte, bytesRead int64) ([]byte, error) { var compressed bytes.Buffer lzmaWriter := lzma.NewWriterSizeLevel( &compressed, bytesRead, lzma.BestCompression) _, err := lzmaWriter.Write(data) lzmaWriter.Close() if err != nil { log.Warningf( "Compression failed for %s (err: %s)", file.source.Name(), err) return nil, err } return compressed.Bytes(), nil } // TODO func encrypt(file *File, data []byte) ([]byte, error) { return data, nil } // Open - Opens the input and output files where applicable, also sets up the // output path. func (file *File) open() error { // Open the source file source, err := os.Open(file.sourcepath) if err != nil { return err } // Open the temporary output file. 
tempout, err := ioutil.TempFile(os.TempDir(), "gcp") if err != nil { source.Close() return err } // Establish the attributes we'll need for working // with the file. // NOTE: Order matters here. file.source = source file.tempout = tempout file.readsize = readsize(file) file.shouldCompress = shouldCompress(file) file.shouldEncrypt = shouldEncrypt(file) file.outpath = outpath(file) return nil } // Performs the main IO operations responsible for // processing the file. The results end up in the // temporary output path. func (file *File) process() error { log.Debugf("%s -> %s", file.source.Name(), file.tempout.Name()) defer file.source.Close() // Files which are neither compressed or encrypted will // just be coped over to their temporary output. if !file.shouldCompress && !file.shouldEncrypt { io.Copy(file.tempout, file.source) return nil } // Iterate over the whole file and compress and/or encrypt for { data := make([]byte, file.readsize) bytesRead, err := file.source.Read(data) bytesRead64 := int64(bytesRead) if err == io.EOF { break } else if err != nil { log.Warningf( "Failed to read %s (err: %s)", file.source.Name(), err) return err } // It's possible we didn't read as many bytes // from the file as we allocated for `data`. If this // is the case, resize data so it matches the number // of bytes read. Otherwise we end up with empty bytes // in the file we're writing to disk. if file.readsize > bytesRead64 { data = append([]byte(nil), data[:bytesRead]...) } if file.shouldCompress { data, err = compress(file, data, bytesRead64) if err != nil { return err } } if file.shouldEncrypt { data, err = encrypt(file, data) if err != nil { return err } } file.tempout.Write(data) } return nil } // Responsible for saving the file to the final location. 
func (file *File) save() error { log.Infof("%s -> %s", file.source.Name(), file.outpath) err := file.tempout.Sync() if err != nil { log.Warning("Failed to sync temp output") return err } err = file.tempout.Close() if err != nil { log.Warning("Failed to close temp output") return err } directory := filepath.Dir(file.outpath) err = os.MkdirAll(directory, 0700) if err != nil { log.Warningf("Failed to create %s", directory) return err } err = os.Rename(file.tempout.Name(), file.outpath) if err != nil { log.Warning("Failed to rename file") return err } return nil } // Performs some final cleanup in the event of an error. This is mainly // aimed at closing the file handles and removing the temp. output file. We // ignore errors in this block of code because we expect processfiles() to // call log.Fatal* soon after this function. func (file *File) clean() { if file.source != nil { file.source.Close() } if file.tempout != nil { file.tempout.Close() os.Remove(file.tempout.Name()) } }
package tree

import (
	"net/url"
	"os"
	"path"
	"path/filepath"
	"strings"

	"github.com/mgoltzsche/ctnr/pkg/fs"
	"github.com/mgoltzsche/ctnr/pkg/fs/source"
	"github.com/mgoltzsche/ctnr/pkg/idutils"
	"github.com/openSUSE/umoci/pkg/fseval"
	"github.com/opencontainers/go-digest"
	"github.com/pkg/errors"
)

// FsBuilder incrementally assembles an in-memory filesystem tree from
// directories, files, and URLs. Errors are sticky: the first failure is
// stored in err and every subsequent call becomes a no-op until FS(),
// Hash() or Write() surfaces it.
type FsBuilder struct {
	fs              fs.FsNode            // root of the tree being built
	fsEval          fseval.FsEval        // filesystem access (rootless-aware)
	sources         *source.Sources      // factory for file/overlay sources
	httpHeaderCache source.HttpHeaderCache
	err             error // first error encountered; checked by every method
}

// FromDir builds a filesystem tree by recursively copying rootfs.
func FromDir(rootfs string, rootless bool) (fs.FsNode, error) {
	b := NewFsBuilder(NewFS(), fs.NewFSOptions(rootless))
	b.CopyDir(rootfs, "/", nil)
	return b.FS()
}

// NewFsBuilder creates a builder rooted at rootfs. In rootless mode a
// restricted FsEval and attribute mapper are used so chown-style metadata
// can be represented without privileges.
func NewFsBuilder(rootfs fs.FsNode, opts fs.FSOptions) *FsBuilder {
	fsEval := fseval.DefaultFsEval
	var attrMapper fs.AttrMapper
	if opts.Rootless {
		fsEval = fseval.RootlessFsEval
		attrMapper = fs.NewRootlessAttrMapper(opts.IdMappings)
	} else {
		attrMapper = fs.NewAttrMapper(opts.IdMappings)
	}
	return &FsBuilder{
		fs:              rootfs,
		fsEval:          fsEval,
		sources:         source.NewSources(fsEval, attrMapper),
		httpHeaderCache: source.NoopHttpHeaderCache(""),
	}
}

// FS returns the built tree together with any error accumulated so far.
func (b *FsBuilder) FS() (fs.FsNode, error) {
	return b.fs, errors.Wrap(b.err, "fsbuilder")
}

// HttpHeaderCache replaces the cache consulted when adding URL sources.
func (b *FsBuilder) HttpHeaderCache(cache source.HttpHeaderCache) {
	b.httpHeaderCache = cache
}

// Hash returns the digest of the tree over the given attribute set,
// surfacing any accumulated builder error first.
func (b *FsBuilder) Hash(attrs fs.AttrSet) (d digest.Digest, err error) {
	if b.err != nil {
		return d, errors.Wrap(b.err, "fsbuilder")
	}
	return b.fs.Hash(attrs)
}

// Write serializes the tree through the provided writer, surfacing any
// accumulated builder error first.
func (b *FsBuilder) Write(w fs.Writer) error {
	if b.err != nil {
		return errors.Wrap(b.err, "fsbuilder")
	}
	return b.fs.Write(w)
}

// fileSourceFactory abstracts how a single file becomes an fs.Source
// (plain file vs. overlay-aware, e.g. archive extraction).
type fileSourceFactory func(file string, fi os.FileInfo, usr *idutils.UserIds) (fs.Source, error)

// createFile wraps a file as a plain source (no archive extraction).
func (b *FsBuilder) createFile(file string, fi os.FileInfo, usr *idutils.UserIds) (fs.Source, error) {
	return b.sources.File(file, fi, usr)
}

// createOverlayOrFile wraps a file as an overlay source when supported
// (e.g. tar archives are extracted), otherwise as a plain file source.
func (b *FsBuilder) createOverlayOrFile(file string, fi os.FileInfo, usr *idutils.UserIds) (fs.Source, error) {
	return b.sources.FileOverlay(file, fi, usr)
}

// Copies all files that match the provided glob source pattern.
// Source tar archives are extracted into dest.
// Source URLs are also supported.
// See https://docs.docker.com/engine/reference/builder/#add
func (b *FsBuilder) AddAll(srcfs string, sources []string, dest string, usr *idutils.UserIds) {
	if b.err != nil {
		return
	}
	if len(sources) == 0 {
		b.err = errors.New("add: no source provided")
		return
	}
	// Multiple sources force dest to be treated as a directory.
	if len(sources) > 1 {
		dest = filepath.Clean(dest) + string(filepath.Separator)
	}
	for _, src := range sources {
		if isUrl(src) {
			b.AddURL(src, dest)
			if b.err != nil {
				return
			}
		} else {
			if err := b.copy(srcfs, src, dest, usr, b.createOverlayOrFile); err != nil {
				b.err = errors.Wrap(err, "add "+src)
				return
			}
		}
	}
}

// AddURL registers a download source at dest. The content is not fetched
// here; the URL becomes a source node in the tree.
func (b *FsBuilder) AddURL(rawURL, dest string) {
	url, err := url.Parse(rawURL)
	if err != nil {
		b.err = errors.Wrapf(err, "add URL %s", url)
		return
	}
	// append source base name to dest if dest ends with /
	if dest, err = destFilePath(path.Dir(url.Path), dest); err != nil {
		b.err = errors.Wrapf(err, "add URL %s", url)
		return
	}
	if _, err = b.fs.AddUpper(dest, source.NewSourceURL(url, b.httpHeaderCache, idutils.UserIds{})); err != nil {
		b.err = errors.Wrapf(err, "add URL %s", url)
		return
	}
}

// Copies all files that match the provided glob source pattern to dest.
// See https://docs.docker.com/engine/reference/builder/#copy func (b *FsBuilder) CopyAll(srcfs string, sources []string, dest string, usr *idutils.UserIds) { if b.err != nil { return } if len(sources) == 0 { b.err = errors.New("copy: no source provided") return } if len(sources) > 1 { dest = filepath.Clean(dest) + string(filepath.Separator) } for _, src := range sources { if err := b.copy(srcfs, src, dest, usr, b.createOverlayOrFile); err != nil { b.err = errors.Wrap(err, "copy "+src) return } } } func (b *FsBuilder) copy(srcfs, src, dest string, usr *idutils.UserIds, factory fileSourceFactory) (err error) { // sources from glob pattern src = filepath.Join(srcfs, src) matches, err := filepath.Glob(src) if err != nil { return errors.Wrap(err, "source file pattern") } if len(matches) == 0 { return errors.Errorf("source pattern %q does not match any files", src) } if len(matches) > 1 { dest = filepath.Clean(dest) + string(filepath.Separator) } for _, file := range matches { origSrcName := filepath.Base(file) if file, err = secureSourceFile(srcfs, file); err != nil { return } if err = b.addFiles(file, origSrcName, dest, usr, factory); err != nil { return } } return } func (b *FsBuilder) AddFiles(srcFile, dest string, usr *idutils.UserIds) { if b.err != nil { return } if err := b.addFiles(srcFile, filepath.Base(srcFile), dest, usr, b.createFile); err != nil { b.err = err } } func (b *FsBuilder) addFiles(srcFile, origSrcName, dest string, usr *idutils.UserIds, factory fileSourceFactory) (err error) { fi, err := b.fsEval.Lstat(srcFile) if err != nil { return } if fi.IsDir() { var parent fs.FsNode if parent, err = b.fs.Mkdirs(dest); err != nil { return } err = b.copyDirContents(srcFile, dest, parent, usr) } else { var src fs.Source if src, err = factory(srcFile, fi, usr); err != nil { return } t := src.Attrs().NodeType if t != fs.TypeDir && t != fs.TypeOverlay { // append source base name to dest if dest ends with / if dest, err = destFilePath(origSrcName, dest); err != 
nil { return } } _, err = b.fs.AddUpper(dest, src) } return } // Copies the directory recursively including the directory itself. func (b *FsBuilder) CopyDir(srcFile, dest string, usr *idutils.UserIds) { if b.err != nil { return } fi, err := b.fsEval.Lstat(srcFile) if err != nil { b.err = errors.WithMessage(err, "add") return } _, err = b.copyFiles(srcFile, dest, b.fs, fi, usr) b.err = errors.WithMessage(err, "add") } // Adds file/directory recursively func (b *FsBuilder) copyFiles(file, dest string, parent fs.FsNode, fi os.FileInfo, usr *idutils.UserIds) (r fs.FsNode, err error) { src, err := b.sources.File(file, fi, usr) if err != nil { return } if src == nil || src.Attrs().NodeType == "" { panic("no source returned or empty node type received from source") } r, err = parent.AddUpper(dest, src) if err != nil { return } if src.Attrs().NodeType == fs.TypeDir { err = b.copyDirContents(file, dest, r, usr) } return } // Adds directory contents recursively func (b *FsBuilder) copyDirContents(dir, dest string, parent fs.FsNode, usr *idutils.UserIds) (err error) { files, err := b.fsEval.Readdir(dir) if err != nil { return errors.New(err.Error()) } for _, f := range files { childSrc := filepath.Join(dir, f.Name()) if _, err = b.copyFiles(childSrc, f.Name(), parent, f, usr); err != nil { return } } return } func secureSourceFile(root, file string) (f string, err error) { // TODO: use fseval if f, err = filepath.EvalSymlinks(file); err != nil { return "", errors.Wrap(err, "secure source") } if !filepath.HasPrefix(f, root) { err = errors.Errorf("secure source: source file %s is outside context directory", file) } return } func destFilePath(srcFileName string, dest string) (string, error) { if strings.HasSuffix(dest, "/") { if srcFileName == "" { return "", errors.Errorf("cannot derive file name for destination %q from source. 
Please specify file name within destination!", dest) } return filepath.Join(dest, srcFileName), nil } return dest, nil } func isUrl(v string) bool { v = strings.ToLower(v) return strings.HasPrefix(v, "https://") || strings.HasPrefix(v, "http://") }
############################################ # Copyright (c) 2012 <NAME> <EMAIL> # # Check if the given graph has a Hamiltonian cycle. # # Author: <NAME> <EMAIL> ############################################ from z3 import * def gencon(gr): """ Input a graph as an adjacency list, e.g. {0:[1,2], 1:[2], 2:[1,0]}. Produces solver to check if the given graph has a Hamiltonian cycle. Query the solver using s.check() and if sat, then s.model() spells out the cycle. Two example graphs from http://en.wikipedia.org/wiki/Hamiltonian_path are tested. ======================================================= Explanation: Generate a list of Int vars. Constrain the first Int var ("Node 0") to be 0. Pick a node i, and attempt to number all nodes reachable from i to have a number one higher (mod L) than assigned to node i (use an Or constraint). ======================================================= """ L = len(gr) cv = [Int('cv%s'%i) for i in range(L)] s = Solver() s.add(cv[0]==0) for i in range(L): s.add(Or([cv[j]==(cv[i]+1)%L for j in gr[i]])) return s def examples(): # Example Graphs: The Dodecahedral graph from http://en.wikipedia.org/wiki/Hamiltonian_path grdodec = { 0: [1, 4, 5], 1: [0, 7, 2], 2: [1, 9, 3], 3: [2, 11, 4], 4: [3, 13, 0], 5: [0, 14, 6], 6: [5, 16, 7], 7: [6, 8, 1], 8: [7, 17, 9], 9: [8, 10, 2], 10: [9, 18, 11], 11: [10, 3, 12], 12: [11, 19, 13], 13: [12, 14, 4], 14: [13, 15, 5], 15: [14, 16, 19], 16: [6, 17, 15], 17: [16, 8, 18], 18: [10, 19, 17], 19: [18, 12, 15] } import pprint pp = pprint.PrettyPrinter(indent=4) pp.pprint(grdodec) sdodec=gencon(grdodec) print(sdodec.check()) print(sdodec.model()) # ======================================================= # See http://en.wikipedia.org/wiki/Hamiltonian_path for the Herschel graph # being the smallest possible polyhdral graph that does not have a Hamiltonian # cycle. 
# grherschel = { 0: [1, 9, 10, 7], 1: [0, 8, 2], 2: [1, 9, 3], 3: [2, 8, 4], 4: [3, 9, 10, 5], 5: [4, 8, 6], 6: [5, 10, 7], 7: [6, 8, 0], 8: [1, 3, 5, 7], 9: [2, 0, 4], 10: [6, 4, 0] } pp.pprint(grherschel) sherschel=gencon(grherschel) print(sherschel.check()) # ======================================================= if __name__ == "__main__": examples()
<gh_stars>10-100 //go:build go1.18 package go2linq import ( "math" "reflect" "testing" ) // https://github.com/jskeet/edulinq/blob/master/src/Edulinq.Tests/MinTest.cs // https://github.com/jskeet/edulinq/blob/master/src/Edulinq.Tests/MaxTest.cs func Test_Min_string_int(t *testing.T) { type args struct { source Enumerator[string] selector func(string) int lesser Lesser[int] } tests := []struct { name string args args want int wantErr bool expectedErr error }{ {name: "NilSelector", args: args{ source: Empty[string](), lesser: Order[int]{}, }, wantErr: true, expectedErr: ErrNilSelector, }, {name: "EmptySequenceWithSelector", args: args{ source: Empty[string](), selector: func(s string) int { return len(s) }, lesser: Order[int]{}, }, wantErr: true, expectedErr: ErrEmptySource, }, {name: "SimpleSequenceWithSelector", args: args{ source: NewOnSlice("xyz", "ab", "abcde", "0"), selector: func(s string) int { return len(s) }, lesser: Order[int]{}, }, want: 1, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := Min(tt.args.source, tt.args.selector, tt.args.lesser) if (err != nil) != tt.wantErr { t.Errorf("Min() error = %v, wantErr %v", err, tt.wantErr) return } if tt.wantErr { if err != tt.expectedErr { t.Errorf("Min() error = '%v', expectedErr '%v'", err, tt.expectedErr) } return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("Min() = %v, want %v", got, tt.want) } }) } } func Test_Min_string_rune(t *testing.T) { type args struct { source Enumerator[string] selector func(string) rune lesser Lesser[rune] } tests := []struct { name string args args want rune wantErr bool expectedErr error }{ {name: "SimpleSequenceWithSelector2", args: args{ source: NewOnSlice("xyz", "ab", "abcde", "0"), selector: func(s string) rune { return []rune(s)[0] }, lesser: LesserFunc[rune](func(r1, r2 rune) bool { return r1 < r2 }), }, want: '0', }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := Min(tt.args.source, tt.args.selector, 
tt.args.lesser) if (err != nil) != tt.wantErr { t.Errorf("MinErr() error = %v, wantErr %v", err, tt.wantErr) return } if tt.wantErr { if err != tt.expectedErr { t.Errorf("Min() error = '%v', expectedErr '%v'", err, tt.expectedErr) } return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("Min() = %v, want %v", got, tt.want) } }) } } func Test_MinEl_string_int(t *testing.T) { type args struct { source Enumerator[string] selector func(string) int lesser Lesser[int] } tests := []struct { name string args args want string }{ {name: "MinElement", args: args{ source: NewOnSlice("xyz", "ab", "abcde", "0"), selector: func(s string) int { return len(s) }, lesser: Order[int]{}, }, want: "0", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := MinEl(tt.args.source, tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("MinEl() = %v, want %v", got, tt.want) } }) } } func Test_MinEl_string_rune(t *testing.T) { type args struct { source Enumerator[string] selector func(string) rune lesser Lesser[rune] } tests := []struct { name string args args want string }{ {name: "MinElement2", args: args{ source: NewOnSlice("xyz", "ab", "abcde", "0"), selector: func(s string) rune { return []rune(s)[0] }, lesser: LesserFunc[rune](func(r1, r2 rune) bool { return r1 < r2 }), }, want: "0", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := MinEl(tt.args.source, tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("MinEl() = %v, want %v", got, tt.want) } }) } } func Test_Min_int(t *testing.T) { type args struct { source Enumerator[int] selector func(int) int lesser Lesser[int] } tests := []struct { name string args args want int }{ {name: "SimpleSequenceNoSelector", args: args{ source: NewOnSlice(5, 10, 6, 2, 13, 8), selector: Identity[int], lesser: Order[int]{}, }, want: 2, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := Min(tt.args.source, 
tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("Min() = %v, want %v", got, tt.want) } }) } } func Test_Min_float64(t *testing.T) { type args struct { source Enumerator[float64] selector func(float64) float64 lesser Lesser[float64] } tests := []struct { name string args args want float64 }{ {name: "SequenceContainingBothInfinities", args: args{ source: NewOnSlice(1., math.Inf(+1), math.Inf(-1)), selector: Identity[float64], lesser: Order[float64]{}, }, want: math.Inf(-1), }, {name: "SequenceContainingNaN", args: args{ source: NewOnSlice(1., math.Inf(+1), math.NaN(), math.Inf(-1)), selector: Identity[float64], lesser: Order[float64]{}, }, want: math.Inf(-1), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := Min(tt.args.source, tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("Min() = %v, want %v", got, tt.want) } }) } } func Test_Max_string_int(t *testing.T) { type args struct { source Enumerator[string] selector func(string) int lesser Lesser[int] } tests := []struct { name string args args want int }{ {name: "SimpleSequenceWithSelector", args: args{ source: NewOnSlice("xyz", "ab", "abcde", "0"), selector: func(s string) int { return len(s) }, lesser: Order[int]{}, }, want: 5, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := Max(tt.args.source, tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("Max() = %v, want %v", got, tt.want) } }) } } func Test_Max_string_rune(t *testing.T) { type args struct { source Enumerator[string] selector func(string) rune lesser Lesser[rune] } tests := []struct { name string args args want rune }{ {name: "SimpleSequenceWithSelector", args: args{ source: NewOnSlice("zyx", "ab", "abcde", "0"), selector: func(s string) rune { return []rune(s)[0] }, lesser: LesserFunc[rune](func(r1, r2 rune) bool { return r1 < r2 }), }, want: 'z', }, } for _, tt := range tests { t.Run(tt.name, func(t 
*testing.T) { if got, _ := Max(tt.args.source, tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("Max() = %v, want %v", got, tt.want) } }) } } func Test_Max_int(t *testing.T) { type args struct { source Enumerator[int] selector func(int) int lesser Lesser[int] } tests := []struct { name string args args want int }{ {name: "SimpleSequenceWithSelector", args: args{ source: NewOnSlice(5, 10, 6, 2, 13, 8), selector: Identity[int], lesser: Order[int]{}, }, want: 13, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := Max(tt.args.source, tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("Max() = %v, want %v", got, tt.want) } }) } } func Test_Max_float64(t *testing.T) { type args struct { source Enumerator[float64] selector func(float64) float64 lesser Lesser[float64] } tests := []struct { name string args args want float64 }{ {name: "SimpleSequenceFloat64", args: args{ source: NewOnSlice(-2.5, 2.5, 0.), selector: Identity[float64], lesser: Order[float64]{}, }, want: 2.5, }, {name: "SequenceContainingBothInfinities", args: args{ source: NewOnSlice(1., math.Inf(+1), math.Inf(-1)), selector: Identity[float64], lesser: Order[float64]{}, }, want: math.Inf(+1), }, {name: "SequenceContainingNaN", args: args{ source: NewOnSlice(1., math.Inf(+1), math.NaN(), math.Inf(-1)), selector: Identity[float64], lesser: Order[float64]{}, }, want: math.Inf(+1), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := Max(tt.args.source, tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("Max() = %v, want %v", got, tt.want) } }) } } func Test_MaxEl_string_int(t *testing.T) { type args struct { source Enumerator[string] selector func(string) int lesser Lesser[int] } tests := []struct { name string args args want string }{ {name: "MaxElement", args: args{ source: NewOnSlice("xyz", "ab", "abcde", "0"), selector: func(s string) int { return len(s) }, lesser: 
Order[int]{}, }, want: "abcde", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := MaxEl(tt.args.source, tt.args.selector, tt.args.lesser); !reflect.DeepEqual(got, tt.want) { t.Errorf("MaxEl() = %v, want %v", got, tt.want) } }) } }
def plot_hloops_90(to_show=[57,155], filename='hloop-compare-90', **kwargs):
    """Plot a 2x2 comparison of hysteresis-loop stray-field measurements.

    Each row shows one measurement number from `to_show` (left: "Plusses"
    channel Bx8, right: "Crosses" channel Bx9), with a "Difference (fitted)"
    inset per panel. The figure is saved to "<filename>.pdf".

    Relies on module-level globals: `ana`, `meas`, `sns`, `plt`, `np`,
    `scipy`, `inset_axes` — TODO confirm these are defined at import time.

    Keyword args used: `mirror` (flips sign handling/inset placement for
    measurement 57), `xlim` (tuple overriding the default (-750, 750)).

    NOTE(review): the mutable default `to_show=[57,155]` is never mutated
    here, so it is harmless, but a tuple default would be safer.
    NOTE(review): titles like '$90^\\circ$' use '\\c' in a non-raw string,
    which is an invalid escape sequence (DeprecationWarning on newer
    Pythons); raw strings would be equivalent and warning-free.
    """
    ana.set_sns(default=True, grid=True, size='talk', style='ticks',
                palette='deep', latex=True)
    sns.set_palette(sns.color_palette("deep"))
    fig, axes = plt.subplots(2, 2, figsize=(16,12))
    for i, nr in enumerate(to_show):
        cur_ax = axes[i]
        ana.set_sns(size='talk', style='ticks', palette='Paired',
                    grid=True, latex=True)
        # (Re)load the measurement and cache it in the module-level dict.
        meas[nr] = ana.Hloop(nr)
        if not(kwargs.get('mirror')) and nr == 57:
            # NOTE(review): set_factor(-1) immediately followed by
            # factor = 1 looks contradictory — presumably set_factor
            # transforms the data and the attribute is then reset so later
            # plotting does not apply the sign twice; verify in ana.Hloop.
            meas[nr].set_factor(-1)
            meas[nr].factor = 1

        # --- Left panel: "Plusses" sensor (Bx8) ---
        ax = cur_ax[0]
        meas[nr].plot_strayfield(ax, nolegend=True, show_plusses=False,
                                 show_crosses=False)
        ax.plot(meas[nr].up_fitted.B, meas[nr].up_fitted.Bx8,
                label='Up (fitted)')
        ax.plot(meas[nr].down_fitted.B, meas[nr].down_fitted.Bx8,
                label='Down (fitted)')
        if kwargs.get('xlim'):
            ax.set_xlim(*kwargs.get('xlim'))
        else:
            ax.set_xlim(-750, 750)
        # Pad y-limits slightly beyond the fitted branches' extrema.
        ax.set_ylim(np.min([meas[nr].up_fitted.Bx8.min(),
                            meas[nr].down_fitted.Bx8.min()])-.05,
                    np.max([meas[nr].up_fitted.Bx8.max(),
                            meas[nr].down_fitted.Bx8.max()])+.05)
        if nr == 57:
            ax.set_title('M57: Plusses ($90^\circ$)')
        else:
            ax.set_title('M155: Plusses ($-90^\circ$)')
        with sns.color_palette('bright'):
            # Place the inset in the empty corner of the loop, which depends
            # on the measurement's sign convention / `mirror` flag.
            if nr == 57 and not(kwargs.get('mirror')):
                bbox = (.11, .59, .34, .35)
            else:
                bbox = (.11, .12, .34, .35)
            inset = inset_axes(ax, width='100%', height='100%',
                               bbox_to_anchor=bbox,
                               bbox_transform=ax.transAxes)
            max_b = meas[nr].up.B.max()
            B_ext, B_stray = meas[nr].get_downminusup_strayfield(
                fitted_data=True)
            # Zero-difference reference line across the full field range.
            inset.plot([-max_b, max_b], [0, 0], '--', color='orange',
                       linewidth=.75)
            inset.plot(B_ext, B_stray)
            inset.set_xlim(-750, 750)
            inset.set_title("Difference (fitted)")
        ax.legend(loc='upper right')

        # --- Right panel: "Crosses" sensor (Bx9) ---
        ax = cur_ax[1]
        meas[nr].plot_strayfield(ax, nolegend=True, show_plusses=False,
                                 show_crosses=False)
        ax.plot(meas[nr].up_fitted.B, meas[nr].up_fitted.Bx9,
                label='Up (fitted)')
        ax.plot(meas[nr].down_fitted.B, meas[nr].down_fitted.Bx9,
                label='Down (fitted)')
        if kwargs.get('xlim'):
            ax.set_xlim(*kwargs.get('xlim'))
        else:
            ax.set_xlim(-750, 750)
        ax.set_ylim(np.min([meas[nr].up_fitted.Bx9.min(),
                            meas[nr].down_fitted.Bx9.min()])-.05,
                    np.max([meas[nr].up_fitted.Bx9.max(),
                            meas[nr].down_fitted.Bx9.max()])+.05)
        if nr == 57:
            ax.set_title('M57: Crosses ($90^\circ$)')
        else:
            ax.set_title('M155: Crosses ($-90^\circ$)')
        with sns.color_palette('bright'):
            if nr == 57 and kwargs.get('mirror'):
                bbox = (.11, .12, .34, .35)
            else:
                bbox = (.11, .59, .34, .35)
            inset2 = inset_axes(ax, width='100%', height='100%',
                                bbox_to_anchor=bbox,
                                bbox_transform=ax.transAxes)
            # For Bx9 the down-minus-up difference is computed manually by
            # interpolating both fitted branches onto a common field axis.
            f_up = scipy.interpolate.interp1d(meas[nr].up_fitted.B,
                                              meas[nr].up_fitted.Bx9)
            f_down = scipy.interpolate.interp1d(meas[nr].down_fitted.B,
                                                meas[nr].down_fitted.Bx9)
            B = np.linspace(meas[nr].up.B.min(), meas[nr].up.B.max(),
                            int(1e5))
            downminusup = f_down(B) - f_up(B)
            inset2.plot([-max_b, max_b], [0, 0], '--', color='orange',
                        linewidth=.75)
            inset2.plot(B, downminusup, color='green')
            inset2.set_xlim(-750, 750)
            inset2.set_title("Difference (fitted)")
        if nr == 57:
            # Manual tick labels for the M57 insets; positions are assumed
            # to match the auto-generated ticks — TODO confirm.
            inset.set_yticklabels(['', '$0.0$', '', '$0.5$'])
            inset2.set_yticklabels(['', '$0.0$', '', '$0.5$'])
        ax.legend(loc='upper right')

    plt.savefig("%s.pdf" % filename)
// Return true if the string consists solely of decimal digits '0'..'9'.
// Returns true for the empty string (no non-digit found), matching the
// original behavior.
// FIX: the previous loop passed a plain (possibly signed) char to isdigit(),
// which is undefined behavior for negative values (e.g. UTF-8 bytes); the
// argument is now converted through unsigned char as the standard requires.
bool str_is_nbr (std::string snm)
{
    return std::all_of(snm.begin(), snm.end(),
                       [](unsigned char c) { return std::isdigit(c) != 0; });
}
/// Wait on the event. This blocks the current thread.
///
/// Fast path: if the event is already notified, consume the notification
/// (for `AutoUnsignal` events the flag is cleared so only this waiter is
/// released) and return without blocking.
///
/// Slow path: enqueue the current thread on the event's wait queue while
/// holding the inner lock, mark it `Blocked`, release the lock, and yield.
pub fn wait(&self) {
    let current_thread = Thread::current();
    let mut inner = self.inner.lock();
    if inner.notified {
        if inner.variant == EventVariant::AutoUnsignal {
            // Auto-unsignal: each notification wakes at most one waiter,
            // so consume it here.
            inner.notified = false;
        }
    } else {
        // unnotified, block here
        unsafe {
            // Safety: push is unsafe per the queue's contract — presumably
            // it requires the inner lock to be held (which it is here);
            // TODO confirm against the queue's documentation.
            inner.queue.push(current_thread);
        }
        // Mark ourselves blocked before dropping the lock so a notifier
        // that acquires it next observes a consistent queue/state pair —
        // NOTE(review): verify the notifier's wakeup path tolerates the
        // window between drop(inner) and yield_now().
        current_thread.set_state(State::Blocked);
        // Release the lock before yielding to avoid holding it while
        // descheduled.
        drop(inner);
        Thread::yield_now();
    }
}
import { gql } from '@apollo/client';

// Fetch the currently authenticated user (id, username and the ids of the
// recipes they have liked).
export const GET_AUTHORIZED_USER = gql`
  query {
    currentUser {
      id
      username
      likedRecipes
    }
  }
`;

// Shared field selection for Recipe objects, spread into the queries below
// via `...RecipeDetails`.
const RECIPE_DETAILS = gql`
  fragment RecipeDetails on Recipe {
    id
    name
    pictureUrl
    preparationTimeInMinutes
    numberOfServings
    longDescription
    tags
    ingredients
    stepByStepDirections
  }
`;

// Fetch every recipe with full details.
export const GET_RECIPES = gql`
  query {
    allRecipes {
      ...RecipeDetails
    }
  }
  ${RECIPE_DETAILS}
`;

// Fetch the recipes liked by the currently authenticated user.
export const LIKED_RECIPES = gql`
  query {
    likedRecipesByCurrentUser {
      ...RecipeDetails
    }
  }
  ${RECIPE_DETAILS}
`;

// Boolean check: has the current user liked the recipe with the given id?
export const IS_RECIPE_LIKED_BY_CURRENT_USER = gql`
  query isRecipeLikedByCurrentUser($id: ID!) {
    isRecipeLikedByCurrentUser(id: $id)
  }
`;

// Fetch the recipes created by the currently authenticated user.
export const RECIPES_ADDED_BY_USER = gql`
  query {
    recipesAddedByCurrentUser {
      ...RecipeDetails
    }
  }
  ${RECIPE_DETAILS}
`;
def _GetEntityGroup(ref):
  """Returns the entity-group key for the given entity key.

  Copies the Reference proto and truncates its path to the first element:
  the entity group is identified by the root entity of the key's path.

  Args:
    ref: an entity_pb.Reference. Its root path element must already have an
      id or name assigned (asserted below) — i.e. the key must be complete
      at the root.

  Returns:
    A new entity_pb.Reference whose path contains only the root element.
    The input `ref` is not modified.
  """
  entity_group = entity_pb.Reference()
  entity_group.CopyFrom(ref)
  assert (entity_group.path().element_list()[0].has_id() or
          entity_group.path().element_list()[0].has_name())
  # Truncate the copied path in place, keeping only the root element.
  del entity_group.path().element_list()[1:]
  return entity_group
import React, { useEffect, useState } from 'react';
import {
  Steps,
  Button,
  Upload,
  Input,
  Statistic,
  Slider,
  Progress,
  Spin,
  InputNumber,
  Form,
} from 'antd';
import { IMetadataExtension, MAX_METADATA_LEN } from '@oyster/common';
import { Connection } from '@solana/web3.js';
import { MintLayout } from '@solana/spl-token';
import {
  getAssetCostToStore,
  LAMPORT_MULTIPLIER,
} from '../../../../utils/assets';
import { ArtCard } from '../../../../components/ArtCard';
import { AmountLabel } from '../../../../components/AmountLabel';
import './index.less';

// Launch step of the art-creation wizard: previews the NFT, estimates the
// total SOL cost (storage + rent), and offers payment CTAs.
// `props.confirm` advances the flow; `props.progress` > 0 means an upload
// is already in flight, in which case the cost estimate is skipped.
export default (props: {
  confirm: () => void;
  attributes: IMetadataExtension;
  connection: Connection;
  progress: number;
}) => {
  // Actual File objects to upload (string entries are pre-existing URIs).
  const files = (props.attributes.properties?.files || []).filter(
    f => typeof f !== 'string',
  ) as File[];
  // Display names for all files, whether string URIs or File objects.
  const fileNames = (props.attributes.properties?.files || []).map(f =>
    typeof f === 'string' ? f : f?.name,
  );
  // Metadata payload as it will be serialized into metadata.json.
  const metadata = {
    ...(props.attributes as any),
    files: fileNames,
  };
  // Estimated total cost in SOL (0 until the async estimate resolves).
  const [cost, setCost] = useState(0);
  useEffect(() => {
    // Rent exemption for the mint account and the metadata account.
    const rentCall = Promise.all([
      props.connection.getMinimumBalanceForRentExemption(MintLayout.span),
      props.connection.getMinimumBalanceForRentExemption(MAX_METADATA_LEN),
    ]);
    // NOTE(review): `files` is a fresh array every render, so this effect
    // re-runs each render; the `cost !== ...` guard below prevents a
    // setState loop, but `cost` is read from a stale closure — consider
    // memoizing `files` and including `cost` in the dependency list.
    if (files.length && props.progress <= 0)
      getAssetCostToStore([
        ...files,
        new File([JSON.stringify(metadata)], 'metadata.json'),
      ]).then(async lamports => {
        const sol = lamports / LAMPORT_MULTIPLIER;

        // TODO: cache this and batch in one call
        const [mintRent, metadataRent] = await rentCall;

        // const uriStr = 'x';
        // let uriBuilder = '';
        // for (let i = 0; i < MAX_URI_LENGTH; i++) {
        //   uriBuilder += uriStr;
        // }

        const additionalSol = (metadataRent + mintRent) / LAMPORT_MULTIPLIER;

        // TODO: add fees based on number of transactions and signers
        if (sol + additionalSol !== cost) {
          setCost(sol + additionalSol);
        }
      });
  }, [files, setCost]);

  return (
    <>
      <div className="launch">
        <div className="grid">
          <div className="grid--3_cols grid--4_offset">
            {props.attributes.image && (
              <ArtCard
                image={props.attributes.image}
                file={fileNames?.[0] || ''}
                category={props.attributes.properties?.category}
                name={props.attributes.name}
                symbol={props.attributes.symbol}
                small={true}
              />
            )}
          </div>
          <div className="grid--3_cols">
            <Statistic
              className="launch__royalty_percentage"
              title="Royalty Percentage"
              value={props.attributes.seller_fee_basis_points}
              precision={2}
              suffix="%"
            />
            {cost ? (
              <AmountLabel
                title="Cost to Create"
                newClasses="launch__royalty_percentage"
                amount={cost.toFixed(5)}
              />
            ) : (
              <Spin />
            )}
            <div className="launch__royalty_ctas">
              <div className="launch__royalty_cta_el">
                <Button
                  type="primary"
                  size="large"
                  onClick={props.confirm}
                  className="next_cta"
                >
                  Pay with SOL
                </Button>
              </div>
              <div className="launch__royalty_cta_el">
                <Button
                  disabled={true}
                  size="large"
                  onClick={props.confirm}
                  className="next_cta"
                >
                  Pay with Credit Card
                </Button>
              </div>
            </div>
          </div>
        </div>
      </div>
    </>
  );
};
This article originally appeared on ModernFarmer.com. Related Content Are Floating Farms in Our Future? Tommy Romano never thought he’d be a farmer. On the surface, his professional background seems about as far from agriculture as you can get. He studied Bioastronautics at the University of Colorado Boulder, and after graduating, worked for several aerospace companies in California and Colorado. Romano’s duties included test engineering, satellite operations, ground station development and operations, control system design and data analysis—in other words, he helped develop ways to help keep humans alive and healthy in outer space. These days, however, he’s using his knowledge of space-habitat design to advance food systems right here on Earth. So, what do astronauts and arugula have in common? They both need the same things to live. “Just like humans, plants need nutrients, vitamins, water at the right temperature, and sunlight,” Romano says. “I use my knowledge of controlled environments to maintain the optimum conditions for the plants at all times within the growing environment. We make sure all those parameters are in the right range so the plants can grow.” Romano’s operation, Infinite Harvest uses similar technology and philosophies to those used on the space station—only on terra firma. Unlike other vertical farms that use market-ready technology, Romano developed Infinite Harvest’s proprietary building management system from the ground up, using his knowledge of space habitat design. It controls all aspects of the air, water, and lighting systems. “Much like the International Space Station, we use and reuse as many things as possible to minimize waste and increase energy and water efficiencies. Recycling and regenerative systems are highly integrated into our design,” he says. “For example, the heat exhaust waste from one component in the system may be used to heat something else in the facility, rather than using a boiler to specifically create heat. 
This drives down the energy required and waste produced by the system.” Just as every aspect of space-station operations are monitored closely by automated systems, so are Infinite Harvest’s leafy greens. “In space, if something starts to fail, or an emergency arises, the computer system will either alert the crew, take some autonomous corrective or safe-ing action, or both,” Romano explains. “Our building management system does the same thing, ensuring continuous operations so that the plants never stop growing.” By now, you’ve probably heard about vertical farms and the many advantages this way of growing has over open farming. Vertical farming can be done all year long in urban environments with no lulls in food production. Floods, droughts, hail, and pests aren’t issues because everything about the plants’ environment—temperature, light, food, humidity, water—is meticulously controlled. Since there are no pests, there’s no need for pesticides and herbicides, and no harmful runoff. Perhaps most importantly, vertical farming requires a fraction of the arable land and water used in traditional farming—a key factor in Colorado, whose population continues to grow rapidly even as the West continues to experience mega-droughts​. Not everyone is a fan of vertical farming, though. For thousands of years, farming meant getting down and dirty—literally. Some critics say vertical farming just isn’t the “real” thing. “Many question the nutritional value of hydroponically-grown vegetables because there’s no dirt to provide the required nutrients or micro-organisms,” Romano says. “But just think how the soil nutrient content can vary from one corner of farmland to the other, or from day to day due to rain or water run-off. A controlled system is just that—a system that controls the amount of minerals and nutrients at the right time and amounts throughout the grow cycle of the crop. 
This gives hydroponic-based systems the potential to grow crops that have more nutrients than soil-based crops.” At Infinite Harvest’s 5,400 square-foot grow facility in Lakewood, Colorado, Romano oversees a team of ten greenhouse technicians who manage planting, harvesting, packing, and delivery. Baby kale, arugula, microgreens, and two varieties of basil (Thai and Genovese) grow in neatly-stacked trays that tower to the warehouse ceiling. A neon, pinkish-purple hue—LED lights designed by Romano—bathes the lush vegetation in other-worldly light. Roughly 160,000 plants, in various stages of development, are growing at any given time. Here in this completely man-made environment, plants may never see natural sunlight until they’re loaded onto a truck on delivery day. So, where does all this food go? Romano’s clients include celebrity chefs, upscale eateries, and prominent restaurant groups. Infinite Harvest sells exclusively to mid- and high-level restaurants in the Denver metro area, including Rioja (the flagship restaurant of James Beard award winner Jennifer Jasinski) and Beast + Bottle, owned by renowned chef Paul C. Reilly. Not only does indoor farming provide a steady supply of sustainable food for the Mile High City’s epicureans, it allows Romano to customize flavor profiles according to a chef’s specific request. For example, young arugula is sweeter. But if you grow it longer and change certain growing conditions, such as light exposure and nutrient levels, it tastes more peppery. Restaurants want this variety according to which dish the produce will be used in, or simply due to the taste preference of the chef. “I have to admit that at first I was a bit skeptical about vertical farming,” says Tim Kuklinski, Rioja’s chef de cuisine. “However, after tasting the products from Infinite Harvest, my skepticism washed away.
I’m excited to get consistent and delicious greens year round while also supporting a Colorado business.” Infinite Harvest has a 50-mile limit for delivery, but Romano says all his current clients are within half that distance. Rarely does his delivery team venture outside downtown Denver, and harvest-to-table time is measured in hours, not days or weeks. Despite his set-up’s advantages, Romano admits it’s not without its share of difficulties. One: logistics, he says. Two: Market demand. Because Colorado has a short outdoor growing season, chefs have clamored for the opportunity to purchase Infinite Harvest’s produce since they began limited operations in March 2015. “The response from the local market for our product was extremely high,” Romano says. That left some frustrated chefs waiting up to six months to receive their first orders. Romano secured a second round of funding from investors quickly after the company’s first harvest last March, which he used to complete construction a full year ahead of schedule. Although Romano’s aerospace engineering background allows him to look at food production through a scientific lens, he is, first and foremost, a realist. “Yes, vertical farming is different than the way the world has been growing crops for centuries,” Romano says. But as he points out, “people also said that Thomas Edison’s light bulb was not a true light source because it didn’t burn a wick and make smoke. The demand for sustainable local food is rising with the world’s population growth and shifting climates, and we need a more efficient and healthy way to grow our food. Vertical farming is one of the ways leading the food production revolution. That’s evolution.” Looking forward, Romano plans to grow into an even larger facility in the Denver market. “Currently the industry is focused on green leafy crops because they are the easiest and fastest to grow,” he says. 
“Fruiting crops like tomatoes and strawberries present challenges, but they are not insurmountable in the vertical farm process. Going further, we believe that staple crops are also possible. We’ve successfully grown a test crop of sweet corn already, and there are others that are working on growing rice.” Being in Colorado, Romano says he is often questioned about other, shall we say, herbal options when people see his setup—but he clarifies that there is one crop you won’t be seeing in Infinite Harvest’s rotation. “Marijuana is not our market,” he says. “Our business model is based on a broader base, because everybody needs food." Other articles from Modern Farmer:
import pandas as pd
import json

# Build a JSON file mapping each training image name to its detection
# results ({'boxes': [[xmin, ymin, xmax, ymax], ...], 'scores': [...]})
# from a tiny-model detection CSV.
# FIX: the original opened data_train.txt and the CSV with bare open()
# calls that were never closed (the CSV handle was not even used, since
# pandas reads the path itself); file access now uses `with`, the unused
# handle is gone, and the builtin-shadowing name `dir` was renamed.

train_list_path = r'C:\Users\const\Desktop\dataset\INRIAPerson\data_train.txt'

# Seed every listed image with empty results so images without detections
# still appear in the output.
data = {}
with open(train_list_path, "r") as f:
    for line in f.read().split('\n'):
        elem = line.split(' ')
        # First space-separated field is the image path; keep the base name.
        filename = elem[0].split('/')[-1]
        data[filename] = {'boxes': [], 'scores': []}

detections_path = r'C:\Users\const\Desktop\dataset\test_set\Detection_Results_tiny.csv'
df = pd.read_csv(detections_path)
for index, row in df.iterrows():
    data[str(row['image'])]['boxes'].append(
        [row['xmin'], row['ymin'], row['xmax'], row['ymax']])
    data[str(row['image'])]['scores'].append(row['confidence'])

out_path = r'C:\Users\const\Desktop\dataset\test_set\test_data_predictions_tiny.json'
with open(out_path, 'w') as f:
    # json.dump writes directly to the file; sort_keys kept for stable output.
    json.dump(data, f, sort_keys=True)
import { Unit } from '../unit/Unit';
import { CombatUnit } from 'unit/CombatUnit';

// Structure exposing the legacy energy/energyCapacity fields.
export interface EnergyStructure extends Structure {
	energy: number;
	energyCapacity: number;
}

// Structure exposing a store with a fixed capacity.
export interface StoreStructure extends Structure {
	store: StoreDefinition;
	storeCapacity: number;
}

// Duck-typed runtime guards. Each checks for the presence of fields unique
// to the target type. The loose `!=`/`==` comparisons are intentional: they
// treat both null and undefined as "absent".
// FIX(idiom): angle-bracket type assertions (`<T>obj`) replaced with `as`
// casts, which are JSX-compatible and the conventional style.

export function isEnergyStructure(obj: RoomObject): obj is EnergyStructure {
	return (obj as EnergyStructure).energy != undefined
		   && (obj as EnergyStructure).energyCapacity != undefined;
}

export function isStoreStructure(obj: RoomObject): obj is StoreStructure {
	return (obj as StoreStructure).store != undefined
		   && (obj as StoreStructure).storeCapacity != undefined;
}

export function isStructure(obj: RoomObject): obj is Structure {
	return (obj as Structure).structureType != undefined;
}

export function isOwnedStructure(structure: Structure): structure is OwnedStructure {
	return (structure as OwnedStructure).owner != undefined;
}

// Sources have energy; minerals do not.
export function isSource(obj: Source | Mineral): obj is Source {
	return (obj as Source).energy != undefined;
}

export function isTombstone(obj: RoomObject): obj is Tombstone {
	return (obj as Tombstone).deathTime != undefined;
}

export function isResource(obj: RoomObject): obj is Resource {
	return (obj as Resource).amount != undefined;
}

export function hasPos(obj: HasPos | RoomPosition): obj is HasPos {
	return (obj as HasPos).pos != undefined;
}

export function isCreep(obj: RoomObject): obj is Creep {
	return (obj as Creep).fatigue != undefined;
}

export function isPowerCreep(obj: RoomObject): obj is PowerCreep {
	return (obj as PowerCreep).powers != undefined;
}

// Distinguish wrapper objects (Unit/CombatUnit) from raw Creeps.
export function isUnit(creep: Creep | Unit): creep is Unit {
	return (creep as Unit).creep != undefined;
}

export function isCombatUnit(unit: Creep | Unit | CombatUnit): unit is CombatUnit {
	return (unit as CombatUnit).isCombatZerg != undefined;
}
// put the linear itnerpolation between a and b by s into the res color static void lerpFloat(QColor& res, const QColor& ca, const QColor& cb, float s) { res.setRedF(ca.redF() + s * (cb.redF() - ca.redF())); res.setGreenF(ca.greenF() + s * (cb.greenF() - ca.greenF())); res.setBlueF(ca.blueF() + s * (cb.blueF() - ca.blueF())); res.setAlphaF(ca.alphaF() + s * (cb.alphaF() - ca.alphaF())); }
// repo: anantdhok/ComputerScience
#include <bits/stdc++.h>
using namespace std;

// Iterative binary search over a sorted vector.
// Returns the index of `key`, or -1 if it is absent.
int BinarySearchImperative(int key, const vector<int>& data) {
    // Fix: upper bound must be size()-1; the original used size(), so
    // data[m] could read one past the end when key > every element.
    int l = 0, h = static_cast<int>(data.size()) - 1;
    while (l <= h) {
        int m = l + (h - l) / 2;  // overflow-safe midpoint
        if (data[m] == key)
            return m;
        else if (data[m] < key)
            l = m + 1;
        else
            h = m - 1;
    }
    return -1;
}

// Recursive binary search on the inclusive range data[l..h].
// Returns the index of `key`, or -1 if it is absent.
int BinarySearchRecurssive(int key, const vector<int>& data, int l, int h) {
    if (l > h)
        return -1;
    int m = l + (h - l) / 2;
    if (data[m] == key)
        return m;
    if (data[m] < key)
        return BinarySearchRecurssive(key, data, m + 1, h);
    return BinarySearchRecurssive(key, data, l, m - 1);
}

int main() {
    int key = 4;
    vector<int> data{ 1, 5, 2, 7, 4, 8, 3, 0, 9};
    sort(data.begin(), data.end());

    cout << "\n Sorted List : ";
    for (int i : data)
        cout << i << " ";

    cout << "\n At index : " << BinarySearchImperative(key, data);
    cout << "\n At index : " << BinarySearchRecurssive(key, data, 0, data.size() - 1);
    return 0;
}
def RunturbESN(esn, u_train: Union[np.ndarray, torch.Tensor] = None, y_train: Union[np.ndarray, torch.Tensor] = None,
               y_test: Union[np.ndarray, torch.Tensor] = None, pred_init_input: Union[np.ndarray, torch.Tensor] = None,
               u_test: Union[np.ndarray, torch.Tensor] = None, index_list_auto: list = [],
               index_list_teacher: list = []) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
    """Train an echo state network and evaluate it on the test set.

    Data can be passed directly or taken from the attributes already set on
    ``esn``. After fitting the readout, the network is run in the mode given
    by ``esn.mode`` (auto-prediction, teacher forcing, or semi-teacher
    forcing).

    Args:
        esn: the ESN object holding hyperparameters, weights and (optionally)
            the training/testing data.
        u_train, y_train: training input/output, shape (trainingLength, ...).
        y_test: testing output, shape (testingLength, ...).
        pred_init_input: initial input for the prediction phase (optional).
        u_test: testing input, required when esn.mode is not the default mode.
        index_list_auto, index_list_teacher: input-channel split used only in
            semi-teacher-forcing mode. NOTE: mutable defaults — safe here
            because they are only read, never mutated.

    Returns:
        (mse_train, mse_test, y_pred). On a failed fit (NaNs in Wout or an
        undefined training MSE) the default MSE values and a zero prediction
        are returned instead.
    """
    torch.manual_seed(esn.randomSeed)
    logging.basicConfig(format=_LOGGING_FORMAT, level=esn.logging_level)

    # Data must come from the arguments or already be attached to the esn.
    assert None not in [u_train, y_train, y_test] or None not in [esn.u_train, esn.y_train, esn.y_test], "Error: u_train, y_train or y_test not specified."

    # Shape checks are logged (not raised) so a mismatch is visible but the
    # original control flow is preserved.
    if None not in [u_train, y_train, y_test]:
        for data in [u_train, y_train]:
            if esn.trainingLength != data.shape[0]:
                logging.error('Training input/output time dimension ({0}) does not match ESN trainingLength ({1}).'.format(data.shape[0], esn.trainingLength))

    if y_test is not None:
        if esn.testingLength != y_test.shape[0]:
            logging.error('Testing Output time dimension ({0}) does not match ESN testingLength ({1}).'.format(y_test.shape[0], esn.testingLength))

    if esn.mode != _ESN_MODES[0]:
        assert u_test is not None or esn.u_test is not None, "Error: u_test not specified"
        if u_test is not None:
            if esn.testingLength != u_test.shape[0]:
                logging.error('Testing input time dimension ({0}) does not match ESN testingLength ({1}).'.format(u_test.shape[0], esn.testingLength))

    if pred_init_input is not None:
        if pred_init_input.dtype is not _DTYPE:
            pred_init_input = torch.as_tensor(pred_init_input, dtype=_DTYPE)
        # Fix: Tensor.to() is not in-place — the original discarded its
        # result, leaving pred_init_input on the old device.
        # NOTE(review): `is not` compares device object identity, which may
        # trigger a redundant (harmless) .to() for equal devices — confirm.
        if pred_init_input.device is not esn.device:
            pred_init_input = pred_init_input.to(esn.device)

    if None not in [u_train, y_train, y_test]:
        esn.SetTrainingData(u_train=u_train, y_train=y_train)
        esn.SetTestingData(y_test=y_test, pred_init_input=pred_init_input, u_test=u_test)

    esn.toTorch()
    esn.createWeightMatrices()

    # Harvest reservoir states on the training input, then fit the readout
    # on the post-transient part.
    esn.x_fit = esn.propagate(u=esn.u_train, transientTime=esn.transientTime)
    esn.fit(X=esn.x_fit, y=esn.y_train[esn.transientTime:])
    # Training prediction is the linear readout applied to the fitted states.
    mse_train = ComputeMSE(y_test=esn.y_train[esn.transientTime:], y_pred=(esn.Wout @ esn.x_fit).T)

    # Bail out with default values if the fit produced NaNs or no MSE.
    if np.isnan(esn.Wout).any() or mse_train is None:
        logging.error("Reservoir {0}: while fitting the model, an error occured. Assuming default values.".format(esn.id))
        mse_train = _MSE_DEFAULT
        mse_test = _MSE_DEFAULT
        y_pred = torch.zeros([esn.testingLength, esn.n_output], device=esn.device, dtype=_DTYPE)
        return mse_train, mse_test, y_pred

    # Prediction phase: mode selects teacher forcing, semi-teacher forcing,
    # or free-running auto-prediction.
    if esn.mode == _ESN_MODES[1]:
        y_pred, esn.x_pred = esn.teacherforce(X=esn.x_fit, testingLength=esn.testingLength)
    elif esn.mode == _ESN_MODES[2]:
        assert len(index_list_auto) + len(index_list_teacher) == esn.n_input, 'index_list_auto and index_list_teacher do not add up to n_input.'
        y_pred, esn.x_pred = esn.semiteacherforce(X=esn.x_fit, testingLength=esn.testingLength, index_list_auto=index_list_auto, index_list_teacher=index_list_teacher, u_test=esn.u_test)
    else:
        y_pred, esn.x_pred = esn.predict(X=esn.x_fit, testingLength=esn.testingLength)

    mse_test = ComputeMSE(y_test=esn.y_test, y_pred=y_pred)

    return mse_train, mse_test, y_pred
Difference in metabolite levels between photoautotrophic and photomixotrophic cultures of Synechocystis sp. PCC 6803 examined by capillary electrophoresis electrospray ionization mass spectrometry Capillary electrophoresis mass spectrometry (CE/MS) was applied for the comprehensive survey of changes in the amounts of metabolites upon the shift from photoautotrophic to photomixotrophic conditions in Synechocystis sp. PCC 6803. When glucose was added to the photoautotrophically grown culture, the increase in the metabolites for the oxidative pentose phosphate (OPP) pathway and glycolysis, together with the decrease in those for the Calvin cycle, was observed. Concomitantly, the increase in respiratory activity and the decrease in photosynthetic activity took place in the wild-type cells. In the pmgA-disrupted mutant that shows growth inhibition under photomixotrophic conditions, lower enzymatic activities of the OPP pathway and higher photosynthetic activity were observed, irrespective of trophic conditions. These defects brought about metabolic disorders such as a decrease in ATP and NADPH contents, a failure in the activation of respiratory activity, and the aberrant accumulation of isocitrate under photomixotrophic but not under photoautotrophic conditions. A delicate balancing of the carbon flow between the Calvin cycle and the OPP pathway seems indispensable for growth specifically under photomixotrophic conditions and PmgA is likely to be involved in the regulation. Introduction Photosynthetic organisms face continuous constraints in their growth environment. In order to meet the energy demands for sustainable life, cyanobacteria show versatile growth characteristics in response to the availability of light and the carbon source (Rippka et al., 1979;Stal and Moezelaar, 1997). They are primarily photoautotrophic organisms performing oxygenic photosynthesis to convert light energy to chemical energy. 
ATP and NADPH generated by photosynthesis are used for the assimilation of CO 2 by the Calvin cycle and the excess carbon fixed is stored in the form of glycogen. Some cyanobacterial species have the ability of heterotrophic energy generation in addition to the photosynthetic capability: they catabolize glucose via the oxidative pentose phosphate (OPP) pathway, the glycolytic pathway, and an incomplete tricarboxylic acid (TCA) cycle to produce ATP, NADPH, and carbon skeletons used as anabolic precursors. In cyanobacteria having no organelle, all of the abovementioned metabolisms, CO 2 fixation, gluconeogenesis, and glycolysis, are performed in the cytoplasm and several enzymes are shared among these pathways (Smith, 1982). Thus, the control of functionally equivalent reactions in the anabolic and catabolic pathways seems to be a prerequisite for optimized growth under different trophic conditions. The alteration in the metabolic processes in response to the environmental change has been examined using a glucose-tolerant strain of Synechocystis sp. PCC 6803 that is capable of photoautotrophic, photomixotrophic, and heterotrophic growth. The availability of light energy significantly affects the expression level of mRNA (Gill et al., 2002), that of proteins (Yang et al., 2002a;Kurian et al., 2006) and cellular metabolism (Yang et al., 2002a) in Synechocystis. When cultures are transferred from photoautotrophic to dark heterotrophic conditions with glucose, CO 2 assimilation by the Calvin cycle is rapidly inactivated and the major metabolic pathway switches from the Calvin cycle to the OPP pathway (Pelroy et al., 1972). On the other hand, the addition of glucose in the presence of light, namely, the shift from photoautotrophic to photomixotrophic conditions, hardly affects the expression pattern of transcripts (Yang et al., 2002a;Kahlon et al., 2006) and of proteins (Yang et al., 2002a;Herranen et al., 2004). 
Under photomixotrophic conditions where both light and glucose are available, substantial activity of the Calvin cycle is detected (Yang et al., 2002a, b, c). Early works showed that the OPP pathway is repressed in the presence of light through the allosteric inhibition of glucose-6-phosphate dehydrogenase (G6PDH) by ribulose-1,5-diphosphate (RuBP), an intermediate of the Calvin cycle (Pelroy et al., 1972). Thus, the metabolic characteristic of photomixotrophic cultures has been considered to be similar to that of photoautotrophic ones: it is largely dependent on photosynthesis but not on glucose catabolism. However, recent studies reported the detection of high G6PDH activity in photomixotrophically grown cells (Knowles and Plaxton, 2003;Singh and Sherman, 2005;Kahlon et al., 2006). This implies that both the Calvin cycle and the OPP pathway, whose enzymatic reactions proceed in the reverse direction for the most part, can coexist under photomixotrophic conditions. If this is the case, there must be a regulatory mechanism to co-ordinate anabolic and catabolic activities upon the shift from photoautotrophic to photomixotrophic conditions. At present, information on such a regulatory mechanism is not available, since there is no comprehensive study focused on the difference in metabolic processes under these two trophic conditions. In this study, two strategies were used to address the mechanism that enables Synechocystis sp. PCC 6803 to grow photomixotrophically. First, capillary electrophoresis mass spectrometry (CE/MS) was applied for the comprehensive survey of metabolic processes, since this method is useful for the simultaneous detection of metabolites, especially charged ones such as organic acids and nucleotides. In previous reports, the CE/MS method successfully revealed metabolite alterations in Bacillus subtilis (Soga et al., 2002), Oryza sativa (Sato et al., 2004;Takahashi et al., 2006a) and Arabidopsis thaliana (Takahashi et al., 2006b). 
Second, to unravel the regulatory mechanism required for photomixotrophic growth, the amounts of metabolites were compared between the wild-type and a mutant showing sensitivity to photomixotrophic conditions. The gene-disrupted mutant of pmgA (sll1968) encoding a putative regulatory protein can grow normally under photoautotrophic or heterotrophic conditions, but suffers severe growth inhibition under photomixotrophic conditions (Hihara and Ikeuchi, 1997). Metabolic characterization of such a regulatory mutant showing light/glucose sensitivity seems a good approach to clarify the regulatory processes required for photomixotrophic growth. Materials and methods Strains and culture conditions A glucose-tolerant wild-type strain of Synechocystis sp. PCC 6803 and the pmgA-disrupted mutant made by inserting the spectinomycin resistance cassette (Hihara and Ikeuchi, 1997) were used for the study. They were grown at 32°C in BG-11 medium with 20 mM HEPES-NaOH, pH 7.0, under continuous illumination of 50 lmol photons m À2 s À1 . Cultures were grown in volumes of 50 ml in test tubes (3 cm in diameter) and bubbled with air supplemented with 1% CO 2 . Cell density was estimated at A 730 using a spectrophotometer (model UV-160A, Shimadzu, Kyoto, Japan). For the comparison of a photomixotrophic culture with a photoautotrophic one, photoautotrophically grown cultures at midlog phase were inoculated into the fresh medium with or without 5 mM glucose at A 730 ¼0.05-0.1 and incubated at 50 lmol photons m À2 s À1 for the indicated duration. Metabolite analysis Quantification of metabolites was performed using the method described by Takahashi et al. (2006b). Fifty ml of cultures were harvested by centrifugation at 15 000 g at 4°C for 2 min and the cell pellets obtained (30-50 mg in fresh weight) were frozen in liquid nitrogen. Samples were vortexed with 200 ll of ice-cooled 50% (v/v) methanol containing internal standards (50 lM PIPES) for 10 min. 
The supernatant was recovered by centrifugation at 15 000 g at 4°C for 5 min, filtered with a 5 kDa cut-off filter (Millipore, Bedford, MA, USA), and used for analysis by CE/MS. Separation of metabolites was performed on a polyethylene glycolcoated capillary (DB-WAX, 100 cm350 lm i.d., J&W Scientific, Folsom, CA, USA) using 20 mM ammonium acetate, pH 6.8, as a running buffer. Metabolites in the extract were identified by comparison of the migration time and m/z ratio with those of authentic organic acids and nucleotides. The quantification was performed by comparing peak areas of metabolites in samples with those of the authentic standards. As for glyceraldehyde-3-phosphate (GA3P) that exists as both the labile diol and the stable aldehyde forms in aqueous solution (Trentham et al., 1969), the stable form was specifically detected by CE/MS. Measurement of enzymatic activities of cells Fifty ml of cultures were harvested by brief centrifugation and resuspended with 500 ll of 50 mM sodium phosphate buffer, pH 7.5, containing 3 mM MgCl 2 and 1 mM DTT. Approximately 250 ll volume of zircon beads (0.1 mm in diameter, Biospec, Bartlesville, OK, USA) were added to the cell suspension and the cells were broken by vigorous vortexing for 2 min followed by cooling on ice for 1 min and this procedure was repeated four times. The lysate was centrifuged at 15 000 g for 10 min and the resulting supernatant was used for the measurement. NADP + -specific enzymatic activities in cell extracts were measured spectrophotometrically by monitoring the substratedependent generation of NADPH at 340 nm. For measurements of the activities of glucose-6-phosphate dehydrogenase (G6PDH), 6-phosphogluconate dehydrogenase (6PGDH), and isocitrate dehydrogenase (ICDH), the reaction mixture containing 100 mM sodium phosphate buffer, pH 7.5, 3 mM MgCl 2 , 0.4 mM NADP + and the cell extract in a total volume of 1 ml was used. 
For measurements of aconitase activity, the reaction mixture containing 100 mM sodium phosphate buffer, pH 7.5, 15 mM MgCl 2 , 1 mM NADP + , 1 mM EDTA, the cell extract, and NADP + -dependent ICDH (0.6 units) in a total volume of 1 ml was used. Reactions were started by the addition of respective substrates at 30°C. Five mM glucose-6-phosphate, 5 mM 6-phosphogluconate, 2.5 mM isocitrate, and 30 mM cis-aconitate were added as substrates for the measurements of G6PDH, 6PGDH, ICDH, and aconitase, respectively. 1 U of enzymatic activity corresponds to the formation of 1 lmol NADPH/min under standard assay conditions. Protein concentration was determined using Protein Assay Kits II (Bio-Rad Laboratories, Hercules, CA, USA) according to the manufacturer's instructions. Measurement of photosynthetic and respiratory activities of cells An aliquot (1 ml) of photoautotrophically or photomixotrophically grown cultures was placed in a Clark-type oxygen electrode chamber and stirred gently at 30°C. Whole-cell photosynthetic activity was measured as oxygen evolution supported by 2 mM NaHCO 3 at 50 lmol photons m À2 s À1 . Respiratory activity was measured as oxygen consumption in the presence of 5 mM glucose in the dark. The rates of oxygen evolution and consumption were calculated in terms of lmol oxygen evolved (10 8 cells) À1 h À1 and of lmol oxygen consumed (10 8 cells) À1 h À1 , respectively. Results Growth properties of the wild-type and pmgA-disrupted mutant cells under photomixotrophic conditions First, the growth properties of the wild-type cells and pmgAdisrupted mutant cells (DpmgA mutant) were examined under photomixotrophic conditions (Fig. 1). Five mM glucose was added to the photoautotrophically grown cultures at time 0. When the cultures were diluted every 24 h to minimize the self-shading effect, the delay in the growth of the DpmgA mutant became prominent on the second day. 
The growth of the mutant completely stopped on the third day, whereas the wild-type cells did not suffer any growth inhibition. If the mutant has any defects in the regulation of metabolic processes, aberrant levels of metabolites should be detected preceding the growth inhibition. Thus, the time point of 24 h after the addition of glucose was chosen for the sampling time to examine the amounts of metabolites in cultures incubated under photomixotrophic conditions for 24 h. Amounts of metabolites in glycolysis, the OPP pathway, and the Calvin cycle The amounts of metabolites engaged in the central carbon metabolic pathway in cyanobacteria (Fig. 2) were examined using CE/MS. Figure 3 shows the amounts of metabolites in glycolysis, the OPP pathway, and the Calvin cycle in wild-type and DpmgA mutant cells incubated under photoautotrophic or photomixotrophic conditions for 24 h. The presence of glucose had a great impact on the amounts of these metabolites in both strains. Namely, the levels of glucose-6-phosphate (G6P), fructose-1,6-bisphosphate (FBP), dihydroxyacetone phosphate (DHAP), glyceraldehyde-3-phosphate (GA3P), 6-phosphogluconate (6PG), ribose-5-phosphate (R5P), and ribulose-5-phosphate (Ru5P) increased significantly, while those of RuBP, 3-phosphoglycerate (3PGA), phosphoenolpyruvate (PEP), and pyruvate showed a marked decrease. It is notable that GA3P, 6PG, Ru5P, and RuBP contents were lower in the mutant than in the wild-type under photomixotrophic conditions. Amounts of metabolites in the TCA cycle Next, the contents of metabolites related to the TCA cycle were examined (Fig. 4A). Incubation under photomixotrophic conditions led to the increase in contents of malate, fumarate, and isocitrate, while succinate and 2-oxoglutarate (2-OG) contents decreased in both strains. Surprisingly, the isocitrate content in the DpmgA mutant increased up to 10-fold on incubation with glucose, whereas that in the wild-type showed only a slight increase. 
Then the time-course of isocitrate accumulation was examined after the addition of glucose (Fig. 4B). In the wild-type cells, isocitrate content was slightly increased, while it showed an approximately 10-fold increase within 6 h in the mutant and stayed at an almost constant level until 24 h after glucose addition. Amounts of adenine and pyridine nucleotides The amounts of adenine and pyridine nucleotides were examined (Fig. 5). In wild-type cells incubated with glucose for 24 h, the increase of NADH, AMP, and ADP contents and the decrease of NAD + , NADP + , and NADPH contents were observed. In the case of DpmgA mutant cells under photoautotrophic conditions, the nucleotide contents were almost the same as those in the wild-type cells except for the lower amount of NADPH. On the other hand, under photomixotrophic conditions, NADP + , NADPH, and ATP contents were significantly lower in the mutant. In both strains, the ratio of NADH to total (NAD + +NADH) increased, whereas that of NADPH to total (NADP + +NADPH) hardly changed upon the shift to photomixotrophic conditions. Enzymatic activities For a better understanding of the changes in metabolite levels, the glucose-dependent changes in enzymatic activities were examined concerning several enzymes that are suspected to show different activities between the wildtype and delta-pmgA mutant cells. Cell extracts were prepared from cultures incubated under photomixotrophic conditions for 0, 1, 3, 6, and 24 h. The activities of G6PDH and 6PGDH that control the carbon flow into the OPP pathway were determined first. As shown in Fig. 6, activities of these enzymes were only slightly affected by the addition of glucose. A notable finding is that these activities in the mutant were considerably lower than those in the wild-type cells, irrespective of trophic conditions. 
The aberrant accumulation of isocitrate in the mutant may be caused by the defect in the isocitrate formation catalysed by aconitase and/or the degradation catalysed by NADP + -dependent isocitrate dehydrogenase (ICDH). Thus, the time-course change of the activity of these enzymes was examined (Fig. 6). The aconitase activity was up-regulated within 1 h, whereas the ICDH activity was hardly affected by the addition of glucose. In both cases, no difference in the enzymatic activity was observed between the wild-type and DpmgA mutant. ICDH activity is dependent on the level of NADP + which was shown to be low in the mutant under photomixotrophic conditions (Fig. 5). Thus, there is a possibility that in vivo activity of ICDH in the mutant is restricted by the availability of NADP + , although such a limitation cannot be observed in the reaction mixture containing sufficient NADP + . To test this possibility, the intracellular level of NADP + was examined 6 h after the addition of glucose when the accumulation of isocitrate became prominent in the mutant cells (Fig. 4B). At this time point, NADP + decreased to half of its initial level and there was no difference in the amount between the two strains (not shown). This indicates that the activity of NADP + -ICDH in the mutant is also normally regulated in vivo. Photosynthetic and respiratory activities The photosynthetic and respiratory activities of the wildtype and DpmgA mutant cells incubated under photoautotrophic or photomixotrophic conditions for 24 h were examined. Photosynthetic activity was measured as the rate of NaHCO 3 -dependent oxygen evolution under the growth conditions, that is, at an illumination of 50 lmol photons m À2 s À1 with or without 5 mM glucose (Fig. 7A). In the wild-type cells, the oxygen evolution rate decreased to two-thirds of the initial level upon the addition of glucose. 
The oxygen evolution rate of the DpmgA mutant was higher than that of the wild-type under photoautotrophic conditions, which is consistent with the previous observation (Hihara et al., 1998). Although the oxygen evolution rate also decreased in the mutant after the addition of glucose, the resultant activity was still considerably higher than that in the wild type. Respiratory activity was measured as the rate of oxygen consumption under dark conditions in the presence of 5 mM glucose (Fig. 7B). When incubated with glucose for 24 h, the oxygen consumption rate increased 1.5-fold in the wildtype cells, whereas it hardly changed in the DpmgA mutant. Discussion The difference in metabolic processes between photoautotrophically and photomixotrophically grown cultures of Synechocystis sp. PCC 6803 In this study, the level of metabolites in Synechocystis cells was significantly affected by the transfer from photoautotrophic to photomixotrophic conditions, irrespective Fig. 3. Amounts of metabolites in glycolysis, the OPP pathway and the Calvin cycle. Wild-type and DpmgA mutant were grown photoautotrophically (white bars) or photomixotrophically (black bars) for 24 h, and then metabolite contents were quantified. Each value represents the means 6SD of three independent experiments (nmol g À1 fresh weight). of the almost unchanged levels of transcripts (Yang et al., 2002a;Kahlon et al., 2006) and of proteins (Yang et al., 2002a;Herranen et al., 2004). It is evident that the metabolic balance is shifted toward catabolism by the addition of glucose, although the anabolic pathways are still active unlike in the case of the transfer to heterotrophic conditions. After 24 h of incubation under photomixotrophic conditions, the amounts of G6P, 6PG, Ru5P, R5P, FBP, GA3P, and DHAP increased (Fig. 3), indicating an increase in carbon flow to the OPP pathway and glycolysis. 
In cyanobacteria, the major route of glucose catabolism was suggested to be the OPP pathway and the lower part of the glycolytic pathway (Pelroy et al., 1972), whereas the contribution of the upper part of the glycolytic pathway has been elusive. However, our results clearly show that both the OPP pathway and the whole of glycolysis actively participate in glucose catabolism. The increase in NADH content under photomixotrophic conditions (Fig. 5) also indicates the enhanced flow through glycolysis. Measurement of G6PDH and 6PGDH activi-ties revealed that the enzymatic activities themselves are not up-regulated under photomixotrophic conditions (Fig. 6), which is consistent with previous reports (Knowles and Plaxton, 2003;Singh and Sherman, 2005;Kahlon et al., 2006). It is probable that the abundant supply of substrates under photomixotrophic conditions leads to the enhancement of sugar catabolism. Judging from the 1.5-fold higher oxygen consumption rate in photomixotrophically grown cells than that in photoautotrophically grown cells (Fig. 7B), respiratory electron transport seems also to be activated under photomixotrophic conditions. On the other hand, carbon flow through the Calvin cycle seems to decrease under photomixotrophic conditions, judging from the decrease in the Calvin cycle intermediates (Fig. 3) and photosynthetic activity (Fig. 7A). This indicates that the restriction of photosynthesis is one of the strategies of Synechocystis sp. PCC 6803 to adapt to photomixotrophic condition. The amounts of RuBP, 3PGA, PEP, and pyruvate significantly decreased upon the addition of glucose in contrast to the increase of Ru5P and GA3P (Fig. 3). Phosphoribulokinase (PRK) and glyceraldehyde-3-phosphate dehydrogenase (GAPDH) catalysing the conversions of Ru5P and GA3P to RuBP and 3PGA, respectively, are known to be key enzymes for the regulation of carbon flow between the Calvin cycle and the OPP pathway (Wedel and Soll, 1998). 
The increase in substrates and the decrease in products of these enzymes under photomixotrophic conditions are probably caused by the repression of the activities of PRK and GAPDH. In Synechococcus sp. PCC 7942 cultured under a 12/12 h light/dark cycle, the downregulation of the Calvin cycle during the dark period was shown to be attained through the complex formation of GAPDH, PRK, and a small protein named CP12 in response to the decrease in the NADP(H)/NAD(H) ratio (Tamoi et al., 2005). It is reasonable to assume that this inhibitory mechanism works in photomixotrophically grown Synechocystis, because a significant decrease was observed in the NADP(H)/NAD(H) ratio on the addition of glucose, from 2.3 to 1.4 in the wild-type cells. Upon the shift to photomixotrophic conditions, the extent of the decrease in the amount of the Calvin cycle intermediates such as RuBP and 3PGA (Fig. 3) was much larger than that in photosynthetic activity (Fig. 7A). It is possible that these metabolites hardly accumulate under photomixotrophic conditions probably due to the increased metabolic flux to the OPP pathway and to the TCA cycle. Among the metabolites in the TCA cycle, the amount of malate, fumarate, and isocitrate increased under photomixotrophic conditions, whereas that of succinate and 2-OG decreased (Fig. 4A). In cyanobacteria lacking both 2-oxoglutarate dehydrogenase and succinyl-CoA synthetase activities (Pearce et al., 1969), succinate and 2-OG are terminal metabolites of the TCA cycle and serve as a precursor of various biosynthetic reactions. Succinate is a substrate for succinate dehydrogenase (SDH) that is a major component of the cyanobacterial respiratory electron transport chain (Cooley and Vermaas, 2001). On the other hand, 2-OG serves as a carbon skeleton required for nitrogen assimilation and also works as a regulatory metabolite involved in the co-ordination between carbon and nitrogen metabolism (Muro-Pastor et al., 2001). 
Enhancement of SDH activity and nitrogen assimilation may be the cause of the decrease in these metabolites under photomixotrophic conditions. Fig. 5. Amounts of adenine and pyridine nucleotides. Wild-type and DpmgA mutants were grown photoautotrophically (white bars) or photomixotrophically (black bars) for 24 h, and then metabolite contents were quantified. Each value represents the means ±SD of three independent experiments (nmol g⁻¹ fresh weight). Metabolome of photomixotrophically grown cyanobacteria 3015 The difference in metabolic processes between the wild-type and DpmgA mutant cells under photomixotrophic conditions Growth of the DpmgA mutant is severely inhibited after 24 h of incubation under photomixotrophic conditions, although the wild-type cells can grow normally. CE/MS analysis and measurement of enzymatic, photosynthetic, and respiratory activities suggested that the DpmgA mutant has a defect in the co-ordination of anabolic and catabolic activities, leading to a metabolic imbalance and growth inhibition under photomixotrophic conditions. The low amount of 6PG, Ru5P, R5P (Fig. 3), and NADPH (Fig. 5) together with the low activities of G6PDH and 6PGDH (Fig. 6) observed in the photomixotrophically grown DpmgA mutant indicated the decreased carbon flow to the OPP pathway. The respiratory activity of DpmgA mutant cells was not up-regulated under photomixotrophic conditions (Fig. 7B), which is probably due to the smaller Fig. 6. Change in activities of G6PDH, 6PGDH, aconitase, and ICDH in wild-type (open circles) and DpmgA (closed circles) cells upon the shift from photoautotrophic to photomixotrophic conditions. Cultures grown at 50 µmol photons m⁻² s⁻¹ were supplemented with 5 mM glucose at time 0. Samples were taken at the indicated time points, and enzymatic activities in cell extracts were determined as described in the Materials and methods. Data are the means ±SD of three independent experiments. Fig. 7.
Photosynthetic (A) and respiratory (B) activities of the wildtype and DpmgA cells incubated under photoautotrophic or photomixotrophic conditions for 24 h. Photosynthetic activity was measured as oxygen evolution supported by 2 mM NaHCO 3 at 50 lmol photons m À2 s À1 . Respiratory activity was measured as oxygen consumption in the presence of 5 mM glucose in the dark. Data are the means 6SD of three independent experiments. amount of NADPH. Furthermore, the low respiratory activity in the mutant may result in a decreased rate of ATP synthesis through oxidative phosphorylation (Fig. 5). The decrease in photosynthetic activity upon the shift to photomixotrophic conditions was observed in the DpmgA mutant as well as in the wild type (Fig. 7A). The amounts of RuBP, 3PGA, PEP, and pyruvate largely decreased in the both strains (Fig. 3), suggesting that the inhibitory mechanism of PRK and GAPDH normally operates in the mutant. However, the DpmgA mutant still showed a higher rate of photosynthetic activity than the wild type under photomixotrophic conditions (Fig. 7A). Active CO 2 fixation, together with the slowdown in the regeneration of RuBP, the substrate of CO 2 fixation, is likely to cause the decrease in the RuBP level in the mutant (Fig. 3). The most notable difference between the wild type and the DpmgA mutant was seen in the isocitrate content under photomixotrophic conditions. Aberrant accumulation of isocitrate was observed in DpmgA mutant cells within 6 h after the addition of glucose (Fig. 4B). The conversion of citrate to isocitrate catalysed by aconitase and that of isocitrate to 2-OG catalysed by NADP + -ICDH were normally operated in the DpmgA mutant (Fig. 6). Perhaps the higher activity of the Calvin cycle in the DpmgA mutant brings about the excess supply of carbon materials, leading to the isocitrate accumulation. Sakuragi et al. 
(2006) reported that the DpmgA mutant accumulated twice as much total sugar as the wild type under photoautotrophic and photomixotrophic conditions. This observation also indicates an excess carbon flow within DpmgA mutant cells. It is of note that low enzymatic activities in the OPP pathway (Fig. 6) and high photosynthetic activity (Fig. 7A) can be observed in the mutant not only under photomixotrophic but also under photoautotrophic conditions. In spite of these defects, the amounts of metabolites in the mutant were not so much different from those in the wild type under photoautotrophic conditions. The low enzymatic activity in the OPP pathway in the mutant could not be rate-limiting under photoautotrophic conditions, since the carbon flow to the OPP pathway is largely restricted due to low availability of substrates. Without conflicting with the activity of the OPP pathway, the higher activity of the Calvin cycle in the mutant could bring about a higher growth rate under photoautotrophic conditions (Hihara et al., 1998). Apparently, a delicate balancing between anabolic and catabolic activities is indispensable for growth specifically under photomixotrophic conditions where both light and glucose are available. PmgA is likely to be involved in this regulation by partitioning the carbon flow between the Calvin cycle and the OPP pathway.
import { sleep, parseVideoUrls, checkRequirements, makeUniqueTitle, ffmpegTimemarkToChunk, makeOutputDirectories, getOutputDirectoriesList, checkOutDirsUrlsMismatch } from './Utils'; import { getPuppeteerChromiumPath } from './PuppeteerHelper'; import { setProcessEvents } from './Events'; import { ERROR_CODE } from './Errors'; import { TokenCache } from './TokenCache'; import { getVideoMetadata } from './Metadata'; import { Metadata, Session } from './Types'; import { drawThumbnail } from './Thumbnail'; import { argv } from './CommandLineParser'; import isElevated from 'is-elevated'; import puppeteer from 'puppeteer'; import colors from 'colors'; import path from 'path'; import fs from 'fs'; import { URL } from 'url'; import sanitize from 'sanitize-filename'; import cliProgress from 'cli-progress'; const { FFmpegCommand, FFmpegInput, FFmpegOutput } = require('@tedconf/fessonia')(); const tokenCache = new TokenCache(); // The cookie lifetime is one hour, // let's refresh every 3000 seconds. const REFRESH_TOKEN_INTERVAL = 3000; async function init() { setProcessEvents(); // must be first! if (await isElevated()) process.exit(ERROR_CODE.ELEVATED_SHELL); checkRequirements(); if (argv.username) console.info('Username: %s', argv.username); if (argv.simulate) console.info(colors.yellow('Simulate mode, there will be no video download.\n')); if (argv.verbose) { console.info('Video URLs:'); console.info(argv.videoUrls); } } async function DoInteractiveLogin(url: string, username?: string): Promise<Session> { const videoId = url.split("/").pop() ?? 
process.exit(ERROR_CODE.INVALID_VIDEO_ID) console.log('Launching headless Chrome to perform the OpenID Connect dance...'); const browser = await puppeteer.launch({ executablePath: getPuppeteerChromiumPath(), headless: false, args: ['--disable-dev-shm-usage'] }); const page = (await browser.pages())[0]; console.log('Navigating to login page...'); await page.goto(url, { waitUntil: 'load' }); if (username) { await page.waitForSelector('input[type="email"]'); await page.keyboard.type(username); await page.click('input[type="submit"]'); } else { // If a username was not provided we let the user take actions that // lead up to the video page. } await browser.waitForTarget(target => target.url().includes(videoId), { timeout: 150000 }); console.info('We are logged in.'); let session = null; let tries: number = 1; while (!session) { try { let sessionInfo: any; session = await page.evaluate( () => { return { AccessToken: sessionInfo.AccessToken, ApiGatewayUri: sessionInfo.ApiGatewayUri, ApiGatewayVersion: sessionInfo.ApiGatewayVersion }; } ); } catch (error) { if (tries > 5) process.exit(ERROR_CODE.NO_SESSION_INFO); session = null; tries++; await sleep(3000); } } tokenCache.Write(session); console.log('Wrote access token to token cache.'); console.log("At this point Chromium's job is done, shutting it down...\n"); await browser.close(); return session; } function extractVideoGuid(videoUrls: string[]): string[] { const videoGuids: string[] = []; let guid: string | undefined = ''; for (const url of videoUrls) { try { const urlObj = new URL(url); guid = urlObj.pathname.split('/').pop(); } catch (e) { console.error(`Unrecognized URL format in ${url}: ${e.message}`); process.exit(ERROR_CODE.INVALID_VIDEO_GUID); } if (guid) videoGuids.push(guid); } if (argv.verbose) { console.info('Video GUIDs:'); console.info(videoGuids); } return videoGuids; } async function downloadVideo(videoUrls: string[], outputDirectories: string[], session: Session) { const videoGuids = 
extractVideoGuid(videoUrls); let lastTokenRefresh: number; console.log('Fetching metadata...'); const metadata: Metadata[] = await getVideoMetadata(videoGuids, session, argv.verbose); if (argv.simulate) { metadata.forEach(video => { console.log( colors.yellow('\n\nTitle: ') + colors.green(video.title) + colors.yellow('\nPublished Date: ') + colors.green(video.date) + colors.yellow('\nPlayback URL: ') + colors.green(video.playbackUrl) ); }); return; } if (argv.verbose) console.log(outputDirectories); let freshCookie: string | null = null; const outDirsIdxInc = outputDirectories.length > 1 ? 1:0; for (let i=0, j=0, l=metadata.length; i<l; ++i, j+=outDirsIdxInc) { const video = metadata[i]; const pbar = new cliProgress.SingleBar({ barCompleteChar: '\u2588', barIncompleteChar: '\u2591', format: 'progress [{bar}] {percentage}% {speed} {eta_formatted}', // process.stdout.columns may return undefined in some terminals (Cygwin/MSYS) barsize: Math.floor((process.stdout.columns || 30) / 3), stopOnComplete: true, hideCursor: true, }); console.log(colors.yellow(`\nDownloading Video: ${video.title}\n`)); video.title = makeUniqueTitle(sanitize(video.title) + ' - ' + video.date, outputDirectories[j]); // Very experimental inline thumbnail rendering if (!argv.noExperiments) await drawThumbnail(video.posterImage, session.AccessToken); console.info('Spawning ffmpeg with access token and HLS URL. 
This may take a few seconds...'); if (!process.stdout.columns) { console.info(colors.red('Unable to get number of columns from terminal.\n' + 'This happens sometimes in Cygwin/MSYS.\n' + 'No progress bar can be rendered, however the download process should not be affected.\n\n' + 'Please use PowerShell or cmd.exe to run destreamer on Windows.')); } // Try to get a fresh cookie, else gracefully fall back // to our session access token (Bearer) freshCookie = await tokenCache.RefreshToken(session, freshCookie); // Don't remove the "useless" escapes otherwise ffmpeg will // not pick up the header // eslint-disable-next-line no-useless-escape let headers = 'Authorization:\ Bearer\ ' + session.AccessToken; if (freshCookie) { lastTokenRefresh = Date.now(); if (argv.verbose) { console.info(colors.green('Using a fresh cookie.')); } // eslint-disable-next-line no-useless-escape headers = 'Cookie:\ ' + freshCookie; } const RefreshTokenMaybe = async (): Promise<void> => { let elapsed = Date.now() - lastTokenRefresh; if (elapsed > REFRESH_TOKEN_INTERVAL * 1000) { if (argv.verbose) { console.info(colors.green('\nRefreshing access token...')); } lastTokenRefresh = Date.now(); freshCookie = await tokenCache.RefreshToken(session, freshCookie); } }; const outputPath = outputDirectories[j] + path.sep + video.title + '.' + argv.format; const ffmpegInpt = new FFmpegInput(video.playbackUrl, new Map([ ['headers', headers] ])); const ffmpegOutput = new FFmpegOutput(outputPath, new Map([ argv.acodec === 'none' ? ['an', null] : ['c:a', argv.acodec], argv.vcodec === 'none' ? 
['vn', null] : ['c:v', argv.vcodec] ])); const ffmpegCmd = new FFmpegCommand(); const cleanupFn = (): void => { pbar.stop(); if (argv.noCleanup) return; try { fs.unlinkSync(outputPath); } catch(e) {} } pbar.start(video.totalChunks, 0, { speed: '0' }); // prepare ffmpeg command line ffmpegCmd.addInput(ffmpegInpt); ffmpegCmd.addOutput(ffmpegOutput); ffmpegCmd.on('update', (data: any) => { const currentChunks = ffmpegTimemarkToChunk(data.out_time); RefreshTokenMaybe(); pbar.update(currentChunks, { speed: data.bitrate }); // Graceful fallback in case we can't get columns (Cygwin/MSYS) if (!process.stdout.columns) { process.stdout.write(`--- Speed: ${data.bitrate}, Cursor: ${data.out_time}\r`); } }); ffmpegCmd.on('error', (error: any) => { cleanupFn(); console.log(`\nffmpeg returned an error: ${error.message}`); process.exit(ERROR_CODE.UNK_FFMPEG_ERROR); }); process.on('SIGINT', cleanupFn); // let the magic begin... await new Promise((resolve: any, reject: any) => { ffmpegCmd.on('success', (data:any) => { pbar.update(video.totalChunks); // set progress bar to 100% console.log(colors.green(`\nDownload finished: ${outputPath}`)); resolve(); }); ffmpegCmd.spawn(); }); process.removeListener('SIGINT', cleanupFn); } } async function main() { await init(); // must be first const outDirs: string[] = getOutputDirectoriesList(argv.outputDirectory as string); const videoUrls: string[] = parseVideoUrls(argv.videoUrls); let session: Session; checkOutDirsUrlsMismatch(outDirs, videoUrls); makeOutputDirectories(outDirs); // create all dirs now to prevent ffmpeg panic session = tokenCache.Read() ?? await DoInteractiveLogin(videoUrls[0], argv.username); downloadVideo(videoUrls, outDirs, session); } main();
<reponame>scotthufeng/spring-cloud-gray<filename>spring-cloud-gray-client-netflix/src/main/java/cn/springcloud/gray/client/netflix/ribbon/RibbonServerExplainer.java
package cn.springcloud.gray.client.netflix.ribbon;

import cn.springcloud.gray.servernode.ServerExplainer;
import cn.springcloud.gray.servernode.VersionExtractor;
import com.netflix.loadbalancer.Server;
import org.springframework.cloud.netflix.ribbon.DefaultServerIntrospector;
import org.springframework.cloud.netflix.ribbon.ServerIntrospector;
import org.springframework.cloud.netflix.ribbon.SpringClientFactory;

import java.util.Map;

/**
 * {@link ServerExplainer} backed by Netflix Ribbon {@link Server} instances.
 * Service id, instance id and metadata are read from the server's meta info,
 * using the Spring Cloud {@link ServerIntrospector} registered for the service.
 */
public class RibbonServerExplainer implements ServerExplainer<Server> {

    private SpringClientFactory springClientFactory;
    private VersionExtractor<Server> versionExtractor;

    public RibbonServerExplainer(SpringClientFactory springClientFactory, VersionExtractor<Server> versionExtractor) {
        this.springClientFactory = springClientFactory;
        this.versionExtractor = versionExtractor;
    }

    @Override
    public VersionExtractor getVersionExtractor() {
        return versionExtractor;
    }

    @Override
    public String getServiceId(Server server) {
        return server.getMetaInfo().getServiceIdForDiscovery();
    }

    @Override
    public String getInstaceId(Server server) {
        return server.getMetaInfo().getInstanceId();
    }

    @Override
    public Map getMetadata(Server server) {
        final String serviceId = server.getMetaInfo().getServiceIdForDiscovery();
        return getServerMetadata(serviceId, server);
    }

    /**
     * Looks up the {@link ServerIntrospector} registered for the given service,
     * falling back to {@link DefaultServerIntrospector} when the client factory
     * is absent or has no introspector for the service.
     *
     * @param serviceId service id
     * @return never {@code null}
     */
    public ServerIntrospector serverIntrospector(String serviceId) {
        if (springClientFactory == null) {
            return new DefaultServerIntrospector();
        }
        final ServerIntrospector introspector =
                this.springClientFactory.getInstance(serviceId, ServerIntrospector.class);
        return introspector != null ? introspector : new DefaultServerIntrospector();
    }

    /**
     * Fetches the metadata of a service instance.
     *
     * @param serviceId service id
     * @param server    Ribbon server (service instance)
     * @return metadata of the service instance
     */
    public Map<String, String> getServerMetadata(String serviceId, Server server) {
        return serverIntrospector(serviceId).getMetadata(server);
    }
}
/** * Generic Handler for different BaseUiColumn types */ public abstract class BaseColumnConverterImpl implements BaseColumnConverter { protected static final int DEFAULT_COLUMN_WIDTH = 100; protected GuidedDecisionTable52 model; protected AsyncPackageDataModelOracle oracle; protected ColumnUtilities columnUtilities; protected GuidedDecisionTableView.Presenter presenter; protected GridLienzoPanel gridPanel; protected GridLayer gridLayer; @Override public int priority() { return 0; } @Override public void initialise(final GuidedDecisionTable52 model, final AsyncPackageDataModelOracle oracle, final ColumnUtilities columnUtilities, final GuidedDecisionTableView.Presenter presenter) { this.model = PortablePreconditions.checkNotNull("model", model); this.oracle = PortablePreconditions.checkNotNull("oracle", oracle); this.columnUtilities = PortablePreconditions.checkNotNull("columnUtilities", columnUtilities); this.presenter = PortablePreconditions.checkNotNull("presenter", presenter); this.gridLayer = presenter.getModellerPresenter().getView().getGridLayerView(); this.gridPanel = presenter.getModellerPresenter().getView().getGridPanel(); } protected GridColumn<?> newColumn(final BaseColumn column, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { //Get a column based upon the data-type final String type = columnUtilities.getType(column); if (DataType.TYPE_NUMERIC.equals(type)) { return newNumericColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_NUMERIC_BIGDECIMAL.equals(type)) { return newBigDecimalColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_NUMERIC_BIGINTEGER.equals(type)) { return newBigIntegerColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, 
!column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_NUMERIC_BYTE.equals(type)) { return newByteColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_NUMERIC_DOUBLE.equals(type)) { return newDoubleColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_NUMERIC_FLOAT.equals(type)) { return newFloatColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_NUMERIC_INTEGER.equals(type)) { return newIntegerColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_NUMERIC_LONG.equals(type)) { return newLongColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_NUMERIC_SHORT.equals(type)) { return newShortColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_BOOLEAN.equals(type)) { return newBooleanColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (DataType.TYPE_DATE.equals(type)) { return newDateColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else { return newStringColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } } protected GridColumn<?> newValueListColumn(final ConditionCol52 column, final GuidedDecisionTablePresenter.Access access, final 
GuidedDecisionTableView gridWidget) { final boolean isMultipleSelect = OperatorsOracle.operatorRequiresList(column.getOperator()); return new ValueListUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxSingletonDOMElementFactory<String, ListBox>(gridPanel, gridLayer, gridWidget) { @Override public ListBox createWidget() { final ListBox listBox = new ListBox(); listBox.setMultipleSelect(isMultipleSelect); return listBox; } @Override public void toWidget(final GridCell<String> cell, final ListBox widget) { BaseColumnConverterUtilities.toWidget(isMultipleSelect, cell, widget); } @Override public String fromWidget(final ListBox widget) { return BaseColumnConverterUtilities.fromWidget(isMultipleSelect, widget); } @Override public String convert(final String value) { return value; } }, presenter.getValueListLookups(column), isMultipleSelect); } protected GridColumn<?> newValueListColumn(final ActionCol52 column, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new ValueListUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxStringSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter.getValueListLookups(column)); } protected GridColumn<?> newMultipleSelectEnumColumn(final String factType, final String factField, final BaseColumn column, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new EnumMultiSelectUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxSingletonDOMElementFactory<String, ListBox>(gridPanel, gridLayer, gridWidget) { @Override public ListBox createWidget() { final ListBox listBox = new ListBox(); listBox.setMultipleSelect(true); return listBox; } @Override public String convert(final 
String value) { return value; } @Override public void toWidget(final GridCell<String> cell, final ListBox widget) { BaseColumnConverterUtilities.toWidget(true, cell, widget); } @Override public String fromWidget(final ListBox widget) { return BaseColumnConverterUtilities.fromWidget(true, widget); } }, presenter, factType, factField); } protected GridColumn<?> newSingleSelectionEnumColumn(final String factType, final String factField, final DataType.DataTypes dataType, final BaseColumn column, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { if (dataType.equals(DataType.DataTypes.NUMERIC)) { return new EnumSingleSelectNumericUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxNumericSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxNumericSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.NUMERIC_BIGDECIMAL)) { return new EnumSingleSelectBigDecimalUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxBigDecimalSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxBigDecimalSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.NUMERIC_BIGINTEGER)) { return new EnumSingleSelectBigIntegerUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxBigIntegerSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxBigIntegerSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.NUMERIC_BYTE)) { return new EnumSingleSelectByteUiColumn(makeHeaderMetaData(column), 
Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxByteSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxByteSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.NUMERIC_DOUBLE)) { return new EnumSingleSelectDoubleUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxDoubleSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxDoubleSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.NUMERIC_FLOAT)) { return new EnumSingleSelectFloatUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxFloatSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxFloatSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.NUMERIC_INTEGER)) { return new EnumSingleSelectIntegerUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxIntegerSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxIntegerSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.NUMERIC_LONG)) { return new EnumSingleSelectLongUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxLongSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxLongSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.NUMERIC_SHORT)) { return 
new EnumSingleSelectShortUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxShortSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxShortSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else if (dataType.equals(DataType.DataTypes.BOOLEAN)) { return newBooleanColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, gridWidget); } else if (dataType.equals(DataType.DataTypes.DATE)) { return new EnumSingleSelectDateUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxDateSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new DatePickerSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } else { return new EnumSingleSelectStringUiColumn(makeHeaderMetaData(column), Math.max(column.getWidth(), DEFAULT_COLUMN_WIDTH), true, !column.isHideColumn(), access, new ListBoxStringSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), new TextBoxStringSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget), presenter, factType, factField); } } protected GridColumn<BigDecimal> newNumericColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new BigDecimalUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxBigDecimalSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<BigDecimal> newBigDecimalColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final 
GuidedDecisionTableView gridWidget) { return new BigDecimalUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxBigDecimalSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<BigInteger> newBigIntegerColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new BigIntegerUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxBigIntegerSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<Byte> newByteColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new ByteUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxByteSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<Double> newDoubleColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new DoubleUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxDoubleSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<Float> newFloatColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new FloatUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxFloatSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget) { }); } protected GridColumn<Integer> newIntegerColumn(final 
List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new IntegerUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxIntegerSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<Long> newLongColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new LongUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxLongSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<Short> newShortColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new ShortUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxShortSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<Date> newDateColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new DateUiColumn(headerMetaData, width, isResizable, isVisible, access, new DatePickerSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } protected GridColumn<Boolean> newBooleanColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new BooleanUiColumn(headerMetaData, width, isResizable, isVisible, access, new 
CheckBoxDOMElementFactory(gridLayer, gridWidget) { @Override public CheckBox createWidget() { final CheckBox checkBox = super.createWidget(); checkBox.setEnabled(access.isEditable()); return checkBox; } @Override public CheckBoxDOMElement createDomElement(final GridLayer gridLayer, final GridWidget gridWidget, final GridBodyCellRenderContext context) { final CheckBox widget = createWidget(); widget.addMouseDownHandler((e) -> e.stopPropagation()); widget.addKeyDownHandler((e) -> e.stopPropagation()); final CheckBoxDOMElement e = new CheckBoxDOMElement(widget, gridLayer, gridWidget); widget.addClickHandler((event) -> { e.flush(widget.getValue()); gridLayer.batch(); }); return e; } }); } protected GridColumn<String> newStringColumn(final List<GridColumn.HeaderMetaData> headerMetaData, final double width, final boolean isResizable, final boolean isVisible, final GuidedDecisionTablePresenter.Access access, final GuidedDecisionTableView gridWidget) { return new StringUiColumn(headerMetaData, width, isResizable, isVisible, access, new TextBoxStringSingletonDOMElementFactory(gridPanel, gridLayer, gridWidget)); } }
<gh_stars>10-100
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}

-- | Test suite exercising the indexed left fold @ifoldl'@ over the
-- length-indexed vector type from Lib.
module Main
    ( main
    ) where

import Lib

import Control.Exception
import Data.Foldable
import Data.Functor.Const
import Data.Maybe
import Data.Some.Newtype
import System.Timeout
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck

-- When True, also run the slow linearity test; off by default so the
-- suite stays quick.
hardcore :: Bool
hardcore = False

-- Vec with its two type arguments swapped, so the length index can be
-- hidden existentially behind 'Some'.
newtype Flip f y x = Flip
    { unFlip :: f x y
    }

-- Generates the vector lazily.
listToVec :: [a] -> Some (Flip Vec a)
listToVec []     = Some $ Flip Nil
listToVec (x:xs) = case listToVec xs of
    Some (Flip rs) -> Some . Flip $ Cons x rs

-- CPS-style eliminator: runs a rank-2 continuation over the Vec built
-- from the list, so the unknown length index never escapes.
withVec :: [a] -> (forall n. Vec n a -> r) -> r
withVec xs k = case listToVec xs of
    Some (Flip rs) -> k rs

-- Reverses a Vec by left-folding Cons; exists purely to exercise ifoldl'.
reverseVec :: Vec n a -> Vec n a
reverseVec = unFlip . ifoldl' ((Flip .) . flip Cons . unFlip) (Flip Nil)

-- Soundness: reversing via ifoldl' must agree with list reverse, i.e.
-- every element is visited exactly once, in order.
test_ifoldl'_sound :: TestTree
test_ifoldl'_sound =
    testProperty "ifoldl' is sound" . withMaxSuccess 1000 $ \(xs :: [Int]) ->
        withVec xs (toList . reverseVec) == reverse xs

-- Exception used as a bottom accumulator to observe forcing.
data Stop = Stop
    deriving (Show, Exception)

-- Strictness: folding an infinite vector with a throwing initial
-- accumulator must raise Stop promptly (within the 0.5 s timeout)
-- instead of lazily building thunks forever.
test_ifoldl'_strict :: TestTree
test_ifoldl'_strict = testCase "ifoldl' is strict" $ do
    mayStopped <- timeout 500000 $ do
        let body = getConst . ifoldl' (\(Const acc) n -> Const $ acc + n) (throw Stop)
        try @Stop . evaluate $ withVec [1..] body
    isJust mayStopped @?= True

-- Linearity: a 10^7-element fold must complete within the 30 s timeout.
-- For the alternating fold a_k = k - a_(k-1) over [1..n], the result is
-- n `div` 2 when n is even.
test_ifoldl'_linear :: TestTree
test_ifoldl'_linear = testCase "ifoldl' is linear" $ do
    let n = 10^7 :: Integer
    mayRes <- timeout 30000000 $ do
        let body = getConst . ifoldl' (\(Const acc) n -> Const $ n - acc) (Const 0)
        evaluate $ withVec [1..n] body
    mayRes @?= Just (n `div` 2)

main :: IO ()
main = defaultMain . testGroup "all" $
    [ test_ifoldl'_sound
    , test_ifoldl'_strict
    ] ++ [test_ifoldl'_linear | hardcore]
/**
 * Maps a DSpace bundle name to the corresponding METS fileGrp name.
 * The two vocabularies match one-for-one except that the "ORIGINAL"
 * bundle is packaged as the "CONTENT" fileGrp. Metadata bundles need no
 * mapping here since they are not packaged as fileGrps but in *mdSecs.
 *
 * @param bname name of DSpace bundle.
 * @return string name of fileGrp
 */
@Override
public String bundleToFileGrp(String bname) {
    return bname.equals("ORIGINAL") ? "CONTENT" : bname;
}