content
stringlengths
10
4.9M
<filename>src/List/index.tsx export { List, ListItem, ListItemIcon, ListItemText, ListItemAction, ListItemContainer, } from './List'; export type { ListProps, ListItemProps, ListItemIconProps, ListItemTextProps, } from './List';
/** * Struts action class to process all folder-related requests * in the NGBW web application. * * @author Jeremy Carver */ @SuppressWarnings("serial") public class FolderManager extends SessionManager { /*================================================================ * Constants *================================================================*/ private static final Logger logger = Logger.getLogger(FolderManager.class.getName()); // parameter attribute key constants public static final String ID = "id"; // session attribute key constants public static final String WORKING_FOLDER = "workingFolder"; public static final String PARENT_FOLDER = "parentFolder"; /*================================================================ * Properties *================================================================*/ // cached current folder private Folder currentFolder; // folder create/edit form bean private String label; private String description; /*================================================================ * Action methods *================================================================*/ public String list() { clearCurrentFolder(); clearExpandedFolderIds(); return LIST; } public String display() { Folder folder = getRequestFolder(ID); if (folder != null) setCurrentFolder(folder); else folder = getCurrentFolder(); if (folder == null) { reportUserError("You must select a folder to view its details."); return LIST; } else return DISPLAY; } public String create() { if (isRegistered()) { clearWorkingFolder(); Folder folder = getRequestFolder(PARENT_FOLDER); if (folder != null) setParentFolder(folder); else clearParentFolder(); return INPUT; } else { reportUserError("You must register to create a folder."); return back(); } } public String edit() { if (isRegistered()) { Folder folder = getRequestFolder(ID); if (folder == null) folder = getCurrentFolder(); if (folder == null) { reportUserError("You must select a folder to edit it."); return LIST; } else { 
clearParentFolder(); setWorkingFolder(folder); return INPUT; } } else { reportUserError("You must register to edit a folder."); return back(); } } public String save() { if (validateFolder()) { Folder folder = getWorkingFolder(); FolderController controller = getFolderController(); // if there is no working folder, we are trying to create a new folder if (folder == null) try { folder = controller.createFolder(getLabel(), getDescription(), getParentFolder()); if (folder != null) { reportUserMessage("Folder \"" + folder.getLabel() + "\" successfully created."); setCurrentFolder(folder); refreshFolders(); } else reportUserError("Folder \"" + getLabel() + "\" could not be created."); } catch (UserAuthenticationException error) { reportUserError(error.getMessage()); } // otherwise, we are trying to edit an existing folder else try { String oldLabel = folder.getLabel(); folder = controller.editFolder(folder, getLabel(), getDescription()); if (folder != null) { reportUserMessage("Folder \"" + folder.getLabel() + "\" successfully edited."); setCurrentFolder(folder); refreshFolders(); } else reportUserError("Folder \"" + oldLabel + "\" could not be edited."); } catch (UserAuthenticationException error) { reportUserError(error.getMessage()); } clearWorkingFolder(); return back(); } else return INPUT; } public String cancel() { // discard input and return reportUserMessage("Folder not saved."); clearWorkingFolder(); return back(); } public String delete() { Folder folder = getRequestFolder(ID); if (folder == null) folder = getCurrentFolder(); if (folder == null) { reportUserError("You must select a folder to delete it."); return LIST; } else { String folderLabel = folder.getLabel(); boolean isCurrent = isCurrentFolder(folder); try { if (getFolderController().deleteFolder(folder)) { if (isCurrent) clearCurrentFolder(); reportUserMessage("Folder \"" + folderLabel + "\" successfully deleted."); refreshFolders(); if (isCurrent) return LIST; else return back(); } else { 
reportUserError("Folder \"" + folderLabel + "\" could not be deleted."); return back(); } } catch (UserAuthenticationException error) { reportUserError(error.getMessage()); return back(); } } } public String importBwbData() { if (validateLogin()) try { int imported = getFolderController().importBwbData(getUsername(), getCurrentPassword()); refreshFolders(); String count; if (imported > 0) { count = imported + " data item"; if (imported != 1) count += "s were successfully "; else count += " was successfully "; } else count = "No data items were "; addActionMessage(count + "imported from Biology Workbench account \"" + getUsername() + "\"."); return SUCCESS; } catch (WorkbenchException error) { reportUserError(error.getMessage()); return SUCCESS; } catch (UserAuthenticationException error) { reportUserError(error.getMessage()); return INPUT; } else return INPUT; } /*================================================================ * Folder display page property accessor methods *================================================================*/ public Folder getCurrentFolder() { // first try the folder stored in the action if (currentFolder != null) return currentFolder; // if not found, retrieve it from the session else { currentFolder = super.getCurrentFolder(); return currentFolder; } } public void setCurrentFolder(Folder folder) { if (folder == null) clearCurrentFolder(); else { super.setCurrentFolder(folder); currentFolder = folder; } } public void clearCurrentFolder() { super.clearCurrentFolder(); currentFolder = null; } /*================================================================ * Form property accessor methods *================================================================*/ public String getLabel() { return label; } public void setLabel(String label) { this.label = label; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } 
/*================================================================ * Internal property accessor methods *================================================================*/ protected FolderController getFolderController() { FolderController controller = super.getFolderController(); if (controller == null) throw new RuntimeException("A valid WorkbenchSession should " + "always be present throughout the lifespan of this action."); else return controller; } protected Folder getWorkingFolder() { return (Folder)getSessionAttribute(WORKING_FOLDER); } protected void setWorkingFolder(Folder folder) { if (folder == null) clearWorkingFolder(); else { setSessionAttribute(WORKING_FOLDER, folder); setLabel(folder.getLabel()); setDescription(getFolderController().getDescription(folder)); } } protected void clearWorkingFolder() { clearSessionAttribute(WORKING_FOLDER); setLabel(null); setDescription(null); } protected Folder getParentFolder() { return (Folder)getSessionAttribute(PARENT_FOLDER); } protected void setParentFolder(Folder folder) { if (folder == null) clearParentFolder(); else setSessionAttribute(PARENT_FOLDER, folder); } protected void clearParentFolder() { clearSessionAttribute(PARENT_FOLDER); } /*================================================================ * Convenience methods *================================================================*/ protected Folder getRequestFolder(String parameter) { String folderId = getRequestParameter(parameter); if (folderId == null) return null; else try { return getFolderController().getFolder(Long.parseLong(folderId)); } catch (NumberFormatException error) { return null; } } protected boolean validateFolder() { String label = getLabel(); if (label == null || label.equals("")) { addFieldError("label", "Label is required."); } String description = getDescription(); if (description != null && description.length() > 100) { addFieldError("description", "Description cannot be more than 100 characters in length."); } if 
(hasFieldErrors()) return false; else return true; } private String back() { if (getCurrentFolder() != null) return DISPLAY; else return LIST; } }
-- Copyright (c) Facebook, Inc. and its affiliates. module ServiceData.GlobalStats (module ServiceData.GlobalStats) where import Data.ByteString (ByteString) import ServiceData.Types setCounter :: ByteString -> Int -> IO () setCounter _ _ = return () addStatValue :: ByteString -> Int -> IO () addStatValue _ _ = return () addStatValueType :: ByteString -> Int -> ExportType -> IO () addStatValueType _ _ _ = return ()
<gh_stars>0 /* * Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.codebuild.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/codebuild-2016-10-06/GetReportGroupTrend" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class GetReportGroupTrendRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { private String reportGroupArn; private Integer numOfReports; private String trendField; /** * @param reportGroupArn */ public void setReportGroupArn(String reportGroupArn) { this.reportGroupArn = reportGroupArn; } /** * @return */ public String getReportGroupArn() { return this.reportGroupArn; } /** * @param reportGroupArn * @return Returns a reference to this object so that method calls can be chained together. */ public GetReportGroupTrendRequest withReportGroupArn(String reportGroupArn) { setReportGroupArn(reportGroupArn); return this; } /** * @param numOfReports */ public void setNumOfReports(Integer numOfReports) { this.numOfReports = numOfReports; } /** * @return */ public Integer getNumOfReports() { return this.numOfReports; } /** * @param numOfReports * @return Returns a reference to this object so that method calls can be chained together. 
*/ public GetReportGroupTrendRequest withNumOfReports(Integer numOfReports) { setNumOfReports(numOfReports); return this; } /** * @param trendField * @see ReportGroupTrendFieldType */ public void setTrendField(String trendField) { this.trendField = trendField; } /** * @return * @see ReportGroupTrendFieldType */ public String getTrendField() { return this.trendField; } /** * @param trendField * @return Returns a reference to this object so that method calls can be chained together. * @see ReportGroupTrendFieldType */ public GetReportGroupTrendRequest withTrendField(String trendField) { setTrendField(trendField); return this; } /** * @param trendField * @return Returns a reference to this object so that method calls can be chained together. * @see ReportGroupTrendFieldType */ public GetReportGroupTrendRequest withTrendField(ReportGroupTrendFieldType trendField) { this.trendField = trendField.toString(); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getReportGroupArn() != null) sb.append("ReportGroupArn: ").append(getReportGroupArn()).append(","); if (getNumOfReports() != null) sb.append("NumOfReports: ").append(getNumOfReports()).append(","); if (getTrendField() != null) sb.append("TrendField: ").append(getTrendField()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof GetReportGroupTrendRequest == false) return false; GetReportGroupTrendRequest other = (GetReportGroupTrendRequest) obj; if (other.getReportGroupArn() == null ^ this.getReportGroupArn() == null) return false; if (other.getReportGroupArn() != null && other.getReportGroupArn().equals(this.getReportGroupArn()) == false) return false; if (other.getNumOfReports() == null ^ this.getNumOfReports() == null) return false; if (other.getNumOfReports() != null && other.getNumOfReports().equals(this.getNumOfReports()) == false) return false; if (other.getTrendField() == null ^ this.getTrendField() == null) return false; if (other.getTrendField() != null && other.getTrendField().equals(this.getTrendField()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getReportGroupArn() == null) ? 0 : getReportGroupArn().hashCode()); hashCode = prime * hashCode + ((getNumOfReports() == null) ? 0 : getNumOfReports().hashCode()); hashCode = prime * hashCode + ((getTrendField() == null) ? 0 : getTrendField().hashCode()); return hashCode; } @Override public GetReportGroupTrendRequest clone() { return (GetReportGroupTrendRequest) super.clone(); } }
from collections import deque import collections.abc from typing import IO, Mapping, Sequence, Union from . import serialization @serialization.serializable class TrieNode: def __init__(self, value=None, sources=None, _children=None): if _children is None: self._children: Mapping[object, TrieNode] = {} else: self._children = _children self.value = value if sources is not None: self._sources = set(sources) else: self._sources = set() def __repr__(self): return f"{self.__class__.__name__}(value={self.value!r}, sources={self.sources!r}, _children={self._children!r})" def __len__(self): return len(self._children) def __iter__(self): return iter(self._children.values()) def __hash__(self): return hash(self.value) def __eq__(self, other): return self.value == other.value def __ne__(self, other): return not (self == other) def __getitem__(self, key): return self._children[key] def __contains__(self, value): if not isinstance(value, collections.abc.Sequence): first, remainder, n = value, (), 1 else: first, remainder, n = self._car_cdr_len(value) if n == 1: return first in self._children else: return self._find(first, remainder, n) @staticmethod def _car_cdr_len(sequence): n = len(sequence) if n == 0: first = None else: first = sequence[0] return first, sequence[1:], n def _find(self, first, remainder, n): if n == 0: return len(self._sources) > 0 if first not in self._children: return False return self[first]._find(*self._car_cdr_len(remainder)) def find(self, key): if isinstance(key, collections.abc.Sequence): return self._find(*self._car_cdr_len(key)) else: return self._find(key, (), 1) @property def children(self): return dict(self._children) @property def sources(self): return frozenset(self._sources) def _add_child(self, value, sources=None): new_child = TrieNode(value, sources) self._children[value] = new_child return new_child def _add(self, sequence, source): node = self while True: first, sequence, n = self._car_cdr_len(sequence) if n == 0: break if first in node: 
node = node[first] else: node = node._add_child(first) node._sources.add(source) return node def add(self, sequence, source=None): if source is None: source = sequence return self._add(sequence, source) def find_prefix(self, prefix): first, remainder, n = self._car_cdr_len(prefix) if n == 0: yield from iter(self._sources) for child in self: yield from child.find_prefix(prefix) else: if first in self._children: yield from self[first].find_prefix(remainder) def bfs(self): queue = deque([self]) while queue: head = queue.popleft() yield head queue.extend(head._children.values()) def dfs(self): stack = [self] visited = set() while stack: tail = stack.pop() yield tail children = tail._children.values() stack.extend(child for child in children if id(child) not in visited) visited |= set(id(c) for c in children) def serialize(self): return self.value, self._sources, self._children @serialization.serializable class ACNode(TrieNode): """A data structure for implementing the Aho-Corasick multi-string matching algorithm""" def __init__(self, value=None, sources=None, _children=None, parent=None, _fall=None): super().__init__(value=value, sources=sources, _children=_children) self.parent = parent self.fall = _fall def serialize(self): return super().serialize() + (self.parent, self.fall) def __repr__(self): if self.fall is not self: return f"{self.__class__.__name__}(value={self.value!r}, sources={self.sources!r}, _children={self._children!r}), parent={self.parent!r}, _fall={self.fall!r}" else: return f"{self.__class__.__name__}(value={self.value!r}, sources={self.sources!r}, _children={self._children!r}), parent={self.parent!r}, _fall=self" def _add_child(self, value, sources=None): new_child = ACNode(value, sources, parent=self) self._children[value] = new_child return new_child def finalize(self): self.fall = self for n in self.bfs(): if n is self: continue new_fall = n.parent.fall while n.value not in new_fall and new_fall is not self: new_fall = new_fall.fall if n.value 
not in new_fall: # there is no suffix n.fall = self else: n.fall = new_fall[n.value] if n.fall is n: n.fall = self def to_dot(self, include_falls=False): """Returns a Graphviz/Dot representation of this Trie""" dot = "digraph G {\n" node_ids = {} falls = {} for node in self.dfs(): assert node not in node_ids nid = len(node_ids) node_ids[id(node)] = nid dot += f" node{nid}" if node.value is None: dot += f"[label=\"Root\"]" else: if node.value == ord('"'): c = '\\"' elif node.value == ord('\\'): c = '\\\\' elif 32 <= node.value <= 126: c = chr(node.value) else: c = f"\\\\x{hex(node.value)[2:]}" dot += f"[label=\"{c}\"]" dot += ";\n" if node.parent is not None: dot += f" node{node_ids[id(node.parent)]} -> node{nid};\n" if include_falls and node.fall is not None and node.fall is not node: falls[id(node)] = id(node.fall) for nodeid, fallid in falls.items(): dot += f" node{node_ids[nodeid]} -> node{node_ids[fallid]} [style=dashed,label=\"fall\"];\n" dot += "}\n" return dot class MultiSequenceSearch: """A datastructure for efficiently searching a sequence for multiple strings""" def __init__(self, *sequences_to_find): self.trie = ACNode() for seq in sequences_to_find: self.trie.add(seq) self.trie.finalize() def save(self, output_stream: IO): serialization.dump(self.trie, output_stream) @staticmethod def load(input_stream: IO): mss = MultiSequenceSearch() mss.trie = serialization.load(input_stream) print(mss.trie) exit(0) return mss def search(self, source_sequence: Union[Sequence, IO]): """The Aho-Corasick Algorithm""" if hasattr(source_sequence, 'read'): def iterator(): while True: b = source_sequence.read(1) if not b: return yield b[0] else: def iterator(): return iter(source_sequence) state = self.trie for stream_offset, c in enumerate(iterator()): n = state while c not in n and n is not self.trie: n = n.fall if n is self.trie: if c in n: n = n[c] else: n = n[c] state = n while n is not self.trie: yield from ((stream_offset - len(source) + 1, source) for source in 
n.sources) n = n.fall class StartsWithMatcher: def __init__(self, *sequences_to_find): self.trie = TrieNode() for seq in sequences_to_find: self.trie.add(seq) def search(self, source_sequence: Union[Sequence, IO]): if hasattr(source_sequence, 'read'): def iterator(): while True: b = source_sequence.read(1) if not b: return yield b[0] else: def iterator(): return iter(source_sequence) state = self.trie yield from ((0, s) for s in state.sources) for c in iterator(): if c not in state: return state = state[c] yield from ((0, s) for s in state.sources) if __name__ == '__main__': root = TrieNode() root.add('The quick brown fox jumps over the lazy dog') root.add('The quick person') root.add('The best') assert len(list(root.find_prefix('The'))) == 3 assert len(list(root.find_prefix('The quick'))) == 2 assert not root.find('The') assert root.find('The best') mss = MultiSequenceSearch(b'hack', b'hacker', b'crack', b'ack', b'kool') to_search = b'This is a test to see if hack or hacker is in this string.'\ b'Can you crack it? If so, please ack, \'cause that would be kool.' for offset, match in mss.search(to_search): print(offset, match) assert to_search[offset:offset+len(match)] == match swm = StartsWithMatcher(b'hack', b'hacker', b'crack', b'ack', b'kool') for match in swm.search(b'hacker'): print(match) print(ACNode.load(mss.trie.serialize()))
def timedelta_total_seconds(td): """ Needed for python 2.6 compat """ return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10. ** 6) / 10. ** 6
/** * Logging filter which is applied to every incoming request. * * <p>Example: `127.0.0.1 GET [/api/users]` */ @Slf4j public class LoggingFilter implements Filter { @Override public void handle(final Request request, final Response response) { final Stopwatch watch = request.attribute("watch"); log.info( "{} {} [{}] in {} ms", request.ip(), request.requestMethod(), request.pathInfo(), watch.stop().elapsed(TimeUnit.MILLISECONDS)); } }
A hybrid factored frontier algorithm for dynamic Bayesian network models of biopathways Dynamic Bayesian Networks (DBNs) can serve as succinct models of large biochemical networks . To analyze these models, one must compute the probability distribution over system states at a given time point. Doing this exactly is infeasible for large models and hence approximate methods are needed. The Factored Frontier algorithm (FF) is a simple and efficient approximate algorithm that has been designed to meet this need. However the errors it incurs can be quite large. The earlier Boyen-Koller (BK) algorithm can also incur significant errors. To address this, we present here a novel approximation algorithm called the Hybrid Factored Frontier (HFF) algorithm. HFF may be viewed as a parametrized version of FF. At each time slice, in addition to maintaining probability distributions over local states -as FF does- we also maintain explicitly the probabilities of a small number of global states called spikes. When the number of spikes is 0, we get FF and with all global states as spikes, we get the - computationally infeasible- exact inference algorithm. We show that by increasing the number of spikes one can reduce errors while the additional computational effort required is only quadratic in the number of spikes. We have validated the performance of our algorithm on large DBN models of biopathways. Each pathway has more than 30 species and the corresponding DBN has more than 3000 nodes. Comparisons with the performances of FF and BK show that HFF can be a useful and powerful approximation algorithm for analyzing DBN models of biopathways.
<reponame>sleexyz/mnist<filename>src/Mnist.hs module Mnist where -- | taken from https://github.com/mhwombat/backprop-example/blob/master/Mnist.hs import Data.Word import Data.Binary.Get import qualified Data.List.Split as S import qualified Data.ByteString.Lazy as BL data Image = Image { iRows :: Int , iColumns :: Int , iPixels :: [Word8] } deriving (Eq, Show) deserializeLabels :: Get (Word32, Word32, [Word8]) deserializeLabels = do magicNumber <- getWord32be count <- getWord32be labelData <- getRemainingLazyByteString let labels = BL.unpack labelData return (magicNumber, count, labels) readLabels :: FilePath -> IO [Int] readLabels filename = do content <- BL.readFile filename let (_, _, labels) = runGet deserializeLabels content return (map fromIntegral labels) deserializeHeader :: Get (Word32, Word32, Word32, Word32, [[Word8]]) deserializeHeader = do magicNumber <- getWord32be imageCount <- getWord32be r <- getWord32be c <- getWord32be packedData <- getRemainingLazyByteString let len = fromIntegral (r * c) let unpackedData = S.chunksOf len (BL.unpack packedData) return (magicNumber, imageCount, r, c, unpackedData) readImages :: FilePath -> IO [Image] readImages filename = do content <- BL.readFile filename let (_, _, r, c, unpackedData) = runGet deserializeHeader content return (map (Image (fromIntegral r) (fromIntegral c)) unpackedData)
/* Marks given sector as allocated in BAM */ static void mark_sector(image_type type, unsigned char* image, int track, int sector, int free) { if (free != is_sector_free(type, image, track, sector, 0, 0)) { int bam; unsigned char* bitmap; if (type == IMAGE_D81) { if (track <= 40) { bam = linear_sector(type, dirtrack(type), 1 ) * BLOCKSIZE; bitmap = image + bam + (track * 6) + 11; } else { bam = linear_sector(type, dirtrack(type), 2 ) * BLOCKSIZE; bitmap = image + bam + ((track - 40) * 6) + 11; } if (free) { ++bitmap[-1]; } else { --bitmap[-1]; } } else if ((type == IMAGE_D71) && (track > D64NUMTRACKS)) { bam = linear_sector(type, dirtrack(type) + D64NUMTRACKS, 0) * BLOCKSIZE; bitmap = image + bam + (track - D64NUMTRACKS - 1) * 3; if (free) { image[bam + 0xdd + track - D64NUMTRACKS - 1]++; } else { image[bam + 0xdd + track - D64NUMTRACKS - 1]--; } } else { if (((type == IMAGE_D64_EXTENDED_SPEED_DOS) || (type == IMAGE_D64_EXTENDED_DOLPHIN_DOS)) && (track > D64NUMTRACKS)) { track -= D64NUMTRACKS; bam = linear_sector(type, dirtrack(type), 0) * BLOCKSIZE + ((type == IMAGE_D64_EXTENDED_SPEED_DOS) ? BAM_OFFSET_SPEED_DOS : BAM_OFFSET_DOLPHIN_DOS); } else { bam = linear_sector(type, dirtrack(type), 0) * BLOCKSIZE; } bitmap = image + bam + (track * 4) + 1; if (free) { ++image[bam + (track * 4)]; } else { --image[bam + (track * 4)]; } } int byte = sector >> 3; int bit = sector & 7; if (free) { bitmap[byte] |= 1 << bit; } else { bitmap[byte] &= ~(1 << bit); } } }
def split_sample_map(sample_ids, populations, ratios, pop_ids, sample_map_paths): assert sum(ratios) == 1, "ratios must sum to 1" set_ids = [[] for _ in ratios] for p in np.unique(populations): pop_idx = populations == p pop_sample_ids = list(np.copy(sample_ids[pop_idx])) n_pop = len(pop_sample_ids) n_sets = [round(r*n_pop) for r in ratios] while sum(n_sets) > n_pop: n_sets[0] -= 1 while sum(n_sets) < n_pop: n_sets[-1] += 1 for s, r in enumerate(ratios): n_set = n_sets[s] set_ids_idx = np.random.choice(len(pop_sample_ids),n_set,replace=False) set_ids[s] += [[pop_sample_ids.pop(idx), p] for idx in sorted(set_ids_idx,reverse=True)] for i, sample_fname in enumerate(sample_map_paths): write_sample_map(set_ids[i], sample_map_paths[i]) sample_map_file_idxs = [get_sample_map_file_idxs(f, pop_ids) for f in sample_map_paths] return sample_map_file_idxs
/** * Encodes the byte array into base64 string * * @return String a {@link java.lang.String} * @throws IOException * @throws MalformedURLException */ public static String encodeImage() throws MalformedURLException, IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); URLConnection connection; connection = new URL(LINK).openConnection(); connection.connect(); InputStream imageInFile = new BufferedInputStream(connection.getInputStream()); int read = imageInFile.read(); while (read != -1) { baos.write(read); read = imageInFile.read(); } imageInFile.close(); baos.close(); return Base64.getEncoder().encodeToString(baos.toByteArray()); }
Mycoplasma pneumoniae central nervous system infections Purpose of reviewMycoplasma pneumoniae is associated with a wide range of central nervous system diseases, most importantly with childhood encephalitis. This review summarizes and discusses recent findings in the field of M. pneumoniae central nervous system infections in context with previously published findings, with reference to clinical spectrum, pathogenesis, diagnosis, and treatment. Recent findingsFurther insight into the pathogenesis has been provided by studies on cytokine production and autoantibody formation. Some new manifestations have been described (e.g. Kluver–Bucy syndrome, intracranial hypertension). Anecdotal descriptions on the association of M. pneumoniae with uncommon neurologic diseases remain to be confirmed by additional reports, however, especially when aetiologic diagnosis relied exclusively on serology. New knowledge on treatment options targeting the immune system has been provided by isolated reports. Recent diagnostic advances refer to general methods (polymerase chain reaction, serology), without specific reference to neurologic disease. SummaryM. pneumoniae must be considered as causative agent of various neurologic diseases. The recent literature shows, however, that the clinical spectrum of M. pneumoniae central nervous system disease is still not well defined. In addition, the main future challenges are the investigation of the pathogenesis of M. pneumoniae central nervous system disease and the establishment of therapeutic approaches.
<reponame>chenzhekl/iBRDF<filename>src/ssim.cpp #include "ssim.h" namespace { torch::Tensor Gaussian(std::size_t windowSize, float sigma) { std::vector<float> gauss_; for (std::size_t x = 0; x < windowSize; ++x) { gauss_.push_back(std::exp(-(static_cast<float>(x) - windowSize / 2) * (static_cast<float>(x) - windowSize / 2) / (2.0f * sigma * sigma))); } torch::Tensor gauss = torch::tensor(gauss_, torch::kFloat32); return gauss / gauss.sum(); } torch::Tensor CreateWindow(std::size_t windowSize, std::size_t channel) { torch::Tensor window1D = Gaussian(windowSize, 1.5f).unsqueeze(1); torch::Tensor window2D = window1D.mm(window1D.t()).unsqueeze(0).unsqueeze(0); torch::Tensor window = window2D .expand({ static_cast<std::int64_t>(channel), 1, static_cast<std::int64_t>(windowSize), static_cast<std::int64_t>(windowSize) }) .contiguous(); return window; } } torch::Tensor SSIM(const torch::Tensor& x, const torch::Tensor& y, std::size_t windowSize, bool sizeAverage) { torch::Tensor img1 = x.permute({ 2, 0, 1 }).unsqueeze(0); torch::Tensor img2 = y.permute({ 2, 0, 1 }).unsqueeze(0); std::int64_t channel = img1.size(1); torch::Tensor window = CreateWindow(windowSize, channel).to(img1.device()); torch::Tensor mu1 = torch::conv2d(img1, window, {}, 1, windowSize / 2, 1, channel); torch::Tensor mu2 = torch::conv2d(img2, window, {}, 1, windowSize / 2, 1, channel); torch::Tensor mu1Sq = mu1 * mu1; torch::Tensor mu2Sq = mu2 * mu2; torch::Tensor mu1Mu2 = mu1 * mu2; torch::Tensor sigma1Sq = torch::conv2d(img1 * img1, window, {}, 1, windowSize / 2, 1, channel) - mu1Sq; torch::Tensor sigma2Sq = torch::conv2d(img2 * img2, window, {}, 1, windowSize / 2, 1, channel) - mu2Sq; torch::Tensor sigma12 = torch::conv2d(img1 * img2, window, {}, 1, windowSize / 2, 1, channel) - mu1Mu2; float c1 = 0.01f * 0.01f; float c2 = 0.03f * 0.03f; torch::Tensor ssimMap = ((2.0f * mu1Mu2 + c1) * (2.0f * sigma12 + c2)) / ((mu1Sq + mu2Sq + c1) * (sigma1Sq + sigma2Sq + c2)); if (sizeAverage) { return ssimMap.mean(); 
} else { return ssimMap.mean(1).mean(1).mean(1); } }
// Given a request, find the appropriate handler func (g *Goober) GetHandler(r *Request) (node *routeTreeNode, err error) { path := strings.TrimFunc(r.URL.Path, isSlash) if len(path) == 0 { if g.head[r.Method].handler == nil { err := &RouteNotFoundError{Route: r.URL.Path} return g.head[r.Method], err } else { return g.head[r.Method], nil } } parts := strings.Split(path, "/") return walkTree(g.head[r.Method], parts, r) }
/* search val in B-Tree */ void searching(int val, int *pos, struct btreeNode *myNode) { if (!myNode) { return; } if (val < myNode->val[1]) { *pos = 0; } else { for (*pos = myNode->count; (val < myNode->val[*pos] && *pos > 1); (*pos)--); if (val == myNode->val[*pos]) { printf("Given data %d is present in B-Tree", val); return; } } searching(val, pos, myNode->link[*pos]); return; }
def write_struct(self, st): pos = self.tell() self.DEBUG('write_struct: %s: %s bytes: %s', pos, sizeof(st), st) self.write(st)
/** * Ends an account with account number IDI belonging to the customer pNo. When M * n ends an account, the interest rate is calculated as balance * interest / * 100. * * @param pNr personal number * @param accountId accountnumber * @return String Returns zero if no account was removed */ public String closeAccount(String pNr, int accountId) { Customer c = this.getCustomerObj(pNr); if (c != null) return c.closeAccount(accountId); return null; }
Many parents want to move toward positive parenting. They want to abandon controlling, bullying, and being punitive with their children. Instead, they want their relationship to be authentic, open, and genuine. These mothers and fathers ask “how do I do this? Where do I start? What do I do?” There is no easy answer. This massive change requires a paradigm shift toward the way we think about children, and our relationship with them. Society conditions us to be unkind to those in our care. We are taught early on that we must distance ourselves from and control our kids. We are told to dictate babies feeding schedules instead of meeting their biological needs. Rather than being comforted by the familiar scent of the only world they have ever known, our minutes old newborns are taken from us to be poked, prodded, and assaulted. We are taught to override our instincts so that we can ignore the cries and desires of our babies, instead of listening to them and deepening their trust for us. Parents regularly find themselves in a position of feeling inner unrest when it comes to making decisions on behalf of their children. Whether it is listening to the cries of our precious toddler as he sits in isolation begging for our comfort, or our own tears shed while collapsing in the car after leaving our panicked child for his first day of preschool, our intense feelings are trivialized. Those around us tell us that though none of this feels “good” or even “right, that it is all “normal”. We are taught that this pain is necessary for our children to become functioning members of society. What if it’s not? If none of this is true, why are we treating our children in such a horrible and unkind way? It is easy to blame individual parents. This allows us to in-fight and keep the focus on ridiculous urban myths like mommy wars and judgment. It keeps the conversation away from the real issue; the hard and demanding work of changing ourselves for the sake of our children. 
The problem does not lie with individual families, but the most efficient solution does. If we want to improve our relationship with our children, we must improve ourselves. We need to face the stuff that causes us to react to our children, instead of guiding them. We must get comfortable questioning every single thing that we have been taught. We need to learn how to be okay with doing things differently. It requires that we hold ourselves accountable for knowing more, rather than blaming others for not educating us. There is no simple solution to learning to live respectfully with our children. “10 Easy Steps to Living Gently with Children” does not exist, and there is no single blog post that can magically teach us. We must be inspired to commit to making things better in our families. Once this happens, no excuse will be relevant, no reason for failure will be valid, and our hearts will open in a way that allows our brains to be flooded with resources for achieving real change. When we say we want to improve our relationship with our children, do we mean it? The answer to this question has to come from the deepest part of ourselves, and nobody else can answer it for us.
def CreateTrainingData(self):
    """Interactively build per-class training data and merge it into one CSV.

    For each class the user annotates pixels on the loaded pictures
    (via TrainingData.SelectOneClass, presumably a GUI step — confirm);
    the per-class CSVs are then concatenated into
    ``<workingDirectory>/TrainingData/trainData_<N>classes.csv`` with
    columns Class,Image,x,y,B,G,R,H,S,V,L,a,b.
    Shows a MessageWindow and aborts when no pictures are loaded or no
    pixels were selected.
    """
    # No pictures loaded: pop an explanatory dialog and bail out.
    if self.listPictureNames==[] or self.label_NumberPictures.text()=='Path to the directory: (0 picture)':
        messageWin=MessageWindow(parent=self, WorkingDirectory='OK', trainingData='OK', listPictureNames='OK', ROI='OK', text_InfoTestPictures='OK', trainingDataPicture=[], NamesList='Y', selectedpixels='Y',Window='N')
        messageWin.exec_()
    else :
        self.ListTrainingDataFile=[]
        TrainingDataDirectory=self.workingDirectory+'/TrainingData'
        if not os.path.exists(TrainingDataDirectory):
            os.mkdir(TrainingDataDirectory)
        # One BGR drawing color per class (supports up to 5 classes).
        colorList=[(0,0,255),(0,255,0),(255,0,0),(0,255,255),(255,0,255)]
        for i in range(self.classes):
            # Tell the user which class to annotate next.
            InfoWin=InfoWindowTrainindData(parent=None, Class=self.classesNamesList[i])
            InfoWin.exec_()
            t=TrainingData()
            # Collects the per-class sample files for this class.
            ListFile=TrainingData.SelectOneClass(t,self.listPictureNames,TrainingDataDirectory,self.classesNamesList[i],colorList[i],self.displaySize)
            self.ListTrainingDataFile.extend(ListFile)
        if self.ListTrainingDataFile==[]:
            # The user selected no pixels at all: report and stop.
            messageWin=MessageWindow(parent=self, WorkingDirectory='OK', trainingData='OK', listPictureNames='OK', ROI='OK', text_InfoTestPictures='OK', trainingDataPicture='OK', NamesList='Y', selectedpixels='N', Window='N')
            messageWin.exec_()
        else:
            self.newfilecreated='Y'
            # Seed row of 13 zeros so np.concatenate has a 2-D base; it is
            # deleted again after the merge loop below.
            trainDataTab=np.array([[0,0,0,0,0,0,0,0,0,0,0,0,0]])
            for file in self.ListTrainingDataFile:
                f=open(file,"r",newline='')
                TrainData = list(csv.reader(f))
                f.close()
                # Drop the per-file header row before merging.
                TrainData.remove(['Class', 'Image', 'x','y','B','G','R','H','S','V','L','a','b'])
                TrainData=np.asarray(TrainData)
                trainDataTab=np.concatenate((trainDataTab, TrainData), axis=0)
            # Remove the all-zero seed row.
            trainDataTab=np.delete(trainDataTab, (0), axis=0)
            np.savetxt(TrainingDataDirectory+'/trainData_'+str(self.classes)+'classes.csv', trainDataTab, delimiter=",",header='Class,Image,x,y,B,G,R,H,S,V,L,a,b', comments='',fmt='%s')
            # From now on the merged CSV is the single training-data file.
            self.ListTrainingDataFile=[TrainingDataDirectory+'/trainData_'+str(self.classes)+'classes.csv']
            InfoWin2=YourDoneWindow(parent=None, Name=TrainingDataDirectory+'/trainData_'+str(self.classes)+'classes.csv')
            InfoWin2.exec_()
            self.hide()
import java.lang.reflect.InvocationTargetException; import Base.BaseReport; import Impl.ModelChild; import Impl.SubChildReport; /** * */ /** * @author <NAME> * */ public class Main { /** * @param args * @throws InvocationTargetException * @throws IllegalArgumentException * @throws IllegalAccessException * @By : <NAME> * @Author: SEAL */ public static void main(String[] args) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException { BaseReport b = new SubChildReport(); b.prepareData(new ModelChild()); System.out.println("sd"); } }
Moderator Chris Wallace addresses the audience at the final presidential debate between Donald Trump and Hillary Clinton in Las Vegas, Nev. on Oct. 19. (EPA/Gary He) The national debt has only increased in the four years since former Republican presidential nominee Mitt Romney compared it to "a prairie fire" and predicted that the United States would soon confront a fiscal crisis like those that have afflicted Greece and other European countries. That crisis has not materialized, at least not yet, and neither Donald Trump nor Hillary Clinton has made debt reduction a focus of this presidential campaign. That's not hurting them directly with voters -- who tell pollsters they don't worry about debt nearly as much as they used to. But economic forecasters say neglecting debt-reduction will limit the next president's ability to address two of voters' top issues: growing the economy and creating jobs. In the view of these forecasters, one way to grow the economy is by raising taxes to reduce the debt. These forecasters are relying on new computer models that use sophisticated mathematical techniques, collectively known as "dynamic scoring," to simulate the effect of a new policy on the broader economy. According to new dynamic projections from the Tax Policy Center, Clinton's plan to increase taxes would help the economy over the long term if the revenue was used to reduce the deficit, while Trump's plan to reduce taxes would prove counterproductive. The projections are based on a model developed by economists at the University of Pennsylvania that assumes that reduced borrowing by the government will result in cheaper rates of interest for everyone, thereby boosting the economy -- an assumption some economists in both parties would dispute. 
Accumulating debt On that assumption, Clinton's proposals for increased taxes would expand the economy by about 0.5 percent over 20 years, relative to current projections, the model forecasts -- if she used the money to reduce the deficit. Clinton, however, has proposed spending the money on new programs, not reducing the deficit. With this approach, discretionary outlays -- the section of the budget excluding entitlements such as Social Security and Medicare -- would increase by about 5 percent, and the economy would be about 0.5 percent smaller after 20 years, according to the forecast. The difference in the size of the economy amounts to "a smidgen" in the long term, said Kent Smetters, an economist at Pennsylvania. "These aren't necessarily radical changes," he said. Clinton's additional taxes on corporations and the wealthy would discourage investment, limiting the funds available to businesses seeking to expand. Meanwhile, the national debt would continue to accumulate at the same pace as before, so Clinton's agenda would not reduce interest rates. Trump's tax relief would give the economy a boost in the short term, but only at the cost of lowering its trajectory for years to come, according to the model. He has proposed reducing discretionary outlays, excluding the Pentagon's budget, by 1 percent each year -- a schedule known as the "penny plan." This plan would reduce discretionary spending overall by roughly 5 percent over a 10-year period. Trump has claimed that the penny plan, together with increased economic activity from energy, regulatory and trade policy reform, would cancel out any increase in the deficit from the tax relief. According to the forecast, however, the tax cuts would still force the government to borrow more to continue operating, even with the 5 percent reduction in expenditures. Rapidly mounting debt would increase interest rates, eliminating the economic benefits from a reduction in taxes. 
After 20 years, the economy would be 3 percent smaller, compared to current projections. 'Some kind of truth' Some economists disagree with the assumption that growing debt will hold back the economy, including Dean Baker, a founder of the liberal Center for Economic and Policy Research. He argues that the U.S. economy still has plenty of unused capital that will keep interest rates in check, even if the government borrows more. "The idea is that, by us borrowing more money, we’re pulling money away from the private sector that could have gone to productive investment," Baker said. "The government could borrow more money and employ more people, and it could actually lead to higher growth." Trump's economic advisers have been scathing in their response to the model. They have accused the Tax Policy Center of working to boost Clinton and mused about whether the organization leaned on Smetters to manipulate his results to make the Trump plan look worse. They dismiss the interest-rate argument about debt and investment, noting that rates have fallen under the Obama administration even as the debt has grown by trillions of dollars. Most importantly, they have accused Smetters and other economic modelers of deliberately excluding several planks of Trump's economic plan that the campaign's own modeling suggests would supercharge economic growth -- most notably, a more aggressive stance with America's international trade partners, which the campaign believes would reduce America's trade deficit. In an interview last week, Peter Navarro, a University of California-Irvine economist who is a Trump senior adviser, slammed Smetters for his analytical assumptions and for not modeling the full Trump plan: “He’s going out there like he’s speaking some kind of truth to the financial markets,” Navarro said. Smetters and his collaborators say they share these critics' skepticism about the salience of the national debt. 
The model, they note, follows the nonpartisan Congressional Budget Office in projecting that interest rates will remain close to zero for many years, limiting the influence of the debt on the economy. All the same, the increase in borrowing that would likely accompany Trump's policies would be great enough to have an effect, said Len Burman, the Tax Policy Center's director. "Even if you think that market interest rates are going to stay low for a long time, there are risks associated with massive increases in government debt," he said. "That could be disastrous."
/* Clone but .. type 0 clone solver, 1 clone continuous solver Add 2 to say without integer variables which are at low priority Add 4 to say quite likely infeasible so give up easily.*/ OsiSolverInterface * CbcHeuristic::cloneBut(int type) { OsiSolverInterface * solver; if ((type&1) == 0 || !model_->continuousSolver()) solver = model_->solver()->clone(); else solver = model_->continuousSolver()->clone(); #ifdef COIN_HAS_CLP OsiClpSolverInterface * clpSolver = dynamic_cast<OsiClpSolverInterface *> (solver); #endif if ((type&2) != 0) { int n = model_->numberObjects(); int priority = model_->continuousPriority(); if (priority < COIN_INT_MAX) { for (int i = 0; i < n; i++) { const OsiObject * obj = model_->object(i); const CbcSimpleInteger * thisOne = dynamic_cast <const CbcSimpleInteger *> (obj); if (thisOne) { int iColumn = thisOne->columnNumber(); if (thisOne->priority() >= priority) solver->setContinuous(iColumn); } } } #ifdef COIN_HAS_CLP if (clpSolver) { for (int i = 0; i < n; i++) { const OsiObject * obj = model_->object(i); const CbcSimpleInteger * thisOne = dynamic_cast <const CbcSimpleInteger *> (obj); if (thisOne) { int iColumn = thisOne->columnNumber(); if (clpSolver->isOptionalInteger(iColumn)) clpSolver->setContinuous(iColumn); } } } #endif } #ifdef COIN_HAS_CLP if ((type&4) != 0 && clpSolver) { int options = clpSolver->getModelPtr()->moreSpecialOptions(); clpSolver->getModelPtr()->setMoreSpecialOptions(options | 64); } #endif return solver; }
/** * Delete cells of the range. To delete a row, you have to call {@link Range#toRowRange()} first, to delete a column, you have to call {@link Range#toColumnRange()} first. * @param range the range to delete * @param shift the shift direction when deleting. */ public static void delete(Range range, DeleteShift shift) { if(range.isProtected()) { if (DeleteShift.LEFT.equals(shift)) { if (!range.getSheetProtection().isDeleteColumnsAllowed()) return; } else if (!range.getSheetProtection().isDeleteRowsAllowed()) { return; } } range.delete(shift); }
// Cette methode permettent de multiplier un tableau a plusieurs dimensions par un tableau // de dimension inferieure (par exemple un tableau a trois composantes par un tableau a une composante). // Chaque valeur du tableau vx est utilisee pour plusieurs items consecutifs du tableau resu // (le nombre de fois est le rapport des line_size() des deux tableaux). // resu.line_size() doit etre un multiple int de vx.line_size() et les descripteurs doivent etre identiques. // Cas particulier: vx peut contenir une constante unique (size_array() == 1 et descripteur nul), // dans ce cas c'est un simple produit par la constante void tab_multiply_any_shape(DoubleVect& resu, const DoubleVect& vx, Mp_vect_options opt) { if (vx.size_array() == 1 && !vx.get_md_vector().non_nul()) { double x = vx[0]; operator_multiply(resu, x, opt); } else if (vx.line_size() == resu.line_size()) { operator_multiply(resu, vx, opt); } else { tab_multiply_any_shape_(resu, vx, opt); } }
// Maps positive doubles to exponential-histogram bucket indices with
// base = 2^(2^-scale). This indexer only handles positive numbers; behavior
// on zero and negative numbers is undefined.
public abstract class ScaledExpIndexer implements ScaledIndexer {

  // Highest resolution. All mantissa bits are used to resolve buckets.
  public static final int MAX_SCALE = DoubleFormat.MANTISSA_BITS;

  // Lowest resolution. At min scale, base = 2 ^ (2^11). Because 2^11 covers
  // the absolute value from max double exponent 1023 to min exponent -1074
  // (including subnormals), there are only two buckets:
  // Bucket 0 for values >= 1, bucket -1 for values < 1.
  public static final int MIN_SCALE = -DoubleFormat.EXPONENT_BITS;

  // Max scale where index will fit in a signed 32 bit integer.
  public static final int MAX_SINT32_INDEX_SCALE = 20;

  // Current resolution; fixed after construction in this class.
  protected int scale;

  public ScaledExpIndexer(final int scale) {
    this.scale = scale;
  }

  // base = 2 ^ (2 ^ -scale)
  public static double getBase(final int scale) {
    return Math.pow(2, Math.pow(2, -scale));
  }

  // Computes base^index without materializing base.
  // When scale is high, base is very close to 1, in the binary form
  // 1.000000XXXX with many leading zero bits before the non-zero portion.
  // At scale N only about 52 - N significant bits remain, so any inaccuracy
  // in base would be magnified by value<->index mapping; hence base is never
  // used as an intermediate result here.
  //
  // LIMITATION: "index" must not have more than 52 significant bits,
  // because this function converts it to double for computation.
  public static double scaledBasePower(final int scale, final long index) {
    // result = base ^ index
    //        = (2^(2^-scale))^index
    //        = 2^(2^-scale * index)
    //        = 2^(index * 2^-scale)
    return Math.pow(2, Math.scalb((double) index, -scale));
  }

  // Largest index, i.e. the bucket of Double.MAX_VALUE.
  public static long getMaxIndex(final int scale) {
    // Scale > 0: max exponent followed by max subbucket index.
    // Scale <= 0: max exponent with -scale bits truncated.
    return scale > 0
        ? (((long) Double.MAX_EXPONENT << scale) | ((1L << scale) - 1))
        : ((long) Double.MAX_EXPONENT >>> -scale);
  }

  // Smallest index for normal numbers (down to Double.MIN_NORMAL).
  public static long getMinIndexNormal(final int scale) {
    return getMinIndex(scale, Double.MIN_EXPONENT);
  }

  // Smallest index overall (down to Double.MIN_VALUE, i.e. subnormals).
  public static long getMinIndex(final int scale) {
    return getMinIndex(scale, DoubleFormat.MIN_SUBNORMAL_EXPONENT);
  }

  // Index of 1.0 * 2^exponent.
  public static long getMinIndex(final int scale, final int exponent) {
    // Scale > 0: min exponent followed by min subbucket index, which is 0.
    // Scale <= 0: min exponent with -scale bits truncated.
    return scale > 0
        ? (((long) exponent << scale))
        : ((long) exponent >> -scale); // Use ">>" to preserve sign of exponent.
  }

  @Override
  public int getScale() {
    return scale;
  }

  // Returns the relative error upper bound for percentiles generated from
  // this histogram: relativeError = |reported - actual| / reported.
  //
  // When a requested percentile falls into a bucket, the actual value can be
  // anywhere within that bucket. The percentile function returns the bucket
  // midpoint for a symmetric +/- error margin, so the bound is
  // (bucketWidth / 2) / bucketMiddle = (base - 1) / (base + 1).
  @Override
  public double getPercentileRelativeError() {
    final double base = getBase();
    return (base - 1) / (base + 1);
  }

  public double getBase() {
    return getBase(scale);
  }

  public long getMaxIndex() {
    return getMaxIndex(scale);
  }

  public long getMinIndexNormal() {
    return getMinIndexNormal(scale);
  }

  public long getMinIndex() {
    return getMinIndex(scale);
  }

  // Upper bound of the bucket: the start of the next bucket, clamped to
  // Double.MAX_VALUE for the last bucket.
  @Override
  public double getBucketEnd(final long index) {
    return index == getMaxIndex(scale) ? Double.MAX_VALUE : getBucketStart(index + 1);
  }
}
// IsGlobalRORole is used to check if this context is global read only role func (context *Context) IsGlobalRORole() bool { if context == nil { return true } return format.ContainsString(context.roles, GlobalReadOnlyRole) }
<filename>src/main/java/at/uibk/dps/ee/io/json/ResourceInformationJsonFile.java package at.uibk.dps.ee.io.json; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import com.google.gson.Gson; /** * The {@link ResourceInformationJsonFile} class models the information about * the resources which can be used for the enactment of the different resource * types. * * @author <NAME> */ public class ResourceInformationJsonFile extends ArrayList<FunctionTypeEntry> { private static final long serialVersionUID = 1L; /** * Converts the json file found in the provided path to a * {@link ResourceInformationJsonFile}. * * @param filePath the path to the json file * @return the {@link ResourceInformationJsonFile} built from the file */ public static ResourceInformationJsonFile readFromFile(final String filePath) { final Gson gson = new Gson(); String jsonString; try { jsonString = Files.readString(Paths.get(filePath)); return gson.fromJson(jsonString, ResourceInformationJsonFile.class); } catch (IOException ioExc) { throw new IllegalStateException("IOException when trying to read resource input.", ioExc); } } }
Quantitative analysis of liposome-cell interactions in vitro: rate constants of binding and endocytosis with suspension and adherent J774 cells and human monocytes. We have characterized the parameters describing the total association (uptake) of liposomes with murine macrophage-like cell line J774 cells and human peripheral blood monocytes at 4 degrees C and at 37 degrees C with or without inhibitors of endocytosis. The uptake of neutral liposomes composed of phosphatidylcholine (PC)/cholesterol (Chol) (2:1 mole ratio) is about 10-fold lower than that of negatively charged liposomes composed of phosphatidylserine (PS)/PC/Chol (1:1:1). However, the rate of uptake of PC/Chol liposomes at 37 degrees C is still 10-fold higher than that by fluid-phase pinocytosis. The uptake of liposomes, which is mediated by high-affinity binding to the cell surface binding sites and subsequent endocytosis, could be simulated and predicted by model calculations employing mass action kinetics. The number of binding sites, affinity constants of binding at 37 degrees C and 4 degrees C, on- and off-rate constants of binding, and endocytic rate constants for both types of liposomes were determined. The number of binding sites and the binding constants for PS/PC/Chol liposomes binding to J774 cells is severalfold to an order of magnitude higher than that for PC/Chol liposomes, but the rate constants at which they are endocytosed following binding to the cells are similar for both liposome types. The binding of liposomes, especially PS/PC/Chol, to J774 cells and monocytes is greatly enhanced by adherence of cells to plastic substratum and is also increased by maturation/differentiation in the case of monocytes. 
Our quantitative analysis indicates that the binding and endocytosis of liposomes, especially PS-containing liposomes, is mediated by binding sites that have strong affinity, comparable to or about an order of magnitude smaller than other known particle-cell interactions with specific receptors such as virus and lipoproteins binding to cells.
import { IListUsersResponse } from "../../../src/useCases/users/ports/IListUsersResponse"; import { IUpdatedUserData } from "../../../src/useCases/users/ports/IUpdatedUserData"; import { IUserData } from "../../../src/useCases/users/ports/IUserData"; import { IUsersRepository } from "../../../src/useCases/users/ports/IUsersRepository"; export class UsersRepositoryInMemory implements IUsersRepository { users: IUserData[] = []; async create(data: IUserData): Promise<Omit<IUserData, "password" | "isAdmin">> { this.users.push(data); const user = { id: data.id, name: data.name, email: data.email, createdAt: data.createdAt, updatedAt: data.updatedAt, }; return user; }; async findByEmail(email: string): Promise<IUserData | null> { const user = this.users.find((user) => user.email === email); return user || null; }; async findById(id: string): Promise<IUserData | null> { const user = this.users.find((user) => user.id === id); return user || null; }; async update(data: IUserData): Promise<IUpdatedUserData> { const userIndex = this.users.findIndex(user => user.id === data.id); this.users[userIndex] = data; const userUpdated = { id: this.users[userIndex].id, name: this.users[userIndex].name, email: this.users[userIndex].email, } return userUpdated; }; async listAll(): Promise<IListUsersResponse> { const usersFormatted = this.users.map(user => { return { id: user.id, name: user.name, email: user.email, createdAt: user.createdAt, updatedAt: user.updatedAt, } }); return usersFormatted; } async deleteOne(id: string): Promise<void> { const userIndex = this.users.findIndex(user => user.id === id); this.users.splice(userIndex, 1); } };
package analysis

import (
	"errors"
	"fmt"
	"go/types"
	"regexp"
	"strings"
)

// getFuncName derives a conversion-function name "Conv<Src>To<Dst>" from the
// two types, flattening their printed forms: "." is dropped, "[]" becomes
// "S" (slice) and "*" becomes "P" (pointer).
// When either type cannot be formatted, a name is still returned together
// with a non-nil error ("cannot type").
func (fm *FuncMaker) getFuncName(dstType, srcType types.Type) (string, error) {
	dstName, derr := fm.formatPkgType(dstType)
	srcName, serr := fm.formatPkgType(srcType)
	var err error
	if derr != nil || serr != nil {
		err = errors.New("cannot type")
	}

	re := regexp.MustCompile(`\.`)
	srcName = string(re.ReplaceAll([]byte(srcName), []byte("")))
	dstName = string(re.ReplaceAll([]byte(dstName), []byte("")))
	re = regexp.MustCompile(`\[\]`)
	srcName = string(re.ReplaceAll([]byte(srcName), []byte("S")))
	dstName = string(re.ReplaceAll([]byte(dstName), []byte("S")))
	re = regexp.MustCompile(`\*`)
	srcName = string(re.ReplaceAll([]byte(srcName), []byte("P")))
	dstName = string(re.ReplaceAll([]byte(dstName), []byte("P")))

	return fmt.Sprintf("Conv%sTo%s", srcName, dstName), err
}

// isAlreadyExist reports whether funcName is used anywhere in the tree of
// FuncMakers this one belongs to.
func (fm *FuncMaker) isAlreadyExist(funcName string) bool {
	// 1. Walk back up to the root FuncMaker.
	var root *FuncMaker
	var goBackRoot func(*FuncMaker) *FuncMaker
	goBackRoot = func(fm *FuncMaker) *FuncMaker {
		if fm.parentFunc == nil {
			return fm
		}
		return goBackRoot(fm.parentFunc)
	}
	root = goBackRoot(fm)

	// 2. Depth-first search the whole tree for a matching name.
	var inspectSamaFuncName func(*FuncMaker) bool
	inspectSamaFuncName = func(fm *FuncMaker) bool {
		if fm.funcName == funcName {
			return true
		}
		if fm.childFunc != nil {
			for _, child := range *fm.childFunc {
				exist := inspectSamaFuncName(child)
				if exist {
					return true
				}
			}
		}
		return false
	}
	return inspectSamaFuncName(root)
}

// varVisiable reports whether the variable is accessible from fm's package
// (same package, or exported).
func (fm *FuncMaker) varVisiable(v *types.Var) bool {
	if fm.samePkg(v.Pkg()) {
		return true
	}
	return v.Exported()
}

// typeNameVisiable reports whether the type name is accessible from fm's
// package (same package, or exported).
func (fm *FuncMaker) typeNameVisiable(v *types.TypeName) bool {
	if fm.samePkg(v.Pkg()) {
		return true
	}
	return v.Exported()
}

// samePkg reports whether pkg is fm's own package (compared by import path).
func (fm *FuncMaker) samePkg(pkg *types.Package) bool {
	return fm.pkg.Path() == pkg.Path()
}

// formatPkgString strips directory prefixes from a printed type string and,
// when the remaining qualifier is fm's own package, drops the qualifier
// entirely.
// TODO fix
func (fm *FuncMaker) formatPkgString(str string) string {
	// TODO fix only pointer, slice and badic
	re := regexp.MustCompile(`[\w\./]*/`)
	last := string(re.ReplaceAll([]byte(str), []byte("")))
	tmp := strings.Split(last, ".")
	// Package qualifier with any leading "[]"/"*" markers removed.
	p := string(regexp.MustCompile(`\[\]|\*`).ReplaceAll([]byte(tmp[0]), []byte("")))
	path := strings.Split(fm.pkg.Path(), "/")
	if p == path[len(path)-1] {
		re := regexp.MustCompile(`[\w]*\.`)
		return string(re.ReplaceAll([]byte(last), []byte("")))
	}
	return last
}

// formatPkgType formats a type for use in generated code; named types that
// are not visible from fm's package yield a "not exported" error.
func (fm *FuncMaker) formatPkgType(t types.Type) (string, error) {
	if namedT, ok := t.(*types.Named); ok {
		if !fm.typeNameVisiable(namedT.Obj()) {
			return "", errors.New("not exported")
		}
	}
	return fm.formatPkgString(t.String()), nil
}

// checkHistory guards against infinite recursion: it reports whether the
// (dst, src) type pair has already been visited.
func checkHistory(dst, src types.Type, history [][2]types.Type) bool {
	for _, his := range history {
		if types.Identical(his[0], dst) && types.Identical(his[1], src) {
			return true
		}
	}
	return false
}

// dstWritten reports whether dstSelector (or one of its prefixes) has
// already been written to.
func (fm *FuncMaker) dstWritten(dstSelector string) bool {
	_, ok := fm.dstWrittenSelector[dstSelector]
	if ok {
		return true
	}
	// Prefix match against previously written selectors.
	// TODO fix pointer selector
	for sel := range fm.dstWrittenSelector {
		// TODO fix replace * ( ) [ ] . -> \* \( ... (only "*" is escaped)
		// NOTE(review): the pattern escapes the caret (`\^`), so it matches
		// a literal '^' character rather than anchoring at the start of
		// dstSelector — this branch likely never matches; confirm intent.
		re := regexp.MustCompile(fmt.Sprintf(`\^%s[\.\(\[]`, strings.Replace(sel, "*", "\\*", -1)))
		written := re.Match([]byte(dstSelector))
		if written {
			return true
		}
	}
	return false
}
package com.example.demo.repository; import com.example.demo.model.Person; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.List; import java.util.Optional; @Service public class PersonService { private final PersonRepository personRepository; @Autowired public PersonService(PersonRepository personRepository) { this.personRepository = personRepository; } public Optional<Person> getPerson(Long personId) { return personRepository.findById(personId); } public List<Person> getPersons() { return personRepository.findAll(); } public void addPerson(Person person) { personRepository.save(person); } public void updatePerson(Person person, Long personId) { personRepository.save(person); } public Optional<Person> getPersonByName(String name) { return personRepository.findPersonByName(name); } public List<Person> getPersonByAge(Integer age) { return personRepository.findPersonByAge(age); } }
def create_obs_message(self, average_flow_mag, average_flow_dir, timestamp): data = { 'module_id': self.module_id, 'type_module': self.type_module, 'camera_ids': [self.cam_id], 'average_flow_magnitude': average_flow_mag, 'average_flow_direction': average_flow_dir, 'flow_frame_byte_array': '', 'timestamp': str(timestamp), } message = json.dumps(data) return message
Evaluation of the Effect of Topical Application of Nigella sativa on Acute Radiation-Induced Nasal Mucositis Abstract The goal of this study was to demonstrate the effect of radiotherapy (RT) on nasal mucosa in rats and to evaluate the radioprotective effects of the topical application of black seed oil (Nigella sativa ) to treat acute radiation-induced nasal mucositis. A total of 18 rats were randomized into 3 groups, with 6 animals per group. The rats in group 1 were topically administered saline in the nasal cavity after sham irradiation. Group 2 received saline at the same dose after irradiation. Group 3 was given NS after irradiation. The rats in groups 2 and 3 were irradiated with a single dose of 40 Gy to the nasal and paranasal area. Only one drop of saline (0.05 mL) was applied to each nostril in the first, second, and third days after RT in groups 1 and 2. One drop of cold press NS (0.05 mL) was applied to each nostril in group 3. Fourteen days after irradiation, the nasal mucosal tissues were excised for histopathological evaluation. Vascular dilatation, inflammatory cell infiltration, superficial erosion, and formation of exudates were classified according to the severity. No evidence of mucositis was observed in group 1. Of all the parameters the only statistically significant difference between groups 2 and 3 were observed for “superficial erosion" (P < 0.05). Overall microscopic observations in the NS-treated group were better than in group 2. The preliminary results of our study have shown that local application of NS to the nasal mucosa may be an effective treatment of acute nasal mucositis due to RT.
def make_model_3d(self):
    """Build a PPO model holding a live actor and an 'old' actor snapshot.

    The old actor is initialized as an exact weight copy of the live one
    (via ``load_state_dict``), so both networks start identical. An MSE
    loss module is attached to the returned model.
    """
    live_actor = PPONetwork3D(self.model_container, self.obs)
    snapshot_actor = PPONetwork3D(self.model_container, self.obs)
    snapshot_actor.load_state_dict(live_actor.state_dict())

    model = PPOModel()
    model.actor = live_actor
    model.actor_old = snapshot_actor
    model.loss = nn.MSELoss()
    return model
/**
 * One entry of a {@code HeightTab}: stores a single altitude value.
 */
public class Cell {

    private int height;

    /**
     * Creates a cell holding the given altitude.
     *
     * @param initialHeight the altitude of this cell (expected between 0 and 1000)
     */
    public Cell(int initialHeight) {
        this.height = initialHeight;
    }

    /** @return the current altitude of this cell */
    public int getHeight() {
        return height;
    }

    /** @param newHeight the new altitude, replacing the current value */
    public void setHeight(int newHeight) {
        this.height = newHeight;
    }

    /**
     * Shifts the altitude by the given amount, unlike
     * {@link #setHeight(int)} which replaces it outright.
     *
     * @param delta the value added to the current altitude
     */
    public void addHeight(int delta) {
        this.height += delta;
    }
}
/******************************************************************************* * * Copyright (C) 2010 <NAME> <<EMAIL>> * All Rights Reserved. * * Borrowed heavily from gmond/modules/mod_python.c * * This code is part of a perl module for ganglia. * * Contributors : <NAME> <<EMAIL>> * * Portions Copyright (C) 2007 Novell, Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * - Neither the name of Novell, Inc. nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL Novell, Inc. OR THE CONTRIBUTORS * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
 * ****************************************************************************/

#include <EXTERN.h>
#include <perl.h>
#include <XSUB.h>
#include <gm_metric.h>
#include <gm_msg.h>
#include <string.h>
#include <apr_tables.h>
#include <apr_strings.h>
#include <apr_lib.h>
#include <dirent.h>

/*
 * Declare ourselves so the configuration routines can find and know us.
 * We'll fill it in at the end of the module.
 */
mmodule perl_module;

/* Per-metric bookkeeping: which module, callback and interpreter serve it. */
typedef struct {
    char *pcb;              /* The metric call back function */
    char *mod_name;         /* Name of the module */
    PerlInterpreter *perl;  /* Perl interpreter */
} mapped_info_t;

/* One metric descriptor as returned by a perl module's metric_init. */
typedef struct {
    char mname[128];
    int tmax;
    char vtype[32];
    char units[64];
    char slope[32];
    char format[64];
    char desc[512];
    char groups[512];
    apr_table_t *extra_data;
    char pcb[128];
} pl_metric_init_t;

static apr_pool_t *pool;

static apr_array_header_t *metric_info = NULL;
static apr_array_header_t *metric_mapping_info = NULL;
static apr_status_t perl_metric_cleanup ( void *data);

/* Scratch buffer returned by is_perl_module(); overwritten on each call. */
char modname_bfr[PATH_MAX];

/* Return the module name (file name without extension) if fname ends in
 * ".pl", NULL otherwise. The result lives in the static modname_bfr. */
static char* is_perl_module(const char* fname)
{
    char* p = strrchr(fname, '.');
    if (!p) {
        return NULL;
    }

    if (strcmp(p, ".pl")) {
        return NULL;
    }

    strncpy(modname_bfr, fname, p-fname);
    modname_bfr[p-fname] = 0;
    return modname_bfr;
}

/* Look up the gmond.conf "module" section for modname. Returns NULL when
 * the module is not configured, not marked language "perl", or disabled. */
static cfg_t* find_module_config(char *modname)
{
    cfg_t *modules_cfg;
    int j;

    modules_cfg = cfg_getsec(perl_module.config_file, "modules");
    for (j = 0; j < cfg_size(modules_cfg, "module"); j++) {
        char *modName, *modLanguage;
        int modEnabled;
        cfg_t *plmodule = cfg_getnsec(modules_cfg, "module", j);

        /* Check the module language to make sure that
           the language designation is perl. */
        modLanguage = cfg_getstr(plmodule, "language");
        if (!modLanguage || strcasecmp(modLanguage, "perl"))
            continue;

        modName = cfg_getstr(plmodule, "name");
        if (strcasecmp(modname, modName)) {
            continue;
        }

        /* Check to make sure that the module is enabled. */
        modEnabled = cfg_getbool(plmodule, "enabled");
        if (!modEnabled)
            continue;

        return plmodule;
    }
    return NULL;
}

/* Build a Perl hash of the module's configured param name/value pairs. */
static HV* build_params_hash(cfg_t *plmodule)
{
    int k;
    HV *params_hash;

    params_hash = newHV();

    if (plmodule && params_hash) {
        for (k = 0; k < cfg_size(plmodule, "param"); k++) {
            cfg_t *param;
            char *name, *value;
            SV *sv_value;

            param = cfg_getnsec(plmodule, "param", k);
            name = apr_pstrdup(pool, param->title);
            value = apr_pstrdup(pool, cfg_getstr(param, "value"));
            sv_value = newSVpv(value, 0);

            if (name && sv_value) {
                /* Silence "value computed is not used" warning */
                (void)hv_store(params_hash, name, strlen(name), sv_value, 0);
            }
        }
    }
    return params_hash;
}

/* Copy one descriptor hash (an element returned by metric_init) into minfo.
 * Known keys fill the fixed fields; any other key/value pair is collected
 * into minfo->extra_data as metric metadata.
 *
 * NOTE(review): strncpy() never returns NULL, so every "if (!strncpy(...))"
 * default/err_msg branch below is dead code; missing keys simply leave the
 * memset() zeroes in place. Confirm whether the defaults were ever intended
 * to apply. */
static void fill_metric_info(HV* plhash, pl_metric_init_t* minfo, char *modname, apr_pool_t *pool)
{
    char *metric_name = "";
    char *key;
    SV* sv_value;
    I32 ret;

    memset(minfo, 0, sizeof(*minfo));

    /* create the apr table here */
    minfo->extra_data = apr_table_make(pool, 2);

    hv_iterinit(plhash);
    while ((sv_value = hv_iternextsv(plhash, &key, &ret))) {
        if (!strcasecmp(key, "name")) {
            STRLEN len;
            char *value = SvPV(sv_value, len);
            if (!strncpy(minfo->mname, value, sizeof(minfo->mname))) {
                err_msg("[PERL] No metric name given in module [%s].\n", modname);
            }
            else
                metric_name = minfo->mname;
            continue;
        }
        if (!strcasecmp(key, "call_back")) {
            STRLEN len;
            char *value = SvPV(sv_value, len);
            if (!strncpy(minfo->pcb, value, sizeof(minfo->pcb))) {
                err_msg("[PERL] No perl call back given for metric [%s] in module [%s]. Will not call\n", metric_name, modname);
            }
            continue;
        }
        if (!strcasecmp(key, "time_max")) {
            int value = SvIV(sv_value);
            if (!(minfo->tmax = value)) {
                minfo->tmax = 60;
                err_msg("[PERL] No time max given for metric [%s] in module [%s]. Using %d.\n", metric_name, modname, minfo->tmax);
            }
            continue;
        }
        if (!strcasecmp(key, "value_type")) {
            STRLEN len;
            char *value = SvPV(sv_value, len);
            if (!strncpy(minfo->vtype, value, sizeof(minfo->vtype))) {
                strcpy (minfo->vtype, "uint");
                err_msg("[PERL] No value type given for metric [%s] in module [%s]. Using %s.\n", metric_name, modname, minfo->vtype);
            }
            continue;
        }
        if (!strcasecmp(key, "units")) {
            STRLEN len;
            char *value = SvPV(sv_value, len);
            if (!strncpy(minfo->units, value, sizeof(minfo->units))) {
                strcpy (minfo->units, "unknown");
                err_msg("[PERL] No metric units given for metric [%s] in module [%s]. Using %s.\n", metric_name, modname, minfo->units);
            }
            continue;
        }
        if (!strcasecmp(key, "slope")) {
            STRLEN len;
            char *value = SvPV(sv_value, len);
            if (!strncpy(minfo->slope, value, sizeof(minfo->slope))) {
                strcpy (minfo->slope, "both");
                err_msg("[PERL] No slope given for metric [%s] in module [%s]. Using %s.\n", metric_name, modname, minfo->slope);
            }
            continue;
        }
        if (!strcasecmp(key, "format")) {
            STRLEN len;
            char *value = SvPV(sv_value, len);
            if (!strncpy(minfo->format, value, sizeof(minfo->format))) {
                strcpy (minfo->format, "%u");
                err_msg("[PERL] No format given for metric [%s] in module [%s]. Using %s.\n", metric_name, modname, minfo->format);
            }
            continue;
        }
        if (!strcasecmp(key, "description")) {
            STRLEN len;
            char *value = SvPV(sv_value, len);
            if (!strncpy(minfo->desc, value, sizeof(minfo->desc))) {
                strcpy (minfo->desc, "unknown metric");
                err_msg("[PERL] No description given for metric [%s] in module [%s]. Using %s.\n", metric_name, modname, minfo->desc);
            }
            continue;
        }
        if (!strcasecmp(key, "groups")) {
            STRLEN len;
            char *value = SvPV(sv_value, len);
            if (!strncpy(minfo->groups, value, sizeof(minfo->groups))) {
                strcpy (minfo->groups, "");
            }
            continue;
        }

        /* Unrecognized key: keep it as extra metadata. */
        STRLEN len;
        char *value;
        if (!(value = SvPV(sv_value, len))) {
            err_msg("[PERL] Extra data key [%s] could not be processed.\n", key);
        }
        else {
            apr_table_add(minfo->extra_data, key, value);
        }
    }

    /*err_msg("name: %s", minfo->mname);
    err_msg("callback: %s", minfo->pcb);
    err_msg("time_max: %d", minfo->tmax);
    err_msg("value_type: %s", minfo->vtype);
    err_msg("units: %s", minfo->units);
    err_msg("slope: %s", minfo->slope);
    err_msg("format: %s", minfo->format);
    err_msg("description: %s", minfo->desc);
    err_msg("groups: %s", minfo->groups);*/
}

/* Translate a filled pl_metric_init_t into gmond's Ganglia_25metric,
 * including group membership and any extra metadata. */
static void fill_gmi(Ganglia_25metric* gmi, pl_metric_init_t* minfo)
{
    char *s, *lasts;
    int i;
    const apr_array_header_t *arr = apr_table_elts(minfo->extra_data);
    const apr_table_entry_t *elts = (const apr_table_entry_t *)arr->elts;

    /* gmi->key will be automatically assigned by gmond */
    gmi->name = apr_pstrdup(pool, minfo->mname);
    gmi->tmax = minfo->tmax;
    if (!strcasecmp(minfo->vtype, "string")) {
        gmi->type = GANGLIA_VALUE_STRING;
        gmi->msg_size = UDP_HEADER_SIZE+MAX_G_STRING_SIZE;
    }
    else if (!strcasecmp(minfo->vtype, "uint")) {
        gmi->type = GANGLIA_VALUE_UNSIGNED_INT;
        gmi->msg_size = UDP_HEADER_SIZE+8;
    }
    else if (!strcasecmp(minfo->vtype, "int")) {
        gmi->type = GANGLIA_VALUE_INT;
        gmi->msg_size = UDP_HEADER_SIZE+8;
    }
    else if (!strcasecmp(minfo->vtype, "float")) {
        gmi->type = GANGLIA_VALUE_FLOAT;
        gmi->msg_size = UDP_HEADER_SIZE+8;
    }
    else if (!strcasecmp(minfo->vtype, "double")) {
        gmi->type = GANGLIA_VALUE_DOUBLE;
        gmi->msg_size = UDP_HEADER_SIZE+16;
    }
    else {
        gmi->type = GANGLIA_VALUE_UNKNOWN;
        gmi->msg_size = UDP_HEADER_SIZE+8;
    }

    gmi->units = apr_pstrdup(pool, minfo->units);
    gmi->slope = apr_pstrdup(pool, minfo->slope);
    gmi->fmt = apr_pstrdup(pool, minfo->format);
    gmi->desc = apr_pstrdup(pool, minfo->desc);

    MMETRIC_INIT_METADATA(gmi, pool);
    for (s=(char *)apr_strtok(minfo->groups, ",", &lasts);
         s!=NULL; s=(char *)apr_strtok(NULL, ",", &lasts)) {
        char *d = s;
        /* Strip the leading white space */
        while (d && *d && apr_isspace(*d)) {
            d++;
        }
        MMETRIC_ADD_METADATA(gmi,MGROUP,d);
    }

    /* transfer any extra data as metric metadata */
    for (i = 0; i < arr->nelts; ++i) {
        if (elts[i].key == NULL)
            continue;
        MMETRIC_ADD_METADATA(gmi, elts[i].key, elts[i].val);
    }
}

/* Module entry point: scan the configured directory for *.pl modules, give
 * each one its own embedded interpreter, call its metric_init, and register
 * every returned metric descriptor with gmond. */
static int perl_metric_init (apr_pool_t *p)
{
    DIR *dp;
    struct dirent *entry;
    int i, size;
    char* modname;
    char *modpath;
    HV *pparamhash;
    pl_metric_init_t minfo;
    Ganglia_25metric *gmi;
    mapped_info_t *mi;
    const char* path = perl_module.module_params;
    cfg_t *module_cfg;
    PerlInterpreter *perl = NULL;
    int argc = 0;
    char *argv[] = { };
    char *env[] = { };
    char *embedding[] = {"", ""};

    /* Allocate a pool that will be used by this module */
    apr_pool_create(&pool, p);

    metric_info = apr_array_make(pool, 10, sizeof(Ganglia_25metric));
    metric_mapping_info = apr_array_make(pool, 10, sizeof(mapped_info_t));

    /* Verify path exists and can be read */
    if (!path) {
        err_msg("[PERL] Missing perl module path.\n");
        return -1;
    }

    if (access(path, F_OK)) {
        /* 'path' does not exist */
        err_msg("[PERL] Can't open the perl module path %s.\n", path);
        return -1;
    }

    if (access(path, R_OK)) {
        /* Don't have read access to 'path' */
        err_msg("[PERL] Can't read from the perl module path %s.\n", path);
        return -1;
    }

    /* Initialize each perl module */
    if ((dp = opendir(path)) == NULL) {
        /* Error: Cannot open the directory - Shouldn't happen */
        /* Log? */
        err_msg("[PERL] Can't open the perl module path %s.\n", path);
        return -1;
    }

    PERL_SYS_INIT3(&argc, (char ***) &argv, (char ***) &env);

    i = 0;
    while ((entry = readdir(dp)) != NULL) {
        modname = is_perl_module(entry->d_name);
        if (modname == NULL)
            continue;

        /* Find the specified module configuration in gmond.conf
           If this return NULL then either the module config
           doesn't exist or the module is disabled. */
        module_cfg = find_module_config(modname);
        if (!module_cfg)
            continue;

        /* Build "<path>/<modname>.pl" for the interpreter to load. */
        size_t path_len = strlen(path) + strlen(modname) + 5;
        modpath = malloc(path_len);
        modpath = strncpy(modpath, path, path_len);
        modpath = strcat(modpath, "/");
        modpath = strcat(modpath, modname);
        modpath = strcat(modpath, ".pl");
        embedding[1] = modpath ;

        /* One dedicated interpreter per module. */
        perl = perl_alloc();
        PL_perl_destruct_level = 0;
        PERL_SET_CONTEXT(perl);

        perl_construct(perl);
        PL_origalen = 1;
        PERL_SET_CONTEXT(perl);
        perl_parse(perl, NULL, 1, embedding, NULL);

        /* Run the perl script so that global variables can be accessed */
        perl_run(perl);

        free(modpath);

        /* Build a parameter dictionary to pass to the module */
        pparamhash = build_params_hash(module_cfg);
        if (!pparamhash) {
            err_msg("[PERL] Can't build the parameters hash for [%s].\n", modname);
            continue;
        }

        dSP;
        ENTER;
        SAVETMPS;
        PUSHMARK(SP);
        /* Push a reference to the pparamhash to the Perl stack */
        XPUSHs(sv_2mortal(newRV_noinc((SV*)pparamhash)));
        PUTBACK;
        /* Call the module's metric_init; it returns a list of descriptor
         * hash references. */
        size = call_pv("metric_init", G_ARRAY|G_EVAL);
        SPAGAIN;
        /*SP -= size;
        ax = (SP - PL_stack_base) + 1;
        SvGETMAGIC(plarray); */

        if (SvTRUE(ERRSV)) {
            /* failed calling metric_init */
            err_msg("[PERL] Can't call the metric_init function in the perl module [%s].\n", modname);
            continue;
        }
        else {
            if (size) {
                int j;
                for (j = 0; j < size; j++) {
                    SV* sref = POPs;
                    if (!SvROK(sref)) {
                        err_msg("[PERL] No descriptors returned from metric_init call in the perl module [%s].\n", modname);
                        continue;
                    }

                    /* Dereference the reference */
                    HV* plhash = (HV*)(SvRV(sref));
                    if (plhash != NULL) {
                        fill_metric_info(plhash, &minfo, modname, pool);
                        gmi = (Ganglia_25metric*)apr_array_push(metric_info);
                        fill_gmi(gmi, &minfo);
                        mi = (mapped_info_t*)apr_array_push(metric_mapping_info);
                        mi->mod_name = apr_pstrdup(pool, modname);
                        mi->pcb = apr_pstrdup(pool, minfo.pcb);
                        mi->perl = perl;
                    }
                }
            }
        }
        PUTBACK;
        FREETMPS;
        LEAVE;
    }
    closedir(dp);

    apr_pool_cleanup_register(pool, NULL, perl_metric_cleanup, apr_pool_cleanup_null);

    /* Replace the empty static metric definition array with
       the dynamic array that we just created */
    /*XXX Need to put this into a finalize MACRO. This is just pushing
      a NULL entry onto the array so that the looping logic can
      determine the end if the array. We should probably give back
      a ready APR array rather than a pointer to a Ganglia_25metric
      array. */
    gmi = apr_array_push(metric_info);
    memset (gmi, 0, sizeof(*gmi));
    mi = apr_array_push(metric_mapping_info);
    memset (mi, 0, sizeof(*mi));

    perl_module.metrics_info = (Ganglia_25metric *)metric_info->elts;
    return 0;
}

/* Pool cleanup callback: tear down the embedded interpreters.
 * NOTE(review): the per-interpreter destruct/free is commented out because
 * it segfaulted; only PERL_SYS_TERM() runs, so interpreters leak by design
 * here — confirm before re-enabling. */
static apr_status_t perl_metric_cleanup (void *data)
{
    mapped_info_t *mi, *smi;
    int i, j;

    mi = (mapped_info_t*) metric_mapping_info->elts;
    for (i = 0; i < metric_mapping_info->nelts; i++) {
        if (mi[i].mod_name) {
            /* XXX: Should work but segfault...
            if (mi[i].perl != NULL) {
                PERL_SET_CONTEXT(mi[i].perl);
                perl_destruct(mi[i].perl);
                PERL_SET_CONTEXT(mi[i].perl);
                perl_free(mi[i].perl);
            }
            */

            /* Set all modules that fall after this once with the same
             * module pointer to NULL so metric_cleanup only gets called
             * once on the module. */
            smi = (mapped_info_t*) metric_mapping_info->elts;
            for (j = i+1; j < metric_mapping_info->nelts; j++) {
                if (smi[j].mod_name == mi[i].mod_name) {
                    smi[j].mod_name = NULL;
                }
            }
        }
    }
    PERL_SYS_TERM();
    return APR_SUCCESS;
}

/* Per-metric value callback: invoke the module's registered perl callback
 * and convert the topmost stack value to the metric's declared type. */
static g_val_t perl_metric_handler(int metric_index)
{
    g_val_t val;
    Ganglia_25metric *gmi = (Ganglia_25metric *) metric_info->elts;
    mapped_info_t *mi = (mapped_info_t*) metric_mapping_info->elts;
    int size;

    memset(&val, 0, sizeof(val));
    if (!mi[metric_index].pcb) {
        /* No call back provided for this metric */
        return val;
    }

    if (!mi[metric_index].perl) {
        err_msg("No perl interpreter found.");
        return val;
    }
    PERL_SET_CONTEXT(mi[metric_index].perl);

    dSP;
    ENTER;
    SAVETMPS;
    PUSHMARK(SP);
    PUTBACK;

    /* Call the metric handler call back for this metric */
    size = call_pv(mi[metric_index].pcb, G_NOARGS);
    SPAGAIN;

    if (SvTRUE(ERRSV)) {
        err_msg("[PERL] Can't call the metric handler function for [%s] in the perl module [%s].\n",
                gmi[metric_index].name, mi[metric_index].mod_name);
        /* return what? */
        return val;
    }
    else {
        /*SV* sref = POPs;
        if (!SvROK(sref)) {
            err_msg("[PERL] No values returned from metric handler function for [%s] in the perl module [%s].\n",
                    gmi[metric_index].name, mi[metric_index]. modname);
            return val;
        }*/
        if (size) {
            switch (gmi[metric_index].type) {
                case GANGLIA_VALUE_STRING:
                {
                    snprintf(val.str, sizeof(val.str), "%s", POPpx);
                    break;
                }
                case GANGLIA_VALUE_UNSIGNED_INT:
                {
                    val.uint32 = POPl;
                    break;
                }
                case GANGLIA_VALUE_INT:
                {
                    val.int32 = POPi;
                    break;
                }
                case GANGLIA_VALUE_FLOAT:
                {
                    /* XXX FIXEME */
                    val.f = POPn;
                    break;
                }
                case GANGLIA_VALUE_DOUBLE:
                {
                    /* XXX FIXEME */
                    val.d = POPn;
                    break;
                }
                default:
                {
                    memset(&val, 0, sizeof(val));
                    break;
                }
            }
        }
    }

    PUTBACK;
    FREETMPS;
    LEAVE;

    return val;
}

mmodule perl_module =
{
    STD_MMODULE_STUFF,
    perl_metric_init,
    NULL,
    NULL, /* defined dynamically */
    perl_metric_handler,
};
def argmax(a, axis=None):
    """Return the indices of the maximum values along an axis.

    When ``axis`` is None, or the input is scalar, the tensor is flattened
    before taking the argmax.
    """
    arr = array_creation.asarray(a)
    flatten = axis is None or utils.isscalar(arr)
    tensor = tf.reshape(arr.data, [-1]) if flatten else arr.data
    return utils.tensor_to_ndarray(tf.argmax(tensor, axis))
def property_get(self, property_name, default=None):
    """Read a (possibly dotted) property of this target from the server.

    Args:
        property_name: name of the property; dots descend into nested
            dictionaries, e.g. ``"a.b"`` reads ``response["a"]["b"]``.
        default: value returned when the property resolves to None
            (i.e. it is missing anywhere along the dotted path).

    Returns:
        The property's value; *default* when the property resolved to None
        and a non-None default was given; otherwise None.
    """
    self.report_info("reading property '%s'" % property_name, dlevel = 3)
    data = { "projection": json.dumps([ property_name ]) }
    if self.ticket:
        data['ticket'] = self.ticket
    r = self.rtb.send_request("GET", "targets/" + self.id, data = data)
    # Walk the dotted path, stopping (with None) at the first missing
    # component instead of raising.
    val = None
    for prop_name in property_name.split("."):
        r = r.get(prop_name, None)
        # PEP 8: identity comparison with None, not '== None'.
        if r is None:
            val = None
            break
        val = r
    self.report_info("read property '%s': '%s' [%s]"
                     % (property_name, val, default), dlevel = 4)
    if val is None and default is not None:
        return default
    return val
Endovascular Stent-Graft Repair of a Complicated Penetrating Ulcer of the Descending Thoracic Aorta: A Word of Caution Purpose: To report a pitfall encountered during stenting of a complicated penetrating ulcer of the descending thoracic aorta. Case Report: A 65-year-old man was diagnosed with a complicated penetrating ulcer of the thoracic aorta. A 38-mm Talent endograft was implanted. On balloon dilation of the distal end of the endoprosthesis, the terminal bare stent became distorted and penetrated the aortic wall. A 42-mm endoprosthesis was immediately placed to exclude the aortic perforation. Control aortography demonstrated exclusion of the original proximal aortic ulcer and the distal iatrogenic aortic tear. Conclusions: Endoprostheses may present some drawbacks in terms of elasticity and adaptability to tortuous and angulated diseased aortas. Caution is advised in the treatment of penetrating aortic ulcers where the aortic wall is diffusely friable. In this condition, balloon dilation should be limited to the covered portion of the stent-graft to prevent stent distortion and erosion through the aortic wall.
EXCLUSIVE: Sony Pictures Entertainment is negotiating with Seth Gordon to direct Uncharted, the live-action adaptation of the top-selling PlayStation vidgame series Uncharted: Drake’s Fortune. This brings the Horrible Bosses and Identity Thief helmer back to his origins, where he helmed the celebrated documentary The King Of Kong and immersed himself in video game culture. The film is an Arad/Atlas Entertainment Production, produced by Chuck Roven, Avi Arad, Alex Gartner, and Ari Arad. The most recent script of Uncharted is by Safe House scribe David Guggenheim, and the studio was hot enough on that draft that they set him to write Bad Boys 3. The project has had several prominent filmmakers involved over the years, most recently David O Russell, before he left to helm the Roven-produced American Hustle, a Best Picture Oscar contender. Next step is to find a protagonist to play Nathan Drake, the treasure and antiquities hunter and descendant of the famed explorer Sir Francis Drake. With a female journalist and his mentor by his side, Drake attempts to find the lost treasure of El Dorado. It is a big-scale adventure film. The game was developed by Naughty Dog and published by Sony Computer Entertainment America, LLC. The intention of the producers and the studio is to get this into production later this year. Sony has long coveted the project for its tentpole potential. Jonathan Kadin and Hannah Minghella are overseeing for the studio. Gordon is repped by WME and Brillstein.
async def limit(self, namespace: str, num: int):
    # Rate-limit entry point. The first call for a namespace creates its
    # token bucket; later calls consume one token each, suspending until
    # a token is available.
    if namespace not in self.buckets:
        # Lazily create a bucket holding `num` tokens for this namespace.
        queue: asyncio.Queue = asyncio.Queue(num)
        self.buckets[namespace] = (queue, num)
        for _ in range(num):
            queue.put_nowait(1)
        if not self.task:
            # Start the single background task on first use.
            # NOTE(review): token_manager is defined elsewhere; presumably
            # it refills the buckets over time — confirm.
            self.task = asyncio.ensure_future(self.token_manager())
        # Yield control briefly so the background task can start running.
        await asyncio.sleep(0.001)
        # NOTE(review): the bucket-creating call does not consume a token
        # from the fresh queue — confirm this asymmetry is intentional.
    else:
        queue = self.buckets[namespace][0]
        # Consume one token, blocking until one is available.
        await queue.get()
def clamp_expression(self, ne, relations, scope, do_clamp=True):
    """Rewrite each column referenced by ``ne`` through the scope.

    Numeric (int/float), bounded, non-key columns with a known minimum are
    wrapped in a SQL CASE expression clamping their value to
    [minval, maxval]; every other column is pushed through unchanged.
    Column nodes are renamed in place; the (mutated) ``ne`` is returned.
    """
    expression = ne.expression
    columns = expression.find_nodes(Column)
    if type(expression) is Column:
        columns = columns + [expression]

    for column in columns:
        name = column.name
        symbol = column.symbol(relations)
        clampable = (do_clamp
                     and symbol.valtype in ["float", "int"]
                     and not symbol.unbounded
                     and symbol.minval is not None
                     and not symbol.is_key)
        if clampable:
            clamped = ("CASE WHEN {0} < {1} THEN {1} WHEN {0} > {2} THEN {2} "
                       "ELSE {0} END").format(str(name), symbol.minval, symbol.maxval)
            replacement = Expression(clamped)
        else:
            replacement = Column(name)
        column.name = scope.push_name(replacement, str(name))
    return ne
/* SPDX-License-Identifier: LGPL-2.1-or-later */ #include <sys/stat.h> #include <sys/types.h> #include <unistd.h> #include "alloc-util.h" #include "fileio-label.h" #include "selinux-util.h" #include "time-util.h" #define MESSAGE \ "# This file was created by systemd-update-done. Its only \n" \ "# purpose is to hold a timestamp of the time this directory\n" \ "# was updated. See man:systemd-update-done.service(8).\n" static int apply_timestamp(const char *path, struct timespec *ts) { _cleanup_free_ char *message = NULL; int r; /* * We store the timestamp both as mtime of the file and in the file itself, * to support filesystems which cannot store nanosecond-precision timestamps. */ if (asprintf(&message, MESSAGE "TIMESTAMP_NSEC=" NSEC_FMT "\n", timespec_load_nsec(ts)) < 0) return log_oom(); r = write_string_file_atomic_label_ts(path, message, ts); if (r == -EROFS) log_debug_errno(r, "Cannot create \"%s\", file system is read-only.", path); else if (r < 0) return log_error_errno(r, "Failed to write \"%s\": %m", path); return 0; } int main(int argc, char *argv[]) { struct stat st; int r, q = 0; log_setup(); if (stat("/usr", &st) < 0) { log_error_errno(errno, "Failed to stat /usr: %m"); return EXIT_FAILURE; } r = mac_init(); if (r < 0) return EXIT_FAILURE; r = apply_timestamp("/etc/.updated", &st.st_mtim); q = apply_timestamp("/var/.updated", &st.st_mtim); return r < 0 || q < 0 ? EXIT_FAILURE : EXIT_SUCCESS; }
/**
 * Adds the Topic Filter / QoS pair to the list.
 *
 * @param strTopic the Topic Filter
 * @param qos      the QoS
 * @return true if the Topic Filter / QoS pair is successfully added; false otherwise.
 */
public boolean addTopicQoS(String strTopic, byte qos)
{
    // Wrap the pair and delegate to the backing list in one step.
    return m_topicQoSList.addElement(new CMMqttTopicQoS(strTopic, qos));
}
/**
 * Unit tests for {@code Factorial.factorial(long)}.
 */
public class FactorialTest {

    @Test
    public void factorial_EdgeParameter() {
        // Smallest exercised input: 1! == 1.
        assertEquals(1, Factorial.factorial(1L));
    }

    @Test
    public void factorial_NormalParameter() {
        // Typical input: 5! == 120.
        assertEquals(120, Factorial.factorial(5L));
    }
}
Predicting the Future: Advantages of Semilocal Units In investigating gaussian radial basis function (RBF) networks for their ability to model nonlinear time series, we have found that while RBF networks are much faster than standard sigmoid unit backpropagation for low-dimensional problems, their advantages diminish in high-dimensional input spaces. This is particularly troublesome if the input space contains irrelevant variables. We suggest that this limitation is due to the localized nature of RBFs. To gain the advantages of the highly nonlocal sigmoids and the speed advantages of RBFs, we propose a particular class of semilocal activation functions that is a natural interpolation between these two families. We present evidence that networks using these gaussian bar units avoid the slow learning problem of sigmoid unit networks, and, very importantly, are more accurate than RBF networks in the presence of irrelevant inputs. On the Mackey-Glass and Coupled Lattice Map problems, the speedup over sigmoid networks is so dramatic that the difference in training time between RBF and gaussian bar networks is minor. Gaussian bar architectures that superpose composed gaussians (gaussians-of-gaussians) to approximate the unknown function have the best performance. We postulate that an interesting behavior displayed by gaussian bar functions under gradient descent dynamics, which we call automatic connection pruning, is an important factor in the success of this representation.
/**
 * Get the precedence value of a condition branch operator.
 *
 * AND binds tighter (2) than OR and XOR (both 1); any other operator is a
 * programming error and trips the assertion.
 */
static inline int c2h_b_opp(c2_b_op_t op) {
  if (op == C2_B_OAND)
    return 2;
  if (op == C2_B_OOR || op == C2_B_OXOR)
    return 1;

  assert(0);
  return 0;
}
/** * Detects and runs all Service Implementation Compatibility Kits (SLICKs)inside * the current OSGI instance. The SipCommunicatorSlickRunner produces an xml log * file following ant format rules (so that it could be used by CruiseControl) * and stores it inside the directory indicated in the * net.java.sip.communicator.slick.runner.OUTPUT_DIR property (default is * test-reports). * <p> * In order for the SipCommunicatorSlickRunner to detect all SLICKs they * needs to be registered as services in the OSGI environment prior to the * activation of the runner, and their names need to be specified in a * whitespace separated list registered against the * net.java.sip.communicator.slick.runner.TEST_LIST system property. * <p> * After running all unit tests the SipcCommunicatorSlickRunner will try to * gracefully shutdown the Felix OSGI framework (if it fails it'll shut it * down rudely ;) ) and will System.exit() with an error code in case any * test failures occurred or with 0 if all tests passed. * * @author Emil Ivov */ public class SipCommunicatorSlickRunner extends TestSuite implements BundleActivator { private Logger logger = Logger.getLogger(getClass().getName()); /** * The name of the property indicating the Directory where test reports * should be stored. */ private static final String OUTPUT_DIR_PROPERTY_NAME = "net.java.sip.communicator.slick.runner.OUTPUT_DIR"; /** * A default name for the Directory where test reports should be stored. */ private static final String DEFAULT_OUTPUT_DIR = "test-reports"; /** * The name of the property indicating the name of the file where test * reports should be stored. */ private static final String OUTPUT_FILE_NAME = "sip-communicator.unit.test.reports.xml"; /** * The name of the property that contains the list of Service ICKs that * we'd have to run. 
*/ private static final String TEST_LIST_PROPERTY_NAME = "net.java.sip.communicator.slick.runner.TEST_LIST"; /** * A reference to the bundle context received when activating the test * runner. */ private BundleContext bundleContext = null; /** * The number of failures and errors that occurred during unit testing. */ private int errCount = 0; /** * The number of unit tests run by the slick runner. */ private int runCount = 0; /** * Starts the slick runner, runs all unit tests indicated in the * TEST_LIST property, and exits with an error code corresponding to whether * or there were failure while running the tests. * @param bc BundleContext * @throws Exception */ public void start(BundleContext bc) throws Exception { logger.logEntry(); try { bundleContext = bc; setName(getClass().getName()); //Let's now see what tests have been scheduled for execution. String tests = System.getProperty(TEST_LIST_PROPERTY_NAME); if (tests == null || tests.trim().length() == 0) { tests = ""; } logger.debug("specified test list is: " + tests); StringTokenizer st = new StringTokenizer(tests); String[] ids = new String[st.countTokens()]; int n = 0; while (st.hasMoreTokens()) { ids[n++] = st.nextToken().trim(); } //Determine the file specified for storing test results. 
String outputDirName = System.getProperty(OUTPUT_DIR_PROPERTY_NAME); if (outputDirName == null || outputDirName.trim().length() == 0) { outputDirName = DEFAULT_OUTPUT_DIR; } File outputDir = new File(outputDirName); if (!outputDir.exists()) { outputDir.mkdirs(); } for (int i = 0; i < ids.length; i++) { logger.info("=========== Running tests in : " + ids[i] + " ==========="); TestSuite slick = getTestSuite(bc, ids[i]); logger.debug("with " + slick.countTestCases() + " tests."); File outputFile = new File(outputDir, "SC-TEST-" + ids[i] + ".xml"); if (!outputFile.exists()) { outputFile.createNewFile(); } logger.debug("specified reports file: " + outputFile.getCanonicalFile()); OutputStream out = new FileOutputStream(outputFile); XmlFormatter fmtr = new XmlFormatter(new PrintStream(out)); TestResult res = ScTestRunner.run(slick, fmtr); errCount += res.errorCount() + res.failureCount(); runCount += res.runCount(); out.flush(); out.close(); } //output results logger.info(""); logger.info("===================================================="); logger.info("We ran " + runCount + " tests and encountered " + errCount + " errors and failures."); logger.info("===================================================="); logger.info(""); //in order to shutdown felix we'd first need to wait for it to //complete it's start process, so we'll have to implement shutdown //in a framework listener. bc.addFrameworkListener(new FrameworkListener(){ public void frameworkEvent(FrameworkEvent event){ if( event.getType() == FrameworkEvent.STARTED) { try { //first stop the system bundle thus causing oscar //to stop all user bundles and shut down. 
bundleContext.getBundle(0).stop(); } catch (BundleException ex) { logger.error("Failed to gently shutdown Felix",ex); } //if everything is ok then the stop call shouldn't have //exited the the program since we must have set the //"felix.embedded.execution" property to true //we could therefore now System.exit() with a code //indicating whether or not all unit tests went wrong // After updating to Felix 3.2.2, System.exit locks // the tests and it never stop, so it has to be removed // or in new thread. new Thread(new Runnable() { public void run() { System.exit(errCount > 0? -1: 0); } }).start(); } } }); } finally { logger.logExit(); } } /** * Dummy impl * @param bc BundleContext */ public void stop(BundleContext bc) { logger.debug("Stopping!"); } /** * Looks through the osgi framework for a service with a "service.pid" * property set to <tt>id</tt>. * @param bc the BundleContext where the service is to be looked for. * @param id the value of the "service.pid" property for the specified * service. * @return a TestSuite service corresponding the specified <tt>id</tt> * or a junit TestCase impl wrapping an exception in case we failed to * retrieve the service for some reason. */ public TestSuite getTestSuite(BundleContext bc, final String id) { Object obj = null; try { ServiceReference[] srl = bc.getServiceReferences( (String) null, "(service.pid=" + id + ")"); if (srl == null || srl.length == 0) { obj = new TestCase("No id=" + id) { @Override public void runTest() { throw new IllegalArgumentException("No test with id=" + id); } }; } if (srl != null && srl.length != 1) { obj = new TestCase("Multiple id=" + id) { @Override public void runTest() { throw new IllegalArgumentException( "More than one test with id=" + id); } }; } if (obj == null) { obj = bc.getService(srl[0]); } } catch (Exception e) { obj = new TestCase("Bad filter syntax id=" + id) { @Override public void runTest() { throw new IllegalArgumentException("Bad syntax id=" + id); } }; } if (! 
(obj instanceof Test)) { final Object oldObj = obj; obj = new TestCase("ClassCastException") { @Override public void runTest() { throw new ClassCastException("Service implements " + oldObj.getClass().getName() + " instead of " + Test.class.getName()); } }; } Test test = (Test) obj; TestSuite suite; if (test instanceof TestSuite) { suite = (TestSuite) test; } else { suite = new TestSuite(id); suite.addTest(test); } return suite; } }
Image caption The A6 scheme will include a bypass around Dungiven More than 200 people have five months to move out of their homes and businesses to make way for a major new road between Londonderry and Dungiven. The A6 dual carriageway has been in the pipeline for years and work is due to get under way next year. The £400m project will upgrade 18 miles (30km) of the road and it is aimed at reducing journey times between Derry and Belfast. The Department for Infrastructure said landowners would be compensated. The A6 consists mainly of a single two-lane carriageway, and it has been a bottleneck for decades. 'Complete shock' The department has issued an order to purchase land for a 15-mile (25.5km) section of the scheme, between Dungiven and Drumahoe, including a bypass around Dungiven. Nine homes, 13 businesses and 760 acres of farmland are needed to make way for the road, affecting a total of 230 people. They have been told the department will take ownership of their land and properties from 19 September. They can work, live and farm until spring next year when construction work is due to begin. It is understood the deadline for people to move out is next February. But one of the people affected told BBC Radio Foyle the time scale had come as a "complete shock". Dungiven farmer Nigel McLaughlin knows some of the farmers who will be impacted. 'Knew it was coming' "It will be a traumatic experience for people losing their homes," he said. "It is something we have known has been coming for a long time. It has been discussed for most of my lifetime." The Department for Infrastructure said it had advised those affected to appoint an agent to help with their compensation claims. "Where land is acquired for a road scheme, landowners will be compensated in accordance with the Land Compensation (Northern Ireland) Order 1982," the department said. 
"The purpose of this monetary compensation is to ensure the landowner is put in a similar position to what they would have been had the land or property not been taken for the scheme." The tendering process to appoint contractors to build the road has begun. It is estimated that this section of the scheme will cost between £230m and £255m. Construction work is expected to start in spring 2018 and take up to four years to complete.
Heterogeneous catalysts for hydrogenation of CO2 and bicarbonates to formic acid and formates ABSTRACT Formic acid and formates are often produced by hydrogenation of CO2 with hydrogen over homogeneous catalysts. The present review reports recent achievements in utilization of heterogeneous catalysts. It shows that highly dispersed supported metal catalysts are able to carry out this reaction by providing activation of hydrogen on the metal sites and activation of CO2 or bicarbonate on the support sites. Important advances have recently been achieved through utilization of catalysts using CxNy materials as supports. The high activity of these catalysts could be assigned to their ability to stabilize the active metal in a state of single-metal atoms or heterogenized metal complexes, which may demonstrate a higher activity than metal atoms on the surface of metal nanoparticles.
// t0445.cc
// DQTs in non-function declarators
//
// Regression exercise: a dependent-qualified type (A<U>::B) appearing in
// the declarator of an out-of-class static data member definition.

template <class U>
struct A {
  struct B {};
  static B *array[2];
};

// 'typename' is required because A<U2>::B is a dependent type.
template <class U2>
typename A<U2>::B *A<U2>::array[2] = { 0, 0 };

void foo()
{
  A<int>::array[0] = A<int>::array[1];
}
package org.mercycorps.translationcards.service;

import org.mercycorps.translationcards.model.Dictionary;
import org.mercycorps.translationcards.repository.DictionaryRepository;

import java.util.Arrays;
import java.util.List;

/**
 * Tracks which dictionary of the currently selected deck is active.
 * The deck itself is owned by {@link DeckService}; this service only
 * holds the index of the active dictionary within that deck.
 */
public class DictionaryService {

    private DictionaryRepository dictionaryRepository;
    // Cached copy of the active dictionary. NOTE(review): this field is
    // written by the constructor and setCurrentDictionary(), but
    // currentDictionary() ignores it and always re-reads from the
    // repository — confirm whether any collaborator reads this field
    // directly before relying on (or removing) the cache.
    Dictionary currentDictionary;
    DeckService deckService;
    // Index of the active dictionary within the current deck's list.
    int currentDictionaryIndex;

    public DictionaryService(DictionaryRepository dictionaryRepository, DeckService deckService) {
        this.dictionaryRepository = dictionaryRepository;
        this.deckService = deckService;
        // Default to the first dictionary of the current deck.
        currentDictionaryIndex = 0;
        currentDictionary = getDictionariesForCurrentDeck().get(currentDictionaryIndex);
    }

    /**
     * Fetches the dictionaries of the current deck from the repository.
     * Always hits the repository; the result is not cached here.
     */
    public List<Dictionary> getDictionariesForCurrentDeck() {
        return Arrays.asList(
                dictionaryRepository.getAllDictionariesForDeck(
                        deckService.currentDeck().getDbId()));
    }

    /**
     * Returns the active dictionary, freshly fetched from the repository
     * (the cached {@code currentDictionary} field is not consulted).
     */
    public Dictionary currentDictionary() {
        return getDictionariesForCurrentDeck().get(currentDictionaryIndex);
    }

    public int getCurrentDictionaryIndex() {
        return currentDictionaryIndex;
    }

    /**
     * Makes the dictionary at {@code index} the active one and refreshes
     * the cached field from the repository.
     */
    public void setCurrentDictionary(int index) {
        currentDictionaryIndex = index;
        currentDictionary = getDictionariesForCurrentDeck().get(index);
    }
}
<gh_stars>10-100 declare const version = "0.11.4"; export { version };
package com.example.super_power_naps;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;

import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;

import com.example.super_power_naps.Room_Database.UserRepository;
import com.google.firebase.auth.FirebaseAuth;

import java.util.Calendar;
import java.util.Date;

/**
 * Fragment that lets the user pick one of three nap lengths
 * (30 / 35 / 40 minutes) and schedules a wake-up alarm that far
 * in the future via {@link AlarmManager}.
 */
public class AlarmOptionsFragment extends Fragment {

    private AlarmManager alarmManager;
    private PendingIntent pendingIntent;
    private UserRepository userRepository;

    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container,
                             @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_alarm_options, container, false);

        alarmManager = (AlarmManager) getActivity().getSystemService(Context.ALARM_SERVICE);
        Intent intent = new Intent(getContext(), AlarmReceiver.class);
        pendingIntent = PendingIntent.getBroadcast(getContext(), 0, intent,
                PendingIntent.FLAG_UPDATE_CURRENT);

        // Only record nap times for signed-in users.
        if (FirebaseAuth.getInstance().getCurrentUser() != null) {
            String uid = FirebaseAuth.getInstance().getCurrentUser().getUid();
            userRepository = new UserRepository(view.getContext(), uid);
        }

        view.findViewById(R.id.tired).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                setAlarm(30);
            }
        });
        view.findViewById(R.id.sleepy).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                setAlarm(35);
            }
        });
        view.findViewById(R.id.normal).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                setAlarm(40);
            }
        });
        return view;
    }

    /**
     * Schedules an exact alarm {@code minutes} from now, persists the nap
     * start time (when a user is signed in), and swaps in the
     * "alarm set" fragment.
     */
    private void setAlarm(int minutes) {
        Date now = new Date();
        if (userRepository != null) {
            userRepository.update(now.toString());
        }
        // BUG FIX: the previous implementation formatted the current time
        // with SimpleDateFormat ("hh"/"mm"), parsed it back, and called
        // Calendar.set(). With the 12-hour pattern "hh", midday/midnight
        // formats as "12", and Calendar.set(Calendar.HOUR, 12) rolls the
        // lenient Calendar into the next AM/PM half-day, shifting the
        // alarm by ~12 hours. Calendar.add() avoids the format/parse round
        // trip entirely and also handles minute overflow past :59.
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(now);
        calendar.add(Calendar.MINUTE, minutes);

        Toast.makeText(getContext(), "Get to bed!!!", Toast.LENGTH_SHORT).show();
        alarmManager.setExact(AlarmManager.RTC_WAKEUP, calendar.getTimeInMillis(), pendingIntent);
        getFragmentManager()
                .beginTransaction()
                .replace(R.id.nap_options_container, new AlarmSetFragment())
                .commit();
    }
}
Imagine you are enjoying your golden years, driving to your daily appointment for some painless brain zapping that is helping to stave off memory loss. That's the hope of a new study, in which people who learned associations (such as a random word and an image) after transcranial magnetic stimulation (TMS) were better able to learn more pairings days and weeks later—with no further stimulation needed. TMS uses a magnetic coil placed on the head to increase electrical signaling a few centimeters into the brain. Past studies have found that TMS can boost cognition and memory during stimulation, but this is the first to show that such gains can last even after the TMS regimen is completed. In the new study, which was published in Science, neuroscientists first used brain imaging to identify the associative memory network of 16 young, healthy participants. This network, based around the hippocampus, glues together things such as sights, places, sounds and time to form a memory, explains neuroscientist Joel Voss of Northwestern University, a senior author of the paper. Next, the researchers applied TMS behind the left ear of each participant for 20 minutes for five consecutive days to stimulate this memory network. To see if participants' associative memory improved, one day after the stimulation regimen finished they were tested for their ability to learn random words paired with faces. Subjects who had had TMS performed 33 percent better, compared with those who received placebo treatments, such as sham stimulation. “Twenty-four hours may not sound like a long time, but in fact that's quite long in terms of affecting the brain,” Voss says. His team followed up with the participants about 15 days later and found the benefit remained, according to another paper in press at Hippocampus. The team also imaged the subjects' brains one and 15 days after stimulation, finding increases in neural connectivity in their associative memory network. 
Voss now plans to test whether this method works on individuals who have disorders in which the memory association network is weak, such as Alzheimer's disease, traumatic brain injury and schizophrenia.
// Repeat returns a function that runs the specified function repeatedly for the specific number of times. func Repeat(n int, fn Action) Action { return func(ctx context.Context) error { for i := 0; i < n; i++ { if err := fn(ctx); err != nil { return err } } return nil } }
package com.github.nylle.javafixture.testobjects.example;

import java.util.Set;

/**
 * Example test-object interface: a contract that owns a set of
 * {@link ContractPosition}s. Used as fixture material, not production code.
 */
public interface IContract {

    /** Returns the positions currently attached to this contract. */
    Set<ContractPosition> getContractPositions();

    /** Attaches a single position to this contract. */
    void addContractPosition(ContractPosition contractPosition);
}
package com.liwy.oscafe.upms.entity; /** * 用户实体类 * Created by liwy on 2018/1/20. */ public class UpmsUser { /** * 主键 * * @mbg.generated */ private Integer userId; /** * 用户账号 * * @mbg.generated */ private String username; /** * 密码MD5加密 * * @mbg.generated */ private String password; /** * 盐 */ private String salt; /** * 昵称 * * @mbg.generated */ private String nickname; /** * 头像 * * @mbg.generated */ private String headpic; /** * 真实姓名 * * @mbg.generated */ private String realname; /** * 性别 * * @mbg.generated */ private Byte sex; /** * 邮箱 * * @mbg.generated */ private String email; /** * 电话 * * @mbg.generated */ private String phone; /** * 状态 (0:正常,1:锁定) * * @mbg.generated */ private Byte status; /** * 创建时间 * * @mbg.generated */ private Long ctime; public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getNickname() { return nickname; } public void setNickname(String nickname) { this.nickname = nickname; } public String getHeadpic() { return headpic; } public void setHeadpic(String headpic) { this.headpic = headpic; } public String getRealname() { return realname; } public void setRealname(String realname) { this.realname = realname; } public Byte getSex() { return sex; } public void setSex(Byte sex) { this.sex = sex; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public String getPhone() { return phone; } public void setPhone(String phone) { this.phone = phone; } public Byte getStatus() { return status; } public void setStatus(Byte status) { this.status = status; } public Long getCtime() { return ctime; } public void setCtime(Long ctime) { this.ctime = ctime; } public String getSalt() { return salt; } public 
void setSalt(String salt) { this.salt = salt; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpmsUser upmsUser = (UpmsUser) o; if (userId != null ? !userId.equals(upmsUser.userId) : upmsUser.userId != null) return false; if (username != null ? !username.equals(upmsUser.username) : upmsUser.username != null) return false; if (password != null ? !password.equals(upmsUser.password) : upmsUser.password != null) return false; if (salt != null ? !salt.equals(upmsUser.salt) : upmsUser.salt != null) return false; if (nickname != null ? !nickname.equals(upmsUser.nickname) : upmsUser.nickname != null) return false; if (headpic != null ? !headpic.equals(upmsUser.headpic) : upmsUser.headpic != null) return false; if (realname != null ? !realname.equals(upmsUser.realname) : upmsUser.realname != null) return false; if (sex != null ? !sex.equals(upmsUser.sex) : upmsUser.sex != null) return false; if (email != null ? !email.equals(upmsUser.email) : upmsUser.email != null) return false; if (phone != null ? !phone.equals(upmsUser.phone) : upmsUser.phone != null) return false; if (status != null ? !status.equals(upmsUser.status) : upmsUser.status != null) return false; return ctime != null ? ctime.equals(upmsUser.ctime) : upmsUser.ctime == null; } @Override public int hashCode() { int result = userId != null ? userId.hashCode() : 0; result = 31 * result + (username != null ? username.hashCode() : 0); result = 31 * result + (password != null ? password.hashCode() : 0); result = 31 * result + (salt != null ? salt.hashCode() : 0); result = 31 * result + (nickname != null ? nickname.hashCode() : 0); result = 31 * result + (headpic != null ? headpic.hashCode() : 0); result = 31 * result + (realname != null ? realname.hashCode() : 0); result = 31 * result + (sex != null ? sex.hashCode() : 0); result = 31 * result + (email != null ? email.hashCode() : 0); result = 31 * result + (phone != null ? 
phone.hashCode() : 0); result = 31 * result + (status != null ? status.hashCode() : 0); result = 31 * result + (ctime != null ? ctime.hashCode() : 0); return result; } @Override public String toString() { return "UpmsUser{" + "userId=" + userId + ", username='" + username + '\'' + ", password='" + password + '\'' + ", salt='" + salt + '\'' + ", nickname='" + nickname + '\'' + ", headpic='" + headpic + '\'' + ", realname='" + realname + '\'' + ", sex=" + sex + ", email='" + email + '\'' + ", phone='" + phone + '\'' + ", status=" + status + ", ctime=" + ctime + '}'; } }
/************************************************************************** * * Copyright 2013 Advanced Micro Devices, Inc. * All Rights Reserved. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sub license, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice (including the * next paragraph) shall be included in all copies or substantial portions * of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* **************************************************************************/ /* * Authors: * <NAME> <<EMAIL>> * */ #include <assert.h> #include <OMX_Video.h> /* bellagio defines a DEBUG macro that we don't want */ #ifndef DEBUG #include <bellagio/omxcore.h> #undef DEBUG #else #include <bellagio/omxcore.h> #endif #include <bellagio/omx_base_video_port.h> #include "pipe/p_screen.h" #include "pipe/p_video_codec.h" #include "util/u_memory.h" #include "entrypoint.h" #include "vid_enc.h" #include "vid_omx_common.h" #include "vid_enc_common.h" static OMX_ERRORTYPE vid_enc_Constructor(OMX_COMPONENTTYPE *comp, OMX_STRING name); static OMX_ERRORTYPE vid_enc_Destructor(OMX_COMPONENTTYPE *comp); static OMX_ERRORTYPE vid_enc_SetParameter(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR param); static OMX_ERRORTYPE vid_enc_GetParameter(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR param); static OMX_ERRORTYPE vid_enc_SetConfig(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR config); static OMX_ERRORTYPE vid_enc_GetConfig(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR config); static OMX_ERRORTYPE vid_enc_MessageHandler(OMX_COMPONENTTYPE *comp, internalRequestMessageType *msg); static OMX_ERRORTYPE vid_enc_AllocateInBuffer(omx_base_PortType *port, OMX_INOUT OMX_BUFFERHEADERTYPE **buf, OMX_IN OMX_U32 idx, OMX_IN OMX_PTR private, OMX_IN OMX_U32 size); static OMX_ERRORTYPE vid_enc_UseInBuffer(omx_base_PortType *port, OMX_BUFFERHEADERTYPE **buf, OMX_U32 idx, OMX_PTR private, OMX_U32 size, OMX_U8 *mem); static OMX_ERRORTYPE vid_enc_FreeInBuffer(omx_base_PortType *port, OMX_U32 idx, OMX_BUFFERHEADERTYPE *buf); static OMX_ERRORTYPE vid_enc_EncodeFrame(omx_base_PortType *port, OMX_BUFFERHEADERTYPE *buf); static OMX_ERRORTYPE vid_enc_AllocateOutBuffer(omx_base_PortType *comp, OMX_INOUT OMX_BUFFERHEADERTYPE **buf, OMX_IN OMX_U32 idx, OMX_IN OMX_PTR private, OMX_IN OMX_U32 size); static OMX_ERRORTYPE vid_enc_FreeOutBuffer(omx_base_PortType *port, OMX_U32 idx, OMX_BUFFERHEADERTYPE 
*buf); static void vid_enc_BufferEncoded(OMX_COMPONENTTYPE *comp, OMX_BUFFERHEADERTYPE* input, OMX_BUFFERHEADERTYPE* output); OMX_ERRORTYPE vid_enc_LoaderComponent(stLoaderComponentType *comp) { comp->componentVersion.s.nVersionMajor = 0; comp->componentVersion.s.nVersionMinor = 0; comp->componentVersion.s.nRevision = 0; comp->componentVersion.s.nStep = 1; comp->name_specific_length = 1; comp->constructor = vid_enc_Constructor; comp->name = CALLOC(1, OMX_MAX_STRINGNAME_SIZE); if (!comp->name) return OMX_ErrorInsufficientResources; comp->name_specific = CALLOC(1, sizeof(char *)); if (!comp->name_specific) goto error_arrays; comp->role_specific = CALLOC(1, sizeof(char *)); if (!comp->role_specific) goto error_arrays; comp->name_specific[0] = CALLOC(1, OMX_MAX_STRINGNAME_SIZE); if (comp->name_specific[0] == NULL) goto error_specific; comp->role_specific[0] = CALLOC(1, OMX_MAX_STRINGNAME_SIZE); if (comp->role_specific[0] == NULL) goto error_specific; strcpy(comp->name, OMX_VID_ENC_BASE_NAME); strcpy(comp->name_specific[0], OMX_VID_ENC_AVC_NAME); strcpy(comp->role_specific[0], OMX_VID_ENC_AVC_ROLE); return OMX_ErrorNone; error_specific: FREE(comp->role_specific[0]); FREE(comp->name_specific[0]); error_arrays: FREE(comp->role_specific); FREE(comp->name_specific); FREE(comp->name); return OMX_ErrorInsufficientResources; } static OMX_ERRORTYPE vid_enc_Constructor(OMX_COMPONENTTYPE *comp, OMX_STRING name) { vid_enc_PrivateType *priv; omx_base_video_PortType *port; struct pipe_screen *screen; OMX_ERRORTYPE r; int i; assert(!comp->pComponentPrivate); priv = comp->pComponentPrivate = CALLOC(1, sizeof(vid_enc_PrivateType)); if (!priv) return OMX_ErrorInsufficientResources; r = omx_base_filter_Constructor(comp, name); if (r) return r; priv->BufferMgmtCallback = vid_enc_BufferEncoded; priv->messageHandler = vid_enc_MessageHandler; priv->destructor = vid_enc_Destructor; comp->SetParameter = vid_enc_SetParameter; comp->GetParameter = vid_enc_GetParameter; comp->GetConfig = 
vid_enc_GetConfig; comp->SetConfig = vid_enc_SetConfig; priv->screen = omx_get_screen(); if (!priv->screen) return OMX_ErrorInsufficientResources; screen = priv->screen->pscreen; if (!screen->get_video_param(screen, PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH, PIPE_VIDEO_ENTRYPOINT_ENCODE, PIPE_VIDEO_CAP_SUPPORTED)) return OMX_ErrorBadParameter; priv->s_pipe = pipe_create_multimedia_context(screen); if (!priv->s_pipe) return OMX_ErrorInsufficientResources; enc_InitCompute_common(priv); if (!vl_compositor_init(&priv->compositor, priv->s_pipe)) { priv->s_pipe->destroy(priv->s_pipe); priv->s_pipe = NULL; return OMX_ErrorInsufficientResources; } if (!vl_compositor_init_state(&priv->cstate, priv->s_pipe)) { vl_compositor_cleanup(&priv->compositor); priv->s_pipe->destroy(priv->s_pipe); priv->s_pipe = NULL; return OMX_ErrorInsufficientResources; } priv->t_pipe = pipe_create_multimedia_context(screen); if (!priv->t_pipe) return OMX_ErrorInsufficientResources; priv->sPortTypesParam[OMX_PortDomainVideo].nStartPortNumber = 0; priv->sPortTypesParam[OMX_PortDomainVideo].nPorts = 2; priv->ports = CALLOC(2, sizeof(omx_base_PortType *)); if (!priv->ports) return OMX_ErrorInsufficientResources; for (i = 0; i < 2; ++i) { priv->ports[i] = CALLOC(1, sizeof(omx_base_video_PortType)); if (!priv->ports[i]) return OMX_ErrorInsufficientResources; base_video_port_Constructor(comp, &priv->ports[i], i, i == 0); } port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX]; port->sPortParam.format.video.nFrameWidth = 176; port->sPortParam.format.video.nFrameHeight = 144; port->sPortParam.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar; port->sVideoParam.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar; port->sPortParam.nBufferCountActual = 8; port->sPortParam.nBufferCountMin = 4; port->Port_SendBufferFunction = vid_enc_EncodeFrame; port->Port_AllocateBuffer = vid_enc_AllocateInBuffer; port->Port_UseBuffer = vid_enc_UseInBuffer; port->Port_FreeBuffer = vid_enc_FreeInBuffer; 
port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_OUTPUTPORT_INDEX]; strcpy(port->sPortParam.format.video.cMIMEType,"video/H264"); port->sPortParam.format.video.nFrameWidth = 176; port->sPortParam.format.video.nFrameHeight = 144; port->sPortParam.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC; port->sVideoParam.eCompressionFormat = OMX_VIDEO_CodingAVC; port->Port_AllocateBuffer = vid_enc_AllocateOutBuffer; port->Port_FreeBuffer = vid_enc_FreeOutBuffer; priv->bitrate.eControlRate = OMX_Video_ControlRateDisable; priv->bitrate.nTargetBitrate = 0; priv->quant.nQpI = OMX_VID_ENC_QUANT_I_FRAMES_DEFAULT; priv->quant.nQpP = OMX_VID_ENC_QUANT_P_FRAMES_DEFAULT; priv->quant.nQpB = OMX_VID_ENC_QUANT_B_FRAMES_DEFAULT; priv->profile_level.eProfile = OMX_VIDEO_AVCProfileBaseline; priv->profile_level.eLevel = OMX_VIDEO_AVCLevel51; priv->force_pic_type.IntraRefreshVOP = OMX_FALSE; priv->frame_num = 0; priv->pic_order_cnt = 0; priv->restricted_b_frames = debug_get_bool_option("OMX_USE_RESTRICTED_B_FRAMES", FALSE); priv->scale.xWidth = OMX_VID_ENC_SCALING_WIDTH_DEFAULT; priv->scale.xHeight = OMX_VID_ENC_SCALING_WIDTH_DEFAULT; list_inithead(&priv->free_tasks); list_inithead(&priv->used_tasks); list_inithead(&priv->b_frames); list_inithead(&priv->stacked_tasks); return OMX_ErrorNone; } static OMX_ERRORTYPE vid_enc_Destructor(OMX_COMPONENTTYPE *comp) { vid_enc_PrivateType* priv = comp->pComponentPrivate; int i; enc_ReleaseTasks(&priv->free_tasks); enc_ReleaseTasks(&priv->used_tasks); enc_ReleaseTasks(&priv->b_frames); enc_ReleaseTasks(&priv->stacked_tasks); if (priv->ports) { for (i = 0; i < priv->sPortTypesParam[OMX_PortDomainVideo].nPorts; ++i) { if(priv->ports[i]) priv->ports[i]->PortDestructor(priv->ports[i]); } FREE(priv->ports); priv->ports=NULL; } for (i = 0; i < OMX_VID_ENC_NUM_SCALING_BUFFERS; ++i) if (priv->scale_buffer[i]) priv->scale_buffer[i]->destroy(priv->scale_buffer[i]); if (priv->s_pipe) { vl_compositor_cleanup_state(&priv->cstate); 
vl_compositor_cleanup(&priv->compositor); enc_ReleaseCompute_common(priv); priv->s_pipe->destroy(priv->s_pipe); } if (priv->t_pipe) priv->t_pipe->destroy(priv->t_pipe); if (priv->screen) omx_put_screen(); return omx_workaround_Destructor(comp); } static OMX_ERRORTYPE enc_AllocateBackTexture(omx_base_PortType *port, struct pipe_resource **resource, struct pipe_transfer **transfer, OMX_U8 **map) { OMX_COMPONENTTYPE* comp = port->standCompContainer; vid_enc_PrivateType *priv = comp->pComponentPrivate; struct pipe_resource buf_templ; struct pipe_box box = {}; OMX_U8 *ptr; memset(&buf_templ, 0, sizeof buf_templ); buf_templ.target = PIPE_TEXTURE_2D; buf_templ.format = PIPE_FORMAT_I8_UNORM; buf_templ.bind = PIPE_BIND_LINEAR; buf_templ.usage = PIPE_USAGE_STAGING; buf_templ.flags = 0; buf_templ.width0 = port->sPortParam.format.video.nFrameWidth; buf_templ.height0 = port->sPortParam.format.video.nFrameHeight * 3 / 2; buf_templ.depth0 = 1; buf_templ.array_size = 1; *resource = priv->s_pipe->screen->resource_create(priv->s_pipe->screen, &buf_templ); if (!*resource) return OMX_ErrorInsufficientResources; box.width = (*resource)->width0; box.height = (*resource)->height0; box.depth = (*resource)->depth0; ptr = priv->s_pipe->transfer_map(priv->s_pipe, *resource, 0, PIPE_MAP_WRITE, &box, transfer); if (map) *map = ptr; return OMX_ErrorNone; } static OMX_ERRORTYPE vid_enc_SetParameter(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR param) { OMX_COMPONENTTYPE *comp = handle; vid_enc_PrivateType *priv = comp->pComponentPrivate; OMX_ERRORTYPE r; if (!param) return OMX_ErrorBadParameter; switch(idx) { case OMX_IndexParamPortDefinition: { OMX_PARAM_PORTDEFINITIONTYPE *def = param; r = omx_base_component_SetParameter(handle, idx, param); if (r) return r; if (def->nPortIndex == OMX_BASE_FILTER_INPUTPORT_INDEX) { omx_base_video_PortType *port; unsigned framesize; struct pipe_resource *resource; struct pipe_transfer *transfer; port = (omx_base_video_PortType 
*)priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX]; enc_AllocateBackTexture(priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX], &resource, &transfer, NULL); port->sPortParam.format.video.nStride = transfer->stride; pipe_transfer_unmap(priv->s_pipe, transfer); pipe_resource_reference(&resource, NULL); framesize = port->sPortParam.format.video.nStride * port->sPortParam.format.video.nFrameHeight; port->sPortParam.format.video.nSliceHeight = port->sPortParam.format.video.nFrameHeight; port->sPortParam.nBufferSize = framesize * 3 / 2; port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_OUTPUTPORT_INDEX]; port->sPortParam.nBufferSize = framesize * 512 / (16*16); priv->frame_rate = def->format.video.xFramerate; priv->callbacks->EventHandler(comp, priv->callbackData, OMX_EventPortSettingsChanged, OMX_BASE_FILTER_OUTPUTPORT_INDEX, 0, NULL); } break; } case OMX_IndexParamStandardComponentRole: { OMX_PARAM_COMPONENTROLETYPE *role = param; r = checkHeader(param, sizeof(OMX_PARAM_COMPONENTROLETYPE)); if (r) return r; if (strcmp((char *)role->cRole, OMX_VID_ENC_AVC_ROLE)) { return OMX_ErrorBadParameter; } break; } case OMX_IndexParamVideoBitrate: { OMX_VIDEO_PARAM_BITRATETYPE *bitrate = param; r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_BITRATETYPE)); if (r) return r; priv->bitrate = *bitrate; break; } case OMX_IndexParamVideoQuantization: { OMX_VIDEO_PARAM_QUANTIZATIONTYPE *quant = param; r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_QUANTIZATIONTYPE)); if (r) return r; priv->quant = *quant; break; } case OMX_IndexParamVideoProfileLevelCurrent: { OMX_VIDEO_PARAM_PROFILELEVELTYPE *profile_level = param; r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_PROFILELEVELTYPE)); if (r) return r; priv->profile_level = *profile_level; break; } default: return omx_base_component_SetParameter(handle, idx, param); } return OMX_ErrorNone; } static OMX_ERRORTYPE vid_enc_GetParameter(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR param) { OMX_COMPONENTTYPE *comp = handle; vid_enc_PrivateType 
*priv = comp->pComponentPrivate; OMX_ERRORTYPE r; if (!param) return OMX_ErrorBadParameter; switch(idx) { case OMX_IndexParamStandardComponentRole: { OMX_PARAM_COMPONENTROLETYPE *role = param; r = checkHeader(param, sizeof(OMX_PARAM_COMPONENTROLETYPE)); if (r) return r; strcpy((char *)role->cRole, OMX_VID_ENC_AVC_ROLE); break; } case OMX_IndexParamVideoInit: r = checkHeader(param, sizeof(OMX_PORT_PARAM_TYPE)); if (r) return r; memcpy(param, &priv->sPortTypesParam[OMX_PortDomainVideo], sizeof(OMX_PORT_PARAM_TYPE)); break; case OMX_IndexParamVideoPortFormat: { OMX_VIDEO_PARAM_PORTFORMATTYPE *format = param; omx_base_video_PortType *port; r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_PORTFORMATTYPE)); if (r) return r; if (format->nPortIndex > 1) return OMX_ErrorBadPortIndex; if (format->nIndex >= 1) return OMX_ErrorNoMore; port = (omx_base_video_PortType *)priv->ports[format->nPortIndex]; memcpy(format, &port->sVideoParam, sizeof(OMX_VIDEO_PARAM_PORTFORMATTYPE)); break; } case OMX_IndexParamVideoBitrate: { OMX_VIDEO_PARAM_BITRATETYPE *bitrate = param; r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_BITRATETYPE)); if (r) return r; bitrate->eControlRate = priv->bitrate.eControlRate; bitrate->nTargetBitrate = priv->bitrate.nTargetBitrate; break; } case OMX_IndexParamVideoQuantization: { OMX_VIDEO_PARAM_QUANTIZATIONTYPE *quant = param; r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_QUANTIZATIONTYPE)); if (r) return r; quant->nQpI = priv->quant.nQpI; quant->nQpP = priv->quant.nQpP; quant->nQpB = priv->quant.nQpB; break; } case OMX_IndexParamVideoProfileLevelCurrent: { OMX_VIDEO_PARAM_PROFILELEVELTYPE *profile_level = param; r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_PROFILELEVELTYPE)); if (r) return r; profile_level->eProfile = priv->profile_level.eProfile; profile_level->eLevel = priv->profile_level.eLevel; break; } default: return omx_base_component_GetParameter(handle, idx, param); } return OMX_ErrorNone; } static OMX_ERRORTYPE vid_enc_SetConfig(OMX_HANDLETYPE handle, 
OMX_INDEXTYPE idx, OMX_PTR config) { OMX_COMPONENTTYPE *comp = handle; vid_enc_PrivateType *priv = comp->pComponentPrivate; OMX_ERRORTYPE r; int i; if (!config) return OMX_ErrorBadParameter; switch(idx) { case OMX_IndexConfigVideoIntraVOPRefresh: { OMX_CONFIG_INTRAREFRESHVOPTYPE *type = config; r = checkHeader(config, sizeof(OMX_CONFIG_INTRAREFRESHVOPTYPE)); if (r) return r; priv->force_pic_type = *type; break; } case OMX_IndexConfigCommonScale: { OMX_CONFIG_SCALEFACTORTYPE *scale = config; r = checkHeader(config, sizeof(OMX_CONFIG_SCALEFACTORTYPE)); if (r) return r; if (scale->xWidth < 176 || scale->xHeight < 144) return OMX_ErrorBadParameter; for (i = 0; i < OMX_VID_ENC_NUM_SCALING_BUFFERS; ++i) { if (priv->scale_buffer[i]) { priv->scale_buffer[i]->destroy(priv->scale_buffer[i]); priv->scale_buffer[i] = NULL; } } priv->scale = *scale; if (priv->scale.xWidth != 0xffffffff && priv->scale.xHeight != 0xffffffff) { struct pipe_video_buffer templat = {}; templat.buffer_format = PIPE_FORMAT_NV12; templat.width = priv->scale.xWidth; templat.height = priv->scale.xHeight; templat.interlaced = false; for (i = 0; i < OMX_VID_ENC_NUM_SCALING_BUFFERS; ++i) { priv->scale_buffer[i] = priv->s_pipe->create_video_buffer(priv->s_pipe, &templat); if (!priv->scale_buffer[i]) return OMX_ErrorInsufficientResources; } } break; } default: return omx_base_component_SetConfig(handle, idx, config); } return OMX_ErrorNone; } static OMX_ERRORTYPE vid_enc_GetConfig(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR config) { OMX_COMPONENTTYPE *comp = handle; vid_enc_PrivateType *priv = comp->pComponentPrivate; OMX_ERRORTYPE r; if (!config) return OMX_ErrorBadParameter; switch(idx) { case OMX_IndexConfigCommonScale: { OMX_CONFIG_SCALEFACTORTYPE *scale = config; r = checkHeader(config, sizeof(OMX_CONFIG_SCALEFACTORTYPE)); if (r) return r; scale->xWidth = priv->scale.xWidth; scale->xHeight = priv->scale.xHeight; break; } default: return omx_base_component_GetConfig(handle, idx, config); } return 
OMX_ErrorNone; } static OMX_ERRORTYPE vid_enc_MessageHandler(OMX_COMPONENTTYPE* comp, internalRequestMessageType *msg) { vid_enc_PrivateType* priv = comp->pComponentPrivate; if (msg->messageType == OMX_CommandStateSet) { if ((msg->messageParam == OMX_StateIdle ) && (priv->state == OMX_StateLoaded)) { struct pipe_video_codec templat = {}; omx_base_video_PortType *port; port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX]; templat.profile = enc_TranslateOMXProfileToPipe(priv->profile_level.eProfile); templat.level = enc_TranslateOMXLevelToPipe(priv->profile_level.eLevel); templat.entrypoint = PIPE_VIDEO_ENTRYPOINT_ENCODE; templat.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420; templat.width = priv->scale_buffer[priv->current_scale_buffer] ? priv->scale.xWidth : port->sPortParam.format.video.nFrameWidth; templat.height = priv->scale_buffer[priv->current_scale_buffer] ? priv->scale.xHeight : port->sPortParam.format.video.nFrameHeight; if (templat.profile == PIPE_VIDEO_PROFILE_MPEG4_AVC_BASELINE) { struct pipe_screen *screen = priv->screen->pscreen; templat.max_references = 1; priv->stacked_frames_num = screen->get_video_param(screen, PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH, PIPE_VIDEO_ENTRYPOINT_ENCODE, PIPE_VIDEO_CAP_STACKED_FRAMES); } else { templat.max_references = OMX_VID_ENC_P_PERIOD_DEFAULT; priv->stacked_frames_num = 1; } priv->codec = priv->s_pipe->create_video_codec(priv->s_pipe, &templat); } else if ((msg->messageParam == OMX_StateLoaded) && (priv->state == OMX_StateIdle)) { if (priv->codec) { priv->codec->destroy(priv->codec); priv->codec = NULL; } } } return omx_base_component_MessageHandler(comp, msg); } static OMX_ERRORTYPE vid_enc_AllocateInBuffer(omx_base_PortType *port, OMX_INOUT OMX_BUFFERHEADERTYPE **buf, OMX_IN OMX_U32 idx, OMX_IN OMX_PTR private, OMX_IN OMX_U32 size) { struct input_buf_private *inp; OMX_ERRORTYPE r; r = base_port_AllocateBuffer(port, buf, idx, private, size); if (r) return r; inp = (*buf)->pInputPortPrivate = 
/* NOTE(review): this chunk begins mid-function -- the lines below are the
 * tail of the input-buffer allocator: the header was just given a zeroed
 * input_buf_private (CALLOC_STRUCT), and its pBuffer storage is replaced
 * by a mapping of a back texture so input images land in a pipe resource. */
CALLOC_STRUCT(input_buf_private);
   if (!inp) {
      base_port_FreeBuffer(port, idx, *buf);
      return OMX_ErrorInsufficientResources;
   }
   list_inithead(&inp->tasks);

   /* Drop the CPU-side storage allocated by the base port; pBuffer will
    * point at the mapped back texture instead. */
   FREE((*buf)->pBuffer);
   r = enc_AllocateBackTexture(port, &inp->resource, &inp->transfer,
                               &(*buf)->pBuffer);
   if (r) {
      /* Undo both the private struct and the base-port buffer. */
      FREE(inp);
      base_port_FreeBuffer(port, idx, *buf);
      return r;
   }

   return OMX_ErrorNone;
}

/* UseBuffer path for the input port: the client supplies the memory, so we
 * only attach our per-buffer private data (an empty encode-task list). */
static OMX_ERRORTYPE vid_enc_UseInBuffer(omx_base_PortType *port,
                                         OMX_BUFFERHEADERTYPE **buf,
                                         OMX_U32 idx, OMX_PTR private,
                                         OMX_U32 size, OMX_U8 *mem)
{
   struct input_buf_private *inp;
   OMX_ERRORTYPE r;

   r = base_port_UseBuffer(port, buf, idx, private, size, mem);
   if (r)
      return r;

   inp = (*buf)->pInputPortPrivate = CALLOC_STRUCT(input_buf_private);
   if (!inp) {
      base_port_FreeBuffer(port, idx, *buf);
      return OMX_ErrorInsufficientResources;
   }
   list_inithead(&inp->tasks);

   return OMX_ErrorNone;
}

/* Free an input-port buffer: release pending encode tasks, unmap and
 * unreference the back texture, then return the header to the base port.
 * pBuffer is cleared first so the base port does not try to free the
 * (already unmapped) texture mapping. */
static OMX_ERRORTYPE vid_enc_FreeInBuffer(omx_base_PortType *port, OMX_U32 idx,
                                          OMX_BUFFERHEADERTYPE *buf)
{
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;
   struct input_buf_private *inp = buf->pInputPortPrivate;

   if (inp) {
      enc_ReleaseTasks(&inp->tasks);
      if (inp->transfer)
         pipe_transfer_unmap(priv->s_pipe, inp->transfer);
      pipe_resource_reference(&inp->resource, NULL);
      FREE(inp);
   }
   buf->pBuffer = NULL;

   return base_port_FreeBuffer(port, idx, buf);
}

/* Allocate an output-port buffer.  The bitstream storage is created lazily
 * at encode time, so the base-port allocation of pBuffer is dropped and only
 * the zeroed per-buffer private struct is kept. */
static OMX_ERRORTYPE vid_enc_AllocateOutBuffer(omx_base_PortType *port,
                                               OMX_INOUT OMX_BUFFERHEADERTYPE **buf,
                                               OMX_IN OMX_U32 idx,
                                               OMX_IN OMX_PTR private,
                                               OMX_IN OMX_U32 size)
{
   OMX_ERRORTYPE r;

   r = base_port_AllocateBuffer(port, buf, idx, private, size);
   if (r)
      return r;

   FREE((*buf)->pBuffer);
   (*buf)->pBuffer = NULL;
   (*buf)->pOutputPortPrivate = CALLOC(1, sizeof(struct output_buf_private));
   if (!(*buf)->pOutputPortPrivate) {
      base_port_FreeBuffer(port, idx, *buf);
      return OMX_ErrorInsufficientResources;
   }

   return OMX_ErrorNone;
}

/* Free an output-port buffer: unmap and unreference the bitstream resource
 * held in the private struct before returning the header to the base port. */
static OMX_ERRORTYPE vid_enc_FreeOutBuffer(omx_base_PortType *port, OMX_U32 idx,
                                           OMX_BUFFERHEADERTYPE *buf)
{
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;

   if (buf->pOutputPortPrivate) {
      struct output_buf_private *outp = buf->pOutputPortPrivate;
      if (outp->transfer)
         pipe_transfer_unmap(priv->t_pipe, outp->transfer);
      pipe_resource_reference(&outp->bitstream, NULL);
      FREE(outp);
      buf->pOutputPortPrivate = NULL;
   }
   buf->pBuffer = NULL;

   return base_port_FreeBuffer(port, idx, buf);
}

/* Thin wrapper: obtain an encode task sized for this port's video format. */
static struct encode_task *enc_NeedTask(omx_base_PortType *port)
{
   OMX_VIDEO_PORTDEFINITIONTYPE *def = &port->sPortParam.format.video;
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;

   return enc_NeedTask_common(priv, def);
}

/* Thin wrapper: copy the OMX input buffer contents into a pipe video buffer. */
static OMX_ERRORTYPE enc_LoadImage(omx_base_PortType *port,
                                   OMX_BUFFERHEADERTYPE *buf,
                                   struct pipe_video_buffer *vbuf)
{
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;
   OMX_VIDEO_PORTDEFINITIONTYPE *def = &port->sPortParam.format.video;

   return enc_LoadImage_common(priv, def, buf, vbuf);
}

/* Thin wrapper: scale the input buffer if needed; may replace *vbuf and
 * update *size accordingly. */
static void enc_ScaleInput(omx_base_PortType *port,
                           struct pipe_video_buffer **vbuf, unsigned *size)
{
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;
   OMX_VIDEO_PORTDEFINITIONTYPE *def = &port->sPortParam.format.video;

   enc_ScaleInput_common(priv, def, vbuf, size);
}

/* Thin wrapper: fill in rate-control / picture parameters. */
static void enc_ControlPicture(omx_base_PortType *port,
                               struct pipe_h264_enc_picture_desc *picture)
{
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;

   enc_ControlPicture_common(priv, picture);
}

/* Submit one encode task to the hardware codec: scale the input, allocate
 * the bitstream buffer, set up the picture description and run the
 * begin/encode/end frame sequence.  Results are retrieved later via
 * task->feedback. */
static void enc_HandleTask(omx_base_PortType *port, struct encode_task *task,
                           enum pipe_h2645_enc_picture_type picture_type)
{
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;
   unsigned size = priv->ports[OMX_BASE_FILTER_OUTPUTPORT_INDEX]->sPortParam.nBufferSize;
   struct pipe_video_buffer *vbuf = task->buf;
   struct pipe_h264_enc_picture_desc picture = {};

   /* -------------- scale input image --------- */
   enc_ScaleInput(port, &vbuf, &size);
   priv->s_pipe->flush(priv->s_pipe, NULL, 0);

   /* -------------- allocate output buffer --------- */
   task->bitstream = pipe_buffer_create(priv->s_pipe->screen,
                                        PIPE_BIND_VERTEX_BUFFER,
                                        PIPE_USAGE_STAGING, /* map for read */
                                        size);

   picture.picture_type = picture_type;
   picture.pic_order_cnt = task->pic_order_cnt;
   picture.base.profile = enc_TranslateOMXProfileToPipe(priv->profile_level.eProfile);
   picture.base.entry_point = PIPE_VIDEO_ENTRYPOINT_ENCODE;
   /* In restricted mode B frames are never used as references. */
   if (priv->restricted_b_frames && picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B)
      picture.not_referenced = true;
   enc_ControlPicture(port, &picture);

   /* -------------- encode frame --------- */
   priv->codec->begin_frame(priv->codec, vbuf, &picture.base);
   priv->codec->encode_bitstream(priv->codec, vbuf, task->bitstream, &task->feedback);
   priv->codec->end_frame(priv->codec, vbuf, &picture.base);
}

/* Flush the queued B frames: the newest queued frame is promoted to a P
 * frame (so the remaining B frames have a backward reference), then all
 * remaining B frames are encoded and the tasks moved onto inp->tasks. */
static void enc_ClearBframes(omx_base_PortType *port, struct input_buf_private *inp)
{
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;
   struct encode_task *task;

   if (list_is_empty(&priv->b_frames))
      return;

   task = LIST_ENTRY(struct encode_task, priv->b_frames.prev, list);
   list_del(&task->list);

   /* promote the last queued B frame to a P frame */
   priv->ref_idx_l0 = priv->ref_idx_l1;
   enc_HandleTask(port, task, PIPE_H2645_ENC_PICTURE_TYPE_P);
   list_addtail(&task->list, &inp->tasks);
   priv->ref_idx_l1 = priv->frame_num++;

   /* handle B frames */
   LIST_FOR_EACH_ENTRY(task, &priv->b_frames, list) {
      enc_HandleTask(port, task, PIPE_H2645_ENC_PICTURE_TYPE_B);
      if (!priv->restricted_b_frames)
         priv->ref_idx_l0 = priv->frame_num;
      priv->frame_num++;
   }

   enc_MoveTasks(&priv->b_frames, &inp->tasks);
}

/* Main per-buffer entry point: decide the picture type (IDR/P/B) for the
 * incoming frame, dispatch encode tasks, and either return the buffer to
 * the client or forward it downstream with finished tasks attached. */
static OMX_ERRORTYPE vid_enc_EncodeFrame(omx_base_PortType *port, OMX_BUFFERHEADERTYPE *buf)
{
   OMX_COMPONENTTYPE* comp = port->standCompContainer;
   vid_enc_PrivateType *priv = comp->pComponentPrivate;
   struct input_buf_private *inp = buf->pInputPortPrivate;
   enum pipe_h2645_enc_picture_type picture_type;
   struct encode_task *task;
   unsigned stacked_num = 0;
   OMX_ERRORTYPE err;

   /* Recycle tasks of a previous round-trip of this buffer. */
   enc_MoveTasks(&inp->tasks, &priv->free_tasks);
   task = enc_NeedTask(port);
   if (!task)
      return OMX_ErrorInsufficientResources;

   /* Empty buffer: only meaningful at end-of-stream, where all pending
    * B frames and stacked tasks are flushed out. */
   if (buf->nFilledLen == 0) {
      if (buf->nFlags & OMX_BUFFERFLAG_EOS) {
         buf->nFilledLen = buf->nAllocLen;
         enc_ClearBframes(port, inp);
         enc_MoveTasks(&priv->stacked_tasks, &inp->tasks);
         priv->codec->flush(priv->codec);
      }
      return base_port_SendBufferFunction(port, buf);
   }

   if (buf->pOutputPortPrivate) {
      /* A video buffer is already attached: swap it with the task's. */
      struct pipe_video_buffer *vbuf = buf->pOutputPortPrivate;
      buf->pOutputPortPrivate = task->buf;
      task->buf = vbuf;
   } else {
      /* ------- load input image into video buffer ---- */
      err = enc_LoadImage(port, buf, task->buf);
      if (err != OMX_ErrorNone) {
         FREE(task);
         return err;
      }
   }

   /* -------------- determine picture type --------- */
   if (!(priv->pic_order_cnt % OMX_VID_ENC_IDR_PERIOD_DEFAULT) ||
       priv->force_pic_type.IntraRefreshVOP) {
      enc_ClearBframes(port, inp);
      picture_type = PIPE_H2645_ENC_PICTURE_TYPE_IDR;
      priv->force_pic_type.IntraRefreshVOP = OMX_FALSE;
      priv->frame_num = 0;
      priv->pic_order_cnt = 0;
   } else if (priv->codec->profile == PIPE_VIDEO_PROFILE_MPEG4_AVC_BASELINE ||
              !(priv->pic_order_cnt % OMX_VID_ENC_P_PERIOD_DEFAULT) ||
              (buf->nFlags & OMX_BUFFERFLAG_EOS)) {
      picture_type = PIPE_H2645_ENC_PICTURE_TYPE_P;
   } else {
      picture_type = PIPE_H2645_ENC_PICTURE_TYPE_B;
   }

   task->pic_order_cnt = priv->pic_order_cnt++;

   if (picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B) {
      /* put frame at the tail of the queue */
      list_addtail(&task->list, &priv->b_frames);
   } else {
      /* handle I or P frame */
      priv->ref_idx_l0 = priv->ref_idx_l1;
      enc_HandleTask(port, task, picture_type);
      list_addtail(&task->list, &priv->stacked_tasks);
      LIST_FOR_EACH_ENTRY(task, &priv->stacked_tasks, list) {
         ++stacked_num;
      }
      /* Once the stack is full, release the oldest task to this buffer. */
      if (stacked_num == priv->stacked_frames_num) {
         struct encode_task *t;
         t = LIST_ENTRY(struct encode_task, priv->stacked_tasks.next, list);
         list_del(&t->list);
         list_addtail(&t->list, &inp->tasks);
      }
      priv->ref_idx_l1 = priv->frame_num++;

      /* handle B frames */
      LIST_FOR_EACH_ENTRY(task, &priv->b_frames, list) {
         enc_HandleTask(port, task, PIPE_H2645_ENC_PICTURE_TYPE_B);
         if (!priv->restricted_b_frames)
            priv->ref_idx_l0 = priv->frame_num;
         priv->frame_num++;
      }

      enc_MoveTasks(&priv->b_frames, &inp->tasks);
   }

   /* No finished tasks attached: give the buffer straight back to the
    * client; otherwise pass it on for bitstream extraction. */
   if (list_is_empty(&inp->tasks))
      return port->ReturnBufferFunction(port, buf);
   else
      return base_port_SendBufferFunction(port, buf);
}

/* Filter callback: copy finished bitstream data into the output buffer. */
static void vid_enc_BufferEncoded(OMX_COMPONENTTYPE *comp, OMX_BUFFERHEADERTYPE* input, OMX_BUFFERHEADERTYPE* output)
{
   vid_enc_PrivateType *priv = comp->pComponentPrivate;
   vid_enc_BufferEncoded_common(priv, input, output);
}
import numpy as np


class BatchIter:
    """Sequential iterator over all observations of a Data object.

    Yields (index, observation) pairs for indices 0 .. data.N - 1.
    """

    def __init__(self, data):
        self.data = data
        self.i = 0  # next index to yield

    def __iter__(self):
        return self

    def next(self):
        if self.i < self.data.N:
            i = self.i
            self.i += 1
            return (i, self.data.obs[i])
        else:
            raise StopIteration()

    # Python 3 iterator protocol; `next` kept for Python 2 callers.
    __next__ = next


class SampledIter:
    """Iterator yielding M observations drawn uniformly at random.

    Yields (index, observation) pairs; indices may repeat.
    """

    def __init__(self, data, M):
        self.data = data
        self.i = 0  # number of samples yielded so far
        self.M = M  # total number of samples to yield

    def __iter__(self):
        return self

    def next(self):
        if self.i < self.M:
            # BUG FIX: was np.random.randint(self.N) -- SampledIter has no
            # attribute N; the observation count lives on self.data.
            i = np.random.randint(self.data.N)
            self.i += 1
            return (i, self.data.obs[i])
        else:
            raise StopIteration()

    # Python 3 iterator protocol; `next` kept for Python 2 callers.
    __next__ = next


class Data:
    """Wrapper around an HDF5 file holding an N x M 'observations' dataset,
    plus an optional 'density' dataset."""

    def __init__(self, observations):
        # Imported lazily so the iterator classes above remain usable
        # without h5py installed.
        import h5py
        f = h5py.File(observations, 'r')
        self.obs = f['observations']
        self.N = self.obs.shape[0]  # num observations
        self.M = self.obs.shape[1]  # num features
        self.known_density = False
        if 'density' in f:
            self.known_density = True
            self.density = f['density']
        #TODO: remove this cheating (for piecewise checks)
        '''if "global_factor_concentration" in f:
            self.beta = f["global_factor_concentration"]
            self.mu = f["global_factor_features"]
            self.sigma = f["global_factor_feature_covariance"]
            self.pi = f["local_factor_concentration"]
            self.x = f["local_factor_features"]
            self.K = self.beta.shape[0]
            self.eta = f["global_features"]'''

    def batch_iter(self):
        """Return an iterator over every observation, in order."""
        return BatchIter(self)

    def sampled_iter(self, M):
        """Return an iterator over M uniformly sampled observations."""
        return SampledIter(self, M)
Yamato 980459 Liquid Line of Descent at 0.5 GPa: Approaching QUE94201 Introduction: The martian basaltic meteorites Yamato 980459 (Y98 hereafter) and QUE94201 (QUE) are thought by many to represent bona fide liquid compositions , in contrast to the majority of martian basalts, which are likely products of protracted crystal accumulation. Y98 is the most primitive (Mg#, molar Mg/Mg+Fe, ~65) martian basalt yet studied, whereas QUE is one of the most evolved (Mg # ~37). Yet they share several important features of isotopic and trace element composition that show they probably sampled the same (or very similar) highly depleted reservoir in the martian mantle . Because these two samples represent the best existing candidates for genuine liquids, it is natural to seek possible petrogenetic linkages between them to maximize the information about how martian basalts are formed and evolve. Recent efforts to this end include the employment of MELTS petrological models to derive a QUE-like liquid via fractional crystallization of Y98 , and 1-bar experimentation in which simulations of this process were less successful. In this contribution, I present the results of anhydrous equilibrium crystallization experiments on Y98 conducted at 0.5 GPa, and compare the down-temperature liquid compositions with that for QUE. These experiments complement those performed at 1 bar by Galenas et al. by simulating Y98 differentiation within the martian crust (0.5 GPa ≈ 40 km) rather than upon the surface. I show that the 0.5 GPa residual liquids approach (but do not yet reach) the composition of QUE, but under quite different circumstances from those modeled by Symes et al. . Additional experiments intended to mimic a fractional process will shortly be underway. Experimental and Analytical: Experiments were conducted using talc-pyrex assemblies fitted with graphite heaters and sample capsules in a piston-cylinder apparatus. 
Starting material was Y98A glass powder conditioned at the iron-wüstite buffer at 1000°C for ~24 hr. This material is the same powder (except for the IW conditioning step) used in the McKay group's original 1-bar work and by Galenas et al. , eliminating one possible source of discrepancy between these various studies. Experiments were run by holding Y98A at 1550°C (well above its 0.5 GPa liquidus) for 10 min prior to dropping to target temperature over 3-4 min, where they were held for several hours before quenching by cutting power to the assembly. The experiments reported here were run with final temperatures ranging from 1300-1210°C. Successful …
//By Ratna Priya

/**
 * Counts the structurally unique binary search trees on n nodes, which is
 * the n-th Catalan number (LeetCode 96 "Unique Binary Search Trees").
 */
class Solution {

    /** @return the number of unique BSTs storing values 1..n (n >= 0) */
    public int numTrees(int n) {
        return catlan(n);
    }

    /**
     * Bottom-up DP for Catalan numbers:
     * cat[i] = sum over j of cat[j] * cat[i-j-1] (left/right subtree split).
     * Method name "catlan" (sic) kept for backward compatibility.
     */
    public int catlan(int n) {
        // Size at least 2 so the cat[1] seed below is in bounds even for
        // n == 0 (the original threw ArrayIndexOutOfBoundsException there).
        int[] cat = new int[Math.max(n + 1, 2)];
        cat[0] = 1;
        cat[1] = 1;
        for (int i = 2; i <= n; i++) {
            cat[i] = 0;
            for (int j = 0; j < i; j++) {
                cat[i] += cat[j] * cat[i - j - 1];
            }
        }
        return cat[n];
    }
}
/**
 * @file Example.java
 * @author <NAME>
 * @version 1
 * @date 2015/01/13
 */
package org.samovich.technologies.basics.concepts.objects.treehouse;

/**
 * Driver class exercising PezDispenser: construction, emptiness checks,
 * loading (default and counted) and dispensing until empty.
 */
public class Example {

    public static void main(String[] args) {
        System.out.println("Wea are making a new Pez Dispenser.");
        PezDispenser dispenser = new PezDispenser("Yoda");
        System.out.println("The dispenser character is " + dispenser.getCharacterName() + ".");
        // Check if dispenser is empty.
        if (dispenser.isEmpty()) {
            System.out.println("It is currently empty!");
        }
        System.out.println("Loading...");
        // Load the dispenser.
        dispenser.load();
        if (!dispenser.isEmpty()) {
            System.out.println("It is no longer empty.");
        }
        // Dispense until empty ("despense" is the PezDispenser API spelling).
        while(dispenser.despense()) {
            System.out.println("Chomp!");
        }
        // Message for empty dispenser.
        if (dispenser.isEmpty()) {
            System.out.println("It is currently empty.");
        }
        // Partial loads; load(int) presumably adds the given count of
        // candies -- TODO confirm against PezDispenser.
        dispenser.load(4);
        dispenser.load(2);
        while(dispenser.despense()) {
            System.out.println("Chomp!");
        }
    }
}
//use this if a desired state is being published void SteeringController::desStateCallback(const nav_msgs::Odometry &des_state_rcvd) { des_state_speed_ = des_state_rcvd.twist.twist.linear.x; des_state_omega_ = des_state_rcvd.twist.twist.angular.z; des_state_x_ = des_state_rcvd.pose.pose.position.x; des_state_y_ = des_state_rcvd.pose.pose.position.y; des_state_pose_ = des_state_rcvd.pose.pose; des_state_quat_ = des_state_rcvd.pose.pose.orientation; des_state_psi_ = convertPlanarQuat2Phi(des_state_quat_); }
// Find the patch constant function (issues error, returns nullptr if not found) const TFunction* HlslParseContext::findPatchConstantFunction(const TSourceLoc& loc) { if (symbolTable.isFunctionNameVariable(patchConstantFunctionName)) { error(loc, "can't use variable in patch constant function", patchConstantFunctionName.c_str(), ""); return nullptr; } const TString mangledName = patchConstantFunctionName + "("; TVector<const TFunction*> candidateList; bool builtIn; symbolTable.findFunctionNameList(mangledName, candidateList, builtIn); if (candidateList.empty()) { error(loc, "patch constant function not found", patchConstantFunctionName.c_str(), ""); return nullptr; } if (candidateList.size() > 1) { error(loc, "ambiguous patch constant function", patchConstantFunctionName.c_str(), ""); return nullptr; } return candidateList[0]; }
/**
 * Counts down the hard-mode timer by the elapsed interval and refreshes
 * the on-screen time display, then delegates to checkHardTime() to react
 * if the timer has run out. No-op on the timer unless hard mode is active.
 *
 * @param elapsedTime seconds elapsed since the previous update
 */
private void updateHardMode(double elapsedTime) {
    if (hardMode) {
        hardTime -= elapsedTime;
        timeDisplay.changeDisplay("Time remaining: " + (int) hardTime);
    }
    checkHardTime();
}
CHILDREN'S EXPERIENCES OF MATHEMATICS There is general acceptance that mathematics learnt in school should be useful to the learner, both in other school situations and away from the educational context. However, educational programmes have achieved only limited success in promoting children's use of mathematics beyond the classroom. This article reports the findings of a study of primary school children, that describes using and applying mathematics as the children experience it, capturing their experiences as faithfully as possible. Findings, in part, parallel those of other studies, providing an indication of their generality: in almost all cases primary children do not see mathematics as being a significant feature of the world beyond school. Further findings suggest that, if we are to improve children's use of mathematics beyond the classroom, we must promote their movement towards more productive beliefs about the nature of mathematics. A categorisation of metaphoric descriptions of such beliefs is provided and suggestions are made for improving practice.
The question had to be asked, but he hated to ask it. He looked at me, asking for a job tending his bar a few nights a week, and mostly nodded, excited at the idea of us, friends of 15 years, working together at last. I sought work with a bit more flexibility, and a different decorum, than my gig reporting for our city's newspaper. And I hoped bartending at his young, but successful, cocktail bar to be the right fit. So we met over lengua tacos and aguas frescas to discuss the possibility. I thought the job to be a lock, but I sensed a bit of apprehension from him. Experience wasn't the problem. A decade in the service industry, a respected name in the community, time spent behind a bustling urban bar in Ireland, a local reputation as a knowledgeable food writer, a spirits lover -- I needed no resume. Still, as an employer, he couldn't neglect one concern. Finally, he lobbed it at me. "Do you think it's the right fit for your family?" If a stranger had asked me this, perhaps I'd have socked him, but I understood the question. My daughter, Mavis, was two-and-a-half at the time. She's a sociable comet of a kid with a blonde tail trailing behind as she charges a playground, and I am over the moon for her. In leaving the office behind, I wagered I could freelance, volunteer at her school, and on a whim, my pastry chef wife, Mavis and I could take off on a day trip, pick apples in the mountains or something. But bartending meant missing dinner and bedtimes and sleeping through morning rituals, he warned. In asking, he only wanted to be sure that I'd thought it through. I had, or least thought I had. A few weeks later, I completed my first solo close, an entertaining night immersed in the drinking habits of my soon-to-be regulars. I locked the door and the place glowed with order: backbar labels faced out, cooler stocked, floors mopped. Sometime after 4 a.m., I slid into bed next to my daughter and thought, "I can do this!" 
Three hours later, sweet words pierced a deep sleep: "Daddy, it's wake up time!" Then again, maybe I can't. For André, the key is to be present in every moment he spends with his daughter. Photo by Johanna Nicol. Parents in the bartending world are rare, but we're a resilient breed, hard-working and sleep deprived. It's not hard to imagine why new parents leave bartending behind: In addition to hours on our feet, unclogging sinks and kicking out drunks, a babysitter who works as late as we do is a mythological figure. Just as fictional is the concept of a work-life balance, but that's not something we wanted, anyway. We love our children and drinking culture. Cocktail development and early childhood development interest us equally. And while steadying both worlds induces hangover-like headaches, we're not giving it up anytime soon. Thankfully, bartenders like me have role models. If there's someone in the industry who exemplifies the best traits of the bartender-slash-parent, it's Kellie Thorn, beverage director for and consultant to Hugh Acheson's four Georgia restaurants. I first met Kellie behind the bar at Empire State, Acheson's Atlanta outpost, and have now delighted myself with cocktails she's composed at bars in three different cities. She became a master in the trade while raising Finnian Sandifer, her six-year-old doppleganger who slurps oysters and orders mocktails with confidence. Getting back behind the bar after Finn's birth took its toll. Parents, especially mothers, suffer a separation anxiety when they head back to work after leave, and for Kellie, it was no different. Running on two or three hours of sleep a night, Kellie came home exhausted and went back to work exhausted. It was a difficult and emotional experience, she admits. "Those first couple of months were almost dreamlike," she remembers. Finn's dad, Trip, is also a bartender, which means both parents spend hours away from home. 
Their extended service industry family, it turns out, makes perfect babysitters, understanding why mom and dad wouldn't be home until 3 a.m. On days off, Kellie refuses to skimp on quality time. She turns her phone off and leaves email unchecked. Most nights, she says no to industry events in favor of couch snuggles before bed. "It's never easy, it's damn hectic, and you are constantly filled with anxiety that you aren't doing a good job," she says. "But like any parent, you make it work." Luckily, my wife's pastry career and my bartending and writing have been aided both by grandparents and co-workers who don't mind Mavis tagging along for meetings or her spending a quiet happy hour drinking OJ in a booth. We make it work, as Kellie says, hoping she's impacted as little as possible by our work lives. It's never easy, as Kellie says, but I'd give my wife and I a B-plus on work-life balance. Not too bad, and definitely getting better. If there's a superpower I've developed as a bartender, it's serving other parents. New moms and dads on date night, their babes asleep at home under the watch of grandparents, sidle up to the bar unsure of themselves. The last time they knocked back a cocktail feels like a lifetime ago. What do they even like anymore? I recognize in them the same exhaustion that I certainly exude. I've been there, I want to say — I'm still there, and it's okay. But I say nothing. I don't mention children and won't. If we start talking kids, they'll end their date early, desperate to get home and cuddle. I would. I'll finish my night wishing I could feel my daughter tossing and turning and talking in her sleep. But we need this moment, both of us. Parents need to check in on each other, and a quiet bar and a cold cocktail only eases the conversation. I shake the finest Mezcal Last Word I can muster, and stir a Sorghum Old Fashioned until it's chilled just so. 
I hear the ice crack against the tin and meditate, maybe thinking about how to be a better dad and husband when I wake up tomorrow. Maybe I zone out, because I need that, too. I slide their drinks across the bar and smile. Relax, you two, if just for a little while.
package ch18io;

import java.io.*;

/**
 * Demonstrates taking control of serialization with private writeObject()
 * and readObject() methods: the transient field is written and restored by
 * hand, so it survives the round trip even though default serialization
 * would skip it.
 *
 * <pre>
 * Output:
 * Before:
 * Not Transient: Test1
 * Transient: Test2
 * After:
 * Not Transient: Test1
 * Transient: Test2
 * </pre>
 */
public class D39_SerialCtl implements Serializable {
    private String a;
    private transient String b;

    public D39_SerialCtl(String aa, String bb) {
        a = "Not Transient: " + aa;
        b = "Transient: " + bb;
    }

    public String toString() {
        return a + "\n" + b;
    }

    // Invoked reflectively by ObjectOutputStream in place of the default logic.
    private void writeObject(ObjectOutputStream stream) throws IOException {
        stream.defaultWriteObject(); // non-transient fields
        stream.writeObject(b);       // explicitly append the transient field
    }

    // Invoked reflectively by ObjectInputStream; must mirror writeObject().
    private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
        stream.defaultReadObject();
        b = (String) stream.readObject();
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException {
        D39_SerialCtl original = new D39_SerialCtl("Test1", "Test2");
        System.out.println("Before:\n" + original);

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        ObjectOutputStream writer = new ObjectOutputStream(bytes);
        writer.writeObject(original);

        // Now get it back:
        ObjectInputStream reader = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        D39_SerialCtl restored = (D39_SerialCtl) reader.readObject();
        System.out.println("After:\n" + restored);
    }
}
A worldwide population study of the Ag-system haplotypes, a genetic polymorphism of human low-density lipoprotein. The aim of this investigation is to examine the distribution of the Ag immunological polymorphism in human populations on a worldwide scale and to look for possible explanations of this distribution in the field of modern human peopling history and Ag-system evolution. Extensive Ag-antigene typings were carried out on 13 human population samples, including sub-Saharan African, European, west and east Asiatic, Melanesian, Australian aborigine, and Amerindian groups. Complete Ag-haplotype frequencies were estimated by maximum-likelihood-score procedures, and the data were analyzed by genetic distance computations and principal coordinate projections. With the exception of the Amerindian sample, the Ag polymorphism is shown to be highly polymorphic in all the populations tested. Their genetic relationships appear to be closely correlated to their geographical distribution. This suggests that the Ag system has evolved as a neutral or nearly neutral polymorphism and that it is highly informative for modern human peopling history studies. From the worldwide Ag haplotypic distributions, a model for the Ag molecular structure is derived. According to this model and to the most recent results obtained from molecular data, the establishment of the Ag polymorphism could be explained by several mutations and recombination events between the haplotypes most frequently found in human populations today. As a conclusion, genetic and paleontological data suggest that the genetic structure of caucasoid populations (located from North Africa to India) may be the least differentiated from an ancestral genetic stock. Worldwide genetic differentiations are properly explained as the results of westward and eastward human migrations from a Near East-centered but undefined geographical area where modern humans may have originated. 
The importance of Ag polymorphism analyses for the reconstruction of human settlement history and origins is discussed in the light of the main conclusions of the most recent genetic polymorphism studies.
/*
 * Minimum Checksum Coverage is located at the RX side (9.2.1). This means that
 * `rx' holds when the sending peer informs about his partial coverage via a
 * ChangeR() option. In the other case, we are the sender and the receiver
 * announces its coverage via ChangeL() options. The policy here is to honour
 * such communication by enabling the corresponding partial coverage - but only
 * if it has not been set manually before; the warning here means that all
 * packets will be dropped.
 */
static int dccp_hdlr_min_cscov(struct sock *sk, u64 cscov, bool rx)
{
	struct dccp_sock *dp = dccp_sk(sk);

	if (rx)
		/* Peer announced its sender coverage: adopt as RX coverage. */
		dp->dccps_pcrlen = cscov;
	else {
		if (dp->dccps_pcslen == 0)	/* not set manually via sockopt */
			dp->dccps_pcslen = cscov;
		else if (cscov > dp->dccps_pcslen)
			/* Manual setting is below what the peer requires, so
			 * every packet we send will fail its coverage check. */
			DCCP_WARN("CsCov %u too small, peer requires >= %u\n",
				  dp->dccps_pcslen, (u8)cscov);
	}
	return 0;
}
/*
 * Given an array of int's as returned by is_path_to, allocates a string of
 * their names joined by newlines. Returns the size of the allocated buffer
 * in *sz and frees path.
 */
static void
path_to_str(int *path, char **cpp, size_t *sz)
{
	int i;
	graph_vertex_t *v;
	size_t allocd, new_allocd;
	char *new, *name;

	assert(MUTEX_HELD(&dgraph_lock));
	assert(path[0] != -1);

	/* Start with an empty, NUL-only string and grow it per vertex. */
	allocd = 1;
	*cpp = startd_alloc(1);
	(*cpp)[0] = '\0';

	for (i = 0; path[i] != -1; ++i) {
		name = NULL;
		v = vertex_get_by_id(path[i]);
		if (v == NULL)
			/* Vertex vanished since the path was computed. */
			name = "<deleted>";
		else if (v->gv_type == GVT_INST || v->gv_type == GVT_SVC)
			name = v->gv_name;

		/* Other vertex types contribute nothing to the string. */
		if (name != NULL) {
			/* Grow the buffer: copy old contents, append
			 * "name\n", free the old allocation. */
			new_allocd = allocd + strlen(name) + 1;
			new = startd_alloc(new_allocd);
			(void) strcpy(new, *cpp);
			(void) strcat(new, name);
			(void) strcat(new, "\n");

			startd_free(*cpp, allocd);

			*cpp = new;
			allocd = new_allocd;
		}
	}

	/* Consume the input: i now indexes the -1 terminator. */
	startd_free(path, sizeof (int) * (i + 1));

	*sz = allocd;
}
Assessing Ecological Quality Based on Remote Sensing Images in Wugong Mountain This study takes multitemporal Landsat images as data source to explore the changes of Fractional Vegetation Cover (FVC) and ecological quality (EQ) of the Wugong Mountain (WGM). The regression model obtained from MODIS NDVI data was used to correct the temporal difference of Landsat normalized difference vegetation index (NDVI) data in 2019, and the Remote Sensing based Ecological Index (RSEI) was calculated to assess the change of FVC and EQ in WGM. Results showed that, in the past 25 yr, the mean value of FVC has increased from 0.795 in 1994 to 0.890 in 2019, which consequently improved EQ from 0.746 to 0.773 respectively. The relationship among topographic factors, FVC and EQ was analyzed by using elevation, slope, and aspect. The effect of topographic factor on FVC and EQ in WGM was in the order of slope, aspect, and elevation. The value of FVC and RSEI increased first and then decreased with increment of elevation and slope. In detail, the favorable terrain of FVC and EQ was the area with an elevation of 500–1,400 m and a slope of 8°–35°. The value of FVC and RSEI in semi‐sunny and sunny slopes were higher than that of shady slopes. The unfavorable terrain of FVC and EQ was the area of the meadow with an elevation greater than 1,700 m. This work highlights the importance of FVC and EQ which can provide scientific decision‐making for the sustainability of WGM, and coordinate development of ecological environment protection and economic construction.
// Tests for parameter binding through a chi router: URL parameters, query
// strings (scalars and slices), request bodies and handler return values.
package bindparameters

import (
	"net/http"
	"strings"
	"testing"

	"github.com/go-chi/chi"
	"github.com/go-chi/render"
	"github.com/steinfletcher/apitest"
	"github.com/stretchr/testify/assert"
)

// bindChiParametersInto adapts Into() to chi: URL parameters are resolved
// case-insensitively from the chi route context, and up to two string
// return values of the bound handler function are unpacked.
func bindChiParametersInto(r *http.Request, fn interface{}) (string, string) {
	getURLParam := func(key string) string {
		if rctx := chi.RouteContext(r.Context()); rctx != nil {
			// Walk backwards so the most recently added duplicate wins.
			for k := len(rctx.URLParams.Keys) - 1; k >= 0; k-- {
				if strings.ToLower(rctx.URLParams.Keys[k]) == strings.ToLower(key) {
					return rctx.URLParams.Values[k]
				}
			}
		}
		return ""
	}
	returnValues := Into(r, getURLParam, fn)
	if lenV := len(returnValues); lenV == 0 {
		return "", ""
	} else if lenV == 1 {
		return returnValues[0].Interface().(string), ""
	} else {
		return returnValues[0].Interface().(string), returnValues[1].Interface().(string)
	}
}

// application is a minimal test fixture wrapping a chi router.
type application struct {
	Router *chi.Mux
}

// newApp builds a router that renders all responses as JSON.
func newApp() *application {
	router := chi.NewRouter()
	router.Use(render.SetContentType(render.ContentTypeJSON))
	return &application{Router: router}
}

// TestURLParameters binds two integer URL path parameters.
func TestURLParameters(t *testing.T) {
	router := newApp().Router
	router.Get("/user/{id}/post/{postId}", func(w http.ResponseWriter, r *http.Request) {
		bindChiParametersInto(r, func(params struct {
			ID     int `json:"id"`
			PostID int `json:"postId"`
		}) {
			render.JSON(w, r, params)
		})
	})

	apitest.New().
		Handler(router).
		Get("/user/1234/post/9876").
		Expect(t).
		Body(`{"id": 1234, "postId": 9876}`).
		Status(http.StatusOK).
		End()
}

// TestQueryStringOfSimpleTypes checks scalar query parameters; omitted
// parameters bind to their zero values.
func TestQueryStringOfSimpleTypes(t *testing.T) {
	router := newApp().Router
	router.Get("/user/{id}", func(w http.ResponseWriter, r *http.Request) {
		bindChiParametersInto(r, func(params struct {
			ID         int    `json:"id"`
			FilterInt  int    `json:"filterInt"`
			FilterStr  string `json:"filterStr"`
			FilterBool bool   `json:"filterBool"`
		}) {
			render.JSON(w, r, params)
		})
	})

	// GET /user/1234
	apitest.New().
		Handler(router).
		Get("/user/1234").
		Expect(t).
		Body(`{"id":1234,"filterInt":0,"filterStr":"","filterBool":false}` + "\n").
		Status(http.StatusOK).
		End()

	// GET /user/1234?filterInt=10
	apitest.New().
		Handler(router).
		Get("/user/1234").
		Query("filterInt", "10").
		Expect(t).
		Body(`{"id":1234,"filterInt":10,"filterStr":"","filterBool":false}` + "\n").
		Status(http.StatusOK).
		End()

	// GET /user/1234?filterInt=10&filterStr=hello
	apitest.New().
		Handler(router).
		Get("/user/1234").
		Query("filterInt", "10").
		Query("filterStr", "hello").
		Expect(t).
		Body(`{"id":1234,"filterInt":10,"filterStr":"hello","filterBool":false}` + "\n").
		Status(http.StatusOK).
		End()

	// GET /user/1234?filterInt=20&filterStr=hello&filterBool=true
	apitest.New().
		Handler(router).
		Get("/user/1234").
		Query("filterInt", "20").
		Query("filterStr", "hello").
		Query("filterBool", "true").
		Expect(t).
		Body(`{"id":1234,"filterInt":20,"filterStr":"hello","filterBool":true}` + "\n").
		Status(http.StatusOK).
		End()
}

// TestQueryStringOfSlices checks repeated query parameters binding into
// slices; omitted parameters bind to empty (non-nil) slices.
func TestQueryStringOfSlices(t *testing.T) {
	router := newApp().Router
	router.Get("/user/{id}", func(w http.ResponseWriter, r *http.Request) {
		bindChiParametersInto(r, func(params struct {
			ID            int      `json:"id"`
			FilterArrInt  []int    `json:"filterArrInt"`
			FilterArrStr  []string `json:"filterArrStr"`
			FilterArrBool []bool   `json:"filterArrBool"`
		}) {
			render.JSON(w, r, params)
		})
	})

	// GET /user/1234
	apitest.New().
		Handler(router).
		Get("/user/1234").
		Expect(t).
		Body(`{"id":1234,"filterArrInt":[],"filterArrStr":[],"filterArrBool":[]}` + "\n").
		Status(http.StatusOK).
		End()

	// GET /user/1234?filterArrInt=1
	// GET /user/1234?filterArrInt[]=1
	apitest.New().
		Handler(router).
		Get("/user/1234").
		Query("filterArrInt", "1").
		Expect(t).
		Body(`{"id":1234,"filterArrInt":[1],"filterArrStr":[],"filterArrBool":[]}` + "\n").
		Status(http.StatusOK).
		End()

	// GET /user/1234?filterArrInt=1&filterArrInt=2
	// GET /user/1234?filterArrInt=1[]&filterArrInt[]=2
	apitest.New().
		Handler(router).
		Get("/user/1234").
		Query("filterArrInt", "1").
		Query("filterArrInt", "2").
		Expect(t).
		Body(`{"id":1234,"filterArrInt":[1,2],"filterArrStr":[],"filterArrBool":[]}` + "\n").
		Status(http.StatusOK).
		End()

	// GET /user/1234?filterArrInt=1&filterArrInt=2&filterArrStr=one&filterArrStr=two&filterArrBool=true&filterArrBool=false
	// GET /user/1234?filterArrInt[]=1&filterArrInt[]=2&filterArrStr[]=one&filterArrStr[]=two&filterArrBool[]=true&filterArrBool[]=false
	apitest.New().
		Handler(router).
		Get("/user/1234").
		// Ints
		Query("filterArrInt", "1").
		Query("filterArrInt", "2").
		// Strings
		Query("filterArrStr", "one").
		Query("filterArrStr", "two").
		// Bools
		Query("filterArrBool", "true").
		Query("filterArrBool", "false").
		Expect(t).
		Body(`{"id":1234,"filterArrInt":[1,2],"filterArrStr":["one","two"],"filterArrBool":[true,false]}` + "\n").
		Status(http.StatusOK).
		End()
}

// TestRequestBody checks that a JSON request body binds into a struct
// parameter alongside URL parameters.
func TestRequestBody(t *testing.T) {
	router := newApp().Router

	type User struct {
		Name string `json:"name"`
		Age  int    `json:"age"`
	}

	router.Post("/user/{id}", func(w http.ResponseWriter, r *http.Request) {
		bindChiParametersInto(r, func(params struct {
			ID int `json:"id"`
		}, user User) {
			response := struct {
				ID   int  `json:"id"`
				User User `json:"user"`
			}{
				ID:   params.ID,
				User: user,
			}
			render.JSON(w, r, response)
		})
	})

	// POST /user/1234
	apitest.New().
		Handler(router).
		Post("/user/1234").
		JSON(`{"name":"Ronald","age":27}`).
		Expect(t).
		Body(`{"id":1234,"user":{"name":"Ronald","age":27}}` + "\n").
		Status(http.StatusOK).
		End()
}

// TestReturnValues checks that the bound function's return values are
// propagated back through bindChiParametersInto.
func TestReturnValues(t *testing.T) {
	router := newApp().Router
	router.Get("/user/{id}/post/{postId}", func(w http.ResponseWriter, r *http.Request) {
		s, ss := bindChiParametersInto(r, func(params struct {
			ID     int `json:"id"`
			PostID int `json:"postId"`
		}) (string, string) {
			render.JSON(w, r, params)
			return "hello", "world"
		})
		// NOTE(review): assert.Equal expects (t, expected, actual); the
		// arguments here are reversed, though equality is symmetric.
		assert.Equal(t, s, "hello")
		assert.Equal(t, ss, "world")
	})

	apitest.New().
		Handler(router).
		Get("/user/1234/post/9876").
		Expect(t).
		Body(`{"id": 1234, "postId": 9876}`).
		Status(http.StatusOK).
		End()
}
package mysql

import (
	"bytes"
	"database/sql/driver"
	"fmt"
	"log"
	"runtime/debug"
	"strings"
)

// eventParser holds all state needed to parse a binlog event stream:
// table-map bookkeeping, the current binlog position/GTID, and the
// metadata connection used to look up table schemas.
type eventParser struct {
	format                *FormatDescriptionEvent
	tableMap              map[uint64]*TableMapEvent // last TableMapEvent seen for each tableId
	tableNameMap          map[string]uint64         // schema.table used as key, mapped to its tableId
	tableSchemaMap        map[uint64]*tableStruct   // table structure for each tableId
	dataSource            *string
	connStatus            StatusFlag
	conn                  MysqlConnection
	dumpBinLogStatus      StatusFlag
	binlogFileName        string
	currentBinlogFileName string
	binlogPosition        uint32
	gtid                  string
	binlogTimestamp       uint32
	lastEventID           uint64
	maxBinlogFileName     string
	maxBinlogPosition     uint32
	eventDo               []bool
	ServerId              uint32
	connectionId          string
	binlog_checksum       bool
	filterNextRowEvent    bool
	binlogDump            *BinlogDump
	lastMapEvent          *TableMapEvent // tableId of the most recent map event, used for the row events that follow; in practice the tableId parsed out of a row event may not match, and since a row event directly follows its map event the last map event is used instead
	callbackErrChan       chan error
	isGTID                bool
	nextEventID           uint64 // next event ID; do not modify directly
	lastPrevtiousGTIDSMap map[string]Intervals // PrevtiousGTIDS mapping of the binlog file currently being parsed
}

// newEventParser returns a parser with all lookup maps initialized and
// default connection identifiers set.
func newEventParser(binlogDump *BinlogDump) (parser *eventParser) {
	parser = new(eventParser)
	parser.tableMap = make(map[uint64]*TableMapEvent)
	parser.tableNameMap = make(map[string]uint64)
	parser.tableSchemaMap = make(map[uint64]*tableStruct)
	parser.eventDo = make([]bool, 164, 164)
	parser.ServerId = 21036
	parser.connectionId = ""
	parser.maxBinlogFileName = ""
	parser.maxBinlogPosition = 0
	parser.binlog_checksum = false
	parser.filterNextRowEvent = false
	parser.binlogDump = binlogDump
	return
}

// getNextEventID increments and returns the monotonically growing event ID.
func (parser *eventParser) getNextEventID() uint64 {
	parser.nextEventID += 1
	return parser.nextEventID
}

// getGTIDSIDStart returns the interval start recorded for the given server
// SID in the current file's PrevtiousGTIDS map, defaulting to 1.
func (parser *eventParser) getGTIDSIDStart(sid string) int64 {
	if _, ok := parser.lastPrevtiousGTIDSMap[sid]; ok {
		return parser.lastPrevtiousGTIDSMap[sid].Start
	}
	return 1
}

// saveBinlog records the parser's current binlog position/GTID from the
// given event, under the BinlogDump lock. Only transaction-delimiting and
// rotation events advance the saved state.
func (parser *eventParser) saveBinlog(event *EventReslut) {
	switch event.Header.EventType {
	case QUERY_EVENT, XID_EVENT:
		if event.BinlogFileName == "" {
			return
		}
		parser.binlogDump.Lock()
		parser.binlogFileName = event.BinlogFileName
		parser.binlogPosition = event.Header.LogPos
		parser.binlogTimestamp = event.Header.Timestamp
		parser.lastEventID = event.EventID
		parser.binlogDump.Unlock()
		break
	case ROTATE_EVENT:
		parser.binlogDump.Lock()
		parser.currentBinlogFileName = event.BinlogFileName
		parser.lastEventID = event.EventID
		parser.binlogDump.Unlock()
	case GTID_EVENT, ANONYMOUS_GTID_EVENT:
		parser.binlogDump.Lock()
		parser.binlogTimestamp = event.Header.Timestamp
		parser.lastEventID = event.EventID
		parser.gtid = event.Gtid
		parser.binlogDump.Unlock()
		break
	default:
		break
	}
}

// parseEvent decodes one raw binlog event packet into an EventReslut,
// dispatching on the event type byte (data[4]). Row events are resolved
// against the most recent table map event (see lastMapEvent).
func (parser *eventParser) parseEvent(data []byte) (event *EventReslut, filename string, err error) {
	var buf *bytes.Buffer
	// When checksums are enabled the last 4 bytes are the CRC32, which the
	// per-event parsers must not see.
	if parser.binlog_checksum {
		buf = bytes.NewBuffer(data[0 : len(data)-4])
	} else {
		buf = bytes.NewBuffer(data)
	}
	//log.Println("data[4]:",data[4])
	switch EventType(data[4]) {
	case HEARTBEAT_EVENT, IGNORABLE_EVENT:
		return
	case PREVIOUS_GTIDS_EVENT:
		return
	case GTID_EVENT, ANONYMOUS_GTID_EVENT:
		var GtidEvent *GTIDEvent
		GtidEvent, err = parser.parseGTIDEvent(buf)
		event = &EventReslut{
			Header:         GtidEvent.header,
			BinlogFileName: parser.currentBinlogFileName,
			BinlogPosition: GtidEvent.header.LogPos,
			Gtid:           fmt.Sprintf("%s:%d-%d", GtidEvent.SID36, parser.getGTIDSIDStart(GtidEvent.SID36), GtidEvent.GNO),
		}
		break
	case FORMAT_DESCRIPTION_EVENT:
		parser.format, err = parser.parseFormatDescriptionEvent(buf)
		/*
			i := strings.IndexAny(parser.format.mysqlServerVersion, "-")
			var version string
			if i> 0{
				version = parser.format.mysqlServerVersion[0:i]
			}else{
				version = parser.format.mysqlServerVersion
			}
			if len(version)==5{
				version = strings.Replace(version, ".", "", 1)
				version = strings.Replace(version, ".", "0", 1)
			}else{
				version = strings.Replace(version, ".", "", -1)
			}
			parser.mysqlVersionInt,err = strconv.Atoi(version)
			if err != nil{
				log.Println("mysql version:",version,"err",err)
			}
		*/
		//log.Println("binlogVersion:",parser.format.binlogVersion,"server version:",parser.format.mysqlServerVersion)
		event = &EventReslut{
			Header: parser.format.header,
		}
		return
	case QUERY_EVENT:
		var queryEvent *QueryEvent
		queryEvent, err = parser.parseQueryEvent(buf)
		event = &EventReslut{
			Header:         queryEvent.header,
			SchemaName:     queryEvent.schema,
			BinlogFileName: parser.currentBinlogFileName,
			TableName:      "",
			Query:          queryEvent.query,
			BinlogPosition: queryEvent.header.LogPos,
		}
		// A COMMIT query ends a transaction: attach the current GTID.
		switch queryEvent.query {
		case "COMMIT":
			event.Gtid = parser.gtid
		default:
			break
		}
		break
	case ROTATE_EVENT:
		var rotateEvent *RotateEvent
		rotateEvent, err = parser.parseRotateEvent(buf)
		event = &EventReslut{
			Header:         rotateEvent.header,
			BinlogFileName: rotateEvent.filename,
			BinlogPosition: rotateEvent.header.LogPos,
		}
		// Cached table schemas may be stale in the new file; force reload.
		for _, v := range parser.tableSchemaMap {
			v.needReload = true
		}
		parser.saveBinlog(event)
		log.Println(*parser.dataSource, " ROTATE_EVENT ", event.BinlogFileName)
		break
	case TABLE_MAP_EVENT:
		var table_map_event *TableMapEvent
		table_map_event, err = parser.parseTableMapEvent(buf)
		//log.Println("table_map_event:",table_map_event)
		parser.tableMap[table_map_event.tableId] = table_map_event
		parser.lastMapEvent = table_map_event
		//log.Println("table_map_event:",*table_map_event,"tableId:",table_map_event.tableId," schemaName:",table_map_event.schemaName," tableName:",table_map_event.tableName)
		// Decide whether the row events that follow should be filtered out,
		// and (re)load the table schema when it is needed and stale.
		if parser.binlogDump.CheckReplicateDb(table_map_event.schemaName, table_map_event.tableName) == false {
			parser.filterNextRowEvent = true
		} else {
			parser.filterNextRowEvent = false
			_, ok := parser.tableSchemaMap[table_map_event.tableId]
			if !ok || (parser.tableSchemaMap[table_map_event.tableId].needReload == true) {
				parser.GetTableSchema(table_map_event.tableId, table_map_event.schemaName, table_map_event.tableName)
			}
		}
		event = &EventReslut{
			Header:         table_map_event.header,
			BinlogFileName: parser.currentBinlogFileName,
			BinlogPosition: table_map_event.header.LogPos,
			SchemaName:     parser.tableMap[table_map_event.tableId].schemaName,
			TableName:      parser.tableMap[table_map_event.tableId].tableName,
		}
		break
	case WRITE_ROWS_EVENTv0, WRITE_ROWS_EVENTv1, WRITE_ROWS_EVENTv2, UPDATE_ROWS_EVENTv0, UPDATE_ROWS_EVENTv1, UPDATE_ROWS_EVENTv2, DELETE_ROWS_EVENTv0, DELETE_ROWS_EVENTv1, DELETE_ROWS_EVENTv2:
		var rowsEvent *RowsEvent
		rowsEvent, err = parser.parseRowsEvent(buf)
		if err != nil {
			log.Println("row event err:", err)
		}
		// Schema/table names come from lastMapEvent, not from the row
		// event's own tableId (see the field comment on lastMapEvent).
		if tableInfo, ok := parser.tableSchemaMap[rowsEvent.tableId]; ok {
			event = &EventReslut{
				Header:         rowsEvent.header,
				BinlogFileName: parser.currentBinlogFileName,
				BinlogPosition: rowsEvent.header.LogPos,
				SchemaName:     parser.lastMapEvent.schemaName,
				TableName:      parser.lastMapEvent.tableName,
				Rows:           rowsEvent.rows,
				Pri:            tableInfo.Pri,
				ColumnMapping:  tableInfo.ColumnMapping,
			}
		} else {
			event = &EventReslut{
				Header:         rowsEvent.header,
				BinlogFileName: parser.currentBinlogFileName,
				BinlogPosition: rowsEvent.header.LogPos,
				SchemaName:     parser.lastMapEvent.schemaName,
				TableName:      parser.lastMapEvent.tableName,
				Rows:           rowsEvent.rows,
			}
		}
		break
	case XID_EVENT:
		var xidEvent *XIdEvent
		xidEvent, err = parser.parseXidEvent(buf)
		if err != nil {
			log.Println("xid event err:", err)
		}
		event = &EventReslut{
			Header:         xidEvent.header,
			BinlogFileName: parser.currentBinlogFileName,
			BinlogPosition: xidEvent.header.LogPos,
			SchemaName:     "",
			TableName:      "",
			Rows:           nil,
			Gtid:           parser.gtid,
		}
		break
	default:
		var genericEvent *GenericEvent
		genericEvent, err = parseGenericEvent(buf)
		event = &EventReslut{
			Header: genericEvent.header,
		}
		event.BinlogFileName = parser.currentBinlogFileName
		event.BinlogPosition = genericEvent.header.LogPos
	}
	return
}

// Initializes the connection used to query MySQL for table structures and
// similar metadata; this is NOT the replication (dump) connection.
func (parser *eventParser) initConn() {
	dbopen := &mysqlDriver{}
	conn, err := dbopen.Open(*parser.dataSource)
	if err != nil {
		panic(err)
	} else {
		parser.connStatus = STATUS_RUNNING
	}
	parser.conn = conn.(MysqlConnection)
}

// Closes the connection used to query table structures (not the dump
// connection).
func (parser *eventParser) ParserConnClose(lock bool) {
	if
lock == true { parser.binlogDump.Lock() defer parser.binlogDump.Unlock() } parser.connStatus = STATUS_CLOSED if parser.conn != nil { func() { func() { if err := recover(); err != nil { return } }() parser.conn.Close() }() parser.conn = nil } } func (parser *eventParser) GetTableSchema(tableId uint64, database string, tablename string) { //var errPrint bool = false var lastErr string for { err := parser.GetTableSchemaByName(tableId, database, tablename) if err == nil { break } else { if lastErr != err.Error() { log.Println("binlog GetTableSchema err:", err, " tableId:", tableId, " database:", database, " tablename:", tablename) lastErr = err.Error() } } } } func (parser *eventParser) GetTableSchemaByName(tableId uint64, database string, tablename string) (errs error) { parser.binlogDump.Lock() defer parser.binlogDump.Unlock() errs = fmt.Errorf("unknow error") defer func() { if err := recover(); err != nil { parser.ParserConnClose(false) errs = fmt.Errorf(string(debug.Stack())) } }() if parser.connStatus == STATUS_CLOSED { parser.initConn() } //set dbAndTable Name tableId parser.tableNameMap[database+"."+tablename] = tableId sql := "SELECT COLUMN_NAME,COLUMN_KEY,COLUMN_TYPE,CHARACTER_SET_NAME,COLLATION_NAME,NUMERIC_SCALE,EXTRA,COLUMN_DEFAULT,DATA_TYPE,CHARACTER_OCTET_LENGTH,IS_NULLABLE FROM information_schema.columns WHERE table_schema='" + database + "' AND table_name='" + tablename + "' ORDER BY `ORDINAL_POSITION` ASC" stmt, err := parser.conn.Prepare(sql) if err != nil { errs = err parser.ParserConnClose(false) return } defer stmt.Close() p := make([]driver.Value, 0) rows, err := stmt.Query(p) if err != nil { errs = err parser.ParserConnClose(false) return } defer rows.Close() //columeArr := make([]*tableStruct column_schema_type,0) tableInfo := &tableStruct{ Pri: make([]string, 0), ColumnSchemaTypeList: make([]*ColumnInfo, 0), } ColumnMapping := make(map[string]string, 0) for { dest := make([]driver.Value, 11, 11) err := rows.Next(dest) if err != nil { break } 
var COLUMN_NAME, COLUMN_KEY, COLUMN_TYPE string var CHARACTER_SET_NAME, COLLATION_NAME, NUMERIC_SCALE, EXTRA string var isBool bool = false var unsigned bool = false var is_primary bool = false var auto_increment bool = false var enum_values, set_values []string var COLUMN_DEFAULT string var DATA_TYPE string var CHARACTER_OCTET_LENGTH uint64 var IS_NULLABLE string COLUMN_NAME = dest[0].(string) COLUMN_KEY = dest[1].(string) COLUMN_TYPE = dest[2].(string) if dest[3] == nil { CHARACTER_SET_NAME = "" } else { CHARACTER_SET_NAME = dest[3].(string) } if dest[4] == nil { COLLATION_NAME = "" } else { COLLATION_NAME = dest[4].(string) } if dest[5] == nil { NUMERIC_SCALE = "" } else { NUMERIC_SCALE = fmt.Sprint(dest[5]) } EXTRA = dest[6].(string) DATA_TYPE = dest[8].(string) //bit类型这个地方比较特殊,不能直接转成string,并且当前只有 time,datetime 类型转换的时候会用到 默认值,这里不进行其他细节处理 if DATA_TYPE != "bit" { if dest[7] == nil { COLUMN_DEFAULT = "" } else { COLUMN_DEFAULT = dest[7].(string) } } if COLUMN_TYPE == "tinyint(1)" { isBool = true } if EXTRA == "auto_increment" { auto_increment = true } if strings.Contains(COLUMN_TYPE, "unsigned") { unsigned = true } if COLUMN_KEY != "" { is_primary = true } if DATA_TYPE == "enum" { d := strings.Replace(COLUMN_TYPE, "enum(", "", -1) d = strings.Replace(d, ")", "", -1) d = strings.Replace(d, "'", "", -1) enum_values = strings.Split(d, ",") } else { enum_values = make([]string, 0) } if DATA_TYPE == "set" { d := strings.Replace(COLUMN_TYPE, "set(", "", -1) d = strings.Replace(d, ")", "", -1) d = strings.Replace(d, "'", "", -1) set_values = strings.Split(d, ",") } else { set_values = make([]string, 0) } if dest[9] == nil { CHARACTER_OCTET_LENGTH = 0 } else { switch dest[9].(type) { case uint32: CHARACTER_OCTET_LENGTH = uint64(dest[9].(uint32)) case uint64: CHARACTER_OCTET_LENGTH = dest[9].(uint64) case int64: CHARACTER_OCTET_LENGTH = uint64(dest[9].(int64)) default: CHARACTER_OCTET_LENGTH = 0 } } if dest[10] == nil { IS_NULLABLE = "YES" } else { IS_NULLABLE = 
dest[10].(string) } tableInfo.ColumnSchemaTypeList = append(tableInfo.ColumnSchemaTypeList, &ColumnInfo{ COLUMN_NAME: COLUMN_NAME, COLUMN_KEY: COLUMN_KEY, COLUMN_TYPE: COLUMN_TYPE, EnumValues: enum_values, SetValues: set_values, IsBool: isBool, Unsigned: unsigned, IsPrimary: is_primary, AutoIncrement: auto_increment, CHARACTER_SET_NAME: CHARACTER_SET_NAME, COLLATION_NAME: COLLATION_NAME, NUMERIC_SCALE: NUMERIC_SCALE, COLUMN_DEFAULT: COLUMN_DEFAULT, DATA_TYPE: DATA_TYPE, CHARACTER_OCTET_LENGTH: CHARACTER_OCTET_LENGTH, }) if strings.ToUpper(COLUMN_KEY) == "PRI" { tableInfo.Pri = append(tableInfo.Pri, COLUMN_NAME) } var columnMappingType string switch DATA_TYPE { case "tinyint": if unsigned { columnMappingType = "uint8" } else { if COLUMN_TYPE == "tinyint(1)" { columnMappingType = "bool" } else { columnMappingType = "int8" } } case "smallint": if unsigned { columnMappingType = "uint16" } else { columnMappingType = "int16" } case "mediumint": if unsigned { columnMappingType = "uint24" } else { columnMappingType = "int24" } case "int": if unsigned { columnMappingType = "uint32" } else { columnMappingType = "int32" } case "bigint": if unsigned { columnMappingType = "uint64" } else { columnMappingType = "int64" } case "numeric": columnMappingType = strings.Replace(COLUMN_TYPE, "numeric", "decimal", 1) case "real": columnMappingType = strings.Replace(COLUMN_TYPE, "real", "double", 1) default: columnMappingType = COLUMN_TYPE break } if IS_NULLABLE == "YES" { columnMappingType = "Nullable(" + columnMappingType + ")" } ColumnMapping[COLUMN_NAME] = columnMappingType } if len(tableInfo.ColumnSchemaTypeList) == 0 { return fmt.Errorf("column len is 0 " + "db:" + database + " table:" + tablename + " tableId:" + fmt.Sprint(tableId) + " may be no privilege") } tableInfo.needReload = false tableInfo.ColumnMapping = ColumnMapping parser.tableSchemaMap[tableId] = tableInfo errs = nil return } func (parser *eventParser) GetConnectionInfo(connectionId string) (m map[string]string, e 
error) { parser.binlogDump.Lock() defer func() { if err := recover(); err != nil { parser.ParserConnClose(false) log.Println("binlog.go GetConnectionInfo err:", err) m = nil } parser.binlogDump.Unlock() }() if parser.connStatus == STATUS_CLOSED { parser.initConn() } sql := "select TIME,STATE from `information_schema`.`PROCESSLIST` WHERE ID='" + connectionId + "'" stmt, err := parser.conn.Prepare(sql) if err != nil { parser.ParserConnClose(false) return nil, nil } defer stmt.Close() p := make([]driver.Value, 0) rows, err := stmt.Query(p) if err != nil { parser.ParserConnClose(false) return nil, err } defer rows.Close() m = make(map[string]string, 2) for { dest := make([]driver.Value, 2, 2) err := rows.Next(dest) if err != nil { break } m["TIME"] = fmt.Sprint(dest[0]) m["STATE"] = dest[1].(string) break } return m, nil } func (parser *eventParser) KillConnect(connectionId string) (b bool) { if connectionId == "" { return true } b = false parser.binlogDump.Lock() defer func() { if err := recover(); err != nil { parser.ParserConnClose(false) b = false } parser.binlogDump.Unlock() }() if parser.connStatus == STATUS_CLOSED { parser.initConn() } sql := "kill " + connectionId _, err := parser.conn.Exec(sql, []driver.Value{}) if err != nil { parser.ParserConnClose(false) return false } return true } func (parser *eventParser) GetTableId(database string, tablename string) (uint64, error) { key := database + "." + tablename if _, ok := parser.tableNameMap[key]; !ok { return 0, fmt.Errorf("not found key:%s", key) } return parser.tableNameMap[key], nil } func (parser *eventParser) delTableId(database string, tablename string) { key := database + "." + tablename if tableId, ok := parser.tableNameMap[key]; ok { delete(parser.tableSchemaMap, tableId) } delete(parser.tableNameMap, key) return }
The Mechanical and Thermal Behavior of Electrostatic Powder Coating Waste Reinforced Epoxy Composites The present study investigates the mechanical and thermal behavior of polyurethane electrostatic powder coating waste reinforced epoxy composites. Different percentages of electrostatic powder coating waste (3, 6, and 9 wt. %) reinforced epoxy composites were manufactured. The mixture of polyurethane powder coating waste and epoxy was mixed with a magnetic stirrer to ensure that the polyurethane powder coating waste was dispersed well in the epoxy, and then the mixture was placed under vacuum and air bubbles were removed. Tensile and three-point bending tests were performed to determine the changes in the mechanical properties of the materials, and thermogravimetric analysis was conducted to determine the thermal properties. In addition, images were taken with scanning electron microscopy for morphological features. The study revealed that the three-point flexural strength was increased by up to 8% and 15%, respectively, in the samples with 3 wt% and 6 wt% powder coating waste additives. The material's tensile strength decreased by up to 27% with powder coating waste reinforcement. However, the opposite trend was observed in the modulus of elasticity. Additionally, no significant difference was observed in the thermal properties of the materials. Also, from scanning electron microscopy analysis, it was observed that the inclusion of powder coating waste changed the damage mechanism of the material.
The Madison Mountains, photo courtesy Gallatin National Forest Avalanche Center, B. Vandenbos A therapist’s role is not to sit in judgment. We listen and we help clients of all ages process events in their lives. None of us are immune to feeling their pain and suffering. My colleagues in clinical practice understand keenly why some happenings jolt us at the community level, knowing they reflect traumas that we too are witnessing as patterns in those with whom we meet. I was startled to read about the tragic accident of two young climbers caught in an avalanche in Montana’s Madison Mountain Range. The autumn slide that killed the female climber, who grew up in Bozeman, and the subsequent suicide of her twentysomething companion—her partner in climbing, life, and death. That two young, fit people in the prime of life should leave us under such horrifying conditions is so disturbing that the unbearable anguish for those left behind is palpable even for those who did not know them personally. How do we make sense of such a loss in a mountain town where athletic achievement in the great outdoors is venerated more than in most other communities? Last week on my weekly radio show on KGLT, I interviewed Bozeman writer David Quammen about the backstory for his novella that, in turn, was recently transformed by filmmakers Andrew and Alex Smith into the acclaimed movie Walking Out. Afterward, my millennial radio apprentice and I talked about what it means to be a fit man here in his twenties. How timely, how informative, our exchange proved to be. He shared how members of his generation have been reared with such praise by their parents and teachers that they believe they can do anything they want, almost to the point of believing in their own invincibility. 
This attitude, combined with instant access to the escapades of peers showering the Internet with selfies about all the grand times being had, creates the compelling illusion that indeed, anything you want to do is possible. Yet this notion is both unproved and comes replete with brittle, sometimes devastating, reality. “Over the last few years, however, as I’ve watched too many friends go to the mountains only to never return, I’ve realized something painful. It’s not just the memorable summits and crux moves that are fleeting. Friends and climbing partners are fleeting, too. This is the painful reality of our sport, and I’m unsure what to make of it. Climbing is either a beautiful gift or a curse.” I will weigh in here. Part of “the curse” is not only the loss of people close to us and whom others choose to idolize; it is “the curse” of those who are emulating them, never feeling as if they are measuring up unless they are courting a raw edge, defying danger and documenting it. This is a serious problem in affluent mountain towns where achievement of social status is ordered around lives of leisure and perpetual play. Not all who play feel fulfilled, and most who dwell in mountain towns cannot afford to perpetually play. The two young people who died have been eulogized as thoughtful, well-adjusted and full of affection for each other and the outdoors. But for many it is easy to get lost. For those aspiring to be professional adventurers, gravity—reality— functions as a leveling force against any perspective that might teeter toward visions of self-grandiosity: the mere struggle to make ends meet, in an ever more expensive locale, while trying to keep up with those who don't face similar financial hardships, and, among some who play for a living, an underdeveloped emotional maturity. The craving to be noticed, to be validated, to hold prestige among peers swirls around prolifically in the psyches of our young. It isn’t new but here it takes a different form. 
Heroic greatness escapes most, but you don’t have to be “great” to matter or to register positively in the lives of others. Peace can be found in knowing that who you are is plenty good enough. Too often, this is lost amid the intense pressure many young people feel to live up to the mantle (burden) of seeking exceptionalism imposed upon them by their elders. Based on what I’ve witnessed, it has created overwhelming despair involving a fear of somehow being left behind; it often is accompanied by sensations of profound loneliness and episodes of acute anxiety and depression. If this description resonates, know that you are not alone. "Many young people feel intense pressure to live up to the mantle (burden) of seeking exceptionalism imposed upon them by their elders. Based on what I’ve witnessed, it has created overwhelming despair involving a fear of somehow being left behind; it often is accompanied by sensations of profound loneliness and episodes of acute anxiety and depression. If this description resonates, know that you are not alone." The real trial isn’t in ascending the peak or skiing a gnarly fall line; it’s dealing with the mundaneness of grinding out daily existence and doing it in a way that gives us meaning. So many aspire to secure prestigious brand ambassadorships by outdoing the accomplishments of their contemporaries, which in many ways has caused the antes to rise. The painful need to discover who they are remains after the rush of adrenaline goes away. And the thinking for many young adults goes something like this: since I can do anything I want, since I have had praise heaped upon me making me believe I am more extraordinary than everyone else, and since I am ill-equipped to handle disappointment, what, then, are my options for coming to terms with simply being normal? By normal, I mean day-to-day non-spine tingling moments of personal introspection and potential revelation where most of life is actually lived. 
In my last column, I referenced the essential human need to craft personal meaning from life's raw materials, not material possessions or resume items, but this challenge often comes without the reflective value of necessary psychological work. My apprentice went on to say how many of his peers are unprepared for their inescapable encounter with this kind of good old-fashioned reality and the challenge of living in their own skin. Instead, they are caught in a desperate struggle to find purpose in the outlandish. ° ° ° Rites of passage, generally lacking today, used to provide some assistance in this regard. There was a time when village elders had the authority, based in tradition, to initiate the young into a self-realized sense of who they were. The persona of who they had been in the tribe, village, or community, was seen as a shell to crack through rather than an identity imposed upon them to clutch forever. The erstwhile, often over-romanticized, notion of initiation, was meant to force the young adult into a character-revealing hardship, often involving days of fasting without benefit of food or water, until they had a dream or vision of their genuine nature. Coming down off the mountain the elders would listen to their dream or vision and give them their true name. The point I am making is that the primal drive to discover our own character does not belong only in the hazy past of history but remains a pivotal factor in modern humans leading a meaningful life in the present. In traditional culture, people did not go to the mountain to become rich or famous or made more full of hubris. They went, and returned, holding more humility, embraced by other older individuals who reinforced the notion that the power of validation comes from within, by striving to discover who you are. In more recent generations, this rite of passage often came packaged in basic training for drafted soldiers, or hard physical work on the land or by going down a mine shaft.
Absent these tasks our youth today settle for what I call the ghosts of initiation—securing the driver's license, having the first sexual experience, and getting drunk. Beyond this, there is a graver danger and that is the impulse to self-initiate. Urban gangs, competitive sports and partaking in fraternity hazing can provide a mimicking of this intrinsic need to initiate. But more often than not our young are left thrashing around, trying to experience something, anything, that grabs them by their raw roots and shakes them up: in a word, the extreme. Seeking extreme experiences, be it on snowboard, skis, rock-climbing, whitewater kayaking, or BASE jumping, are examples of risk-taking behaviors. But there's another way as well and that is to take powerful mind-altering drugs. It shocked me to hear that over the last several months numerous drug overdoses have been reported in the Bozeman area. Which leads to this point: from what are so many trying to escape? ° ° ° I do not wish to end a column like this on a downcast note. I see many positive things happening quietly in my community where the endgame is not escape but engagement. It's simple things that can add up. I have noticed, for example, young adults and kids gathering at Hawthorne Elementary School across the street from my office. Once a week, I see young fit women standing on the corners of the school's block encouraging elementary age girls to run/walk around the block. Yesterday I went across the street and asked one of the volunteers what it was these girls were doing. She said the program is called Girls on the Run. It is sponsored by a local organization called Thrive and rotates around our various elementary schools aiming at building confidence in young girls and finding themselves in womanhood.
Another group in town, Big Sky Youth Empowerment, aims to support teenagers with a combination of outdoor activities and exercises to build trust and self-esteem. But there are many outlets, including traditional scouting chapters and church youth groups and others where meaningful grounding for those who feel left out or alienated can happen. The positive encouragement young people receive through these outlets reminds me that there are ways we in our community can step up. We can raise our young to confront hardship and handle imperfection rather than defer that task to later in life when the accumulation of what has been avoided comes calling. The foundation for psychological development, self-esteem and self-realization is established in youth. We can do a better job of helping young people find their way. Our community—many mountain towns—are today under siege from obvious and hidden forces. There are ways to confront and deal with dynamic change. The most effective way I have found to abide complexity is to have a personal practice that enhances a balanced life.
Highly dynamic transient colonization by Staphylococcus aureus in healthy Malaysian students. Staphylococcus aureus carriage is a risk factor for infection in both community and hospital settings. Three main S. aureus carriage patterns have been described: non-carriage, persistent carriage (repeatedly culture-positive) and transient or intermittent carriage (may or may not carry at different time intervals). Persistent carriage in particular is associated with a higher risk for infections (Nouwen et al., 2005, 2006). Although the anterior nares are the primary colonization site for S. aureus, throat carriage has frequently been reported (Uemura et al., 2004; Mertz et al., 2007). In Malaysia, the prevalence of nasal carriage of S. aureus is within the reported range of 20–25 % (Choi et al., 2006; Neela et al., 2008). However, data on the pattern of S. aureus carriage and the incidence of throat colonization are still unknown. Therefore, the aims of the current study were to determine the S. aureus carriage pattern of local Malaysian residents and the incidence of strain carriage in the nose and throat and to investigate the strain types carried in the nose and throat of the same subject.
/**
 * Enforce all provided validators for the specified paths.
 *
 * <p>For every path that has an entry in {@code validationMap}, the current
 * value is replaced in place with the validator's (possibly corrected)
 * result. Paths without a registered validator are left untouched.
 *
 * @param config a map of paths & their values; mutated in place.
 * @return the same (corrected) map instance, for call chaining.
 */
public Map<String, Object> validateConfiguration(final Map<String, Object> config) {
    // Iterate over entries rather than keySet()+get()+put(): this avoids two
    // extra hash lookups per path and makes the in-place update explicit.
    for (Map.Entry<String, Object> entry : config.entrySet()) {
        if (validationMap.containsKey(entry.getKey())) {
            // setValue on an existing entry is a value replacement only —
            // never a structural modification — so it is safe mid-iteration.
            entry.setValue(validationMap.get(entry.getKey()).validate(entry.getValue()));
        }
    }
    return config;
}
def emit_monitor_start(self, pth: str):
    """Emit the ``monitor_start`` signal for the given path.

    Thin delegation wrapper; ``monitor_start`` is presumably a Qt-style
    signal object declared on this class — TODO confirm against the class
    definition (not visible in this chunk).

    :param pth: path passed through unchanged to the signal's listeners
    """
    self.monitor_start.emit(pth)
Mechanisms, molecular and sero-epidemiology of antimicrobial resistance in bacterial respiratory pathogens isolated from Japanese children Background The clinical management of community-acquired respiratory tract infections (RTIs) is complicated by the increasing worldwide prevalence of antibacterial resistance, in particular, β-lactam and macrolide resistance, among the most common causative bacterial pathogens. This study aimed to determine the mechanisms and molecular- and sero-epidemiology of antibacterial resistance among the key paediatric respiratory pathogens in Japan. Methods Isolates were collected at 18 centres in Japan during 2002 and 2003 from children with RTIs as part of the PROTEKT surveillance programme. A proportion of Haemophilus influenzae isolates was subjected to sequencing analysis of the ftsI gene; phylogenetic relatedness was assessed using multilocus sequence typing. Streptococcus pneumoniae isolates were screened for macrolide-resistance genotype by polymerase chain reaction and serotyped using the capsular swelling method. Susceptibility of isolates to selected antibacterials was performed using CLSI methodology. Results and Discussion Of the 557 H. influenzae isolates collected, 30 (5.4%) were β-lactamase-positive , 115 (20.6%) were BL-nonproducing ampicillin-resistant (BLNAR; MIC ≥ 4 mg/L) and 79 (14.2%) were BL-nonproducing ampicillin-intermediate (BLNAI; MIC 2 mg/L). Dabernat Group III penicillin binding protein 3 (PBP3) amino acid substitutions in the ftsI gene were closely correlated with BLNAR status but phylogenetic analysis indicated marked clonal diversity. PBP mutations were also found among BL+ and BL-nonproducing ampicillin-sensitive isolates. Of the antibacterials tested, azithromycin and telithromycin were the most active against H. influenzae (100% and 99.3% susceptibility, respectively). A large proportion (75.2%) of the 468 S. 
pneumoniae isolates exhibited macrolide resistance (erythromycin MIC ≥ 1 mg/L); erm(B) was the most common macrolide resistance genotype (58.8%), followed by mef(A) (37.2%). The most common pneumococcal serotypes were 6B (19.7%), 19F (13.7%), 23F (13.5%) and 6A (12.8%). Telithromycin and amoxicillin-clavulanate were the most active antibacterials against S. pneumoniae (99.8% and 99.6% susceptibility, respectively). Conclusion Approximately one-third of H. influenzae isolates from paediatric patients in Japan are BLNAI/BLNAR, mainly as a result of clonally diverse PBP3 mutations. Together with the continued high prevalence of pneumococcal macrolide resistance, these results may have implications for the clinical management of paediatric RTIs in Japan. Background Community-acquired respiratory tract infections (RTIs) such as pneumonia, bronchitis, tonsillitis/pharyngitis, otitis media and bacterial sinusitis are prevalent conditions accounting for approximately three-quarters of all outpatient antibacterial prescriptions , a large proportion of which are for children. Antibacterial therapy for community-acquired RTIs is usually empirical . However, the clinical management of community-acquired RTIs is complicated by the increasing worldwide prevalence of antibacterial resistance, in particular β-lactam and macrolide resistance, among the most common causative bacterial pathogens . This has important implications for the selection of appropriate and effective antibacterial therapy, especially for children in whom current treatment options are largely limited to these two classes of antibacterial agents. Haemophilus influenzae is frequently implicated in paediatric community-acquired RTIs. Resistance of H. influenzae to ampicillin has been increasing steadily since its emergence in the 1970s . Until recently, β-lactamase (BL) production has been the primary mechanism of ampicillin resistance among H. influenzae. 
However, the prevalence of BL-nonproducing ampicillin-resistant (BLNAR) strains of H. influenzae now also appears to be increasing . This is of clinical significance, since BLNAR H. influenzae are typically co-resistant to other commonly prescribed β-lactams, including amoxicillin-clavulanate and ampicillin-sulbactam, in addition to most cephalosporins . The increasing global prevalence of antibacterial resistance among Streptococcus pneumoniae -another significant respiratory pathogen -is cause for further concern. As a result of the rapid evolution of β-lactam-resistant S. pneumoniae, macrolides have been increasingly used as initial empirical therapy in community-acquired RTIs . However, the global increase in macrolide-resistant strains of S. pneumoniae now also threatens to compromise the use of these antibacterials for the treatment of these conditions . Such resistance trends highlight an urgent need for new antibacterials for the treatment of paediatric communityacquired RTIs that are effective against the common respiratory pathogens, but which retain activity against isolates resistant to current treatment options. PROTEKT (Prospective Resistant Organism Tracking and Epidemiology for the Ketolide Telithromycin) is a global, multicentre surveillance study investigating the antimicrobial susceptibility of bacterial pathogens associated with community-acquired RTIs. As part of this survey, a study was undertaken in Japan to assess the BL status of H. influenzae isolates collected from paediatric patients with community-acquired RTIs and to determine the mechanism of resistance and molecular epidemiology of BLNAR strains. The activities of several antibacterial agents against these isolates and other key respiratory pathogens were also assessed. 
Isolate collection Bacterial isolates were collected over four 1-week periods (24-30 November 2002;19-26 January 2003;9-15 March 2003;and 15-21 June 2003) from children (aged < 16 years) with community-acquired RTIs (sinusitis, tonsillitis, pharyngitis/laryngitis, otitis media, pneumonia, bronchitis and others) at 18 centres in Japan. Acceptable sources for isolates included blood, sputum, bronchoalveolar lavage fluid, middle-ear fluid, nasopharyngeal swabs or aspirates, sinus aspirates and throat swabs. Isolates from patients with nosocomial RTIs or cystic fibrosis were excluded from this analysis. Duplicate strains, isolates originating from existing collections and those of doubtful pathogenicity were also excluded. Susceptibility testing All isolates were transported to a central laboratory for susceptibility testing (Mitsubishi Kagaku Bio-Clinical Laboratories Inc., Japan). For H. influenzae, BL production was detected using the chromogenic nitrocefin method (Unipath Ltd, Basingstoke, UK) and by isolates being nonsusceptible to ampicillin (MIC ≥ 2 mg/L). MICs of isolates to a panel of antibacterial agents (including telithromycin, erythromycin, azithromycin, ampicillin, amoxicillin-clavulanate and cefdinir) were determined using the Clinical and Laboratory Standards Institute (CLSI) broth microdilution method and interpreted using established CLSI breakpoints . Molecular methods A proportion of H. influenzae isolates were analysed by polymerase chain reaction (PCR) amplification and sequencing of regions of the ftsI gene encoding the transpeptidase domain of penicillin-binding protein (PBP) 3A and/or PBP 3B as described by Dabernat et al. Phylogenetic relationships among H. influenzae strains were determined by multilocus sequence typing (MLST) of seven housekeeping genes as described previously . All isolates of S. 
pneumoniae found to be resistant to erythromycin (MIC ≥ 1 mg/L) were analysed for the presence of erm(B), mef(A) and erm(A) subclass erm(TR) macrolide resistance gene sequences using a rapid-cycle multiplex PCR method as described previously . Serotyping of S. pneumoniae isolates was performed at G.R. Micro Ltd (London, UK) using the Neufeld's quellung reaction with Statens Serum Institute (SSI) antisera (SSI, Copenhagen, Denmark). The SSI was used as the reference laboratory for quality assurance and rare serotypes. Patients A total of 5,592 patients were included in this analysis. Key demographics and patient characteristics are summarized in Table 1. Almost two-thirds (64.5%) of study participants were aged 0-5 years. Upper RTIs accounted for 61.6% of all community-acquired RTIs reported. Pharyngitis/laryngitis was the most common upper RTI (33.5%), followed by unspecified upper RTIs and tonsillitis (14.1% and 7.1%, respectively). Bronchitis was the most common lower RTI, occurring in 23.4% of patients. Just over half of all study participants (53.3%) received antibacterial therapy for the treatment of their community-acquired RTI. Bacterial isolates A total of 2,596 pathogens were collected in this study, including 557 isolates of H. influenzae and 468 isolates of S. pneumoniae. A total of 110 BLNAR isolates were viable for molecular analysis and a further 5 BLNAI, 14 BL-nonproducing ampicillin-sensitive (BLNAS), and 27 BL+ isolates were examined for comparison. In the 156 isolates analysed, ftsI mutations resulted in amino acid substitutions at 23 different loci and 36 different combinations of these substitutions were found. One particular combination of substitutions predominated (D350N, S357N, M377I, S385T, L389F, N526K) -corresponding to PBP 3 Group III in the classification scheme described by Dabernat et al. , accounting for 58.3% (91/156) of the isolates ( Table 2). 
Classification of the remaining isolates into Dabernat groups was difficult owing to the large number of previously undescribed mutations. The distribution of amino acid substitution types according to β-lactamase and ampicillin resistance status is shown in Table 2. Of the BLNAR isolates, 75.5% (83/110) were classified as Dabernat Group III. None of the BLNAR or BLNAI isolates had the wild-type (i.e., ampicillin-susceptible) PBP 3 sequence. By contrast, 39.0% (16/41) of the isolates that were either BLNAS or BL+ had the wild-type PBP 3 sequence, with the remainder having mainly non-Group III substitution patterns. MLST of the 156 isolates identified a total of 73 different sequence types: this lack of genetic similarity among the BLNAR PBP 3 Group III and BLNAS isolates suggests that resistance arose independently in diverse strains rather than through clonal spread. Discussion: Results of this study indicate that a high prevalence of in vitro resistance to conventional antibacterial agents, such as azithromycin, erythromycin and cefdinir, existed among the key paediatric respiratory pathogens in Japan during the surveillance period in 2002 and 2003. Although the data cannot be directly extrapolated to the current status of antibacterial resistance in Japan, the study highlights a number of potentially important issues regarding the antibacterial susceptibility and epidemiology of the key respiratory tract pathogens. One-fifth of all isolates of H. influenzae were found to be BLNAR (20.6%), with a further 14.2% BLNAI and 5.4% BL+. This is in keeping with the findings of other Japanese studies, with results of one recent nationwide survey showing 23.1% of all H. influenzae isolates collected to be BLNAR and 6.0% BL+ . Molecular epidemiological analysis of a proportion of the H. 
influenzae isolates col-Phylogenetic relationships based on sequence-type variations found in Haemophilus influenzae that were β-lactamase nonpro-ducing ampicillin-resistant with β-lactamase nonproducing ampicillin-sensitive (BLNAS; n = 14) Figure 1 Phylogenetic relationships based on sequence-type variations found in Haemophilus influenzae that were β-lactamase nonproducing ampicillin-resistant with β-lactamase nonproducing ampicillin-sensitive (BLNAS; n = 14). lected in this study indicated that Dabernat Group III PBP 3 amino acid substitutions in the ftsI gene were found to correlate highly with BLNAR status. However, there was marked clonal diversity among all H. influenzae isolates, suggesting that the ftsI mutations associated with BLNAR status have developed independently in different strains at a relatively high frequency, rather than through clonal expansion of a successful strain. Mutations in ftsI were also seen in more than half of the BLNAS and BL+ isolates analysed. BL+ H. influenzae strains exhibiting resistance to Phylogenetic relationships based on sequence-type variations found in Haemophilus influenzae that were β-lactamase nonpro-ducing ampicillin-resistant with βlactamase positive (BL+; n = 27) Figure 2 Phylogenetic relationships based on sequence-type variations found in Haemophilus influenzae that were β-lactamase nonproducing ampicillin-resistant with βlactamase positive (BL+; n = 27). amoxicillin-clavulanate and/or cephalosporin antibiotics have been identified previously in isolates collected in various countries, including Japan . The identification of ftsI mutations in some BL+ H. influenzae strains may, in some cases, explain their decreased susceptibility to non-β-lactam antibiotics. Both azithromycin and telithromycin demonstrated good in vitro activity against H. influenzae isolates collected in this study, irrespective of BL+ or BLNAR/I, resistance mechanism or serotype status. In contrast, only 24.6% of S. 
pneumoniae isolates were fully susceptible to azithromycin, while telithromycin was highly active against this pathogen, including isolates resistant to macrolides. Overall, three-quarters of the 468 S. pneumoniae isolates were found to be macrolide-resistant, whereas only 1 isolate exhibited low-level resistance to telithromycin. These observations are in keeping with the results of previous analyses of paediatric S. pneumoniae isolates, which showed telithromycin to be highly active against all strains, irrespective of macrolide, azalide or clindamycin resistance status . To date, no conjugate pneumococcal vaccine has been routinely used in Japan. The 7-valent conjugate vaccine (PCV-7) is licensed in the USA and Europe and has coverage against serotypes 4, 6B, 9V, 14, 18C, 19F and 23F. Only 55.2% (257/465) of the isolates in this study were serotypes covered by PCV-7. If the potentially cross-reacting serotypes (6A and 19A) were added, coverage was raised to 70.1%. However, the uncertainty regarding cross-protection and the potential for serotype replacement underscore the need for serotype surveillance . Although the results presented in this paper may have important implications for the empirical antibiotic treatment of paediatric RTIs in Japan, two factors limit the degree to which the data can be interpreted. Firstly, no attempt was made to correlate in vitro resistance with resistance in vivo that may lead to adverse clinical outcome. Secondly, the study did not distinguish between isolates that may be colonising the respiratory tract from those actually causing the infection. This is particularly important when considering paediatric RTIs, as carriage of common respiratory tract pathogens, such as S. pneumoniae, in children is extremely common . Conclusion In conclusion, this study provides a snapshot of the antibacterial susceptibility and epidemiology of key respiratory tract pathogens isolated from children in Japan during 2002 and 2003. 
Approximately one-third of H. influenzae isolates were BLNAI/BLNAR, mainly as a result of clonally diverse PBP3 mutations. PBP3 mutations were also common among BL+ and BLNAS isolates. Together with the observed high prevalence of in vitro pneumococ-Serotype distribution of 468 isolates of Streptococcus pneumo-niae collected from Japanese children (aged < 16 years) with community-acquired respiratory tract infections (combined data for all four 1-week study periods) Figure 3 Serotype distribution of 468 isolates of Streptococcus pneumoniae collected from Japanese children (aged < 16 years) with community-acquired respiratory tract infections (combined data for all four 1-week study periods).
/* * Geodetic Position Solution * This message outputs the Geodetic position. */ public class NavPosllh extends UbxData { // Sample message // header(2) class+id(2) length(2) iTOW(4) lon(4) lat(4) height(4) hMSL(4) hAcc(4) vAcc(4) CHS(2) // B5 62 01 02 1C 00 88 32 D3 17 62 A8 AE FD 2A E1 1E 18 50 7B 0C 00 2A B7 0B 00 B7 CC 31 00 A9 5E 2F 00 66 D3 NavPosllh() { messageClass = UBX_NAV_POSLLH; } // GPS time of week of the navigation epoch public long iTOW; // Longitude public long lon; // Latitude public long lat; // Height above ellipsoid public long height; // Height above mean sea level public long hMSL; // Horizontal accuracy estimate public long hAcc; // Vertical accuracy estimate public long vAcc; @NonNull @Override public String toString() { return "NavPosllh iTOW = " + iTOW + " lon = " + lon + " lat = " + lat + " height = " + height + " hMSL = " + hMSL + " hAcc = " + hAcc + " vAcc = " + vAcc; } public long getLon() { return lon; } public long getLat() { return lat; } }
/**
 * Simple order entity (learning/demo code).
 * Getters, setters, equals/hashCode and toString are generated by Lombok's
 * {@code @Data}.
 * <p>
 * in order to learn java!
 * created at 2022/3/27 15:53
 *
 * @author wangchao
 */
@Data
public class Order {
    // surrogate identifier of the order
    private Integer id;
    // display name of the order
    private String name;
    // the customer that placed this order
    private Customer customer;
}
<reponame>utkusarioglu/basak-beykoz // import React from 'react'; import type { InjectionFunction } from '../services/injection.service'; import injection from '../services/injection.service'; import { renderToStaticMarkup } from 'react-dom/server'; import SocialDesktopView from '../components/views/social-desktop/SocialDesktop.view'; import { contactFormInjection } from './contact-form.injection'; /** * Injects homepage enhancements * @param root {@link OverlayScrollbars} refs object from the store * @param history {@link react-router-dom} history object */ export const iletisimInjection: InjectionFunction = (params) => { return injection.execFunctions([socialLinks, contactFormInjection], params); }; const socialLinks: InjectionFunction = ({ ref }) => { const socialContainer = ref.querySelector('.contact-links__social'); if (!socialContainer) return injection.dummyInjectionFunction; socialContainer.innerHTML = renderToStaticMarkup(SocialDesktopView()); return injection.dummyInjectionFunction; };
def bi_directional_broadcasting(input_value: np.array, second_shape: np.array):
    """Bi-directionally broadcast ``input_value`` against ``second_shape``.

    Computes the broadcast shape of the input's shape and ``second_shape``
    (asserting that the two are compatible and the result is fully defined),
    then materializes the broadcast by multiplying the input with a ones
    tensor of ``second_shape`` cast to the input's dtype.

    :param input_value: numpy array to broadcast
    :param second_shape: shape to broadcast against
    :return: the broadcast numpy array
    """
    broadcast_shape = bi_directional_shape_broadcasting(
        shape_array(input_value.shape), second_shape)
    incompatible_msg = 'The tensor of shape "{}" cannot be bi-directionally ' \
                       'broadcasted to shape "{}"'.format(input_value.shape, second_shape)
    assert broadcast_shape is not None, incompatible_msg
    assert is_fully_defined(broadcast_shape)
    # Multiplying by ones of second_shape lets numpy broadcasting expand
    # the input to the combined shape without changing its values.
    ones_of_second_shape = np.ones(second_shape).astype(input_value.dtype)
    return input_value * ones_of_second_shape
import * as React from 'react' import { browserHistory } from 'react-router' import Table from '../../components/Table' import { map } from '../../utils' import DocumentModel from '../../../universal/experiment/DocumentModel' import './index.less' export default class ExperimentList extends React.Component<Props, {}> { constructor(props) { super(props) } render() { return ( <section className="experiments-container"> {map(this.props.groups, (key, group) => ( <div key={key}> {this.props.groupby ? <h2>{this.props.groupby + ': ' + key}</h2> : null} <Table columns={{ name: { title: 'Name' }, progress: { title: 'Progress' }, lastModified: { title: 'Last modified' } }} rows={group.map(e => { return { name: ( <div className="action-buttons-container"> <a href="" onClick={ev => ev.preventDefault() || browserHistory.push(`/experiments/${e.id}`)}>{e.props.name}</a> <span className="action-buttons"> <a onClick={_ => this.props.cloneExperiment(e)}><i className="fa fa-clone"></i></a> {' '} <a onClick={_ => this.props.deleteExperiment(e)}><i className="fa fa-remove"></i></a> </span> </div> ), progress: ( <div> <div style={{'width': '50%', height: '5px', 'background': 'green', 'borderRadius': '5px'}}></div> </div> ), lastModified: (e.props.updated || 'n/a').substr(0, 16) } })} /> </div> ))} </section> ) } } interface Props { groups: { [key: string]: DocumentModel[] } groupby: string cloneExperiment: Function deleteExperiment: Function }
<gh_stars>1-10 #include "util.h" #include <malloc.h> #include <stdarg.h> #include <stdint.h> #include <stdio.h> #include <string.h> #ifndef _BYTESTRING_H #define _BYTESTRING_H struct bytestring { uint32_t length; char *data; }; struct bytestring empty; struct bytestring EOS; char *hexbytestring(struct bytestring bs); struct bytestring concatbytestring(struct bytestring bs0, ...); // int sendbs(int sock, struct bytestring msg); #endif // _BYTESTRING_H
// returns the converted amount according to current rate func (k Keeper) convertToRate(ctx sdk.Context, from, to commontypes.Denom, amt sdk.Coin) (sdk.DecCoin, error) { rate := k.GetRate(ctx) if rate.GT(amt.Amount.ToDec()) { return sdk.DecCoin{}, sdkerrors.Wrapf(sdkerrors.ErrInsufficientFunds, "current rate: %s is higher then amount provided: %s", rate.String(), amt.String()) } convertedAmt := amt.Amount.ToDec().QuoRoundUp(rate) return sdk.NewDecCoinFromDec(to.String(), convertedAmt), nil }
def runIteration(self, task, Trees, Evaluations, xb, fxb, age, **dparams):
    """Run one iteration of the forest-optimization-style algorithm.

    Performs local seeding on zero-age trees, ages the population, removes
    trees past their lifetime, keeps the fittest, then (optionally) performs
    global seeding from the candidate population and resets the age of the
    current best tree.

    :param task: optimization task providing dimensionality ``task.D`` and ``task.eval``
    :param Trees: population array of candidate solutions
    :param Evaluations: fitness values of ``Trees``
    :param xb: global best solution (unused here; part of the framework signature)
    :param fxb: global best fitness (unused here; part of the framework signature)
    :param age: per-tree age array, aligned with ``Trees``
    :return: tuple ``(Trees, Evaluations, {'age': age})``
    """
    # Trees removed from the forest this iteration; rows are solutions plus
    # one extra column (width task.D + 1 — presumably fitness; confirm
    # against removeLifeTimeExceeded).
    candidatePopulation = ndarray((0, task.D + 1))
    # Only trees created in the previous iteration (age 0) seed locally.
    zeroAgeTrees = Trees[age == 0]
    localSeeds = self.localSeeding(task, zeroAgeTrees)
    age += 1  # every existing tree ages by one
    # Move over-aged trees out of the forest into the candidate population.
    Trees, candidatePopulation, age = self.removeLifeTimeExceeded(Trees, candidatePopulation, age)
    # New local seeds join the forest with age 0.
    Trees = append(Trees, localSeeds, axis=0)
    age = append(age, zeros(len(localSeeds), dtype=int32))
    # Trim the forest to the area limit; surplus trees join the candidates.
    Trees, candidatePopulation, Evaluations, age = self.survivalOfTheFittest(task, Trees, candidatePopulation, age)
    # Global seeding: transfer rate self.tr decides how many candidates seed globally.
    gsn = int(self.tr * len(candidatePopulation))
    if gsn > 0:
        globalSeeds = self.globalSeeding(task, candidatePopulation, gsn)
        Trees = append(Trees, globalSeeds, axis=0)
        age = append(age, zeros(len(globalSeeds), dtype=int32))
        gste = apply_along_axis(task.eval, 1, globalSeeds)
        Evaluations = append(Evaluations, gste)
        # The best tree so far never expires: reset its age to 0.
        ib = argmin(Evaluations)
        age[ib] = 0
    return Trees, Evaluations, {'age': age}
/*
 * Copyright 2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.cache.internal;

import com.google.common.primitives.Ints;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;

import java.util.Arrays;

/**
 * An immutable, dot-separated numeric cache version (e.g. {@code "2.13"}),
 * ordered component-wise with shorter versions sorting before longer ones
 * that share a prefix.
 */
public final class CacheVersion implements Comparable<CacheVersion> {

    public static final String COMPONENT_SEPARATOR = ".";

    /**
     * Parses a dot-separated version string into its numeric components.
     */
    public static CacheVersion parse(String version) {
        String[] tokens = StringUtils.split(version, COMPONENT_SEPARATOR);
        int[] parsed = new int[tokens.length];
        for (int index = 0; index < tokens.length; index++) {
            parsed[index] = Integer.parseInt(tokens[index]);
        }
        return new CacheVersion(parsed);
    }

    /** The version with no components; compares below every other version. */
    public static CacheVersion empty() {
        return new CacheVersion(ArrayUtils.EMPTY_INT_ARRAY);
    }

    public static CacheVersion of(int component) {
        return new CacheVersion(new int[] {component});
    }

    /** Copies the argument so later mutation cannot affect this instance. */
    public static CacheVersion of(int... components) {
        return new CacheVersion(ArrayUtils.clone(components));
    }

    private final int[] components;

    private CacheVersion(int[] components) {
        this.components = components;
    }

    /**
     * Returns a new version with {@code additionalComponent} appended;
     * this instance is unchanged.
     */
    public CacheVersion append(int additionalComponent) {
        int[] extended = Arrays.copyOf(components, components.length + 1);
        extended[components.length] = additionalComponent;
        return new CacheVersion(extended);
    }

    @Override
    public String toString() {
        return Ints.join(COMPONENT_SEPARATOR, components);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return Arrays.equals(this.components, ((CacheVersion) o).components);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(components);
    }

    @Override
    public int compareTo(CacheVersion that) {
        // Compare component-wise over the shared prefix, then by length.
        int sharedLength = Math.min(this.components.length, that.components.length);
        for (int i = 0; i < sharedLength; i++) {
            int order = Ints.compare(this.components[i], that.components[i]);
            if (order != 0) {
                return order;
            }
        }
        return Ints.compare(this.components.length, that.components.length);
    }
}
def display_all_numerical_hist(set1_df, set2_df, n_bins=25):
    """Show histograms for every numeric column of two dataframes.

    For each numeric column, draws a figure with three side-by-side
    histograms: set1 alone, set2 alone, and their concatenation.

    :param set1_df: first dataframe
    :param set2_df: second dataframe
    :param n_bins: number of histogram bins per plot (default 25)
    """
    combined_df = pd.concat([set1_df, set2_df], ignore_index=True, sort=False)
    # Numeric columns are detected on the concatenation so both frames'
    # columns are covered.
    for column in combined_df.select_dtypes(include='number').columns.values:
        fig, axes = plt.subplots(1, 3, figsize=(15, 5))
        panels = [
            ('set1', set1_df),
            ('set2', set2_df),
            ('concat [set1, set2]', combined_df),
        ]
        for axis, (label, frame) in zip(axes, panels):
            axis.set_title('{0} {1}'.format(label, column))
            frame[column].hist(ax=axis, bins=n_bins)
        fig.tight_layout()
        plt.show()
package google

import (
	"fmt"
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

// Acceptance test for google_spanner_database: creates an instance plus a
// database, checks the computed "state" attribute, performs an in-place ddl
// update, and verifies import under each supported import-ID format.
func TestAccSpannerDatabase_basic(t *testing.T) {
	t.Parallel()

	project := getTestProjectFromEnv()
	rnd := randString(t, 10)
	instanceName := fmt.Sprintf("my-instance-%s", rnd)
	databaseName := fmt.Sprintf("mydb_%s", rnd)

	vcrTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckSpannerDatabaseDestroyProducer(t),
		Steps: []resource.TestStep{
			{
				Config: testAccSpannerDatabase_basic(instanceName, databaseName),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"),
				),
			},
			{
				// Test import with default Terraform ID
				ResourceName:            "google_spanner_database.basic",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"ddl"},
			},
			{
				// Append a ddl statement; this must update in place.
				Config: testAccSpannerDatabase_basicUpdate(instanceName, databaseName),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"),
				),
			},
			{
				// Test import with default Terraform ID
				ResourceName:            "google_spanner_database.basic",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"ddl"},
			},
			{
				// Import via fully-qualified resource URI.
				ResourceName:            "google_spanner_database.basic",
				ImportStateId:           fmt.Sprintf("projects/%s/instances/%s/databases/%s", project, instanceName, databaseName),
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"ddl"},
			},
			{
				// Import via instance-relative ID.
				ResourceName:            "google_spanner_database.basic",
				ImportStateId:           fmt.Sprintf("instances/%s/databases/%s", instanceName, databaseName),
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"ddl"},
			},
			{
				// Import via short "{instance}/{database}" ID.
				ResourceName:            "google_spanner_database.basic",
				ImportStateId:           fmt.Sprintf("%s/%s", instanceName, databaseName),
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"ddl"},
			},
		},
	})
}

// Terraform config: one Spanner instance and a database with two tables.
func testAccSpannerDatabase_basic(instanceName, databaseName string) string {
	return fmt.Sprintf(`
resource "google_spanner_instance" "basic" {
  name         = "%s"
  config       = "regional-us-central1"
  display_name = "display-%s"
  num_nodes    = 1
}

resource "google_spanner_database" "basic" {
  instance = google_spanner_instance.basic.name
  name     = "%s"
  ddl = [
    "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)",
    "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)",
  ]
}
`, instanceName, instanceName, databaseName)
}

// Same config with a third table appended (the in-place update case).
func testAccSpannerDatabase_basicUpdate(instanceName, databaseName string) string {
	return fmt.Sprintf(`
resource "google_spanner_instance" "basic" {
  name         = "%s"
  config       = "regional-us-central1"
  display_name = "display-%s"
  num_nodes    = 1
}

resource "google_spanner_database" "basic" {
  instance = google_spanner_instance.basic.name
  name     = "%s"
  ddl = [
    "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)",
    "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)",
    "CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)",
  ]
}
`, instanceName, instanceName, databaseName)
}

// Unit Tests for type spannerDatabaseId
func TestDatabaseNameForApi(t *testing.T) {
	id := spannerDatabaseId{
		Project:  "project123",
		Instance: "instance456",
		Database: "db789",
	}
	actual := id.databaseUri()
	expected := "projects/project123/instances/instance456/databases/db789"
	expectEquals(t, expected, actual)
}

// Unit Tests for ForceNew when the change in ddl
// Only appending statements (or no change) keeps the resource; removing,
// reordering or replacing statements must force recreation.
func TestSpannerDatabase_resourceSpannerDBDdlCustomDiffFuncForceNew(t *testing.T) {
	t.Parallel()

	cases := map[string]struct {
		before   interface{}
		after    interface{}
		forcenew bool
	}{
		"remove_old_statements": {
			before: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"},
			after: []interface{}{
				"CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)"},
			forcenew: true,
		},
		"append_new_statements": {
			before: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"},
			after: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)",
				"CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)",
			},
			forcenew: false,
		},
		"no_change": {
			before: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"},
			after: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"},
			forcenew: false,
		},
		"order_of_statments_change": {
			before: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)",
				"CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)",
				"CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)",
			},
			after: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)",
				"CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)",
				"CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)",
			},
			forcenew: true,
		},
		"missing_an_old_statement": {
			before: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)",
				"CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)",
				"CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)",
			},
			after: []interface{}{
				"CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)",
				"CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)",
			},
			forcenew: true,
		},
	}

	for tn, tc := range cases {
		d := &ResourceDiffMock{
			Before: map[string]interface{}{
				"ddl": tc.before,
			},
			After: map[string]interface{}{
				"ddl": tc.after,
			},
		}
		err := resourceSpannerDBDdlCustomDiffFunc(d)
		if err != nil {
			t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn)
		}
		if d.IsForceNew != tc.forcenew {
			t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v;actual:%v", tn, tc.forcenew, d.IsForceNew)
		}
	}
}
import * as semver from "semver";
import { Module } from "./types";

/**
 * Update package.json and package-lock.json modules in given modules with the
 * given update.
 *
 * @param modules modules array to update the package files in
 * @param baseDirectory the base directory of the modules paths
 * @param update the update to apply to the package files
 * @returns a new array in which matching package files have been rewritten;
 *          all other modules are passed through untouched
 */
export function updatePackageJSONs(
  modules: Module[],
  baseDirectory: string,
  update: Record<string, any>
): Module[] {
  return modules.map((module) => {
    if (module.path === `${baseDirectory}/package.json`) {
      return updatePackageJSON(module, update);
    }
    if (module.path === `${baseDirectory}/package-lock.json`) {
      return updatePackageLockJSON(module, update);
    }
    return module;
  });
}

/**
 * Apply `update` on top of a parsed package.json, dropping a resulting
 * `version` field that is not valid semver.
 *
 * @param module the module holding the package.json source
 * @param update object that contains the update data, e.g. name and version
 * @returns the updated module
 */
function updatePackageJSON(module: Module, update: Record<string, any>): Module {
  const pkg = JSON.parse(module.code);
  Object.assign(pkg, update);
  if (!semver.valid(pkg.version)) {
    delete pkg.version;
  }
  return {
    ...module,
    code: JSON.stringify(pkg, null, 2),
  };
}

/**
 * Apply `update` to a parsed package-lock.json.
 *
 * In npm 7 and higher the package lock contains a `packages` section
 * (lockfile v2) whose "" entry mirrors the root project, so the same update
 * must be applied there as well; see
 * https://github.blog/2021-02-02-npm-7-is-now-generally-available/#changes-to-the-lockfile
 * Older (v1) lockfiles have no `packages` section, so we guard before
 * touching it instead of crashing with a TypeError.
 *
 * @param module the module holding the package-lock.json source
 * @param update object that contains the update data, e.g. name and version
 * @returns the updated module
 */
function updatePackageLockJSON(
  module: Module,
  update: Record<string, any>
): Module {
  const lockfile = JSON.parse(module.code);
  Object.assign(lockfile, update);
  if (!semver.valid(lockfile.version)) {
    delete lockfile.version;
  }

  // Lockfile v2 mirrors the root project's static data under packages[""];
  // v1 lockfiles have no such section, so skip this step for them.
  const pkg = lockfile.packages?.[""];
  if (pkg !== undefined) {
    Object.assign(pkg, update);
    if (!semver.valid(pkg.version)) {
      delete pkg.version;
    }
  }

  return {
    ...module,
    code: JSON.stringify(lockfile, null, 4),
  };
}
/**
 * Applies the computed Yoga layout to the backing Android view, overriding
 * the calculated height — useful for overflow layouts.
 *
 * @param height explicit height in pixels; values <= 0 fall back to the
 *               Yoga-calculated layout height
 */
@SuppressLint("WrongConstant")
public void applyWithHeight(int height) {
    if (this.view != null) {
        // YogaDisplay.NONE maps to View.GONE; nothing else to lay out.
        if (this.getDisplay() == YogaDisplay.NONE) {
            this.view.setVisibility(View.GONE);
            return;
        }
        // Restore the visibility recorded before the node was hidden,
        // defaulting to VISIBLE when none was captured (previousVisibility < 0).
        if (this.previousVisibility >= 0) {
            this.view.setVisibility(this.previousVisibility);
        } else {
            this.view.setVisibility(View.VISIBLE);
        }

        ViewGroup.LayoutParams lp = this.view.getLayoutParams();
        if (lp == null) {
            // View was never attached with params; install a fresh set.
            lp = new RelativeLayout.LayoutParams(0, 0);
            this.view.setLayoutParams(lp);
        }

        lp.height = height > 0 ? height : (int) this.getLayoutHeight();
        lp.width = (int) this.getLayoutWidth();

        // Accumulated x/y offsets are expressed as margins when supported.
        if (lp instanceof ViewGroup.MarginLayoutParams) {
            ((ViewGroup.MarginLayoutParams) lp).leftMargin = (int) this.getAccX();
            ((ViewGroup.MarginLayoutParams) lp).topMargin = (int) this.getAccY();
        }
        this.view.requestLayout();
    }

    // Children are laid out even when this node itself has no backing view.
    for (YLayout lo : this.children) {
        lo.apply();
    }
}
TORONTO, ON – Perfectly healthy Leafs forward Nazem Kadri has implored team doctors to find "something, anything" that will keep him from playing any more games this season. Fearing that he will be the only player left after teammates James van Riemsdyk and Joffrey Lupul were placed on injured reserve on Thursday, the 25-year-old insisted that he felt as if something was not right with his body, either. "Yeah, I can't quite put my finger on it, doc, but there's something wrong in this region of my body," he said as he drew a big circle around his head and torso before team practice on Friday morning. "I have a scratchy feeling in my throat, is it strep? It feels like strep. What about this freckle, could it develop into a melanoma? That sounds serious." van Riemsdyk was seen skipping out of the team locker room with crutches above his head when told on Thursday that he would sit out the rest of the season. When approached by reporters, he said that he was "devastated not to be able to help out the team." Reporters were unable to get a comment from Lupul, who remains quarantined in his permanent room at St. Michael's Hospital.
"""Additional tests for the caster to ensure full code coverage. """ import pytest def test_corner(): from aflow.caster import cast assert cast("numbers", "spinD", None) is None assert cast("numbers", "spinD", "garbage") is None assert cast("numbers", "ldau_TLUJ", "garbage") == {'ldau_params': 'garbage'}
def draw_predictions(self, task):
    """Draw predicted actions onto the frames surrounding the clip's keyframe.

    The visualized span is ``clip_vis_length`` frames centered on the middle
    frame; it must lie entirely inside ``task.frames``.
    """
    # assumes display_bboxes is a tensor — TODO confirm expected device/layout
    boxes = task.display_bboxes.cpu().numpy()
    center = len(task.frames) // 2
    first = center - task.clip_vis_length // 2
    last = center + (task.clip_vis_length - 1) // 2
    assert first >= 0 and last < len(task.frames)
    task.frames = self.draw_clip_range(task.frames, task.action_preds,
                                       boxes, [first, last])
    return task
package streamgrep import ( "bufio" "io" "log" "strings" ) // Match is a single found instance of target within the stream type Match struct { before string target string after string } func (m Match) String() string { var sb strings.Builder if len(m.before) > 0 { sb.WriteString(m.before + " ") } sb.WriteString(m.target) if len(m.after) > 0 { sb.WriteString(" " + m.after) } return sb.String() } func newMatch(runes []rune, left, right int) Match { return Match{ before: trimmedString(runes[:left]), target: string(runes[left:right]), after: trimmedString(runes[right:]), } } // StreamGrep allows you to grep a stream of runes type StreamGrep struct { target []rune b, a int // before and after context length } // NewStreamGrep creates and returns a streamGrep func NewStreamGrep(target string, before, after int) StreamGrep { return StreamGrep{[]rune(target), before, after} } // Grep works similar to *nix grep but on an io.Reader such as os.Stdin // It sends matches out on a string channel func (g StreamGrep) Grep(stream io.Reader, c chan<- Match, eos rune) { defer close(c) start, stop := g.b, g.b+len(g.target) in := bufio.NewReader(stream) w := newWindow(g.b + len(g.target) + g.a) checkAndHandleMatch := func() { s := w.runes[start:stop] for i := range s { if s[i] != g.target[i] { return } } c <- newMatch(w.runes, start, stop) } for { r, _, err := in.ReadRune() if err == io.EOF || r == eos { for i := 0; i < g.a; i++ { w.push(rune(0)) checkAndHandleMatch() } break } if err != nil { log.Fatal(err) } w.push(r) checkAndHandleMatch() } } type window struct { runes []rune } func newWindow(cap int) *window { return &window{ runes: make([]rune, cap, cap), } } func (w *window) push(b rune) { w.runes = append(w.runes[1:], b) } func (w *window) String() string { return trimmedString(w.runes) } func trimmedString(runeList []rune) string { return strings.Trim(string(runeList), "\x00") }
/**
 * Base class of an object holding a strong (hard) reference to another object.
 *
 * @author G1ta0
 * @param <T> type of the referent
 */
public class AbstractHardReference<T> implements HardReference<T>
{
	// The referent; null once clear() has been called.
	private T reference;

	public AbstractHardReference(T reference)
	{
		this.reference = reference;
	}

	@Override
	public T get()
	{
		return reference;
	}

	@Override
	public void clear()
	{
		reference = null;
	}

	/**
	 * Two hard references are equal when they are the same instance, or when
	 * both referents are non-null and equal. A cleared reference is therefore
	 * only ever equal to itself.
	 */
	@SuppressWarnings("rawtypes")
	@Override
	public boolean equals(Object o)
	{
		if (o == this)
			return true;
		// instanceof is false for null, so no separate null check is needed.
		if (!(o instanceof AbstractHardReference))
			return false;
		Object other = ((AbstractHardReference) o).get();
		if (other == null)
			return false;
		return other.equals(get());
	}

	/**
	 * Paired with {@link #equals(Object)} to keep the equals/hashCode
	 * contract (the original class overrode equals only). Equal references
	 * share the referent's hash code. NOTE: the hash changes after
	 * {@link #clear()}, so avoid using references as hash-map keys across a
	 * clear.
	 */
	@Override
	public int hashCode()
	{
		return reference == null ? 0 : reference.hashCode();
	}
}