/**
*
 * [ASCII art banner]
*
 * Byakuren - A theme color extraction library implemented in C.
*
* Copyright (c) 2017 XadillaX <<EMAIL>>
*
 * MIT License <https://github.com/XadillaX/byakuren/blob/master/LICENSE>
*/
#ifndef __BKR_COMMON_H__
#define __BKR_COMMON_H__
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include "third-party/xmempool/xmempool.h"
#ifdef __cplusplus
extern "C" {
#endif
#ifndef NULL
#define NULL (0)
#endif
#define SAFE_DESTROY_POOL(pool) if(pool) { xmem_destroy_pool(pool); pool = NULL; }
#define SAFE_FREE(pointer) if(pointer) { free(pointer); pointer = NULL; }
#define MAX_INT (2147483647)
#define BKR_RGB_TO_INT32(r, g, b) (((r) << 16) + ((g) << 8) + (b))
#define BKR_IS_GRAY(a, offset) (abs((a)->red - (a)->green) <= offset && \
abs((a)->red - (a)->blue) <= offset && \
abs((a)->green - (a)->blue) <= offset)
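/*
 * Usage illustration (hypothetical values): BKR_RGB_TO_INT32 packs a color into
 * the low 24 bits of an int, e.g. r=255, g=128, b=64 gives
 * (255 << 16) + (128 << 8) + 64 = 0xFF8040 = 16744512. BKR_IS_GRAY treats a
 * pixel as gray when every pairwise channel difference is within `offset`,
 * e.g. {120, 123, 118} counts as gray for any offset >= 5.
 */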
/**
 * RGB color struct
*/
typedef struct bkr_rgb {
uint8_t red;
uint8_t green;
uint8_t blue;
} bkr_rgb;
/**
 * Theme color result struct (color, packed color value, pixel count)
*/
typedef struct bkr_color_stats {
bkr_rgb color;
uint32_t value;
uint32_t count;
} bkr_color_stats;
/**
 * Octree node struct
*/
typedef struct bkr_octree_node {
uint32_t red_components;
uint32_t green_components;
uint32_t blue_components;
uint8_t is_leaf;
uint32_t pixel_count;
struct bkr_octree_node* children[8];
} bkr_octree_node;
/**
 * Octree reducible linked-list node struct
*/
typedef struct bkr_octree_reducible_list_node {
struct bkr_octree_node* node;
struct bkr_octree_reducible_list_node* next;
} bkr_octree_reducible_list_node;
/**
 * Standard palette struct
*/
typedef struct bkr_palette_array {
uint32_t count;
bkr_rgb* colors;
} bkr_palette_array;
/**
 * Minimum-difference (mindiff) algorithm parameter struct
*/
typedef struct bkr_mindiff_parameter {
bkr_palette_array* palette;
int16_t gray_offset;
} bkr_mindiff_parameter;
// * * * *
// ** Note: multithreading is NOT supported **
// mainly because the memory pool does not support it
// ** ---------------------- **
// * *
extern xmem_pool_handle bkr_rgb_pool;
extern int bkr_init();
extern void bkr_destroy();
// Comparator function used to sort color stats results
extern int _stats_cmp(const void* a, const void* b);
#ifdef __cplusplus
}
#endif
#endif
|
def dump_no_canonicalize_svndate(sbox):
sbox.build(create_wc=False, empty=True)
svntest.actions.enable_revprop_changes(sbox.repo_dir)
propval = "2015-01-01T00:00:00.0Z"
svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
"propset", "--revprop", "-r0", "svn:date",
propval,
sbox.repo_url)
dump_lines = svntest.actions.run_and_verify_dump(sbox.repo_dir)
assert propval + '\n' in dump_lines |
/**
 * Return true if the input is a directory in the FTP resource. Works only
 * if you have permission to change to the specified directory.
*/
public boolean isDirectory(String dirName) throws FileResourceException {
boolean isDir = true;
String currentDirectory = getCurrentDirectory();
try {
setCurrentDirectory(dirName);
}
catch (FileResourceException e) {
isDir = false;
}
finally {
try {
setCurrentDirectory(currentDirectory);
}
catch (Exception e) {
}
}
return isDir;
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.profiler.j2ee;
import org.netbeans.api.java.project.JavaProjectConstants;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectUtils;
import org.netbeans.api.project.SourceGroup;
import org.netbeans.lib.profiler.client.ClientUtils;
import org.netbeans.lib.profiler.common.Profiler;
import org.netbeans.modules.j2ee.deployment.devmodules.api.JSPServletFinder;
import org.netbeans.modules.j2ee.spi.ejbjar.EarProvider;
import org.netbeans.modules.profiler.nbimpl.project.ProjectUtilities;
import org.netbeans.modules.web.api.webmodule.WebModule;
import org.netbeans.modules.web.api.webmodule.WebProjectConstants;
import org.openide.ErrorManager;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.NbBundle;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.text.MessageFormat;
import java.util.*;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.netbeans.modules.profiler.api.java.JavaProfilerSource;
import org.openide.util.Lookup;
/**
*
* @author <NAME>
*/
@NbBundle.Messages({
"WebProjectUtils_CannotFindServletMsg=Cannot resolve servlet class generated from {0}. It will not be included into root methods.",
"WebProjectUtils_CannotFindServletClassMsg=Cannot find servlet class {0} defined in deployment descriptor. Its methods will not be included into root methods.",
"WebProjectUtils_CannotFindFilterClassMsg=Cannot find filter class {0} defined in deployment descriptor. Its methods will not be included into root methods.",
"WebProjectUtils_CannotFindListenerClassMsg=Cannot find listener class {0} defined in deployment descriptor. Its methods will not be included into root methods."
})
public class WebProjectUtils {
//~ Static fields/initializers -----------------------------------------------------------------------------------------------
final private static Logger LOGGER = Logger.getLogger(WebProjectUtils.class.getName());
private static final Map<ClientUtils.SourceCodeSelection, String> jspClass2NameMap = new HashMap<ClientUtils.SourceCodeSelection, String>();
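    // {method name, JVM method descriptor}: _jspService(HttpServletRequest, HttpServletResponse) returning void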
private static final String[][] jspServletMethods = new String[][] {
{
"_jspService",
"(Ljavax/servlet/http/HttpServletRequest;Ljavax/servlet/http/HttpServletResponse;)V"
}
};
//~ Methods ------------------------------------------------------------------------------------------------------------------
public static Document getDeploymentDescriptorDocument(FileObject deploymentDescriptorFile) {
Document deploymentDescriptorDocument = null;
try {
DocumentBuilderFactory dbfactory = DocumentBuilderFactory.newInstance();
dbfactory.setValidating(false);
DocumentBuilder builder = dbfactory.newDocumentBuilder();
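            // Resolve every external entity (e.g. a DTD reference) to an empty XML
            // document so parsing the deployment descriptor never triggers a network lookup.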
builder.setEntityResolver(new EntityResolver() {
public InputSource resolveEntity(String publicId, String systemId)
throws SAXException, IOException {
StringReader reader = new StringReader("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"); // NOI18N
InputSource source = new InputSource(reader);
source.setPublicId(publicId);
source.setSystemId(systemId);
return source;
}
});
deploymentDescriptorDocument = builder.parse(FileUtil.toFile(deploymentDescriptorFile));
} catch (Exception e) {
e.printStackTrace();
}
return deploymentDescriptorDocument;
}
public static Collection<Document> getDeploymentDescriptorDocuments(Project project, boolean subprojects) {
Collection<Document> documents = new ArrayList<Document>();
for (FileObject ddFile : getDeploymentDescriptorFileObjects(project, subprojects)) {
Document d = getDeploymentDescriptorDocument(ddFile);
if (d != null) documents.add(d);
}
return documents;
}
public static Collection<FileObject> getDeploymentDescriptorFileObjects(Project project, boolean subprojects) {
Collection<FileObject> descriptors = new ArrayList<FileObject>();
for (WebModule wm : getWebModules(project, subprojects)) {
FileObject d = wm.getDeploymentDescriptor();
if (d != null) descriptors.add(d);
}
return descriptors;
}
public static Collection<FileObject> getDocumentBaseFileObjects(Project project, boolean subprojects) {
Collection<FileObject> basefos = new ArrayList<FileObject>();
for (WebModule wm : getWebModules(project, subprojects)) {
FileObject d = wm.getDocumentBase();
if (d != null) basefos.add(d);
}
return basefos;
}
public static Collection<File> getDocumentBaseFiles(Project project, boolean subprojects) {
Collection<File> basefiles = new ArrayList<File>();
for (FileObject basefo : getDocumentBaseFileObjects(project, subprojects)) {
basefiles.add(FileUtil.toFile(basefo));
}
return basefiles;
}
public static boolean isWebProject(Lookup.Provider p) {
assert p != null;
return p.getLookup().lookup(WebModule.class) != null;
}
public static ArrayList[] getFilterClasses(Document deploymentDescriptorDocument) {
ArrayList mappedFilters = new ArrayList();
ArrayList notMappedFilters = new ArrayList();
NodeList filtersList = getFilters(deploymentDescriptorDocument);
NodeList filterMappingsList = getFilterMappings(deploymentDescriptorDocument);
Collection<String> mappedFilterNames = new HashSet<String>();
for (int i = 0; i < filterMappingsList.getLength(); i++) {
String mappedFilterName = getElementContent((Element) filterMappingsList.item(i), "filter-name"); // NOI18N
if ((mappedFilterName != null) && !mappedFilterNames.contains(mappedFilterName)) {
mappedFilterNames.add(mappedFilterName);
}
}
for (int i = 0; i < filtersList.getLength(); i++) {
String filterName = getElementContent((Element) filtersList.item(i), "filter-name"); // NOI18N
String filterClassName = getElementContent((Element) filtersList.item(i), "filter-class"); // NOI18N
if ((filterName != null) && (filterClassName != null) && mappedFilterNames.contains(filterName)) {
if (!mappedFilters.contains(filterClassName)) {
mappedFilters.add(filterClassName);
}
} else {
if (!notMappedFilters.contains(filterClassName)) {
notMappedFilters.add(filterClassName);
}
}
}
return new ArrayList[] { mappedFilters, notMappedFilters };
}
public static NodeList getFilterMappings(Document deploymentDescriptorDocument) {
return deploymentDescriptorDocument.getElementsByTagName("filter-mapping"); // NOI18N
}
public static NodeList getFilters(Document deploymentDescriptorDocument) {
return deploymentDescriptorDocument.getElementsByTagName("filter"); // NOI18N
}
public static boolean isHttpServlet(FileObject fo) {
// FIXME pass in the JavaProfilerSource instead
JavaProfilerSource src = JavaProfilerSource.createFrom(fo);
return src != null && src.isInstanceOf("javax.servlet.http.HttpServlet"); // NOI18N
}
public static boolean isJSP(FileObject fo) {
return "jsp".equals(fo.getExt()); // NOI18N
}
public static String getJSPFileContext(Project project, FileObject jspFile, boolean subprojects) {
Collection<FileObject> docBases = getDocumentBaseFileObjects(project, subprojects);
String relPathUsed = null;
for (FileObject docBase : docBases) {
if (docBase == null) {
continue;
}
String relativePath = FileUtil.getRelativePath(docBase, jspFile);
if (relativePath == null) {
continue;
}
if ((relPathUsed == null) || (relPathUsed.length() > relativePath.length())) {
relPathUsed = relativePath;
}
}
return relPathUsed;
}
public static ClientUtils.SourceCodeSelection getJSPFileRootMethod(Project project, FileObject jspFile) {
return getJSPMethodSignature(project, jspFile);
}
public static ClientUtils.SourceCodeSelection getJSPMethodSignature(Project project, FileObject jspFile) {
String jspPseudoServletClass = getJSPPseudoServletClass(project, jspFile);
if (jspPseudoServletClass == null) {
LOGGER.log(Level.WARNING, Bundle.WebProjectUtils_CannotFindServletMsg(FileUtil.toFile(jspFile).getPath()));
return null; // According to Issue 62519, jsp file is not resolved/found due to an odd project layout
}
return getJSPMethodSignature(jspPseudoServletClass);
}
public static ClientUtils.SourceCodeSelection getJSPMethodSignature(String jspPseudoServletClass) {
return new ClientUtils.SourceCodeSelection(jspPseudoServletClass, jspServletMethods[0][0], jspServletMethods[0][1]);
}
public static Set<ClientUtils.SourceCodeSelection> getJSPMethodSignatures(Project project, boolean subprojects) {
Set<ClientUtils.SourceCodeSelection> methodSignatures = new HashSet<ClientUtils.SourceCodeSelection>();
Collection<FileObject> baseFOs = getDocumentBaseFileObjects(project, subprojects);
if ((baseFOs == null) || baseFOs.isEmpty()) {
return methodSignatures;
}
for (FileObject baseFO : baseFOs) {
int jspStringStartIndex = baseFO.getPath().length();
List<FileObject> jspFileObjects = getJSPs(baseFO);
ClientUtils.SourceCodeSelection methodDescription;
for (FileObject jsp : jspFileObjects) {
methodDescription = getJSPMethodSignature(project, jsp);
if (methodDescription != null) {
// TODO: Get rid of this ridiculous formatter and enhance FlatProfileContainer to return a SourceCodeSelection of a certain row
jspClass2NameMap.put(methodDescription, jsp.getPath().substring(jspStringStartIndex));
// ****
methodSignatures.add(methodDescription);
}
}
}
return methodSignatures;
}
public static String getJSPPath(ClientUtils.SourceCodeSelection jspMethod) {
return jspClass2NameMap.get(jspMethod);
}
public static String getJSPPseudoServletClass(Project project, FileObject jspFile) {
String jspPseudoServletJavaFile = JSPServletFinder.findJSPServletFinder(jspFile)
.getServletResourcePath(getJSPFileContext(project, jspFile, true));
if (jspPseudoServletJavaFile == null) {
return null; // According to Issue 62519, jsp file is not resolved/found due to an odd project layout
}
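        // For example (hypothetical path): "org/apache/jsp/index_jsp.java" becomes "org.apache.jsp.index_jsp".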
String jspPseudoServletJavaClass = jspPseudoServletJavaFile.substring(0,
jspPseudoServletJavaFile.length()
- ".java".length()).replace('/', '.'); // NOI18N
return jspPseudoServletJavaClass;
}
public static ClientUtils.SourceCodeSelection[] getJSPRootMethods(Project project, boolean subprojects) {
Set<ClientUtils.SourceCodeSelection> jspRootMethodDescriptions = getJSPMethodSignatures(project, subprojects);
if (jspRootMethodDescriptions == null) {
return new ClientUtils.SourceCodeSelection[0];
}
return jspRootMethodDescriptions.toArray(new ClientUtils.SourceCodeSelection[0]);
}
public static ArrayList getJSPs(FileObject documentBase) {
ArrayList jspFileObjects = new ArrayList();
if (documentBase.isFolder()) {
searchForJSPs(documentBase, jspFileObjects);
}
return jspFileObjects;
}
public static ArrayList getListenerClasses(Document deploymentDescriptorDocument) {
ArrayList listeners = new ArrayList();
NodeList listenersList = getListeners(deploymentDescriptorDocument);
for (int i = 0; i < listenersList.getLength(); i++) {
String listenerClass = getElementContent((Element) listenersList.item(i), "listener-class"); // NOI18N
if ((listenerClass != null) && !listeners.contains(listenerClass)) {
listeners.add(listenerClass);
}
}
return listeners;
}
public static NodeList getListeners(Document deploymentDescriptorDocument) {
return deploymentDescriptorDocument.getElementsByTagName("listener"); // NOI18N
}
public static boolean isMappedServlet(FileObject servlet, Project project, boolean subprojects) {
Collection<Document> dds = getDeploymentDescriptorDocuments(project, subprojects);
for (Document dd : dds) {
if (getServletMapping(servlet, dd) != null) {
return true;
}
}
return false;
}
public static ArrayList[] getServletClasses(Document deploymentDescriptorDocument) {
ArrayList mappedServlets = new ArrayList();
ArrayList notMappedServlets = new ArrayList();
NodeList servletsList = getServlets(deploymentDescriptorDocument);
NodeList servletMappingsList = getServletMappings(deploymentDescriptorDocument);
Collection<String> mappedServletNames = new HashSet<String>();
for (int i = 0; i < servletMappingsList.getLength(); i++) {
String mappedServletName = getElementContent((Element) servletMappingsList.item(i), "servlet-name"); // NOI18N
if ((mappedServletName != null) && !mappedServletNames.contains(mappedServletName)) {
mappedServletNames.add(mappedServletName);
}
}
for (int i = 0; i < servletsList.getLength(); i++) {
String servletName = getElementContent((Element) servletsList.item(i), "servlet-name"); // NOI18N
String servletClassName = getElementContent((Element) servletsList.item(i), "servlet-class"); // NOI18N
if ((servletName != null) && (servletClassName != null) && mappedServletNames.contains(servletName)) {
if (!mappedServlets.contains(servletClassName)) {
mappedServlets.add(servletClassName);
}
} else {
if (!notMappedServlets.contains(servletClassName)) {
notMappedServlets.add(servletClassName);
}
}
}
return new ArrayList[] { mappedServlets, notMappedServlets };
}
public static String getServletMapping(FileObject servletFO, Document deploymentDescriptorDocument) {
// FIXME - pass in JavaProfilerSource param
JavaProfilerSource src = JavaProfilerSource.createFrom(servletFO);
if (src == null) {
return null;
}
String servletClassName = src.getTopLevelClass().getVMName();
if ((servletClassName == null) || (deploymentDescriptorDocument == null)) {
return null;
}
NodeList servletsList = getServlets(deploymentDescriptorDocument);
for (int i = 0; i < servletsList.getLength(); i++) {
String servletName = getElementContent((Element) servletsList.item(i), "servlet-name"); // NOI18N
String className = getElementContent((Element) servletsList.item(i), "servlet-class"); // NOI18N
if ((servletName != null) && (className != null) && servletClassName.equals(className)) {
NodeList servletMappingsList = getServletMappings(deploymentDescriptorDocument);
for (int j = 0; j < servletMappingsList.getLength(); j++) {
if (servletName.equals(getElementContent((Element) servletMappingsList.item(j), "servlet-name"))) { // NOI18N
return getElementContent((Element) servletMappingsList.item(j), "url-pattern"); // NOI18N
}
}
return null;
}
}
return null;
}
public static NodeList getServletMappings(Document deploymentDescriptorDocument) {
return deploymentDescriptorDocument.getElementsByTagName("servlet-mapping"); // NOI18N
}
public static NodeList getServlets(Document deploymentDescriptorDocument) {
return deploymentDescriptorDocument.getElementsByTagName("servlet"); // NOI18N
}
// returns true if passed fo lives in /web directory
public static boolean isWebDocumentSource(FileObject fo, Project project) {
SourceGroup[] sg = ProjectUtils.getSources(project).getSourceGroups(WebProjectConstants.TYPE_DOC_ROOT);
for (int i = 0; i < sg.length; i++) {
if (FileUtil.isParentOf(sg[i].getRootFolder(), fo)) {
return true;
}
}
return false;
}
// returns true if passed fo lives in /src directory
public static boolean isWebJavaSource(FileObject fo, Project project) {
SourceGroup[] sg = ProjectUtils.getSources(project).getSourceGroups(JavaProjectConstants.SOURCES_TYPE_JAVA);
for (int i = 0; i < sg.length; i++) {
if (FileUtil.isParentOf(sg[i].getRootFolder(), fo)) {
return true;
}
}
return false;
}
public static Collection<WebModule> getWebModules(Project project, boolean subprojects) {
Collection<WebModule> wms = new ArrayList<WebModule>();
WebModule wm = WebModule.getWebModule(project.getProjectDirectory());
if (wm != null) {
wms.add(wm);
}
if (subprojects) {
EarProvider earprovider = project.getLookup().lookup(EarProvider.class);
if ((wm == null) && (earprovider != null)) {
Set<Project> projects = new HashSet<Project>();
ProjectUtilities.fetchSubprojects(project, projects);
for (Project subp : projects) {
wms.addAll(getWebModules(subp, subprojects));
}
}
}
return wms;
}
public static void resetJSPNameCache() {
jspClass2NameMap.clear();
}
private static String getElementContent(Element rootElement, String tagName) {
if (rootElement == null) {
return null;
}
NodeList elementsList = rootElement.getElementsByTagName(tagName);
if ((elementsList == null) || (elementsList.getLength() == 0)) {
return null;
}
Node element = elementsList.item(0);
if (element == null) {
return null;
}
String elementContents = element.getTextContent();
if (elementContents == null) {
return null;
}
return elementContents.trim();
}
private static void searchForJSPs(FileObject root, ArrayList jspFileObjects) {
FileObject[] childs = root.getChildren();
FileObject child;
for (int i = 0; i < childs.length; i++) {
child = childs[i];
if (child.isFolder()) {
searchForJSPs(child, jspFileObjects);
} else if (child.isData() && "jsp".equals(child.getExt())) { // NOI18N
jspFileObjects.add(child);
}
}
}
}
|
def filter_grid(size, dx, center=False):
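    # Regular 1-D grid of `size` points with spacing `dx`; with center=True the
    # points are shifted by half a cell and re-centered so the grid is symmetric about zero.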
x = np.arange(0, size * dx, dx)
if center:
x = (x + dx / 2) - (size / 2) * dx
assert x.size == size
return x |
From event detection to storytelling on microblogs
The problem of detecting events from content published on microblogs has garnered much interest in recent times. In this paper, we address the questions of what happens after the outbreak of an event in terms of how the event gradually progresses and attains each of its milestones, and how it eventually dissipates. We propose a model-based approach to capture the gradual unfolding of an event over time. This enables the model to automatically produce entire timeline trajectories of events from the time of their outbreak to their disappearance. We apply our model to the Twitter messages collected about Ebola during the 2014 outbreak and obtain the progression timelines of several events that occurred during the outbreak. We also compare our model to several existing topic modeling and event detection baselines in the literature to demonstrate its efficiency. |
<filename>formatters/basic.go
package formatters
import (
"github.com/xrash/gol/v2"
"strings"
"time"
)
const (
TIMESTAMP_FORMAT = "2006-01-02T15:04:05.000Z07:00"
MESSAGE_FORMAT = "[%timestamp%] [%level%] %message%\n"
)
type nowProvider func() time.Time
type BasicFormatter struct {
nowProvider nowProvider
}
func NewBasicFormatter() *BasicFormatter {
return &BasicFormatter{
nowProvider: time.Now,
}
}
func (f *BasicFormatter) Format(message string, l gol.LogLevel) string {
timestamp := f.nowProvider().UTC().Format(TIMESTAMP_FORMAT)
params := map[string]string{
"%timestamp%": timestamp,
"%message%": message,
"%level%": l.String(),
}
line := MESSAGE_FORMAT
for key, value := range params {
line = strings.Replace(line, key, value, -1)
}
return line
}
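// Example (hypothetical): with message "hello" and a level whose String()
// returns "INFO", Format yields "[2021-07-14T13:26:39.000Z] [INFO] hello\n".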
|
#include <stdio.h>
#include <stdlib.h>
#include "Inverte.h"
void inverte(Lista *lista)
{
if (vazio(lista))
{
return;
}
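    // In-place reversal: the original first element stays put and ends up as the
    // tail; each iteration unlinks the node that follows it and re-links that
    // node at the head of the list.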
for (Elemento *primeiro = lista->primeiro; primeiro->prox != NULL;)
{
Elemento *aux = primeiro->prox;
primeiro->prox = aux->prox;
aux->prox = lista->primeiro;
lista->primeiro = aux;
}
}
|
// This file is part of the Acts project.
//
// Copyright (C) 2019-2021 CERN for the benefit of the Acts project
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#include "ActsExamples/Options/CommonOptions.hpp"
#include "Acts/Utilities/Helpers.hpp"
#include "ActsExamples/Utilities/Options.hpp"
#include <exception>
#include <fstream>
#include <regex>
#include <system_error>
using namespace boost::program_options;
boost::program_options::options_description
ActsExamples::Options::makeDefaultOptions(const std::string& caption) {
std::cout
<< "\n\n======================= DEPRECATION NOTICE "
"========================\n"
"The examples executables is deprecated. They will be removed in a\n"
"future version.\n"
"Consider using the python bindings for the example algorithms: \n"
"https://acts.readthedocs.io/en/latest/examples/python_bindings.html\n"
"==================================================================="
"\n\n"
<< std::endl;
options_description opt(caption);
opt.add_options()("help,h", "Produce help message");
opt.add_options()(
"loglevel,l", value<size_t>()->default_value(2),
"The output log level. Please set the wished number (0 = VERBOSE, 1 = "
"DEBUG, 2 = INFO, 3 = WARNING, 4 = ERROR, 5 = FATAL).");
opt.add_options()(
"response-file", value<std::string>()->default_value(""),
"Configuration file (response file) replacing command line options.");
return opt;
}
void ActsExamples::Options::addSequencerOptions(
boost::program_options::options_description& opt) {
// sequencer options
opt.add_options()("events,n", value<size_t>(),
"The number of events to process. If not given, all "
"available events will be processed.")(
"skip", value<size_t>()->default_value(0),
"The number of events to skip")("jobs,j", value<int>()->default_value(-1),
"Number of parallel jobs, negative for "
"automatic.");
}
void ActsExamples::Options::addRandomNumbersOptions(
boost::program_options::options_description& opt) {
opt.add_options()("rnd-seed", value<uint64_t>()->default_value(1234567890u),
"Random numbers seed.");
}
void ActsExamples::Options::addGeometryOptions(
boost::program_options::options_description& opt) {
opt.add_options()("geo-surface-loglevel", value<size_t>()->default_value(3),
"The outoput log level for the surface building.")(
"geo-layer-loglevel", value<size_t>()->default_value(3),
"The output log level for the layer building.")(
"geo-volume-loglevel", value<size_t>()->default_value(3),
"The output log level "
"for the volume "
"building.");
}
void ActsExamples::Options::addMaterialOptions(
boost::program_options::options_description& opt) {
opt.add_options()(
"mat-input-type", value<std::string>()->default_value("build"),
"The way material is loaded: 'none', 'build', 'proto', 'file'.")(
"mat-input-file", value<std::string>()->default_value(""),
"Name of the material map input file, supported: '.json', '.cbor' or "
"'.root'.")("mat-output-file", value<std::string>()->default_value(""),
"Name of the material map output file (without extension).")(
"mat-output-sensitives", value<bool>()->default_value(true),
"Write material information of sensitive surfaces.")(
"mat-output-approaches", value<bool>()->default_value(true),
"Write material information of approach surfaces.")(
"mat-output-representing", value<bool>()->default_value(true),
"Write material information of representing surfaces.")(
"mat-output-boundaries", value<bool>()->default_value(true),
"Write material information of boundary surfaces.")(
"mat-output-volumes", value<bool>()->default_value(true),
"Write material information of volumes.")(
"mat-output-dense-volumes", value<bool>()->default_value(false),
"Write material information of dense volumes.")(
"mat-output-allmaterial", value<bool>()->default_value(false),
"Add protoMaterial to all surfaces and volume for the mapping.");
}
void ActsExamples::Options::addOutputOptions(
boost::program_options::options_description& opt,
OutputFormat formatFlags) {
// Add specific options for this example
opt.add_options()("output-dir", value<std::string>()->default_value(""),
"Output directory location.");
if (ACTS_CHECK_BIT(formatFlags, OutputFormat::Root)) {
opt.add_options()("output-root", bool_switch(),
"Switch on to write '.root' output file(s).");
}
if (ACTS_CHECK_BIT(formatFlags, OutputFormat::Csv)) {
opt.add_options()("output-csv", bool_switch(),
"Switch on to write '.csv' output file(s).");
}
if (ACTS_CHECK_BIT(formatFlags, OutputFormat::Obj)) {
opt.add_options()("output-obj", bool_switch(),
"Switch on to write '.obj' output file(s).");
}
if (ACTS_CHECK_BIT(formatFlags, OutputFormat::Json)) {
opt.add_options()("output-json", bool_switch(),
"Switch on to write '.json' output file(s).");
}
if (ACTS_CHECK_BIT(formatFlags, OutputFormat::Cbor)) {
opt.add_options()("output-cbor", bool_switch(),
"Switch on to write '.cbor' output file(s).");
}
if (ACTS_CHECK_BIT(formatFlags, OutputFormat::Txt)) {
opt.add_options()("output-txt", bool_switch(),
"Switch on to write '.txt' output file(s).");
}
}
void ActsExamples::Options::addInputOptions(
boost::program_options::options_description& opt) {
// Add specific options for this example
opt.add_options()("input-dir", value<std::string>()->default_value(""),
"Input directory location.")(
"input-files", value<std::vector<std::string>>(),
"Input files, can occur multiple times.")(
"input-root", value<bool>()->default_value(false),
"Switch on to read '.root' file(s).")(
"input-csv", value<bool>()->default_value(false),
"Switch on to read '.csv' file(s).")("input-obj",
value<bool>()->default_value(false),
"Switch on to read '.obj' file(s).")(
"input-json", value<bool>()->default_value(false),
"Switch on to read '.json' file(s).")(
"input-cbor", value<bool>()->default_value(false),
"Switch on to read '.cbor' file(s).");
}
boost::program_options::variables_map ActsExamples::Options::parse(
const boost::program_options::options_description& opt, int argc,
char* argv[]) noexcept(false) {
variables_map vm;
store(command_line_parser(argc, argv).options(opt).run(), vm);
notify(vm);
if (vm.count("response-file") != 0u and
not vm["response-file"].template as<std::string>().empty()) {
// Load the file and tokenize it
std::ifstream ifs(vm["response-file"].as<std::string>().c_str());
if (!ifs) {
throw(std::system_error(std::error_code(),
"Could not open response file."));
}
// Read the whole file into a string
std::stringstream ss;
ss << ifs.rdbuf();
std::string rString = ss.str();
std::vector<std::string> args;
const std::regex rgx("[ \t\r\n\f]");
std::sregex_token_iterator iter(rString.begin(), rString.end(), rgx, -1);
std::sregex_token_iterator end;
for (; iter != end; ++iter) {
if (std::string(*iter).empty()) {
continue;
}
args.push_back(*iter);
}
// Parse the file and store the options
store(command_line_parser(args).options(opt).run(), vm);
}
// Automatically handle help
if (vm.count("help") != 0u) {
std::cout << opt << std::endl;
vm.clear();
}
return vm;
}
Acts::Logging::Level ActsExamples::Options::readLogLevel(
const boost::program_options::variables_map& vm) {
return Acts::Logging::Level(vm["loglevel"].as<size_t>());
}
ActsExamples::Sequencer::Config ActsExamples::Options::readSequencerConfig(
const boost::program_options::variables_map& vm) {
Sequencer::Config cfg;
cfg.skip = vm["skip"].as<size_t>();
if (not vm["events"].empty()) {
cfg.events = vm["events"].as<size_t>();
}
cfg.logLevel = readLogLevel(vm);
cfg.numThreads = vm["jobs"].as<int>();
if (not vm["output-dir"].empty()) {
cfg.outputDir = vm["output-dir"].as<std::string>();
}
return cfg;
}
// Read the random numbers config.
ActsExamples::RandomNumbers::Config
ActsExamples::Options::readRandomNumbersConfig(
const boost::program_options::variables_map& vm) {
ActsExamples::RandomNumbers::Config cfg;
cfg.seed = vm["rnd-seed"].as<uint64_t>();
return cfg;
}
|
<reponame>rockersey/Biometria
package view.consultar;
import java.awt.Color;
import java.awt.Font;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import model.tabela.FuncionariosTableModel;
import service.FuncionarioService;
import util.Utils;
public class ConsultarFuncionario extends JFrame implements ActionListener {
private static final long serialVersionUID = 867099008028671483L;
FuncionarioService funcionarioService = new FuncionarioService();
Font font = new Font("Arial", Font.PLAIN, 22);
Font font1 = new Font("Arial", Font.BOLD, 28);
Font font2 = new Font("Arial", Font.PLAIN, 15);
JPanel painelFundo;
JTable tabela;
JScrollPane scrollPane;
JButton voltar;
JLabel titulo;
JFrame consultarFuncionarioFrame = new JFrame("Consultar Funcionários");
public ConsultarFuncionario() throws Exception {
consultarFuncionarioFrame.setBounds(20, 20, 800, 500);
consultarFuncionarioFrame.setLayout(null);
consultarFuncionarioFrame.setResizable(false);
consultarFuncionarioFrame.setLocationRelativeTo(null);
consultarFuncionarioFrame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
titulo = new JLabel("Consultar Funcionários");
titulo.setBounds(20, 10, 400, 50);
titulo.setFont(font1);
voltar = new JButton("Voltar");
voltar.setBounds(670, 25, 100, 30);
voltar.setFont(font2);
voltar.addActionListener(this);
painelFundo = new JPanel();
FuncionariosTableModel funcionarioTableModel = new FuncionariosTableModel();
tabela = new JTable(funcionarioTableModel);
tabela.setModel(funcionarioTableModel);
funcionarioTableModel.addListaDeProprietarios(funcionarioService.buscarFuncionario());
scrollPane = new JScrollPane(tabela);
tabela.setFillsViewportHeight(true);
tabela.setBounds(20, 20, 800, 500);
painelFundo.add(scrollPane);
painelFundo.setBounds(20, 120, 750, 300);
painelFundo.setLayout( new GridLayout(1, 1));
painelFundo.setForeground(Color.red);
consultarFuncionarioFrame.getContentPane().add(titulo);
consultarFuncionarioFrame.getContentPane().add(painelFundo);
consultarFuncionarioFrame.getContentPane().add(voltar);
consultarFuncionarioFrame.setVisible(true);
}
@Override
public void actionPerformed(ActionEvent event) {
if(event.getSource() == voltar) {
Utils.voltarParaMenu(consultarFuncionarioFrame);
}
}
}
|
def run_until(self, y1):
ts = utils.monthly_timeseries(self.yr, y1)
ts = np.append(ts, y1)
self.iterations = 0
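        # Advance month by month: each target time t (seconds since y0) is reached
        # by calling step() repeatedly until the internal clock self.t catches up.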
for y in ts:
t = (y - self.y0) * SEC_IN_YEAR
while self.t < t:
self.step(t-self.t)
self.iterations = self.iterations + 1
if self.check_for_boundaries:
if self.fls[-1].thick[-1] > 10:
raise RuntimeError('Glacier exceeds domain boundaries, '
'at year: {}'.format(self.yr))
for fl in self.fls:
if torch.any(~torch.isfinite(fl.thick)):
raise FloatingPointError('NaN in numerical solution, '
'at year: {}'.format(self.yr)) |
๏ปฟimport * as _ from "lodash";
import * as moment from "moment";
import * as Utils from "@paperbits/common/utils";
import { ISettingsProvider } from "@paperbits/common/configuration";
import { HttpHeader, HttpMethod, HttpClient } from "@paperbits/common/http";
import { IGithubClient } from "./IGithubClient";
import { IGithubFile } from "./IGithubFile";
import { IGithubCommit } from "./IGithubCommit";
import { IGithubReference } from "./IGithubReference";
import { IGithubGetTreeResponse } from "./IGithubGetTreeResponse";
import { IGithubCreateTreeResponse } from "./IGithubCreateTreeResponse";
import { IGithubTreeItem } from "./IGithubTreeItem";
import { IGithubCreateBlobReponse } from "./IGithubCreateBlobReponse";
import { IGithubBlob } from "./IGithubBlob";
import { IGithubGetBlobResponse } from "./IGithubGetBlobResponse";
import { IGithubObject } from "./IGithubObject";
import { GithubMode } from "./githubMode";
import { GithubTreeItemType } from "./githubTreeItemType";
export class GithubClient implements IGithubClient {
private baseUrl: string;
private baseRepositoriesUrl: string;
private repositoryOwner: string;
private authorizationToken: string;
private mandatoryHttpHeaders: HttpHeader[];
private changes: IGithubTreeItem[];
public repositoryName: string;
constructor(
private readonly settingsProvider: ISettingsProvider,
private readonly httpClient: HttpClient
) {
// initialization...
this.settingsProvider = settingsProvider;
this.httpClient = httpClient;
// rebinding...
this.getHeads = this.getHeads.bind(this);
this.ensureConfig = this.ensureConfig.bind(this);
this.changes = [];
}
private applyConfiguration(githubSettings: Object): Promise<any> {
this.authorizationToken = githubSettings["authorizationKey"];
this.repositoryName = githubSettings["repositoryName"];
this.repositoryOwner = githubSettings["repositoryOwner"];
this.baseUrl = `https://api.github.com/repos/${this.repositoryOwner}/${this.repositoryName}`;
this.baseRepositoriesUrl = `${this.baseUrl}/git`;
this.mandatoryHttpHeaders = [{ name: "Authorization", value: "token " + this.authorizationToken }];
return Promise.resolve();
}
private async ensureConfig(): Promise<void> {
const settings = await this.settingsProvider.getSetting("github");
await this.applyConfiguration(settings);
}
public async getFileContent(path: string): Promise<IGithubFile> {
await this.ensureConfig();
const response = await this.httpClient.send<IGithubFile>({
url: `${this.baseUrl}/contents/${path}`,
headers: this.mandatoryHttpHeaders
});
return response.toObject();
}
/**
* Deletes a file in a single commit.
* Please see https://developer.github.com/v3/repos/contents/
*/
public async deleteFile(path: string, blobSha: string, commitMsg: string): Promise<void> {
await this.ensureConfig();
const requestBody = {
sha: blobSha,
message: commitMsg,
branch: "master"
};
await this.httpClient.send({
url: `${this.baseUrl}/contents/${path}`,
method: HttpMethod.delete,
headers: this.mandatoryHttpHeaders,
body: JSON.stringify(requestBody)
});
}
/**
* Please see http://developer.github.com/v3/git/refs/
*/
public async getHeads(): Promise<IGithubReference[]> {
await this.ensureConfig();
const response = await this.httpClient.send<IGithubReference[]>({
url: `${this.baseRepositoriesUrl}/refs/heads`,
method: HttpMethod.get,
headers: this.mandatoryHttpHeaders
});
return response.toObject();
}
/**
* Please see http://developer.github.com/v3/git/commits/
*/
public async getCommit(commitSha: string): Promise<IGithubCommit> {
await this.ensureConfig();
const response = await this.httpClient.send<IGithubCommit>({
url: `${this.baseRepositoriesUrl}/commits/${commitSha}`,
method: HttpMethod.get,
headers: this.mandatoryHttpHeaders
});
return response.toObject();
}
/**
* Please see http://developer.github.com/v3/git/commits/
*/
public async createCommit(parentCommitSha: string, tree: string, message: string): Promise<IGithubCommit> {
await this.ensureConfig();
const requestBody = {
message: message,
tree: tree,
parents: parentCommitSha ? [parentCommitSha] : []
};
const response = await this.httpClient.send<IGithubCommit>({
url: `${this.baseRepositoriesUrl}/commits`,
method: HttpMethod.post,
headers: this.mandatoryHttpHeaders,
body: JSON.stringify(requestBody)
});
return response.toObject();
}
/**
* Please see http://developer.github.com/v3/git/trees/
*/
public async getTree(treeSha: string): Promise<IGithubGetTreeResponse> {
await this.ensureConfig();
const response = await this.httpClient.send<IGithubGetTreeResponse>({
url: `${this.baseRepositoriesUrl}/trees/${treeSha}?recursive=1`,
method: HttpMethod.get,
headers: this.mandatoryHttpHeaders
});
return response.toObject();
}
/**
* Please see http://developer.github.com/v3/git/trees/
*/
public async createTree(baseTreeSha: string, treeItems: IGithubTreeItem[]): Promise<IGithubCreateTreeResponse> {
await this.ensureConfig();
const tree = new Array<Object>();
treeItems.forEach(treeItem => {
if (treeItem.path.startsWith("/")) {
treeItem.path = treeItem.path.substr(1);
}
tree.push({
path: treeItem.path,
sha: treeItem.sha,
mode: GithubMode.file,
type: GithubTreeItemType.blob
});
});
const requestBody = {
base_tree: baseTreeSha,
tree: tree
};
const response = await this.httpClient.send<IGithubCreateTreeResponse>({
url: `${this.baseRepositoriesUrl}/trees`,
method: HttpMethod.post,
headers: this.mandatoryHttpHeaders,
body: JSON.stringify(requestBody)
});
return response.toObject();
}
/**
* Please see http://developer.github.com/v3/git/refs/
*/
public async createReference(branch: string, commitSha: string): Promise<any> {
await this.ensureConfig();
const requestBody = {
ref: `refs/heads/${branch}`,
sha: commitSha
};
const response = await this.httpClient.send({
url: `${this.baseRepositoriesUrl}/refs`,
method: HttpMethod.post,
headers: this.mandatoryHttpHeaders,
body: JSON.stringify(requestBody)
});
return response.toObject();
}
/**
* Please see http://developer.github.com/v3/git/refs/
*/
public async deleteReference(branch: string): Promise<void> {
await this.ensureConfig();
await this.httpClient.send({
url: `${this.baseRepositoriesUrl}/refs/heads/${branch}`,
method: HttpMethod.delete,
headers: this.mandatoryHttpHeaders
});
}
/**
* Please see http://developer.github.com/v3/git/refs/
*/
public async updateReference(branch: string, commitSha: string): Promise<IGithubReference> {
await this.ensureConfig();
const requestBody = {
sha: commitSha,
force: true
};
const response = await this.httpClient.send<IGithubReference>({
url: `${this.baseRepositoriesUrl}/refs/heads/${branch}`,
method: HttpMethod.patch,
headers: this.mandatoryHttpHeaders,
body: JSON.stringify(requestBody)
});
return response.toObject();
}
public async push(message: string = null, branch: string = "master"): Promise<void> {
await this.pushTree(this.changes, message, branch);
this.changes = [];
}
public async pushTree(treeItems: IGithubTreeItem[], message: string = null, branch: string = "master"): Promise<IGithubReference> {
await this.ensureConfig();
console.log(`Pushing ${treeItems.length} files to branch ${branch}.`);
// get the head of the master branch
const heads = await this.getHeads();
// get the last commit
const lastCommitReference = _.last(heads).object;
const lastCommit = await this.getCommit(lastCommitReference.sha);
// create tree object (also implicitly creates a blob based on content)
const createTreeResponse = await this.createTree(lastCommit.tree.sha, treeItems);
if (!message) {
message = moment().format("MM/DD/YYYY, hh:mm:ss");
}
// create new commit
const newCommit = await this.createCommit(lastCommit.sha, createTreeResponse.sha, message);
// update branch to point to new commit
const head = await this.updateReference(branch, newCommit.sha);
return head;
}
public async getBlob(blobSha: string): Promise<IGithubBlob> {
await this.ensureConfig();
const response = await this.httpClient.send<IGithubGetBlobResponse>({
url: `${this.baseRepositoriesUrl}/blobs/${blobSha}`,
method: HttpMethod.get,
headers: this.mandatoryHttpHeaders
});
const getBlobReponse = response.toObject();
const blob: IGithubBlob = {
content: atob(getBlobReponse.content),
path: ""
};
return blob;
}
public async createBlob(path: string, content: Uint8Array): Promise<IGithubCreateBlobReponse> {
await this.ensureConfig();
const base64 = Utils.arrayBufferToBase64(content);
const requestBody = {
content: base64,
encoding: "base64"
};
const httpResponse = await this.httpClient.send<IGithubCreateBlobReponse>({
url: `${this.baseRepositoriesUrl}/blobs`,
method: HttpMethod.post,
headers: this.mandatoryHttpHeaders,
body: JSON.stringify(requestBody)
});
const response = httpResponse.toObject();
const treeItem: IGithubTreeItem = {
path: path,
sha: response.sha
};
this.changes.push(treeItem);
return response;
}
public async getLatestCommitTree(): Promise<IGithubGetTreeResponse> {
await this.ensureConfig();
// get the head of the master branch
const heads = await this.getHeads();
// get the last commit
const lastCommitReference: IGithubObject = _.last(heads).object;
const lastCommit = await this.getCommit(lastCommitReference.sha);
// get the last commit tree
const getTreeResponse = await this.getTree(lastCommit.tree.sha);
getTreeResponse.lastCommit = lastCommit;
return getTreeResponse;
}
public async getLatestCommit(): Promise<IGithubCommit> {
await this.ensureConfig();
// get the head of the master branch
const heads = await this.getHeads();
const lastCommitReference: IGithubObject = _.last(heads).object;
// get the last commit
const commit = await this.getCommit(lastCommitReference.sha);
return commit;
}
} |
(JTA) - A Palestinian push to try Israeli officials for war crimes at a United Nations tribunal would end any chance of reaching a peace deal, Israeli Prime Minister Benjamin Netanyahu said.
Netanyahu spoke to Army Radio on Friday, a day after the Palestinian Authority's envoy to the United Nations said his government would join the International Criminal Court if the U.N. Security Council refuses to set a deadline for Israel to withdraw from all Palestinian territories.
"We may end up there," Netanyahu said of the prospect of war crimes charges being brought against Israel at the Hague-based U.N. tribunal. "If Abu Mazen attempts it, this will have dire consequences," he added, using another name for Palestinian President Mahmoud Abbas. "He could bring the Hague to do it, bringing us to the destruction of any chance of a sane peace deal."
On Thursday, Riyad Mansour, the Palestinian Authority's U.N. envoy, told the Associated Press that his government has turned to the Security Council "to force Israel to negotiate in good faith the end of the occupation within a time frame."
The Palestinian Authority hopes the council will adopt a draft resolution setting November 2016 as the deadline for an Israeli pullout from the West Bank, east Jerusalem and Gaza.
"But if this additional door of peace is closed before us, then we will not only join the ICC to seek accountability," Mansour said. "We will join other treaties and agencies" to build evidence "that we exist as a nation, we exist as a state, although the land of our state is under occupation." |
def ward_count(df, col, value, unique_id, ward_num):
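    # Number of rows in ward `ward_num` where `col` equals `value` (counted via
    # non-null `unique_id` entries); returns 0 if the ward has no matching rows.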
rows = df[ df[col] == value]
if ward_num not in rows['Ward'].values:
return 0
grouped = rows.groupby('Ward')
return grouped[unique_id].count()[ward_num] |
def payloader(input_name, value, fields, settings):
    # Fall back to the input name so value_name is always defined, even when
    # the field carries no explicit "name" entry.
    value_name = fields["name"] if "name" in fields else input_name
    input_setting = settings[input_name]
    # Default payload; overridden below when a configured or enumerated value applies.
    payload = {value_name: value}
    if "value" in input_setting:
        if value_name in input_setting["value"]:
            set_value = input_setting["value"][value_name]
            payload = {value_name: set_value}
    elif "possibleValues" in fields:
        possible = fields["possibleValues"]
        payload = {value_name: possible[value]}
    return payload
import { formatTimeString } from '.';
describe('timeFormatting', () => {
const date = new Date(2021, 6, 14, 13, 26, 39);
it('returns locale time string', () => {
const result = formatTimeString(date);
expect(result).toBe('1:26 PM');
});
it('returns locale time string with seconds', () => {
const result = formatTimeString(date, true);
expect(result).toBe('1:26:39 PM');
});
it('returns locale time string with 12-hour time', () => {
const result = formatTimeString(date, false, true);
expect(result).toBe('1:26 PM');
});
it('returns locale time string with 24-hour time', () => {
const result = formatTimeString(date, false, false);
expect(result).toBe('13:26');
});
it('returns locale time string with seconds and 12-hour time', () => {
const result = formatTimeString(date, true, true);
expect(result).toBe('1:26:39 PM');
});
it('returns locale time string with seconds and 24-hour time', () => {
const result = formatTimeString(date, true, false);
expect(result).toBe('13:26:39');
});
});
|
// TestListDir1 verifies that List() returns files and subdirectories in a directory.
func (s *StorageTester) TestListDir1() {
s.insertStandardFiles()
files, dirs, err := s.Storage.List("dir1")
s.Nil(err)
s.ElementsMatch([]string{"dir1/file2", "dir1/file3"}, files)
s.ElementsMatch([]string{"dir1/dir4"}, dirs)
} |
/**
* Returns true if the XML parser must be in validation mode, false
* otherwise.
*/
public boolean isXMLParserValidating() {
Boolean b = (Boolean)SVGAbstractTranscoder.this.hints.get
(KEY_XML_PARSER_VALIDATING);
if (b != null)
return b.booleanValue();
return super.isXMLParserValidating();
} |
Overlaps between the various biodegradation pathways in Sphingomonas subarctica SA1.
A bacterium capable of growing on sulfanilic acid as sole carbon, nitrogen and sulfur source has been isolated. A unique feature of this strain is that it contains the full set of enzymes necessary for the biodegradation of sulfanilic acid. Taxonomical analysis identified our isolate as Sphingomonas subarctica SA1 sp. The biodegradation pathway of sulfanilic acid was investigated at the molecular level. Screening the substrate specificity of the strain disclosed its capacity to degrade six analogous aromatic compounds including p-aminobenzoic acid. Moreover, the strain was successfully used for the removal of oil contamination. S. subarctica SA1 seemed to use distinct enzyme cascades for decomposition of these molecules, since alternative enzymes were induced in cells grown on various substrates. However, the protein patterns appearing upon induction by sulfanilic acid and sulfocatechol were very similar to each other, indicating common pathways for the degradation of these substrates. Cells grown on sulfanilic acid could convert p-aminobenzoic acid to some extent and vice versa. Two types of ring-cleaving dioxygenases were detected in the cells grown on various substrates: one preferred protocatechol, while the other had higher activity with sulfocatechol. This latter enzyme, named sulfocatechol dioxygenase, was partially purified and characterized. |
/**
* Configuration class for {@link SqlGenerator}.
*
* @author Kazuki Shimizu
* @since 1.0.2
*/
public class SqlGeneratorConfig {
private static class PropertyKeys {
private static final String CONFIG_FILE = "mybatis-thymeleaf.config.file";
private static final String CONFIG_ENCODING = "mybatis-thymeleaf.config.encoding";
}
private static class Defaults {
private static final String PROPERTIES_FILE = "mybatis-thymeleaf.properties";
}
private static final Map<Class<?>, Function<String, Object>> TYPE_CONVERTERS;
static {
Map<Class<?>, Function<String, Object>> converters = new HashMap<>();
converters.put(boolean.class, v -> Boolean.valueOf(v.trim()));
converters.put(String.class, String::trim);
converters.put(Character[].class, v -> Stream.of(v.split(",")).map(String::trim).filter(e -> e.length() == 1)
.map(e -> e.charAt(0)).toArray(Character[]::new));
converters.put(Character.class, v -> v.trim().charAt(0));
converters.put(Charset.class, v -> Charset.forName(v.trim()));
converters.put(Long.class, v -> Long.valueOf(v.trim()));
converters.put(String[].class, v -> Stream.of(v.split(",")).map(String::trim).toArray(String[]::new));
converters.put(Class.class, SqlGeneratorConfig::toClassForName);
TYPE_CONVERTERS = Collections.unmodifiableMap(converters);
}
/**
* Whether use the 2-way SQL feature.
*/
private boolean use2way = true;
/**
* The interface for customizing a default TemplateEngine instanced by the mybatis-thymeleaf.
*/
private Class<? extends TemplateEngineCustomizer> customizer;
/**
* Template file configuration.
*/
private final TemplateFileConfig templateFile = new TemplateFileConfig();
/**
* Dialect configuration.
*/
private final DialectConfig dialect = new DialectConfig();
/**
* Get whether use the 2-way SQL feature.
* <p>
* Default is {@code true}.
* </p>
*
* @return If use the 2-way SQL feature, return {@code true}
*/
public boolean isUse2way() {
return use2way;
}
/**
* Set whether use the 2-way SQL feature.
*
* @param use2way
* If use the 2-way SQL feature, set {@code true}
*/
public void setUse2way(boolean use2way) {
this.use2way = use2way;
}
/**
* Get the interface for customizing a default TemplateEngine instanced by the mybatis-thymeleaf.
* <p>
* Default is {@code null}.
* </p>
*
* @return the interface for customizing a default TemplateEngine
*/
public Class<? extends TemplateEngineCustomizer> getCustomizer() {
return customizer;
}
/**
* Set the interface for customizing a default TemplateEngine instanced by the mybatis-thymeleaf.
*
* @param customizer
* the interface for customizing a default TemplateEngine
*/
public void setCustomizer(Class<? extends TemplateEngineCustomizer> customizer) {
this.customizer = customizer;
}
/**
* Get a template file configuration.
*
* @return a template file configuration
*/
public TemplateFileConfig getTemplateFile() {
return templateFile;
}
/**
* Get a dialect configuration.
*
* @return a dialect configuration
*/
public DialectConfig getDialect() {
return dialect;
}
/**
* Template file configuration.
*
* @since 1.0.0
*/
public static class TemplateFileConfig {
/**
* The character encoding for reading template resource file.
*/
private Charset encoding = StandardCharsets.UTF_8;
/**
* The base directory for reading template resource file.
*/
private String baseDir = "";
/**
* The patterns for reading as template resource file. (Can specify multiple patterns using comma(",") as separator
* character)
*/
private String[] patterns = { "*.sql" };
/**
* Whether use the cache feature when load template resource file.
*/
private boolean cacheEnabled = true;
/**
* The cache TTL(millisecond) for resolved templates.
*/
private Long cacheTtl;
/**
* Get the character encoding for reading template resource file.
* <p>
* Default is {@code UTF-8}.
* </p>
*
* @return the character encoding for reading template resource file
*/
public Charset getEncoding() {
return encoding;
}
/**
* Set the character encoding for reading template resource file.
*
* @param encoding
* the character encoding for reading template resource file
*/
public void setEncoding(Charset encoding) {
this.encoding = encoding;
}
/**
* Get the base directory for reading template resource file.
* <p>
* Default is {@code ""}(none).
* </p>
*
* @return the base directory for reading template resource file
*/
public String getBaseDir() {
return baseDir;
}
/**
* Set the base directory for reading template resource file.
*
* @param baseDir
* the base directory for reading template resource file
*/
public void setBaseDir(String baseDir) {
this.baseDir = baseDir;
}
/**
* Get patterns for reading as template resource file.
* <p>
* Default is {@code "*.sql"}.
* </p>
*
* @return patterns for reading as template resource file
*/
public String[] getPatterns() {
return patterns;
}
/**
* Set patterns for reading as template resource file.
*
* @param patterns
* patterns for reading as template resource file
*/
public void setPatterns(String... patterns) {
this.patterns = patterns;
}
/**
* Get whether use the cache feature when load template resource file.
* <p>
* Default is {@code true}.
* </p>
*
   * @return If use the cache feature, return {@code true}
*/
public boolean isCacheEnabled() {
return cacheEnabled;
}
/**
* Set whether use the cache feature when load template resource file.
*
* @param cacheEnabled
   *          If use the cache feature, set {@code true}
*/
public void setCacheEnabled(boolean cacheEnabled) {
this.cacheEnabled = cacheEnabled;
}
/**
* Get the cache TTL(millisecond) for resolved templates.
* <p>
* Default is {@code null}(indicate to use default value of Thymeleaf).
* </p>
*
* @return the cache TTL(millisecond) for resolved templates
*/
public Long getCacheTtl() {
return cacheTtl;
}
/**
* Set the cache TTL(millisecond) for resolved templates.
*
* @param cacheTtl
* the cache TTL(millisecond) for resolved templates
*/
public void setCacheTtl(Long cacheTtl) {
this.cacheTtl = cacheTtl;
}
}
/**
* Dialect configuration.
*
* @since 1.0.0
*/
public static class DialectConfig {
/**
* The prefix name of dialect provided by this project.
*/
private String prefix = "mb";
/**
* The escape character for wildcard of LIKE condition.
*/
private Character likeEscapeChar = '\\';
/**
* The format of escape clause for LIKE condition (Can specify format that can be allowed by String#format method).
*/
private String likeEscapeClauseFormat = "ESCAPE '%s'";
/**
* Additional escape target characters(custom wildcard characters) for LIKE condition. (Can specify multiple
* characters using comma(",") as separator character)
*/
private Character[] likeAdditionalEscapeTargetChars;
/**
* The bind variable render.
*/
private Class<? extends BindVariableRender> bindVariableRender;
/**
* Get the prefix name of dialect provided by this project.
* <p>
* Default is {@code "mb"}.
* </p>
*
* @return the prefix name of dialect
*/
public String getPrefix() {
return prefix;
}
/**
* Set the prefix name of dialect provided by this project.
*
* @param prefix
* the prefix name of dialect
*/
public void setPrefix(String prefix) {
this.prefix = prefix;
}
/**
* Get the escape character for wildcard of LIKE condition.
* <p>
* Default is {@code '\'}.
* </p>
*
* @return the escape character for wildcard
*/
public Character getLikeEscapeChar() {
return likeEscapeChar;
}
/**
* Set the escape character for wildcard of LIKE condition.
*
* @param likeEscapeChar
* the escape character for wildcard
*/
public void setLikeEscapeChar(Character likeEscapeChar) {
this.likeEscapeChar = likeEscapeChar;
}
/**
* Get the format of escape clause for LIKE condition.
* <p>
* Can specify format that can be allowed by String#format method. Default is {@code "ESCAPE '%s'"}.
* </p>
*
* @return the format of escape clause for LIKE condition
*/
public String getLikeEscapeClauseFormat() {
return likeEscapeClauseFormat;
}
/**
* Set the format of escape clause for LIKE condition.
*
* @param likeEscapeClauseFormat
* the format of escape clause for LIKE condition
*/
public void setLikeEscapeClauseFormat(String likeEscapeClauseFormat) {
this.likeEscapeClauseFormat = likeEscapeClauseFormat;
}
/**
* Get additional escape target characters(custom wildcard characters) for LIKE condition.
* <p>
* Can specify multiple characters using comma(",") as separator character. Default is empty(none).
* </p>
*
* @return additional escape target characters(custom wildcard characters)
*/
public Character[] getLikeAdditionalEscapeTargetChars() {
return likeAdditionalEscapeTargetChars;
}
/**
* Set additional escape target characters(custom wildcard characters) for LIKE condition.
*
* @param likeAdditionalEscapeTargetChars
* additional escape target characters(custom wildcard characters)
*/
public void setLikeAdditionalEscapeTargetChars(Character... likeAdditionalEscapeTargetChars) {
this.likeAdditionalEscapeTargetChars = likeAdditionalEscapeTargetChars;
}
/**
* Get a bind variable render.
* <p>
* Default is {@link BindVariableRender.BuiltIn#MYBATIS}
* </p>
*
* @return a bind variable render
*/
public Class<? extends BindVariableRender> getBindVariableRender() {
return bindVariableRender;
}
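/**
* Set a bind variable render.
*
* @param bindVariableRender
* a bind variable render
*/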
public void setBindVariableRender(Class<? extends BindVariableRender> bindVariableRender) {
this.bindVariableRender = bindVariableRender;
}
}
/**
* Create an instance from the default properties file. <br>
* If you want to customize the default {@code TemplateEngine}, you can configure properties using a
* mybatis-thymeleaf.properties file encoded in UTF-8. You can also change the properties file to read and its
* encoding using system properties (-Dmybatis-thymeleaf.config.file=... -Dmybatis-thymeleaf.config.encoding=...). <br>
* Supported properties are as follows:
* <table border="1">
* <caption>Supported properties</caption>
* <tr>
* <th>Property Key</th>
* <th>Description</th>
* <th>Default</th>
* </tr>
* <tr>
* <th colspan="3">General configuration</th>
* </tr>
* <tr>
* <td>use2way</td>
* <td>Whether to use the 2-way SQL</td>
* <td>{@code true}</td>
* </tr>
* <tr>
* <td>customizer</td>
* <td>The implementation class for customizing a default {@code TemplateEngine} instantiated by mybatis-thymeleaf
* </td>
* <td>None</td>
* </tr>
* <tr>
* <th colspan="3">Template file configuration</th>
* </tr>
* <tr>
* <td>template-file.cache-enabled</td>
* <td>Whether to use the cache feature</td>
* <td>{@code true}</td>
* </tr>
* <tr>
* <td>template-file.cache-ttl</td>
* <td>The cache TTL for resolved templates</td>
* <td>None(use default value of Thymeleaf)</td>
* </tr>
* <tr>
* <td>template-file.encoding</td>
* <td>The character encoding for reading template resources</td>
* <td>{@code "UTF-8"}</td>
* </tr>
* <tr>
* <td>template-file.base-dir</td>
* <td>The base directory for reading template resources</td>
* <td>None(just under class path)</td>
* </tr>
* <tr>
* <td>template-file.patterns</td>
* <td>The patterns for reading as template resources</td>
* <td>{@code "*.sql"}</td>
* </tr>
* <tr>
* <th colspan="3">Dialect configuration</th>
* </tr>
* <tr>
* <td>dialect.prefix</td>
* <td>The prefix name of dialect provided by this project</td>
* <td>{@code "mb"}</td>
* </tr>
* <tr>
* <td>dialect.like-escape-char</td>
* <td>The escape character for wildcard of LIKE</td>
* <td>{@code '\'} (backslash)</td>
* </tr>
* <tr>
* <td>dialect.like-escape-clause-format</td>
* <td>The format of escape clause</td>
* <td>{@code "ESCAPE '%s'"}</td>
* </tr>
* <tr>
* <td>dialect.like-additional-escape-target-chars</td>
* <td>The additional escape target characters(custom wildcard characters) for LIKE condition</td>
* <td>None</td>
* </tr>
* </table>
*
* @return a configuration instance
*/
public static SqlGeneratorConfig newInstance() {
SqlGeneratorConfig config = new SqlGeneratorConfig();
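// The default properties file, or an alternative one (illustrative example:
// -Dmybatis-thymeleaf.config.file=custom-mybatis-thymeleaf.properties), is resolved
// inside applyDefaultProperties/loadDefaultProperties below.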
applyDefaultProperties(config);
return config;
}
/**
* Create an instance from a specified properties file. <br>
* You can configure properties using the specified properties file encoded in UTF-8. You can also change the file
* encoding used for reading via the system property (-Dmybatis-thymeleaf.config.encoding=...).
*
* @param resourcePath
* A property file resource path
* @return a configuration instance
* @see #newInstance()
*/
public static SqlGeneratorConfig newInstanceWithResourcePath(String resourcePath) {
SqlGeneratorConfig config = new SqlGeneratorConfig();
applyResourcePath(config, resourcePath);
return config;
}
/**
* Create an instance from specified properties.
*
* @param customProperties
* custom configuration properties
* @return a configuration instance
* @see #newInstance()
*/
public static SqlGeneratorConfig newInstanceWithProperties(Properties customProperties) {
SqlGeneratorConfig config = new SqlGeneratorConfig();
applyProperties(config, customProperties);
return config;
}
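// Usage sketch for newInstanceWithProperties (illustrative values; the keys follow
// the table documented on newInstance()):
//
//   Properties props = new Properties();
//   props.setProperty("use2way", "true");
//   props.setProperty("template-file.base-dir", "sqls");
//   props.setProperty("dialect.prefix", "mb");
//   SqlGeneratorConfig config = SqlGeneratorConfig.newInstanceWithProperties(props);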
/**
* Create an instance using the specified customizer, then override it using the default properties file.
*
* @param customizer
* baseline customizer
* @return a configuration instance
* @see #newInstance()
*/
public static SqlGeneratorConfig newInstanceWithCustomizer(Consumer<SqlGeneratorConfig> customizer) {
SqlGeneratorConfig config = new SqlGeneratorConfig();
customizer.accept(config);
applyDefaultProperties(config);
return config;
}
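// Usage sketch for newInstanceWithCustomizer (illustrative): programmatic defaults
// are applied first and can then be overridden by the default properties file.
//
//   SqlGeneratorConfig config = SqlGeneratorConfig.newInstanceWithCustomizer(c ->
//       c.getDialect().setPrefix("sql"));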
/**
* Apply properties read from the default properties file. <br>
* If you want to customize the default {@code TemplateEngine}, you can configure properties using a
* mybatis-thymeleaf.properties file encoded in UTF-8. You can also change the properties file to read and its
* encoding using system properties (-Dmybatis-thymeleaf.config.file=... -Dmybatis-thymeleaf.config.encoding=...).
*/
static <T extends SqlGeneratorConfig> void applyDefaultProperties(T config) {
applyProperties(config, loadDefaultProperties());
}
/**
* Apply properties read from a specified properties file. <br>
* You can configure properties using the specified properties file encoded in UTF-8. You can also change the file
* encoding used for reading via the system property (-Dmybatis-thymeleaf.config.encoding=...).
*
* @param resourcePath
* A property file resource path
*/
static <T extends SqlGeneratorConfig> void applyResourcePath(T config, String resourcePath) {
Properties properties = loadDefaultProperties();
properties.putAll(loadProperties(resourcePath));
applyProperties(config, properties);
}
/**
* Apply properties from specified properties.
*
* @param config
* a configuration instance
* @param customProperties
* custom configuration properties
*/
static <T extends SqlGeneratorConfig> void applyProperties(T config, Properties customProperties) {
Properties properties = loadDefaultProperties();
Optional.ofNullable(customProperties).ifPresent(properties::putAll);
override(config, properties);
}
/**
* Create a new instance of the specified type using its default constructor.
*
* @param type
* a target type
* @param <T>
* a target type
* @return new instance of target type
*/
static <T> T newInstanceForType(Class<T> type) {
try {
return type.getConstructor().newInstance();
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
throw new IllegalStateException("Cannot create an instance for class: " + type, e);
}
}
private static void override(SqlGeneratorConfig config, Properties properties) {
PropertyAccessor standardPropertyAccessor = PropertyAccessor.BuiltIn.STANDARD;
try {
properties.forEach((key, value) -> {
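// A kebab-case key such as "template-file.cache-enabled" (from the documented
// property table) is converted into a nested property path and applied to the
// matching configuration object (here, the TemplateFileConfig).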
String propertyPath = StringUtils.unCapitalize(StringUtils.capitalizeWords(key, "-").replaceAll("-", ""));
try {
Object target = config;
String propertyName;
if (propertyPath.indexOf('.') != -1) {
String[] propertyPaths = StringUtils.split(propertyPath, ".");
propertyName = propertyPaths[propertyPaths.length - 1];
for (String path : Arrays.copyOf(propertyPaths, propertyPaths.length - 1)) {
target = standardPropertyAccessor.getPropertyValue(target, path);
}
} else {
propertyName = propertyPath;
}
Object convertedValue = TYPE_CONVERTERS
.getOrDefault(standardPropertyAccessor.getPropertyType(target.getClass(), propertyName), v -> v)
.apply(value.toString());
standardPropertyAccessor.setPropertyValue(target, propertyName, convertedValue);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(
String.format("Detected an invalid property. key='%s' value='%s'", key, value), e);
}
});
} finally {
StandardPropertyAccessor.clearCache();
}
}
private static Properties loadDefaultProperties() {
return loadProperties(System.getProperty(PropertyKeys.CONFIG_FILE, Defaults.PROPERTIES_FILE));
}
private static Properties loadProperties(String resourcePath) {
Properties properties = new Properties();
Optional.ofNullable(ClassLoaderUtils.findResourceAsStream(resourcePath)).ifPresent(in -> {
Charset encoding = Optional.ofNullable(System.getProperty(PropertyKeys.CONFIG_ENCODING)).map(Charset::forName)
.orElse(StandardCharsets.UTF_8);
try (InputStreamReader inReader = new InputStreamReader(in, encoding);
BufferedReader bufReader = new BufferedReader(inReader)) {
properties.load(bufReader);
} catch (IOException e) {
throw new IllegalStateException(e);
}
});
return properties;
}
private static Class<?> toClassForName(String value) {
try {
return ClassLoaderUtils.loadClass(value.trim());
} catch (ClassNotFoundException e) {
throw new IllegalStateException(e);
}
}
}
# Thin wrapper delegating to the module's generic _hash helper (defined elsewhere).
def sha3_256(data: bytes) -> bytes:
    return _hash("sha3_256", data)
// internal/pkg/model/page.go
package model
// Page represents a paginated result set.
type Page struct {
Records interface{} `json:"records"`
Total int64 `json:"total"`
PageNum int `json:"pageNum"`
PageInfo
}
// PageInfo holds the pagination parameters.
type PageInfo struct {
PageIndex int `json:"pageIndex"`
PageSize int `json:"pageSize"`
}
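// SetPageNum derives the total number of pages from the record count using
// ceiling division by PageSize; a count of 0 yields 0 pages.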
func (p *Page) SetPageNum(count int64) {
if count == 0 {
p.PageNum = 0
return
}
c := int(count)
if c%p.PageSize == 0 {
p.PageNum = c / p.PageSize
} else {
p.PageNum = c/p.PageSize + 1
}
}
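// Usage sketch (illustrative): with PageSize set to 20 and 45 matching records,
// SetPageNum(45) sets PageNum to 3 (two full pages plus a partial third page).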
def _get_req_data():
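    """Collect request data from the current Flask request.

    Values are JSON-decoded where possible (the 'comment' field is kept verbatim)
    and the required argument 'consrc' is validated. Returns a tuple of
    (has_error, error_response, data).
    """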
data = request.form if request.form else json.loads(request.data)
for k, v in data.items():
try:
if k in ('comment',):
data[k] = v
else:
data[k] = json.loads(v)
except (ValueError, TypeError, KeyError):
data[k] = v
required_args = ['consrc']
for arg in required_args:
if arg not in data or data[arg] == '':
return True, make_json_response(
status=400,
success=0,
errormsg=_(
"Could not find the required parameter ({})."
).format(arg),
), data
return False, '', data
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
@TeleOp
public class RepresentobotTester extends LinearOpMode {
@Override
public void runOpMode() {
Representobot bot = new Representobot(this);
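// Representobot is a team-specific helper class that wraps the robot hardware
// and drive routines (assumed; defined elsewhere in this project).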
// wait for the start button to be pushed
waitForStart();
bot.startGyro();
//bot.goForward(0.4,10);
//bot.turnRight(40, 0.2);
bot.forwardToWall(6, 0.2);
}
}
import numpy as np
def __replace_in_h(Ho, Ho_fs, p):
    """Move p randomly chosen active entries of the boolean mask Ho to positions
    that are currently inactive in both Ho and Ho_fs; the swapped indices are
    returned alongside copies of the masks."""
    H = Ho.copy()
    H_fs = Ho_fs.copy()
    # Candidate positions: inactive in both masks.
    HC = np.where((Ho | Ho_fs) == False)[0]
    # Pick p active positions to switch off and p candidate positions to switch on.
    xis = np.random.choice(np.where(Ho)[0], size=p, replace=False)
    xjs = np.random.choice(HC, size=p, replace=False)
    H[xis] = False
    H[xjs] = True
    return H, H_fs, xis, xjs
package org.dotwebstack.framework.backend.rdf4j;
import static org.dotwebstack.framework.backend.rdf4j.shacl.NodeShapeFactory.createShapeFromModel;
import static org.dotwebstack.framework.backend.rdf4j.shacl.NodeShapeFactory.processInheritance;
import com.google.common.collect.ImmutableMap;
import graphql.schema.DataFetchingEnvironment;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import lombok.Cleanup;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.dotwebstack.framework.backend.rdf4j.Rdf4jProperties.RepositoryProperties;
import org.dotwebstack.framework.backend.rdf4j.shacl.NodeShape;
import org.dotwebstack.framework.backend.rdf4j.shacl.NodeShapeRegistry;
import org.dotwebstack.framework.core.helpers.ResourceLoaderUtils;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.util.Models;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.model.vocabulary.SHACL;
import org.eclipse.rdf4j.query.BooleanQuery;
import org.eclipse.rdf4j.query.GraphQuery;
import org.eclipse.rdf4j.query.QueryResults;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.config.RepositoryConfig;
import org.eclipse.rdf4j.repository.config.RepositoryImplConfig;
import org.eclipse.rdf4j.repository.manager.LocalRepositoryManager;
import org.eclipse.rdf4j.repository.sail.config.SailRepositoryConfig;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.sail.memory.config.MemoryStoreConfig;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.ResourcePatternUtils;
@Slf4j
@Configuration
@EnableConfigurationProperties(Rdf4jProperties.class)
class Rdf4jConfiguration {
public static final String LOCAL_REPOSITORY_ID = "local";
private static final String BASE_DIR_PREFIX = "rdf4j";
private static final String MODEL_PATH = "model";
private static final String MODEL_PATTERN = "/**.trig";
private static final String SPARQL_PATH = "sparql";
private static final String SPARQL_PATTERN = "/**.rq";
@Bean
public ConfigFactory configFactory() {
return new ConfigFactoryImpl();
}
@Bean
RepositoryAdapter localRepositoryAdapter(LocalRepositoryManager localRepositoryManager) {
return new RepositoryAdapter() {
@Override
public TupleQuery prepareTupleQuery(String repositoryId, DataFetchingEnvironment environment, String query) {
return localRepositoryManager.getRepository(repositoryId)
.getConnection()
.prepareTupleQuery(query);
}
@Override
public GraphQuery prepareGraphQuery(String repositoryId, DataFetchingEnvironment environment, String query,
List<String> subjectIris) {
return localRepositoryManager.getRepository(repositoryId)
.getConnection()
.prepareGraphQuery(query);
}
@Override
public BooleanQuery prepareBooleanQuery(String repositoryId, DataFetchingEnvironment environment, String query) {
return localRepositoryManager.getRepository(repositoryId)
.getConnection()
.prepareBooleanQuery(query);
}
@Override
public boolean supports(String repositoryId) {
return localRepositoryManager.hasRepositoryConfig(repositoryId);
}
};
}
@Bean
LocalRepositoryManager localRepositoryManager(@NonNull Rdf4jProperties rdf4jProperties,
@NonNull ConfigFactory configFactory, @NonNull ResourceLoader resourceLoader) throws IOException {
LOG.debug("Initializing repository manager");
File baseDir = Files.createTempDirectory(BASE_DIR_PREFIX)
.toFile();
LocalRepositoryManager repositoryManager = new LocalRepositoryManager(baseDir);
repositoryManager.init();
// Add & populate local repository
repositoryManager.addRepositoryConfig(createLocalRepositoryConfig());
populateLocalRepository(repositoryManager.getRepository(LOCAL_REPOSITORY_ID), resourceLoader);
// Add repositories from external config
if (rdf4jProperties.getRepositories() != null) {
rdf4jProperties.getRepositories()
.entrySet()
.stream()
.map(repositoryProperty -> createRepositoryConfig(repositoryProperty, configFactory))
.forEach(repositoryManager::addRepositoryConfig);
}
return repositoryManager;
}
@Bean
NodeShapeRegistry nodeShapeRegistry(@NonNull LocalRepositoryManager localRepositoryManager,
@NonNull Rdf4jProperties rdf4jProperties) {
Repository repository = localRepositoryManager.getRepository(LOCAL_REPOSITORY_ID);
Model shapeModel = QueryResults.asModel(repository.getConnection()
.getStatements(null, null, null, rdf4jProperties.getShape()
.getGraph()));
NodeShapeRegistry registry = new NodeShapeRegistry(rdf4jProperties.getShape()
.getPrefix());
Map<org.eclipse.rdf4j.model.Resource, NodeShape> nodeShapeMap = new HashMap<>();
Models.subjectIRIs(shapeModel.filter(null, RDF.TYPE, SHACL.NODE_SHAPE))
.forEach(subject -> createShapeFromModel(shapeModel, subject, nodeShapeMap));
nodeShapeMap.values()
.forEach(shape -> {
processInheritance(shape, nodeShapeMap);
registry.register(shape.getIdentifier(), shape);
});
return registry;
}
@Bean
public Map<String, String> queryReferenceRegistry(@NonNull ResourceLoader resourceLoader) throws IOException {
Map<String, String> result = new HashMap<>();
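// Each classpath resource matching "sparql/**.rq" is registered under its base file
// name, e.g. (illustrative) a file named findAll.rq becomes the key "findAll".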
Optional<URI> sparqlLocation = ResourceLoaderUtils.getResourceLocation(SPARQL_PATH);
if (sparqlLocation.isPresent()) {
Resource[] resourceList = ResourcePatternUtils.getResourcePatternResolver(resourceLoader)
.getResources(sparqlLocation.get() + SPARQL_PATTERN);
for (Resource resource : resourceList) {
String content = IOUtils.toString(resource.getInputStream(), StandardCharsets.UTF_8);
String fileName = resource.getFilename();
result.put(fileName.substring(0, fileName.lastIndexOf('.')), content);
}
}
return result;
}
private static RepositoryConfig createRepositoryConfig(Entry<String, RepositoryProperties> repositoryEntry,
ConfigFactory configFactory) {
String repositoryId = repositoryEntry.getKey();
RepositoryProperties repository = repositoryEntry.getValue();
RepositoryImplConfig repositoryImplConfig = configFactory.create(repository.getType(),
repository.getArgs() != null ? repository.getArgs() : ImmutableMap.of());
repositoryImplConfig.validate();
return new RepositoryConfig(repositoryId, repositoryImplConfig);
}
private static RepositoryConfig createLocalRepositoryConfig() {
SailRepositoryConfig repositoryConfig = new SailRepositoryConfig(new MemoryStoreConfig());
return new RepositoryConfig(LOCAL_REPOSITORY_ID, repositoryConfig);
}
private static void populateLocalRepository(Repository repository, ResourceLoader resourceLoader) {
ResourceLoaderUtils.getResourceLocation(MODEL_PATH)
.ifPresent(modelPath -> {
Resource[] resourceList;
try {
resourceList = ResourcePatternUtils.getResourcePatternResolver(resourceLoader)
.getResources(modelPath.toString() + MODEL_PATTERN);
} catch (IOException e) {
throw new UncheckedIOException("Error while loading local model.", e);
}
@Cleanup
RepositoryConnection con = repository.getConnection();
Arrays.stream(resourceList)
.filter(Resource::isReadable)
.filter(resource -> resource.getFilename() != null)
.forEach(modelResource -> {
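// Take the last dot-separated token of the file name as its extension
// (e.g. "data.trig" -> "trig") and map it to an RDF format.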
String fileExtension = Arrays.stream(modelResource.getFilename()
.split("\\."))
.reduce("", (s1, s2) -> s2);
RDFFormat format = FileFormats.getFormat(fileExtension);
if (format != null) {
LOG.debug("Adding '{}' into '{}' repository", modelResource.getFilename(), LOCAL_REPOSITORY_ID);
try {
con.add(modelResource.getInputStream(), "", format);
} catch (IOException e) {
throw new UncheckedIOException("Error while loading data.", e);
}
}
});
});
}
}
package me.chanjar.weixin.qidian.bean.dial;
import lombok.Data;
import me.chanjar.weixin.common.util.json.WxGsonBuilder;
import me.chanjar.weixin.qidian.bean.common.QidianResponse;
import java.util.List;
@Data
public class IVRListResponse extends QidianResponse {
private List<Ivr> node;
public static IVRListResponse fromJson(String json) {
return WxGsonBuilder.create().fromJson(json, IVRListResponse.class);
}
}
import {NgModule} from '@angular/core';
import { MatIconModule, MatIconRegistry } from '@angular/material/icon';
import {DomSanitizer} from '@angular/platform-browser';
import {HttpClientModule} from '@angular/common/http';
@NgModule({
imports: [
HttpClientModule,
MatIconModule
],
exports: [ MatIconModule ]
})
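/**
 * Registers the project's custom SVG icons with Angular Material's MatIconRegistry
 * so templates can reference them by name, e.g. <mat-icon svgIcon="github"></mat-icon>
 * (usage sketch; the icon names match those registered below).
 */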
export class IconModule {
/** Creates the module and registers the custom SVG icons on construction. */
constructor(private matIconRegistry: MatIconRegistry, private domSanitizer: DomSanitizer) {
this.registerIcons();
}
/** Registers the custom SVG icons (brand, npm, github) from the assets directory. */
private registerIcons() {
this.matIconRegistry.addSvgIcon(
'brand',
this.domSanitizer.bypassSecurityTrustResourceUrl('./assets/brand.svg')
);
this.matIconRegistry.addSvgIcon(
'npm',
this.domSanitizer.bypassSecurityTrustResourceUrl('./assets/npm.svg')
);
this.matIconRegistry.addSvgIcon(
'github',
this.domSanitizer.bypassSecurityTrustResourceUrl('./assets/github.svg')
);
}
}
Dr. Arnsten is professor in the Department of Neurobiology at the Yale University School of Medicine in New Haven, Connecticut. Dr. Berridge is professor in the Department of Psychology at the University of Wisconsin in Madison. Dr. McCracken is professor in the Division of Child and Adolescent Psychiatry, Neuropsychiatric Institute, and David Geffen School of Medicine at the University of California, Los Angeles.
Disclosures: Dr. Arnsten has a licensing agreement with Shire Development Inc for the development of guanfacine for the treatment of attention-deficit/hyperactivity disorder. She has received research funding from Shire, as well as performed scientific advisory, consulting, and speaking engagements with Shire. Dr. Berridge receives honoraria from Shire and research/grant support from the National Institutes of Health and the University of Wisconsin Graduate School. Dr. McCracken has served as a consultant to sanofi-aventis and Wyeth, is an expert witness for Novopharm, and receives research support from Eli Lilly. This article was supported by Shire Development, Inc, in Wayne, Pennsylvania.
Off-label disclosure: This article includes discussion of the following experimental medications for attention-deficit/hyperactivity disorder: immediate release forms of clonidine and guanfacine.
Acknowledgments: Editorial assistance was provided by Jennifer Steeber, PhD, of Health Learning Systems in Parsippany, New Jersey. The authors did not receive financial reimbursement for preparation of this article. This article was prepared by the authors with minimal editorial assistance and all ideas conveyed in this article represent those of the authors.
Please direct all correspondence to: Amy F.T. Arnsten, PhD, Yale Medical School, Department of Neurobiology, PO Box 208001, New Haven, CT 06520-8001; Tel: 203-785-4431; Fax: 203-785-5263; E-mail: [email protected].
Focus Points
• The prefrontal cortex (PFC) regulates attention and behavior; lesions to the PFC induce a profile of poor sustained attention, distractibility, impulsivity, disorganization, and poor planning.
• Functional and structural imaging studies of subjects with attention-deficit/hyperactivity disorder (ADHD) reveal differences in prefrontal cortical circuits and poor performance on PFC tasks.
• PFC function is robustly moderated by catecholamines: high or low levels of norepinephrine-engaging postsynaptic α2A adrenoceptors and dopamine-engaging D1 receptors are associated with reduced function.
• ADHD is highly heritable. Genetic studies suggest that several genes involved in catecholamine signaling may confer a portion of risk for ADHD, including some genes that have been associated with poor sustained attention and reduced executive functions.
• Most pharmacologic treatments for ADHD influence catecholamine neurotransmission. Therapeutic doses of stimulants increase norepinephrine and dopamine in key cortical regions that are presumed to lead to improved PFC function.
Abstract
Attention-deficit/hyperactivity disorder (ADHD) is a common childhood-onset neuropsychiatric disorder characterized by cardinal features of inattention, locomotor hyperactivity, and poor impulse control. Research indicates that ADHD is associated with alterations in the higher cortical circuits that mediate attention and behavioral control. Given the prominent role of the prefrontal association cortex in regulating cognition and behavior through its extensive connections to sensory and motor cortices as well as subcortical structures, impairments in prefrontal function are believed to underlie many of the behavioral features of ADHD. Prefrontal cortex is sensitively modulated by numerous neurotransmitters including catecholamines, and changes in levels of dopamine-1 and norepinephrine α2A-adrenoceptor stimulation are associated with prominent effects on prefrontal function. Effective treatments for ADHD (including stimulants, atomoxetine, and guanfacine) influence catecholamine signaling in the prefrontal cortex and are believed to ameliorate ADHD symptoms via their effects on improved prefrontal cortical regulation of attention and impulse control.
Introduction
Attention-deficit/hyperactivity disorder (ADHD) is a prevalent childhood-onset neuropsychiatric disorder characterized by inattention, poor impulse control, and hyperactivity. Recent advances in molecular and cognitive neuroscience are beginning to clarify how dysfunction in brain regions regulating higher-order processes may underlie many of the core features of ADHD. These prefrontal association regions regulate attention, impulse control, and cognitive and behavioral responses to various situations and stimuli. In particular, the right prefrontal cortex (PFC), which is essential for inhibiting inappropriate impulses,1 demonstrates structural and functional differences in imaging studies of subjects with ADHD.2 It is well established that prefrontal cortical function is tightly influenced by catecholamines, including norepinephrine (NE) and dopamine (DA),3 with a prominent dose-response relationship. Awareness of the small, but perhaps significant, role of several purported catecholamine genes as risk alleles for ADHD, and initial demonstrations of reduced dopamine release from in vivo studies,3,4 have contributed to continued hypotheses of catecholamine dysfunction in ADHD. Furthermore, the role of catecholamine dysfunction has found additional support from the mechanism of many ADHD treatments, since most medications that effectively treat ADHD affect catecholamine transmission in the PFC.5 These treatments are viewed as enhancing prefrontal cortical regulation of attention and behavior, thereby ameliorating ADHD symptoms.5 Key elements of this increasingly cohesive story are reviewed here.
The Association Cortices Control and Coordinate Different Aspects of Attention
Characterizing the nature of impaired attention in ADHD has been a challenging problem for many years, in large part due to the varieties of attentional and cognitive processes impacted in ADHD, a lack of specificity in characterizing ADHD symptoms, and the difficulties inherent in isolating components of attention or cognitive control in experimental paradigms.6,7 The diagnostic term "inattention" can be confusing because it can describe several aspects of impaired attention. In the context of ADHD, inattention usually means inadequate regulation of attention: ADHD patients are easily distracted and find it hard to pay attention for long periods, and their attention is easily disrupted by stimuli that would not be bothersome to people without ADHD. However, inattention may also describe difficulty perceiving items or events that would normally demand attention (such as an oncoming car) even when there are no competing stimuli to draw attention away. This pattern of impairment implies an inability to properly allocate attentional resources. These different aspects of attention are mediated by distinct, but interconnected, regions of the association cortices (Figure 1). The temporal and parietal association cortices are responsible for so-called "bottom-up" attention processes, which are allocated based on the salience of the stimulus, eg, whether it is moving or brightly colored.8 In contrast, the prefrontal association cortices are responsible for so-called "top-down" attention, which is allocated based on relevance to a task at hand and internal goals.9-11
Temporal Association Cortices
The temporal association cortices process visual and auditory information: the inferior regions are devoted to visual processing and the superior regions mediate auditory processing. Extensive research on the visual cortices has shown that the processing of visual stimuli proceeds as a "ventral stream" of information from the primary visual cortex in the occipital lobe through progressive levels of analyses by the inferior temporal cortices. Damage to the inferior temporal cortices in both hemispheres can result in agnosia, an inability to recognize and attach meaning to sensory information.9 Physiologic recordings of neuronal activity in monkeys support the importance of this cortical region in visual recognition, and distinct columns of neurons are devoted to specific stimulus features (eg, a face in profile versus a face in full view).12 Interestingly, repeated exposure to the same visual stimulus leads to gradual attenuation of the physiologic response such as seen with neuronal firing patterns,13 which may relate to the physiologic challenge of sustaining attention under conditions of low novelty or salience. Interference from nearby stimuli in the same visual field also diminishes processing of visual stimuli, perhaps reflecting the behavioral phenomenon of distraction.10 Although these two suppressive mechanisms arise from intrinsic properties of inferior temporal neurons, they can be overridden by inputs from the PFC via "top-down" projections that allow for directed selective attention of visual feature processing.
Parietal Association Cortices
A second "dorsal stream" of visual data proceeds from the primary visual cortex up into the parietal cortex, and is responsible for orienting attention in space and time.14 This pathway is devoted to analysis of movement,15 mapping spatial position,16 and allocating and orienting conscious attentional resources.17 In humans, the posterior right hemisphere of the parietal cortex allocates attention to parts of visual space, and lesions to this region result in contralateral neglect: the loss of awareness for the left side of visual space.9 Thus, this cortex contributes to the ability to "pay" attention. Interconnections between the temporal and parietal cortices permit the fusion of visual perceptions regarding position and features of a stimulus, providing a person with a cohesive, conscious experience.18,19 The temporal and parietal cortices also project information about visual features and spatial positions forward to the prefrontal cortices, which are the most highly evolved portions of the human brain.
Prefrontal Association Cortices
The PFC regulates attention and thought through massive projections back to the temporal and parietal association cortices, ie, "top-down" attention.20 The PFC provides the ability to inhibit distractions and gate sensory inputs based on internal goals21,22; it also facilitates sustained attention (especially over long delays),23 and inhibits interference from irrelevant information.24 Taken together, these critical organizing and modulating functions subserved by the PFC make up what is referred to as "cognitive control." Patients with prefrontal cortical lesions are easily distracted, have poor concentration and organization, have difficulty dividing or focusing attention, and are more vulnerable to disruption from interference, resembling many facets of the behavioral features of ADHD.25
The Prefrontal Cortex is Key for Behavioral Inhibition
The PFC is also essential for regulating behavior; lesions in this region can induce locomotor hyperactivity and impulsive responses (eg, on go/no-go tasks).26,27 As shown schematically in Figure 1, the PFC is able to guide behavioral output via massive projections to the motor cortices, to basal ganglia structures (including the caudate and subthalamic nucleus), and to the cerebellum (by way of the pons).28 In humans, the right inferior PFC is particularly important for behavioral inhibition29; functional imaging studies reveal activity in the right inferior PFC when subjects successfully inhibit or stop movements.30 A fascinating recent study31 in normal subjects showed that weakened cortical function induced by transcranial magnetic stimulation over the right inferior PFC actually impaired the ability to inhibit prepotent motor responses. Not surprisingly, lesions to this area in humans lead to poor impulse control.29
The Prefrontal Cortex Requires Optimal Levels of Norepinephrine and Dopamine for Proper Function
As shown in Figure 2,5,32 NE and DA are important components of arousal systems that arise from the brainstem and project across the entire cortical mantle, including the prefrontal cortices.33 NE acts at α1, α2, and β adrenoceptors, with the highest affinity for α2 adrenoceptors. There are three subtypes of α2 adrenoceptors: α2A, α2B, and α2C.34 The most prominent DA receptors in the prefrontal cortices are the dopamine D1 receptor family, which includes D1 and D5 receptors. These receptors are very similar and there are currently no drugs that possess selective D1 versus D5 affinity (thus, in this review, D1 refers to both D1 and D5). A second DA receptor family, the D2 receptor family, includes the D2, D3, and D4 receptors. Importantly, the D4 receptor possesses greater non-specific roles in catecholaminergic neurotransmission, since both NE and DA have high binding affinity for this receptor.
Optimal Level of Norepinephrine and Dopamine
The PFC requires an optimal level of NE and DA for proper function. Either too little (when we are drowsy) or too much (when we are stressed) markedly impairs prefrontal cortical regulation of behavior and thought (the so-called "inverted-U" dose-response relationship).35 Indeed, animal research has found that NE and DA depletion paradigms are associated with deficits of prefrontal cortical cognitive function as pronounced as removing the cortex itself.36 Given the non-linear nature of the NE/DA dose-response functions for the PFC, therapeutic strategies using agents modulating catecholaminergic regulation of cognition have evolved to consider achieving "optimal" NE and/or DA stimulation.
Receptor Actions
The beneficial effects of optimal levels of NE and DA release occur primarily through actions at α2A and D1 receptors, respectively. NE has its beneficial actions at α2A receptors that reside on prefrontal cortical neurons, postsynaptic to NE axons.37 Although previous research has emphasized the important role of presynaptic α2 receptors on NE neurons (which reduce NE cell firing and decrease NE release),38 it is now appreciated that there are massive numbers of postsynaptic α2A receptors as well,39 which are the sites of the enhancing effects of NE on prefrontal cortical function.37 Electron microscopic studies37,40 indicate that α2A and D1 receptors reside on separate dendritic spines on prefrontal cortical pyramidal cells, near synaptic inputs from other cortical cells. Stimulation of these receptors gates synaptic inputs to prefrontal neurons, with α2A adrenoceptor stimulation strengthening appropriate inputs, and D1 receptor stimulation weakening inappropriate inputs.35 Blockade of either the α2A or the D1 receptor dramatically impairs prefrontal cortical function.41,42 Recent studies43 in humans indicate that catecholamines enable the right inferior PFC to carry out behavioral regulation. Consistent with this observation, blockade of α2 receptors in the monkey PFC induces a profile of locomotor hyperactivity,44 poor impulse control,45 and weakened working memory needed to overcome distractors.42 Thus, insufficient α2A receptor stimulation in PFC mimics the profile of ADHD. In contrast, α2A receptor stimulation with guanfacine enhances prefrontal activity and function.37,46,47
However, excessive release of the catecholamines NE and DA (eg, during stress) markedly impairs prefrontal cortical function.48 High levels of NE release impair prefrontal function because NE begins to engage lower-affinity α1 and β receptors,49,50 while high levels of DA release stimulate abnormally high numbers of D1 receptors.51 Excessive D1 or α1 receptor stimulation suppresses prefrontal cell firing.51,52
ADHD is Associated with Impaired Prefrontal Cortical Function and Structure
Structure/Function Studies
Subjects with ADHD show deficits on tasks that depend on the PFC, including tests of cognitive control, such as working memory, sustained attention, and inhibitory control.29,53 Imaging studies of subjects with ADHD show small (≤5%) but consistently reduced volumes and reduced hemodynamic activation in response to challenges of the PFC2,54,55 that are particularly prominent on the right side, consistent with the important role of the right PFC in the regulation of behavior and attention.29 Reduced size also has been reported in brain regions that are components of prefrontal circuits, including the caudate and cerebellum.56 In addition, recent studies57,58 have reported disorganized white matter tracts emanating from PFC in subjects with ADHD, consistent with weaker prefrontal connectivity. In development, the PFC matures more slowly than less evolved brain regions, and there is preliminary evidence of slower prefrontal development in some subjects with ADHD.59 However clinically, ADHD is a lifelong disorder for many patients, and imaging studies continue to show evidence of atypical prefrontal function and reduced right prefrontal volume in adults with ADHD symptoms.60,61
The data described so far refer to patients with the combined subtype of ADHD, who have symptoms of both inattention and impulsivity/hyperactivity. There is some suggestion that a different neurobiologic basis may underlie the purely inattentive ADHD subtype. Research on the purely inattentive subtype of ADHD is in its early stages and may require evaluation tools that are better able to differentiate the aspects of attention that are mediated by the different association cortices. For example, the inattention rating scale from the Diagnostic and Statistical Manual of Mental Disorders, Fourth Edition,62 primarily assesses aspects of attention regulated by the PFC (distractibility, sustained attention) rather than those regulated by the temporal or parietal cortices. These issues may be very relevant to treatment, as the parietal and temporal association cortices are modulated differently than the prefrontal association cortices (eg, their functions are not improved by α2A-adrenoceptor stimulation).63
Genetic Studies of ADHD
ADHD is a highly heritable disorder, with estimates based on family and twin studies suggesting that risk attributable to genes may represent up to 60% to 70% of the overall risk for the disorder. Although many gene variants have been associated with increased risk of ADHD, to date the risk attributed to any one of the putative risk genes represents only a small fraction of the overall genetic liability.3 However, the putative risk genes are of interest with respect to treatment, as most involve genes encoding molecules involved in catecholamine signaling (eg, NE and DA receptors)64-67; NE and DA transporters64,65,68; and dopamine beta hydroxylase (DBH), the enzyme required for NE synthesis.65,69 For example, in one report, gene variants associated with differences in DBH activity are associated with reduced ability to regulate attention70 and with differences in executive functions.71 Such gene variants may be relevant to the mechanism of therapeutic benefit conferred by medications that affect or enhance NE actions at α2A adrenoceptors (see below). Nevertheless, many other putative risk genes involve genes unrelated to catecholamine synthesis and function, including some associations of gene variants of serotoninergic genes with ADHD risk.3 Despite some replications of association studies of ADHD for several risk genes, the contribution of any single gene to the development of ADHD symptoms appears to be small.
Treatments for ADHD Enhance Catecholamine Transmission in the Prefrontal Cortex
Recent biochemical studies5 have revealed that therapeutic doses of both stimulant and nonstimulant medications potentiate catecholamine neurotransmission in the PFC. Speculation of the mechanisms of therapeutic benefit of these agents in ADHD includes the notion that treatment may optimize catecholamine neurotransmission in patients with genetically mediated differences in NE and DA pathways. Alternatively, they may optimize levels of catecholaminergic neurotransmission in a way that compensates for differences in PFC function not directly related to altered catecholaminergic tone.
Stimulants
Amphetamines and methylphenidate are highly effective in treating attentional problems and the hyperactive/impulsive symptoms associated with ADHD.72 Importantly, when administered at low and clinically relevant doses to normal human subjects, these drugs have cognition-enhancing and activity-reducing effects similar to the effects seen in ADHD patients.72,73 Thus, stimulant actions in ADHD are not paradoxical, as frequently presumed. Stimulants block the action of both catecholamine transporters, the NE transporter (NET), and the DA transporter (DAT). In the PFC, where DAT levels are low, the NET clears both NE and DA.74
Early biochemical studies of amphetamine and methylphenidate in rodents employed inappropriately high doses that increase locomotor activity, impair prefrontal cortical function, and have sensitizing effects on pathways involved with drug abuse. More recent studies5,32,75 using substantially lower and clinically relevant doses have demonstrated reduced locomotor activity and improved prefrontal cortical cognitive function in rats, similar to their effects in humans. The action of these therapeutic doses appears to be especially prominent in the PFC, where there is substantially increased NE and DA release. In contrast, the effect on catecholamine release is much less pronounced in subcortical or other cortical regions.5,32 As Figure 2 illustrates, therapeutic doses of stimulants increased NE release more than DA release in the rat PFC; it is therefore inaccurate to refer to these agents as simply dopaminergic.5 Consistent with dual actions on both NE and DA, the cognitive-enhancing effects of these agents in rodents are blocked by either NE α2 or D1 receptor antagonists.75 These data indicate that stimulants have their effects through stimulation of both NE α2 and D1 receptors.76
These results also provide supporting evidence for the assessment of low abuse potential of therapeutic doses of stimulants when properly administered.77 In contrast to the effects of low doses of stimulants, higher doses impair prefrontal cortical function in a way similar to that observed following uncontrollable stress,75 and are probably relevant to the cognitive inflexibility that can occur with excessive doses of stimulant medication.78 High doses of stimulants also increase dopamine levels in the nucleus accumbens, considered a key neurobiologic "pathway" underlying the process of reward and conditioning thought to lead to the potential for drug abuse.5,32
In normal young adult human subjects, imaging studies show that therapeutic doses of stimulants improve prefrontal cortical functions and enhance the efficiency of prefrontal cortical activity73; a similar but more pronounced profile is observed in subjects with ADHD.79-81 Taken together, these animal and human studies indicate that stimulant actions in ADHD are not paradoxical, but are instead more apparent in this patient population because of their impaired attentional processes and higher impulsivity/activity levels at baseline.72,82
Nonstimulants
Atomoxetine selectively blocks the NET. However, it is important to note that the NET transports both NE and DA in the PFC; thus, atomoxetine has been shown to increase the levels of both catecholamines in the rat PFC.83 Preliminary data indicate that moderate doses of atomoxetine, like methylphenidate, improve prefrontal functions based on their activity at both NE α2 and D1 receptors, whereas higher doses can impair prefrontal function in some animals (AFT Arnsten, unpublished data, 2008). In addition, recent studies84 have shown that therapeutic doses of atomoxetine can strengthen performance on measures of response inhibition in normal controls as well as in subjects with ADHD. The therapeutic effect of atomoxetine is consistent with the ability of desipramine, a tricyclic antidepressant with high selectivity for the NET, to reduce ADHD-related symptoms.85
Guanfacine mimics the beneficial effects of NE at postsynaptic α2A adrenoceptors in PFC, strengthening regulation of attention and behavior.37 Animal studies46,86,87 have shown that acute guanfacine administration improves a wide range of prefrontal functions via direct actions within the PFC. Guanfacine strengthens synaptic inputs onto prefrontal neurons and enhances prefrontal network connectivity, thus improving prefrontal cortical regulation of attention and behavior.37 The beneficial effects of systemically administered guanfacine on prefrontal cortical function are independent of the drug's sedating actions,25 which probably involve all three α2-adrenoceptor subtypes as well as presynaptic α2A adrenoceptors on NE cell bodies and terminals.34 It should be noted that guanfacine has a lower affinity for the presynaptic α2A adrenoceptors than does clonidine,88 and this likely contributes to its reduced liability for sedative side effects. Clinically, guanfacine is currently prescribed off-label to both children and adults with ADHD, and has been shown in one large-scale, double-blind, placebo-controlled clinical trial89 of children with ADHD to improve ratings on an ADHD inattention and hyperactivity/impulsivity scale. Two other smaller, placebo-controlled trials90,91 suggest clinically relevant improvements in ratings of ADHD behaviors in adults with ADHD and in children with ADHD and comorbid chronic tic disorders. Clinical guidelines for ADHD treatment suggest that guanfacine and clonidine may be especially helpful in ADHD patients who cannot tolerate stimulant medications because of tics, or who may have prominent aggressive symptoms or drug abuse liability.90 As with stimulants, guanfacine can improve PFC-dependent behavior in normal subjects,92,93 but it is far more effective in individuals with impaired prefrontal abilities.25,93 In addition, treatment with guanfacine extended release (SPD503; Shire Development, Inc, Wayne, PA) has shown benefits in subjects who showed suboptimal response to stimulants.94,95
Clonidine is a nonselective α2-adrenoceptor agonist that can improve symptoms of ADHD96 and ADHD comorbid with tics.97 Clonidine also has been associated with the problematic side effects of prominent sedation and hypotension.96 Although clonidine and guanfacine have not been directly compared "head to head" in regard to efficacy in treating ADHD symptoms, they have been directly compared in regard to hypotensive and sedative properties. Clonidine is ~10 times more potent in lowering blood pressure than guanfacine, and is much more sedating than guanfacine in both human subjects98 and monkeys.99 Clonidine is less selective than guanfacine. It has high affinity for the α2B- and α2C-adrenoceptor subtypes as well as the α2A adrenoceptor,100 and also has high affinity for imidazoline I1 receptors.101 In addition, clonidine is 10 times more potent than guanfacine with regard to activity at presynaptic α2A adrenoreceptors.88 Since stimulation of α2B- and α2C-adrenoceptor subtypes may contribute to sedation,102,103 the nonselective profile and pronounced presynaptic activity of clonidine probably underlie its powerful sedating effects, while its activity at brainstem imidazoline I1 receptors is thought to contribute to its marked hypotensive actions.101
In summary, the stimulants and atomoxetine appear to have their therapeutic effects through indirect stimulation of prefrontal α2A and D1 receptors, while guanfacine and clonidine likely enhance prefrontal cortical regulation of behavior through direct stimulation of postsynaptic α2A adrenoceptors on prefrontal neurons.
Conclusion
The PFC plays a crucial role in regulating attention and behavior. Differences in prefrontal cortical structure and function, including altered catecholamine transmission, likely contribute to the etiology of ADHD symptoms. The PFC requires optimal levels of catecholamines for proper function: moderate levels of NE-engaging postsynaptic α2A adrenoceptors and DA-stimulating D1 receptors. Effective treatments for ADHD may optimize catecholamine signaling in PFC; both stimulants and atomoxetine have their effects through indirect stimulation of NE α2 and D1 receptors, while guanfacine mimics NE actions at postsynaptic α2A adrenoceptors in PFC. All of these treatments improve prefrontal cortical regulation of attention and behavior, thus reducing ADHD symptoms.
References
1. Robbins TW. Shifting and stopping: fronto-striatal substrates, neurochemical modulation and clinical implications. Philos Trans R Soc Lond B Biol Sci. 2007;362(1481):917-932.
2. Rubia K, Overmeyer S, Taylor E, et al. Hypofrontality in attention deficit hyperactivity disorder during higher-order motor control: a study with functional MRI. Am J Psychiatry. 1999;156(6):891-896.
3. Faraone SV, Perlis RH, Doyle AE, et al. Molecular genetics of attention-deficit/hyperactivity disorder. Biol Psychiatry. 2005;57(11):1313-1323.
4. Volkow ND, Wang GJ, Newcorn J, et al. Depressed dopamine activity in caudate and preliminary evidence of limbic involvement in adults with attention-deficit/hyperactivity disorder. Arch Gen Psychiatry. 2007;64(8):932-940.
5. Berridge CW, Devilbiss DM, Andrzejewski ME, et al. Methylphenidate preferentially increases catecholamine neurotransmission within the prefrontal cortex at low doses that enhance cognitive function. Biol Psychiatry. 2006;60(10):1111-1120.
6. Nigg JT. Neuropsychologic theory and findings in attention-deficit/hyperactivity disorder: the state of the field and salient challenges for the coming decade. Biol Psychiatry. 2005;57(11):1424-1435.
7. Posner MI, Petersen SE. The attention system of the human brain. Annu Rev Neurosci. 1990;13:25-42.
8. Knudsen EI. Fundamental components of attention. Annu Rev Neurosci. 2007;30:57-78.
9. Mesulam MM. From sensation to cognition [review]. Brain. 1998;121(Pt 6):1013-1052.
10. Desimone R. Visual attention mediated by biased competition in extrastriate visual cortex. Philos Trans R Soc Lond B Biol Sci. 1998;353(1373):1245-1255.
11. Buschman TJ, Miller EK. Top-down versus bottom-up control of attention in the prefrontal and posterior parietal cortices. Science. 2007;315(5820):1860-1862.
12. Fujita I, Tanaka K, Ito M, Cheng K. Columns for visual features of objects in monkey inferotemporal cortex. Nature. 1992;360(6402):343-346.
13. Desimone R. Neural mechanisms for visual memory and their role in attention. Proc Natl Acad Sci USA. 1996;93(24):13494-13499.
14. Coull JT, Nobre AC. Where and when to pay attention: the neural systems for directing attention to spatial locations and to time intervals as revealed by both PET and fMRI. J Neurosci. 1998;18(18):7426-7435.
15. Rudolph K, Pasternak T. Transient and permanent deficits in motion perception after lesions of cortical areas MT and MST in the macaque monkey. Cereb Cortex. 1999;9(1):90-100.
16. Snyder LH, Grieve KL, Brotchie P, Andersen RA. Separate body- and world-referenced representations of visual space in parietal cortex. Nature. 1998;394(6696):887-891.
17. Crowe DA, Chafee MV, Averbeck BB, Georgopoulos AP. Neural activity in primate parietal area 7a related to spatial analysis of visual mazes. Cereb Cortex. 2004;14(1):23-34.
18. Ungerleider LA. The corticocortical pathways for object recognition and spatial perception. In: Chagas C, Gattass R, Gross C, eds. Pattern Recognition Mechanisms. Rome, Italy: The Pontifical Academy of Sciences; 1985:21-37.
19. Friedman-Hill SR, Robertson LC, Treisman A. Parietal contributions to visual feature binding: evidence from a patient with bilateral lesions. Science. 1995;269(5225):853-855.
20. Gazzaley A, Rissman J, Cooney J, et al. Functional interactions between prefrontal and visual association cortex contribute to top-down modulation of visual processing. Cereb Cortex. 2007;17(suppl 1):i125-i135.
21. Chao LL, Knight RT. Human prefrontal lesions increase distractibility to irrelevant sensory inputs. Neuroreport. 1995;6(12):1605-1610.
22. Moore T, Armstrong KM. Selective gating of visual signals by microstimulation of frontal cortex. Nature. 2003;421(6921):370-373.
23. Wilkins AJ, Shallice T, McCarthy R. Frontal lesions and sustained attention. Neuropsychologia. 1987;25(2):359-365.
24. Bunge SA, Ochsner KN, Desmond JE, Glover GH, Gabrieli JD. Prefrontal regions involved in keeping information in and out of mind. Brain. 2001;124(Pt 10):2074-2086.
25. Arnsten AF, Steere JC, Hunt RD. The contribution of α2-noradrenergic mechanisms to prefrontal cortical cognitive function. Potential significance for attention-deficit hyperactivity disorder. Arch Gen Psychiatry. 1996;53(5):448-455.
26. French GM. Locomotor effects of regional ablations of frontal cortex in rhesus monkeys. J Comp Physiol Psychol. 1959;52(1):18-24.
27. Drewe EA. Go - no go learning after frontal lobe lesions in humans. Cortex. 1975;11(1):8-16.
28. Middleton FA, Strick PL. Basal ganglia and cerebellar loops: motor and cognitive circuits. Brain Res Brain Res Rev. 2000;31(2-3):236-250.
29. Clark L, Blackwell AD, Aron AR, et al. Association between response inhibition and working memory in adult ADHD: a link to right frontal cortex pathology? Biol Psychiatry. 2007;61(12):1395-1401.
30. Aron AR, Robbins TW, Poldrack RA. Inhibition and the right inferior frontal cortex. Trends Cog Sci. 2004;8(4):170-177.
31. Chambers CD, Bellgrove MA, Stokes MG, et al. Executive "brake failure" following deactivation of human frontal lobe. J Cogn Neurosci. 2006;18(3):444-455.
32. Kuczenski R, Segal DS. Exposure of adolescent rats to oral methylphenidate: preferential effects on extracellular norepinephrine and absence of sensitization and cross-sensitization to methamphetamine. J Neurosci. 2002;22(16):7264-7271.
33. Lewis DA. The catecholamine innervation of primate cerebral cortex. In: Solanto MV, Arnsten AF, Castellanos FX, eds. Stimulant Drugs and ADHD: Basic and Clinical Neuroscience. New York, NY: Oxford University Press; 2001:77-103.
34. MacDonald E, Kobilka BK, Scheinin M. Gene targeting-homing in on alpha 2-adrenoceptor-subtype function. Trends Pharmacol Sci. 1997;18(6):211-219.
35. Arnsten AF. Catecholamine and second messenger influences on prefrontal cortical networks of โrepresentational knowledgeโ: a rational bridge between genetics and the symptoms of mental illness. Cereb Cortex. 2007;17(suppl 1):i6-i15.
36. Brozoski TJ, Brown RM, Rosvold HE, Goldman PS. Cognitive deficit caused by regional depletion of dopamine in prefrontal cortex of rhesus monkey. Science. 1979;205(4409):929-932.
37. Wang M, Ramos BP, Paspalas CD, et al. ฮฑ2A-adrenoceptors strengthen working memory networks by inhibiting cAMP-HCN channel signaling in prefrontal cortex. Cell. 2007;129(2):397-410.
38. Cedarbaum JM, Aghajanian GK. Catecholamine receptors on locus coeruleus neurons: pharmacological characterization. Eur J Pharmacol. 1977;44(4):375-385.
39. UโPrichard DC, Bechtel WD, Rouot BM, Snyder SH. Multiple apparent alpha-noradrenergic receptor binding sites in rat brain: effect of 6-hydroxydopamine. Mol Pharmacol. 1979;16(1):47-60.
40. Smiley JF, Williams SM, Szigeti K, Goldman-Rakic PS. Light and electron microscopic characterization of dopamine-immunoreactive axons in human cerebral cortex. J Comp Neurol. 1992;321(3):325-335.
41. Sawaguchi T, Goldman-Rakic PS. D1 dopamine receptors in prefrontal cortex: involvement in working memory. Science. 1991;251(4996):947-950.
42. Li BM, Mei ZT. Delayed-response deficit induced by local injection of the ฮฑ2-adrenergic antagonist yohimbine into the dorsolateral prefrontal cortex in young adult monkeys. Behav Neural Biol. 1994;62(2):134-139.
43. Chamberlain SR, Muller U, Blackwell AD, Clark L, Robbins TW, Sahakian BJ. Neurochemical modulation of response inhibition and probabilistic learning in humans. Science. 2006;311(5762):861-863.
44. Ma CL, Arnsten AF, Li BM. Locomotor hyperactivity induced by blockade of prefrontal cortical ฮฑ2-adrenoceptors in monkeys. Biol Psychiatry. 2005;57(2):192-195.
45. Ma CL, Qi XL, Peng JY, Li BM. Selective deficit in no-go performance induced by blockade of prefrontal cortical 2-adrenoceptors in monkeys. Neuroreport. 2003;14(7):1013-1016.
46. Avery RA, Franowicz JS, Studholme C, van Dyck CH, Arnsten AF. The alpha-2A-adrenoceptor agonist, guanfacine, increases regional cerebral blood flow in dorsolateral prefrontal cortex of monkeys performing a spatial working memory task. Neuropsychopharmacology. 2000;23(3):240-249.
47. OโNeill J, Fitten LJ, Siembieda DW, Ortiz F, Halgren E. Effects of guanfacine on three forms of distraction in the aging macaque. Life Sci. 2000;67(8):877-885.
48. Arnsten AF. The biology of being frazzled. Science. 1998;280(5370):1711-1712.
49. Birnbaum S, Gobeske KT, Auerbach J, Taylor JR, Arnsten AF. A role for norepinephrine in stress-induced cognitive deficits: ฮฑ-1-adrenoceptor mediation in the prefrontal cortex. Biol Psychiatry. 1999;46(9):1266-1274.
50. Ramos BP, Colgan L, Nou E, Ovadia S, Wilson SR, Arnsten AF. The beta-1 adrenergic antagonist, betaxolol, improves working memory performance in rats and monkeys. Biol Psychiatry. 2005;58(11):894-900.
51. Vijayraghavan S, Wang M, Birnbaum SG, Williams GV, Arnsten AF. Inverted-U dopamine D1 receptor actions on prefrontal neurons engaged in working memory. Nat Neurosci. 2007;10(3):376-384.
52. Birnbaum SG, Yuan PX, Wang M, et al. Protein kinase C overactivity impairs prefrontal cortical regulation of working memory. Science. 2004;306(5697):882-884.
53. Loo SK, Humphrey LA, Tapio T, et al. Executive functioning among Finnish adolescents with attention-deficit/hyperactivity disorder. J Am Acad Child Adolesc Psychiatry. 2007;46(12):1594-1604.
54. Bush G, Valera EM, Seidman LJ. Functional neuroimaging of attention-deficit/hyperactivity disorder: a review and suggested future directions. Biol Psychiatry. 2005;57(11):1273-1284.
55. Sheridan MA, Hinshaw S, DโEsposito M. Efficiency of the prefrontal cortex during working memory in attention-deficit/hyperactivity disorder. J Am Acad Child Adolesc Psychiatry. 2007;46(10):1357-1366.
56. Castellanos FX, Lee PP, Sharp W, et al. Developmental trajectories of brain volume abnormalities in children and adolescents with attention-deficit/hyperactivity disorder. JAMA. 2002;288(14):1740-1748.
57. Casey BJ, Epstein JN, Buhle J, et al. Frontostriatal connectivity and its role in cognitive control in parent-child dyads with ADHD. Am J Psychiatry. 2007;164(11):1729-1736.
58. Makris N, Buka SL, Biederman J, et al. Attention and executive systems abnormalities in adults with childhood ADHD: A DT-MRI Study of connections. Cereb Cortex. 2008;18(5):11210-11220.
59. Shaw P, Eckstrand K, Sharp W, et al. Attention-deficit/hyperactivity disorder is characterized by a delay in cortical maturation. Proc Natl Acad Sci U S A. 2007;104(49):19649-19654.
60. Makris N, Biederman J, Valera EM, et al. Cortical thinning of the attention and executive function networks in adults with attention-deficit/hyperactivity disorder. Cereb Cortex. 2007;17(6):1364-1375.
61. Seidman LJ, Valera EM, Makris N, et al. Dorsolateral prefrontal and anterior cingulate cortex volumetric abnormalities in adults with attention-deficit/hyperactivity disorder identified by magnetic resonance imaging. Biol Psychiatry. 2006;60(10):1071-1080.
62. Diagnostic and Statistical Manual of Mental Disorders. 4th ed. Washington, DC: Americn Psychiatric Association; 2000.
63. Witte EA, Marrocco RT. Alteration of brain noradrenergic activity in rhesus monkeys affects the alerting component of covert orienting. Psychopharmacology (Berl). 1997;132(4):315-323.
64. Bobb AJ, Addington AM, Sidransky E, et al. Support for association between ADHD and two candidate genes: NET1 and DRD1. Am J Med Genet B Neuropsychiatr Genet. 2005;134(1):67-72.
65. Daly G, Hawi Z, Fitzgerald M, Gill M. Mapping susceptibility loci in attention deficit hyperactivity disorder: preferential transmission of parental alleles at DAT1, DBH and DRD5 to affected children. Mol Psychiatry. 1999;4(2):192-196.
66. Tahir E, Yazgan Y, Cirakoglu B, Ozbay F, Waldman I, Asherson PJ. Association and linkage of DRD4 and DRD5 with attention deficit hyperactivity disorder (ADHD) in a sample of Turkish children. Mol Psychiatry. 2000;5(4):396-404.
67. Kustanovich V, Ishii J, Crawford L, et al. Transmission disequilibrium testing of dopamine-related candidate gene polymorphisms in ADHD: confirmation of association of ADHD with DRD4 and DRD5. Mol Psychiatry. 2004;9(7):711-717.
68. Mill J, Caspi A, Williams BS, et al. Prediction of heterogeneity in intelligence and adult prognosis by genetic polymorphisms in the dopamine system among children with attention-deficit/hyperactivity disorder: evidence from 2 birth cohorts. Arch Gen Psychiatry. 2006;63(4):462-469.
69. Roman T, Schmitz M, Polanczyk GV, Eizirik M, Rohde LA, Hutz MH. Further evidence for the association between attention-deficit/hyperactivity disorder and the dopamine-beta-hydroxylase gene. Am J Med Genet. 2002;114(2):154-158.
70. Bellgrove MA, Hawi Z, Gill M, Robertson IH. The cognitive genetics of attention deficit hyperactivity disorder (ADHD): sustained attention as a candidate phenotype. Cortex. 2006;42(6):838-845.
71. Kieling C, Genro JP, Hutz MH, Rohde LA. The -1021 C/T DBH polymorphism is associated with neuropsychological performance among children and adolescents with ADHD. Am J Med Genet B Neuropsychiatr Genet. 2008;147B(4):485-490.
72. Rapoport JL, Inoff-Germain G. Responses to methylphenidate in attention-deficit/hyperactivity disorder and normal children: update 2002. J Atten Disord. 2002;6(suppl 1):S57-S60.
73. Mehta MA, Owen AM, Sahakian BJ, Mavaddat N, Pickard JD, Robbins TW. Methylphenidate enhances working memory by modulating discrete frontal and parietal lobe regions in the human brain. J Neurosci. 2000;20(6):RC65.
74. Sesack SR, Hawrylak VA, Matus C, Guido MA, Levey AI. Dopamine axon varicosities in the prelimbic division of the rat prefrontal cortex exhibit sparse immunoreactivity for the dopamine transporter. J Neurosci. 1998;18(7):2697-2708.
75. Arnsten AF, Dudley AG. Methylphenidate improves prefrontal cortical cognitive function through ฮฑ2 adrenoceptor and dopamine D1 receptor actions: relevance to therapeutic effects in attention deficit hyperactivity disorder. Behav Brain Funct. 2005;1(1):2.
76. Arnsten AFT, Castellanos FX. Neurobiology of attention regulation and its disorders. In: Martin A, Scahill L, Charney DS, Leckman JF, eds. Pediatric Psychopharmacology: Principles and Practice. New York, NY: Oxford University Press, Inc.; 2003:99-109.
77. Biederman J, Faraone SV, Milberger S, et al. Is childhood oppositional defiant disorder a precursor to adolescent conduct disorder? Findings from a four-year follow-up study of children with ADHD. J Am Acad Child Adolesc Psychiatry. 1996;35(9):1193-1204.
78. Dyme IZ, Sahakian BJ, Golinko BE, Rabe EF. Perseveration induced by methylphenidate in children: preliminary findings. Prog Neuropsychopharmacol Biol Psychiatry. 1982;6(3):269-273.
79. Aron AR, Dowson JH, Sahakian BJ, Robbins TW. Methylphenidate improves response inhibition in adults with attention-deficit/hyperactivity disorder. Biol Psychiatry. 2003;54(12):1465-1468.
80. Turner DC, Blackwell AD, Dowson JH, McLean A, Sahakian BJ. Neurocognitive effects of methylphenidate in adult attention-deficit/hyperactivity disorder. Psychopharmacology (Berl). 2005;178(2-3):286-295.
81. Bush G, Spencer TJ, Holmes J, et al. Functional magnetic resonance imaging of methylphenidate and placebo in attention-deficit/hyperactivity disorder during the multi-source interference task. Arch Gen Psychiatry. 2008;65(1):102-114.
82. Mehta MA, Sahakian BJ, Robbins TW. Comparative psychopharmacology of methylphenidate and related drugs in human volunteers, patients with ADHD, and experimental animals. In: Solanto MV, Arnsten AFT, Castellanos FX, eds. Stimulant Drugs and ADHD: Basic and Clinical Neuroscience. New York, NY: Oxford University Press; 2001:303-331.
83. Bymaster FP, Katner JS, Nelson DL, et al. Atomoxetine increases extracellular levels of norepinephrine and dopamine in prefrontal cortex of rat: a potential mechanism for efficacy in attention deficit/hyperactivity disorder. Neuropsychopharmacology. 2002;27(5):699-711.
84. Chamberlain SR, Del Campo N, Dowson J, et al. Atomoxetine improved response inhibition in adults with attention deficit/hyperactivity disorder. Biol Psychiatry. 2007;62(9):977-984.
85. Biederman J, Baldessarini RJ, Wright V, Knee D, Harmatz JS. A double-blind placebo controlled study of desipramine in the treatment of ADD: I. Efficacy. J Am Acad Child Adolesc Psychiatry. 1989;28(5):777-784.
86. Steere JC, Arnsten AF. The alpha-2A noradrenergic receptor agonist guanfacine improves visual object discrimination reversal performance in aged rhesus monkeys. Behav Neurosci. 1997;111(5):883-891.
87. Mao ZM, Arnsten AF, Li BM. Local infusion of an ฮฑ-1 adrenergic agonist into the prefrontal cortex impairs spatial working memory performance in monkeys. Biol Psychiatry. 1999;46(9):1259-1265.
88. Engberg G, Eriksson E. Effects of ฮฑ2-adrenoceptor agonists on locus coeruleus firing rate and brain noradrenaline turnover in N-ethoxycarbonyl-2-ethoxy-1,2-dihydroquinoline (EEDQ)-treated rats. Naunyn Schmiedebergs Arch Pharmacol. 1991;343(5):472-477.
89. Biederman J, Melmed RD, Patel A, et al, for the SPD503 Study Group. A randomized, double-blind, placebo-controlled study of guanfacine extended release in children and adolescents with attention-deficit/hyperactivity disorder. Pediatrics. 2008;121(1):e73-e84.
90. Scahill L, Chappell PB, Kim YS, et al. A placebo-controlled study of guanfacine in the treatment of children with tic disorders and attention deficit hyperactivity disorder. Am J Psychiatry. 2001;158(7):1067-1074.
91. Taylor FB, Russo J. Comparing guanfacine and dextroamphetamine for the treatment of adult attention-deficit/hyperactivity disorder. J Clin Psychopharmacol. 2001;21(2):223-228.
92. Franowicz JS, Arnsten AF. The alpha-2a noradrenergic agonist, guanfacine, improves delayed response performance in young adult rhesus monkeys. Psychopharmacology (Berl). 1998;136(1):8-14.
93. Jakala P, Riekkinen M, Sirvio J, et al. Guanfacine, but not clonidine, improves planning and working memory performance in humans. Neuropsychopharmacology. 1999;20(5):460-470.
94. Spencer T, Greenbaum M, Ginsberg LD, Murphy WR, Farrand K. Open-Label Coadministration of Guanfacine Extended Release and Stimulants in Children and Adolescents With Attention-Deficit/Hyperactivity Disorder. Poster presented at: The 161st Annual Meeting of the American Psychiatric Association; May 3-8, 2008; Washington, DC.
95. Sallee FR, McGough JJ, Wigal T, Sea D, Lyne A, Biederman J. Long-term safety and efficacy of guanfacine extended release in children and adolescents with attention-deficit/hyperactivity disorder. Poster presented at: The 63rd Annual Meeting of the Society of Biological Psychiatry; May 1-3, 2008; Washington, DC.
96. Hunt RD, Minderaa RB, Cohen DJ. Clonidine benefits children with attention deficit disorder and hyperactivity: report of a double-blind placebo-crossover therapeutic trial. J. Am Acad Child Psychiatry. 1985;24(5):617-629.
97. The Touretteโs Syndrome Study Group. Treatment of ADHD in children with tics: a randomized controlled trial. Neurology. 2002;58(4):527-536.
98. Sorkin EM, Heel RC. Guanfacine: A review of its pharmacodynamic and pharmacokinetic properties, and therapeutic efficacy in the treatment of hypertension. Drugs. 1986;31(4):301-336.
99. Arnsten AF, Cai J, Goldman-Rakic PS. The alpha-2 adrenergic agonist guanfacine improves memory in aged monkeys without sedative or hypotensive side effects. J Neurosci. 1988;8(11):4287-4298.
100. Uhlen S, Porter AC, Neubig RR. The novel alpha-2 adrenergic radioligand [3H]-MK912 is alpha-2C selective among human alpha-2A, alpha-2B and alpha-2C adrenoceptors. J Pharmacol Exp Ther. 1994;271(3):1558-1565.
101. van Zwieten PA, Chalmers JP. Different types of centrally acting antihypertensives and their targets in the central nervous system. Cardiovasc Drugs Ther. 1994;8(6):787-799.
102. Scahill L, Barloon L, Farkas L. Alpha-2 agonists in the treatment of attention deficit hyperactivity disorder. J Child Adolesc Psychiatr Nurs. 1999;12(4):168-173.
103. Arnsten AF. Catecholamine regulation of the prefrontal cortex. J Psychopharmacol. 1997;11(2):151-162. |
/**
* Extraction phase of the Gennaro-DKG.
* <p>
* - Initializes an instance of Feldman-VSS with the honest peers from the generation phase
* - Receives and checks the secret shares again
* - Resolves complaints
* - Generates output
*/
List<Step> extractionPhase() {
honestPartiesPedersen = pedersenVSS.getHonestParties();
honestPartiesFeldman = new HashSet<>();
final Map<Integer, DKGPeerCommunicator> honestPeers = new HashMap<>();
final Map<Integer, PartialSecretMessageDTO> secretsPedersen = pedersenVSS.getSecrets();
DishonestGennaroFeldmanVSS feldmanVSS = new DishonestGennaroFeldmanVSS(broadcaster, incoming,
honestPeers, id, params, logPrefix, pol1, secretsPedersen,
wrongCommitment, noCommitment, complainAgainstHonestParty);
return Arrays.asList(
new Step(
() -> peerMap.entrySet()
.stream().filter(e -> honestPartiesPedersen.contains(e.getKey()))
.forEach(e -> honestPeers.put(e.getKey(), e.getValue())),
0, SECONDS
),
new Step(feldmanVSS::startProtocol, 0, SECONDS),
new Step(feldmanVSS::handleReceivedValues, 10, SECONDS),
new Step(feldmanVSS::handleComplaints, 10, SECONDS),
new Step(() -> honestPartiesFeldman.addAll(feldmanVSS.getHonestParties()), 0, SECONDS),
new Step(() -> this.setResult(computeKeyPair(broadcaster, honestPartiesFeldman, feldmanVSS)), 0, SECONDS)
);
} |
package com.newsblur.util;
public class AppConstants {
// Enables high-volume logging that may be useful for debugging. This should
// never be enabled for releases, as it not only slows down the app considerably,
// it will log sensitive info such as passwords!
public static final boolean VERBOSE_LOG = false;
public static final int STATE_ALL = 0;
public static final int STATE_SOME = 1;
public static final int STATE_BEST = 2;
public static final int REGISTRATION_DEFAULT = 0;
public static final int REGISTRATION_STARTED = 1;
    public static final int REGISTRATION_COMPLETED = 2;
public static final String FOLDER_PRE = "folder_collapsed";
public static final float FONT_SIZE_LOWER_BOUND = 0.7f;
public static final float FONT_SIZE_INCREMENT_FACTOR = 8;
// the name to give the "root" folder in the local DB since the API does not assign it one.
// this name should be unique and such that it will sort to the beginning of a list, ideally.
public static final String ROOT_FOLDER = "0000_TOP_LEVEL_";
public static final String LAST_APP_VERSION = "LAST_APP_VERSION";
// the max number of mark-as-read ops to batch up before flushing to the server
// set to 1 to effectively disable batching
public static final int MAX_MARK_READ_BATCH = 1;
    // a pref for the time we completed the last full sync of the feed/folder list
public static final String LAST_SYNC_TIME = "LAST_SYNC_TIME";
// how long to wait before auto-syncing the feed/folder list
public static final long AUTO_SYNC_TIME_MILLIS = 10L * 60L * 1000L;
    // how many total attempts to make at a single API call
public static final int MAX_API_TRIES = 3;
// the base amount for how long to sleep during exponential API failure backoff
public static final long API_BACKOFF_BASE_MILLIS = 500L;
// when generating a request for multiple feeds, limit the total number requested to prevent
// unworkably long URLs
public static final int MAX_FEED_LIST_SIZE = 250;
}
|
The central bank of Ecuador reports that it has paid Chevron US$112 million as a settlement on a contract dispute dating back forty years. Diego Martinez, head of the bank, confirmed the news on Friday.
The sum includes the US$96 million awarded to Chevron in 2011 by a Hague arbitration court, plus interest. The dispute began in 1973, over a deal in which Texaco would develop oil fields in the country and sell the oil to Ecuador at below-market rates. Texaco was subsequently acquired by Chevron, in 2001.
The ruling by The Hague court was later confirmed by courts in the United States and upheld by the U.S. Supreme Court. In June, the justices declined to hear the case, and let the award stand. It was Chevron that initiated the arbitration hearing by The Hague court in 2006. At that time the company claimed that the issue was not being settled in the country's courts in a "timely manner."
On Friday, Martinez said, "We have today paid around $112 million. We don't agree with how these international mechanisms work ... however, we are respectful and we fulfill our international obligations." Ecuador's president, Rafael Correa, accused the company of seeking revenge over another case, in which an oil group helmed by Texaco was ordered by courts in Ecuador to pay US$9.5 billion for drilling-related contamination.
Correa had threatened to withhold payment for Chevron over that matter. That case was brought to court by a group of Ecuadorian villagers who claimed that Texaco caused major environmental damage to their lands during oil exploration operations carried out in the 1960s. The villagers have filed lawsuits in Canada, Brazil and Argentina. Chevron is continuing to fight those cases.
Correa is already dealing with a cash shortage in the country brought on by the decline in oil prices; the payment to Chevron comes on the heels of approximately US$1 billion in compensation from Ecuador to Occidental Petroleum, stemming from a 2006 incident in which the country seized an Occidental oilfield.
By Lincoln Brown for Oilprice.com
from django.contrib.auth import get_user_model
from django.db.models import Prefetch, Q
from django_auto_prefetching import AutoPrefetchViewSetMixin
from rest_framework import generics
from rest_framework.permissions import IsAuthenticated
import courses.examples as examples
from courses.filters import CourseSearchFilterBackend
from courses.models import Course, Requirement, Section, StatusUpdate
from courses.search import TypedCourseSearchBackend, TypedSectionSearchBackend
from courses.serializers import (
CourseDetailSerializer,
CourseListSerializer,
MiniSectionSerializer,
RequirementListSerializer,
SectionDetailSerializer,
StatusUpdateSerializer,
UserSerializer,
)
from courses.util import get_current_semester
from PennCourses.docs_settings import PcxAutoSchema, reverse_func
SEMESTER_PARAM_DESCRIPTION = (
"The semester of the course (of the form YYYYx where x is A [for spring], "
"B [summer], or C [fall]), e.g. '2019C' for fall 2019. Alternatively, you "
"can just pass 'current' for the current semester."
)
class BaseCourseMixin(AutoPrefetchViewSetMixin, generics.GenericAPIView):
schema = PcxAutoSchema()
@staticmethod
def get_semester_field():
return "semester"
def get_semester(self):
semester = self.kwargs.get("semester", "current")
if semester == "current":
semester = get_current_semester(allow_not_found=True)
semester = semester if semester is not None else "all"
return semester
def filter_by_semester(self, queryset):
# if we're in a view without a semester parameter, only return the current semester.
semester = self.get_semester()
if semester != "all":
queryset = queryset.filter(**{self.get_semester_field(): semester})
return queryset
def get_queryset(self):
queryset = super().get_queryset()
queryset = self.filter_by_semester(queryset)
return queryset
class SectionList(generics.ListAPIView, BaseCourseMixin):
"""
Retrieve a list of sections (less detailed than [PCx] Section, or SectionDetail on the
backend). The sections are filtered by the search term (assumed to be a prefix of a
section's full code, with each chunk either space-delimited, dash-delimited, or not delimited).
"""
schema = PcxAutoSchema(
examples=examples.SectionList_examples,
response_codes={
reverse_func("section-search", args=["semester"]): {
"GET": {200: "[DESCRIBE_RESPONSE_SCHEMA]Sections Listed Successfully."}
}
},
custom_path_parameter_desc={
reverse_func("section-search", args=["semester"]): {
"GET": {"semester": SEMESTER_PARAM_DESCRIPTION}
}
},
)
serializer_class = MiniSectionSerializer
queryset = Section.with_reviews.all().exclude(activity="")
filter_backends = [TypedSectionSearchBackend]
search_fields = ["^full_code"]
@staticmethod
def get_semester_field():
return "course__semester"
class SectionDetail(generics.RetrieveAPIView, BaseCourseMixin):
"""
Retrieve a detailed look at a specific course section.
"""
schema = PcxAutoSchema(
examples=examples.SectionDetail_examples,
response_codes={
reverse_func("sections-detail", args=["semester", "full_code"]): {
"GET": {200: "[DESCRIBE_RESPONSE_SCHEMA]Section detail retrieved successfully."}
}
},
custom_path_parameter_desc={
reverse_func("sections-detail", args=["semester", "full_code"]): {
"GET": {"semester": SEMESTER_PARAM_DESCRIPTION}
}
},
)
serializer_class = SectionDetailSerializer
queryset = Section.with_reviews.all()
lookup_field = "full_code"
def get_semester_field(self):
return "course__semester"
class CourseList(generics.ListAPIView, BaseCourseMixin):
"""
Retrieve a list of (all) courses for the provided semester.
"""
schema = PcxAutoSchema(
examples=examples.CourseList_examples,
response_codes={
reverse_func("courses-list", args=["semester"]): {
"GET": {200: "[DESCRIBE_RESPONSE_SCHEMA]Courses listed successfully."}
}
},
custom_path_parameter_desc={
reverse_func("courses-list", args=["semester"]): {
"GET": {"semester": SEMESTER_PARAM_DESCRIPTION}
}
},
)
serializer_class = CourseListSerializer
queryset = Course.with_reviews.filter(sections__isnull=False) # included redundantly for docs
def get_queryset(self):
queryset = Course.with_reviews.filter(sections__isnull=False)
queryset = queryset.prefetch_related(
Prefetch(
"sections",
Section.with_reviews.all()
.filter(credits__isnull=False)
.filter(Q(status="O") | Q(status="C"))
.distinct()
.prefetch_related("course", "meetings__room"),
)
)
queryset = self.filter_by_semester(queryset)
return queryset
class CourseListSearch(CourseList):
"""
This route allows you to list courses by certain search terms and/or filters.
Without any GET parameters, this route simply returns all courses
for a given semester. There are a few filter query parameters which constitute ranges of
    floating-point numbers. The values for these take the form <min>-<max>, with the minimum excluded.
For example, looking for classes in the range of 0-2.5 in difficulty, you would add the
parameter difficulty=0-2.5. If you are a backend developer, you can find these filters in
backend/plan/filters.py/CourseSearchFilterBackend. If you are reading the frontend docs,
these filters are listed below in the query parameters list (with description starting with
"Filter").
"""
schema = PcxAutoSchema(
examples=examples.CourseListSearch_examples,
response_codes={
reverse_func("courses-search", args=["semester"]): {
"GET": {200: "[DESCRIBE_RESPONSE_SCHEMA]Courses listed successfully."}
}
},
custom_path_parameter_desc={
reverse_func("courses-search", args=["semester"]): {
"GET": {"semester": SEMESTER_PARAM_DESCRIPTION}
}
},
)
filter_backends = [TypedCourseSearchBackend, CourseSearchFilterBackend]
search_fields = ("full_code", "title", "sections__instructors__name")
class CourseDetail(generics.RetrieveAPIView, BaseCourseMixin):
"""
Retrieve a detailed look at a specific course. Includes all details necessary to display course
info, including requirements this class fulfills, and all sections.
"""
schema = PcxAutoSchema(
examples=examples.CourseDetail_examples,
response_codes={
reverse_func("courses-detail", args=["semester", "full_code"]): {
"GET": {200: "[DESCRIBE_RESPONSE_SCHEMA]Courses detail retrieved successfully."}
}
},
custom_path_parameter_desc={
reverse_func("courses-detail", args=["semester", "full_code"]): {
"GET": {"semester": SEMESTER_PARAM_DESCRIPTION}
}
},
)
serializer_class = CourseDetailSerializer
lookup_field = "full_code"
queryset = Course.with_reviews.all() # included redundantly for docs
def get_queryset(self):
queryset = Course.with_reviews.all()
queryset = queryset.prefetch_related(
Prefetch(
"sections",
Section.with_reviews.all()
.filter(credits__isnull=False)
.filter(Q(status="O") | Q(status="C"))
.distinct()
.prefetch_related(
"course", "meetings", "associated_sections", "meetings__room", "instructors"
),
)
)
queryset = self.filter_by_semester(queryset)
return queryset
class RequirementList(generics.ListAPIView, BaseCourseMixin):
"""
Retrieve a list of all academic requirements in the database for this semester.
"""
schema = PcxAutoSchema(
examples=examples.RequirementList_examples,
response_codes={
reverse_func("requirements-list", args=["semester"]): {
"GET": {200: "[DESCRIBE_RESPONSE_SCHEMA]Requirements listed successfully."}
},
},
custom_path_parameter_desc={
reverse_func("requirements-list", args=["semester"]): {
"GET": {"semester": SEMESTER_PARAM_DESCRIPTION}
}
},
)
serializer_class = RequirementListSerializer
queryset = Requirement.objects.all()
class UserView(generics.RetrieveAPIView, generics.UpdateAPIView):
"""
This view exposes the Penn Labs Accounts User object.
"""
schema = PcxAutoSchema()
serializer_class = UserSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return get_user_model().objects.filter(pk=self.request.user.pk)
def get_object(self):
return self.request.user
class StatusUpdateView(generics.ListAPIView):
"""
Retrieve all Status Update objects from the current semester for a specific section.
"""
schema = PcxAutoSchema(
examples=examples.StatusUpdateView_examples,
response_codes={
reverse_func("statusupdate", args=["full_code"]): {
"GET": {
200: "[DESCRIBE_RESPONSE_SCHEMA]Status Updates for section listed successfully."
}
}
},
custom_path_parameter_desc={
reverse_func("statusupdate", args=["full_code"]): {
"GET": {
"full_code": (
"The code of the section which this status update applies to, in the "
"form '{dept code}-{course code}-{section code}', e.g. `CIS-120-001` for "
"the 001 section of CIS-120."
)
}
}
},
)
serializer_class = StatusUpdateSerializer
http_method_names = ["get"]
lookup_field = "section__full_code"
def get_queryset(self):
return StatusUpdate.objects.filter(
section__full_code=self.kwargs["full_code"],
section__course__semester=get_current_semester(),
in_add_drop_period=True,
)
|
import ReactDOM from 'react-dom';
import React, { useEffect, useState } from 'react';
import reactToAngularPortalsManager from './reactToAngularPortalsManager';
import { ReactToAngularPortalsComponentDefinitionWithId } from './ReactToAngularPortalsComponentDefinition';
import AngularInjectorContext from '../../AngularInjectorContext';
const ReactToAngularPortals: React.FC = () => {
const [componentDefinitions, setComponentDefinitions] = useState(
[] as ReactToAngularPortalsComponentDefinitionWithId[],
);
useEffect(() => {
reactToAngularPortalsManager.addSetComponentDefinitions(setComponentDefinitions);
return () => {
reactToAngularPortalsManager.removeSetComponentDefinitions();
};
}, []);
return (
<>
{componentDefinitions.map((componentDefinition) => (
<AngularInjectorContext.Provider value={componentDefinition.$injector} key={componentDefinition.id}>
{ReactDOM.createPortal(componentDefinition.component, componentDefinition.container)}
</AngularInjectorContext.Provider>
))}
</>
);
};
export default React.memo(ReactToAngularPortals);
|
/**
* Represents a bond between two atoms on screen.
*
* @author V.Ganesh
* @version 2.0 (Part of MeTA v2.0)
*/
public abstract class ScreenBond extends AbstractGlyph {
/** Holds value of property atom1. */
protected ScreenAtom atom1;
/** Holds value of property atom2. */
protected ScreenAtom atom2;
/** Creates a new instance of ScreenBond */
public ScreenBond(ScreenAtom atom1, ScreenAtom atom2) {
super();
this.atom1 = atom1;
this.atom2 = atom2;
selectionColor = ColorResource.getInstance().getSelectedBondColor();
}
/** Getter for property atom1.
* @return Value of property atom1.
*
*/
public ScreenAtom getAtom1() {
return this.atom1;
}
/** Setter for property atom1.
* @param atom1 New value of property atom1.
*
*/
public void setAtom1(ScreenAtom atom1) {
this.atom1 = atom1;
}
/** Getter for property atom2.
* @return Value of property atom2.
*
*/
public ScreenAtom getAtom2() {
return this.atom2;
}
/** Setter for property atom2.
* @param atom2 New value of property atom2.
*
*/
public void setAtom2(ScreenAtom atom2) {
this.atom2 = atom2;
}
} |
def _get_subtree_max_node(node):
    """Return the right-most (maximum) node of the subtree rooted at ``node``."""
current_node = node
while current_node.right is not None:
current_node = current_node.right
return current_node |
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import ClockCycles, FallingEdge

# INPUT_WIDTH (the width of input_binary_i) is assumed to be defined elsewhere
# in the original test module; this coroutine is assumed to be a top-level test.


@cocotb.test()
async def coverage(dut):
dut.en_i <= 0
dut.randomise_en_i <= 0
dut.input_binary_i <= 0
clk_proc = cocotb.fork(Clock(dut.clk_i, 2).start())
dut.rst_ni <= 0
await ClockCycles(dut.clk_i, 2)
dut.rst_ni <= 1
await FallingEdge(dut.clk_i)
pipeline_expected = 0
for n in range(2**INPUT_WIDTH):
dut.input_binary_i <= n
await FallingEdge(dut.clk_i)
value = 4*dut.output_thermometer_o.value.binstr.count('1')
value += dut.output_binary_o.value.integer
dut._log.info('value = %d, pipeline_expected = %d' % (value, pipeline_expected))
assert value == pipeline_expected
pipeline_expected = n |
import { css, html, LitElement, svg } from 'lit';
import { customElement, property } from 'lit/decorators.js';
import { ElementPin, GND, VCC } from './pin';
import { RGB } from './types/rgb';
import { mmToPix } from './utils/units';
const pixelWidth = 5.66;
const pixelHeight = 5;
/**
* Renders a matrix of NeoPixels (smart RGB LEDs).
* Optimized for displaying large matrices (up to thousands of elements).
*
* The color of individual pixels can be set by calling `setPixel(row, col, { r, g, b })`
* on this element, e.g. `element.setPixel(0, 0, { r: 1, g: 0, b: 0 })` to set the leftmost
* pixel to red.
*/
@customElement('wokwi-neopixel-matrix')
export class NeopixelMatrixElement extends LitElement {
/**
* Number of rows in the matrix
*/
@property() rows = 8;
/**
* Number of columns in the matrix
*/
@property() cols = 8;
/**
* The spacing between two adjacent rows, in mm
*/
@property({ attribute: 'rowspacing' }) rowSpacing = 1;
/**
* The spacing between two adjacent columns, in mm
*/
@property({ attribute: 'colspacing' }) colSpacing = 1;
/**
* Whether to apply blur to the light. Blurring the light
* creates a bit more realistic look, but negatively impacts
* performance. It's recommended to leave this off for large
* matrices.
*/
@property() blurLight = false;
/**
* Animate the LEDs in the matrix. Used primarily for testing in Storybook.
* The animation sequence is not guaranteed and may change in future releases of
* this element.
*/
@property() animation = false;
get pinInfo(): ElementPin[] {
const { cols, rows, rowSpacing, colSpacing } = this;
const pinSpacing = 2.54;
const p = pinSpacing * mmToPix;
const cx = ((cols * (colSpacing + pixelWidth)) / 2) * mmToPix;
const y = rows * (rowSpacing + pixelHeight) * mmToPix;
return [
{
name: 'GND',
x: cx - 1.5 * p,
y,
signals: [GND()],
},
{ name: 'VCC', x: cx - 0.5 * p, y, signals: [VCC()] },
{ name: 'DIN', x: cx + 0.5 * p, y, signals: [] },
{ name: 'DOUT', x: cx + 1.5 * p, y, signals: [] },
];
}
private pixelElements: Array<[SVGElement, SVGElement, SVGElement, SVGElement]> | null = null;
private animationFrame: number | null = null;
static get styles() {
return css`
:host {
display: flex;
}
`;
}
private getPixelElements() {
if (!this.shadowRoot) {
return null;
}
if (!this.pixelElements) {
this.pixelElements = Array.from(this.shadowRoot.querySelectorAll('g.pixel')).map(
(e) =>
Array.from(e.querySelectorAll('ellipse')) as unknown as [
SVGElement,
SVGElement,
SVGElement,
SVGElement
]
);
}
return this.pixelElements;
}
/**
* Resets all the pixels to off state (r=0, g=0, b=0).
*/
reset() {
const pixelElements = this.getPixelElements();
if (!pixelElements) {
return;
}
for (const [rElement, gElement, bElement, colorElement] of pixelElements) {
rElement.style.opacity = '0';
gElement.style.opacity = '0';
bElement.style.opacity = '0';
colorElement.style.opacity = '0';
}
}
/**
* Sets the color of a single neopixel in the matrix
* @param row Row number of the pixel to set
* @param col Column number of the pixel to set
* @param rgb An object containing the {r, g, b} values for the pixel
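   * @example
   * // Set the leftmost pixel (row 0, col 0) to red; channel values are in the 0..1 range.
   * element.setPixel(0, 0, { r: 1, g: 0, b: 0 });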
*/
setPixel(row: number, col: number, rgb: RGB) {
const pixelElements = this.getPixelElements();
if (row < 0 || col < 0 || row >= this.rows || col >= this.cols || !pixelElements) {
return null;
}
const { r, g, b } = rgb;
const spotOpacity = (value: number) => (value > 0.001 ? 0.7 + value * 0.3 : 0);
const maxOpacity = Math.max(r, g, b);
const minOpacity = Math.min(r, g, b);
const opacityDelta = maxOpacity - minOpacity;
const multiplier = Math.max(1, 2 - opacityDelta * 20);
const glowBase = 0.1 + Math.max(maxOpacity * 2 - opacityDelta * 5, 0);
const glowColor = (value: number) => (value > 0.005 ? 0.1 + value * 0.9 : 0);
const glowOpacity = (value: number) => (value > 0.005 ? glowBase + value * (1 - glowBase) : 0);
const cssVal = (value: number) =>
maxOpacity ? Math.floor(Math.min(glowColor(value / maxOpacity) * multiplier, 1) * 255) : 255;
const cssColor = `rgb(${cssVal(r)}, ${cssVal(g)}, ${cssVal(b)})`;
const pixelElement = pixelElements[row * this.cols + col];
const [rElement, gElement, bElement, colorElement] = pixelElement;
rElement.style.opacity = spotOpacity(r).toFixed(2);
gElement.style.opacity = spotOpacity(g).toFixed(2);
bElement.style.opacity = spotOpacity(b).toFixed(2);
colorElement.style.opacity = glowOpacity(maxOpacity).toFixed(2);
colorElement.style.fill = cssColor;
}
private animateStep = () => {
const time = new Date().getTime();
const { rows, cols } = this;
const pixelValue = (n: number) => (n % 2000 > 1000 ? 1 - (n % 1000) / 1000 : (n % 1000) / 1000);
for (let row = 0; row < rows; row++) {
for (let col = 0; col < cols; col++) {
const radius = Math.sqrt((row - rows / 2 + 0.5) ** 2 + (col - cols / 2 + 0.5) ** 2);
this.setPixel(row, col, {
r: pixelValue(radius * 100 + time),
g: pixelValue(radius * 100 + time + 200),
b: pixelValue(radius * 100 + time + 400),
});
}
}
this.animationFrame = requestAnimationFrame(this.animateStep);
};
updated() {
if (this.animation && !this.animationFrame) {
this.animationFrame = requestAnimationFrame(this.animateStep);
} else if (!this.animation && this.animationFrame) {
cancelAnimationFrame(this.animationFrame);
this.animationFrame = null;
}
}
private renderPixels() {
const result = [];
const { cols, rows, colSpacing, rowSpacing } = this;
const patWidth = pixelWidth + colSpacing;
const patHeight = pixelHeight + rowSpacing;
for (let row = 0; row < rows; row++) {
for (let col = 0; col < cols; col++) {
result.push(svg`
<g transform="translate(${patWidth * col}, ${patHeight * row})" class="pixel">
<ellipse cx="2.5" cy="2.3" rx="0.3" ry="0.3" fill="red" opacity="0" />
<ellipse cx="3.5" cy="3.2" rx="0.3" ry="0.3" fill="green" opacity="0" />
<ellipse cx="3.3" cy="1.45" rx="0.3" ry="0.3" fill="blue" opacity="0" />
<ellipse cx="3" cy="2.5" rx="2.2" ry="2.2" opacity="0" />
</g>`);
}
}
this.pixelElements = null;
return result;
}
render() {
const { cols, rows, rowSpacing, colSpacing, blurLight } = this;
const patWidth = pixelWidth + colSpacing;
const patHeight = pixelHeight + rowSpacing;
const width = pixelWidth * cols + colSpacing * (cols - 1);
const height = pixelHeight * rows + rowSpacing * (rows - 1);
return html`
<svg
width="${width}mm"
height="${height}mm"
version="1.1"
viewBox="0 0 ${width} ${height}"
xmlns="http://www.w3.org/2000/svg"
>
<filter id="blurLight" x="-0.8" y="-0.8" height="2.8" width="2.8">
<feGaussianBlur stdDeviation="0.3" />
</filter>
<pattern id="pixel" width="${patWidth}" height="${patHeight}" patternUnits="userSpaceOnUse">
<rect x=".33308" y="0" width="5" height="5" fill="#fff" />
<rect x=".016709" y=".4279" width=".35114" height=".9" fill="#eaeaea" />
<rect x="0" y="3.6518" width=".35114" height=".9" fill="#eaeaea" />
<rect x="5.312" y="3.6351" width=".35114" height=".9" fill="#eaeaea" />
<rect x="5.312" y=".3945" width=".35114" height=".9" fill="#eaeaea" />
<circle cx="2.8331" cy="2.5" r="2.1" fill="#ddd" />
<circle cx="2.8331" cy="2.5" r="1.7325" fill="#e6e6e6" />
<g fill="#bfbfbf">
<path
d="m4.3488 3.3308s-0.0889-0.087-0.0889-0.1341c0-0.047-6e-3 -1.1533-6e-3 -1.1533s-0.0591-0.1772-0.2008-0.1772c-0.14174 0-0.81501 0.012-0.81501 0.012s-0.24805 0.024-0.23624 0.3071c0.0118 0.2835 0.032 2.0345 0.032 2.0345 0.54707-0.046 1.0487-0.3494 1.3146-0.8888z"
/>
<path
d="m4.34 1.6405h-1.0805s-0.24325 0.019-0.26204-0.2423l6e-3 -0.6241c0.57782 0.075 1.0332 0.3696 1.3366 0.8706z"
/>
<path
d="m2.7778 2.6103-0.17127 0.124-0.8091-0.012c-0.17122-0.019-0.17062-0.2078-0.17062-0.2078-1e-3 -0.3746 1e-3 -0.2831-9e-3 -0.8122l-0.31248-0.018s0.43453-0.9216 1.4786-0.9174c-1.1e-4 0.6144-4e-3 1.2289-6e-3 1.8434z"
/>
<path
d="m2.7808 3.0828-0.0915-0.095h-0.96857l-0.0915 0.1447-3e-3 0.1127c0 0.065-0.12108 0.08-0.12108 0.08h-0.20909c0.55906 0.9376 1.4867 0.9155 1.4867 0.9155 1e-3 -0.3845-2e-3 -0.7692-2e-3 -1.1537z"
/>
</g>
<path
d="m4.053 1.8619c-0.14174 0-0.81494 0.013-0.81494 0.013s-0.24797 0.024-0.23616 0.3084c3e-3 0.077 5e-3 0.3235 9e-3 0.5514h1.247c-2e-3 -0.33-4e-3 -0.6942-4e-3 -0.6942s-0.0593-0.1781-0.20102-0.1781z"
fill="#666"
/>
</pattern>
<rect width="${width}" height="${height}" fill="url(#pixel)"></rect>
<g style="${blurLight ? 'filter: url(#blurLight)' : ''}">${this.renderPixels()}</g>
</svg>
`;
}
}
|
def _step_ccd(self):
g = get_root(self).globals
try:
np = g.ipars.nodPattern
if not np:
raise ValueError('no nod pattern defined')
nd = len(np['ra'])
di = self.dither_index % nd
raoff = np['ra'][di]
decoff = np['dec'][di]
self.dither_index += 1
except Exception as err:
            # 'di' may be unbound if the nod pattern lookup failed, so don't reference it here
            self.logger.warn('could not get dither position: {}'.format(str(err)))
return
self.logger.info('moving CCD to dither position {:d} ({} {})'.format(
di, raoff, decoff
))
ra, dec = wcs.add_offset_radec(
self.ctr_ra_deg, self.ctr_dec_deg,
raoff/3600., decoff/3600.)
image = self.fitsimage.get_image()
xc, yc = image.radectopix(self.ra_as_drawn, self.dec_as_drawn)
xn, yn = image.radectopix(ra, dec)
self.ra_as_drawn, self.dec_as_drawn = ra, dec
obj = self.canvas.get_object_by_tag('ccd_overlay')
obj.move_delta(xn-xc, yn-yc)
self.canvas.update_canvas() |
package br.com.codeshare.microservices.articles.exception;
public class ArticleNotFoundException extends RuntimeException {
public ArticleNotFoundException(String cause) {
super("Article not found with " + cause);
}
}
|
def has_nan(dataframe):
    """Return whether the dataframe contains any NaNs and the per-column NaN counts."""
is_nan = dataframe.isnull().values.any()
no_nan = dataframe.isnull().sum()
return is_nan, no_nan |
Prediction of Quality of Life Based on Spiritual Intelligence and Resiliency in Mothers of Children with Behavioral Problems
1 PhD Student in Counseling, Department of Counseling and Psychology, Faculty of Human Sciences, University of Hormozgan, Bandar Abbas, Iran. 2 Assistant Professor, Department of Clinical Psychology, Young Research and Elite Club, Bandar Abbas Branch, Islamic Azad University, Bandar Abbas, Iran. 3 MA, Occupational Counseling, Allameh Tabataba'i University, Faculty of Psychology and Educational Sciences, Tehran, Iran. 4 Mother and Child Welfare Research Center, Hormozgan University of Medical Sciences, Bandar Abbas, Iran.
Introduction
Some children are incapable of going through developmental stages and meeting developmental expectations and tasks for a variety of reasons, as a result of which they develop acute behavioral problems with aging (1). The consequences of these behavioral problems manifest in the form of rejection by others and complaints about them (2). The results of research performed in this area reveal that unresolved behavioral problems in childhood often lead to subsequent problems (3).
It has been found that children's behavioral problems are closely related to parents' psychological states. Moreover, such problems may limit the interactions and social activities of the family and affect their interpersonal relationships (4)(5). In addition, children's behavioral problems not only affect family functioning in general, and mothers' functioning in particular, but also cause stress and create disorder in the family environment (6).
Quality of life is one of the most important concerns for parents and other family members (7). In this regard, quality of life refers to the fulfillment of expectations and, consequently, life satisfaction, which manifests in the form of happiness and vitality. This factor is a powerful force that plays a prominent role in guiding people and improving their health and well-being (8). Moreover, it is considered an active, dynamic, and multifaceted flow of perceptions, attitudes, and behavioral changes that is influenced by the mother-child relationship (9). In other words, the interaction of mothers with children who have behavioral problems affects the quality of their lives and reduces their performance (7).
Spiritual intelligence is one of the effective psychological components playing a significant role in the perfection and health of the human mind (10). In addition, it includes a set of adaptive capacities in the mind based on nonphysical aspects and transcendence of reality. This concept is also considered to involve a set of activities causing self-awareness and deep insight into life and its purposefulness, in addition to tenderness and flexibility in behavior (11). According to the literature, spiritual factors, such as spiritual intelligence, influence the quality of life, especially in mothers (12). Furthermore, the results of a study performed by Hojjati et al. revealed that resilience has a substantial role in the quality of life of parents (13).
Resilience is defined as a person's ability to establish a biological, psychological, and spiritual balance in the face of risky situations. In other words, it is a kind of self-healing with positive emotional and cognitive consequences (14), reducing negative emotions and increasing mental health and life satisfaction (15). The findings of the research conducted by Mortazavi and Yarolahi showed a positive and significant relationship between resilience and mental health (16).
In another study performed by Mehrafraz and Jahangir, it was revealed that resilience had a positive and significant relationship with the quality of life of mothers with children having behavioral problems (17). Therefore, the consideration of the quality of life in the mothers of these children and the components affecting its improvement is of high importance (9), especially considering the prominent role of mothers in developing the psychological and emotional characteristics of children (18), as evidenced by their high referrals to counseling centers (19).
The relationship of spiritual intelligence and resilience components with the quality of life has been investigated in various studies. For instance, the studies conducted by Noorisamarin and Noori (20) and Singh and Sinha (21) showed that spiritual intelligence and its components had a positive and significant relationship with the quality of life. Furthermore, Shabahang et al. (22) indicated that the resilience of parents of children with special needs has a positive and significant correlation with the quality of life. In addition, the findings of the research carried out by Hosseininia and Hatami (23) showed that the quality of life of retirees can be predicted based on psychological wellbeing and resilience.
The studies mentioned above have investigated the relationship between quality of life and these variables only separately. To the best of our knowledge, no study has specifically been performed to predict the quality of life of mothers of children with behavioral problems in terms of spiritual intelligence and resilience. Additionally, few studies have investigated the effect of these variables on this target population. This highlights the need to study the multiple relationships among these variables in mothers of problem children, with the aim of preventing deterioration of, or improving, the quality of life in this group. With this background in mind, the present study was conducted to predict the quality of life in terms of spiritual intelligence and resilience in the mothers of children with behavioral problems.
Methods
In the present descriptive-correlational study, spiritual intelligence and resilience were considered predictor variables, while quality of life was regarded as the dependent variable. The statistical population consisted of all mothers of 6- to 12-year-old children with internalizing and externalizing behavioral problems who were referred to the counseling centers of Neyshabour in 2018. The children's behavioral problems were diagnosed on the basis of a clinical diagnostic interview. The final sample of the study (n=100) was selected using the multistage random sampling technique.
To this end, a list was initially prepared of the centers providing counseling and psychological services in all areas of Neyshabour. These centers were then classified into four categories corresponding to the north, south, west, and east regions, and, to gather the necessary data, one center was selected from each region according to the number of counseling centers. At the next stage, the necessary permissions were obtained from the relevant departments. Furthermore, the participants were informed about the confidentiality of their personal information.
The questionnaires were distributed among the eligible samples. The inclusion criteria were determined as: 1) willingness to complete the questionnaires, 2) having a child with behavioral problems, and 3) diagnosis of psychiatric disorders. On the other hand, mothers who did not meet the inclusion criteria were excluded from the study. In this regard, the exclusion criterion was the mother's referral to counseling centers to seek counseling services for problems other than their children's behavioral problems.
The data were collected using three questionnaires, namely spiritual intelligence, Connor-Davidson Resilience Scale, and World Health Organization Quality of Life-BREF. The spiritual intelligence questionnaire, developed and standardized by Abdullahzadeh et al., contains 29 items on a five-point Likert scale (strongly agree to strongly disagree) in two subscales, namely comprehension and connection with the supreme power and spiritual life or reliance on inner core (24). Therefore, this instrument has a score range of 29-145 (26).
The validity and reliability of this questionnaire, which was developed in accordance with the cultural characteristics of Iranian society, have been also confirmed (25). The validation of this 30-item questionnaire was performed using a pilot study conducted on 30 university students and then on 280 subjects, including 184 females and 96 males, rendering the Cronbach's alpha coefficients of 0.87 and 0.89, respectively (19)(20)(21)(22)(23)(24)(25)(26)(27). The validity of the questionnaire was confirmed using content validity, face validity, and factor analysis (23). In the present study, the reliability of this instrument was also measured, rendering a Cronbach's alpha coefficient of 0.82.
The Connor-Davidson Resilience Scale (2003) was used to measure the resilience of the participants. This test contains 25 items that are scored on a five-point Likert scale (completely incorrect=0 to always correct=4) in five subscales, namely personal competence, trust to instinct, acceptance of positive emotions, control, and spiritual influences (28).
According to the developers of this scale, this questionnaire is quite capable of distinguishing between resilient and non-resilient people in clinical and non-clinical groups, both in research and in clinical settings. The questionnaire has minimum and maximum scores of 25 and 125, respectively (29). The internal consistency, test-retest reliability, and convergent and divergent validity of this instrument have been confirmed in previous studies (30). Its reliability and validity have also been confirmed for the Iranian population by Mohammadi, using internal consistency (r=0.89) and factor analysis (0.87), respectively (31).
The Quality of Life Questionnaire was developed by the World Health Organization in collaboration with 15 international centers in 1989. This questionnaire assesses various aspects of quality of life through 26 items (32).
The same scaling method was used in this questionnaire as in the previously mentioned ones. In this regard, a 5-point Likert scale was applied, with some items being reverse scored (15). This questionnaire has a score range of 26-130, with a score of 78 as the cut-off point. Accordingly, lower scores are indicative of a low level of quality of life, while higher scores represent a high level of quality of life (33).
It should be noted that this questionnaire consists of four aspects of physical, mental, social, and environmental health, with the Cronbach's alpha range of 0.73-0.89 for the four subscales, as well as the whole scale (34). In Iran, Nasiri et al. translated this scale into Persian and reported its validity and reliability with the Cronbach's alpha coefficient of 0.84 indicating an acceptable internal consistency (35).
However, in a study conducted by Fathi-Ashtiani, the reliability of this questionnaire was reported to be 0.95 using Cronbach's alpha method (36). In addition, Levy and Litman-Ovadia measured the reliability of the above-mentioned questionnaire using three methods, namely three-week interval test-retest, split-half, and Cronbach's alpha coefficient, and obtained scores of 0.67, 0.87, and 0.88, respectively (37).
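As a minimal illustration (not the official WHOQOL-BREF scoring syntax), the scoring scheme described above, that is, a 5-point Likert scale with some reverse-scored items, a score range of 26-130, and a cut-off of 78, could be sketched in Python as follows; the reverse-scored item numbers used here are placeholders rather than the actual WHOQOL-BREF items:

# Hypothetical sketch of the total-score computation for a 26-item, 5-point Likert
# questionnaire with reverse-scored items. The item numbers below are placeholders,
# not the actual WHOQOL-BREF reverse-scored items.
REVERSE_SCORED_ITEMS = {3, 4, 26}   # assumption for illustration only
CUT_OFF = 78                        # lower totals indicate a lower quality of life

def total_score(responses):
    """responses: dict mapping item number (1-26) to a Likert rating of 1-5."""
    total = 0
    for item, rating in responses.items():
        if item in REVERSE_SCORED_ITEMS:
            rating = 6 - rating     # reverse a 1-5 rating
        total += rating
    return total                    # possible range: 26-130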
To collect the necessary information, the purpose of the study was explained to the mothers of children with behavioral problems as a prerequisite of the research process. Subsequently, the questionnaires were distributed among the participants after obtaining their consent to participate in the study and assuring them about the confidentiality of their information.
Statistical analysis
The descriptive analysis of the data was performed using central tendency and dispersion indices. In addition, the inferential analysis of the data was based on Pearson correlation and simultaneous multiple regression methods to test the statistical hypotheses and predict changes in the dependent variable based on the predictor variables and their odds ratios. The data were analyzed in SPSS software (version 23) at a significance level of < 0.01.
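For readers who wish to reproduce this type of analysis outside SPSS, the following is a minimal sketch in Python (using statsmodels, with hypothetical column names) of the Pearson correlations and the simultaneous multiple regression described above:

# Minimal sketch of the analyses described above; `df` is assumed to be a pandas
# DataFrame whose column names (outcome and predictors) are hypothetical.
import statsmodels.api as sm

def run_analyses(df, outcome, predictors):
    # Pearson correlations between the outcome and each predictor
    correlations = df[[outcome] + predictors].corr(method="pearson")

    # Simultaneous multiple regression: all predictors entered in a single step
    X = sm.add_constant(df[predictors])
    model = sm.OLS(df[outcome], X).fit()
    return correlations, model

# Example call with hypothetical column names:
# run_analyses(df, "quality_of_life",
#              ["personal_competence", "trust_to_instinct",
#               "acceptance_of_positive_emotions", "control", "spiritual_influences"])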
Results
A total of 100 subjects participated in this study, aged 20-30 (n=27, 27.00%), 31-40 (n=64, 64.00%), and 41-50 (n=9, 9.00%) years. The mean scores of the resilience, spiritual intelligence, and quality of life questionnaires and their subscales are presented in Table 1. Based on the results of Table 1, the mean score of spiritual intelligence was 115.73±16.13, and its subscales, namely comprehension and connection with the supreme power and spiritual life or reliance on inner core, had mean scores of 48.72±9.96 and 67.01±10.42, respectively.
Concerning resilience, the mean score was calculated at 83.17±13.99. In addition, personal competence, trust to instinct, acceptance of positive emotions, control, and spiritual influences, as the subscales of this instrument, had mean scores of 27.19±5.35, 21.40±4.53, 17.12±3.13, 9.71±2.52, and 7.75±1.65, respectively. Additionally, the quality of life was found to have a mean score of 94.85±10.11. As indicated by the results, the skewness and kurtosis values were < 3 and < 10, respectively, for all variables and subscales (Table 1).
The Pearson correlation coefficient test was used to investigate the relationship of spiritual intelligence and resilience with quality of life. Based on the results of this test, the quality of life showed a significant and positive correlation with spiritual intelligence, resilience, and their subscales (P<0.05; Tables 2-3). In this respect, the quality of life improved as spiritual intelligence and its subscales increased. In addition, simultaneous multiple regression was applied to determine the predictability of quality of life in terms of spiritual intelligence and resilience and their subscales in mothers of children with behavioral problems. The results of this evaluation are presented in Tables 4 and 5.
Based on the results of the simultaneous multiple regression analysis, spiritual intelligence accounted for 0.66 of the variance in quality of life (Table 4). Furthermore, according to the beta coefficients, spiritual intelligence and its subscales, comprehension and connection with the supreme power (0.29) and spiritual life or reliance on inner core (0.42), could directly predict the quality of life in mothers of children with behavioral problems (P<0.01).
Considering the results of the simultaneous multiple regression analysis tabulated in Table 5, 0.68 of the variance in quality of life was explained by resilience. In addition, based on the beta coefficients, resilience, personal competence (0.32), and spiritual influences (0.17) could predict the quality of life in the mothers of children with behavioral problems (P<0.01). On the other hand, acceptance of positive emotions, trust to instinct, and control failed to predict the quality of life in this group.
Discussion
Child-rearing is considered one of the most important and valuable issues in every mother's life. However, the presence of a child with behavioral problems can negatively affect and challenge the quality of life in mothers. Due to the importance of determining the factors affecting the quality of life in the mothers having children with behavioral problems, the present study was conducted to predict the quality of life of this group in terms of spiritual intelligence and resilience.
The results of correlation coefficients indicated that the quality of life has a positive and significant relationship with the overall score of spiritual intelligence and its components, including comprehension and connection with the supreme power and spiritual life or reliance on inner core. The findings of simultaneous multiple regression analysis showed that 0.66 of the total variance of mothers' quality of life is directly explained by spiritual intelligence. Additionally, both components of spiritual intelligence were significantly capable of predicting the quality of life in this population.
The results of this study are in line with those obtained by Zamani et al. (12), Noorisamarin and Noori (20), Pant and Srivastava (38), and Singh et al. (39). In conformity with the results of the present research, the findings of a study performed by Mossadegh et al. (40) show that spiritual understanding can predict the mental health of mothers with problem children. In a study conducted by Reisi et al. (41), it was revealed that people with higher spirituality and spiritual feelings not only would be more hopeful about their future but also would endeavor harder to change their lives and increase their quality. These findings were revealed to be in agreement with those of the present study.
The results of the current study revealed a significant relationship between the components of spiritual intelligence and quality of life. Furthermore, both components of spiritual intelligence (i.e., comprehension and connection with the supreme power and spiritual life or reliance on inner core) can predict the quality of life in the mothers of children with behavioral problems.
However, based on the obtained beta values, the component of spiritual life or reliance on inner core could be considered the stronger predictor of quality of life in this group. This finding can be explained by the results obtained by Noorisamarin and Noori (20), who indicated that the ability to understand the meaning of events, together with belief in an inner superior and sacred power, helps individuals overcome problems more easily and take effective steps to resolve them. Moreover, the results of a study carried out by Jafari and Hesampour (42) showed that people with a higher level of understanding, regarding connection with the supreme power and the meaning they give to themselves and the world around them, are less prone to psychological distress. This is because these individuals are more purposeful and have higher self-esteem. These findings are consistent with those of the present study.
It can be concluded that individuals with spiritual beliefs feel less abandoned, futile, and lonely. Additionally, spiritual intelligence enables humans to access meaning and value. The ability to properly make use of spiritual intelligence helps individuals to reveal their inner strength extensively and immensely, thereby contributing to the improvement of their mental health and quality of life (40).
In the present study, the results of the correlation analysis also indicated a positive and significant relationship between quality of life and the components of resilience, including personal competence, trust to instinct, acceptance of positive emotions, control, and spiritual influences. Furthermore, based on the results of the regression analysis, resilience accounted for 0.68 of the total variance in quality of life among mothers with problem children. This finding is in line with the results of the studies performed by Hojjati et al. (13), showing that resilience is significantly related to quality of life, and Sadeghi et al. (43), implying that resilience is an important component of individuals' mental and psychological well-being.
In line with the present study, Sadeghi et al. (44) introduced resilience as one of the components affecting maternal mental health. To elaborate on the findings of the present study, mothers with high resilience coped more effectively with stressful child-rearing situations. Moreover, they were more skilled in communicating effectively with the environment and with others. Such abilities foster higher levels of mental health and inner satisfaction, which, in turn, improve quality of life. Additionally, Wolf (1995) carried out a study on the effect of resilience on psychological health to determine the basic characteristics of resilient people that account for enhanced mental well-being. The results of that research revealed that characteristics such as social ability, problem-solving ability, self-management, purposefulness, and belief in a prosperous future improved the quality of life in the mothers of children with behavioral problems (43).
Our results also revealed that among the resilience components, personal competence and spiritual influences could significantly predict the quality of life among mothers with problem children. Although no similar research has been found in this field, this study is generally consistent with several studies, such as those carried out by Shabahang et al. (22) and Hosseininia and Hatami (23), in terms of the relationship between resilience components and quality of life.
According to the results of a study conducted by Gholamhosseinzadeh et al. (45), the predictive role of the components of personal competence and spiritual influences can be explained by the fact that people who are highly competent and have a high level of spirituality are able to develop and expand a set of skills helping them adapt more to stressful situations. This matter improves their competence, health, and quality of life, thereby reducing their dissatisfaction.
Eventually, since mothers of children with behavioral problems spend most of their time with such children, they need to be highly resilient. These mothers are expected to seek solutions when dealing with their children's behavioral problems, instead of becoming frustrated or disappointed. Moreover, mothers who enjoy a higher level of resilience are more successful in reducing their children's behavioral problems since they possess higher levels of mental toughness and self-confidence. This, in turn, affects mental health, happiness, inner satisfaction, and, in general, the quality of life of mothers with problem children.
Conclusion
The results of the present study demonstrated that the components of spiritual intelligence (i.e., comprehension and connection with the supreme power and spiritual life or reliance on inner core) and the components of resilience (i.e., personal competence and spiritual influences) can predict the quality of life in mothers with problem children. Moreover, mothers with higher levels of spiritual intelligence and resilience enjoy a higher quality of life than their counterparts.
Based on the results of the present study, it is suggested that psychologists, counselors, officials, and other stakeholders design educational programs and training courses that encourage the improvement of resilience and spiritual intelligence with the aim of improving quality of life.
Such measures can boost the quality of life of mothers with problem children, especially those with a low quality of life.
One of the weaknesses of the present study is that the study population was limited to mothers of children with behavioral problems referred to the counseling centers of Neyshabour, which limits the generalizability of the results to larger populations in other cities. Another limitation to be taken into account is the use of self-assessment tools, which could be somewhat influenced by human factors.
Conflict of interest
This research was self-funded and received no specific grant from any funding agency. |
import React from "react";
import { Alert, Col } from "react-bootstrap";
// Fallback field rendered when a schema type has no dedicated field component.
export function UnsupportedField({
  schema: { type },
  uiSchema,
  path
}: any) {
return (
<Col md={12}>
<Alert variant="danger">
Unsupported field @ {path} of type {type}
</Alert>
</Col>
);
}
|
Parametric analyses of fast-pyrolyzed oil production from algae in porous bed reactor = เธเธฒเธฃเธงเธดเนเธเธฃเธฒเธฐเธซเนเธเธฒเธฃเธฒเธกเธดเนเธเธญเธฃเนเนเธเธเธฒเธฃเธเธฅเธดเธเธเนเธณเธกเธฑเธเนเธเนเธฃเนเธฅเธเนเธญเธขเนเธฒเธเธฃเธงเธเนเธฃเนเธงเธเธฒเธเธชเธฒเธซเธฃเนเธฒเธขเธเนเธงเธขเธเธเธดเธเธฃเธเนเนเธเธเนเธเธเธเธฃเธธเธ / Kanyaporn Chaiwon
This research work was carried out to design a fast pyrolysis reactor and to select suitable controlling parameters for oil production. Three types of dry freshwater algae, Spirulina spp., Spirogyra spp., and Cladophora spp., were used in this study. Before pyrolysis, each sample was characterized by ultimate and proximate analyses to investigate its chemical and physical properties. The main compositions of Spirulina spp. were similar to those of Spirogyra spp., while those of Cladophora spp. were different. The thermo-decomposition behavior of the algae, measured by thermogravimetric analysis (TGA), showed that Spirulina spp. had a higher mass loss rate during volatilization than Spirogyra spp. and Cladophora spp. When the decomposition of the algae was compared with that of other biomass, it could be seen that the pyrolysis reaction of the algae occurred more easily, with a lower energy input. The maximum yield of decomposition was lower than for other biomass, which indicated that the amount of volatile matter from the algae, and hence the yield of bio-oil, is lower. Slow pyrolysis of algae in the present study was carried out with a fixed bed reactor. The effect of pyrolysis temperature was investigated by pyrolysing Spirulina spp. at temperatures of 450-600 °C. It was found that the maximum yield of the bio-oil
use std::collections::HashMap;

/// Converts a flat serde json value into a firebase google-rpc-api inspired heavily nested and wrapped type
/// to be consumed by the Firebase REST API.
///
/// This is a low level API. You probably want to use [`crate::documents`] instead.
///
/// This method works recursively!
pub(crate) fn serde_value_to_firebase_value(v: &serde_json::Value) -> dto::Value {
if v.is_f64() {
return dto::Value {
double_value: Some(v.as_f64().unwrap()),
..Default::default()
};
} else if let Some(integer_value) = v.as_i64() {
return dto::Value {
integer_value: Some(integer_value.to_string()),
..Default::default()
};
} else if let Some(map_value) = v.as_object() {
let mut map: HashMap<String, dto::Value> = HashMap::new();
for (map_key, map_v) in map_value {
map.insert(map_key.to_owned(), serde_value_to_firebase_value(&map_v));
}
return dto::Value {
map_value: Some(dto::MapValue { fields: Some(map) }),
..Default::default()
};
} else if let Some(string_value) = v.as_str() {
return dto::Value {
string_value: Some(string_value.to_owned()),
..Default::default()
};
} else if let Some(boolean_value) = v.as_bool() {
return dto::Value {
boolean_value: Some(boolean_value),
..Default::default()
};
} else if let Some(array_value) = v.as_array() {
let mut vec: Vec<dto::Value> = Vec::new();
for k in array_value {
vec.push(serde_value_to_firebase_value(&k));
}
return dto::Value {
array_value: Some(dto::ArrayValue { values: Some(vec) }),
..Default::default()
};
}
Default::default()
} |
import win32api
import win32con


def SetRegistryDefaultValue(subKey, value, rootkey=None):
    """Set the default (unnamed) value of a registry key to a string or DWORD."""
    if rootkey is None:
        rootkey = GetRootKey()  # GetRootKey is assumed to be defined elsewhere in this module
    if type(value) == str:
        typeId = win32con.REG_SZ
    elif type(value) == int:
        typeId = win32con.REG_DWORD
    else:
        raise TypeError("Value must be string or integer - was passed " + repr(value))
    win32api.RegSetValue(rootkey, subKey, typeId, value)
import pandas as pd
import numpy as np
# Returns x*y
def multiplyData(x, y):
return x * y
# Multiply given value by 2 and returns
def doubleData(x):
return x * 2
def main():
# List of Tuples
matrix = [(222, 34, 23),
(333, 31, 11),
(444, 16, 21),
(555, 32, 22),
(666, 33, 27),
(777, 35, 11)
]
# Create a DataFrame object
dfObj = pd.DataFrame(matrix, columns=list('abc'))
print("Original Dataframe", dfObj, sep='\n')
print('************* Apply a lambda function to each row or each column in Dataframe *************')
print('*** Apply a lambda function to each column in Dataframe ***')
# Apply a lambda function to each column by adding 10 to each value in each column
modDfObj = dfObj.apply(lambda x: x + 10)
print("Modified Dataframe by applying lambda function on each column:")
print(modDfObj)
print('*** Apply a lambda function to each row in Dataframe ***')
# Apply a lambda function to each row by adding 5 to each value in each column
modDfObj = dfObj.apply(lambda x: x + 5, axis=1)
print("Modified Dataframe by applying lambda function on each row:")
print(modDfObj)
print('************* Apply a User Defined function to each row or each column in Dataframe *************')
print('*** Apply a user defined function to each column in Dataframe ***')
# Apply a user defined function to each column by doubling each value in each column
modDfObj = dfObj.apply(doubleData)
print("Modified Dataframe by applying a user defined function to each column in Dataframe :")
print(modDfObj)
print('*** Apply a user defined function to each row in Dataframe ***')
# Apply a user defined function to each row by doubling each value in each column
modDfObj = dfObj.apply(doubleData, axis=1)
print("Modified Dataframe by applying a user defined function to each row in Dataframe :")
print(modDfObj)
print(
'************* Apply a User Defined function (with Arguments) to each row or each column in Dataframe *************')
print('*** Apply a user defined function ( with arguments ) to each column in Dataframe ***')
# Apply a user defined function to each column that will multiply each value in each column by given number
modDfObj = dfObj.apply(multiplyData, args=[4])
print("Modified Dataframe by applying a user defined function (with arguments) to each column in Dataframe :")
print(modDfObj)
print('*** Apply a user defined function ( with arguments ) to each row in Dataframe ***')
# Apply a user defined function to each row by doubling each value in each column
modDfObj = dfObj.apply(multiplyData, axis=1, args=[3])
print("Modified Dataframe by applying a user defined function (with arguments) to each row in Dataframe :")
print(modDfObj)
print('************* Apply a numpy function to each row or each column in Dataframe *************')
# Apply a numpy function to each column by doubling each value in each column
modDfObj = dfObj.apply(np.square)
print("Modified Dataframe by applying a numpy function to each column in Dataframe :")
print(modDfObj)
# Apply a numpy function to each row by square root each value in each column
modDfObj = dfObj.apply(np.sqrt, axis=1)
print("Modified Dataframe by applying a numpy function to each row in Dataframe :")
print(modDfObj)
print('************* Apply a reducing function to each column or row in DataFrame *************')
# Apply a numpy function to get the sum of values in each column
modDfObj = dfObj.apply(np.sum)
print("Modified Dataframe by applying a numpy function to get sum of values in each column :")
print(modDfObj)
# Apply a numpy function to get the sum of values in each row
modDfObj = dfObj.apply(np.sum, axis=1)
print("Modified Dataframe by applying a numpy function to get sum of values in each row :")
print(modDfObj)
if __name__ == '__main__':
main() |
-- |
-- The data structures (and associated functions) used in the
-- parser. For a normal usage, it should be enough
-- to import only 'Text.Syntactical', not directly this module.
module Text.Syntactical.Data (
SExpr(..), Tree(..), Op(..), Opening(..),
Associativity(..), Hole(..), Part(..), Table, Priority(..),
infx, prefx, postfx, closed,
infx_, prefx_, postfx_, closed_,
sexpr, distfix,
buildTable, cut, setPrecedence,
begin, end, leftOpen, rightOpen, rightHole, discard,
applicator, applicator', continue, original, priority,
arity, symbol, symbols, next, previous, current,
findBoth, findBegin, FindBegin(..), FindBoth(..), Ambiguity(..),
Token(..),
showPart, showSExpr, showTree
) where
import Data.List
----------------------------------------------------------------------
-- Data structures to represent trees, operators, and parts
----------------------------------------------------------------------
-- | The s-expression data type used as input and output of the parser.
-- The type is parametrized by the type of the token.
data SExpr a = List [SExpr a]
| Atom a
deriving (Eq, Show)
-- | The s-expression data type augmented to represent parts (used in
-- the operator stack).
data Tree a = Branch [Tree a]
| Leaf a
| Part (Part a)
deriving (Eq, Show)
-- | The class of the types that can be parsed.
class Token a where
toString :: a -> String
-- ^ convert to a string (for showing purpose)
operator :: Op a -> [SExpr a] -> SExpr a
-- ^ create an output node from an operator and its arguments
consider :: a -> a -> Bool
-- ^ test if two tokens are the same (used to find match from the
-- operator table). A default definition that compares the result
-- of toString is provided.
-- default definition for consider tests the string representation
consider a b = toString a == toString b
considers :: Token a => [a] -> [a] -> Bool
considers a b = length a == length b && and (zipWith consider a b)
-- | The operator representation, parametrized by the token type.
-- It allows infix, prefix, postfix,
-- and closed operators, with possibly multiple internal holes.
-- Different holes are possible, to drive the parse in specific ways.
-- The boolean is used to specify if the operator should show up
-- in the result or be discarded. The opening further specifies
-- in the non-closed variant if the operator is prefix, infix, or postfix.
data Op a =
Op1 Bool a [(Hole,a)] Opening Precedence
| Op2 Bool a [(Hole,a)] Hole a
deriving (Eq, Show)
-- | Set the precedence of a given operator.
setPrecedence :: Precedence -> Op a -> Op a
setPrecedence p (Op1 keep x xs opening _) = Op1 keep x xs opening p
setPrecedence _ c = c
-- | Return all the tokens of a given operator.
symbols :: Op a -> [a]
symbols (Op1 _ a xs _ _) = a : map snd xs
symbols (Op2 _ a xs _ b) = a : map snd xs ++ [b]
-- | Separate an operator in its different parts.
cut :: Op a -> [Part a]
cut (Op1 keep x [] opening p) =
[Lone keep x opening p]
cut o@(Op1 _ x xs opening p) =
First ma x [snd $ head xs] (fst $ head xs) :
map f (zip4 ls ss rs ks) ++
[Last o]
where
ma = case opening of
Postfix -> Just (NonAssociative,p)
Infix a -> Just (a,p)
Prefix -> Nothing
f (l, s, r, k) = Middle l s r k
(_, xs') = holesAfter (init xs) (fst $ last xs)
fxs = inits $ map fst xs'
sxs = inits $ map snd xs'
ls = map (x:) (init fxs)
ss = map head (tail fxs)
rs = map ((++[snd $ last xs]) . tail) (tail fxs)
ks = map head (tail sxs)
cut o@(Op2 _ x [] h y) =
[First Nothing x [y] h, Last o]
cut o@(Op2 _ x xs h y) =
First Nothing x [snd $ head xs] (fst $ head xs) :
map f (zip4 ls ss rs ks) ++
[Last o]
where
f (l, s, r, k) = Middle l s r k
(_, xs') = holesAfter xs h
fxs = inits $ map fst xs'
sxs = inits $ map snd xs'
ls = map (x:) (init fxs)
ss = map head (tail fxs)
rs = map ((++[y]) . tail) (tail fxs)
ks = map head (tail sxs)
-- Takes a list of pairs (hole,string) and returns
-- a list of (string,hole) where the order and interleaving
-- is respected: the first hole is returned and the last hole
-- is an argument.
holesAfter :: [(Hole,s)] -> Hole -> (Hole, [(s,Hole)])
holesAfter [] h = (h, [])
holesAfter [(a,b)] h = (a, [(b,h)])
holesAfter ((a,b):xs@((c,_):_)) h = (a, (b,c) : snd (holesAfter xs h))
-- | 'buildTable' constructs an operator table that can be
-- used with the 'shunt' function. Operators are given
-- in decreasing precedence order.
buildTable :: [[Op a]] -> Table a
buildTable ls = Table . concat $ zipWith f ls [n, n - 1 .. 0]
where n = length ls
f l p = concatMap (cut . setPrecedence p) l
-- | The Hole is used to give various behaviours when dealing
-- with internal holes.
data Hole =
SExpression
-- ^ SExpression means the 'content' of the hole should be
-- parsed as an s-expression. The resulting value is a List.
-- This means the hole can be empty or contain one or more
-- sub-expression(s).
| Distfix
-- ^ Distfix means the 'content' of the hole should be parsed
-- as a distfix expression. In this case feeding an empty hole
-- will generate a parse error.
deriving (Eq, Show)
-- | Specify the associativity of an infix operator.
data Associativity = NonAssociative | LeftAssociative | RightAssociative
deriving (Show, Eq)
type Precedence = Int
data Priority = Lower | Higher | NoPriority
-- | The type of the operator table.
newtype Table a = Table [Part a]
-- NoBegin: no parts with the requested symbol.
-- Begin: found a begin part.
-- MissingBegin: no begin part found but continuing part found.
data FindBegin a = NoBegin
| Begin (Part a)
| MissingBegin [[a]]
| AmbiguousBegin Ambiguity
data FindContinue a = NoContinue
| Continue (Part a)
| AmbiguousContinue Ambiguity
data Ambiguity = MiddleOrLast
| NotSameHole
| NotSameFirst
| LoneOrFirst
| MultipleLone
deriving (Eq, Show)
data FindBoth a = BNothing
| BContinue (Part a)
| BBegin (Part a)
| BMissingBegin [[a]]
| BAmbiguous Ambiguity
findParts :: Token a => Table a -> a -> [Part a]
findParts (Table ps) x = filter (consider x . symbol) ps
findContinuing :: Token a => [Part a] -> Part a -> FindContinue a
findContinuing xs y = case as of
[] -> NoContinue
(a:as') -> if isLast a
then if all isLast as'
then Continue $ groupLast as
else AmbiguousContinue MiddleOrLast
else if all isMiddle as'
then case groupMiddle as of
Just pt -> Continue pt
Nothing -> AmbiguousContinue NotSameHole
else AmbiguousContinue MiddleOrLast
where as = filter (`continue` y) xs
-- Search the operator stack for the top-most parts waiting to be completed
-- (i.e. on the left of an inner hole).
findIncompletePart :: [Tree a] -> Maybe (Part a)
findIncompletePart [] = Nothing
findIncompletePart (Part y:_) | not (end y) = Just y
findIncompletePart (_:ss) = findIncompletePart ss
-- - The operator stack doesn't contain any operator
-- -> returns (First,Nothing)
-- - The operator stack has an operator at its top and
-- no incomplete operator
-- -> returns (First,Just Top)
-- - The operator stack has no operator at its top but
-- has an incomplete operator below
-- -> return (Continuing of First, Nothing)
-- - The operator stack has an operator at its top and
-- an incomplete operator (at the top or below)
-- -> returns (Continuing or First, Just Top)
-- Actually, if there is no Continuing, returns what it can
-- find, even if it is not First; one of the rules will
-- generate a MissingBefore (in the [] case) or an Incomplete
-- (in the pts2 case).
findBoth :: Token a => Table a -> a -> [Tree a] -> FindBoth a
findBoth table x st = case findIncompletePart st of
Nothing -> wrap $ findBegin table x
Just y -> case findContinuing xs y of
Continue a -> BContinue a
NoContinue -> wrap $ findBegin table x
AmbiguousContinue amb -> BAmbiguous amb
where xs = findParts table x
wrap a = case a of
NoBegin -> BNothing
MissingBegin b -> BMissingBegin b
Begin b -> BBegin b
AmbiguousBegin amb -> BAmbiguous amb
findBegin :: Token a => Table a -> a -> FindBegin a
findBegin table x = case filterParts $ findParts table x of
([],[],[],[]) -> NoBegin
(_:_,_:_,_,_) -> AmbiguousBegin LoneOrFirst
([pt],_,_,_) -> Begin pt
(_:_,_,_,_) -> AmbiguousBegin MultipleLone
(_,f@(_:_),_,_) -> case groupFirst f of
Left amb -> AmbiguousBegin amb
Right pt -> Begin pt
(_,_,m,l) -> MissingBegin $ map previous (m++l)
-- | A Part represent a single symbol of an operator.
data Part a = First (Maybe (Associativity,Precedence)) a [a] Hole
-- assoc/prec if it is open, possible successor parts, non-empty, s-expr/distfix
| Last (Op a)
-- The Op1 case cannot have an empty list (this is the Lone case).
| Lone Bool a Opening Precedence
-- Same as Op1 but without the list.
| Middle [a] a [a] Hole
-- possible predecessor and successor parts, both non-empty, s-expr/distfix
deriving (Show, Eq)
-- Specify if an Op1 or a Lone is prefix, postfix, or infix.
data Opening = Infix Associativity
| Prefix
| Postfix
deriving (Show, Eq)
original :: Part a -> Op a
original (Lone keep x opening p) = Op1 keep x [] opening p
original (Last o) = o
original _ = error "can't happen"
priority :: Part a -> Part a -> Priority
priority pt1 pt2 = case (associativity pt1, associativity pt2) of
(Just (a1,p1), Just (a2,p2)) | begin pt1 && end pt2 ->
f a1 p1 a2 p2
_ | isMiddle pt1 || end pt1 && not (isLone pt1) -> Lower
| otherwise -> Higher
where f a1 p1 a2 p2
| p1 == p2 && (a1 == NonAssociative
|| a2 == NonAssociative || a1 /= a2) =
NoPriority
| p1 < p2 = Lower
| p1 == p2 && a1 == LeftAssociative = Lower
| otherwise = Higher
applicator :: Token a => Table a -> SExpr a -> Bool
applicator table (Atom x) = null $ findParts table x
applicator _ (List _) = True
applicator' :: Token a => Table a -> Tree a -> Bool
applicator' table (Leaf x) = null $ findParts table x
applicator' _ (Branch _) = True
applicator' _ _ = False
isLone :: Part a -> Bool
isLone (Lone _ _ _ _) = True
isLone _ = False
isFirst :: Part a -> Bool
isFirst (First _ _ _ _) = True
isFirst _ = False
isLast :: Part a -> Bool
isLast (Last _) = True
isLast _ = False
isMiddle :: Part a -> Bool
isMiddle (Middle _ _ _ _) = True
isMiddle _ = False
begin :: Part a -> Bool
begin (Lone _ _ _ _) = True
begin (First _ _ _ _) = True
begin _ = False
end :: Part a -> Bool
end (Lone _ _ _ _) = True
end (Last _) = True
end _ = False
discard :: Part a -> Bool
discard (First _ _ _ _) = False
discard (Last (Op1 keep _ _ _ _)) = not keep
discard (Last (Op2 keep _ _ _ _)) = not keep
discard (Lone keep _ _ _) = not keep
discard (Middle _ _ _ _) = False
-- | Return the token of a given Part.
symbol :: Part a -> a
symbol (First _ s _ _) = s
symbol (Last (Op1 _ _ xs _ _)) = snd $ last xs
symbol (Last (Op2 _ _ _ _ s)) = s
symbol (Lone _ s _ _) = s
symbol (Middle _ s _ _) = s
-- | Return the arity of a complete Part. It is an error to call this
-- function on a First or Middle part.
arity :: Part a -> Int
arity (First _ _ _ _) = error "arity: bad argument"
arity (Middle _ _ _ _) = error "arity: bad argument"
arity (Lone _ _ (Infix _) _) = 2
arity (Lone _ _ _ _) = 1
arity (Last (Op1 _ _ xs opening _)) = case opening of
Postfix -> length xs + 1
Infix _ -> length xs + 2
Prefix -> length xs + 1
arity (Last (Op2 _ _ xs _ _)) = length xs + 1
leftOpen :: Part a -> Bool
leftOpen (First (Just _) _ _ _) = True
leftOpen (First _ _ _ _) = False
leftOpen (Last _) = True
leftOpen (Lone _ _ Prefix _) = False
leftOpen (Lone _ _ _ _) = True
leftOpen (Middle _ _ _ _) = True
rightOpen :: Part a -> Bool
rightOpen (First _ _ _ _) = True
rightOpen (Last (Op1 _ _ _ Prefix _)) = True
rightOpen (Last (Op1 _ _ _ (Infix _) _)) = True
rightOpen (Last _) = False
rightOpen (Lone _ _ Postfix _) = False
rightOpen (Lone _ _ _ _) = True
rightOpen (Middle _ _ _ _) = True
rightHole :: Part a -> Maybe Hole
rightHole (First _ _ _ k) = Just k
rightHole (Last _) = Nothing
rightHole (Lone _ _ _ _) = Nothing
rightHole (Middle _ _ _ k) = Just k
-- Not the true associativity of the original operator.
-- E.g. this will return Nothing for the last part of
-- a postfix operator, but a maybe for its first part.
associativity :: Part a -> Maybe (Associativity,Precedence)
associativity (First ap _ _ _) = ap
associativity (Last (Op1 _ _ _ opening p)) = case opening of
Postfix -> Nothing
Infix a -> Just (a,p)
Prefix -> Just (NonAssociative,p)
associativity (Last (Op2 _ _ _ _ _)) = Nothing
associativity (Lone _ _ Postfix p) = Just (NonAssociative,p)
associativity (Lone _ _ Prefix p) = Just (NonAssociative,p)
associativity (Lone _ _ (Infix a) p) = Just (a,p)
associativity (Middle _ _ _ _) = Nothing
-- | Return the possible tokens continuing the given part.
next :: Part a -> [a]
next (First _ _ r _) = r
next (Last _) = []
next (Lone _ _ _ _) = []
next (Middle _ _ r _) = r
-- | Return the tokens preceding the given part.
previous :: Part a -> [a]
previous (First _ _ _ _) = []
previous (Last (Op1 _ _ [] _ _)) = error "can't happen"
previous (Last (Op1 _ a [_] _ _)) = [a]
previous (Last (Op1 _ a xs _ _)) = a : map snd (init xs)
previous (Last (Op2 _ a [] _ _)) = [a]
previous (Last (Op2 _ a xs _ _)) = a : map snd xs
previous (Lone _ _ _ _) = []
previous (Middle l _ _ _) = l
-- | Return the tokens of the given part.
current :: Part a -> [a]
current (First _ s _ _) = [s]
current (Last (Op1 _ _ [] _ _)) = error "can't happen"
current (Last (Op1 _ x xs _ _)) = x : map snd xs
current (Last (Op2 _ a xs _ b)) = a : map snd xs ++ [b]
current (Lone _ s _ _) = [s]
current (Middle l s _ _) = l ++ [s]
continue :: Token a => Part a -> Part a -> Bool
continue x y = considers (previous x) (current y)
filterParts :: [Part a] -> ([Part a],[Part a],[Part a],[Part a])
filterParts pts = (filter isLone pts, filter isFirst pts,
filter isMiddle pts, filter isLast pts)
groupFirst :: Token a => [Part a] -> Either Ambiguity (Part a)
groupFirst [] = error "groupFirst: empty list"
groupFirst (First a' x s' k':pts) = go a' s' k' pts
where go a s k [] = Right $ First a x s k
go a s k (First a2 _ s2 k2:xs)
| a == a2 && k == k2 = go a (unionBy consider s s2) k xs
| a /= a2 = Left NotSameFirst
| k /= k2 = Left NotSameHole
go _ _ _ _ = error "groupFirst: not a First part"
groupFirst _ = error "groupFirst: not a First part"
groupMiddle :: Token a => [Part a] -> Maybe (Part a)
groupMiddle [] = error "groupMiddle: empty list"
groupMiddle (Middle ss' x s' k':pts) = go ss' s' k' pts
where go ss s k [] = Just $ Middle ss x s k
go ss s k (Middle ss2 _ s2 k2:xs)
| not (considers ss ss2) = error "groupMiddle: different prefix"
| k == k2 = go ss (unionBy consider s s2) k xs
go _ _ _ _ = Nothing -- ambiguous middle parts
groupMiddle _ = error "groupMiddle: not a Middle part"
groupLast :: [Part a] -> Part a
groupLast [] = error "groupLast: empty list"
groupLast [l@(Last _)] = l
groupLast _ = error "groupLast: not a Last part"
----------------------------------------------------------------------
-- Combinators to construct the operator table
----------------------------------------------------------------------
-- | Build an infix operator. The precedence is set to 0.
infx :: Associativity -> a -> Op a
infx a f = Op1 True f [] (Infix a) 0
-- | Build an infix operator with the keep property set to False.
-- The precedence is set to 0.
infx_ :: Associativity -> a -> Op a
infx_ a f = Op1 False f [] (Infix a) 0
-- | Build a prefix operator. The precedence is set to 0.
prefx :: a -> Op a
prefx f = Op1 True f [] Prefix 0
-- | Build a prefix operator with the keep property set to False.
-- The precedence is set to 0.
prefx_ :: a -> Op a
prefx_ f = Op1 False f [] Prefix 0
-- | Build a postfix operator. The precedence is set to 0.
postfx :: a -> Op a
postfx f = Op1 True f [] Postfix 0
-- | Build a postfix operator with the keep property set to False.
-- The precedence is set to 0.
postfx_ :: a -> Op a
postfx_ f = Op1 False f [] Postfix 0
-- | Build a closed operator. The precedence is set to 0.
closed :: a -> Hole -> a -> Op a
closed f = Op2 True f []
-- | Build a closed operator with the keep property set to False.
-- The precedence is set to 0.
closed_ :: a -> Hole -> a -> Op a
closed_ f = Op2 False f []
-- | Add a new part separated by an SExpression hole to the right
-- of an operator.
sexpr :: Op a -> a -> Op a
sexpr (Op1 keep x rest opening p) y =
Op1 keep x (rest++[(SExpression,y)]) opening p
sexpr (Op2 keep x rest k y) z =
Op2 keep x (rest++[(k,y)]) SExpression z
-- | Add a new part separated by a Distfix hole to the right
-- of an operator.
distfix :: Op a -> a -> Op a
distfix (Op1 keep x rest opening p) y =
Op1 keep x (rest++[(Distfix,y)]) opening p
distfix (Op2 keep x rest k y) z =
Op2 keep x (rest++[(k,y)]) Distfix z
----------------------------------------------------------------------
-- A few 'show' functions for SExpr, and Tree
----------------------------------------------------------------------
-- | Show an s-expression using nested angle brackets.
showSExpr :: Token a => SExpr a -> String
showSExpr = tail . f
where
f (Atom s) = ' ' : toString s
f (List []) = ' ' : "โจโฉ"
f (List es) = ' ' : 'โจ' : tail (concatMap f es) ++ "โฉ"
-- Similar to showSExpr but for a Tree.
showTree :: Token a => Tree a -> String
showTree = tail . f
where
f (Leaf s) = ' ' : toString s
f (Part y) = ' ' : concatMap toString (current y)
f (Branch []) = ' ' : "โจโฉ"
f (Branch es) = ' ' : 'โจ' : tail (concatMap f es) ++ "โฉ"
showPart :: Token a => Part a -> String
showPart = toString . symbol
|
/*Code by LDDLamNT
lanrefni;
using C++;
*/
#include <bits/stdc++.h>
using namespace std;
int main(){
    string t, s;
    int dem = 0, sol = 0;
    cin >> t >> s;
    // Pad the shorter string with '*' on the left until both have the same length.
    while (t.length() < s.length()) {
        t = "*" + t;
        dem++;
    }
    while (t.length() > s.length()) {
        s = "*" + s;
        dem++;
    }
    // Scan from the right for the first position where the padded strings differ.
    for (int i = s.length(); i >= 0; i--) {
        if (s[i] != t[i]) {
            sol = i + 1;
            break;
        }
    }
    cout << sol * 2 - dem;
}
|
#include "page1.h"
#include "ui_page1.h"
page1::page1(QWidget *parent) :
QWidget(parent),
ui(new Ui::page1)
{
ui->setupUi(this);
this->setFont();
}
page1::~page1()
{
delete ui;
}
void page1::setFont() {
font_label_main = new QFont();
font_label_total_value = new QFont();
font_label_main->setPixelSize(15);
font_label_main->setLetterSpacing(QFont::AbsoluteSpacing, 1.4);
font_label_main->setFamily("Chivo");
font_label_main->setBold(true);
ui->label_total->setFont(*font_label_main);
ui->label_immature->setFont(*font_label_main);
ui->label_available->setFont(*font_label_main);
ui->label_immature_value->setFont(*font_label_main);
ui->label_available_value->setFont(*font_label_main);
font_label_total_value->setPixelSize(114);
font_label_total_value->setLetterSpacing(QFont::AbsoluteSpacing, 3.8);
font_label_total_value->setFamily("Chapaza");
ui->label_total_value->setFont(*font_label_total_value);
}
|
Maroubra fire: traces of accelerant found after explosion at cafe, police say
Fire investigators have found traces of accelerant in a health food cafe which was destroyed by a blaze in Sydney's east, New South Wales Police say.
Around 50 people were evacuated from the multi-storey building on Anzac Parade at Maroubra when a loud explosion ripped through the ground floor cafe about 2:00am.
Drew McPherson was among those who heard it.
"It was pretty loud," he said.
"I was probably ... 400, 500 metres down the road and I could hear it pretty clearly.
"It's pretty full on. The shopfront's blown all over the front of the street."
Acting Superintendent Stacey Maloney said police were treating the fire as suspicious.
"At this stage there appears to have been an accelerant used," she said.
Superintendent Maloney said police were currently speaking to the owners of the cafe and that officers wanted to hear from anybody who had information about what triggered the blaze.
"Police are concerned about what's happened," she said.
"There were a number of people who were evacuated as a precautionary measure but police are making inquiries and the community doesn't need to be concerned in relation to this particular incident."
Fire and Rescue Superintendent Paul Johnson said no one was injured in the fire.
"Due to the time of morning, the restaurant itself wouldn't have been open," he said.
"It's going to take further investigations.
"We've handed the area over to police. We've contacted our own fire investigators and I think they'll be working with police in the morning to try to determine the cause."
About 30 firefighters brought the fire under control and it was extinguished.
Residents have since been allowed to return home.
def extractShapes(self, nodes):
    analyzedNodes = []
    for n in nodes:
        g = self.buildShape(n)
        if g:
            analyzedNodes.append(g)
    if self.options.doUniformization:
        analyzedNodes = self.uniformizeShapes(analyzedNodes)
    return analyzedNodes
export class Todo {
constructor(
public id: number,
public title: string) {
console.log('The list ' + title + ' has been created');
}
}
|
export default {
size: 7,
viewBox: { w: 4, h: 7 },
stroke: true,
content: (
<path
d="M3.5 0.5L0.5 3.5L3.5 6.5"
strokeLinecap="round"
strokeLinejoin="round"
/>
),
}
|
def _initialize_params(self):
p = self.simulation_params.p
self.intercept_ = self.simulation_params.intercept
if self.simulation_params.one_and_none:
self.beta = np.zeros(p)
self.beta[0] = self.simulation_params.gamma * np.sqrt(p)
else:
self.p_positive = int(p / 8)
self.p_negative = self.p_positive
self.p_zero = p - self.p_positive - self.p_negative
self.beta = 2 * np.concatenate((np.ones(
self.p_positive), -np.ones(self.p_negative), np.zeros(self.p_zero)))
self.beta *= self.simulation_params.gamma
if self.simulation_params.covariance == "isotropic":
self.diag = np.ones(p)
elif self.simulation_params.covariance == "elliptical":
self.diag = self.random_state.rand(p) + 0.5
self.diag /= self.diag[:(self.p_positive + self.p_negative)].mean()
self.diag[0] = 1
else:
raise NotImplementedError("No covariance {}".format(
self.simulation_params.covariance))
if self.simulation_params.uncentered:
self.centering = np.ones(p)
self.intercept_ -= self.beta.dot(self.centering)
else:
self.centering = 0 |
def merge_blocks(results_path, n_lines, n_terms):
    """Merge sorted SPIMI block files into final index files of at most n_terms entries each."""
    index = []
    files = []
    lines = []
    n_files = 0
    # Open every block file produced by the indexing phase; stop at the first missing one.
    while True:
        try:
            n_files += 1
            files.append(open(results_path + 'spimi_block_' + str(n_files - 1) + '.dat', 'r'))
        except IOError:
            n_files -= 1
            break
    for i in range(n_files):
        lines.append([])
    index_num = 0
    while True:
        # Refill each block's buffer with up to n_lines lines when it runs empty.
        for j in range(n_files):
            if len(lines[j]) == 0:
                for i in range(n_lines):
                    lines[j].append(files[j].readline())
        # Collect the lexicographically smallest pending term across all blocks.
        min_strs = []
        for j in range(n_files):
            if len(lines[j][0].split()) != 0:
                min_strs.append(lines[j][0].split()[0])
        if len(min_strs) == 0:
            break
        min_strs.sort()
        mins = min_strs[0]
        # Concatenate the postings of that term from every block that contains it.
        final_list = '' + mins
        for j in range(n_files):
            if len(lines[j][0].split()) > 0 and lines[j][0].split()[0] == mins:
                l = lines[j].pop(0)
                l = l.replace(mins, '')
                final_list += l[:-1]
        index.append(final_list)
        # Flush a full index file once n_terms merged entries have accumulated.
        if len(index) == n_terms:
            with open(results_path + 'spimi_index_' + str(index_num) + '.dat', 'w') as f:
                for string in index:
                    f.write(string + '\n')
            index = []
            index_num += 1
    # Write any remaining entries to a final index file.
    if len(index) != 0:
        with open(results_path + 'spimi_index_' + str(index_num) + '.dat', 'w') as f:
            for string in index:
                f.write(string + '\n')
def testSingleUnicodeKey(self):
config = dos.ReservoirConfig(
'always',
period=300,
samples=10000,
by_url=True)
sampler = dos.MultiSampler([config], gettime=self.fake_gettime)
reporter = dos.Reporter()
key = u'this-breaks-stuff\u30d6\u30ed\u30b0\u8846'
key_utf8 = key.encode('utf-8')
reporter.set(key, config)
self.gettime_results.extend([0, 10])
sampler.sample(reporter)
results = sampler.get(config)
self.assertEquals(1, results.total_samples)
self.assertEquals(1, results.unique_samples)
self.verify_sample(results, key_utf8, 1, 0.1) |
import { crc32 } from '../utils/crc32';
const NVS_BLOCK_SIZE: number = 32;
enum NvsType {
U8 = 0x01,
I8 = 0x11,
U16 = 0x02,
I16 = 0x12,
U32 = 0x04,
I32 = 0x14,
U64 = 0x08,
I64 = 0x18,
STR = 0x21,
BLOB = 0x42,
ANY = 0xff
}
export class NvsEntry implements NvsKeyValue {
namespace: number;
type: NvsType;
key: string;
data: string | number;
headerNamespace: Uint8Array;
headerType: Uint8Array;
headerSpan: Uint8Array;
headerChunkIndex: Uint8Array;
headerCRC32: Uint8Array;
headerKey: Uint8Array;
headerData: Uint8Array;
headerDataSize: Uint8Array;
headerDataCRC32: Uint8Array;
headerBuffer: Uint8Array;
dataBuffer: Uint8Array;
entriesNeeded: number = 0;
constructor(entry: NvsKeyValue) {
console.log('NEW ENTRY', entry);
this.namespace = entry.namespace;
this.type = entry.type;
this.key = entry.key += '\0';
this.data = entry.data;
if (entry.key.length > 16) {
throw Error(`NVS max key length is 15, received ${entry.key} of length ${entry.key.length}`);
}
this.headerBuffer = new Uint8Array(32);
this.headerNamespace = new Uint8Array(this.headerBuffer.buffer, 0, 1);
this.headerType = new Uint8Array(this.headerBuffer.buffer, 1, 1);
this.headerSpan = new Uint8Array(this.headerBuffer.buffer, 2, 1);
this.headerChunkIndex = new Uint8Array(this.headerBuffer.buffer, 3, 1).fill(0xFF);
this.headerCRC32 = new Uint8Array(this.headerBuffer.buffer, 4, 4);
this.headerKey = new Uint8Array(this.headerBuffer.buffer, 8, 16);
this.headerData = new Uint8Array(this.headerBuffer.buffer, 24, 8).fill(0xFF);
this.headerDataSize = new Uint8Array(this.headerBuffer.buffer, 24, 4);
this.headerDataCRC32 = new Uint8Array(this.headerBuffer.buffer, 28, 4);
this.dataBuffer = new Uint8Array(0);
this.setEntryData();
this.setEntryHeader();
this.setEntryHeaderCRC();
}
private setEntryHeader() {
const encoder = new TextEncoder()
this.headerNamespace.set([this.namespace]);
this.headerType.set([this.type]);
this.headerSpan.set([this.entriesNeeded]);
this.headerKey.set(encoder.encode(this.key));
}
private setEntryData() {
if (typeof this.data == "string") {
this.setStringEntry();
} else if (typeof this.data == "number") {
this.setPrimitiveEntry();
}
}
private setStringEntry() {
if (typeof this.data == "string") {
this.data += '\0'; // Adding null terminator.
const encoder = new TextEncoder()
const data = encoder.encode(this.data);
this.entriesNeeded = Math.ceil(data.length / NVS_BLOCK_SIZE);
this.dataBuffer = new Uint8Array(this.entriesNeeded * NVS_BLOCK_SIZE).fill(0xff);
this.dataBuffer.set(data);
this.entriesNeeded += 1; // +1 for header
const dataSizeBuffer: ArrayBuffer = new ArrayBuffer(2);
const dataSizeView: DataView = new DataView(dataSizeBuffer, 0, 2);
dataSizeView.setUint8(0, data.length);
this.headerDataSize.set(new Uint8Array(dataSizeBuffer), 0);
this.headerDataCRC32.set(crc32(data));
}
}
private setPrimitiveEntry() {
if (typeof this.data == "number") {
const dataBuffer: ArrayBuffer = new ArrayBuffer(8);
const dataBufferView: DataView = new DataView(dataBuffer, 0, 8);
const dataBufferArray: Uint8Array = new Uint8Array(dataBuffer).fill(0xFF);
switch (this.type) {
case NvsType.U8:
dataBufferView.setUint8(0, this.data);
break;
case NvsType.U16:
dataBufferView.setUint16(0, this.data);
break;
case NvsType.U32:
dataBufferView.setUint32(0, this.data);
break;
case NvsType.U64:
dataBufferView.setBigUint64(0, BigInt(this.data));
break;
case NvsType.I8:
dataBufferView.setInt8(0, this.data);
break;
case NvsType.I16:
dataBufferView.setInt16(0, this.data);
break;
case NvsType.I32:
dataBufferView.setInt32(0, this.data);
break;
case NvsType.I64:
dataBufferView.setBigInt64(0, BigInt(this.data));
break;
default:
dataBufferView.setUint32(0, this.data);
break;
}
this.headerData.set(dataBufferArray, 0);
}
this.entriesNeeded = 1;
}
private setEntryHeaderCRC() {
const crcData: Uint8Array = new Uint8Array(28);
crcData.set(this.headerBuffer.slice(0, 4), 0);
crcData.set(this.headerBuffer.slice(8, 32), 4);
this.headerCRC32.set(crc32(crcData));
}
} |
/**
 * This function is called when a Thing moves onto a Hole.
 * @param t The Thing that moves.
 * @return Whether the Thing has been accepted.
 */
@Override
public boolean accept(Thing t) {
    boolean accepted;
    if (open.booleanValue()) {
        LOGGER.log(Level.FINE, "Hole killed thing");
        t.destroy();
        accepted = true;
    } else {
        LOGGER.log(Level.FINE, "Hole closed, accepting thing as if it was a regular tile");
        accepted = super.accept(t);
    }
    LOGGER.log(Level.FINE, "Hole accepted:{0}", accepted);
    return accepted;
}
/**
* Splits the labelToBeLinked in ngrams up to infinite size and tries to link components.
* This corresponds to a MAXGRAM_LEFT_TO_RIGHT_TOKENIZER or NGRAM_LEFT_TO_RIGHT_TOKENIZER OneToManyLinkingStrategy.
*
* @param labelToBeLinked The label that shall be linked.
* @param language The language of the label.
* @return A set of concept URIs that were found.
*/
private HashSet<String> linkLabelToTokensLeftToRight(String labelToBeLinked, Language language) {
LeftToRightTokenizer tokenizer;
String[] tokens = StringOperations.tokenizeBestGuess(labelToBeLinked);
tokenizer = new MaxGramLeftToRightTokenizer(tokens, " ");
HashSet<String> result = new HashSet<>();
String token = tokenizer.getInitialToken();
while (token != null) {
String resultingConcept = linkToSingleConcept(token, language);
if (resultingConcept == null || resultingConcept.length() == 0) {
token = tokenizer.getNextTokenNotSuccessful();
} else {
result.add(resultingConcept);
token = tokenizer.getNextTokenSuccessful();
}
}
return result;
} |
From Data Chaos to the Visualization Cosmos
Data visualization is a general term that describes any effort to help people enhance their understanding of data by placing it in a visual context. We present a ubiquitous pattern of knowledge evolution that the collective digital society is experiencing. It starts with a challenge or goal in the real world. When implementing a real-world solution, we often run into barriers. Creating a digital solution to an analogue problem creates massive amounts of data. Visualization is a key technology for extracting meaning from large data sets.
Introduction
Data visualization is a general term that describes any effort to help people enhance their understanding of data by placing it in a visual context. Patterns, trends, and correlations that might go undetected in numeric or text-based data can be exposed and recognized more easily with data visualization software. Figure 1 represents a ubiquitous pattern of knowledge evolution that the collective digital society is experiencing. It consists of six basic constituents. It starts with a challenge or goal in the real world. The goal could be to build or optimize a design, like a car or computer. The start could be a challenge such as reaching a new level of understanding or observing a behavior or phenomenon rarely or never seen previously. The goal could be running a successful business and making a profit. We all have real-world goals and challenges. We all have new understanding and knowledge we would like to obtain. We all have things we would like to build, create, and optimize.
When trying to build something, we generally know that whatever it is, it can theoretically be built in the real world. For example, cars and structures can be built out of raw materials and components with the right tools. We also know that observations can be made, in general, by being in the right place at the right time, either personally or with recording equipment. Experiments can generally be conducted with the appropriate equipment. New levels of understanding can generally be obtained if we hire enough of the right people.
However, when implementing a real-world solution, we often run into barriers. Cars and structures are extremely expensive to build and may also require a long-term investment. Observations may be very expensive, very difficult, or even impossible. Some observations interfere with the very behavior or phenomena they are trying to study. Recording equipment may be too expensive or cause logistical problems. Equipment for experiments is generally very expensive. This is especially true if the equipment is specialized or intended for very small or very large-scale investigations. Also, hiring people for new understanding may not be feasible due to expense. A full-time research assistant costs 100K GBP per year under current funding agency full economic costing (FEC) requirements in the UK. Real-world solutions are generally very expensive or not feasible at all. Some real-world solutions are impossible.
It is because of the high cost of real-world solutions that collectively, as a society, we turn to digital solutions to address our challenges and goals. The dotted line in Figure 1 separates the real, physical, or analogue world on the left side from the digital world on the right. We all look to the digital world for the answers to our questions. "There must be an app for that." or "What app can be built to solve this problem?" is the collective thinking in this day and age. Society looks towards digital solutions for real-world problems to deliver the user from the dilemma they may face. People believe that software is less expensive to build than objects in the real world. The virtual world should be more feasible than the physical or analogue world. And this is true in many scenarios.
However, creating a digital solution to an analogue problem introduces new challenges. In particular, digital solutions, including software, create massive amounts of data. The amount of data digital approaches generate is generally unbounded. Software and storage hardware become less and less expensive with time. Thus, users collect, collect, and collect even more data. This is the point at which the knowledge evolution pipeline of Figure 1 becomes interesting. Large collections of complex data are not automatically useful. Extracting meaningful information, knowledge, and ultimately wisdom from large data collections is the main challenge facing the digital world today. The collection of essentially unbounded data is what we term data chaos. Collecting and archiving data without careful planning and well thought out information design quickly or slowly results in a chaotic data environment. Those who collect data are generally not yet aware of how difficult it is to then derive useful insight and knowledge from it.
On the other hand, the knowledge that visualization is a key technology for extracting meaning from large data sets is rapidly spreading. This is one solution to the data chaos. In the early years of data visualization as a field, say the first 10 years, from 1987 to 1997, data visualization was considered very niche. Not many people knew about it or even of its existence. It is only since around the turn of the century that word started to spread. In the 2000s the first mainstream news stories including the phrase 'Data Visualization' were published. Nowadays, the field has come a long way from obscurity to breaking into the mainstream. Its presence and importance as a field are starting to become understood. Word is spreading that a data visualization community exists and that this is a topic a student can study at university.
That's the basic pattern of knowledge evolution. The rest of the chapter provides concrete examples of these six stages from real-world challenges to the visualization cosmos. The focus is on the last two stages: from data chaos to the visualization cosmos.
The universal big data story (and quandary)
We can find this pattern everywhere. It does not matter where we look. We can see it in computational fluid dynamics. Physicists and astronomers are facing these dilemmas: it is not possible to study all the stars and black holes physically. We see this pattern with marine biologists, biochemists, psychologists, sociologists, sport scientists, journalists, and those studying the humanities. We see this evolution with government councils, banks, call centers, retail websites, and transportation. The list is virtually endless. You can experience this yourself as you collect your own photos. People like to collect things. This is another contributing factor to the data chaos. A person may not even have a goal to reach or a problem they are trying to solve. They just like to collect.
The visual cortex
Data visualization uses computer graphics to generate images of complex data sets. It is different from computer graphics. "Computer graphics is a branch of computer science, yes, but its appeal reaches far beyond that relatively specialized field. In its short lifetime, computer graphics has attracted some of the most creative people in the world to its fold," from the classic textbook "Introduction to Computer Graphics" by Foley et al. Visualization tries to generate images of reality. Visualization exploits our powerful visual system. We have several billion neurons dedicated to visual processing and the visual cortex. See Figure 2.
The numbers of neurons are not very meaningful unless we put them into context. We have 8% of the cortex dedicated to touch and 3% dedicated to hearing. We have anywhere from 4 to 10 times as much of our cortex dedicated to visual processing as to the other senses. It therefore makes sense to exploit the visual processing power in our brains as opposed to the other senses. It is dedicated to processing color, motion, texture, and shape.
Visualization goals
Data visualization has its own strengths and goals. One of the goals of data visualization is exploring data. This may be the case when the user does not know anything about their data set; they just want to find out what it looks like and what its characteristics are.
Users search for trends or patterns in the data. Exploration is for the user that's not very familiar with the dataset. Visualization is also good for analysis: to confirm or refute a hypothesis. An expert may have collected the data for a special purpose and would like to confirm or refute a hypothesis or answer a specific question. Visualization is also effective for presentation.
When our exploration and analysis are finished, we can present the results to a wider audience. Visualization is also good for acceleration, i.e., to speed up a process such as search. This is often a decision-making or knowledge discovery process. We can see things that were otherwise impossible to see.
Example: visualization of call center data
Let's look at a first example of this pattern of knowledge evolution, from a business context. One of Swansea University's industry collaborators is QPC Ltd., an innovator in call center technology. Their goal is to understand call center behavior: the calls and all the activities that occur inside a call center. Call centers are staffed with many agents, and the agents answer hundreds of thousands of calls every day. How can we increase our understanding of all those events and what is happening inside a call center?
We could theoretically go down the analogue or physical route. We could hire more people to stand and observe what is happening in the call center and attempt to take notes to enhance understanding. Or maybe CCTV could be used to try to film everything that is going on. These analogue solutions would be very expensive and not very practical. The analogue solution of hiring more people just for observation is not practically feasible and would cost too much money.
So QPC Ltd. chose the digital solution. They decided to implement an event database. The database logs all events in the call center: who called, when they called, how much time they spent navigating menus inside the interactive voice recognition system (IVR), how long they spent in the queue before speaking to an agent, whether or not they abandoned their call, which agent they spoke to, how long they spoke to each agent, and so on. That digital solution, in the form of a database, stores millions of events every day. A call center generates lots of activity. The UK employs over a million people in call centers, or about 5% of its workforce. It is a large market.
How do we take the chaos of call center data and visualize it to make sense of it? We can use a treemap as one way to visualize call center events. See Figure 3. The treemap visualizes hierarchical data. We start with an overview of the data and then zoom down to different levels of detail. In this case, the size of the rectangles is initially mapped to call volume. The hours run from midnight to midnight. We can see when the call center opens and when the call volume increases and reaches its maximum at around lunchtime. Then it starts to descend again.
Color is mapped to the percentage of abandoned calls by default. Call centers try to avoid abandoned calls, yet we can observe a big increase in abandoned calls in the evening, right after dinner, around 7 pm-8 pm. The user can instead map color to other attributes, such as call cost, or to different kinds of events, for example abandoned or successful calls.
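As a rough illustration of how such a treemap could be assembled from raw call events, the following Python sketch uses plotly's treemap on a tiny made-up event table. The column names (hour, ten_minute, call_id, duration, abandoned) and the numbers are placeholders, not the schema or data of the QPC event database.

import pandas as pd
import plotly.express as px

# Tiny made-up sample of call events; real data would come from the event database.
calls = pd.DataFrame({
    "hour":       ["09", "09", "09", "13", "13", "19"],
    "ten_minute": ["09:00", "09:00", "09:10", "13:20", "13:20", "19:30"],
    "call_id":    ["c1", "c2", "c3", "c4", "c5", "c6"],
    "duration":   [120, 340, 45, 610, 95, 30],   # seconds spent on the call
    "abandoned":  [0, 0, 1, 0, 0, 1],            # 1 = caller hung up before an agent answered
})

# Hierarchy: hour -> 10-minute block -> individual call.
# Rectangle size is mapped to call duration and color to abandonment,
# loosely mirroring the volume/abandonment mappings described above.
fig = px.treemap(calls, path=["hour", "ten_minute", "call_id"],
                 values="duration", color="abandoned")
fig.show()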
We can also navigate the treemap. We can zoom in smoothly and see more detail. We can zoom in to a single hour, where each rectangle represents a single call. We can visualize individual calls and how long they take. There is one call that lasted 2 h. Unusually long calls jump right out; probably a very dedicated agent spent a long time trying to solve a customer's problem. The user can use a clock interface to smoothly zoom into and navigate each hour. The software features smooth zooming and panning and, with the clock showing, the user does not get lost.
We can easily see which hours we are observing even when we zoom in. We can zoom in even further: one hour is broken up into 10-minute intervals, and those 10-minute intervals are broken up into single-minute intervals. We also see a standard histogram on the left, which represents the data and provides an overview. Each bar represents a 10-minute interval. Color is mapped to a data attribute chosen by the user, in this case the average call length, as shown in Figure 3. We can see that the average call length increases suddenly during the evening, and that it increases throughout the day as an overall trend. The treemap features a fine level of detail. Each rectangle can represent a single phone call and, in this case, how long each call lasted. At the top level the rectangles are not individual calls: each rectangle represents an hour, each hour is broken up into six 10-minute blocks, and each 10-minute block is broken up into individual minutes. This is an exciting project because it is the first time that QPC Ltd. have ever seen an overview of call center activity in any way, shape, or form. As soon as we see the overview we can easily make observations about call volume and the increasing level of abandoned calls. The average call length also increases as we examine the day.
We can filter calls using different sliders. This is the analytical part of the process and an example of focus-and-context visualization. See Figure 4. Here we focus on the calls that spend a longer time in the queue.
We can also focus on inbound calls, because call centers handle both inbound and outbound calls, or filter by completed calls, and the filters can be combined in different ways.
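A minimal sketch of such combinable filters, assuming an illustrative record layout rather than QPC's actual schema:

# Composable call filters in the spirit of the sliders above; the record
# fields are illustrative, not QPC's actual schema.
calls = [
    {"direction": "inbound", "queue_seconds": 310, "completed": True},
    {"direction": "outbound", "queue_seconds": 0, "completed": True},
    {"direction": "inbound", "queue_seconds": 45, "completed": False},
]

def long_queue(min_seconds):
    return lambda call: call["queue_seconds"] >= min_seconds

def inbound(call):
    return call["direction"] == "inbound"

def completed(call):
    return call["completed"]

def apply_filters(records, *predicates):
    return [r for r in records if all(p(r) for p in predicates)]

focus = apply_filters(calls, inbound, completed, long_queue(300))
print(len(focus))  # 1: only the first call passes every filter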
We can click on an individual call and obtain the most detailed level of information: how much time the caller spent in the IVR navigating menus, how much time they spent queuing, and how much time they spent talking to agents. One call shows two different queuing events, an agent event, a second agent event, back in the queue, back to another agent, and back into the queue again. That is a complicated phone call, and that is the lowest level of detail. We can also see the type of call, in this case a consult call, as well as the number of events: one IVR event, four queuing events, and four different agent events.
One detailed view shows each event as a unit proportion, because events that are too short can disappear in the traditional, duration-proportional view.
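A small sketch of the two proportioning schemes, with an invented event list:

# Duration-proportional versus unit-proportional views of one call's events.
# The event list is invented.
call_events = [("IVR", 42), ("queue", 180), ("agent", 312),
               ("queue", 95), ("agent", 260)]  # (event type, seconds)

total = sum(seconds for _, seconds in call_events)
proportional = [(kind, seconds / total) for kind, seconds in call_events]
unit = [(kind, 1.0 / len(call_events)) for kind, _ in call_events]

print(proportional[0])  # ('IVR', 0.047...): may be too thin to see when drawn
print(unit[0])          # ('IVR', 0.2): every event gets equal width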
Example: investigating swirl and tumble flow with a comparison of techniques
Computational fluid dynamics is the engineering discipline of trying to predict fluid flow behavior: fluid motion as it interacts with geometries like cars, ships, or airplanes. If we want to understand how fluid will interact with a surface, one way to do this is to build the actual surface, build a flow environment, and visualize the flow with smoke, dye, or other substances. This is something fluid engineers do; it is a field of engineering. But it is very expensive. Consider a test flight in which we attempt to visualize the air flow around the wings with smoke: a very expensive experiment. Those are the analog solutions. Can we come up with a digital solution that makes this investigation more feasible, accelerates the engineering, and makes it less expensive? That is the inspiration behind computational fluid dynamics (CFD).
Here is an example of a combustion chamber in an automobile engine (Figure 5). The engineers' goal is to obtain a perfect fuel-to-air mixture. The way they propose to do that is to create a helical motion inside the combustion chamber (Figure 5, left); for the diesel engine example, the ideal flow pattern for mixing is a tumble motion about an imaginary axis pointing out of the page (Figure 5, right).
We could build real physical prototypes, but it saves time and money to go through a digital process first, so that fewer real prototypes are needed. The digital solution is computational fluid dynamics. In CFD the number one challenge is the amount of data that simulations generate, at the gigabyte and terabyte scale, and CFD simulations run for weeks or even months, even on high-performance computing machines. How can we use visualization to make sense of this massive amount of CFD data?
Let's look at some data visualization solutions for CFD data, visualizing the swirl and tumble motion. See Figure 6. This is the tumble motion example: the curves are pathlines, or short pathlines, in the flow direction, and color is mapped to crank angle. The piston head moves up and down a thousand cycles per minute (at the bottom, not shown).
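A pathline is obtained by advecting a seed point through the time-dependent velocity field. The sketch below uses a synthetic swirling field and simple explicit Euler steps; a real CFD tool would integrate through the simulation's own field with a higher-order scheme:

# Advect a seed point through a time-dependent 2D velocity field with explicit
# Euler steps. The swirling field is synthetic, standing in for CFD output.
def velocity(x, y, t):
    strength = 1.0 + 0.5 * t          # swirl strength grows with time
    return -y * strength, x * strength

def trace_pathline(x, y, t0, dt, steps):
    points = [(x, y)]
    t = t0
    for _ in range(steps):
        vx, vy = velocity(x, y, t)
        x, y, t = x + vx * dt, y + vy * dt, t + dt
        points.append((x, y))
    return points

pathline = trace_pathline(1.0, 0.0, t0=0.0, dt=0.01, steps=200)
print(pathline[0], pathline[-1])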
We can also use vortex core lines, the green paths in Figure 7, which are the centers of swirling flow. When combined with particles, the particles show the flow behavior around the vortex cores. This is what we call feature-based flow visualization: looking for special features in the flow. We can also visualize the flow at the surface itself using so-called critical points, such as sinks and saddle points, together with curves that connect the different points. Those curves are a special kind of streamline called separatrices, and they show the topology of the flow.
Topology is a skeletal representation of the flow. We can see the time-dependent topology of the flow: the sinks, the sources, and the vortex core lines. The vortex core lines are the tubes in the middle. We also see separatrices on the boundaries of the surface, animated over time. That animation is a slow-motion version of time, slowed down quite a lot; in reality this is inside the engine and it is moving up and down hundreds of times per minute. We can also use a volume visualization of the fluid flow, specifically for the vortices, the areas of swirling motion. Red is mapped to one direction of circular flow and blue to the other.
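The red/blue classification can be thought of as the sign of the local vorticity. The sketch below computes that sign on a small 2D slice with central differences, using a synthetic solid-body rotation instead of real simulation output:

# Sign of the vorticity (dv/dx - du/dy) on a 2D slice decides red versus blue.
# The velocity field is a synthetic solid-body rotation, not simulation output.
def vorticity_sign(u, v, dx=1.0, dy=1.0):
    rows, cols = len(u), len(u[0])
    signs = [[0] * cols for _ in range(rows)]
    for i in range(1, rows - 1):
        for j in range(1, cols - 1):
            dvdx = (v[i][j + 1] - v[i][j - 1]) / (2 * dx)
            dudy = (u[i + 1][j] - u[i - 1][j]) / (2 * dy)
            w = dvdx - dudy
            signs[i][j] = (w > 0) - (w < 0)
    return signs

n, c = 5, 2
u = [[-(i - c) for j in range(n)] for i in range(n)]   # u = -(y - c)
v = [[(j - c) for j in range(n)] for i in range(n)]    # v =  (x - c)
print(vorticity_sign(u, v)[2][2])  # 1: counter-clockwise swirl everywhere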
The idea is to visualize the swirl and tumble motion. In this case the tumble motion is about an imaginary axis that points out at the viewer, just like a tumble dryer. We can observe an axis pointing out and downwards to the left of the ideal axis; it is a very unstable axis of rotation. That is what these visualizations show: an unstable rotational axis.
The computational fluid dynamicists see this and observe that it is not the ideal tumble motion. There is a little bit of tumble motion right around the perimeter of the geometry, but as soon as we look in the center we still see some swirling motion that is very far from the ideal tumble motion they strive for. They have to make some modifications to the geometry to try to realize the best mixing possible. The other case is also not the ideal swirl motion: the motion is off-center, so again they have not achieved their target. That is what these visualizations show: the difference between the actual motion and the ideal motion they are aiming for.
One of the things the engineers would like to know is where precisely the flow is misbehaving. They know what they want to see and what they expect to see, and they like visualizations that highlight unwanted behavior. That is what all users want to see; in fact, it could be placed in the knowledge evolution pipeline. One of the things QPC would like to see is where the abandoned calls are and when callers are not behaving as expected. Here the engineers can see where the flow does not behave properly. This is one of the strengths of visualization: to show when and where behavior goes wrong.
Example: visualization of sensor data from animal movement
The next example is from marine biology. Marine biologists would like to understand marine wildlife and how it behaves. One of the challenges they face is deep underwater diving: how do you study animals that dive deep underwater for hours or even days at a time? Theoretically the solution might be to follow the animal, but there are problems with that. People cannot simply dive a few kilometers under the water. They could try to build submarines or similar, but following a cormorant or a turtle in a submarine is not practical. It is not feasible, it is very expensive, and this analog solution is one of those cases where the observation itself influences the behavior we are trying to study.
Marine biologists therefore look to the digital world for a solution. They use sensor devices developed at Swansea University called daily diaries. They capture an animal, such as a cormorant, attach one or more digital sensors to the subject, and then release it. See Figure 8.
They recapture the sensor a few hours or a few days later, remove it from the animal, and study the information it has collected about the local environment: local acceleration, local temperature, pressure, ultraviolet light, and a few other properties. Another challenge is that GPS does not work underwater at great depths, so it is not possible to simply plot a path naively in a dead-reckoning fashion the way we can for land animals. When the user gets this data, this is what it looks like (see Figure 8, right), and this is only a tiny piece of it. They plot, for every attribute, magnitude versus time: acceleration magnitude is on the y-axis and time is on the x-axis. They claim they can infer animal behavior from these wave patterns; they can look at a wave pattern and say that it looks like the animal is diving, or that the animal is hunting.
But you can see that this is not easy. This is only a few seconds of data; a full day's worth of data plotted in this fashion would wrap around a building a few times. The acceleration has three components, x, y, and z. Here the three components are plotted separately, but in reality they form a vector in 3-space.
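The contrast between the flat magnitude-versus-time plot and the orientation information carried by the full 3-vector can be sketched as follows; the accelerometer samples are invented:

# A flat magnitude plot discards orientation; keeping the unit 3-vector is
# what the spherical view preserves. Accelerometer samples are invented.
import math

samples = [(0.1, 0.0, 0.98), (0.4, 0.1, 0.90), (0.7, 0.2, 0.70)]  # (x, y, z) in g

for x, y, z in samples:
    magnitude = math.sqrt(x * x + y * y + z * z)
    unit = tuple(round(c / magnitude, 3) for c in (x, y, z))  # point on the unit sphere
    print(round(magnitude, 3), unit)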
The marine biologists asked us if we can derive visualizations that facilitate the understanding of marine wildlife behavior. We have a standard visualization coupled with a new visualization (see Figure 9). In the new visual design we can immediately see the geometry of the animal and how it is oriented. What Grundy et al. did was reintegrate the x, y, z components of the acceleration and plot them in spherical space rather than time-versus-amplitude space. They map the unit vectors onto a sphere and can immediately infer animal behavior, and they can also map pressure to the radius. Figure 9 shows the animal swimming at the surface and then the pressure increasing: pressure mapped to radius represents diving behavior, and the diving behavior is very easy to notice. Once the data is visualized in spherical space we can observe swimming, hunting, and searching behavior. The spherical space is interactive, so we can rotate, zoom, and pan to different angles. Figure 10 presents a spherical histogram: the vectors are binned into unit rectangles, and the more time an animal spends in a given posture at that orientation, the larger the corresponding bin.
Figure 11. Utilizing data clustering methods of sensor data. Image courtesy of Grundy et al.
Figure 12. Visualization of molecular dynamics simulation data. Image courtesy of Alharbi et al.
Grundy et al. can also cluster the vectors into different groups (see Figure 11), assigning each data point to a group that represents some interesting aspect of the animal's behavior. The user can adjust the probability of any data sample belonging to one of the clusters. These are clusters of animal postures calculated using K-means clustering. Grundy et al. represent the clusters as spheres and then connect the spheres, that is, the postures, with edges that represent transitions from one orientation to another. We can observe the transitions between the various states and postures and see the most popular or dominant states; that information pops out immediately.
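The transition-graph part of this idea can be sketched directly: given posture labels per time step (hand-assigned here rather than produced by K-means), count dwell times and transitions.

# Turn a per-time-step posture labelling (hand-assigned here, K-means in the
# real system) into dwell times and posture-to-posture transition counts.
from collections import Counter

postures = ["surface", "surface", "dive", "dive", "hunt", "dive", "surface", "surface"]

dwell = Counter(postures)
transitions = Counter(
    (a, b) for a, b in zip(postures, postures[1:]) if a != b
)

print(dwell)        # sphere sizes: time spent in each posture
print(transitions)  # edge weights: how often one posture follows another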
Example: visualization of molecular dynamics simulation data
The goal here is to understand biology at the molecular level. There are analog approaches to this challenge: biologists run experiments at the molecular level and try to understand the behavior of molecules using, for example, nuclear magnetic resonance spectroscopy. These machines and experiments are very expensive.
The whole field of computational biology attempts to address this challenge in the digital world, because it is much less expensive than the analog world. As with any simulation, the experts generate massive amounts of data and rely on the latest high-performance computing machines. The simulation data here shows the interaction of lipids and proteins; see Figure 12. Alharbi et al. developed visualization software to enhance the understanding of this data: the holes are proteins and the paths are lipid trajectories (Figure 12). The computational biologists attempt to visualize the interaction between the trajectories and the proteins.
Alharbi et al. are trying to develop visualizations that help computational biologists understand the data, with a special focus, in this case, on path filtering. Given the massive number of trajectories, hundreds of thousands or millions of them over multiple time steps, is it possible to select a subset of those trajectories based on interesting properties that help the biologists understand the behavior? Alharbi et al. develop tools for filtering and selecting these trajectories. One example is simply changing the time step of the simulation or filtering the paths by their length: the user can focus on shorter paths, or slide the filter over to the longer trajectories.
The user can also filter the paths based on other characteristics. Alharbi et al. chose a few properties that they hope will be interesting for the computational biologists; one of them is curvature, which highlights highly curved paths.
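Filtering by simple geometric properties can be sketched as below; the trajectories are synthetic 2D point lists, and the length and turning-angle measures stand in for whatever metrics Alharbi et al. actually use:

# Filter synthetic 2D trajectories by total length and by a rough curvature
# measure (sum of absolute turning angles between consecutive segments).
import math

def path_length(points):
    return sum(math.dist(a, b) for a, b in zip(points, points[1:]))

def total_turning(points):
    turning = 0.0
    for (x0, y0), (x1, y1), (x2, y2) in zip(points, points[1:], points[2:]):
        a1 = math.atan2(y1 - y0, x1 - x0)
        a2 = math.atan2(y2 - y1, x2 - x1)
        turning += abs(math.atan2(math.sin(a2 - a1), math.cos(a2 - a1)))
    return turning

trajectories = [
    [(0, 0), (1, 0), (2, 0)],                   # short and straight
    [(0, 0), (1, 1), (0, 2), (-1, 1), (0, 0)],  # longer and highly curved
]

long_and_curved = [t for t in trajectories
                   if path_length(t) > 3.0 and total_turning(t) > math.pi / 2]
print(len(long_and_curved))  # 1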
The atom trajectories are actually three-dimensional, but they are confined to a thin layer, analogous to the biosphere, such that the z dimension is relatively small compared to the x and y dimensions. They can be visualized in projected 2D space or in volumetric 3-space, and the user can experiment with 2D versus 3D. The standard visualization packages for this kind of data are constrained to a two-dimensional plane and are generally not interactive.
Conclusion
This chapter presents a ubiquitous model of knowledge evolution witnessed at a collective level by a society deeply involved with the digital world. It presents a theory supported by a number of case studies ranging from the call center industry to automotive engineering to computational biology. It sets the stage for data visualization as a vital technology for evolving our understanding of data, and of the world it describes, to the next level. It will be exciting to witness how this model and pattern evolve over time.
Author details
Chao Tong* and Robert S. Laramee
Visual and Interactive Computing Group, Swansea University, Swansea, UK
*Address all correspondence to: [email protected]
// src/user/user.dto.ts
import {IsNotEmpty, IsEmail, IsString, MinLength, MaxLength, Matches} from 'class-validator';
import {IdeaEntity} from '../idea/idea.entity';
export class UserDTO {
@IsNotEmpty()
@IsString()
@MinLength(3)
@MaxLength(11)
username: string;
@IsNotEmpty()
@IsString()
@MinLength(3)
@MaxLength(11)
  @Matches(/((?=.*\d)|(?=.*\W+))(?![.\n])(?=.*[A-Z])(?=.*[a-z]).*$/, {message: 'Password too weak'})
password: string;
}
// tslint:disable-next-line:max-classes-per-file
export class UserDTOFull extends UserDTO {
@IsNotEmpty()
@IsEmail()
email: string;
}
// tslint:disable-next-line:max-classes-per-file
export class UserRO {
id: string;
username: string;
email: string;
created: Date;
token?: string;
bookmarks?: IdeaEntity[];
password?: string;
}
|
# Reads the sequence length S and an index k, then the sequence s itself.
# Every element from position k (1-indexed) to the end must equal s[k-1];
# if any of them differs, print -1. Otherwise print the largest position
# before k whose value differs from s[k-1], or 0 if the whole sequence
# already matches.
S, k = map(int, raw_input().split())
s = map(int, raw_input().split())
chk = s[k - 1]
flag = 0
for i in range(k, len(s)):
    if s[i] != chk:
        flag = 1
        print -1
        break
if flag == 0:
    for i in range(k - 2, -1, -1):
        if s[i] != chk:
            print i + 1
            flag = 1
            break
if flag == 0:
    print 0
|
<filename>spring-core/src/main/java/configurations/ProfiledConfigurations.java
package configurations;
import org.springframework.context.annotation.*;
import profile.SimpleString;
/**
* Created by krishan on 8/16/15.
*/
@Configuration
@ComponentScan("profile")
@ImportResource("classpath:profile-application-context.xml") // no space after or before colon
public class ProfiledConfigurations {
@Bean
@Profile("prod")
SimpleString prodSimpleString(){
return new SimpleString("prod simple string");
}
}
|
//
// CVSBrushPickerViewController.h
// DrawQuest
//
// Created by <NAME> on 9/13/13.
// Copyright (c) 2013 Canvas. All rights reserved.
//
#import "DQViewController.h"
#import "CVSDrawingTypes.h"
#import "CVSBrushView.h"
#import "CVSBrushesViewCell.h"
extern const CGFloat kCVSBrushPickerViewControllerDesiredHeight;
extern const CGFloat kCVSBrushPickerViewControllerDesiredOffsetFromBottom;
@class CVSBrushPickerViewController;
@protocol CVSBrushPickerViewControllerDelegate <NSObject>
- (NSArray *)ownedBrushesForBrushPickerViewController:(CVSBrushPickerViewController *)vc;
- (NSArray *)globalBrushesForBrushPickerViewController:(CVSBrushPickerViewController *)vc;
@end
@interface CVSBrushPickerViewController : UIViewController <UICollectionViewDataSource, UICollectionViewDelegate, UICollectionViewDelegateFlowLayout>
@property (nonatomic, strong) UIColor *activeColor;
@property (nonatomic, strong) NSLayoutConstraint *bottomConstraint;
@property (nonatomic, weak, readonly) UICollectionView *collectionView;
@property (nonatomic, readonly, assign, getter = isHidden) BOOL hidden;
@property (nonatomic, readonly, assign, getter = isStowed) BOOL stowed;
@property (nonatomic, readonly) CVSBrushType selectedBrush;
@property (nonatomic, weak) CVSBrushesViewCell *activeBrushCell;
@property (nonatomic, weak) UICollectionViewFlowLayout *flowLayout;
@property (nonatomic, copy) void(^brushSelectedBlock)(CVSBrushPickerViewController *vc, CVSBrushType brushType);
@property (nonatomic, copy) void(^lockedBrushTappedBlock)(CVSBrushPickerViewController *vc, CVSBrushType brushType);
- (id)initWithDelegate:(id<CVSBrushPickerViewControllerDelegate>)delegate;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil MSDesignatedInitializer(initWithDelegate:);
- (void)setHidden:(BOOL)hidden withDuration:(CGFloat)duration;
- (void)setStowed:(BOOL)stowed withDuration:(CGFloat)duration distance:(CGFloat)distance;
- (void)deselectAll;
- (void)setStowed:(BOOL)stowed;
- (CVSBrushType)brushTypeForIndexPath:(NSIndexPath *)indexPath;
- (void)updateOwnedBrushes;
- (CGFloat)widthOfBrushes;
- (NSInteger)numberOfBrushes;
@end
|
<gh_stars>0
package sonya
import (
"encoding/json"
"time"
)
type gwPayload struct {
Opcode int `json:"op"`
DataRaw json.RawMessage `json:"d"`
SequenceNumber *int `json:"s,omitempty"`
Type *string `json:"t,omitempty"`
Data interface{} `json:"-"`
}
type plHello struct {
HeartbeatInterval time.Duration `json:"heartbeat_interval"`
}
func (p *plHello) UnmarshalJSON(data []byte) error {
j := &struct {
HeartbeatInterval int64 `json:"heartbeat_interval"`
}{}
if err := json.Unmarshal(data, &j); err != nil {
return err
}
p.HeartbeatInterval = time.Duration(j.HeartbeatInterval) * time.Millisecond
return nil
}
type plIdentify struct {
Token string `json:"token"`
Intents int `json:"intents"`
Properties struct {
OS string `json:"$os"`
Browser string `json:"$browser"`
Device string `json:"$device"`
} `json:"properties"`
}
type plHeartbeat struct {
}
type plHeartbeatACK struct {
}
|
/**
* Tests the {@link ShowInstructionInfoPlugin} class.
*
*
*/
public class ShowInstructionInfoPluginTest extends AbstractGhidraHeadedIntegrationTest {
private static final String startAddressString = "1000000";
private static final String beyondAddressString = "100000a";
private static final byte[] BYTES =
new byte[] { (byte) 0xff, 0x15, 0x10, 0x32, 0x00, 0x01, (byte) 0xff, 0x75, 0x14, 0x5f };
private TestEnv env;
private PluginTool tool;
private ProgramBuilder builder;
private Program program;
private ShowInstructionInfoPlugin plugin;
private CodeBrowserPlugin cb;
@Before
public void setUp() throws Exception {
env = new TestEnv();
tool = env.getTool();
tool.addPlugin(CodeBrowserPlugin.class.getName());
tool.addPlugin(DisassemblerPlugin.class.getName());
tool.addPlugin(ShowInstructionInfoPlugin.class.getName());
plugin = env.getPlugin(ShowInstructionInfoPlugin.class);
cb = env.getPlugin(CodeBrowserPlugin.class);
env.showTool();
builder = new ProgramBuilder("test", ProgramBuilder._X86);
builder.createMemory(".text", startAddressString, 0x1000);
builder.setBytes(startAddressString, BYTES);
builder.disassemble(startAddressString, BYTES.length);
program = builder.getProgram();
ProgramManager pm = tool.getService(ProgramManager.class);
pm.openProgram(program.getDomainFile());
}
@After
public void tearDown() throws Exception {
env.dispose();
}
@Test
public void testGetProcessorManualEntry() throws Exception {
changeLocationToAddress(beyondAddressString);
ManualEntry manualEntry = plugin.locateManualEntry(null, null);
assertNull(manualEntry);
ListingActionContext context = getCurrentContext();
Instruction currentInstruction = plugin.getInstructionForContext(context);
assertNull("The current Instruction is not null as expected", currentInstruction);
// now try the calling the method with an invalid Instruction -
Language language = program.getLanguage();
manualEntry = plugin.locateManualEntry(context, language);
assertNotNull(manualEntry);
assertNull(manualEntry.getPageNumber()); // default entry has no page number
// now move to a valid Instruction to test that condition
currentInstruction = changeLocationToAddress("01000000");
assertNotNull("Found a null Instruction at a point in the program " +
"where we expected a valid Instruction.", currentInstruction);
// now try the calling the method with an valid Instruction
context = getCurrentContext();
manualEntry = plugin.locateManualEntry(context, language);
assertNotNull(manualEntry);
assertNotNull(manualEntry.getPageNumber());
}
// @Test
public void testShowProcessorManual_ErrorDialog() throws Exception {
		// FIXME: This test is bogus and needs to be corrected by referring to
		// an instruction whose manual is missing. Test appears to work with
// CI test environment because none of the manuals are found
changeLocationToAddress(beyondAddressString);
Language language = program.getLanguage();
ListingActionContext context = getCurrentContext();
context = getCurrentContext();
callGetUrl(context, language);
DialogComponentProvider dialog = waitForDialogComponent("Missing Processor Manual");
close(dialog);
}
@Test
public void testInstructionInfo() throws Exception {
// test the models to make sure no errors are encountered on valid
// and invalid instructions by exercising the Java model's public
// API
changeLocationToAddress(beyondAddressString);
// get the action that will show the window for the Instruction info
DockingActionIf infoAction = getAction(plugin, "Show Instruction Info");
// show the window
performAction(infoAction, cb.getProvider(), true);
// make sure we are at an invalid Instruction
ListingActionContext context = getCurrentContext();
Instruction currentInstruction =
(Instruction) invokeInstanceMethod("getInstructionForContext", plugin,
new Class[] { ListingActionContext.class }, new Object[] { context });
// make sure that the current instruction is null
assertNull("The current Instruction is not null as expected.", currentInstruction);
assertTrue(
"The tables of the component provider have data even " +
"though there is not Instruction selected in the proram.",
!componentProviderTablesHaveData());
// change to a valid instruction
currentInstruction = changeLocationToAddress("01000000");
assertNotNull("Found a null Instruction at a point in the program " +
"where we expected a valid Instruction.", currentInstruction);
// make sure that there is some data
Object[] data = getComponentProviderTableData(true);
assertTrue(
"There is not data in the component provider " +
"even though there is a valid instruction selected in the " + "program",
componentProviderTablesHaveData());
// verify dynamic update has changed the window's contents
ComponentProvider componentProvider = getCurrentComponentProviderFromPlugin();
JComponent comp = componentProvider.getComponent();
final JCheckBox dynamicCheckBox = findComponent(comp, JCheckBox.class);
// make sure dynamic update is enabled
if (!dynamicCheckBox.isSelected()) {
runSwing(() -> dynamicCheckBox.doClick());
}
// change to another valid Instruction
currentInstruction = changeLocationToAddress("01000006");
assertNotNull("Found a null Instruction at a point in the program " +
"where we expected a valid Instruction.", currentInstruction);
Object[] newData = getComponentProviderTableData(true);
boolean differentData = !(data[0].equals(newData[0]) && data[1].equals(newData[1]));
assertTrue("The data of the component provider is not different " +
"than it was after changing instructions.", differentData);
// verify the Instruction data is that of the Instruction
// selected in the plugin
verifyAddressWithTableModels(currentInstruction.getMinAddress(), true, true);
// turn off dynamic update
runSwing(() -> dynamicCheckBox.doClick());
// change to another valid Instruction
currentInstruction = changeLocationToAddress("01000009");
assertNotNull("Found a null Instruction at a point in the program " +
"where we expected a valid Instruction.", currentInstruction);
// verify that the contents have NOT changed
Object[] newData2 = getComponentProviderTableData(false);
differentData = data[0].equals(newData2[0]) && data[1].equals(newData2[1]);
assertTrue("The data of the component provider is different " +
"than it was after changing instructions even though dynamic " + "update is disabled.",
!differentData);
// verify the Instruction data is NOT that of the Instruction
// selected in the plugin
verifyAddressWithTableModels(currentInstruction.getMinAddress(), false, false);
		// Now test that moving to a valid, not-yet-disassembled address will cause
		// the update of the display once the disassembly takes place
// turn dynamic update back on
runSwing(() -> dynamicCheckBox.doClick());
// move to a valid location that has yet to be disassembled
currentInstruction = changeLocationToAddress("01000ffe");
assertNull("The current Instruction is not null when the selected " +
"program address has not been disassembled.", currentInstruction);
// make sure there are no contents in the display
assertTrue(
"The tables of the component provider have data even " +
"though there is not Instruction selected in the proram.",
!componentProviderTablesHaveData());
		// disassemble at the location
Plugin disassemblePlugin = env.getPlugin(DisassemblerPlugin.class);
DockingActionIf disassembleAction = getAction(disassemblePlugin, "Disassemble");
performAction(disassembleAction, cb.getProvider(), false);
waitForBusyTool(tool);
waitForTasks();
waitForProgram(program);
currentInstruction = changeLocationToAddress("01000ffe");
assertNotNull("Found a null Instruction at a point in the program " +
"where we expected a valid Instruction.", currentInstruction);
// make sure that the contents now display the current Instruction
assertTrue("There is not data in the component provider even " +
"though there is a valid instruction selected in the " + "program after we decompile.",
componentProviderTablesHaveData());
currentInstruction = changeLocationToAddress("01000ffe");
context = getCurrentContext();
// verify the Instruction data is that of the Instruction
// selected in the plugin
currentInstruction = (Instruction) invokeInstanceMethod("getInstructionForContext", plugin,
new Class[] { ListingActionContext.class }, new Object[] { context });
}
@Test
public void testCloseProgram() throws Exception {
changeLocationToAddress("01000000");
// get the action that will show the window for the Instruction info
DockingActionIf infoAction = getAction(plugin, "Show Instruction Info");
// show the window
performAction(infoAction, cb.getProvider(), true);
ComponentProvider componentProvider = getCurrentComponentProviderFromPlugin();
JComponent comp = componentProvider.getComponent();
final JCheckBox dynamicCheckBox = findComponent(comp, JCheckBox.class);
// turn off the checkbox
runSwing(() -> dynamicCheckBox.setSelected(false));
changeLocationToAddress("01000006");
performAction(infoAction, cb.getProvider(), true);
List<?> list = getDisconnectedProviderList();
assertEquals(1, list.size());
assertNotNull(getCurrentComponentProviderFromPlugin());
final ProgramManager pm = tool.getService(ProgramManager.class);
runSwing(() -> pm.closeProgram());
list = getDisconnectedProviderList();
//should only be the dynamic provider left
assertEquals(0, list.size());
}
@Test
public void testUpdates() throws Exception {
// display a provider, clear the instruction,
// make sure the the provider is cleared, etc.
changeLocationToAddress("01000000");
// get the action that will show the window for the Instruction info
DockingActionIf infoAction = getAction(plugin, "Show Instruction Info");
// show the window
performAction(infoAction, cb.getProvider(), true);
ComponentProvider provider = getCurrentComponentProviderFromPlugin();
clearAt100000();
assertNull(((InstructionInfoProvider) provider).getInstruction());
assertTrue(!componentProviderTablesHaveData());
}
private void clearAt100000() {
int transactionID = program.startTransaction("Test");
Address start = addr(0x01000000);
Instruction inst = program.getListing().getInstructionAt(start);
try {
program.getListing().clearCodeUnits(start, inst.getMaxAddress(), false);
}
finally {
program.endTransaction(transactionID, true);
}
waitForProgram(program);
}
@Test
public void testUndoRedo() throws Exception {
String addrString = "01000000";
changeLocationToAddress(addrString);
// get the action that will show the window for the Instruction info
DockingActionIf infoAction = getAction(plugin, "Show Instruction Info");
// show the window
performAction(infoAction, cb.getProvider(), true);
ComponentProvider provider = getCurrentComponentProviderFromPlugin();
clearAt100000();
assertNull(((InstructionInfoProvider) provider).getInstruction());
// undo
undo(program);
verifyAddressWithTableModels(addr(0x1000000), true, true);
// redo
redo(program);
assertNull(((InstructionInfoProvider) provider).getInstruction());
assertTrue(!componentProviderTablesHaveData());
}
private void callGetUrl(ListingActionContext context, Language language) {
runSwing(() -> {
try {
plugin.getValidUrl(context, language);
}
catch (IOException e) {
throw new RuntimeException(e);
}
}, false);
}
/**
* Moves the program location to the given address and returns the
* instruction at that location.
*
* @param addressString The address location to move to.
* @return The instruction at the new location or null if there is no
* instruction.
*/
private Instruction changeLocationToAddress(String addressString) throws Exception {
CodeBrowserPlugin cbp = env.getPlugin(CodeBrowserPlugin.class);
final Address address = program.getAddressFactory().getAddress(addressString);
final GoToService goToService = tool.getService(GoToService.class);
runSwing(() -> goToService.goTo(new AddressFieldLocation(program, address)));
waitForPostedSwingRunnables();
cbp.updateNow();
ListingActionContext context =
(ListingActionContext) cbp.getProvider().getActionContext(null);
return (Instruction) invokeInstanceMethod("getInstructionForContext", plugin,
new Class[] { ListingActionContext.class }, new Object[] { context });
}
private ListingActionContext getCurrentContext() {
CodeBrowserPlugin cbp = env.getPlugin(CodeBrowserPlugin.class);
return (ListingActionContext) cbp.getProvider().getActionContext(null);
}
private Address addr(long offset) {
return program.getMinAddress().getNewAddress(offset);
}
/**
* Tests the addresses of the table models of the "Instruction Info" dialog.
* The method will fail the current test if the result is not as
* expected by the caller of this method. For example, if
* <tt>expectedSame</tt> is true, then the method expects the values to
* be the same when compared with the given address and will fail if
* they are not. If <tt>expectedSame</tt> is false, then the method will
* fail if the test values are the same.
*
* @param instructionAddress The address to compare against the address
* stored in the table model of the dialog.
* @param expectedSame True means a match is expected; false means a
* match is not expected.
*/
private void verifyAddressWithTableModels(Address instructionAddress, boolean fromConnected,
boolean expectedSame) {
ComponentProvider provider = fromConnected ? getCurrentComponentProviderFromPlugin()
: getFirstDisconnectedProviderFromPlugin();
JTextArea instructionText = (JTextArea) getInstanceField("instructionText", provider);
JTable opTable = (JTable) getInstanceField("opTable", provider);
// get the instruction address from each table model and make sure that
// it is the same as the current instruction
String stateString = expectedSame ? "is not" : "is";
String text = instructionText.getText();
String address = instructionAddress.toString(true);
Pattern pattern = Pattern.compile("Address\\s*:\\s*" + address);
Matcher matcher = pattern.matcher(text);
boolean comparisonResult = matcher.find();
// if the caller of this method expects the results to be NOT equal,
// then toggle the comparison result
if (!expectedSame) {
comparisonResult = !comparisonResult;
}
assertTrue("The address of the mnemonic table " + stateString +
" the same as that of the current program instruction.", comparisonResult);
Instruction opInstr = (Instruction) getInstanceField("instruction", opTable.getModel());
comparisonResult = instructionAddress.equals(opInstr.getMinAddress());
if (!expectedSame) {
comparisonResult = !comparisonResult;
}
assertTrue("The address of the op table " + stateString +
" the same as that of the current program instruction.", comparisonResult);
}
/**
* A simple method to test that the tables of the "Instruction Info"
* dialog contain data.
*
* @return True if either of the tables have data.
*/
private boolean componentProviderTablesHaveData() {
Object[] data = getComponentProviderTableData(true);
return ((data[0] != null) && !"-- No Instruction --".equals(data[0])) || (data[1] != null);
}
/**
* Gets data from the two tables of the "Instruction Info" dialog.
*
* @return data from the two tables of the "Instruction Info" dialog.
*/
private Object[] getComponentProviderTableData(boolean fromConnected) {
ComponentProvider provider = fromConnected ? getCurrentComponentProviderFromPlugin()
: getFirstDisconnectedProviderFromPlugin();
JTextArea instructionText = (JTextArea) getInstanceField("instructionText", provider);
JTable opTable = (JTable) getInstanceField("opTable", provider);
Object[] data = new Object[2];
// the following two values are based upon the objString() method of
// each table model
data[0] = instructionText.getText();
data[1] = opTable.getColumnCount() != 0 ? opTable.getValueAt(6, 0) : null;
return data;
}
private ComponentProvider getFirstDisconnectedProviderFromPlugin() {
List<?> disconnectedProviderList = getDisconnectedProviderList();
return (ComponentProvider) disconnectedProviderList.get(0);
}
/**
* Returns the current ComponentProvider in use by the plugin.
*
* @return the current ComponentProvider in use by the plugin.
*/
private ComponentProvider getCurrentComponentProviderFromPlugin() {
return (ComponentProvider) getInstanceField("connectedProvider", plugin);
}
private List<?> getDisconnectedProviderList() {
return (List<?>) getInstanceField("disconnectedProviders", plugin);
}
} |
Narrating Confluent Experiences in a Child Welfare Case
This constructed narrative inquiry illustrates the confluent stories of a young mother, Jenny, charged with child abuse and neglect; her foster care caseworker, Rachel; and her therapist, Kathleen. As researchers, we discuss the positions of each person (mother, caseworker, therapist) through storied fragments representing what is most important in how they came to understand the process of their year-long work together that led to Jenny releasing her parental rights. Layering interviews and reflexive writings, we focus on decision-making and voice: on what it means to be a parent, a foster care worker, and a therapist in a community context where parents' benefit from services and the child's best interest are privileged societal discourses.
/**
* <p>
* This class implements a spectral peak follower as described in Sethares et
* al. 2009 - Spectral Tools for Dynamic Tonality and Audio Morphing - section
* "Analysis-Resynthessis". It calculates a noise floor and picks spectral peaks
* rising above a calculated noise floor with a certain factor. The noise floor
* is determined using a simple median filter.
* </p>
* <p>
* Parts of the code is modified from <a
* href="http://www.dynamictonality.com/spectools.htm">the code accompanying
* "Spectral Tools for Dynamic Tonality and Audio Morphing"</a>.
* </p>
* <p>
 * To get the spectral peaks from an audio frame, use the static helper methods as follows:
* <code><pre>
AudioDispatcher dispatcher = new AudioDispatcher(stream, fftsize, overlap);
dispatcher.addAudioProcessor(spectralPeakFollower);
dispatcher.addAudioProcessor(new AudioProcessor() {
public void processingFinished() {
}
public boolean process(AudioEvent audioEvent) {
float[] noiseFloor = SpectralPeakProcessor.calculateNoiseFloor(spectralPeakFollower.getMagnitudes(), medianFilterLength, noiseFloorFactor);
List<Integer> localMaxima = SpectralPeakProcessor.findLocalMaxima(spectralPeakFollower.getMagnitudes(), noiseFloor);
 List<SpectralPeakProcessor.SpectralPeak> list = SpectralPeakProcessor.findPeaks(spectralPeakFollower.getMagnitudes(), spectralPeakFollower.getFrequencyEstimates(), localMaxima, numberOfPeaks, minDistanceInCents);
// do something with the list...
return true;
}
});
dispatcher.run();
</pre></code>
*
* @author Joren Six
* @author William A. Sethares
* @author Andrew J. Milne
* @author Stefan Tiedje
* @author Anthony Prechtl
* @author James Plamondon
*
*/
public class SpectralPeakProcessor implements AudioProcessor {
/**
* The sample rate of the signal.
*/
private final int sampleRate;
/**
* Cached calculations for the frequency calculation
*/
private final double dt;
private final double cbin;
private final double inv_2pi;
private final double inv_deltat;
private final double inv_2pideltat;
/**
* The fft object used to calculate phase and magnitudes.
*/
private final FFT fft;
/**
	 * The phase info of the current frame.
*/
private final float[] currentPhaseOffsets;
/**
* The magnitudes in the current frame.
*/
private final float[] magnitudes;
/**
* Detailed frequency estimates for each bin, using phase info
*/
private final float[] frequencyEstimates;
/**
* The phase information of the previous frame, or null.
*/
private float[] previousPhaseOffsets;
public SpectralPeakProcessor(int bufferSize, int overlap, int sampleRate) {
fft = new FFT(bufferSize, new HammingWindow());
magnitudes = new float[bufferSize / 2];
currentPhaseOffsets = new float[bufferSize / 2];
frequencyEstimates = new float[bufferSize / 2];
dt = (bufferSize - overlap) / (double) sampleRate;
cbin = (double) (dt * sampleRate / (double) bufferSize);
inv_2pi = (double) (1.0 / (2.0 * Math.PI));
inv_deltat = (double) (1.0 / dt);
inv_2pideltat = (double) (inv_deltat * inv_2pi);
this.sampleRate = sampleRate;
}
private void calculateFFT(float[] audio) {
// Clone to prevent overwriting audio data
float[] fftData = audio.clone();
// Extract the power and phase data
fft.powerPhaseFFT(fftData, magnitudes, currentPhaseOffsets);
}
private void normalizeMagintudes(){
float maxMagnitude = (float) -1e6;
for(int i = 0;i<magnitudes.length;i++){
maxMagnitude = Math.max(maxMagnitude, magnitudes[i]);
}
//log10 of the normalized value
//adding 75 makes sure the value is above zero, a bit ugly though...
for(int i = 1;i<magnitudes.length;i++){
magnitudes[i] = (float) (10 * Math.log10(magnitudes[i]/maxMagnitude)) + 75;
}
}
@Override
public boolean process(AudioEvent audioEvent) {
float[] audio = audioEvent.getFloatBuffer();
// 1. Extract magnitudes, and phase using an FFT.
calculateFFT(audio);
// 2. Estimate a detailed frequency for each bin.
calculateFrequencyEstimates();
		// 3. Normalize each magnitude.
normalizeMagintudes();
// 4. Store the current phase so it can be used for the next frequency estimates block.
previousPhaseOffsets = currentPhaseOffsets.clone();
return true;
}
@Override
public void processingFinished() {
}
/**
* For each bin, calculate a precise frequency estimate using phase offset.
*/
private void calculateFrequencyEstimates() {
for(int i = 0;i < frequencyEstimates.length;i++){
frequencyEstimates[i] = getFrequencyForBin(i);
}
}
/**
* @return the magnitudes.
*/
public float[] getMagnitudes() {
return magnitudes.clone();
}
/**
* @return the precise frequency for each bin.
*/
public float[] getFrequencyEstimates(){
return frequencyEstimates.clone();
}
/**
* Calculates a frequency for a bin using phase info, if available.
* @param binIndex The FFT bin index.
* @return a frequency, in Hz, calculated using available phase info.
*/
private float getFrequencyForBin(int binIndex){
final float frequencyInHertz;
// use the phase delta information to get a more precise
// frequency estimate
// if the phase of the previous frame is available.
// See
// * Moore 1976
// "The use of phase vocoder in computer music applications"
// * Sethares et al. 2009 - Spectral Tools for Dynamic
// Tonality and Audio Morphing
// * Laroche and Dolson 1999
if (previousPhaseOffsets != null) {
float phaseDelta = currentPhaseOffsets[binIndex] - previousPhaseOffsets[binIndex];
long k = Math.round(cbin * binIndex - inv_2pi * phaseDelta);
frequencyInHertz = (float) (inv_2pideltat * phaseDelta + inv_deltat * k);
} else {
frequencyInHertz = (float) fft.binToHz(binIndex, sampleRate);
}
return frequencyInHertz;
}
/**
* Calculate a noise floor for an array of magnitudes.
* @param magnitudes The magnitudes of the current frame.
* @param medianFilterLength The length of the median filter used to determine the noise floor.
* @param noiseFloorFactor The noise floor is multiplied with this factor to determine if the
* information is either noise or an interesting spectral peak.
* @return a float array representing the noise floor.
*/
public static float[] calculateNoiseFloor(float[] magnitudes, int medianFilterLength, float noiseFloorFactor) {
double[] noiseFloorBuffer;
float[] noisefloor = new float[magnitudes.length];
float median = (float) median(magnitudes.clone());
// Naive median filter implementation.
// For each element take a median of surrounding values (noiseFloorBuffer)
// Store the median as the noise floor.
for (int i = 0; i < magnitudes.length; i++) {
noiseFloorBuffer = new double[medianFilterLength];
int index = 0;
for (int j = i - medianFilterLength/2; j <= i + medianFilterLength/2 && index < noiseFloorBuffer.length; j++) {
if(j >= 0 && j < magnitudes.length){
noiseFloorBuffer[index] = magnitudes[j];
} else{
noiseFloorBuffer[index] = median;
}
index++;
}
// calculate the noise floor value.
noisefloor[i] = (float) (median(noiseFloorBuffer) * (noiseFloorFactor)) ;
}
float rampLength = 12.0f;
for(int i = 0 ; i <= rampLength ; i++){
//ramp
float ramp = 1.0f;
ramp = (float) (-1 * (Math.log(i/rampLength))) + 1.0f;
noisefloor[i] = ramp * noisefloor[i];
}
return noisefloor;
}
/**
	 * Finds the local magnitude maxima and returns them as a list.
* @param magnitudes The magnitudes.
* @param noisefloor The noise floor.
* @return a list of local maxima.
*/
public static List<Integer> findLocalMaxima(float[] magnitudes,float[] noisefloor){
List<Integer> localMaximaIndexes = new ArrayList<Integer>();
for (int i = 1; i < magnitudes.length - 1; i++) {
boolean largerThanPrevious = (magnitudes[i - 1] < magnitudes[i]);
boolean largerThanNext = (magnitudes[i] > magnitudes[i + 1]);
boolean largerThanNoiseFloor = (magnitudes[i] > noisefloor[i]);
if (largerThanPrevious && largerThanNext && largerThanNoiseFloor) {
localMaximaIndexes.add(i);
}
}
return localMaximaIndexes;
}
/**
* @param magnitudes the magnitudes.
* @return the index for the maximum magnitude.
*/
private static int findMaxMagnitudeIndex(float[] magnitudes){
int maxMagnitudeIndex = 0;
float maxMagnitude = (float) -1e6;
for (int i = 1; i < magnitudes.length - 1; i++) {
if(magnitudes[i] > maxMagnitude){
maxMagnitude = magnitudes[i];
maxMagnitudeIndex = i;
}
}
return maxMagnitudeIndex;
}
/**
*
* @param magnitudes the magnitudes..
* @param frequencyEstimates The frequency estimates for each bin.
* @param localMaximaIndexes The indexes of the local maxima.
* @param numberOfPeaks The requested number of peaks.
* @param minDistanceInCents The minimum distance in cents between the peaks
* @return A list with spectral peaks.
*/
public static List<SpectralPeak> findPeaks(float[] magnitudes, float[] frequencyEstimates, List<Integer> localMaximaIndexes, int numberOfPeaks, int minDistanceInCents){
int maxMagnitudeIndex = findMaxMagnitudeIndex(magnitudes);
List<SpectralPeak> spectralPeakList = new ArrayList<SpectralPeak>();
if(localMaximaIndexes.size()==0)
return spectralPeakList;
float referenceFrequency=0;
//the frequency of the bin with the highest magnitude
referenceFrequency = frequencyEstimates[maxMagnitudeIndex];
		//remove local maxima whose frequency estimate is below zero (set those estimates to 1 Hz)
		for(int i = 0 ; i < localMaximaIndexes.size() ; i++){
			int binIndex = localMaximaIndexes.get(i);
			if(frequencyEstimates[binIndex] < 0 ){
				frequencyEstimates[binIndex] = 1;//Hz
				localMaximaIndexes.remove(i);
				i--;
			}
		}
		//filter the local maxima indexes, remove peaks that are too close to each other
		//assumes that localMaximaIndexes is sorted from lowest to highest index
for(int i = 1 ; i < localMaximaIndexes.size() ; i++){
double centCurrent = PitchConverter.hertzToAbsoluteCent(frequencyEstimates[localMaximaIndexes.get(i)]);
double centPrev = PitchConverter.hertzToAbsoluteCent(frequencyEstimates[localMaximaIndexes.get(i-1)]);
double centDelta = centCurrent - centPrev;
if(centDelta < minDistanceInCents ){
if(magnitudes[localMaximaIndexes.get(i)] > magnitudes[localMaximaIndexes.get(i-1)]){
localMaximaIndexes.remove(i-1);
}else{
localMaximaIndexes.remove(i);
}
i--;
}
}
// Retrieve the maximum values for the indexes
float[] maxMagnitudes = new float[localMaximaIndexes.size()];
for(int i = 0 ; i < localMaximaIndexes.size() ; i++){
maxMagnitudes[i] = magnitudes[localMaximaIndexes.get(i)];
}
// Sort the magnitudes in ascending order
Arrays.sort(maxMagnitudes);
// Find the threshold, the first value or somewhere in the array.
float peakthresh = maxMagnitudes[0];
if (maxMagnitudes.length > numberOfPeaks) {
peakthresh = maxMagnitudes[maxMagnitudes.length - numberOfPeaks];
}
//store the peaks
for(Integer i : localMaximaIndexes){
if(magnitudes[i]>= peakthresh){
final float frequencyInHertz= frequencyEstimates[i];
//ignore frequencies lower than 30Hz
float binMagnitude = magnitudes[i];
SpectralPeak peak = new SpectralPeak(0,frequencyInHertz, binMagnitude, referenceFrequency,i);
spectralPeakList.add(peak);
}
}
return spectralPeakList;
}
public static final float median(double[] arr){
return percentile(arr, 0.5);
}
/**
* Returns the p-th percentile of values in an array. You can use this
* function to establish a threshold of acceptance. For example, you can
* decide to examine candidates who score above the 90th percentile (0.9).
* The elements of the input array are modified (sorted) by this method.
*
* @param arr An array of sample data values that define relative standing.
* The contents of the input array are sorted by this method.
* @param p The percentile value in the range 0..1, inclusive.
* @return The p-th percentile of values in an array. If p is not a multiple
* of 1/(n - 1), this method interpolates to determine the value at
* the p-th percentile.
**/
public static final float percentile( double[] arr, double p ) {
if (p < 0 || p > 1)
throw new IllegalArgumentException("Percentile out of range.");
// Sort the array in ascending order.
Arrays.sort(arr);
// Calculate the percentile.
double t = p*(arr.length - 1);
int i = (int)t;
return (float) ((i + 1 - t)*arr[i] + (t - i)*arr[i + 1]);
}
public static double median(float[] m) {
// Sort the array in ascending order.
Arrays.sort(m);
int middle = m.length/2;
if (m.length%2 == 1) {
return m[middle];
} else {
return (m[middle-1] + m[middle]) / 2.0;
}
}
public static class SpectralPeak{
private final float frequencyInHertz;
private final float magnitude;
private final float referenceFrequency;
private final int bin;
/**
* Timestamp in fractional seconds
*/
private final float timeStamp;
public SpectralPeak(float timeStamp,float frequencyInHertz, float magnitude,float referenceFrequency,int bin){
this.frequencyInHertz = frequencyInHertz;
this.magnitude = magnitude;
this.referenceFrequency = referenceFrequency;
this.timeStamp = timeStamp;
this.bin = bin;
}
public float getRelativeFrequencyInCents(){
if(referenceFrequency > 0 && frequencyInHertz > 0){
float refInCents = (float) PitchConverter.hertzToAbsoluteCent(referenceFrequency);
float valueInCents = (float) PitchConverter.hertzToAbsoluteCent(frequencyInHertz);
return valueInCents - refInCents;
}else{
return 0;
}
}
public float getTimeStamp(){
return timeStamp;
}
public float getMagnitude(){
return magnitude;
}
public float getFrequencyInHertz(){
return frequencyInHertz;
}
public float getRefFrequencyInHertz(){
return referenceFrequency;
}
public String toString(){
return String.format("%.2f %.2f %.2f", frequencyInHertz,getRelativeFrequencyInCents(),magnitude);
}
public int getBin() {
return bin;
}
}
} |
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Firefox about:memory log parser.
import argparse
import gzip
import json
from collections import defaultdict
# This value comes from nsIMemoryReporter.idl.
KIND_HEAP = 1
def path_total(data, path):
"""
Calculates the sum for the given data point path and its children. If
path does not end with a '/' then only the value for the exact path is
returned.
"""
path_totals = defaultdict(int)
# Bookkeeping for calculating the heap-unclassified measurement.
explicit_heap = defaultdict(int)
heap_allocated = defaultdict(int)
discrete = not path.endswith("/")
def match(value):
"""
Helper that performs either an explicit match or a prefix match
depending on the format of the path passed in.
"""
if discrete:
return value == path
else:
return value.startswith(path)
def update_bookkeeping(report):
"""
Adds the value to the heap total if this an explicit entry that is a
heap measurement and updates the heap allocated value if necessary.
"""
if report["kind"] == KIND_HEAP and report["path"].startswith("explicit/"):
explicit_heap[report["process"]] += report["amount"]
elif report["path"] == "heap-allocated":
heap_allocated[report["process"]] = report["amount"]
def heap_unclassified(process):
"""
Calculates the heap-unclassified value for the given process. This is
simply the difference between all values reported as heap allocated
under the explicit/ tree and the value reported for heap-allocated by
the allocator.
"""
# Memory reports should always include heap-allocated. If it's missing
# just assert.
assert process in heap_allocated
unclassified = heap_allocated[process] - explicit_heap[process]
# Make sure the value is sane. A misbehaving reporter could lead to
# negative values.
# This assertion fails on Beta while running TP6, in the Google Docs process.
# Disable this for now, but only on Beta. See bug 1735556.
# assert unclassified >= 0, "heap-unclassified was negative: %d" % unclassified
return unclassified
needs_bookkeeping = path in ("explicit/", "explicit/heap-unclassified")
# Process all the reports.
for report in data["reports"]:
if needs_bookkeeping:
update_bookkeeping(report)
if match(report["path"]):
path_totals[report["process"]] += report["amount"]
# Handle special processing for explicit and heap-unclassified.
if path == "explicit/":
# If 'explicit/' is requested we need to add the 'explicit/heap-unclassified'
# node that is generated by about:memory.
for k, v in explicit_heap.items():
path_totals[k] += heap_unclassified(k)
elif path == "explicit/heap-unclassified":
# If 'explicit/heap-unclassified' is requested we need to calculate the
# value as it's generated by about:memory, not explicitly reported.
for k, v in explicit_heap.items():
path_totals[k] = heap_unclassified(k)
return path_totals
def calculate_memory_report_values(
memory_report_path, data_point_path, process_names=None
):
"""
Opens the given memory report file and calculates the value for the given
data point.
:param memory_report_path: Path to the memory report file to parse.
:param data_point_path: Path of the data point to calculate in the memory
report, ie: 'explicit/heap-unclassified'.
:param process_name: Name of processes to limit reports to. ie 'Main'
"""
try:
with open(memory_report_path) as f:
data = json.load(f)
except ValueError:
# Check if the file is gzipped.
with gzip.open(memory_report_path, "rb") as f:
data = json.load(f)
totals = path_total(data, data_point_path)
# If a process name is provided, restricted output to processes matching
# that name.
if process_names is not None:
for k in list(totals.keys()):
if not any([process_name in k for process_name in process_names]):
del totals[k]
return totals
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Extract data points from about:memory reports"
)
parser.add_argument("report", action="store", help="Path to a memory report file.")
parser.add_argument(
"prefix",
action="store",
help="Prefix of data point to measure. "
"If the prefix does not end in a '/' "
"then an exact match is made.",
)
parser.add_argument(
"--proc-filter",
action="store",
nargs="*",
default=None,
help="Process name filter. " "If not provided all processes will be included.",
)
parser.add_argument(
"--mebi",
action="store_true",
help="Output values as mebibytes (instead of bytes)" " to match about:memory.",
)
args = parser.parse_args()
totals = calculate_memory_report_values(args.report, args.prefix, args.proc_filter)
sorted_totals = sorted(totals.items(), key=lambda item: (-item[1], item[0]))
    for (k, v) in sorted_totals:
        if v:
            # Print the process names on a single, tab-separated line.
            print("{0}\t".format(k), end="")
    print("")
bytes_per_mebibyte = 1024.0 * 1024.0
    for (k, v) in sorted_totals:
        if v:
            # Print the values on a single, tab-separated line matching the names above.
            if args.mebi:
                print("{0:.2f} MiB".format(v / bytes_per_mebibyte), end="")
            else:
                print("{0} bytes".format(v), end="")
            print("\t", end="")
    print("")
|
#!/usr/bin/env python3
from astropy import units as u
from astropy.coordinates import SkyCoord, EarthLocation, AltAz, Angle
from astropy.time import Time
from astropy.utils import iers
import sys, subprocess, time, math
from autopa_modules import indi
from autopa_modules import platesolve
from autopa_modules import LX200
from datetime import datetime
from statistics import mean
import argparse
def slewCaptureSolve(indiclient, RA, DEC, radius, exposure, telescope, ccd, blobEvent, pixel_resolution, filename="platesolve.fits"):
print (f"Slewing \"{telescope}\" to {Angle(RA*u.deg).to_string(unit=u.hour)}, {Angle(DEC*u.deg).to_string(unit=u.degree)}")
indi.slewSync(indiclient, telescope, RA, DEC)
#Wait one second to let the telescope stop moving and let any motor vibrations reduce
time.sleep(1)
original_exposure = exposure
solved = False
while not solved:
print (f"Capturing {exposure} second exposure on \"{ccd}\"")
captureTime = indi.capture(indiclient, ccd, exposure, filename, blobEvent)
print (f"Attempting plate solving in the region of: RA {Angle(RA*u.deg).to_string(unit=u.hour)}, DEC {Angle(DEC*u.deg).to_string(unit=u.degree)}.")
solveTimeStart = time.time()
#resultRA, resultDEC = platesolve.solve_ASTROMETRY(filename, RA, DEC, radius, pixel_resolution)
result = platesolve.solve_ASTAP(filename, RA, DEC, radius, pixel_resolution)
if not isinstance(result, str):
solved = True
else:
print("Image could not be solved. Attempting another capture with increased exposure time (Max is 10 second exposure)")
if exposure < 10:
exposure += 1
            elif exposure - original_exposure >= 5:
                print("Image could not be solved. Exiting.")
                sys.exit(1)
solveTimeFinish = time.time()
print (f"Image solved in {(solveTimeFinish - solveTimeStart):.2f} seconds.")
#print (f"Image solved. Coordinates are RA {Angle(resultRA, unit=u.deg).to_string(unit=u.hour)}, DEC {Angle(resultDEC, unit=u.deg).to_string(unit=u.degree)}")
return (result, captureTime)
def initSteppers(indiclient, telescope, serialport, backlashCorrection):
print ("Initializing steppers")
#Disconnect telescope mount from INDI to free up serial port for Alt/Az adjustments
indi.disconnectScope(indiclient, telescope)
#Increase both Az & Alt by approximately 16-steps
LX200.sendCommand(f":MAZ{backlashCorrection[0]}#", serialport)
LX200.sendCommand(f":MAL{backlashCorrection[1]}#", serialport)
time.sleep(3)
#Re-connect telescope mount to INDI before disconnecting from the INDI server
indi.connectScope(indiclient, telescope)
return
parser = argparse.ArgumentParser(usage='%(prog)s [mylat] [mylong] [myelev] [options]', description='OpenAstroTracker AutoPA: This tool is used to automatically rotate the mount, capture images,\
plate solve, and calculate the polar alignment error of the OAT. Serial commands are automatically issued to the OAT to adjust the motorized altitude/azimuth axis to correct this error.')
parser.add_argument("mylat", help="your latitude in degrees", type=float)
parser.add_argument("mylong", help="your longitude in degrees", type=float)
parser.add_argument("myelev", help="your elevation in metres", type=float)
parser.add_argument("--serialport", help="serial port address for the OAT (default is /dev/ttyACM0)", type=str)
parser.add_argument("--targetRA", help="initial starting RA in degrees (default is 0)", type=float)
parser.add_argument("--targetDEC", help="initial starting DEC in degrees (default is 85)", type=float)
parser.add_argument("--exposure", help="exposure time in seconds (default is 8 seconds)", type=float)
parser.add_argument("--telescope", help="name of INDI telescope to control movement (default is Ekos \"Telescope Simulator\")", type=str)
parser.add_argument("--ccd", help="name of INDI CCD for capturing exposures (default is Ekos \"CCD Simulator\")", type=str)
parser.add_argument("--radius", help="field radius of plate solving", type=float)
parser.add_argument("--pixelSize", help="CCD pixel size in micrometres. Used to decrease plate solving time.", type=float)
parser.add_argument("--pixelX", help="Quantity of pixels in the X direction.", type=float)
parser.add_argument("--pixelY", help="Quantity of pixels in the Y direction.", type=float)
parser.add_argument("--focalLength", help="Lens focal length in millimetres. Used to decrease plate solving time.", type=float)
parser.add_argument("--nomove", help="Run AutoPA sequence but do not move the steppers.", action="store_true")
args = parser.parse_args()
mylat = args.mylat
mylong = args.mylong
myelev = args.myelev
if args.serialport:
serialport = args.serialport
else:
serialport = "/dev/ttyACM0"
if args.targetRA:
targetRA = args.targetRA
else:
targetRA = 0
if args.targetDEC:
targetDEC = args.targetDEC
else:
targetDEC = 85
if args.exposure:
exposure = args.exposure
else:
exposure = 8.0
if args.telescope:
telescope = args.telescope
else:
telescope = "Telescope Simulator"
if args.ccd:
ccd = args.ccd
else:
ccd = "CCD Simulator"
if args.radius:
radius = args.radius
else:
radius = 30
if args.pixelSize and args.focalLength:
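    #Plate scale in arcseconds per pixel: 206.265 * pixel size (micrometres) / focal length (millimetres)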
pixel_resolution = (args.pixelSize/args.focalLength)*206.265
else:
pixel_resolution = 0
if args.nomove:
nomove = True
else:
nomove = False
if args.pixelX and args.pixelY:
pixelX = args.pixelX
pixelY = args.pixelY
if pixel_resolution != 0:
        radius = (max(pixelX, pixelY) * pixel_resolution) / 3600 * 1.1 #Calculate FOV in degrees and add a 10% margin to the plate solving search radius
else:
pixelX = 0
pixelY = 0
startTime = time.time()
solveTimeFinish = time.time()
#Backlash correction in arcminutes (approximately 16 full steps of each axis)
backlashCorrection = [4.2665,0.1647]
#Connect to indi server
indiclient, blobEvent = indi.indiserverConnect()
if not nomove:
#Initialize steppers to a known backlash position
initSteppers(indiclient, telescope, serialport, backlashCorrection)
#Execute polar alignment routine to capture and solve three times 30 degrees apart
p1, p1Time = slewCaptureSolve(indiclient, (targetRA % 360), targetDEC, radius, exposure, telescope, ccd, blobEvent, pixel_resolution, "capture1.fits")
p2, p2Time = slewCaptureSolve(indiclient, ((targetRA+30) % 360), targetDEC, radius, exposure, telescope, ccd, blobEvent, pixel_resolution, "capture2.fits")
p3, p3Time = slewCaptureSolve(indiclient, ((targetRA+60) % 360), targetDEC, radius, exposure, telescope, ccd, blobEvent, pixel_resolution, "capture3.fits")
#Calculate capture time based on the average timestamps of each image
observing_time = datetime.utcfromtimestamp(mean((p1Time, p2Time, p3Time)))
print (f"Time of captures is {observing_time} (UTC).")
#Calculate polar alignment error
result = platesolve.polarCalc(mylat, mylong, myelev, observing_time, p1, p2, p3)
print(f"Azimuth error correction is: {result[0]:.4f} arcminutes.")
print(f"Altitude error correction is: {result[1]:.4f} arcminutes.")
if not nomove:
#Disconnect telescope mount from INDI to free up serial port for Alt/Az adjustments
print (f"Disconnecting {telescope} from INDI server")
indi.disconnectScope(indiclient, telescope)
print ("Disconnected.")
#Adjust alt/az axis
print ("Adjusting altitude/azimuth axes.")
platesolve.adjustAltAz(result, serialport, backlashCorrection)
#Re-connect telescope mount to INDI before disconnecting from the INDI server
indi.connectScope(indiclient, telescope)
#Disconnect from indi server
indi.indiserverDisconnect(indiclient)
print (f"Polar alignment took {time.strftime('%Mm%Ss', time.gmtime(time.time() - startTime))}.") |
import java.util.*;
import static java.lang.Math.*;
import java.util.stream.*;
/*
Problem name: 541 Error Correction
Problem url: https://uva.onlinejudge.org/external/5/541.pdf
Author: <NAME>
*/
public class _541_ErrorCorrection {
public static void main(String[] args){
Scanner s = new Scanner(System.in);
while(s.hasNext()){
int N = s.nextInt();
if(N == 0) break;
int[][] m = new int[N][N];
for(int i = 0; i < m.length; i++){
for(int j = 0; j < m[0].length; j++){
m[i][j] = s.nextInt();
}
}
/*for(int i = 0; i < m.length; i++){
System.out.println(Arrays.toString(m[i]));
}*/
if(hasParityProperty(m)) System.out.println("OK");
else{
int[] fixBit = fixMatrix(m);
if(fixBit == null) System.out.println("Corrupt");
else System.out.printf("Change bit (%d,%d)\n", fixBit[0] + 1, fixBit[1] + 1);
}
}
}
static int[] fixMatrix(int[][] m){
for(int i = 0; i < m.length; i++){
for(int j = 0; j < m[0].length; j++){
// invert bit in m[i][j]
m[i][j] ^= 1;
if(hasParityProperty(m)) return new int[]{i, j};
// restore bit in m[i][j]
m[i][j] ^= 1;
}
}
return null;
}
static boolean hasParityProperty(int[][] m){
for(int i = 0; i < m.length; i++){
if(!isEven(sumRow(m, i))) return false;
}
for(int j = 0; j < m[0].length; j++){
if(!isEven(sumCol(m, j))) return false;
}
return true;
}
static boolean isEven(int n){
return n % 2 == 0;
}
static int sumRow(int[][] m, int row){
int s = 0;
for(int j = 0; j < m[0].length; j++){
s += m[row][j];
}
return s;
}
static int sumCol(int[][] m, int col){
int s = 0;
for(int i = 0; i < m.length; i++){
s += m[i][col];
}
return s;
}
}
(Top photo: two tagged plants)
This post originally appeared on VICE UK
In the UK, growing weed is usually a pretty clandestine procedure. It has to be, really, considering it's still very much illegal and can see you handed anything from a community service sentence to a decade in prison. Good news for green-fingered smokers, then, that the United Kingdom Cannabis Social Clubs (UKCSC) has recently launched a system that, in theory, would help you battle a court case if your grow was busted.
There are four categories of cannabis grows in the eyes of the law. Category 1 is where your operation is capable of producing enough for commercial distribution, and the remaining categories work their way down to number four, meaning nine or fewer plants, which can be considered a "domestic operation."
The UKCSC sells a kit containing branded tags complete with unique serial numbers, and a poster bearing a notice for the police. You can use these to tag up to nine plants in one grow location, which signifies your operation is not one with criminal intentions. In other words, you are not a street or commercial dealer.
So why would you need to grow nine plants if you don't intend to deal? The idea is that this one garden provides for multiple cannabis consumers who are part of a "collective": a "separate and legally distinct group of consenting adults that wish to avoid engaging with the black-market by the communal growing and sharing of cannabis," according to the UKCSC website.
Related: Watch 'High Society: How Weed Laws Are Failing the UK'
These collectives consist of many medical users of cannabis who are looking for the safest and fairest access possible, as well as recreational enthusiasts who don't want to associate with the criminal market and also wish to grow their preferred strains to a much cleaner standard than what's available on the street. As well as making it clear to the court that your grow was not funding organized crime, the money you paid for the tags goes into a pot maintained by UKCSC, which helps to fund your legal defense if you do ever get raided.
James, a grower who has been raided before, has recently registered his garden under the tagged collective model. He told me: "This scheme allows us to show that we are not commercial growers if we do get another knock at the door. And it shows the authorities that whilst cultivation is illegal at the moment, we are trying to do it in as professional a manner as possible and be responsible."
No one who is registered under the tagging system has actually been raided yet, so how the police and courts will view this model has not yet been tested. "I'd like to think the police would look at the tags and be able to clearly see that the plants are not intended for sale on the street and that they are for helping people to have a decent quality of life," says James.
A poster provided as part of the UKCSC's tagging system.
Alongside the tagging system, domestic growers in certain parts of the country have something else on their side: the fact that Sara Thornton, head of the National Police Chiefs Council, has said cracking down on weed has "never been a top priority," and that if police are alerted to small-scale grows they're more likely to just "record" the news rather than carry out an investigation.
Another grower under the UKCSC scheme, Trev, has confidence in the project. He says: "Sooner or later we'll hit a tipping point where the police have to work far more effectively with us, rather than against us. The same will happen vice versa, which all goes towards community relations and cracking down on crime gangs. The tags show the police that I'm part of something bigger than myself. It shows them that I'm part of a culture that would far sooner work with them for change."
James also hopes the scheme will help to foster better relations with the police, as he'd like to be able to access his medicine without fear of arrest or prosecution. "I've had bad joint and muscle pain for about five years, and I've used cannabis concentrates to help with pain relief," he says. "After years of different tests, I just found out this week that it's fibromyalgia. This is why I grow cannabis; even though I have been raided before, it is the only way I can guarantee consistent quality meds."
The tagged plant model does more than just send a message: it also allows the UKCSC to track data on how many potential medical users and growers exist in the UK. Greg de Hoedt, the President of the UKCSC, got the idea for this comprehensive anonymous database after seeing similar systems in US states where cannabis is legal, like California and Colorado.
"The inspiration initially came from an area in California called Mendocino," says Greg. "When the area was doing badly economically, the police force risked having major cuts. As the area was already known to be full of weed growers, they decided to drastically slow down on raiding weed farmsโinstead, they offered growers tags and flags for $8,000 that would make them immune from being a police target. The only condition was that there were no more than 99 plants being grown."
Nine tagged plants
The money raised would be added directly to the community's tax budget, and therefore was a win-win situation for everyone: growers who signed up to the system were no longer anxious about being raided, and the community benefited economically.
"The sheriff who headed up the idea was praised for his innovation by most, and a bridge was built between cannabis growers and the police for the first time ever," says Greg. "I'd love to achieve this bridge in the UK."
In Colorado, cannabis is tracked by batch and by gram from seed to sale. This system of regulation was another source of inspiration for Greg, who sees such moves as an important part of cannabis coming out of the underground and becoming an accepted part of society. "This is about taking cannabis into our own hands and away from criminals," he says. "The tags are about knowing your cannabis has been grown properly, cleanly, and is of medical quality. The tags are about being ethicalโknowing that acquiring your medication or your recreational drug doesn't fund the dealing of hard drugs, sex trafficking, or other real crime."
Whether or not this tagging system will make a tangible difference to a potential court case is yet to be seen, but the message is clear: Some people just want to grow the weed they smoke, and which affects only them, without being dealt with like gangsters by the authorities. Whether it's for medical or recreational purposes, Greg tells me he sees it as a human right for someone to be able to grow and supply their own medicine and to have the freedom to choose what they do with their own bodies.
Follow Ali Cedar on Twitter.
//Computes average vuln of a region (helper method)
float Data::ComputeAvgScore(const std::vector<std::string>& region) {
float sum = 0;
for (const auto& country : region) {
sum += adjusted_vuln_index_.at(country);
}
return (sum / region.size());
}
// Find the neighbor field in a FaceStruct that points to i, and change
// it to point to j
void update_neighbor(FaceStruct &f, int i, int j)
{
if (f.n12 == i)
f.n12 = j;
else if (f.n23 == i)
f.n23 = j;
else
f.n31 = j;
}
#include <bits/stdc++.h>
#define _itr ::iterator
#define forn(i, x, y) for (ll i = x; i < y; ++i)
#define ford(i, x, y) for (ll i = x; i > y; --i)
#define forne(i, x, y) for (ll i = x; i <= y; ++i)
#define forde(i, x, y) for (ll i = x; i >= y; --i)
#define rd(x) cin >> x; cin.ignore();
#define rdstr(x) getline(cin, x);
#define rdarr(x, y) for (ll i = 0; i < y; ++i) cin >> x[i];
#define rdmtr(x, y, z) for (ll i = 0; i < y; ++i) for (ll j = 0; j < z; ++j) cin >> x[i][j];
#define pb push_back
#define X first
#define Y second
using namespace std;
typedef long long ll;
typedef long double ld;
const ll MaxN = 1e5 + 7;
const ll MaxM = 1e2 + 7;
const ll MDL = 1e9 + 7;
const ll INF = 1e18 + 7;
typedef pair<ll, ll> pi;
typedef vector<ll> vi;
typedef ll arr[MaxN];
ll REPIT = 1;
const bool MQ = true;
ll n;
ll cnt[MaxN], par[MaxN];
bool visit[MaxN];
set<ll> edg[MaxN];
ll DFS(ll u)
{
visit[u] = 1;
if (edg[u].size() == 1) return 0;
for (ll v : edg[u])
{
if (v == par[u]) continue;
par[v] = u;
if (visit[v]) cnt[u] += (cnt[v] + 1);
else cnt[u] += (DFS(v) + 1);
}
return cnt[u];
}
void Solve()
{
cin >> n;
forne(i, 1, n) edg[i].clear();
forn(i, 1, n)
{
ll u, v;
cin >> u >> v;
edg[u].insert(v); edg[v].insert(u);
}
fill(cnt + 1, cnt + n + 1, 0);
fill(par + 1, par + n + 1, -1);
fill(visit + 1, visit + n + 1, 0);
forne(i, 1, n)
{
if (!visit[i]) DFS(i);
//cout << cnt[i] << " \n"[i == n];
}
vector<ll> cand;
ll mn = n + 1;
forne(i, 1, n)
{
ll sz = n - (cnt[i] + 1);
for (ll v : edg[i])
{
if (v == par[i]) continue;
sz = max(sz, cnt[v] + 1);
}
if (sz <= mn)
{
if (sz < mn) cand.clear();
cand.pb(i);
mn = sz;
}
}
if (cand.size() == 1)
{
cout << 1 << ' ' << *edg[1].begin() << '\n' << 1 << ' ' << *edg[1].begin() << '\n';
return;
}
ll r;
for (ll v : edg[cand.front()])
{
if (v == cand[1]) continue;
r = v;
break;
}
cout << cand[0] << ' ' << r << '\n' << cand[1] << ' ' << r << '\n';
}
int main()
{
ios_base::sync_with_stdio(0); cin.tie(0);
#ifdef OFFLINE
freopen("input.inp", "r", stdin);
#endif
if (MQ == true)
{
rd(REPIT);
}
while (REPIT--) Solve();
cout.flush();
#ifdef OFFLINE
fclose(stdin);
#endif
}
def print_count_results():
print("Total results: {}".format(count_results()))
for operator in operators:
print("{} results: {}".format(
operator.capitalize(),
            count_results(operator)))
Quality of healthcare websites: A comparison of a general-purpose vs. domain-specific search engine.
In a pilot study, we had five typical Internet users evaluate the quality of health websites returned by a general-purpose search engine (Google) and a healthcare-specific search engine (Healthfinder). The evaluators used quality criteria developed by Mitretek/Health Information Technology Institute. Although both search engines provided high quality health websites, we found some important differences between the two types of search engines.
/**
 * A {@link NameResolver} that resolves {@link InetAddress} and forces Round Robin by choosing a single address
* randomly in {@link #resolve(String)} and {@link #resolve(String, Promise)}
* if multiple are returned by the {@link NameResolver}.
* Use {@link #asAddressResolver()} to create a {@link InetSocketAddress} resolver
*/
@UnstableApi
public class RoundRobinInetAddressResolver extends InetNameResolver {
private final NameResolver<InetAddress> nameResolver;
/**
* @param executor the {@link EventExecutor} which is used to notify the listeners of the {@link Future} returned by
* {@link #resolve(String)}
* @param nameResolver the {@link NameResolver} used for name resolution
*/
public RoundRobinInetAddressResolver(EventExecutor executor, NameResolver<InetAddress> nameResolver) {
super(executor);
this.nameResolver = nameResolver;
}
@Override
protected void doResolve(final String inetHost, final Promise<InetAddress> promise) throws Exception {
// hijack the doResolve request, but do a doResolveAll request under the hood.
// Note that InetSocketAddress.getHostName() will never incur a reverse lookup here,
// because an unresolved address always has a host name.
nameResolver.resolveAll(inetHost).addListener(new FutureListener<List<InetAddress>>() {
@Override
public void operationComplete(Future<List<InetAddress>> future) throws Exception {
if (future.isSuccess()) {
List<InetAddress> inetAddresses = future.getNow();
int numAddresses = inetAddresses.size();
if (numAddresses > 0) {
// if there are multiple addresses: we shall pick one by one
// to support the round robin distribution
promise.setSuccess(inetAddresses.get(randomIndex(numAddresses)));
} else {
promise.setFailure(new UnknownHostException(inetHost));
}
} else {
promise.setFailure(future.cause());
}
}
});
}
@Override
protected void doResolveAll(String inetHost, final Promise<List<InetAddress>> promise) throws Exception {
nameResolver.resolveAll(inetHost).addListener(new FutureListener<List<InetAddress>>() {
@Override
public void operationComplete(Future<List<InetAddress>> future) throws Exception {
if (future.isSuccess()) {
List<InetAddress> inetAddresses = future.getNow();
if (!inetAddresses.isEmpty()) {
// create a copy to make sure that it's modifiable random access collection
List<InetAddress> result = new ArrayList<InetAddress>(inetAddresses);
// rotate by different distance each time to force round robin distribution
Collections.rotate(result, randomIndex(inetAddresses.size()));
promise.setSuccess(result);
} else {
promise.setSuccess(inetAddresses);
}
} else {
promise.setFailure(future.cause());
}
}
});
}
private static int randomIndex(int numAddresses) {
return numAddresses == 1 ? 0 : ThreadLocalRandom.current().nextInt(numAddresses);
}
}
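// ---------------------------------------------------------------------------
// Illustrative usage sketch (a separate example file, not part of the resolver
// class above). It assumes Netty's DefaultNameResolver and DefaultEventExecutor
// are available on the classpath; any NameResolver<InetAddress> would work as
// the wrapped resolver.
// ---------------------------------------------------------------------------
import io.netty.resolver.DefaultNameResolver;
import io.netty.resolver.RoundRobinInetAddressResolver;
import io.netty.util.concurrent.DefaultEventExecutor;
import io.netty.util.concurrent.EventExecutor;

class RoundRobinInetAddressResolverExample {
    public static void main(String[] args) throws Exception {
        EventExecutor executor = new DefaultEventExecutor();
        // Wrap a plain blocking JDK-backed resolver with the round-robin behaviour.
        RoundRobinInetAddressResolver resolver =
                new RoundRobinInetAddressResolver(executor, new DefaultNameResolver(executor));
        // Each resolve() call picks one of the returned addresses at random,
        // spreading connections across all records of the host.
        System.out.println(resolver.resolve("example.com").sync().getNow());
        resolver.close();
        executor.shutdownGracefully();
    }
}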
/**
* Returns a set of all keys for the configurables. The "short" names are used whenever
* unambiguous.
*/
Set<String> allKeys() {
ImmutableSet.Builder<String> keys = ImmutableSet.builder();
Set<String> unneeded = Sets.newHashSet();
for (Map.Entry<String, Collection<String>> entry : names.asMap().entrySet()) {
if (entry.getValue().size() == 1) {
keys.add(entry.getKey());
unneeded.add(Iterables.getOnlyElement(entry.getValue()));
}
}
for (String key : configs.keySet()) {
if (!unneeded.contains(key)) {
keys.add(key);
}
}
return keys.build();
}
package util
import (
"log"
"fmt"
"os"
"strings"
"io/ioutil"
"encoding/json"
"gopkg.in/yaml.v3"
"strconv"
"os/exec"
)
type Text struct {
lines []string
}
func GetHomeDir() string {
home,err := os.UserHomeDir()
if err != nil { log.Fatal(err) }
return home
}
func FindIDFile() (string,string) {
home:=GetHomeDir()
var ssh=home+"/.ssh"
files, err := ioutil.ReadDir(ssh)
if err != nil {
log.Print("Can't find a local ID file")
log.Fatal(err)
}
for _, file := range files {
name:=file.Name()
if strings.HasSuffix(name, ".pub") {
// got a matching private key?
priv := ssh+"/"+strings.TrimSuffix(name, ".pub")
if _, err := os.Stat(priv); err == nil {
return ssh+"/"+name , priv
}
}
}
return "",""
}
func WriteFile(dir string,fname string, contents string) {
WriteFile2(dir+"/"+fname,contents)
}
func WriteFile2(path string, contents string) {
log.Printf("save file %s",path)
if path=="/tmp/foo/openshift/bootstrap.ign" { log.Panic()}
f, err := os.Create(path)
if err!=nil { log.Fatal(err)}
defer f.Close()
f.WriteString(contents)
}
func ToString(n int) string {
v := strconv.Itoa(n)
return v
}
func ToInt(n string) int {
v, err := strconv.Atoi(n)
if err == nil {return v}
return -1
}
func LoadAsYaml (path string, b interface{}) {
yamlFile, err := os.Open(path)
if err!=nil { log.Fatal(err)}
defer yamlFile.Close()
byteValue, _ := ioutil.ReadAll(yamlFile)
err = yaml.Unmarshal([]byte(byteValue), b)
if err != nil {
log.Fatalf("cannot unmarshal data: %v", err)
}
}
func SaveAsYaml (path string, d interface{}) {
data,err := yaml.Marshal(&d)
if err != nil {
log.Fatalf("cannot marshal data: %v", err)
}
WriteFile2(path,string(data))
}
func SaveAsJson (path string, d interface{}) {
data,err := json.Marshal(&d)
if err != nil {
log.Fatalf("cannot marshal data: %v", err)
}
WriteFile2(path,string(data))
}
func LoadFromJsonFile(ignfile string) map[string]interface{} {
jsonFile, err := os.Open(ignfile)
if err!=nil { log.Fatal(err)}
defer jsonFile.Close()
byteValue, _ := ioutil.ReadAll(jsonFile)
var result map[string]interface{}
json.Unmarshal([]byte(byteValue), &result)
return result
}
func CreateText() *Text {
var t Text
return &t
}
func LoadFile(file string) string {
data, err := ioutil.ReadFile(file)
if err!=nil {
log.Printf("error loading file [%s]",file)
log.Fatal(err)
}
return strings.TrimSuffix(string(data), "\n")
}
func (t *Text) AsString() string {
return strings.Join(t.lines,"\n")+"\n"
}
func (t *Text) Add(format string, a ...interface{}) {
t.lines=append(t.lines,fmt.Sprintf(format,a...))
}
func CopyFile(from string,todir string, tofile string) {
log.Printf("copy %s to %s/%s",from,todir,tofile)
f :=LoadFile(from)
WriteFile(todir,tofile,f)
}
func MoveFile(from string, to string) {
err := os.Rename(from,to)
if err != nil {
log.Fatal(err)
}
}
func RecreateDir(path string,dirname string) string {
dir := path+"/"+dirname
err := os.RemoveAll(dir)
if err!=nil {
log.Fatal(err)
}
return CreateDir(path,dirname)
}
func CreateDir(path string,dirname string) string {
dir := path+"/"+dirname
if _, err := os.Stat(dir); os.IsNotExist(err) {
os.Mkdir(dir, 0777)
}
return dir
}
func CreateDirFromPath(dir string) string {
if _, err := os.Stat(dir); os.IsNotExist(err) {
os.Mkdir(dir, 0777)
}
return dir
}
func FileMustExist(path string) {
if _, err := os.Stat(path); os.IsNotExist(err) {
log.Fatalf("required file %s does not exist",path)
}
}
func DirMustExist(path string) {
if _, err := os.Stat(path); os.IsNotExist(err) {
log.Fatalf("required directory %s does not exist",path)
}
}
func FileExists(path string) bool {
if _, err := os.Stat(path); os.IsNotExist(err) {
return false
}
return true
}
func CreateFile(path string,fname string) *os.File {
log.Printf("Create File %s/%s",path,fname)
f, err := os.Create(path+"/"+fname)
if err!=nil { log.Fatal(err)}
return f
}
func Emit(f *os.File,format string, a ...interface{}) {
_, err:=fmt.Fprintf(f,format+"\n",a...)
if err != nil {
log.Fatal(err)
}
}
func FetchFiles(dir string, url string,files []string) {
for k,n := range files {
Info("File %d/%d",k+1,len(files))
FetchFile(dir,url,n)
}
}
func SpeculativeExecute(name string, args ...string) (string,error) {
out, err := exec.Command(name,args...).Output()
return strings.TrimSuffix(string(out), "\n"),err
}
func Execute(name string, args ...string) string {
out, err := SpeculativeExecute(name,args...)
if err != nil { log.Fatal(err)}
return out
}
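// ---------------------------------------------------------------------------
// Illustrative usage sketch (a separate example file, not part of package util).
// The import path "example.com/project/util" is hypothetical; substitute this
// package's real module path.
// ---------------------------------------------------------------------------
package main

import (
	"fmt"

	"example.com/project/util"
)

func main() {
	// Round-trip a small config through the YAML helpers.
	cfg := map[string]interface{}{"name": "demo", "replicas": 3}
	util.SaveAsYaml("/tmp/demo.yaml", cfg)
	var loaded map[string]interface{}
	util.LoadAsYaml("/tmp/demo.yaml", &loaded)
	fmt.Println(loaded["name"], loaded["replicas"]) // demo 3

	// Build a multi-line string with the Text helper.
	t := util.CreateText()
	t.Add("hello %s", "world")
	fmt.Print(t.AsString()) // hello world
}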
/**
* Draws an image of the parameter graph including search information:
* opened and investigated vertices and shortest route.
*
* @param searchAlgo The searchAlgo for which the map and route is to be
* drawn
* @param filename The name of the file to which the image is to be written
*
*/
public void draw(SearchAlgo searchAlgo, String filename) {
Graph graph = searchAlgo.getGraph();
try {
int width = graph.getMap().getNcols() + 500;
int height = graph.getMap().getNrows();
if (graph.getMap().getNrows() < 380 ) {
height = 380;
}
BufferedImage bi = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
Graphics2D graphic = bi.createGraphics();
graphic.setBackground(Color.WHITE);
graphic.setColor(Color.WHITE);
graphic.fillRect(0,0, width, height);
double lowest = Double.MAX_VALUE;
double highest = Double.MIN_VALUE;
for (int i = 0; i < graph.getMap().getNcols(); i++) {
for (int j = 0; j < graph.getMap().getNrows(); j++) {
if (graph.getVertice(i, j).getZ() < lowest) {
lowest = graph.getVertice(i, j).getZ();
}
if (graph.getVertice(i, j).getZ() > highest) {
highest = graph.getVertice(i, j).getZ();
}
}
}
double range = highest - lowest;
drawMap(graph, highest, range, graphic);
DynamicList<Vertice> shortestRoute = searchAlgo.returnShortestPath();
if (!shortestRoute.isEmpty()) {
drawRoute(graphic, shortestRoute);
}
int pX = writePictureInfoToLegend(graphic, graph, filename, searchAlgo);
drawColorScale(graphic, highest, pX, lowest);
ImageIO.write(bi, "PNG", new File("images/" + filename + ".PNG"));
} catch (IOException ie) {
ie.printStackTrace();
}
}
// JENA-1862 Turtle and TriG
public static void main(String...a) {
String str = "BASE <http://base/> PREFIX : <http://example/> <urn:a> <b#c> :d .";
Graph graph = GraphFactory.createDefaultGraph();
Context context = RIOT.getContext().copy();
RDFParser.create()
.lang(Lang.TTL)
.source(new StringReader(str))
.parse(graph);
System.out.println("----------");
RDFWriter.create()
.base("http://base/")
.format(RDFFormat.TURTLE_FLAT)
.source(graph)
.context(context)
.output(System.out);
System.out.println("----------");
RDFWriter.create()
.base("http://base/")
.format(RDFFormat.TURTLE_PRETTY)
.source(graph)
.context(context)
.output(System.out);
System.exit(0);
}
// File: disconnect-classlib/src/main/java/js/web/cssom/CSSKeyframesRule.java
package js.web.cssom;
import org.teavm.jso.JSBody;
import org.teavm.jso.JSProperty;
import javax.annotation.Nullable;
/**
 * An object representing a complete set of keyframes for a CSS animation. It corresponds to the contents of a whole @keyframes at-rule. It implements the CSSRule interface with a type value of 7 (CSSRule.KEYFRAMES_RULE).
*/
public interface CSSKeyframesRule extends CSSRule {
@JSBody(script = "return CSSKeyframesRule.prototype")
static CSSKeyframesRule prototype() {
throw new UnsupportedOperationException("Available only in JavaScript");
}
@JSBody(script = "return new CSSKeyframesRule()")
static CSSKeyframesRule create() {
throw new UnsupportedOperationException("Available only in JavaScript");
}
@JSProperty
CSSRuleList getCssRules();
@JSProperty
String getName();
@JSProperty
void setName(String name);
void appendRule(String rule);
void deleteRule(String select);
@Nullable
CSSKeyframeRule findRule(String select);
}
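// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original interface file). It
// assumes a CSSKeyframesRule reference obtained elsewhere, e.g. from a
// stylesheet's CSSRuleList; the "50%" selector is just an example value.
// ---------------------------------------------------------------------------
class CSSKeyframesRuleExample {
    static void retimeKeyframes(CSSKeyframesRule keyframes) {
        // Append a midpoint keyframe to the animation described by this rule.
        keyframes.appendRule("50% { opacity: 0.5; }");
        // Look the keyframe back up by its selector text.
        CSSKeyframeRule half = keyframes.findRule("50%");
        if (half != null) {
            // ... inspect or adjust the keyframe here ...
        }
        // Remove it again when done.
        keyframes.deleteRule("50%");
    }
}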
// Repository: dani-garcia/Rocket
use std::borrow::Cow;
use crate::{Request, http::Method, local::asynchronous};
use super::{Client, LocalResponse};
/// A `blocking` local request as returned by [`Client`](super::Client).
///
/// For details, see [the top-level documentation](../index.html#localrequest).
///
/// ## Example
///
/// The following snippet uses the available builder methods to construct and
/// dispatch a `POST` request to `/` with a JSON body:
///
/// ```rust
/// use rocket::local::blocking::{Client, LocalRequest};
/// use rocket::http::{ContentType, Cookie};
///
/// let client = Client::tracked(rocket::ignite()).expect("valid rocket");
/// let req = client.post("/")
/// .header(ContentType::JSON)
/// .remote("127.0.0.1:8000".parse().unwrap())
/// .cookie(Cookie::new("name", "value"))
/// .body(r#"{ "value": 42 }"#);
///
/// let response = req.dispatch();
/// ```
#[derive(Clone)]
pub struct LocalRequest<'c> {
inner: asynchronous::LocalRequest<'c>,
client: &'c Client,
}
impl<'c> LocalRequest<'c> {
#[inline]
pub(crate) fn new(
client: &'c Client,
method: Method,
uri: Cow<'c, str>
) -> LocalRequest<'c> {
let inner = asynchronous::LocalRequest::new(&client.inner, method, uri);
Self { inner, client }
}
#[inline]
fn _request(&self) -> &Request<'c> {
self.inner._request()
}
#[inline]
fn _request_mut(&mut self) -> &mut Request<'c> {
self.inner._request_mut()
}
fn _body_mut(&mut self) -> &mut Vec<u8> {
self.inner._body_mut()
}
fn _dispatch(self) -> LocalResponse<'c> {
let inner = self.client.block_on(self.inner.dispatch());
LocalResponse { inner, client: self.client }
}
pub_request_impl!("# use rocket::local::blocking::Client;\n\
use rocket::local::blocking::LocalRequest;");
}
impl std::fmt::Debug for LocalRequest<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self._request().fmt(f)
}
}
impl<'c> std::ops::Deref for LocalRequest<'c> {
type Target = Request<'c>;
fn deref(&self) -> &Self::Target {
self.inner()
}
}
import java.io.*;
import java.util.*;
public class A {
public static void main(String[] args) throws Exception {
new A().solve();
}
void solve() throws IOException {
// BufferedReader in = new BufferedReader(new
// InputStreamReader(System.in));
Scanner sc = new Scanner(System.in);
int x = sc.nextInt();
int y = sc.nextInt();
int a = sc.nextInt();
int b = sc.nextInt();
StringBuilder str = new StringBuilder();
int n = 0;
for (int aa = a; aa <= x; aa++) {
for (int bb = b; bb <= y; bb++) {
if (aa > bb) {
str.append(aa + " " + bb + "\n");
n++;
}
}
}
System.out.println(n);
System.out.println(str);
}
}
//
/**
* This method test slaying weapon enchantment
*/
@Test
public void testSlayingEnchantment(){
gamePlayScreen.character.setWeaponName("custom_slaying_weapon");
for(int i = 0; i < 3 ; i++)
gamePlayScreen.playerMomentMechanics.new UP_PRESSED().actionPerformed(null);
for(int i = 0; i < 18 ; i++)
gamePlayScreen.playerMomentMechanics.new RIGHT_PRESSED().actionPerformed(null);
assertEquals(0, GameMechanics.getAHostileMonster(gamePlayScreen.currentMap).getHitScore());
}
/**
* view unresolved subjects
* @param request
* @param response
*/
public void viewUnresolvedSubjects(HttpServletRequest request, HttpServletResponse response) {
final GrouperRequestContainer grouperRequestContainer = GrouperRequestContainer.retrieveFromRequestOrCreate();
final SubjectResolutionContainer subjectResolutionContainer = grouperRequestContainer.getSubjectResolutionContainer();
final Subject loggedInSubject = GrouperUiFilter.retrieveSubjectLoggedIn();
GrouperSession grouperSession = null;
try {
grouperSession = GrouperSession.start(loggedInSubject);
subjectResolutionContainer.assertSubjectResolutionEnabledAndAllowed();
final GuiResponseJs guiResponseJs = GuiResponseJs.retrieveGuiResponseJs();
GuiPaging guiPaging = subjectResolutionContainer.getGuiPaging();
final QueryOptions queryOptions = new QueryOptions();
QuerySort querySort = new QuerySort("sourceId, subjectId", true);
queryOptions.sort(querySort);
GrouperPagingTag2.processRequest(request, guiPaging, queryOptions);
String includeDeleted = request.getParameter("includeDeleted");
Boolean deleted = null;
if (StringUtils.equals(includeDeleted, "showDeleted")) {
deleted = true;
subjectResolutionContainer.setShowDeleted(true);
} else if (StringUtils.equals(includeDeleted, "doNotShowDeleted") || StringUtils.isBlank(includeDeleted)) {
deleted = false;
subjectResolutionContainer.setShowDeleted(false);
} else if (StringUtils.equals(includeDeleted, "showAll")) {
deleted = null;
subjectResolutionContainer.setShowDeleted(null);
} else {
throw new RuntimeException("Invalid value for includeDeleted: '" + includeDeleted + "'");
}
subjectResolutionContainer.setShowDeleted(deleted);
final Boolean DELETED = deleted;
GrouperSession.internal_callbackRootGrouperSession(new GrouperSessionHandler() {
@Override
public Object callback(GrouperSession grouperSession) throws GrouperSessionException {
Set<SubjectResolutionAttributeValue> unresolvedSubjects = UsduService.getUnresolvedSubjects(queryOptions, DELETED);
subjectResolutionContainer.setUnresolvedSubjects(unresolvedSubjects);
return null;
}
});
guiPaging.setTotalRecordCount(queryOptions.getQueryPaging().getTotalRecordCount());
guiResponseJs.addAction(GuiScreenAction.newInnerHtmlFromJsp("#grouperMainContentDivId",
"/WEB-INF/grouperUi2/subjectResolution/unresolvedSubjects.jsp"));
} finally {
GrouperSession.stopQuietly(grouperSession);
}
}
// Repository: yuulive/j
use crate::{Citizen, validator};
use crate::country::Code;
use crate::validator::algorithms;
pub(crate) struct LuxembourgValidator;
/**
Luxembourg National Identifier Number code validation.
TIN validation logic source: https://www.oecd.org/tax/automatic-exchange/crs-implementation-and-assistance/tax-identification-numbers/Luxembourg-TIN.pdf
Another (English) description of the validation can be found on Wikipedia: https://en.wikipedia.org/wiki/National_identification_number#Luxembourg
**/
impl validator::CountryValidator for LuxembourgValidator {
fn validate_id(&self, id: &str) -> bool {
let standard_id = id.replace(" ", "").replace("-", "");
if standard_id.len() != 13 {
return false;
}
if !algorithms::validate_luhn_10(&standard_id[0..12]) {
return false;
}
return algorithms::validate_verhoeff(&standard_id);
}
fn country_code(&self) -> Code {
return crate::country::Code::LU;
}
fn extract_citizen(&self, _id: &str) -> Option<Citizen> {
return None;
}
}
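// A minimal sketch of the Luhn mod-10 check that `algorithms::validate_luhn_10`
// is assumed to perform over the first twelve digits (the real helper lives in
// this crate's `algorithms` module and may differ in details such as input
// normalisation). Illustrative only.
#[allow(dead_code)]
fn luhn_10_is_valid(digits: &str) -> bool {
    let mut sum = 0u32;
    let mut double = false; // doubling starts with the second digit from the right
    for c in digits.chars().rev() {
        let mut d = match c.to_digit(10) {
            Some(d) => d,
            None => return false, // reject non-digit input
        };
        if double {
            d *= 2;
            if d > 9 {
                d -= 9;
            }
        }
        sum += d;
        double = !double;
    }
    sum % 10 == 0
}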
#[cfg(test)]
mod tests {
use crate::validator::CountryValidator;
#[test]
fn lu_validator_requires_len_of_13() {
let validator = super::validator::luxembourg::LuxembourgValidator;
assert_eq!(false, validator.validate_id("123"));
assert_eq!(false, validator.validate_id("123-456-7"));
}
#[test]
fn lu_validator_invalid_ids() {
let validator = super::validator::luxembourg::LuxembourgValidator;
assert_eq!(validator.validate_id("1994789587182"), false);
assert_eq!(validator.validate_id("5971654782313"), false);
assert_eq!(validator.validate_id("2055101054879"), false);
assert_eq!(validator.validate_id("1997053045687"), false);
}
#[test]
fn lu_validator_valid_ids() {
let validator = super::validator::luxembourg::LuxembourgValidator;
assert_eq!(validator.validate_id("1983081246783"), true);
assert_eq!(validator.validate_id("2003042581931"), true);
assert_eq!(validator.validate_id("1971110258746"), true);
assert_eq!(validator.validate_id("2012051469336"), true);
assert_eq!(validator.validate_id("1994092874551"), true);
}
}
/**
* @author cearagon
 * Inner Class: This inner class exists to build up the Metadata data type.
*/
public class Metadata{
private String nombreMetadata;
private boolean obligatoriedadMetadata;
private String tipoMetadata;
private int ordenMetadata;
public String getNombreMetadata() {
return nombreMetadata;
}
public void setNombreMetadata(String nombreMetadata) {
this.nombreMetadata = nombreMetadata;
}
public boolean isObligatoriedadMetadata() {
return obligatoriedadMetadata;
}
public void setObligatoriedadMetadata(boolean obligatoriedadMetadata) {
this.obligatoriedadMetadata = obligatoriedadMetadata;
}
public String getTipoMetadata() {
return tipoMetadata;
}
public void setTipoMetadata(String tipoMetadata) {
this.tipoMetadata = tipoMetadata;
}
public int getOrdenMetadata() {
return ordenMetadata;
}
public void setOrdenMetadata(int ordenMetadata) {
this.ordenMetadata = ordenMetadata;
}
}
import torch
import torch.nn as nn
import torch.nn.functional as F
class MSELoss(nn.Module):
    def __init__(self, reduction="mean"):
        super(MSELoss, self).__init__()
        # Store the reduction mode ("mean", "sum" or "none") instead of ignoring it.
        self.reduction = reduction

    def forward(self, x, y):
        loss = torch.pow(x - y, 2)
        if self.reduction == "sum":
            return loss.sum()
        if self.reduction == "none":
            return loss
        return loss.mean()
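# Illustrative sanity check of the reduction behaviour (not part of the original module).
if __name__ == "__main__":
    x = torch.tensor([1.0, 2.0, 3.0])
    y = torch.tensor([1.0, 0.0, 0.0])
    print(MSELoss("mean")(x, y))  # tensor(4.3333)
    print(MSELoss("sum")(x, y))   # tensor(13.)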
Island Records Copyright: Island Records
It's a long time since Moulin Rouge, but Ewan McGregor is flexing his vocal cords again.
The actor stars in the new video for Radio 1 favourites Catfish and the Bottlemen - after the band staged a somewhat creepy campaign to get his attention.
Singer Van McCann stitched together a three-minute montage of McGregor's smiling face to soundtrack the song Kathleen; while drummer Bob Hall played a tour with McGregor's picture on his drum skin.
Luckily, the Scottish star was more flattered than frightened - and became a big fan of the Bottlemen's scrappy, infectious album.
And so, he agreed to star in the band's new video Hourglass - even singing the song a capella for good measure.
"Working with Sir Ewan was one of the funnest things I think I'll ever get to do," said McCann. "He's the most interesting and interested guy I've ever met. He's just [expletive] brilliant, a total gentleman.
Watch the video on YouTube - but beware of the strong language.
/**
* Insert or update message delivery status.
*
* @param status Message status to insert or update.
*/
public void upsert(ChatMessageStatus status) {
Map<String, UIMessageItem> perConversation = messageData.get(status.getConversationId());
if (perConversation == null) {
perConversation = new HashMap<>();
messageData.put(status.getConversationId(), perConversation);
}
UIMessageItem perMessage = perConversation.get(status.getMessageId());
if (perMessage == null) {
return;
}
perMessage.updateStatus(status);
}
// Copyright (C) 2010-2015 <NAME>
// See the file COPYING for copying permission.
#pragma once
namespace hadesmem
{
namespace cerberus
{
class PluginInterface;
}
}
void InitializeGui(hadesmem::cerberus::PluginInterface* cerberus);
void CleanupGui(hadesmem::cerberus::PluginInterface* cerberus);
from .core import *
from .kernel import *
from .map import *
// File: src/state/index.ts
// ==== Automation import: Import Type ==== //
import {
AdminGetters,
AdminMutations,
AdminActions,
ADMIN_MODULE_NAME,
} from './modules/users/admin';
import {
AuthGetters,
AuthStates,
AuthMutations,
AuthActions,
AUTH_MODULE_NAME,
} from './modules/auth';
import {
UsersGetters,
UsersStates,
UsersMutations,
UsersActions,
USERS_MODULE_NAME,
} from './modules/users';
// ==== Dont remove comment of this section ==== //
// ==== Automation import: Namespaced Type ==== //
//#region Admin
type AdminGettersNamespaced = Namespaced<
AdminGetters,
typeof ADMIN_MODULE_NAME
>;
type AdminMutationsNamespaced = Namespaced<
AdminMutations,
typeof ADMIN_MODULE_NAME
>;
type AdminActionsNamespaced = Namespaced<
AdminActions,
typeof ADMIN_MODULE_NAME
>;
//#endregion
//#region Auth
type AuthGettersNamespaced = Namespaced<AuthGetters, typeof AUTH_MODULE_NAME>;
type AuthMutationsNamespaced = Namespaced<
AuthMutations,
typeof AUTH_MODULE_NAME
>;
type AuthActionsNamespaced = Namespaced<AuthActions, typeof AUTH_MODULE_NAME>;
//#endregion
//#region User
type UsersGettersNamespaced = Namespaced<
UsersGetters,
typeof USERS_MODULE_NAME
>;
type UsersMutationsNamespaced = Namespaced<
UsersMutations,
typeof USERS_MODULE_NAME
>;
type UsersActionsNamespaced = Namespaced<
UsersActions,
typeof USERS_MODULE_NAME
>;
//#endregion
// ==== Dont remove comment of this section ==== //
// ==== Automation import: Root Type ==== //
//#region Root Type
export type RootGetters = AdminGettersNamespaced &
AuthGettersNamespaced &
UsersGettersNamespaced &
unknown;
export type RootMutations = AdminMutationsNamespaced &
AuthMutationsNamespaced &
UsersMutationsNamespaced &
unknown;
export type RootActions = AdminActionsNamespaced &
AuthActionsNamespaced &
UsersActionsNamespaced &
unknown;
export type RootState = {
[AUTH_MODULE_NAME]: AuthStates;
} & {
[USERS_MODULE_NAME]: UsersStates;
} & unknown;
//#endregion
// ==== Dont remove comment of this section ==== //
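// ---------------------------------------------------------------------------
// Illustrative only: a plausible shape for the `Namespaced` helper used above,
// which prefixes every key of a module's typings with the module name (e.g.
// "auth/login"). The real utility type is defined elsewhere in this project
// and may differ; this sketch uses a distinct name to avoid clashing with it.
// ---------------------------------------------------------------------------
type NamespacedExample<T, N extends string> = {
  [K in keyof T & string as `${N}/${K}`]: T[K];
};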
/*
Copyright 2019 The Vitess Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package grpcvtgateconn
// This is agnostic of grpc and was in a separate package 'vtgateconntest'.
// This has been moved here for better readability. If we introduce
// protocols other than grpc in the future, this will have to be
// moved back to its own package for reusability.
import (
"errors"
"fmt"
"io"
"strings"
"testing"
"context"
"github.com/golang/protobuf/proto"
"github.com/stretchr/testify/require"
"vitess.io/vitess/go/sqltypes"
"vitess.io/vitess/go/tb"
"vitess.io/vitess/go/vt/callerid"
"vitess.io/vitess/go/vt/vterrors"
"vitess.io/vitess/go/vt/vtgate/vtgateconn"
"vitess.io/vitess/go/vt/vtgate/vtgateservice"
binlogdatapb "vitess.io/vitess/go/vt/proto/binlogdata"
querypb "vitess.io/vitess/go/vt/proto/query"
topodatapb "vitess.io/vitess/go/vt/proto/topodata"
vtgatepb "vitess.io/vitess/go/vt/proto/vtgate"
vtrpcpb "vitess.io/vitess/go/vt/proto/vtrpc"
)
// fakeVTGateService has the server side of this fake
type fakeVTGateService struct {
t *testing.T
panics bool
hasError bool
errorWait chan struct{}
}
const (
expectedErrMatch string = "test vtgate error"
expectedCode vtrpcpb.Code = vtrpcpb.Code_INVALID_ARGUMENT
)
var errTestVtGateError = vterrors.New(expectedCode, expectedErrMatch)
func newContext() context.Context {
ctx := context.Background()
ctx = callerid.NewContext(ctx, testCallerID, nil)
return ctx
}
func (f *fakeVTGateService) checkCallerID(ctx context.Context, name string) {
ef := callerid.EffectiveCallerIDFromContext(ctx)
if ef == nil {
f.t.Errorf("no effective caller id for %v", name)
} else {
if !proto.Equal(ef, testCallerID) {
f.t.Errorf("invalid effective caller id for %v: got %v expected %v", name, ef, testCallerID)
}
}
}
// queryExecute contains all the fields we use to test Execute
type queryExecute struct {
SQL string
BindVariables map[string]*querypb.BindVariable
Session *vtgatepb.Session
}
func (q *queryExecute) equal(q2 *queryExecute) bool {
return q.SQL == q2.SQL &&
sqltypes.BindVariablesEqual(q.BindVariables, q2.BindVariables) &&
proto.Equal(q.Session, q2.Session)
}
// Execute is part of the VTGateService interface
func (f *fakeVTGateService) Execute(ctx context.Context, session *vtgatepb.Session, sql string, bindVariables map[string]*querypb.BindVariable) (*vtgatepb.Session, *sqltypes.Result, error) {
if f.hasError {
return session, nil, errTestVtGateError
}
if f.panics {
panic(fmt.Errorf("test forced panic"))
}
f.checkCallerID(ctx, "Execute")
execCase, ok := execMap[sql]
if !ok {
return session, nil, fmt.Errorf("no match for: %s", sql)
}
query := &queryExecute{
SQL: sql,
BindVariables: bindVariables,
Session: session,
}
if !query.equal(execCase.execQuery) {
f.t.Errorf("Execute:\n%+v, want\n%+v", query, execCase.execQuery)
return session, nil, nil
}
if execCase.outSession != nil {
*session = *execCase.outSession
}
return session, execCase.result, nil
}
// ExecuteBatch is part of the VTGateService interface
func (f *fakeVTGateService) ExecuteBatch(ctx context.Context, session *vtgatepb.Session, sqlList []string, bindVariablesList []map[string]*querypb.BindVariable) (*vtgatepb.Session, []sqltypes.QueryResponse, error) {
if f.hasError {
return session, nil, errTestVtGateError
}
if f.panics {
panic(fmt.Errorf("test forced panic"))
}
f.checkCallerID(ctx, "ExecuteBatch")
execCase, ok := execMap[sqlList[0]]
if !ok {
return session, nil, fmt.Errorf("no match for: %s", sqlList[0])
}
query := &queryExecute{
SQL: sqlList[0],
BindVariables: bindVariablesList[0],
Session: session,
}
if !query.equal(execCase.execQuery) {
f.t.Errorf("Execute: %+v, want %+v", query, execCase.execQuery)
return session, nil, nil
}
if execCase.outSession != nil {
*session = *execCase.outSession
}
return session, []sqltypes.QueryResponse{{
QueryResult: execCase.result,
QueryError: nil,
}}, nil
}
// StreamExecute is part of the VTGateService interface
func (f *fakeVTGateService) StreamExecute(ctx context.Context, session *vtgatepb.Session, sql string, bindVariables map[string]*querypb.BindVariable, callback func(*sqltypes.Result) error) error {
if f.panics {
panic(fmt.Errorf("test forced panic"))
}
execCase, ok := execMap[sql]
if !ok {
return fmt.Errorf("no match for: %s", sql)
}
f.checkCallerID(ctx, "StreamExecute")
query := &queryExecute{
SQL: sql,
BindVariables: bindVariables,
Session: session,
}
if !query.equal(execCase.execQuery) {
f.t.Errorf("StreamExecute:\n%+v, want\n%+v", query, execCase.execQuery)
return nil
}
if execCase.result != nil {
result := &sqltypes.Result{
Fields: execCase.result.Fields,
}
if err := callback(result); err != nil {
return err
}
if f.hasError {
			// wait until the client has the response, since streaming implementations may not
			// send previous messages if an error has been triggered.
<-f.errorWait
f.errorWait = make(chan struct{}) // for next test
return errTestVtGateError
}
for _, row := range execCase.result.Rows {
result := &sqltypes.Result{
Rows: [][]sqltypes.Value{row},
}
if err := callback(result); err != nil {
return err
}
}
}
return nil
}
// ResolveTransaction is part of the VTGateService interface
func (f *fakeVTGateService) ResolveTransaction(ctx context.Context, dtid string) error {
if f.hasError {
return errTestVtGateError
}
if f.panics {
panic(fmt.Errorf("test forced panic"))
}
f.checkCallerID(ctx, "ResolveTransaction")
if dtid != dtid2 {
return errors.New("ResolveTransaction: dtid mismatch")
}
return nil
}
func (f *fakeVTGateService) VStream(ctx context.Context, tabletType topodatapb.TabletType, vgtid *binlogdatapb.VGtid, filter *binlogdatapb.Filter, send func([]*binlogdatapb.VEvent) error) error {
panic("unimplemented")
}
// CreateFakeServer returns the fake server for the tests
func CreateFakeServer(t *testing.T) vtgateservice.VTGateService {
return &fakeVTGateService{
t: t,
panics: false,
errorWait: make(chan struct{}),
}
}
// RegisterTestDialProtocol registers a vtgateconn implementation under the "test" protocol
func RegisterTestDialProtocol(impl vtgateconn.Impl) {
vtgateconn.RegisterDialer("test", func(ctx context.Context, address string) (vtgateconn.Impl, error) {
return impl, nil
})
}
// HandlePanic is part of the VTGateService interface
func (f *fakeVTGateService) HandlePanic(err *error) {
if x := recover(); x != nil {
// gRPC 0.13 chokes when you return a streaming error that contains newlines.
*err = fmt.Errorf("uncaught panic: %v, %s", x,
strings.Replace(string(tb.Stack(4)), "\n", ";", -1))
}
}
// RunTests runs all the tests
func RunTests(t *testing.T, impl vtgateconn.Impl, fakeServer vtgateservice.VTGateService) {
vtgateconn.RegisterDialer("test", func(ctx context.Context, address string) (vtgateconn.Impl, error) {
return impl, nil
})
conn, err := vtgateconn.DialProtocol(context.Background(), "test", "")
if err != nil {
t.Fatalf("Got err: %v from vtgateconn.DialProtocol", err)
}
session := conn.Session("connection_ks@rdonly", testExecuteOptions)
fs := fakeServer.(*fakeVTGateService)
testExecute(t, session)
testStreamExecute(t, session)
testExecuteBatch(t, session)
// force a panic at every call, then test that works
fs.panics = true
testExecutePanic(t, session)
testExecuteBatchPanic(t, session)
testStreamExecutePanic(t, session)
fs.panics = false
}
// RunErrorTests runs all the tests that expect errors
func RunErrorTests(t *testing.T, fakeServer vtgateservice.VTGateService) {
conn, err := vtgateconn.DialProtocol(context.Background(), "test", "")
if err != nil {
t.Fatalf("Got err: %v from vtgateconn.DialProtocol", err)
}
session := conn.Session("connection_ks@rdonly", testExecuteOptions)
fs := fakeServer.(*fakeVTGateService)
// return an error for every call, make sure they're handled properly
fs.hasError = true
testExecuteError(t, session, fs)
testExecuteBatchError(t, session, fs)
testStreamExecuteError(t, session, fs)
fs.hasError = false
}
func expectPanic(t *testing.T, err error) {
expected1 := "test forced panic"
expected2 := "uncaught panic"
if err == nil || !strings.Contains(err.Error(), expected1) || !strings.Contains(err.Error(), expected2) {
t.Fatalf("Expected a panic error with '%v' or '%v' but got: %v", expected1, expected2, err)
}
}
// Verifies the returned error has the properties that we expect.
func verifyError(t *testing.T, err error, method string) {
if err == nil {
t.Errorf("%s was expecting an error, didn't get one", method)
return
}
// verify error code
code := vterrors.Code(err)
if code != expectedCode {
t.Errorf("Unexpected error code from %s: got %v, wanted %v", method, code, expectedCode)
}
verifyErrorString(t, err, method)
}
func verifyErrorString(t *testing.T, err error, method string) {
if err == nil {
t.Errorf("%s was expecting an error, didn't get one", method)
return
}
if !strings.Contains(err.Error(), expectedErrMatch) {
t.Errorf("Unexpected error from %s: got %v, wanted err containing: %v", method, err, errTestVtGateError.Error())
}
}
func testExecute(t *testing.T, session *vtgateconn.VTGateSession) {
ctx := newContext()
execCase := execMap["request1"]
qr, err := session.Execute(ctx, execCase.execQuery.SQL, execCase.execQuery.BindVariables)
require.NoError(t, err)
if !qr.Equal(execCase.result) {
t.Errorf("Unexpected result from Execute: got\n%#v want\n%#v", qr, execCase.result)
}
_, err = session.Execute(ctx, "none", nil)
want := "no match for: none"
if err == nil || !strings.Contains(err.Error(), want) {
t.Errorf("none request: %v, want %v", err, want)
}
}
func testExecuteError(t *testing.T, session *vtgateconn.VTGateSession, fake *fakeVTGateService) {
ctx := newContext()
execCase := execMap["errorRequst"]
_, err := session.Execute(ctx, execCase.execQuery.SQL, execCase.execQuery.BindVariables)
verifyError(t, err, "Execute")
}
func testExecutePanic(t *testing.T, session *vtgateconn.VTGateSession) {
ctx := newContext()
execCase := execMap["request1"]
_, err := session.Execute(ctx, execCase.execQuery.SQL, execCase.execQuery.BindVariables)
expectPanic(t, err)
}
func testExecuteBatch(t *testing.T, session *vtgateconn.VTGateSession) {
ctx := newContext()
execCase := execMap["request1"]
qr, err := session.ExecuteBatch(ctx, []string{execCase.execQuery.SQL}, []map[string]*querypb.BindVariable{execCase.execQuery.BindVariables})
require.NoError(t, err)
if !qr[0].QueryResult.Equal(execCase.result) {
t.Errorf("Unexpected result from Execute: got\n%#v want\n%#v", qr, execCase.result)
}
_, err = session.ExecuteBatch(ctx, []string{"none"}, nil)
want := "no match for: none"
if err == nil || !strings.Contains(err.Error(), want) {
t.Errorf("none request: %v, want %v", err, want)
}
}
func testExecuteBatchError(t *testing.T, session *vtgateconn.VTGateSession, fake *fakeVTGateService) {
ctx := newContext()
execCase := execMap["errorRequst"]
_, err := session.ExecuteBatch(ctx, []string{execCase.execQuery.SQL}, []map[string]*querypb.BindVariable{execCase.execQuery.BindVariables})
verifyError(t, err, "ExecuteBatch")
}
func testExecuteBatchPanic(t *testing.T, session *vtgateconn.VTGateSession) {
ctx := newContext()
execCase := execMap["request1"]
_, err := session.ExecuteBatch(ctx, []string{execCase.execQuery.SQL}, []map[string]*querypb.BindVariable{execCase.execQuery.BindVariables})
expectPanic(t, err)
}
func testStreamExecute(t *testing.T, session *vtgateconn.VTGateSession) {
ctx := newContext()
execCase := execMap["request1"]
stream, err := session.StreamExecute(ctx, execCase.execQuery.SQL, execCase.execQuery.BindVariables)
if err != nil {
t.Fatal(err)
}
var qr sqltypes.Result
for {
packet, err := stream.Recv()
if err != nil {
if err != io.EOF {
t.Error(err)
}
break
}
if len(packet.Fields) != 0 {
qr.Fields = packet.Fields
}
if len(packet.Rows) != 0 {
qr.Rows = append(qr.Rows, packet.Rows...)
}
}
wantResult := *execCase.result
wantResult.RowsAffected = 0
wantResult.InsertID = 0
if !qr.Equal(&wantResult) {
t.Errorf("Unexpected result from StreamExecute: got %+v want %+v", qr, wantResult)
}
stream, err = session.StreamExecute(ctx, "none", nil)
if err != nil {
t.Fatal(err)
}
_, err = stream.Recv()
want := "no match for: none"
if err == nil || !strings.Contains(err.Error(), want) {
t.Errorf("none request: %v, want %v", err, want)
}
}
func testStreamExecuteError(t *testing.T, session *vtgateconn.VTGateSession, fake *fakeVTGateService) {
ctx := newContext()
execCase := execMap["request1"]
stream, err := session.StreamExecute(ctx, execCase.execQuery.SQL, execCase.execQuery.BindVariables)
if err != nil {
t.Fatalf("StreamExecute failed: %v", err)
}
qr, err := stream.Recv()
if err != nil {
t.Fatalf("StreamExecute failed: cannot read result1: %v", err)
}
if !qr.Equal(&streamResultFields) {
t.Errorf("Unexpected result from StreamExecute: got %#v want %#v", qr, &streamResultFields)
}
// signal to the server that the first result has been received
close(fake.errorWait)
// After 1 result, we expect to get an error (no more results).
_, err = stream.Recv()
if err == nil {
t.Fatalf("StreamExecute channel wasn't closed")
}
verifyError(t, err, "StreamExecute")
}
func testStreamExecutePanic(t *testing.T, session *vtgateconn.VTGateSession) {
ctx := newContext()
execCase := execMap["request1"]
stream, err := session.StreamExecute(ctx, execCase.execQuery.SQL, execCase.execQuery.BindVariables)
if err != nil {
t.Fatal(err)
}
_, err = stream.Recv()
if err == nil {
t.Fatalf("Received packets instead of panic?")
}
expectPanic(t, err)
}
var testCallerID = &vtrpcpb.CallerID{
Principal: "test_principal",
Component: "test_component",
Subcomponent: "test_subcomponent",
}
var testExecuteOptions = &querypb.ExecuteOptions{
IncludedFields: querypb.ExecuteOptions_TYPE_ONLY,
}
var execMap = map[string]struct {
execQuery *queryExecute
result *sqltypes.Result
outSession *vtgatepb.Session
err error
}{
"request1": {
execQuery: &queryExecute{
SQL: "request1",
BindVariables: map[string]*querypb.BindVariable{
"bind1": sqltypes.Int64BindVariable(0),
},
Session: &vtgatepb.Session{
TargetString: "connection_ks@rdonly",
Options: testExecuteOptions,
Autocommit: true,
},
},
result: &result1,
},
"errorRequst": {
execQuery: &queryExecute{
SQL: "errorRequst",
BindVariables: map[string]*querypb.BindVariable{
"bind1": sqltypes.Int64BindVariable(0),
},
Session: &vtgatepb.Session{
TargetString: "connection_ks@rdonly",
Options: testExecuteOptions,
},
},
},
}
var result1 = sqltypes.Result{
Fields: []*querypb.Field{
{
Name: "field1",
Type: sqltypes.Int16,
},
{
Name: "field2",
Type: sqltypes.Int32,
},
},
RowsAffected: 123,
InsertID: 72,
Rows: [][]sqltypes.Value{
{
sqltypes.TestValue(sqltypes.Int16, "1"),
sqltypes.NULL,
},
{
sqltypes.TestValue(sqltypes.Int16, "2"),
sqltypes.NewInt32(3),
},
},
}
// streamResultFields is only the fields, sent as the first packet
var streamResultFields = sqltypes.Result{
Fields: result1.Fields,
Rows: [][]sqltypes.Value{},
}
var dtid2 = "aa"
def forward(self, x: T) -> Tuple[T, T, T, T]:
    q_m, q_v = split_in_half(self.fc(torch.sqrt(x)))
    z_loc, z_scale = q_m[..., :-1], nn.functional.softplus(q_v[..., :-1])
    l_loc, l_scale = q_m[..., -1:], nn.functional.softplus(q_v[..., -1:])
    return z_loc, z_scale, l_loc, l_scale
A 46-year-old Moroccan asylum seeker has been sentenced to 13 years in prison in Vienna for the attempted murder of his roommate at an asylum centre in Simmering.
In his defence the man said that he thought the 27-year-old Syrian man and a group of other men were planning to rape him.
He told the jury that he had been sexually abused by two men the day before in an apartment in Ottakring. He went to a hospital after the alleged attack but doctors could find no evidence that he had been raped. After being released from hospital and giving a statement to the police he returned to his accommodation and stabbed his sleeping roommate with an ornamental 8cm-long knife that was hanging on the wall.
However, the judge said that the real reason for the attack last September was the fact that the Moroccan man was claiming to be a Syrian refugee and that he was afraid the younger man would betray his real origins. He had travelled to Austria through Italy, using false identity documents, but the Syrian refugees at the asylum centre soon realised he was Moroccan because of the Arabic dialect he spoke.
The Moroccan stabbed the younger man so violently that he ripped open his abdomen and injured his liver. He also broke one of his ribs and severed a thoracic artery. If the 27-year-old had not screamed loudly for help, doctors said he would not have survived the attack.
The Syrian man told the jury that he has had problems sleeping since the attack and still has pain in his liver. He worked as a coffee-roaster in his homeland and had found a job as a waiter in Vienna.
He said he couldn't understand why the man attacked him and that he had always supported and helped him. "He still owes me €500," he said. "One day he came to the kitchen with a copy of the Koran and asked us to swear on it that we would not cooperate with the Austrian authorities and would not betray him," he added.
The Moroccan also fought with fellow inmates whilst in police custody and accused them of wanting to rape him, the prosecutor said.
/**
* Mark the specific delivery tag as acknowledgment received and return the specific {@link AckData} object.
*
* @param deliveryTag delivery tag of the acknowledged message delivery
* @return AckData object for the corresponding delivery tag
*/
AckData markAcknowledgement(long deliveryTag) {
AckData ackData = pendingAcknowledgments.remove(deliveryTag);
if (Objects.nonNull(ackData)) {
markedAcknowledgments.put(deliveryTag, ackData);
}
return ackData;
}