// Insert one send/recv event in two nodes
Status StreamAllocator::InsertOneEventInTwoNodes(const NodePtr &cur_node, const NodePtr &next_node) {
GE_CHECK_NOTNULL(cur_node->GetOpDesc());
GE_CHECK_NOTNULL(next_node->GetOpDesc());
int64_t cur_stream_id = cur_node->GetOpDesc()->GetStreamId();
if (cur_stream_id == kInvalidStream) {
GELOGD("No need to insert event after node %s.", cur_node->GetName().c_str());
return SUCCESS;
}
int64_t next_stream_id = next_node->GetOpDesc()->GetStreamId();
if (cur_stream_id == next_stream_id) {
return SUCCESS;
}
if (((cur_node->GetType() == ENTER) || (cur_node->GetType() == REFENTER)) && (next_node->GetType() != STREAMACTIVE)) {
GELOGD("No need to insert event between %s and %s.", cur_node->GetName().c_str(), next_node->GetName().c_str());
return SUCCESS;
}
if (next_stream_id == kInvalidStream) {
REPORT_INNER_ERROR("E19999", "Stream id of next_node %s(%s) should not be %ld",
next_node->GetName().c_str(), next_node->GetType().c_str(), kInvalidStream);
GELOGE(FAILED, "[Check][Param] Stream id of next_node %s should not be %ld",
next_node->GetName().c_str(), kInvalidStream);
return FAILED;
}
string next_node_label;
if (AttrUtils::GetStr(next_node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, next_node_label) && !next_node_label.empty()) {
auto iter = specific_activated_labels_.find(next_node_label);
if (iter != specific_activated_labels_.end()) {
for (const auto &active_node : iter->second) {
OpDescPtr active_op = active_node->GetOpDesc();
GE_CHECK_NOTNULL(active_op);
if ((cur_stream_id == active_op->GetStreamId()) && (cur_node->GetOpDesc()->GetId() <= active_op->GetId())) {
GELOGI("No need to insert event between node %s and %s.", cur_node->GetName().c_str(),
next_node->GetName().c_str());
return SUCCESS;
}
}
}
}
AddSendEventId(cur_node, event_num_);
AddRecvEventId(next_node, event_num_);
GELOGD("Insert event %u between node %s(stream %ld) and %s(stream %ld)", event_num_, cur_node->GetName().c_str(),
cur_stream_id, next_node->GetName().c_str(), next_stream_id);
++event_num_;
return SUCCESS;
}
|
def promote_chat_member(self, chat_id, user_id, can_change_info=None,
can_post_messages=None, can_edit_messages=None,
can_delete_messages=None, can_invite_users=None,
can_restrict_members=None, can_pin_messages=None,
can_promote_members=None, timeout=None, **kwargs):
url = '{0}/promoteChatMember'.format(self.base_url)
data = {'chat_id': chat_id, 'user_id': user_id}
if can_change_info is not None:
data['can_change_info'] = can_change_info
if can_post_messages is not None:
data['can_post_messages'] = can_post_messages
if can_edit_messages is not None:
data['can_edit_messages'] = can_edit_messages
if can_delete_messages is not None:
data['can_delete_messages'] = can_delete_messages
if can_invite_users is not None:
data['can_invite_users'] = can_invite_users
if can_restrict_members is not None:
data['can_restrict_members'] = can_restrict_members
if can_pin_messages is not None:
data['can_pin_messages'] = can_pin_messages
if can_promote_members is not None:
data['can_promote_members'] = can_promote_members
data.update(kwargs)
result = self._request.post(url, data, timeout=timeout)
return result
|
/**
 * List a user's transactions (where the user is the sender or the recipient).
 * <p>
 * Sortable properties are:
 * <ul>
 * <li>id</li>
 * <li>amount</li>
 * </ul>
 *
 * @param userId ID of the user whose transactions are returned
 * @param cursorRequest pagination parameters
 * @return the transaction list
 */
@Transactional(readOnly = true)
public CursorResponse<Transaction> listTransactions(long userId, CursorRequest cursorRequest) {
return CursorFetcher.<Transaction, TransactionEntity>create()
.recordsQuery(q -> transactionRepository.findAll(
q.getSpecification().and(isSender(userId).or(isRecipient(userId))),
q.getPageable()))
.recordMapper(transactionMapper::toTransaction)
.property("id", new LongPropertyType(), TransactionEntity::getId, true)
.property("amount", new BigDecimalPropertyType(), TransactionEntity::getAmount)
.fetch(cursorRequest);
}
|
/**
* PostgreSQL data model option.
*
* @author yokochi
*/
public class PgSchemaOption implements Serializable {
/** The default version ID. */
private static final long serialVersionUID = 1L;
/** The root schema location. */
public String root_schema_location = "";
/** The relational model extension. */
public boolean rel_model_ext = true;
/** The relational data extension. */
public boolean rel_data_ext = true;
/** Whether to inline simple content. */
public boolean inline_simple_cont = false;
/** Whether to realize simple bridge tables as tables; otherwise they are implemented as PostgreSQL views by default. */
public boolean realize_simple_brdg = false;
/** The wild card extension. */
public boolean wild_card = true;
/** Whether to add document key in PostgreSQL DDL. */
public boolean document_key = true;
/** Whether to add serial key in PostgreSQL DDL. */
public boolean serial_key = false;
/** Whether to add XPath key in PostgreSQL DDL. */
public boolean xpath_key = false;
/** Whether to retain case sensitive name in PostgreSQL DDL. */
public boolean case_sense = true;
/** Whether to enable explicit named schema. */
public boolean pg_named_schema = false;
/** Whether to retain primary key/foreign key/unique constraint in PostgreSQL DDL. */
public boolean pg_retain_key = true;
/** The max tuple size of unique constraint in PostgreSQL DDL derived from xs:key (ignore the limit if non-positive value). */
public int pg_max_uniq_tuple_size = 1;
/** Whether to use TSV format in PostgreSQL data migration. */
public boolean pg_tab_delimiter = true;
/** The current delimiter code. */
public char pg_delimiter = '\t';
/** Whether to set annotation as comment in PostgreSQL DB. */
public boolean pg_comment_on = false;
/** The current null code. */
public String pg_null = PgSchemaUtil.pg_tsv_null;
/** Whether to delete invalid XML. */
public boolean del_invalid_xml = false;
/** Whether to enable data type/range checks during data conversion. */
public boolean type_check = false;
/** The verbose mode. */
public boolean verbose = false;
/** Whether to prefer local XML Schema file. */
@Flat
public boolean cache_xsd = true;
/** Whether to output PostgreSQL DDL. */
@Flat
public boolean ddl_output = false;
/** Whether not to retrieve field annotation in PostgreSQL DDL. */
@Flat
public boolean no_field_anno = true;
/** Whether to execute XML Schema validation. */
@Flat
public boolean validate = false;
/** Whether to enable full (canonical) XML Schema validation; if disabled, validate only whether the document is well-formed. */
@Flat
public boolean full_check = true;
/** Whether to output processing message to stdout or not (stderr). */
@Flat
public boolean stdout_msg = true;
/** The default document key name in PostgreSQL DDL. */
@Flat
public final String def_document_key_name = "document_id";
/** The default serial key name in PostgreSQL DDL. */
@Flat
public final String def_serial_key_name = "serial_id";
/** The default XPath key name in PostgreSQL DDL. */
@Flat
public final String def_xpath_key_name = "xpath_id";
/** The document key name in PostgreSQL DDL. */
public String document_key_name = def_document_key_name;
/** The serial key name in PostgreSQL DDL. */
public String serial_key_name = def_serial_key_name;
/** The XPath key name in PostgreSQL DDL. */
public String xpath_key_name = def_xpath_key_name;
/** The list of discarded document key names. */
public HashSet<String> discarded_document_key_names = null;
/** The list of in-place document key names. */
public HashSet<String> in_place_document_key_names = null;
/** The mapping of integer numbers in PostgreSQL. */
public PgIntegerType pg_integer = PgIntegerType.defaultType();
/** The mapping of decimal numbers in PostgreSQL. */
public PgDecimalType pg_decimal = PgDecimalType.defaultType();
/** The mapping of xs:date in PostgreSQL. */
public PgDateType pg_date = PgDateType.defaultType();
/** The name of hash algorithm. */
public String hash_algorithm = PgSchemaUtil.def_hash_algorithm;
/** The size of hash key. */
public PgHashSize hash_size = PgHashSize.defaultSize();
/** The size of serial key. */
public PgSerSize ser_size = PgSerSize.defaultSize();
/** Whether to adopt strict synchronization (insert if not exists, update if required, and delete if the XML no longer exists). */
public boolean sync = false;
/** Whether to adopt weak synchronization (insert if not exists, no update even if exists, no deletion). */
public boolean sync_weak = false;
/** Whether to dry-run synchronization (no update on existing check sum files). */
public boolean sync_dry_run = false;
/** Whether to run diagnostic synchronization (set all constraints deferred). */
public boolean sync_rescue = false;
/** Whether in-place document key exists. */
public boolean in_place_document_key = false;
/** Whether to append document key if no in-place key exists. */
public boolean document_key_if_no_in_place = false;
/** Whether to fill @default value. */
public boolean fill_default_value = false;
/** The directory name that contains check sum files. */
public String check_sum_dir_name = null;
/** The default algorithm for check sum. */
public String check_sum_algorithm = PgSchemaUtil.def_check_sum_algorithm;
/** The default file extension of check sum file. */
public String check_sum_ext = check_sum_algorithm.toLowerCase();
/** The JSON item name of xs:simpleContent. */
public String simple_content_name = PgSchemaUtil.simple_content_name;
/** Whether to use data model of PgSchema server. */
@Flat
public boolean pg_schema_server = true;
/** The default host name of PgSchema server. */
@Flat
public String pg_schema_server_host = PgSchemaUtil.pg_schema_server_host;
/** The default port number of PgSchema server. */
@Flat
public int pg_schema_server_port = PgSchemaUtil.pg_schema_server_port;
/** The default lifetime of unused PostgreSQL data model on PgSchema server in milliseconds. */
@Flat
public long pg_schema_server_lifetime = PgSchemaUtil.pg_schema_server_lifetime;
/** The prefix of xs_namespace_uri. */
@Flat
public String xs_prefix = null;
/** The qualified prefix: xs_prefix.isEmpty() ? "" : xs_prefix + ":". */
@Flat
public String xs_prefix_ = null;
/** Whether XML post editor has been applied. */
@Flat
public boolean post_editor_resolved = false;
/** Whether attribute selection has been resolved. */
@Flat
public boolean attr_resolved = false;
/** Whether field selection has been resolved. */
@Flat
public boolean field_resolved = false;
/** Whether to show orphan tables. */
@Flat
public boolean show_orphan_table = false;
/** The internal status corresponding to --doc-key option. */
@Flat
private boolean _doc_key = false;
/** The internal status corresponding to --no-doc-key option. */
@Flat
private boolean _no_doc_key = false;
/** Whether check sum directory exists. */
@Flat
private boolean _check_sum_dir_exists = false;
/**
* Instance of PostgreSQL data model option.
*
* @param document_key the document key
*/
public PgSchemaOption(boolean document_key) {
this.document_key = document_key;
discarded_document_key_names = new HashSet<String>();
in_place_document_key_names = new HashSet<String>();
}
/**
* Instance of PostgreSQL data model option for JSON Schema conversion.
*
* @param json_type JSON type
*/
public PgSchemaOption(JsonType json_type) {
setDefaultForJsonSchema(json_type);
discarded_document_key_names = new HashSet<String>();
in_place_document_key_names = new HashSet<String>();
}
/**
* Default settings for JSON Schema conversion.
*
* @param json_type JSON type
*/
public void setDefaultForJsonSchema(JsonType json_type) {
rel_model_ext = !json_type.equals(JsonType.relational);
cancelRelDataExt();
}
/**
* Cancel relational model extension in PostgreSQL.
*/
public void cancelRelModelExt() {
rel_model_ext = false;
cancelRelDataExt();
}
/**
* Cancel relational data extension.
*/
public void cancelRelDataExt() {
rel_data_ext = document_key = serial_key = xpath_key = pg_retain_key = false;
inline_simple_cont = true;
}
/**
* Enable relational data extension.
*/
public void enableRelDataExt() {
rel_model_ext = rel_data_ext = document_key = pg_retain_key = true;
inline_simple_cont = false;
}
/**
* Return minimum size of field.
*
* @return int the minimum size of field
*/
public int getMinimumSizeOfField() {
return (rel_model_ext ? 1 : 0) + (document_key ? 1 : 0);
}
/**
* Set internal status corresponding to --doc-key and --no-doc-key options.
*
* @param doc_key whether to add document key
* @return boolean whether status changed
*/
public boolean setDocKeyOption(boolean doc_key) {
if (doc_key) {
if (_no_doc_key) {
System.err.println("--no-doc-key is already set.");
return false;
}
_doc_key = true;
}
else {
if (_doc_key) {
System.err.println("--doc-key is already set.");
return false;
}
_no_doc_key = true;
}
return true;
}
/**
* Decide whether to add document key.
*/
public void resolveDocKeyOption() {
if (_doc_key || _no_doc_key)
document_key = _doc_key;
in_place_document_key = in_place_document_key_names.size() > 0;
if (document_key && in_place_document_key) {
in_place_document_key = false;
in_place_document_key_names.clear();
System.out.println("Ignored --inplace-doc-key-name option because default document key was enabled.");
}
if (document_key_if_no_in_place && !in_place_document_key) {
document_key_if_no_in_place = false;
document_key = true;
}
}
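/*
 * Illustrative sketch (not from the original source): the mutually exclusive
 * --doc-key/--no-doc-key flags are recorded via setDocKeyOption() and only take
 * effect once resolveDocKeyOption() is called, e.g.
 *
 *   PgSchemaOption option = new PgSchemaOption(true);
 *   option.setDocKeyOption(false); // --no-doc-key
 *   option.resolveDocKeyOption();  // document_key is now false
 */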
/**
* Set prefix of namespace URI representing XML Schema 1.x (http://www.w3.org/2001/XMLSchema)
*
* @param doc XML Schema document
* @param def_schema_location default schema location
* @throws PgSchemaException the pg schema exception
*/
public void setPrefixOfXmlSchema(Document doc, String def_schema_location) throws PgSchemaException {
NodeList node_list = doc.getElementsByTagNameNS(PgSchemaUtil.xs_namespace_uri, "*");
if (node_list == null)
throw new PgSchemaException("No namespace declaration stands for " + PgSchemaUtil.xs_namespace_uri + " in XML Schema: " + def_schema_location);
Node xs_namespace_uri_node = node_list.item(0);
xs_prefix = xs_namespace_uri_node != null ? xs_namespace_uri_node.getPrefix() : null;
if (xs_prefix == null || xs_prefix.isEmpty())
xs_prefix_ = xs_prefix = "";
else
xs_prefix_ = xs_prefix + ":";
}
/**
* Use tab delimiter code.
*/
public void usePgTsv() {
pg_tab_delimiter = true;
pg_delimiter = '\t';
pg_null = PgSchemaUtil.pg_tsv_null;
}
/**
* Use comma delimiter code.
*/
public void usePgCsv() {
pg_tab_delimiter = false;
pg_delimiter = ',';
pg_null = "";
}
/**
* Set case insensitive mode.
*/
public void setCaseInsensitive() {
case_sense = false;
document_key_name = document_key_name.toLowerCase();
serial_key_name = serial_key_name.toLowerCase();
xpath_key_name = xpath_key_name.toLowerCase();
if (!discarded_document_key_names.isEmpty()) {
String[] names = discarded_document_key_names.stream().toArray(String[]::new);
discarded_document_key_names.clear();
for (String name : names)
discarded_document_key_names.add(name.toLowerCase());
}
if (!in_place_document_key_names.isEmpty()) {
String[] names = in_place_document_key_names.stream().toArray(String[]::new);
in_place_document_key_names.clear();
for (String name : names)
in_place_document_key_names.add(name.toLowerCase());
}
}
/**
* Set document key name.
*
* @param document_key_name document key name
*/
public void setDocumentKeyName(String document_key_name) {
if (document_key_name == null || document_key_name.isEmpty())
return;
this.document_key_name = case_sense ? document_key_name : document_key_name.toLowerCase();
}
/**
* Set serial key name.
*
* @param serial_key_name serial key name
*/
public void setSerialKeyName(String serial_key_name) {
if (serial_key_name == null || serial_key_name.isEmpty())
return;
this.serial_key_name = case_sense ? serial_key_name : serial_key_name.toLowerCase();
}
/**
* Set XPath key name.
*
* @param xpath_key_name xpath key name
*/
public void setXPathKeyName(String xpath_key_name) {
if (xpath_key_name == null || xpath_key_name.isEmpty())
return;
this.xpath_key_name = case_sense ? xpath_key_name : xpath_key_name.toLowerCase();
}
/**
* Add discarded document key name.
*
* @param discarded_document_key_name discarded document key name
* @return result of addition
*/
public boolean addDiscardedDocKeyName(String discarded_document_key_name) {
if (discarded_document_key_name == null || discarded_document_key_name.isEmpty())
return false;
return discarded_document_key_names.add(case_sense ? discarded_document_key_name : discarded_document_key_name.toLowerCase());
}
/**
* Add in-place document key name.
*
* @param in_place_document_key_name in-place document key name
* @return result of addition
*/
public boolean addInPlaceDocKeyName(String in_place_document_key_name) {
if (in_place_document_key_name == null || in_place_document_key_name.isEmpty())
return false;
return in_place_document_key_names.add(case_sense ? in_place_document_key_name : in_place_document_key_name.toLowerCase());
}
/**
* Instantiate message digest for check sum.
*
* @param check_sum_algorithm algorithm name of message digest
* @return boolean whether algorithm name is valid
*/
public boolean setCheckSumAlgorithm(String check_sum_algorithm) {
try {
MessageDigest.getInstance(check_sum_algorithm);
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
return false;
}
this.check_sum_algorithm = check_sum_algorithm;
check_sum_ext = check_sum_algorithm.toLowerCase();
return true;
}
/**
* Return whether synchronization is possible.
*
* @param allow_sync_weak whether to allow weak synchronization
* @return boolean whether synchronization is possible
*/
public boolean isSynchronizable(boolean allow_sync_weak) {
// existence of the check sum directory is checked lazily and cached in _check_sum_dir_exists
return (allow_sync_weak && sync_weak) || (sync && check_sum_dir_name != null && (_check_sum_dir_exists || (_check_sum_dir_exists = Files.isDirectory(Paths.get(check_sum_dir_name)))));
}
/**
* Set item name in JSON document of xs:simpleContent.
*
* @param simple_content_name item name of xs:simpleContent in JSON document
*/
public void setSimpleContentName(String simple_content_name) {
if (simple_content_name == null)
simple_content_name = PgSchemaUtil.simple_content_name;
this.simple_content_name = case_sense ? simple_content_name : simple_content_name.toLowerCase();
}
/**
* Send PING query to PgSchema server.
*
* @param fst_conf FST configuration
* @return boolean whether PgSchema server is alive
*/
public boolean pingPgSchemaServer(FSTConfiguration fst_conf) {
if (!pg_schema_server)
return false;
try (Socket socket = new Socket(InetAddress.getByName(pg_schema_server_host), pg_schema_server_port)) {
DataOutputStream out = new DataOutputStream(socket.getOutputStream());
DataInputStream in = new DataInputStream(socket.getInputStream());
PgSchemaUtil.writeObjectToStream(fst_conf, out, new PgSchemaServerQuery(PgSchemaServerQueryType.PING));
PgSchemaServerReply reply = (PgSchemaServerReply) PgSchemaUtil.readObjectFromStream(fst_conf, in);
/*
in.close();
out.close();
*/
return reply.message.contains("OK");
} catch (IOException | ClassNotFoundException e) {
return false;
}
}
/**
* Send MATCH query to PgSchema server.
*
* @param fst_conf FST configuration
* @param client_type PgSchema client type
* @return boolean whether PgSchema server does not have data model (true)
*/
public boolean matchPgSchemaServer(FSTConfiguration fst_conf, PgSchemaClientType client_type) {
if (!pg_schema_server)
return false;
try (Socket socket = new Socket(InetAddress.getByName(pg_schema_server_host), pg_schema_server_port)) {
DataOutputStream out = new DataOutputStream(socket.getOutputStream());
DataInputStream in = new DataInputStream(socket.getInputStream());
PgSchemaUtil.writeObjectToStream(fst_conf, out, new PgSchemaServerQuery(PgSchemaServerQueryType.MATCH, this, client_type));
PgSchemaServerReply reply = (PgSchemaServerReply) PgSchemaUtil.readObjectFromStream(fst_conf, in);
/*
in.close();
out.close();
*/
return !reply.message.contains("NOT");
} catch (IOException | ClassNotFoundException e) {
return false;
}
}
/**
* Send GET query to PgSchema server.
*
* @param fst_conf FST configuration
* @param client_type PgSchema client type
* @return PgSchema PostgreSQL data model
*/
public PgSchema getPgSchemaServer(FSTConfiguration fst_conf, PgSchemaClientType client_type) {
if (!pg_schema_server)
return null;
try (Socket socket = new Socket(InetAddress.getByName(pg_schema_server_host), pg_schema_server_port)) {
DataOutputStream out = new DataOutputStream(socket.getOutputStream());
DataInputStream in = new DataInputStream(socket.getInputStream());
PgSchemaUtil.writeObjectToStream(fst_conf, out, new PgSchemaServerQuery(PgSchemaServerQueryType.GET, this, client_type));
PgSchemaServerReply reply = (PgSchemaServerReply) PgSchemaUtil.readObjectFromStream(fst_conf, in);
PgSchema schema = null;
if (reply.schema_bytes != null) {
if (stdout_msg)
System.out.print(reply.message);
else
System.err.print(reply.message);
schema = (PgSchema) fst_conf.asObject(reply.schema_bytes);
}
/*
in.close();
out.close();
*/
return schema;
} catch (IOException | ClassNotFoundException e) {
return null;
}
}
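/*
 * Illustrative usage (sketch; fst_conf creation assumes the nustaq FST library's
 * default factory, and client_type stands in for a PgSchemaClientType value):
 *
 *   FSTConfiguration fst_conf = FSTConfiguration.createDefaultConfiguration();
 *   if (option.pingPgSchemaServer(fst_conf))
 *       PgSchema schema = option.getPgSchemaServer(fst_conf, client_type);
 */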
/**
* Send Add query to PgSchema server.
*
* @param fst_conf FST configuration
* @param schema PostgreSQL data model
* @param client_type PgSchema client type
* @param original_caller original caller class name (optional)
*/
public void addPgSchemaServer(FSTConfiguration fst_conf, PgSchema schema, PgSchemaClientType client_type, String original_caller) {
if (!pg_schema_server)
return;
try (Socket socket = new Socket(InetAddress.getByName(pg_schema_server_host), pg_schema_server_port)) {
DataOutputStream out = new DataOutputStream(socket.getOutputStream());
DataInputStream in = new DataInputStream(socket.getInputStream());
PgSchemaUtil.writeObjectToStream(fst_conf, out, new PgSchemaServerQuery(PgSchemaServerQueryType.ADD, fst_conf, schema, client_type, original_caller));
PgSchemaServerReply reply = (PgSchemaServerReply) PgSchemaUtil.readObjectFromStream(fst_conf, in);
if (stdout_msg)
System.out.print(reply.message);
else
System.err.print(reply.message);
/*
in.close();
out.close();
*/
} catch (IOException | ClassNotFoundException e) {
}
}
/**
* Send UPDATE query to PgSchema server.
*
* @param fst_conf FST configuration
* @param schema PostgreSQL data model
* @param client_type PgSchema client type
* @param original_caller original caller class name (optional)
*/
public void updatePgSchemaServer(FSTConfiguration fst_conf, PgSchema schema, PgSchemaClientType client_type, String original_caller) {
if (!pg_schema_server)
return;
if (!pingPgSchemaServer(fst_conf))
return;
try (Socket socket = new Socket(InetAddress.getByName(pg_schema_server_host), pg_schema_server_port)) {
DataOutputStream out = new DataOutputStream(socket.getOutputStream());
DataInputStream in = new DataInputStream(socket.getInputStream());
PgSchemaUtil.writeObjectToStream(fst_conf, out, new PgSchemaServerQuery(PgSchemaServerQueryType.UPDATE, fst_conf, schema, client_type, original_caller));
PgSchemaServerReply reply = (PgSchemaServerReply) PgSchemaUtil.readObjectFromStream(fst_conf, in);
if (stdout_msg)
System.out.print(reply.message);
else
System.err.print(reply.message);
/*
in.close();
out.close();
*/
} catch (IOException | ClassNotFoundException e) {
} catch (RuntimeException e) {
}
}
/**
* Return equality of PostgreSQL data model option.
*
* @param option compared PostgreSQL data model option
* @return boolean whether the PostgreSQL data model option matches
*/
public boolean equals(PgSchemaOption option) {
if (!root_schema_location.equals(option.root_schema_location))
return false;
if (rel_model_ext != option.rel_model_ext)
return false;
if (rel_data_ext != option.rel_data_ext)
return false;
if (inline_simple_cont != option.inline_simple_cont)
return false;
if (realize_simple_brdg != option.realize_simple_brdg)
return false;
if (wild_card != option.wild_card)
return false;
if (document_key != option.document_key)
return false;
if (serial_key != option.serial_key)
return false;
if (xpath_key != option.xpath_key)
return false;
if (case_sense != option.case_sense)
return false;
if (pg_named_schema != option.pg_named_schema)
return false;
if (pg_retain_key != option.pg_retain_key)
return false;
if (pg_max_uniq_tuple_size != option.pg_max_uniq_tuple_size)
return false;
if (pg_tab_delimiter != option.pg_tab_delimiter)
return false;
if (pg_delimiter != option.pg_delimiter)
return false;
if (!pg_null.equals(option.pg_null))
return false;
if (del_invalid_xml != option.del_invalid_xml)
return false;
if (type_check != option.type_check)
return false;
if (verbose != option.verbose)
return false;
if (!document_key_name.equals(option.document_key_name))
return false;
if (!serial_key_name.equals(option.serial_key_name))
return false;
if (!xpath_key_name.equals(option.xpath_key_name))
return false;
if (!pg_integer.equals(option.pg_integer))
return false;
if (!pg_decimal.equals(option.pg_decimal))
return false;
if (!pg_date.equals(option.pg_date))
return false;
if (!hash_algorithm.equals(option.hash_algorithm))
return false;
if (!hash_size.equals(option.hash_size))
return false;
if (!ser_size.equals(option.ser_size))
return false;
if (sync != option.sync)
return false;
if (sync_weak != option.sync_weak)
return false;
if (sync_dry_run != option.sync_dry_run)
return false;
if (sync_rescue != option.sync_rescue)
return false;
if (in_place_document_key != option.in_place_document_key)
return false;
if (document_key_if_no_in_place != option.document_key_if_no_in_place)
return false;
if (fill_default_value != option.fill_default_value)
return false;
if (!check_sum_algorithm.equals(option.check_sum_algorithm))
return false;
if (discarded_document_key_names != null && option.discarded_document_key_names != null) {
if (discarded_document_key_names.size() > 0 || option.discarded_document_key_names.size() > 0) {
if (!discarded_document_key_names.containsAll(option.discarded_document_key_names))
return false;
if (!option.discarded_document_key_names.containsAll(discarded_document_key_names))
return false;
}
}
if (in_place_document_key_names != null && option.in_place_document_key_names != null) {
if (in_place_document_key_names.size() > 0 || option.in_place_document_key_names.size() > 0) {
if (!in_place_document_key_names.containsAll(option.in_place_document_key_names))
return false;
if (!option.in_place_document_key_names.containsAll(in_place_document_key_names))
return false;
}
}
else if (in_place_document_key_names != null || option.in_place_document_key_names != null)
return false;
if (check_sum_dir_name != null && option.check_sum_dir_name != null) {
if (!check_sum_dir_name.equals(option.check_sum_dir_name))
return false;
}
else if (check_sum_dir_name != null || option.check_sum_dir_name != null)
return false;
// JSON builder option
if (!simple_content_name.equals(option.simple_content_name))
return false;
return true;
}
}
|
import { CType } from '@kiltprotocol/sdk-js'
const cType = require('./data/ctype.json')
const CONFIG = Object.freeze({
CONNECT: {
BLOCKCHAIN_NODE: 'wss://full-nodes.kilt.io:9944',
MESSAGING_SERVICE_URL_FALLBACK: 'https://services.kilt.io:443/messaging',
CLAIMER_SERVICE_ADDRESS_DEFAULT: 'https://services.kilt.io:443/messaging',
CONTACTS_SERVICE_URL: 'https://services.kilt.io:443/contacts',
POLLING_PERIOD_MESSAGES_MS: 3000,
POLLING_PERIOD_CHAIN_MS: 8000,
},
CLAIM: {
CTYPE: CType.fromSchema(cType),
// display name: used in the Dashboard as a title for each claim card
CLAIM_CARD_TITLE: 'Claim',
},
THEME: {
// primary: used for buttons, selection highlight and contacts
CLR_PRIMARY: '#f05a28',
CLR_PRIMARY_LIGHT: 'rgba(240,90,40,0.09)',
// secondary: used for tab navigation, QR code and contacts
CLR_SECONDARY: '#280021',
CLR_SECONDARY_DARK: '#751869',
// logo and more
LOGO_HORIZONTAL_WIDTH: 320,
LOGO_HORIZONTAL_HEIGHT: 148,
SYMBOL_SERVICE_ADDRESS: '📭',
},
})
export default CONFIG
export const CONFIG_THEME = CONFIG.THEME
export const CONFIG_CONNECT = CONFIG.CONNECT
export const CONFIG_CLAIM = CONFIG.CLAIM
|
/*
* hid_create_pm_components:
* Create the pm components required for power management.
* For keyboard/mouse, the components are created only if the device
* supports remote wakeup.
* For other hid devices they are created unconditionally.
*/
static void
hid_create_pm_components(dev_info_t *dip, hid_state_t *hidp)
{
hid_power_t *hidpm;
uint_t pwr_states;
USB_DPRINTF_L4(PRINT_MASK_PM, hidp->hid_log_handle,
"hid_create_pm_components: Begin");
hidpm = kmem_zalloc(sizeof (hid_power_t), KM_SLEEP);
hidp->hid_pm = hidpm;
hidpm->hid_state = hidp;
hidpm->hid_raise_power = B_FALSE;
hidpm->hid_pm_capabilities = 0;
hidpm->hid_current_power = USB_DEV_OS_FULL_PWR;
switch (hidp->hid_if_descr.bInterfaceProtocol) {
case KEYBOARD_PROTOCOL:
case MOUSE_PROTOCOL:
hidpm->hid_pm_strategy = HID_PM_ACTIVITY;
if ((hid_is_pm_enabled(dip) == USB_SUCCESS) &&
(usb_handle_remote_wakeup(dip, USB_REMOTE_WAKEUP_ENABLE) ==
USB_SUCCESS)) {
USB_DPRINTF_L3(PRINT_MASK_PM, hidp->hid_log_handle,
"hid_create_pm_components: Remote Wakeup Enabled");
if (usb_create_pm_components(dip, &pwr_states) ==
USB_SUCCESS) {
hidpm->hid_wakeup_enabled = 1;
hidpm->hid_pwr_states = (uint8_t)pwr_states;
}
}
break;
default:
hidpm->hid_pm_strategy = HID_PM_OPEN_CLOSE;
if ((hid_is_pm_enabled(dip) == USB_SUCCESS) &&
(usb_create_pm_components(dip, &pwr_states) ==
USB_SUCCESS)) {
hidpm->hid_wakeup_enabled = 0;
hidpm->hid_pwr_states = (uint8_t)pwr_states;
}
break;
}
USB_DPRINTF_L4(PRINT_MASK_PM, hidp->hid_log_handle,
"hid_create_pm_components: END");
}
|
/**
* Contains Unicode helpers for parsing StringValue types in the grammar
*/
@Internal
public class UnicodeUtil {
public static int MAX_UNICODE_CODE_POINT = 0x10FFFF;
public static int LEADING_SURROGATE_LOWER_BOUND = 0xD800;
public static int LEADING_SURROGATE_UPPER_BOUND = 0xDBFF;
public static int TRAILING_SURROGATE_LOWER_BOUND = 0xDC00;
public static int TRAILING_SURROGATE_UPPER_BOUND = 0xDFFF;
public static int parseAndWriteUnicode(StringWriter writer, String string, int i, SourceLocation sourceLocation) {
// Unicode code points can either be:
// 1. Unbraced: four hex characters in the form \\u597D, or
// 2. Braced: any number of hex characters surrounded by braces in the form \\u{1F37A}
// Extract the code point hex digits. Index i points to 'u'
int startIndex = isBracedEscape(string, i) ? i + 2 : i + 1;
int endIndexExclusive = getEndIndexExclusive(string, i, sourceLocation);
// Index for parser to continue at, the last character of the escaped unicode character. Either } or hex digit
int continueIndex = isBracedEscape(string, i) ? endIndexExclusive : endIndexExclusive - 1;
String hexStr = string.substring(startIndex, endIndexExclusive);
Integer codePoint = Integer.parseInt(hexStr, 16);
if (isTrailingSurrogateValue(codePoint)) {
throw new InvalidSyntaxException(sourceLocation, "Invalid unicode - trailing surrogate must be preceded with a leading surrogate -", null, string.substring(i - 1, continueIndex + 1), null);
} else if (isLeadingSurrogateValue(codePoint)) {
if (!isEscapedUnicode(string, continueIndex + 1)) {
throw new InvalidSyntaxException(sourceLocation, "Invalid unicode - leading surrogate must be followed by a trailing surrogate -", null, string.substring(i - 1, continueIndex + 1), null);
}
// Shift parser ahead to 'u' in second escaped Unicode character
i = continueIndex + 2;
int trailingStartIndex = isBracedEscape(string, i) ? i + 2 : i + 1;
int trailingEndIndexExclusive = getEndIndexExclusive(string, i, sourceLocation);
String trailingHexStr = string.substring(trailingStartIndex, trailingEndIndexExclusive);
Integer trailingCodePoint = Integer.parseInt(trailingHexStr, 16);
continueIndex = isBracedEscape(string, i) ? trailingEndIndexExclusive : trailingEndIndexExclusive - 1;
if (isTrailingSurrogateValue(trailingCodePoint)) {
writeCodePoint(writer, codePoint);
writeCodePoint(writer, trailingCodePoint);
return continueIndex;
}
throw new InvalidSyntaxException(sourceLocation, "Invalid unicode - leading surrogate must be followed by a trailing surrogate -", null, string.substring(i - 1, continueIndex + 1), null);
} else if (isValidUnicodeCodePoint(codePoint)) {
writeCodePoint(writer, codePoint);
return continueIndex;
}
throw new InvalidSyntaxException(sourceLocation, "Invalid unicode - not a valid code point -", null, string.substring(i - 1, continueIndex + 1), null);
}
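// Minimal usage sketch (illustrative only, not part of the original class): parsing the
// braced escape \u{1F37A} with index i pointing at 'u' writes the single code point
// U+1F37A to the writer and returns the index of the closing brace.
public static void main(String[] args) {
    StringWriter writer = new StringWriter();
    int next = parseAndWriteUnicode(writer, "\\u{1F37A}", 1, null);
    System.out.println(writer.toString() + " (resume at index " + next + ")");
}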
private static int getEndIndexExclusive(String string, int i, SourceLocation sourceLocation) {
// Unbraced case, with exactly 4 hex digits
if (string.length() > i + 5 && !isBracedEscape(string, i)) {
return i + 5;
}
// Braced case, with any number of hex digits
int endIndexExclusive = i + 2;
do {
if (endIndexExclusive + 1 >= string.length()) {
throw new InvalidSyntaxException(sourceLocation, "Invalid unicode - incorrectly formatted escape -", null, string.substring(i - 1, endIndexExclusive), null);
}
} while (string.charAt(++endIndexExclusive) != '}');
return endIndexExclusive;
}
private static boolean isValidUnicodeCodePoint(int value) {
return value <= MAX_UNICODE_CODE_POINT;
}
private static boolean isEscapedUnicode(String string, int index) {
if (index + 1 >= string.length()) {
return false;
}
return string.charAt(index) == '\\' && string.charAt(index + 1) == 'u';
}
private static boolean isLeadingSurrogateValue(int value) {
return LEADING_SURROGATE_LOWER_BOUND <= value && value <= LEADING_SURROGATE_UPPER_BOUND;
}
private static boolean isTrailingSurrogateValue(int value) {
return TRAILING_SURROGATE_LOWER_BOUND <= value && value <= TRAILING_SURROGATE_UPPER_BOUND;
}
private static void writeCodePoint(StringWriter writer, int codepoint) {
char[] chars = Character.toChars(codepoint);
try {
writer.write(chars);
} catch (IOException e) {
assertShouldNeverHappen();
}
}
private static boolean isBracedEscape(String string, int i) {
return string.charAt(i + 1) == '{';
}
}
|
//package div3;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;
public class k_garlands {
static class FastReader {
BufferedReader br;
StringTokenizer st;
public FastReader() {
br = new BufferedReader(new
InputStreamReader(System.in));
}
String next() {
while (st == null || !st.hasMoreElements()) {
try {
st = new StringTokenizer(br.readLine());
} catch (IOException e) {
e.printStackTrace();
}
}
return st.nextToken();
}
int nextInt() {
return Integer.parseInt(next());
}
long nextLong() {
return Long.parseLong(next());
}
double nextDouble() {
return Double.parseDouble(next());
}
String nextLine() {
String str = "";
try {
str = br.readLine();
} catch (IOException e) {
e.printStackTrace();
}
return str;
}
}
static class Node{
int x;int y;
Node(int x,int y){
this.x=x;
this.y=y;
}
}
// Cantor pairing: encodes cell coordinates (a, b) as a single unique long key
static long hash(int a,int b){
return (a + b) * (a + b + 1) / 2 + a;
}
public static void main(String[] args) {
FastReader sc = new FastReader();
int t = sc.nextInt();
while(t-->0){
int n = sc.nextInt();
int m = sc.nextInt();
//System.out.println(n);
long[][] mat = new long[n][m];
for(int i=0;i<n;i++){
for(int j=0;j<m;j++){
mat[i][j] = sc.nextInt();
}
}
long ops = 0;
for(int i=0;i<((n%2==1)?(n/2+1):n/2);i++){
for(int j=0;j<((m%2==1)?(m/2+1):m/2);j++){
HashSet<Long> set = new HashSet<>();
HashSet<Node> send = new HashSet<>();
long sum = 0;
long ele1 = mat[i][j];
set.add(hash(i,j));
send.add(new Node(i,j));
sum += ele1;
long ele2 = mat[i][m-j-1];
if(!set.contains(hash(i,m-j-1))){
set.add(hash(i,m-j-1));
send.add(new Node(i,m-j-1));
sum += ele2;
}
long ele3 = mat[n-i-1][j];
if(!set.contains(hash(n-i-1,j))){
sum += ele3;
send.add(new Node(n-i-1,j));
set.add(hash(n-i-1,j));
}
long ele4 = mat[n-i-1][m-j-1];
if(!set.contains(hash(n-i-1,m-j-1))){
sum += ele4;
send.add(new Node(n-i-1,m-j-1));
set.add(hash(n-i-1,m-j-1));
}
// long avg1 = (long)Math.floor((double)sum/(double)send.size());
// long avg2 = (long)Math.ceil((double)sum/(double)send.size());
long localOps1 = 0;
long localOps2 = 0;
List<Long> list = new ArrayList<>();
for(Node node : send){
int x = node.x;
int y = node.y;
list.add(mat[x][y]);
// localOps1 += Math.abs(avg1-mat[x][y]);
// localOps2 += Math.abs(avg2-mat[x][y]);
}
Collections.sort(list);
long k = list.get(list.size()/2);
int N = list.size();
long cost = 0;
for (int p = 0; p < N; ++p)
cost += Math.abs(list.get(p) - k);
// If N is even, also try the lower middle element and keep the cheaper
// of the two (any value between the two middle elements minimizes the
// sum of absolute differences)
if (N % 2 == 0) {
long tempCost = 0;
long K = list.get((N / 2) - 1);
// Find cost again
for (int p = 0; p < N; ++p)
tempCost += Math.abs(list.get(p) - K);
// Keep the smaller of the two costs
cost = Math.min(cost, tempCost);
}
ops += cost;
}
}
System.out.println(ops);
}
}
}
|
def _init_conn(self):
if self._codec.tag:
self._writer.write(self._codec.tag)
|
// suomitek/suomitek-appboard: dashboard/src/components/DeploymentFormBody/DifferentialSelector.test.tsx
import { shallow } from "enzyme";
import * as React from "react";
import Differential from "./Differential";
import DifferentialSelector from "./DifferentialSelector";
it("should use default values when first deploying", () => {
const wrapper = shallow(
<DifferentialSelector
deploymentEvent="install"
deployedValues=""
defaultValues="foo"
appValues="bar"
/>,
);
expect(wrapper.find(Differential).props()).toMatchObject({
title: "Difference from chart defaults",
oldValues: "foo",
newValues: "bar",
});
});
it("should use deployed values when upgrading", () => {
const wrapper = shallow(
<DifferentialSelector
deploymentEvent="upgrade"
deployedValues="foobar"
defaultValues="foo"
appValues="bar"
/>,
);
expect(wrapper.find(Differential).props()).toMatchObject({
title: "Difference from deployed version",
oldValues: "foobar",
newValues: "bar",
});
});
|
We present a case of a 44-year-old woman with pseudoaneurysm formation, 60 mm in diameter, at the middle of the prosthetic graft. She had been diagnosed with atypical coarctation due to aortitis 27 years before, and had undergone a bypass operation with a 14 mm-diameter Cooley double velour graft from the ascending aorta to the abdominal aorta. This time, endovascular aortic repair was performed to prevent rupture of the pseudoaneurysm. Although a knitted Dacron graft carries a risk of pseudoaneurysm formation, long-term patency can be obtained when it is used for ascending aorta-abdominal aorta bypass.
|
Tonight, the new Virginia Tech uniforms for the Hokies’ “Battle at Bristol” game against Tennessee were revealed.
The new Virginia Tech uniforms are officially titled “Hokie Stone,” a tribute to the legendary on-campus stone structures. In addition to the heavy use of grays, the uniforms also contain a major black element. A description from one of the creators said the entire uniform “feels strong and powerful.”
The Hokies teased their new uniform on Twitter and also did a full YouTube reveal. We’ve included both below.
Virginia Tech fans, what do you think? It’s a far cry from the team’s usual maroon-and-orange look but, in our opinion, it is pretty clean and distinctive. You also have to love the nod to university tradition and the Hokie stone.
Virginia Tech and Tennessee will face off in the “Battle at Bristol” at Bristol Motor Speedway in Bristol, Tenn. on Sept. 10. Kickoff will be at 8 p.m. ET on ABC.
|
/*
* Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.medialive.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* Placeholder documentation for CaptionSelectorSettings
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/CaptionSelectorSettings" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CaptionSelectorSettings implements Serializable, Cloneable, StructuredPojo {
private AribSourceSettings aribSourceSettings;
private DvbSubSourceSettings dvbSubSourceSettings;
private EmbeddedSourceSettings embeddedSourceSettings;
private Scte20SourceSettings scte20SourceSettings;
private Scte27SourceSettings scte27SourceSettings;
private TeletextSourceSettings teletextSourceSettings;
/**
* @param aribSourceSettings
*/
public void setAribSourceSettings(AribSourceSettings aribSourceSettings) {
this.aribSourceSettings = aribSourceSettings;
}
/**
* @return
*/
public AribSourceSettings getAribSourceSettings() {
return this.aribSourceSettings;
}
/**
* @param aribSourceSettings
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CaptionSelectorSettings withAribSourceSettings(AribSourceSettings aribSourceSettings) {
setAribSourceSettings(aribSourceSettings);
return this;
}
/**
* @param dvbSubSourceSettings
*/
public void setDvbSubSourceSettings(DvbSubSourceSettings dvbSubSourceSettings) {
this.dvbSubSourceSettings = dvbSubSourceSettings;
}
/**
* @return
*/
public DvbSubSourceSettings getDvbSubSourceSettings() {
return this.dvbSubSourceSettings;
}
/**
* @param dvbSubSourceSettings
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CaptionSelectorSettings withDvbSubSourceSettings(DvbSubSourceSettings dvbSubSourceSettings) {
setDvbSubSourceSettings(dvbSubSourceSettings);
return this;
}
/**
* @param embeddedSourceSettings
*/
public void setEmbeddedSourceSettings(EmbeddedSourceSettings embeddedSourceSettings) {
this.embeddedSourceSettings = embeddedSourceSettings;
}
/**
* @return
*/
public EmbeddedSourceSettings getEmbeddedSourceSettings() {
return this.embeddedSourceSettings;
}
/**
* @param embeddedSourceSettings
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CaptionSelectorSettings withEmbeddedSourceSettings(EmbeddedSourceSettings embeddedSourceSettings) {
setEmbeddedSourceSettings(embeddedSourceSettings);
return this;
}
/**
* @param scte20SourceSettings
*/
public void setScte20SourceSettings(Scte20SourceSettings scte20SourceSettings) {
this.scte20SourceSettings = scte20SourceSettings;
}
/**
* @return
*/
public Scte20SourceSettings getScte20SourceSettings() {
return this.scte20SourceSettings;
}
/**
* @param scte20SourceSettings
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CaptionSelectorSettings withScte20SourceSettings(Scte20SourceSettings scte20SourceSettings) {
setScte20SourceSettings(scte20SourceSettings);
return this;
}
/**
* @param scte27SourceSettings
*/
public void setScte27SourceSettings(Scte27SourceSettings scte27SourceSettings) {
this.scte27SourceSettings = scte27SourceSettings;
}
/**
* @return
*/
public Scte27SourceSettings getScte27SourceSettings() {
return this.scte27SourceSettings;
}
/**
* @param scte27SourceSettings
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CaptionSelectorSettings withScte27SourceSettings(Scte27SourceSettings scte27SourceSettings) {
setScte27SourceSettings(scte27SourceSettings);
return this;
}
/**
* @param teletextSourceSettings
*/
public void setTeletextSourceSettings(TeletextSourceSettings teletextSourceSettings) {
this.teletextSourceSettings = teletextSourceSettings;
}
/**
* @return
*/
public TeletextSourceSettings getTeletextSourceSettings() {
return this.teletextSourceSettings;
}
/**
* @param teletextSourceSettings
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CaptionSelectorSettings withTeletextSourceSettings(TeletextSourceSettings teletextSourceSettings) {
setTeletextSourceSettings(teletextSourceSettings);
return this;
}
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getAribSourceSettings() != null)
sb.append("AribSourceSettings: ").append(getAribSourceSettings()).append(",");
if (getDvbSubSourceSettings() != null)
sb.append("DvbSubSourceSettings: ").append(getDvbSubSourceSettings()).append(",");
if (getEmbeddedSourceSettings() != null)
sb.append("EmbeddedSourceSettings: ").append(getEmbeddedSourceSettings()).append(",");
if (getScte20SourceSettings() != null)
sb.append("Scte20SourceSettings: ").append(getScte20SourceSettings()).append(",");
if (getScte27SourceSettings() != null)
sb.append("Scte27SourceSettings: ").append(getScte27SourceSettings()).append(",");
if (getTeletextSourceSettings() != null)
sb.append("TeletextSourceSettings: ").append(getTeletextSourceSettings());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CaptionSelectorSettings == false)
return false;
CaptionSelectorSettings other = (CaptionSelectorSettings) obj;
if (other.getAribSourceSettings() == null ^ this.getAribSourceSettings() == null)
return false;
if (other.getAribSourceSettings() != null && other.getAribSourceSettings().equals(this.getAribSourceSettings()) == false)
return false;
if (other.getDvbSubSourceSettings() == null ^ this.getDvbSubSourceSettings() == null)
return false;
if (other.getDvbSubSourceSettings() != null && other.getDvbSubSourceSettings().equals(this.getDvbSubSourceSettings()) == false)
return false;
if (other.getEmbeddedSourceSettings() == null ^ this.getEmbeddedSourceSettings() == null)
return false;
if (other.getEmbeddedSourceSettings() != null && other.getEmbeddedSourceSettings().equals(this.getEmbeddedSourceSettings()) == false)
return false;
if (other.getScte20SourceSettings() == null ^ this.getScte20SourceSettings() == null)
return false;
if (other.getScte20SourceSettings() != null && other.getScte20SourceSettings().equals(this.getScte20SourceSettings()) == false)
return false;
if (other.getScte27SourceSettings() == null ^ this.getScte27SourceSettings() == null)
return false;
if (other.getScte27SourceSettings() != null && other.getScte27SourceSettings().equals(this.getScte27SourceSettings()) == false)
return false;
if (other.getTeletextSourceSettings() == null ^ this.getTeletextSourceSettings() == null)
return false;
if (other.getTeletextSourceSettings() != null && other.getTeletextSourceSettings().equals(this.getTeletextSourceSettings()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getAribSourceSettings() == null) ? 0 : getAribSourceSettings().hashCode());
hashCode = prime * hashCode + ((getDvbSubSourceSettings() == null) ? 0 : getDvbSubSourceSettings().hashCode());
hashCode = prime * hashCode + ((getEmbeddedSourceSettings() == null) ? 0 : getEmbeddedSourceSettings().hashCode());
hashCode = prime * hashCode + ((getScte20SourceSettings() == null) ? 0 : getScte20SourceSettings().hashCode());
hashCode = prime * hashCode + ((getScte27SourceSettings() == null) ? 0 : getScte27SourceSettings().hashCode());
hashCode = prime * hashCode + ((getTeletextSourceSettings() == null) ? 0 : getTeletextSourceSettings().hashCode());
return hashCode;
}
@Override
public CaptionSelectorSettings clone() {
try {
return (CaptionSelectorSettings) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.medialive.model.transform.CaptionSelectorSettingsMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
|
// WolleDoerr/enhavo
import AbstractLoader from "@enhavo/form/loader/AbstractLoader";
import WysiwygType from "@enhavo/form/type/WysiwygType";
import * as tinymce from "tinymce";
import FormRegistry from "@enhavo/app/form/FormRegistry";
export default class WysiwygLoader extends AbstractLoader
{
public release(element: HTMLElement): void
{
let elements = this.findElements(element, '[data-wysiwyg]');
for(element of elements) {
FormRegistry.registerType(new WysiwygType(element));
}
}
public move(element: HTMLElement): void
{
let elements = this.findElements(element, '[data-wysiwyg]');
for(element of elements) {
tinymce.EditorManager.remove('#'+element.id);
}
}
public remove(element: HTMLElement): void
{
let elements = this.findElements(element, '[data-wysiwyg]');
for(element of elements) {
tinymce.EditorManager.remove('#'+element.id);
}
}
public drop(element: HTMLElement): void
{
let elements = this.findElements(element, '[data-wysiwyg]');
for(element of elements) {
FormRegistry.registerType(new WysiwygType(element));
}
}
}
|
/**
* The iterator returned does not invoke the {@code batchValidationStep} for the first batch of calls as it is
* assumed to be prefetched from its caller and the {@code batchValidationStep} can run where this is called.
* <p>
* If a batch returned by the internal iterator is empty or has size smaller than the batch size specified in
* {@code columnRangeSelection}, that batch will be considered the last batch, and there will be no further
* invocations of {@code batchValidationStep}.
* <p>
* This <em>may</em> request more elements than the specified batch hint inside {@code columnRangeSelection} if
* there is detection of many deleted values.
*/
public static Iterator<Map.Entry<Cell, byte[]>> iterator(
BatchProvider<Map.Entry<Cell, Value>> batchProvider,
RowColumnRangeIterator rawIterator,
BatchColumnRangeSelection columnRangeSelection,
Runnable batchValidationStep,
PostFilterer postFilterer) {
BatchSizeIncreasingIterator<Map.Entry<Cell, Value>> batchIterator = new BatchSizeIncreasingIterator<>(
batchProvider, columnRangeSelection.getBatchHint(), ClosableIterators.wrap(rawIterator));
GetRowsColumnRangeIterator postFilteredIterator =
new GetRowsColumnRangeIterator(batchIterator, batchValidationStep, postFilterer);
return Iterators.concat(postFilteredIterator);
}
|
Third Shade of Simulation
Dear Sir, We would like to thank Jutric and colleagues for their comments on our review article titled “Novel Simulation Device for Targeting Tumors in Laparoscopic Ablation: A Learning Curve Study.” There are several unmet training needs in surgical skills, and simulation-based training methods can help fill these gaps. The first type of simulation used as an assistive tool in clinical teaching is either software- or hardware-based simulation. The second is a computerized virtual “microworld” that learners can freely explore to develop their clinical and problem-solving skills by manipulating virtual objects. The third is a virtual learning environment (VLE), which can be used to teach reflective and cooperative learning to clinical teachers. In particular, VLEs can facilitate interaction between teachers working at various levels of education and can assist them in rethinking what topics should be taught, the appropriate pedagogy to use, and how student performance should be assessed. VLEs can not only enrich the professional knowledge of teachers from different backgrounds but also refine their teaching methods and skills. They can also cultivate an appreciative attitude toward cooperating with peer learners who have different skill sets and past experiences.
|
import { Service, default as ServiceLocator } from "../service_locator";
import { ServerService, Response } from "./server/server";
import { BackRequest, HTMLTemplate } from "./server/back/server_back";
import { createServer, ServerRequest, ServerResponse } from "http";
import { existsSync, statSync, createReadStream } from "fs";
import * as mime from "mime";
export default class ServerBackService extends Service implements ServerService {
constructor(sl: ServiceLocator, private htmlTemplate: HTMLTemplate) {
super(sl);
}
private httpServer = createServer(
(request, response) => this.dispatchRequest(request as ServerRequest, response)
);
private dispatchRequest(serverRequest: ServerRequest, serverResponse: ServerResponse) {
let request = new BackRequest(serverRequest, serverResponse, this.htmlTemplate);
let staticPath = 'public_html/' + serverRequest.url;
let stat = existsSync(staticPath) && statSync(staticPath);
if (stat && stat.isFile()) {
serverResponse.writeHead(200, {
'Content-Type': mime.lookup(serverRequest.url),
'Content-Length': stat.size
});
let readStream = createReadStream(staticPath);
// We replaced all the event handlers with a simple call to readStream.pipe()
readStream.pipe(serverResponse);
return;
}
request.loadPostData().then<Response>(
() => this.router.dispatchRequest(request)
).then(
(response) => response.send(),
(error) => console.log(error)
);
}
listen(port: number, host: string) {
let httpServer = this.httpServer;
httpServer.addListener("error", function (error) {
console.log(error);
});
httpServer.listen(port, host);
}
}
|
// SendImage encapsulates the processing of an image captcha.
// Given a base64-encoded image string, it sends the task to the API and polls
// until processing is complete, then returns the solved text.
func (this *Client) SendImage(imgString string) (string, error) {
taskID, err := this.createTaskImage(imgString)
if err != nil {
return "", err
}
response, err := this.getTaskResult(taskID)
if err != nil {
return "", err
}
for {
if response["status"] == "processing" {
time.Sleep(sendInterval)
response, err = this.getTaskResult(taskID)
if err != nil {
return "", err
}
} else {
break
}
}
if response["solution"] == nil {
return "", errors.New("anticaptcha error")
}
return response["solution"].(map[string]interface{})["text"].(string), nil
}
|
/*
* Copyright 2021 MeshDynamics.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React, { ChangeEvent, useCallback, useEffect, useState } from 'react';
import { Col, Row, Grid, FormControl, Button } from 'react-bootstrap';
import { connect } from 'react-redux';
import { IStoreState, ITestConfigDetails } from '../../../reducers/state.types';
import { configsService } from '../../../services/configs.service';
import { v4 as uuid } from 'uuid';
import { MultiLineInputComponent } from './MultiLineInputComponent';
import { IPathListResponse } from 'src/src/common/apiResponse.types';
interface IEditTestConfigProps {
testConfigId: number,
onClose: (refresh: boolean) => void,
testConfigToEdit?: ITestConfigDetails,
appId: string | undefined,
appName: string | undefined,
customerName: string | undefined
}
export interface IValueConfig {
uniqueId: string,
value: string
}
function EditTestConfig(props: IEditTestConfigProps) {
const [name, setName] = useState<string>("");
const [pathList, setPathList] = useState<IValueConfig[]>([]);
const [mockList, setMockList] = useState<IValueConfig[]>([]);
const [serviceListInApp, setServiceListInApp] = useState<string[]>([]);
const [pathListInApp, setPathListInApp] = useState<string[]>([]);
useEffect(() => {
if (props.testConfigToEdit) {
setName(props.testConfigToEdit.testConfigName);
setPathList(props.testConfigToEdit.testPaths.map(item => {
return {
value: item,
uniqueId: uuid()
}
}));
setMockList((props.testConfigToEdit.testServices || []).map(item => {
return {
value: item,
uniqueId: uuid()
}
}));
}
}, [props.testConfigToEdit]);
useEffect(() => {
if (props.appId) {
configsService.getPathsList(props.appId).then((data) => {
let paths: string[] = [];
data.forEach((currentValue: IPathListResponse) => {
paths.push(...currentValue.paths);
}, []);
paths = paths.filter((item, index) => !!item && !(paths.indexOf(item) < index));
setPathListInApp(paths);
}).catch(error => {
console.error(error);
})
configsService.getServicesList(props.appId).then((data) => {
setServiceListInApp(data.map(service => service.service.name));
}).catch(error => {
console.error(error);
})
}
}, [props.appId]);
const onNameChange = useCallback(
(event: ChangeEvent<HTMLInputElement & FormControl>) => {
setName(event.target.value);
},
[],
);
const onAddOrUpdateClick = () => {
if (props.appId && props.appName && props.customerName) {
const testConfig = {
testConfigName: name,
        services: mockList.map(u => u.value).filter(u => !!u),
        paths: pathList.map(u => u.value).filter(u => !!u)
} as unknown as ITestConfigDetails;
if (props.testConfigId) {
testConfig.id = props.testConfigId;
}
configsService.createOrUpdateTestConfig(props.customerName, props.appName, testConfig).then(data => {
props.onClose(true);
}).catch(error => {
alert(error.message)
console.error(error);
})
}
}
return (
<div className="edit-test-config prop-rules">
      <h3>{props.testConfigId ? "Edit Config" : "Add Config"}</h3>
<Grid>
<Row>
<Col sm={10} md={3} lg={3}>
Name
</Col>
<Col sm={2} md={2} lg={1}>
:
</Col>
<Col sm={12} md={7} lg={8}>
<FormControl as="input" name="name" id="name" value={name} onChange={onNameChange} disabled={!!props.testConfigId} />
</Col>
</Row>
<Row className="margin-top-10">
<Col sm={10} md={3} lg={3}>
Test Services
</Col>
<Col sm={2} md={2} lg={1}>
:
</Col>
<Col sm={12} md={7} lg={8}>
<datalist id="mockServiceList">
            {serviceListInApp.map(service => <option key={service} value={service}>{service}</option>)}
</datalist>
<MultiLineInputComponent value={mockList} onChange={setMockList} name="Mock" listId="mockServiceList" />
</Col>
</Row>
<Row className="margin-top-10">
<Col sm={10} md={3} lg={3}>
Paths
</Col>
<Col sm={2} md={2} lg={1}>
:
</Col>
<Col sm={12} md={7} lg={8}>
<datalist id="pathListSuggestion">
            {pathListInApp.map(path => <option key={path} value={path}>{path}</option>)}
</datalist>
<MultiLineInputComponent value={pathList} onChange={setPathList} name="Path" listId="pathListSuggestion" />
</Col>
</Row>
<Row className="margin-top-10">
<Col sm={10} md={3} lg={3}>
</Col>
<Col sm={2} md={2} lg={1}>
</Col>
<Col sm={12} md={7} lg={8}>
<Button onClick={onAddOrUpdateClick}>{props.testConfigId ? "Update" : "Add"}</Button>
<Button onClick={() => props.onClose(false)}>Cancel</Button>
</Col>
</Row>
</Grid>
</div>
);
};
function mapStateToProps(state: IStoreState, props: IEditTestConfigProps) {
const testConfigToEdit = (state.cube.testConfigList || []).find(config => config.id == props.testConfigId);
const appId = state.cube.selectedAppObj?.app.id?.toString();
const appName = state.cube.selectedAppObj?.app.name;
const customerName = state.cube.selectedAppObj?.app.customer.name;
return {
testConfigToEdit,
appId: appId,
appName,
customerName
}
}
const connectedEditTestConfig = connect(mapStateToProps)(EditTestConfig);
export default connectedEditTestConfig
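// Usage sketch (illustrative only; the surrounding page/modal component is assumed).
// appId, appName, customerName and testConfigToEdit are injected from the redux store
// by mapStateToProps, so only testConfigId and onClose need to be passed in:
//
//   <EditTestConfig
//     testConfigId={selectedConfigId}
//     onClose={(refresh) => { if (refresh) { reloadTestConfigs(); } closeModal(); }}
//   />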
|
import xarray as xr


def fixture_grid_result():
return xr.DataArray(
data=[
[-1800.0, -1800.0, -1800.0, -1800.0],
[-1800.0, -1800.0, -1800.0, -1800.0],
[-656.0, 40.0, -1800.0, -1800.0],
],
coords=dict(lon=[-2.5, -1.5, -0.5, 0.5], lat=[2.5, 3.5, 4.5]),
dims=["lat", "lon"],
)
|
/****************************************************************************
*
* Copyright (c) 2012-2022 PX4 Development Team. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name PX4 nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
/**
* @file px4io.cpp
* Top-level logic for the PX4IO module.
*
* @author <NAME> <<EMAIL>>
*/
#include <px4_platform_common/px4_config.h>
#include <stdio.h> // required for task_create
#include <stdbool.h>
#include <stdlib.h>
#include <errno.h>
#include <string.h>
#include <malloc.h>
#include <poll.h>
#include <signal.h>
#include <crc32.h>
#include <syslog.h>
#include <drivers/drv_pwm_output.h>
#include <drivers/drv_hrt.h>
#include <drivers/drv_watchdog.h>
#if defined(PX4IO_PERF)
# include <lib/perf/perf_counter.h>
#endif
#include <stm32_uart.h>
#define DEBUG
#include "px4io.h"
struct sys_state_s system_state;
static struct hrt_call serial_dma_call;
/*
* a set of debug buffers to allow us to send debug information from ISRs
*/
static volatile uint32_t msg_counter;
static volatile uint32_t last_msg_counter;
static volatile uint8_t msg_next_out;
static volatile uint8_t msg_next_in;
/*
* WARNING: too large buffers here consume the memory required
* for mixer handling. Do not allocate more than 80 bytes for
* output.
*/
#define NUM_MSG 1
static char msg[NUM_MSG][CONFIG_USART1_TXBUFSIZE];
static void heartbeat_blink(void);
static void ring_blink(void);
static void update_mem_usage(void);
void atomic_modify_or(volatile uint16_t *target, uint16_t modification)
{
if ((*target | modification) != *target) {
PX4_CRITICAL_SECTION(*target |= modification);
}
}
void atomic_modify_clear(volatile uint16_t *target, uint16_t modification)
{
if ((*target & ~modification) != *target) {
PX4_CRITICAL_SECTION(*target &= ~modification);
}
}
void atomic_modify_and(volatile uint16_t *target, uint16_t modification)
{
if ((*target & modification) != *target) {
PX4_CRITICAL_SECTION(*target &= modification);
}
}
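/*
 * Example (illustrative only): set or clear a status bit from thread context
 * without racing the ISRs that also touch the shared register pages.
 * r_status_flags and PX4IO_P_STATUS_FLAGS_SAFETY_OFF are the names used
 * elsewhere in this file:
 *
 *   atomic_modify_or(&r_status_flags, PX4IO_P_STATUS_FLAGS_SAFETY_OFF);
 *   atomic_modify_clear(&r_status_flags, PX4IO_P_STATUS_FLAGS_SAFETY_OFF);
 */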
/*
* add a debug message to be printed on the console
*/
void
isr_debug(uint8_t level, const char *fmt, ...)
{
if (level > r_page_setup[PX4IO_P_SETUP_SET_DEBUG]) {
return;
}
va_list ap;
va_start(ap, fmt);
vsnprintf(msg[msg_next_in], sizeof(msg[0]), fmt, ap);
va_end(ap);
msg_next_in = (msg_next_in + 1) % NUM_MSG;
msg_counter++;
}
/*
* show all pending debug messages
*/
static void
show_debug_messages(void)
{
if (msg_counter != last_msg_counter) {
uint32_t n = msg_counter - last_msg_counter;
if (n > NUM_MSG) { n = NUM_MSG; }
last_msg_counter = msg_counter;
while (n--) {
debug("%s", msg[msg_next_out]);
msg_next_out = (msg_next_out + 1) % NUM_MSG;
}
}
}
/*
* Get the memory usage at 2 Hz while not armed
*/
static void
update_mem_usage(void)
{
if (/* IO armed */ (r_status_flags & PX4IO_P_STATUS_FLAGS_SAFETY_OFF)
/* and FMU is armed */ && (r_setup_arming & PX4IO_P_SETUP_ARMING_FMU_ARMED)) {
return;
}
static uint64_t last_mem_time = 0;
uint64_t now = hrt_absolute_time();
if (now - last_mem_time > (500 * 1000)) {
struct mallinfo minfo = mallinfo();
r_page_status[PX4IO_P_STATUS_FREEMEM] = minfo.fordblks;
last_mem_time = now;
}
}
static void
heartbeat_blink(void)
{
#if defined(LED_BLUE)
static bool heartbeat = false;
LED_BLUE(heartbeat = !heartbeat);
#endif /* LED_BLUE */
}
static void
ring_blink(void)
{
#if defined(LED_GREEN)
if (/* IO armed */ (r_status_flags & PX4IO_P_STATUS_FLAGS_SAFETY_OFF)
/* and FMU is armed */ && (r_setup_arming & PX4IO_P_SETUP_ARMING_FMU_ARMED)) {
LED_GREEN(true);
return;
}
		// XXX this LED code intentionally uses
		// a few magic numbers.
const unsigned max_brightness = 118;
static unsigned counter = 0;
static unsigned brightness = max_brightness;
static unsigned brightness_counter = 0;
static unsigned on_counter = 0;
if (brightness_counter < max_brightness) {
bool on = ((on_counter * 100) / brightness_counter + 1) <= ((brightness * 100) / max_brightness + 1);
// XXX once led is PWM driven,
// remove the ! in the line below
// to return to the proper breathe
// animation / pattern (currently inverted)
LED_GREEN(!on);
brightness_counter++;
if (on) {
on_counter++;
}
} else {
if (counter >= 62) {
counter = 0;
}
int n;
if (counter < 32) {
n = counter;
} else {
n = 62 - counter;
}
brightness = (n * n) / 8;
brightness_counter = 0;
on_counter = 0;
counter++;
}
#endif
}
static uint64_t reboot_time;
/**
schedule a reboot in time_delta_usec microseconds
*/
void schedule_reboot(uint32_t time_delta_usec)
{
reboot_time = hrt_absolute_time() + time_delta_usec;
}
/**
check for a scheduled reboot
*/
static void check_reboot(void)
{
if (reboot_time != 0 && hrt_absolute_time() > reboot_time) {
up_systemreset();
}
}
static void
calculate_fw_crc(void)
{
#define APP_SIZE_MAX 0xf000
#define APP_LOAD_ADDRESS 0x08001000
// compute CRC of the current firmware
uint32_t sum = 0;
for (unsigned p = 0; p < APP_SIZE_MAX; p += 4) {
uint32_t bytes = *(uint32_t *)(p + APP_LOAD_ADDRESS);
sum = crc32part((uint8_t *)&bytes, sizeof(bytes), sum);
}
r_page_setup[PX4IO_P_SETUP_CRC] = sum & 0xFFFF;
r_page_setup[PX4IO_P_SETUP_CRC + 1] = sum >> 16;
}
extern "C" __EXPORT int user_start(int argc, char *argv[])
{
/* configure the first 8 PWM outputs (i.e. all of them) */
up_pwm_servo_init(0xff);
/* reset all to zero */
memset(&system_state, 0, sizeof(system_state));
/* configure the high-resolution time/callout interface */
hrt_init();
/* calculate our fw CRC so FMU can decide if we need to update */
calculate_fw_crc();
/*
* Poll at 1ms intervals for received bytes that have not triggered
* a DMA event.
*/
#ifdef CONFIG_ARCH_DMA
hrt_call_every(&serial_dma_call, 1000, 1000, (hrt_callout)stm32_serial_dma_poll, NULL);
#endif
/* print some startup info */
syslog(LOG_INFO, "\nPX4IO: starting\n");
/* default all the LEDs to off while we start */
LED_AMBER(false);
#if defined(LED_BLUE)
LED_BLUE(false);
#endif /* LED_BLUE */
LED_SAFETY(false);
#if defined(LED_GREEN)
LED_GREEN(false);
#endif /* LED_GREEN */
/* turn off S.Bus out (if supported) */
#ifdef ENABLE_SBUS_OUT
ENABLE_SBUS_OUT(false);
#endif
/* start the safety switch handler */
safety_init();
/* initialise the control inputs */
controls_init();
/* set up the ADC */
adc_init();
/* start the FMU interface */
interface_init();
#if defined(PX4IO_PERF)
/* add a performance counter for mixing */
perf_counter_t mixer_perf = perf_alloc(PC_ELAPSED, "mix");
/* add a performance counter for controls */
perf_counter_t controls_perf = perf_alloc(PC_ELAPSED, "controls");
/* and one for measuring the loop rate */
perf_counter_t loop_perf = perf_alloc(PC_INTERVAL, "loop");
#endif
struct mallinfo minfo = mallinfo();
r_page_status[PX4IO_P_STATUS_FREEMEM] = minfo.mxordblk;
syslog(LOG_INFO, "MEM: free %u, largest %u\n", minfo.mxordblk, minfo.fordblks);
/* Start the failsafe led init */
failsafe_led_init();
/*
* Run everything in a tight loop.
*/
uint64_t last_debug_time = 0;
uint64_t last_heartbeat_time = 0;
watchdog_init();
for (;;) {
watchdog_pet();
#if defined(PX4IO_PERF)
/* track the rate at which the loop is running */
perf_count(loop_perf);
/* kick the mixer */
perf_begin(mixer_perf);
#endif
mixer_tick();
#if defined(PX4IO_PERF)
perf_end(mixer_perf);
/* kick the control inputs */
perf_begin(controls_perf);
#endif
controls_tick();
#if defined(PX4IO_PERF)
perf_end(controls_perf);
#endif
/*
blink blue LED at 4Hz in normal operation. When in
override blink 4x faster so the user can clearly see
that override is happening. This helps when
pre-flight testing the override system
*/
uint32_t heartbeat_period_us = 250 * 1000UL;
if ((hrt_absolute_time() - last_heartbeat_time) > heartbeat_period_us) {
last_heartbeat_time = hrt_absolute_time();
heartbeat_blink();
}
#if defined(HEATER_OUTPUT_EN)
if (r_page_setup[PX4IO_P_SETUP_THERMAL] != PX4IO_THERMAL_IGNORE) {
if (r_page_setup[PX4IO_P_SETUP_THERMAL] < PX4IO_THERMAL_FULL) {
/* switch resistive heater off */
HEATER_OUTPUT_EN(false);
} else {
/* switch resistive heater hard on */
HEATER_OUTPUT_EN(true);
}
}
#endif /* HEATER_OUTPUT_EN */
update_mem_usage();
ring_blink();
check_reboot();
/* check for debug activity (default: none) */
show_debug_messages();
/* post debug state at ~1Hz - this is via an auxiliary serial port
* DEFAULTS TO OFF!
*/
if (hrt_absolute_time() - last_debug_time > (1000 * 1000)) {
isr_debug(1, "d:%u s=0x%x a=0x%x f=0x%x m=%u",
(unsigned)r_page_setup[PX4IO_P_SETUP_SET_DEBUG],
(unsigned)r_status_flags,
(unsigned)r_setup_arming,
(unsigned)r_setup_features,
(unsigned)mallinfo().mxordblk);
last_debug_time = hrt_absolute_time();
}
}
}
|
/**
* Provides a default implementation of {@link SzBulkLoadResponse}.
*/
@JsonDeserialize
public class SzBulkLoadResponseImpl extends SzBasicResponseImpl
implements SzBulkLoadResponse
{
/**
   * The {@link SzBulkLoadResult} describing the result of the bulk load.
*/
private SzBulkLoadResult bulkLoadResult;
/**
* Protected default constructor.
*/
protected SzBulkLoadResponseImpl() {
this.bulkLoadResult = null;
}
/**
   * Constructs with only the meta data and links, leaving the bulk load
   * result to be initialized later.
*
* @param meta The response meta data.
*
* @param links The links for the response.
*/
public SzBulkLoadResponseImpl(SzMeta meta, SzLinks links)
{
this(meta, links, null);
}
/**
   * Constructs with the meta data, links and the {@link
   * SzBulkLoadResult} describing the result of the bulk load.
*
* @param meta The response meta data.
*
* @param links The links for the response.
*
* @param bulkLoadResult The {@link SzBulkLoadResult} describing the result
* of the bulk load.
*/
public SzBulkLoadResponseImpl(SzMeta meta,
SzLinks links,
SzBulkLoadResult bulkLoadResult)
{
super(meta, links);
this.bulkLoadResult = bulkLoadResult;
}
/**
* {@inheritDoc}
*/
@Override
public SzBulkLoadResult getData() {
return this.bulkLoadResult;
}
/**
* {@inheritDoc}
*/
@Override
public void setData(SzBulkLoadResult bulkLoadResult) {
this.bulkLoadResult = bulkLoadResult;
}
}
|
package public
type AuthRequest struct {
User string `json:"user"`
Password string `json:"password"`
}
type Request struct {
User string `json:"user"`
Password string `json:"password"`
LocationID string `json:"locationId"`
ZID string `json:"zId"`
HistoryLimit int `json:"historyLimit"`
}
// RingDeviceStatus represents the Device data on Ring Alarm Devices
type RingDeviceStatus struct {
Name string `json:"name"`
Type string `json:"type"`
Faulted bool `json:"faulted"`
Mode string `json:"mode"`
}
type RingLockStatus struct {
Name string `json:"name"`
ZID string `json:"zid"`
Type string `json:"type"`
Status string `json:"status"`
}
type RingDeviceEvent struct {
DeviceName string `json:"name"`
Time int64 `json:"time"`
Type string `json:"type"`
}
type Response struct {
DeviceStatus []RingDeviceStatus `json:"deviceStatus"`
Events []RingDeviceEvent `json:"events"`
}
type RingMetaData struct {
LocationID string `json:"locationId"`
ZID string `json:"zId"`
}
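
// Example (illustrative only): decoding an alarm Response received from the
// service; encoding/json maps the struct tags above onto the JSON fields.
//
//	var resp Response
//	if err := json.Unmarshal(body, &resp); err != nil {
//		// handle malformed payload
//	}
//	for _, device := range resp.DeviceStatus {
//		fmt.Printf("%s (%s): faulted=%v\n", device.Name, device.Type, device.Faulted)
//	}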
|
/**
* INTERNAL:
* Object to hold onto cache index metadata.
*
* Key notes:
* - any metadata mapped from XML to this class must be compared in the
* equals method.
* - when loading from annotations, the constructor accepts the metadata
* accessor this metadata was loaded from. Used it to look up any
* 'companion' annotation needed for processing.
* - methods should be preserved in alphabetical order.
*
* @author James Sutherland
* @since EclipseLink 2.2
*/
public class CacheIndexMetadata extends ORMetadata {
    private List<String> m_columnNames = new ArrayList<String>();
private Boolean updateable;
/**
* INTERNAL:
* Used for OX mapping.
*/
public CacheIndexMetadata() {
super("<cache-index>");
}
/**
* INTERNAL:
*/
public CacheIndexMetadata(MetadataAnnotation index, MetadataAccessor accessor) {
super(index, accessor);
if (index != null) {
for (Object columnName : index.getAttributeArray("columnNames")) {
m_columnNames.add((String) columnName);
}
this.updateable = index.getAttributeBooleanDefaultTrue("updateable");
}
}
/**
* INTERNAL:
*/
@Override
public boolean equals(Object objectToCompare) {
if (objectToCompare instanceof CacheIndexMetadata) {
CacheIndexMetadata index = (CacheIndexMetadata) objectToCompare;
            if (this.updateable == null
                    ? index.getUpdateable() != null
                    : !this.updateable.equals(index.getUpdateable())) {
                return false;
            }
return this.m_columnNames.equals(index.getColumnNames());
}
return false;
}
/**
* INTERNAL:
* Used for OX mapping.
*/
public List<String> getColumnNames() {
return m_columnNames;
}
/**
* INTERNAL:
* Used for OX mapping.
*/
public Boolean getUpdateable() {
return updateable;
}
/**
* INTERNAL:
* Process the index metadata
*/
public void process(MetadataDescriptor descriptor, String defaultColumnName) {
if (m_columnNames.isEmpty() && (defaultColumnName != null)) {
CachePolicy cachePolicy = descriptor.getClassDescriptor().getCachePolicy();
DatabaseField field = new DatabaseField(defaultColumnName);
if (m_project.useDelimitedIdentifier()) {
field.setUseDelimiters(true);
} else if (m_project.getShouldForceFieldNamesToUpperCase() && !field.shouldUseDelimiters()) {
field.useUpperCaseForComparisons(true);
}
cachePolicy.addCacheIndex(new DatabaseField[] {field});
} else {
CacheIndex index = new CacheIndex();
if (this.updateable != null) {
index.setIsUpdateable(this.updateable);
}
for (String column : m_columnNames) {
index.addFieldName(column);
}
descriptor.getClassDescriptor().getCachePolicy().addCacheIndex(index);
}
}
/**
* INTERNAL:
* Used for OX mapping.
*/
public void setColumnNames(List<String> columnNames) {
this.m_columnNames = columnNames;
}
/**
* INTERNAL:
* Used for OX mapping.
*/
public void setUpdateable(Boolean updateable) {
this.updateable = updateable;
}
}
|
/**
* Find the selected material-parameter override in this view's copy of its
* C-G model.
*
* @return the pre-existing instance (not null)
*/
protected MatParamOverride findSelectedMpo() {
Spatial spatial = selectedSpatial();
String parameterName = cgm.getOverride().parameterName();
MatParamOverride result
= MySpatial.findOverride(spatial, parameterName);
assert result != null;
return result;
}
|
# Fast bulk input: read all of stdin at once and expose a readline-compatible interface.
import os, io
import sys
input=io.BytesIO(os.read(0,os.fstat(0).st_size)).readline
n=int(input())
a=list(map(int,input().split()))
b=[-1]*n
failflag=0
visited=set()
visited.add(a[0])
for i in range(1,n):
if a[i-1]!=a[i]:
b[i]=a[i-1]
if a[i]>i+1:
failflag=1
break
visited.add(a[i])
if failflag==1:
print(-1)
sys.exit()
counter=0
visited2=set()
for i in range(n):
while counter in visited:
counter+=1
if b[i]==-1:
b[i]=counter
if not counter in visited:
counter+=1
elif counter in visited2:
counter+=1
else:
visited2.add(b[i])
ans=[]
for i in range(n):
ans.append(str(b[i]))
print(' '.join(ans))
|
On Friday, conservative radio host Rush Limbaugh responded to the Wall Street Journal which criticized House Speaker John Boehner (R-OH) for bowing to the GOP’s talk radio constituency when he abandoned a recent push for immigration reform. Limbaugh said that President Barack Obama cannot be trusted to enforce border control provisions of an immigration reform bill and there is no mechanism available to Republicans to hold him accountable if he failed to perform his duty. “You can’t impeach the first black president,” he said.
Limbaugh warned his audience on Friday that a “constitutional crisis” could be imminent. He cited as evidence for this impending crisis the fact that some Democratic members of Congress are presently drafting executive orders for Obama to sign. “They are totally willing to grant dictatorial powers to one of their own,” the talk radio host said.
“He’s not obeying the laws we have now,” Limbaugh said. “What’s the next logical step in a case like this?”
RELATED: Rush Limbaugh Chides Fox News for Defending Christie
“You can’t impeach the first black president,” Limbaugh observed. “No matter how corrupt or how lawless.”
He added that the president has embraced “lawlessness” in his lack of enforcement of the provisions of the Affordable Care Act. That precedent, he said, should be enough for conservatives to understand that Obama cannot be trusted to enforce immigration reform.
Listen to the clip below via The Rush Limbaugh Show:
h/t Daily Rushbo
|
package ru.babay.blockquotespan;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import android.text.Layout;
import android.text.TextPaint;
import android.text.style.LeadingMarginSpan;
import android.text.style.LineHeightSpan;
import android.text.style.MetricAffectingSpan;
/**
* Created by babay on 27.08.2017.
*/
public class BlockQuoteSpan extends MetricAffectingSpan implements LeadingMarginSpan, LineHeightSpan, DrawBlockBackgroundSpan, EndAwareSpan {
private final int leftPaddingPx;
private final Drawable drawable;
private final int leftMarginPx;
private int spanEnd;
private int ascentAdd;
private int initialAscent;
public BlockQuoteSpan(int leftMarginPx, int leftPaddingPx, Drawable drawable) {
this.leftMarginPx = leftMarginPx;
this.leftPaddingPx = leftPaddingPx;
this.drawable = drawable;
}
@Override
public int getLeadingMargin(boolean first) {
return leftPaddingPx + leftMarginPx;
}
@Override
public void drawLeadingMargin(Canvas c, Paint p, int x, int dir, int top, int baseline, int bottom, CharSequence text, int start, int end, boolean first, Layout layout) {
}
/**
* we can change TextPaint here
* called when measuring
*
     * @param p the TextPaint used when measuring the text
*/
@Override
public void updateMeasureState(TextPaint p) {
//this is to emulate some right padding: increase textScaleX when measuring and do not increase it when drawing
p.setTextScaleX(1.05f);
}
/**
* we can change TextPaint here
* called when drawing
*
     * @param p the TextPaint used when drawing the text
*/
@Override
public void updateDrawState(TextPaint p) {
}
/**
* if text is multiline, this method is called for each line inside span
*
* @param text
* @param start current line start in text
* @param end current line end in text
* @param spanstartv span's top position (in pixels)
* @param v -- current line's top position (in pixels)
* @param fm -- font metrics
*/
@Override
public void chooseHeight(CharSequence text, int start, int end, int spanstartv, int v, Paint.FontMetricsInt fm) {
if (spanstartv == v) { // this is definitely first line, increase line ascent
initialAscent = fm.ascent;
fm.ascent *= 1.5;
ascentAdd = (int) Math.abs(initialAscent * 0.5);
} else if (initialAscent != 0) { // this is second line, restore line ascent
fm.ascent = initialAscent;
initialAscent = 0;
}
        boolean newlineAfterSpan = spanEnd < text.length() && text.charAt(spanEnd) == '\n';
if (newlineAfterSpan) {
--end;
}
if (end == this.spanEnd) { // this is last line, increase descent
fm.descent += ascentAdd;
}
}
@Override
public void draw(Canvas c, int blockLeft, int blockTop, int blockRight, int blockBottom, int availWidth) {
blockLeft += leftMarginPx;
drawable.setBounds(blockLeft, blockTop, blockRight, blockBottom);
drawable.draw(c);
}
@Override
public void setSpanEnd(int spanEnd) {
this.spanEnd = spanEnd;
}
}
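
// A minimal usage sketch (illustrative only). The LeadingMarginSpan /
// LineHeightSpan behaviour works with any Spannable, but the background
// drawing relies on the custom DrawBlockBackgroundSpan / EndAwareSpan
// interfaces, so the hosting text view is expected to call setSpanEnd()
// and draw() itself:
//
//   BlockQuoteSpan span = new BlockQuoteSpan(marginPx, paddingPx, backgroundDrawable);
//   SpannableStringBuilder text = new SpannableStringBuilder(quote);
//   text.setSpan(span, 0, quote.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
//   span.setSpanEnd(quote.length());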
|
/**
 * Capitalizes the passed string and replaces all '_' with spaces for properly printing the Title enum. Furthermore,
 * the method can also be used to ensure that names are properly capitalized when printing.
* @param str The {@code String} to capitalize
* @return The {@code String} of the capitalized input
*/
public static String capitalizeString(String str) {
if (str == null || str.equals("") || str.equals(" ")) return "";
StringBuilder capitalizedString = new StringBuilder();
char[] strArray = str.toLowerCase().toCharArray();
for (int i = 0; i < strArray.length; i++) if (strArray[i] == '_') strArray[i] = ' ';
capitalizedString.append(Character.toString(strArray[0]).toUpperCase());
capitalizedString.append(String.valueOf(Arrays.copyOfRange(strArray, 1, strArray.length)));
return capitalizedString.toString();
}
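
// Example (illustrative): capitalizeString("GRAND_EXCHEQUER") returns "Grand exchequer" --
// the input is lower-cased, underscores become spaces, and only the first character is upper-cased.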
|
// Wrap interaction with DB and Panopto SOAP services for a particular Blackboard course
public class PanoptoData {
private static final Logger mLog = LoggerFactory.getLogger(PanoptoData.class);
// Keys for Blackboard course registry settings
private static final String hostnameRegistryKey = "CourseCast_Hostname";
private static final String sessionGroupIDRegistryKey = "CourseCast_SessionGroupID";
private static final String sessionGroupDisplayNameRegistryKey = "CourseCast_SessionGroupDisplayName";
private static final String copySessionGroupIDsRegistryKey = "CourseCast_CopySessionGroupIDs";
private static final String copySessionGroupDisplayNamesRegistryKey = "CourseCast_CopySessionGroupDisplayNames";
private static final String originalContextRegistryKey = "CourseCast_OriginalContext";
// version strings to report back to Panopto, only need to grab if not defined (see below in InitPanoptoData).
private static String plugInVersion;
private static String platformVersion;
// Argument used to return empty arrays as they are immutable.
private static final String[] emptyStringArray = new String[0];
// Constants used for paging.
private final int maxPages = 100;
private final int perPage = 100;
// Blackboard course we are associating with
private Course bbCourse;
// Blackboard username of currently logged in user
private String bbUserName;
private List<String> instructorRoleIds = new ArrayList<String>();
private List<String> taRoleIds = new ArrayList<String>();
private boolean isInstructor = false;
private boolean canAddLinks = false;
// Panopto server to talk to
private String serverName;
// Version number of the current Panopto server
private PanoptoVersion serverVersion;
// B2 configuration
private static Map<String, Object> resultMap;
// User key to use when talking to Panopto SOAP services
// (Instance-decorated username of currently-logged-in Blackboard user)
private String apiUserKey;
private String apiUserPwd;
// Hash of username, server, and shared secret for securing web services
private String apiUserAuthCode;
// ID and display name of currently associated Panopto course
private String[] sessionGroupPublicIDs;
private String[] sessionGroupDisplayNames;
// ID and display name of associated Panopto courses through a copy
private String[] copySessionGroupPublicIDs;
private String[] copySessionGroupDisplayNames;
// SOAP port for talking to Panopto
private ISessionManagement sessionManagement;
private IUsageReporting usageReporting;
private IUserManagement userManagement;
// Construct the PanoptoData object using the current Blackboard context
// (e.g. from <bbData:context ...> tag)
// Pulls in stored property values from BB course registry if available.
// Ensure that serverName and
// sessionGroupPublicIDs are set before calling any instance methods that
// rely on these properties (most).
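    // A minimal usage sketch (illustrative only; it assumes the surrounding
    // Blackboard page has a valid Context and that the course has already been
    // provisioned to at least one Panopto folder):
    //
    //   PanoptoData data = new PanoptoData(ctx);
    //   if (data.isServerSet() && data.isMapped()) {
    //       Folder[] folders = data.getFolders();
    //       if (folders != null && folders.length > 0 && folders[0] != null) {
    //           Session[] sessions = data.getSessions(folders[0].getId());
    //       }
    //   }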
public PanoptoData(String api) {
//Get B2 configuration
resultMap = CommonUtil.getInstance().getValues("config/props/config.properties");
serverName = resultMap.get("servername").toString();
apiUserKey = resultMap.get("panoptoId").toString();
apiUserPwd = resultMap.get("panoptoPwd").toString();
if(api.equals("sessionManagement")) sessionManagement = getPanoptoSessionManagementSOAPService(serverName);
else if(api.equals("userManagement")) userManagement = getPanoptoUserManagementSOAPService(serverName);
else if(api.equals("usageReporting")) usageReporting = getPanoptoUsageReportingSOAPService(serverName);
}
public PanoptoData(String server, String id, String pwd, String api) {
serverName = server;
apiUserKey = id;
apiUserPwd = pwd;
if(api.equals("sessionManagement")) sessionManagement = getPanoptoSessionManagementSOAPService(serverName);
else if(api.equals("userManagement")) userManagement = getPanoptoUserManagementSOAPService(serverName);
else if(api.equals("usageReporting")) usageReporting = getPanoptoUsageReportingSOAPService(serverName);
}
public PanoptoData(Context ctx) {
// Get B2 configuration
resultMap = CommonUtil.getInstance().getValues("config/props/config.properties");
InitPanoptoData(ctx.getCourse(), ctx.getUser().getUserName());
}
public PanoptoData(String bbCourseId, String bbUserName) {
// Get B2 configuration
resultMap = CommonUtil.getInstance().getValues("config/props/config.properties");
BbPersistenceManager bbPm = PersistenceServiceFactory.getInstance().getDbPersistenceManager();
Course bbCourse = null;
try {
CourseDbLoader courseLoader = (CourseDbLoader) bbPm.getLoader(CourseDbLoader.TYPE);
bbCourse = courseLoader.loadByCourseId(bbCourseId);
InitPanoptoData(bbCourse, bbUserName);
} catch (Exception e) {
mLog.error("Error getting course info (course ID: %s).", bbCourseId, e);
}
}
public PanoptoData(Course bbCourse, String bbUserName) {
// Get B2 configuration
resultMap = CommonUtil.getInstance().getValues("config/props/config.properties");
InitPanoptoData(bbCourse, bbUserName);
}
private void InitPanoptoData(Course bbCourse, String bbUserName) {
this.bbCourse = bbCourse;
this.bbUserName = bbUserName;
this.isInstructor = PanoptoData.isUserInstructor(this.bbCourse.getId(), this.bbUserName, false);
this.canAddLinks = PanoptoData.canUserAddLinks(this.bbCourse.getId(), this.bbUserName);
        // If plugInVersion is null, set both the plug-in and platform versions.
if (plugInVersion == null) {
PlugInManager plugInManager = PlugInManagerFactory.getInstance();
List<PlugIn> plugins = plugInManager.getPlugIns();
for (PlugIn plugIn : plugins) {
if (plugIn.getName().equals("Panopto Connector")) {
plugInVersion = plugIn.getVersion().toString();
break;
}
}
platformVersion = plugInManager.getPlatformVersion().toString();
}
Utils.pluginSettings = new Settings();
updateServerName(resultMap.get("servername").toString());
// List<String> serverList = Utils.pluginSettings.getServerList();
// // If there is only one server available, use it
// if (serverList.size() == 1) {
// updateServerName(serverList.get(0));
// } else {
// updateServerName(getCourseRegistryEntry(hostnameRegistryKey));
// }
String roleMappingsString = Utils.pluginSettings.getRoleMappingString();
String[] roleMappingsSplit = roleMappingsString.split(";");
for (String mappingString : roleMappingsSplit) {
String[] mappingArray = mappingString.split(":");
if (mappingArray.length == 2) {
String RoleId = mappingArray[0];
if (mappingArray[1].trim().equalsIgnoreCase("instructor")) {
instructorRoleIds.add(RoleId);
} else if (mappingArray[1].trim().equalsIgnoreCase("ta")) {
taRoleIds.add(RoleId);
}
}
}
sessionGroupPublicIDs = getCourseRegistryEntries(sessionGroupIDRegistryKey);
sessionGroupDisplayNames = getCourseRegistryEntries(sessionGroupDisplayNameRegistryKey);
copySessionGroupPublicIDs = getCourseRegistryEntries(copySessionGroupIDsRegistryKey);
copySessionGroupDisplayNames = getCourseRegistryEntries(copySessionGroupDisplayNamesRegistryKey);
// Check that the list of Ids and names are valid. They must both be the
// same length
if ((sessionGroupPublicIDs == null && sessionGroupDisplayNames != null)
|| (sessionGroupPublicIDs != null && sessionGroupDisplayNames == null)
|| (sessionGroupPublicIDs != null && sessionGroupPublicIDs.length != sessionGroupDisplayNames.length)) {
            mLog.error(String.format(
                    "Invalid course registry settings. Resetting both to null. sessionGroupPublicIDs = %s, "
                            + "sessionGroupDisplayNames = %s",
                    Utils.encodeArrayOfStrings(sessionGroupPublicIDs),
                    Utils.encodeArrayOfStrings(sessionGroupDisplayNames)));
this.sessionGroupPublicIDs = null;
this.sessionGroupDisplayNames = null;
setCourseRegistryEntry(hostnameRegistryKey, null);
setCourseRegistryEntries(sessionGroupIDRegistryKey, null);
setCourseRegistryEntries(sessionGroupDisplayNameRegistryKey, null);
}
}
public Course getBBCourse() {
return bbCourse;
}
public String getProvisionUrl(String serverName, String returnUrl) {
if (serverName == null) {
return "";
}
return "Course_Provision.jsp?provisionServerName=" + serverName + "&bbCourses=" + getBBCourse().getCourseId()
+ "&returnUrl=" + returnUrl;
}
public String getServerName() {
return serverName;
}
/**
* Get the display names of all folders provisioned to this course.
*
* @return String Array of friendly names of folders provisioned to this
* course
*/
public String[] getFolderDisplayNames() {
if (sessionGroupDisplayNames == null) {
return emptyStringArray;
}
return sessionGroupDisplayNames;
}
/**
* Get the display names of all folders that this course has copied
* permissions to.
*
* @return String Array of friendly names of folders copied to this course
*/
public String[] getCopiedFolderDisplayNames() {
if (copySessionGroupDisplayNames == null) {
return emptyStringArray;
}
return copySessionGroupDisplayNames;
}
// gets the number of folders associated with this course
public int getNumberOfFolders() {
if (sessionGroupPublicIDs == null) {
return 0;
}
return sessionGroupPublicIDs.length;
}
/**
* Gets the number of copied folders associated with this course
*
* @return Integer representing number of folders copied to this course
*/
public int getNumberOfCopiedFolders() {
if (copySessionGroupPublicIDs == null) {
return 0;
}
return copySessionGroupPublicIDs.length;
}
// Update properties that depend on serverName
private void updateServerName(String serverName) {
this.serverName = serverName;
if ((serverName != null) && !serverName.equals("")) {
// apiUserKey = Utils.decorateBlackboardUserName(bbUserName);
apiUserKey = resultMap.get("panoptoId").toString();
apiUserPwd = resultMap.get("panoptoPwd").toString();
// apiUserAuthCode = Utils.generateAuthCode(serverName, apiUserKey + "@" + serverName);
sessionManagement = getPanoptoSessionManagementSOAPService(serverName);
usageReporting = getPanoptoUsageReportingSOAPService(serverName);
serverVersion = getServerVersion();
} else {
apiUserKey = null;
apiUserAuthCode = null;
sessionManagement = null;
serverVersion = null;
}
}
public boolean isServerSet() {
return (serverName != null);
}
// i.e. a Panopto course been selected for this Blackboard course
public boolean isMapped() {
return (sessionGroupPublicIDs != null);
}
// Returns true if this course has inherited any permissions due to course
// copy
public boolean isCopyMapped() {
return (copySessionGroupPublicIDs != null);
}
// Get the Panopto user being used for SOAP calls
public String getApiUserKey() {
return apiUserKey;
}
// Determine if this course is in the original context and the course has
// NOT been copied.
public boolean isOriginalContext() {
boolean isOriginal = true;
// If the registry does not exist, assume this is original and set the
// value.
if (getCourseRegistryEntry(originalContextRegistryKey) == null) {
setCourseRegistryEntry(originalContextRegistryKey, bbCourse.getId().toExternalString());
} else if (!getCourseRegistryEntry(originalContextRegistryKey)
.equalsIgnoreCase(bbCourse.getId().toExternalString())) {
// There is a context and it doesn't match the current context.
isOriginal = false;
}
return (isOriginal);
}
// If this course has no original context set it to the context of the
// source course.
public void setOriginalCopyContext(String sourceId) {
// If the registry does not exist, assume this is original and set the
// value.
if (getCourseRegistryEntry(originalContextRegistryKey) == null) {
setCourseRegistryEntry(originalContextRegistryKey, sourceId);
}
}
///////////////////////////////////////////////////////////////////////////////////////////
// The following "get..." functions are just wrappers for the relevant SOAP calls,
// using stored course mapping and credentials.
///////////////////////////////////////////////////////////////////////////////////////////
// Gets all the sessions in a folder from the Panopto server. Returns null
// on error.
public Session[] getSessions(String folderId) {
Session[] returnValue;
try {
// Get all the sessions
int page = 0;
int responseCount = 0;
int totalSessionsExpected = -1;
ListSessionsResponse listResponse;
List<Session> allSessions = new ArrayList<Session>();
// AuthenticationInfo auth = new AuthenticationInfo(null, "panopto321", "jykim");
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
do {
ListSessionsRequest request = new ListSessionsRequest();
request.setFolderId(folderId);
request.setPagination(new Pagination(this.perPage, page));
request.setSortBy(SessionSortField.Date);
request.setSortIncreasing(true); // sortIncreasing = true
request.setStates(new SessionState[] { SessionState.Broadcasting, SessionState.Complete, SessionState.Recording });
                listResponse = sessionManagement.getSessionsList(auth, request, null); // searchQuery = null
allSessions.addAll(Arrays.asList(listResponse.getResults()));
if (totalSessionsExpected == -1) {
// First time through, grab the expected total count.
totalSessionsExpected = listResponse.getTotalNumberResults();
}
Session[] returnedSessions = listResponse.getResults();
responseCount += returnedSessions.length;
page++;
} while ((responseCount < totalSessionsExpected) && (page < this.maxPages));
returnValue = new Session[allSessions.size()];
returnValue = allSessions.toArray(returnValue);
} catch (Exception e) {
mLog.error("Error getting sessions (folder ID: %s, api user: %s).", folderId, apiUserKey, e);
returnValue = null;
}
return returnValue;
}
// Gets all the folders associated with this course. Any folder we can't get
// will return as null.
public Folder[] getFolders() {
Folder[] retVal = null;
if (sessionGroupPublicIDs != null && sessionGroupPublicIDs.length > 0) {
// AuthenticationInfo auth = new AuthenticationInfo(null, "panopto321", "jykim");
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
try {
retVal = sessionManagement.getFoldersById(auth, sessionGroupPublicIDs);
} catch (Exception e) {
mLog.debug("first attempt at getFoldersById failed, calling syncUser");
// Got an error from the panopto server. sync the user's
// credentials and try again
syncUser(serverName, bbUserName);
try {
retVal = sessionManagement.getFoldersById(auth, sessionGroupPublicIDs);
} catch (Exception e2) {
// Still failed. Could be because one of the folders has
// been deleted. Get them one at a time.
retVal = new Folder[sessionGroupPublicIDs.length];
for (int i = 0; i < sessionGroupPublicIDs.length; i++) {
try {
retVal[i] = sessionManagement.getFoldersById(auth,
new String[] { sessionGroupPublicIDs[i] })[0];
} catch (Exception e3) {
mLog.error("Error getting folder(courseId: %s, courseTitle %s, folder ID: %s, api" + "user: %s).",
bbCourse.getId().toExternalString(), bbCourse.getTitle(), sessionGroupPublicIDs[i], apiUserKey, e3);
retVal[i] = null;
}
}
}
}
}
return retVal;
}
// Gets all the SessionDetailedUsage associated with this session.
public DetailedUsageResponseItem[] getSessionDetailedUsage(String sessionId, String beginRange, String endRange) {
DetailedUsageResponseItem[] returnValue;
try {
// Get all the sessions
int page = 0;
int responseCount = 0;
int totalSessionsExpected = -1;
ListUsageReportingResponse listResponse;
List<DetailedUsageResponseItem> allSessions = new ArrayList<DetailedUsageResponseItem>();
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
do {
listResponse = usageReporting.getSessionDetailedUsage(auth, sessionId, new Pagination(this.perPage, page), beginRange, endRange);
allSessions.addAll(Arrays.asList(listResponse.getPagedResponses()));
if (totalSessionsExpected == -1) {
// First time through, grab the expected total count.
totalSessionsExpected = listResponse.getTotalNumberResponses();
}
DetailedUsageResponseItem[] returnedSessions = listResponse.getPagedResponses();
responseCount += returnedSessions.length;
page++;
} while ((responseCount < totalSessionsExpected) && (page < this.maxPages));
returnValue = new DetailedUsageResponseItem[allSessions.size()];
returnValue = allSessions.toArray(returnValue);
} catch (Exception e) {
mLog.error("Error getting SessionDetailedUsage", e);
returnValue = null;
}
return returnValue;
}
// Gets all the SessionUserDetailedUsage associated with this session.
public DetailedUsageResponseItem[] getSessionUserDetailedUsage(String sessionId, String userId) {
DetailedUsageResponseItem[] returnValue;
try {
// Get all the sessions
int page = 0;
int responseCount = 0;
int totalSessionsExpected = -1;
ListUsageReportingResponse listResponse;
List<DetailedUsageResponseItem> allSessions = new ArrayList<DetailedUsageResponseItem>();
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
do {
listResponse = usageReporting.getSessionUserDetailedUsage(auth, sessionId, userId, new Pagination(this.perPage, page));
allSessions.addAll(Arrays.asList(listResponse.getPagedResponses()));
if (totalSessionsExpected == -1) {
// First time through, grab the expected total count.
totalSessionsExpected = listResponse.getTotalNumberResponses();
}
DetailedUsageResponseItem[] returnedSessions = listResponse.getPagedResponses();
responseCount += returnedSessions.length;
page++;
} while ((responseCount < totalSessionsExpected) && (page < this.maxPages));
returnValue = new DetailedUsageResponseItem[allSessions.size()];
returnValue = allSessions.toArray(returnValue);
} catch (Exception e) {
mLog.error("Error getting SessionUserDetailedUsage", e);
returnValue = null;
}
return returnValue;
}
public com.panopto.services.User getUserBykey(String userKey) {
com.panopto.services.User returnValue = null;
try {
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
returnValue = userManagement.getUserByKey(auth, userKey);
} catch (Exception e) {
mLog.error("Error getting SessionUserDetailedUsage", e);
}
return returnValue;
}
/**
     * Get all of the public IDs for the folders provisioned to the course
*
* @return String Array of folder public IDs provisioned to course (Guids)
*/
private String[] getFolderIDs() {
if (sessionGroupPublicIDs == null) {
return emptyStringArray;
}
return sessionGroupPublicIDs;
}
/**
     * Get all of the public IDs for the folders this course has access to
* through a copy
*
* @return String Array of folder public IDs course has copied permission to
* (Guids)
*/
private String[] getCopiedFolderIDs() {
if (copySessionGroupPublicIDs == null) {
return emptyStringArray;
}
return copySessionGroupPublicIDs;
}
// Gets the urls to download the recorders
public com.panopto.services.RecorderDownloadUrlResponse getRecorderDownloadUrls() {
try {
return sessionManagement.getRecorderDownloadUrls();
} catch (RemoteException e) {
mLog.error("Error getRecorderDownloadUrls", e);
return null;
}
}
// Used to sort folders alphabetically by name
class FolderComparator implements Comparator<Folder> {
public int compare(Folder f1, Folder f2) {
return f1.getName().toLowerCase().compareTo(f2.getName().toLowerCase());
}
}
// Gets all the Panopto folders the user has creator access to
private Folder[] getFoldersWithCreatorAccess() {
try {
// First sync the user so his memberships will be up to date
syncUser(serverName, bbUserName);
// Next get the user's access details
// AuthenticationInfo auth = new AuthenticationInfo(null, "panopto321", "jykim");
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
HashSet<String> foldersWithCreatorAccess = new HashSet<String>();
// Get all the folders
int page = 0;
int responseCount = 0;
int totalFoldersExpected = -1;
ListFoldersResponse listResponse;
List<Folder> allFolders = new ArrayList<Folder>();
do {
ListFoldersRequest foldersRequest = new ListFoldersRequest(new Pagination(this.perPage, page), null,
false, FolderSortField.Name, true);
listResponse = getPanoptoSessionManagementSOAPService(serverName).getCreatorFoldersList(auth, foldersRequest,
null);
allFolders.addAll(Arrays.asList(listResponse.getResults()));
if (totalFoldersExpected == -1) {
// First time through, grab the expected total count.
totalFoldersExpected = listResponse.getTotalNumberResults();
}
Folder[] returnedFolders = listResponse.getResults();
// Log which folders we got back. foldersWithCreatorAccess,
// folderIdList, and returnedFolders are all
// just in place for logging.
foldersWithCreatorAccess = new HashSet<String>();
for (Folder folder : returnedFolders) {
foldersWithCreatorAccess.add(folder.getId());
}
String[] folderIdList = foldersWithCreatorAccess.toArray(new String[0]);
mLog.debug(String.format(
"getFoldersWithCreatorAccess. User: %s, page: %d, returned from getCreatorFoldersList: %s",
bbUserName, page, Utils.encodeArrayOfStrings(folderIdList)));
responseCount += returnedFolders.length;
page++;
} while ((responseCount < totalFoldersExpected) && (page < this.maxPages));
mLog.debug(
String.format("Expected %d folders, returned %d folders", totalFoldersExpected, allFolders.size()));
return allFolders.toArray(new Folder[allFolders.size()]);
} catch (RemoteException e) {
mLog.error("Error getting folders with creator access (server: %s, apiUserKey: %s).", serverName, apiUserKey, e);
return null;
}
}
// Gets all the public folders from the server
private Folder[] getPublicFolders() {
try {
// AuthenticationInfo auth = new AuthenticationInfo(null, "panopto321", "jykim");
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
HashSet<String> publicFolders = new HashSet<String>();
// Get all the folders
int page = 0;
int responseCount = 0;
int totalFoldersExpected = -1;
ListFoldersResponse listResponse;
List<Folder> allFolders = new ArrayList<Folder>();
do {
ListFoldersRequest request = new ListFoldersRequest();
request.setPublicOnly(true);
request.setPagination(new Pagination(this.perPage, page));
listResponse = getPanoptoSessionManagementSOAPService(serverName).getCreatorFoldersList(auth, request,
null);
allFolders.addAll(Arrays.asList(listResponse.getResults()));
if (totalFoldersExpected == -1) {
// First time through, grab the expected total count.
totalFoldersExpected = listResponse.getTotalNumberResults();
}
Folder[] returnedFolders = listResponse.getResults();
// Log which folders we got back. foldersWithCreatorAccess,
// folderIdList, and returnedFolders are all
// just in place for logging.
publicFolders = new HashSet<String>();
for (Folder folder : returnedFolders) {
publicFolders.add(folder.getId());
}
String[] folderIdList = publicFolders.toArray(new String[0]);
mLog.debug(
String.format("getPublicFolders. User: %s, page: %d, returned from getPublicFolders: %s",
bbUserName, page, Utils.encodeArrayOfStrings(folderIdList)));
responseCount += returnedFolders.length;
page++;
} while ((responseCount < totalFoldersExpected) && (page < this.maxPages));
mLog.debug(
String.format("Expected %d folders, returned %d folders", totalFoldersExpected, allFolders.size()));
return allFolders.toArray(new Folder[allFolders.size()]);
} catch (RemoteException e) {
mLog.error("Error getting public folders (server: %s, apiUserKey: %s).", serverName, apiUserKey, e);
return null;
}
}
// Generates the list of selected folders for the course config page's
// listbox
public String generateCourseConfigSelectedFoldersOptionsHTML() {
StringBuffer result = new StringBuffer();
if (sessionGroupDisplayNames != null) {
for (int i = 0; i < sessionGroupDisplayNames.length; i++) {
result.append("<option");
result.append(" value='" + sessionGroupPublicIDs[i] + "'");
result.append(">");
result.append(Utils.escapeHTML(sessionGroupDisplayNames[i]));
result.append("</option>\n");
}
}
return result.toString();
}
/**
* The HTML generation code for copied Panopto folders so that it can be
* properly displayed
*
* @return HTML string of all copied folders as <options> tags
*/
public String generateCourseConfigCopyFoldersOptionsHTML() {
StringBuffer result = new StringBuffer();
if (copySessionGroupDisplayNames != null) {
for (int i = 0; i < copySessionGroupDisplayNames.length; i++) {
result.append("<option");
result.append(" value='" + copySessionGroupPublicIDs[i] + "'");
result.append(">");
result.append(Utils.escapeHTML(copySessionGroupDisplayNames[i]));
result.append("</option>\n");
}
}
return result.toString();
}
// Generates the list of available folders for the course config page's
// listbox
public String generateCourseConfigAvailableFoldersOptionsHTML() {
        StringBuffer result = new StringBuffer();
        // Get all the folders the user has access to (may be null on SOAP errors)
        Folder[] folders = getFoldersWithCreatorAccess();
        // Sort them by name
        ArrayList<Folder> sortedFolders = new ArrayList<Folder>();
        if (folders != null) {
            sortedFolders.addAll(Arrays.asList(folders));
        }
        Collections.sort(sortedFolders, new FolderComparator());
        // Build a hash of the currently selected folders so we can quickly
        // exclude them
        HashSet<String> currentFolderIds = new HashSet<String>();
        currentFolderIds.addAll(Arrays.asList(getFolderIDs()));
// Finally write out the options in sorted order
for (Folder folder : sortedFolders) {
if (!currentFolderIds.contains(folder.getId())) {
result.append("<option");
result.append(" value='" + folder.getId() + "'");
result.append(">");
result.append(Utils.escapeHTML(folder.getName()));
result.append("</option>\n");
}
}
return result.toString();
}
// Looks up the display string for a given folderID. The folderId must be
// already mapped with the course
public String getFolderDisplayString(String folderId) {
if (folderId != null) {
for (int i = 0; i < sessionGroupDisplayNames.length; i++) {
if (sessionGroupPublicIDs[i].equals(folderId)) {
return sessionGroupDisplayNames[i];
}
}
}
return null;
}
// Generate <option>s for available folders. Used by Item_Create to select a
// folder
public String generateFolderOptionsHTML(String folderId) {
StringBuffer result = new StringBuffer();
Folder[] publicFolders = this.getPublicFolders();
int numFolders = getNumberOfFolders();
numFolders += publicFolders != null ? publicFolders.length : 0;
if (numFolders == 0) {
result.append("<option value=''>-- No Folders Available --</option>");
} else {
result.append("<option value=''>-- Select a Folder --</option>");
// Only use option groups if we have elements in both groups
boolean useOptionalGroups = sessionGroupDisplayNames != null && sessionGroupDisplayNames.length > 0
&& publicFolders != null && publicFolders.length > 0;
// Add all the mapped folders
if (sessionGroupDisplayNames != null && sessionGroupDisplayNames.length > 0) {
if (useOptionalGroups) {
result.append("<optgroup label='Mapped Folders'>\n");
}
for (int i = 0; i < sessionGroupDisplayNames.length; i++) {
String strDisplayName = Utils.escapeHTML(sessionGroupDisplayNames[i]);
String strID = sessionGroupPublicIDs[i];
result.append("<option");
result.append(" value='" + strID + "'");
if (strID.equals(folderId)) {
result.append(" SELECTED");
}
result.append(">");
result.append(strDisplayName);
result.append("</option>\n");
}
if (useOptionalGroups) {
result.append("</optgroup>\n");
}
}
// Add all the public folders
if (publicFolders != null && publicFolders.length > 0) {
if (useOptionalGroups) {
result.append("<optgroup label='Public Folders'>\n");
}
for (int i = 0; i < publicFolders.length; i++) {
String strDisplayName = Utils.escapeHTML(publicFolders[i].getName());
String strID = publicFolders[i].getId();
result.append("<option");
result.append(" value='" + strID + "'");
if (strID.equals(folderId)) {
result.append(" SELECTED");
}
result.append(">");
result.append(strDisplayName);
result.append("</option>\n");
}
if (useOptionalGroups) {
result.append("</optgroup>\n");
}
}
}
return result.toString();
}
// Generate <option>s for available sessions. Used by Item_Create to select
    // a session once a folder is selected
public String generateSessionOptionsHTML(String folderID) {
StringBuffer result = new StringBuffer();
if (folderID == null || folderID == "") {
result.append("<option value=''>-- Please select a folder first --</option>\n");
} else {
mLog.error("Folder ID: " + folderID);
Session[] sessions = this.getSessions(folderID);
if (sessions != null) {
if (sessions.length == 0) {
result.append("<option value=''>-- The folder is empty --</option>");
} else {
result.append("<option value=''>-- Select a Lecture --</option>");
for (Session session : sessions) {
String strDisplayName = Utils.escapeHTML(session.getName());
result.append("<option");
result.append(" value='" + session.getViewerUrl() + "'");
result.append(">");
result.append(strDisplayName);
result.append("</option>\n");
}
}
} else {
result.append("<option value=''>!! Unable to retrieve lecture list !!</option>\n");
}
}
return result.toString();
}
// Returns true if the user is an instructor for this course
public boolean IsInstructor() {
return isInstructor;
}
public boolean canAddLinks() {
return canAddLinks;
}
// Insert a content item in the current course with a link to the specified
// delivery.
public LinkAddedResult addBlackboardContentItem(String content_id, String lectureUrl, String title,
String description) {
Session[] sessionArray;
LinkAddedResult linkAddedResult = LinkAddedResult.FAILURE;
try {
Utils.pluginSettings = new Settings();
            // Get the session ID from the URL query parameter and add it to an
            // array to pass into updateSessionsAvailabilityStartSettings().
Map<String, String> urlParams = this.getQueryMap(lectureUrl);
String sessionID = urlParams.get("id");
String[] sessionIds = { sessionID };
// retrieve the Db persistence manager from the persistence service
BbPersistenceManager bbPm = PersistenceServiceFactory.getInstance().getDbPersistenceManager();
// Generate AuthenticationInfo for calling availability window
// update method.
// AuthenticationInfo auth = new AuthenticationInfo(null, "panopto321", "jykim");
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
// Get user's blackboard ID from their username
UserDbLoader userLoader = (UserDbLoader) bbPm.getLoader(UserDbLoader.TYPE);
User user = userLoader.loadByUserName(bbUserName);
Id bbUserId = user.getId();
// Load the user's membership in the current course to get their
// role
CourseMembershipDbLoader membershipLoader = (CourseMembershipDbLoader) bbPm
.getLoader(CourseMembershipDbLoader.TYPE);
CourseMembership usersCourseMembership = membershipLoader.loadByCourseAndUserId(bbCourse.getId(), bbUserId);
Role userRole = usersCourseMembership.getRole();
// Determine if current user has creator access to session.
boolean isCreator = (isInstructorRole(userRole)
|| (isTARole(userRole) && Utils.pluginSettings.getGrantTACreator()));
if (isCreator) {
                // availabilityState defaults to Unknown; the link is only added
                // below once the session's availability window can be confirmed
                // (or the session can be made available) via the API.
PanoptoAvailabilityWindow.AvailabilityState availabilityState = PanoptoAvailabilityWindow.AvailabilityState.Unknown;
if (PanoptoVersions.canCallAvailabilityWindowApiMethods(serverVersion)) {
try {
// If user is a creator on Panopto, check if the session
// is in its availability window.
availabilityState = this.checkSessionAvailabilityState(sessionID, auth);
} catch (Exception e) {
// Problem getting availability window information from
// the API. Do not add the session to the
// course.
availabilityState = PanoptoAvailabilityWindow.AvailabilityState.Unknown;
mLog.error("Error getting availability information for sessions from server.", e);
}
}
if (availabilityState == PanoptoAvailabilityWindow.AvailabilityState.Available) {
// If the session is in its availability window. add it to
// the course and return success.
addSessionLinkToCourse(content_id, lectureUrl, title, description, bbPm);
linkAddedResult = LinkAddedResult.SUCCESS;
} else if (availabilityState != PanoptoAvailabilityWindow.AvailabilityState.Unknown) {
// We successfully determined the availability but it's
// either unavailable or unpublished. Try to
// make the session available immediately.
try {
sessionManagement.updateSessionsAvailabilityStartSettings(auth, sessionIds,
SessionStartSettingType.Immediately, null);
// The session is now available, add the session and
// return success
addSessionLinkToCourse(content_id, lectureUrl, title, description, bbPm);
linkAddedResult = LinkAddedResult.SUCCESS;
} catch (Exception e) {
if (availabilityState == PanoptoAvailabilityWindow.AvailabilityState.Unpublished) {
// The session needs publishing, but our attempt to
// publish failed. We must not have publish
// rights
linkAddedResult = LinkAddedResult.NOTPUBLISHER;
}
}
}
} else {
// If the user does not have a creator role, first call API to
// determine whether session is already
// available.
sessionArray = sessionManagement.getSessionsById(auth, sessionIds);
if (sessionArray.length < 1) {
                    // If no session is returned, the session is not in its
                    // availability window and the current user cannot make it
                    // available. Return failure indicating the user must ask a
                    // creator to make the session available.
linkAddedResult = LinkAddedResult.NOTCREATOR;
} else {
// If the session is currently in its availability window,
// it can be added to the course without
// having to make any API call. Return success.
addSessionLinkToCourse(content_id, lectureUrl, title, description, bbPm);
linkAddedResult = LinkAddedResult.SUCCESS;
}
}
} catch (Exception e) {
// General error when trying to add a session to a course. Print
// details to log.
mLog.error("Error adding content item (content ID: %s, lecture Url: %s, title: %s, description: %s).", content_id, lectureUrl, title, description, e);
linkAddedResult = LinkAddedResult.FAILURE;
}
return linkAddedResult;
}
// Determine the availability state of a session
private PanoptoAvailabilityWindow.AvailabilityState checkSessionAvailabilityState(String sessionId,
AuthenticationInfo auth) throws RemoteException {
String[] sessionIds = { sessionId };
// Get availability window settings for the session.
SessionAvailabilitySettings sessionSettings = sessionManagement
.getSessionsAvailabilitySettings(auth, sessionIds).getResults()[0];
FolderAvailabilitySettings folderSettings = null;
if (PanoptoAvailabilityWindow.isFolderRequiredForSessionAvailability(sessionSettings)) {
// Folder availability settings are also needed to determine whether
// the session is available. Load the
// session data to get the folder, then get the folder availability
Session session = sessionManagement.getSessionsById(auth, sessionIds)[0];
folderSettings = sessionManagement
.getFoldersAvailabilitySettings(auth, new String[] { session.getFolderId() }).getResults()[0];
}
return PanoptoAvailabilityWindow.getSessionAvailability(sessionSettings, folderSettings);
}
// Adds a link to a Panopto session to the content area of the current
// Blackboard course.
private void addSessionLinkToCourse(String content_id, String lectureUrl, String title, String description,
BbPersistenceManager bbPm) throws PersistenceException, ValidationException {
// Create a course document and set all desired attributes
Content content = new Content();
content.setTitle(title);
content.setBody(new FormattedText(description, FormattedText.Type.HTML));
content.setUrl(lectureUrl);
content.setRenderType(Content.RenderType.URL);
content.setLaunchInNewWindow(true);
content.setContentHandler("hyperlink/coursecast");
// Set course and parent content IDs (required)
Id parentId = bbPm.generateId(Content.DATA_TYPE, content_id);
content.setParentId(parentId);
Id courseId = bbCourse.getId();
content.setCourseId(courseId);
// retrieve the content persister and persist the content item
ContentDbPersister persister = (ContentDbPersister) bbPm.getPersister(ContentDbPersister.TYPE);
persister.persist(content);
}
    // Returns a map of the URL's query parameters and their values. Used for getting the
    // session ID of the session to make available.
    public Map<String, String> getQueryMap(String url) {
        Map<String, String> map = null;
        String[] splitURL = url.split("\\?");
        // Guard against URLs without a query string to avoid an ArrayIndexOutOfBoundsException.
        if (splitURL.length > 1 && !splitURL[1].isEmpty()) {
            String[] params = splitURL[1].split("&");
            map = new HashMap<String, String>();
            for (String param : params) {
                String[] pair = param.split("=");
                String name = pair[0];
                String value = (pair.length > 1) ? pair[1] : "";
                map.put(name, value);
            }
        }
        return map;
}
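    // Illustrative example (the URL is hypothetical): for a viewer URL such as
    // "https://server/Panopto/Pages/Viewer.aspx?id=abc-123", getQueryMap returns a map
    // containing the single entry "id" -> "abc-123".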
    // Syncs a user with Panopto so that their course memberships are up to date.
public void syncUser() {
PanoptoData.syncUser(serverName, bbUserName);
}
    // Syncs a user with Panopto so that their course memberships are up to date.
    // Note: This method calculates external IDs for Panopto. This is not standard
    // practice, as we try to keep all string-calculation code on the Panopto side.
public static void syncUser(String serverName, String bbUserName) {
if(resultMap == null || resultMap.isEmpty()) {
resultMap = CommonUtil.getInstance().getValues("config/props/config.properties");
}
AuthenticationInfo auth = new AuthenticationInfo(null, resultMap.get("panoptoPwd").toString(), resultMap.get("panoptoId").toString());
try {
// Load the user's profile info
BbPersistenceManager bbPm = PersistenceServiceFactory.getInstance().getDbPersistenceManager();
UserDbLoader userLoader = (UserDbLoader) bbPm.getLoader(UserDbLoader.TYPE);
User user = userLoader.loadByUserName(bbUserName);
Id bbUserId = user.getId();
CourseDbLoader courseLoader = (CourseDbLoader) bbPm.getLoader(CourseDbLoader.TYPE);
CourseMembershipDbLoader courseMembershipLoader = (CourseMembershipDbLoader) bbPm.getLoader(CourseMembershipDbLoader.TYPE);
List<Course> instructorCourses = new ArrayList<Course>();
List<Course> studentCourses = new ArrayList<Course>();
List<Course> taCourses = new ArrayList<Course>();
List<CourseMembership> allCourseMemberships = courseMembershipLoader.loadByUserId(bbUserId);
Course currentCourse;
Utils.pluginSettings = new Settings();
for (CourseMembership membership : allCourseMemberships) {
try {
// This is membership availability, this user was specifically marked unavailable for this membership. Affects all roles.
if (!Utils.pluginSettings.getSyncAvailabilityStatus() || membership.getIsAvailable()) {
Role membershipRole = membership.getRole();
currentCourse = courseLoader.loadById(membership.getCourseId());
if (isInstructorRole(membershipRole)) {
instructorCourses.add(currentCourse);
} else if (isTARole(membershipRole)) {
taCourses.add(currentCourse);
} else if (isStudentRole(membershipRole)) {
studentCourses.add(currentCourse);
} else {
// This is for a user with no role for this course.
// This is not added to any course list.
}
} else {
mLog.debug(String.format("The membership associated with the course with id %1$s is unavailable.",
membership.getCourseId()));
}
} catch (KeyNotFoundException e) {
mLog.error(String.format("The course with id %1$s either does not exist or is unavailable.",
membership.getCourseId()));
} catch (Exception ex) {
                    mLog.error(String.format("Failed to load course %1$s for membership %2$s", membership.getCourseId(), membership.getId()), ex);
}
}
ArrayList<String> externalGroupIds = new ArrayList<String>();
StringBuilder courseList = new StringBuilder();
for (Course course : studentCourses) {
Calendar startDate = course.getStartDate();
Calendar endDate = course.getEndDate();
// Just to be safe let's make sure the current calendar is in the right timezone
Calendar currentDate;
boolean isAfterStartDate = true;
boolean isBeforeEndDate = true;
                // A missing start or end date is treated as unbounded; otherwise the current
                // time is compared against it in that date's own time zone.
if (startDate != null) {
currentDate = Calendar.getInstance(startDate.getTimeZone());
isAfterStartDate = currentDate.after(startDate);
}
if (endDate != null) {
currentDate = Calendar.getInstance(endDate.getTimeZone());
isBeforeEndDate = currentDate.before(endDate);
}
                // This is course-based availability. It only restricts students because
                // instructors and TAs still have access to unavailable courses they are
                // enrolled in; course availability only affects students in Blackboard.
if (!Utils.pluginSettings.getSyncAvailabilityStatus() ||
(course.getIsAvailable() && (isAfterStartDate && isBeforeEndDate))) {
courseList.append(course.getTitle());
Id courseId = course.getId();
String courseServerName = getCourseRegistryEntry(courseId, hostnameRegistryKey);
if (courseIsCorrectlyProvisioned(courseId, serverName, courseServerName, courseList)) {
String groupName = Utils.decorateBlackboardCourseID(courseId.toExternalString()) + "_viewers";
externalGroupIds.add(groupName);
courseList.append('(' + groupName + ')');
}
courseList.append(';');
}
}
mLog.debug(
String.format("Sync'ing user %s group membership to server %s. Student group membership: %s",
bbUserName, serverName, courseList.toString()));
courseList = new StringBuilder();
for (Course course : instructorCourses) {
courseList.append(course.getTitle());
Id courseId = course.getId();
String courseServerName = getCourseRegistryEntry(courseId, hostnameRegistryKey);
if (courseIsCorrectlyProvisioned(courseId, serverName, courseServerName, courseList)) {
String groupName = Utils.decorateBlackboardCourseID(courseId.toExternalString()) + "_creators";
externalGroupIds.add(groupName);
courseList.append('(' + groupName + ')');
}
courseList.append(';');
}
mLog.debug(
String.format("Sync'ing user %s group membership to server %s. Instructor group membership: %s",
bbUserName, serverName, courseList.toString()));
courseList = new StringBuilder();
for (Course course : taCourses) {
courseList.append(course.getTitle());
Id courseId = course.getId();
String courseServerName = getCourseRegistryEntry(courseId, hostnameRegistryKey);
if (courseIsCorrectlyProvisioned(courseId, serverName, courseServerName, courseList)) {
String groupName;
if (Utils.pluginSettings.getGrantTACreator()) {
groupName = Utils.decorateBlackboardCourseID(courseId.toExternalString()) + "_creators";
} else {
groupName = Utils.decorateBlackboardCourseID(courseId.toExternalString()) + "_viewers";
}
externalGroupIds.add(groupName);
courseList.append('(' + groupName + ')');
}
courseList.append(';');
}
mLog.debug(String.format("Sync'ing user %s group membership to server %s. TA group membership: %s",
bbUserName, serverName, courseList.toString()));
getPanoptoUserManagementSOAPService(serverName).syncExternalUser(auth, user.getGivenName(),
user.getFamilyName(), user.getEmailAddress(), Utils.pluginSettings.getMailLectureNotifications(),
externalGroupIds.toArray(new String[0]));
} catch (Exception e) {
            mLog.error(String.format("Error sync'ing user's group membership (server: %s, user: %s).", serverName, bbUserName), e);
}
}
/**
* Returns true if the course with the given id is provisioned to the
* correct server
*
* Checks both that the course server name in the registry entry is filled
* in and correct, and that the original context key is equal to the current
* course id. If it isn't that means this is one of the copies that we
* didn't initiate.
*
* Will also fill in any information to the courseList string builder.
*
* @param courseId
* Course that we are checking if is provisioned
* @param serverName
* Server name we want to know if course is provisioned against
* @param courseServerName
* Server that course might be provisioned against
* @param courseList
* StringBuilder that contains the notes for what happened
* @return True if the course is provisioned
*/
private static Boolean courseIsCorrectlyProvisioned(Id courseId, String serverName, String courseServerName,
StringBuilder courseList) {
String originalId = getCourseRegistryEntry(courseId, originalContextRegistryKey);
Boolean result = false;
if (courseServerName != null) {
if (!courseServerName.equalsIgnoreCase(serverName)) {
courseList.append("(provisioned against " + courseServerName + ")");
} else if (originalId != null && !originalId.equalsIgnoreCase(courseId.toExternalString())) {
courseList.append("(never provisioned)");
} else {
result = true;
}
}
return result;
}
/**
     * Attempts to report basic integration info to the Panopto server, called after successful reprovisioning.
* @param auth authentication info of user performing the call.
*/
private void reportIntegrationInfo(AuthenticationInfo auth) {
try {
if (this.serverVersion == null) {
this.serverVersion = getServerVersion();
}
// We can only report back the integration info on a Panopto server v5.4 or greater.
if (PanoptoVersions.canReportIntegrationInfo(serverVersion)) {
IAuth iAuth = getPanoptoAuthSOAPService(serverName);
iAuth.reportIntegrationInfo(auth, Utils.pluginSettings.getInstanceName(), plugInVersion, platformVersion);
}
} catch (RemoteException ex) {
            mLog.error(String.format("Error reporting Integration Info to server: %s", serverName), ex);
}
}
// Updates the course so it is mapped to the given folders
public boolean reprovisionCourse(String[] folderIds) {
try {
Utils.pluginSettings = new Settings();
String externalCourseId = Utils.decorateBlackboardCourseID(bbCourse.getId().toExternalString());
String fullName = bbCourse.getCourseId() + ": " + bbCourse.getTitle();
// Provision the course
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
Folder[] folders = getPanoptoSessionManagementSOAPService(serverName).setExternalCourseAccess(auth,
fullName, externalCourseId, folderIds);
updateCourseFolders(folders);
this.reportIntegrationInfo(auth);
// Add menu item if setting is enabled
if (Utils.pluginSettings.getInsertLinkOnProvision()) {
addCourseMenuLink();
}
} catch (Exception e) {
String folderString = Utils.encodeArrayOfStrings(folderIds);
if (folderString == null) {
folderString = "empty";
}
            mLog.error(String.format("Error reprovisioning course (id: %s, server: %s, user: %s, folders: %s).", bbCourse.getId().toExternalString(), serverName, bbUserName, folderString), e);
return false;
}
return true;
}
// Updates the course so it has no Panopto data
public void resetCourse() throws RemoteException {
if (!isMapped()) {
mLog.error(String.format("Cannot reset BB course, not mapped yet. ID: %s, Title: %s\n",
bbCourse.getId().toExternalString(), bbCourse.getTitle()));
} else {
// Before blowing away the data, get the folder list.
Folder[] courseFolders = getFolders();
mLog.error(String.format(
"Resetting BB course, ID: %s, Title: %s, Old Server: %s, Old Panopto IDs: %s\n, Old folders count: %s\n",
bbCourse.getId().toExternalString(), bbCourse.getTitle(), serverName,
Utils.encodeArrayOfStrings(sessionGroupPublicIDs), courseFolders.length));
// In the set registry entry function, we delete existing entries
// and only create new ones if the value is
// not null.
setCourseRegistryEntry(hostnameRegistryKey, null);
setCourseRegistryEntry(originalContextRegistryKey, null);
setCourseRegistryEntries(sessionGroupIDRegistryKey, null);
setCourseRegistryEntries(sessionGroupDisplayNameRegistryKey, null);
            // If there are empty Panopto folders for this course, delete them so we don't
            // leave behind empty, unused folders. This also reduces provisioning errors when
            // a folder already exists.
            if (courseFolders.length > 0) {
ArrayList<String> foldersToDelete = new ArrayList<String>(courseFolders.length);
for (int idx = 0; idx < courseFolders.length; idx++) {
Session[] sessionsInFolder = this.getSessions(courseFolders[idx].getId());
if ((sessionsInFolder == null) || (sessionsInFolder.length == 0)) {
foldersToDelete.add(courseFolders[idx].getId());
}
}
// Batch delete all empty folders to reduce the API calls made.
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
sessionManagement.deleteFolders(auth, foldersToDelete.toArray(new String[foldersToDelete.size()]));
}
}
}
// Re-provisions the course with the current settings. If it has never been
// provisioned before a new folder will be
// created
public boolean reprovisionCourse() {
return reprovisionCourse(sessionGroupPublicIDs);
}
public boolean provisionCourse(String serverName) {
Utils.pluginSettings = new Settings();
updateServerName(serverName);
setCourseRegistryEntry(hostnameRegistryKey, serverName);
setCourseRegistryEntry(originalContextRegistryKey, bbCourse.getId().toExternalString());
try {
String externalCourseId = Utils.decorateBlackboardCourseID(bbCourse.getId().toExternalString());
String fullName = bbCourse.getCourseId() + ": " + bbCourse.getTitle();
// Provision the course
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
Folder[] folders = new Folder[] { getPanoptoSessionManagementSOAPService(serverName)
.provisionExternalCourse(auth, fullName, externalCourseId) };
updateCourseFolders(folders);
this.reportIntegrationInfo(auth);
// Add menu item if setting is enabled
if (Utils.pluginSettings.getInsertLinkOnProvision()) {
addCourseMenuLink();
}
} catch (Exception e) {
            mLog.error(String.format("Error provisioning course (id: %s, server: %s, user: %s).", bbCourse.getId().toExternalString(), serverName, bbUserName), e);
return false;
}
return true;
}
/**
* Copy Panopto folder permissions from the source course into this course
* as viewer access.
*
* Copies Panopto folder permissions via taking all provisioned folders and
* all copied permissions from the source and giving the current course's
* Panopto groups viewer access to those folders. Will also add these copied
* folder public IDs and display names to the course registry so it can be
* later referenced.
*
* @param sourceCourse
* Blackboard Course that we are copying Panopto permission from
*/
public void copyCoursePermissions(Course sourceCourse) {
try {
// We only work against 5_3 and above so evaluate the current server
// version before attempting to copy
PanoptoData sourceCourseData = new PanoptoData(sourceCourse, this.bbUserName);
updateServerName(sourceCourseData.serverName);
if (this.serverVersion == null) {
this.serverVersion = getServerVersion();
}
// We only do something here if the source course's users have
// permissions to view some folders in Panopto
// this can be either regular permission or copied permissions.
if (PanoptoVersions.canCallCopyApiMethods(serverVersion)
&& (sourceCourseData.isMapped() || sourceCourseData.isCopyMapped())) {
// Source course has some Panopto folder permissions, generate a
// list of all folders it can see
// including both direct and copied. Generate list of both ids
// and names.
Set<String> folderIDs = new HashSet<String>(
sourceCourseData.getNumberOfFolders() + sourceCourseData.getNumberOfCopiedFolders());
folderIDs.addAll(Arrays.asList(sourceCourseData.getFolderIDs()));
folderIDs.addAll(Arrays.asList(sourceCourseData.getCopiedFolderIDs()));
Set<String> folderNames = new HashSet<String>(
sourceCourseData.getNumberOfFolders() + sourceCourseData.getNumberOfCopiedFolders());
folderNames.addAll(Arrays.asList(sourceCourseData.getFolderDisplayNames()));
folderNames.addAll(Arrays.asList(sourceCourseData.getCopiedFolderDisplayNames()));
// Update registry to set this course as mapped to a server and
// context
setCourseRegistryEntry(hostnameRegistryKey, serverName);
setCourseRegistryEntry(originalContextRegistryKey, bbCourse.getId().toExternalString());
AuthenticationInfo auth = new AuthenticationInfo(null, apiUserPwd, apiUserKey);
String externalCourseId = Utils.decorateBlackboardCourseID(bbCourse.getId().toExternalString());
String fullName = bbCourse.getCourseId() + ": " + bbCourse.getTitle();
getPanoptoSessionManagementSOAPService(serverName).setCopiedExternalCourseAccess(auth, fullName,
externalCourseId, folderIDs.toArray(emptyStringArray));
// Save the new list of folders back into the course registry,
// includes both public IDs and names,
// concatenated with any existing copied folders that might
// already be there.
folderIDs.addAll(Arrays.asList(this.getCopiedFolderIDs()));
setCourseRegistryEntries(copySessionGroupIDsRegistryKey, folderIDs.toArray(emptyStringArray));
folderNames.addAll(Arrays.asList(this.getCopiedFolderDisplayNames()));
setCourseRegistryEntries(copySessionGroupDisplayNamesRegistryKey,
folderNames.toArray(emptyStringArray));
// Log the action
mLog.error(String.format(
"Copied BB course, Source ID: %s, Target ID: %s, Title: %s, Server: %s, Panopto ID: %s\n",
sourceCourse.getCourseId(), bbCourse.getId().toExternalString(), bbCourse.getTitle(),
serverName, Utils.encodeArrayOfStrings(sessionGroupPublicIDs)));
}
} catch (Exception e) {
mLog.error(String.format("Error provisioning course copy (id: %s, server: %s, user: %s).", bbCourse.getId().toExternalString(), serverName, bbUserName), e);
}
}
// Called after provision or reprovision to update the local store of folder
// metadata
private void updateCourseFolders(Folder[] folders) {
setCourseRegistryEntry(hostnameRegistryKey, serverName);
// First sort the folders.
ArrayList<Folder> sortedFolders = new ArrayList<Folder>();
sortedFolders.addAll(Arrays.asList(folders));
Collections.sort(sortedFolders, new FolderComparator());
// Now construct the list of ids and the list of names
this.sessionGroupPublicIDs = new String[folders.length];
this.sessionGroupDisplayNames = new String[folders.length];
for (int i = 0; i < folders.length; i++) {
// These are GUIDs and will never be too long
sessionGroupPublicIDs[i] = sortedFolders.get(i).getId();
// Display names might go past the 255 Blackboard limit so we elide
// the string to be no more than 255
// characters
sessionGroupDisplayNames[i] = Utils.elideMiddle(sortedFolders.get(i).getName(), 100, 255);
}
// Save the new list of folders back into the registry
mLog.error(String.format("Provisioned BB course, ID: %s, Title: %s, Server: %s, Panopto ID: %s\n",
bbCourse.getId().toExternalString(), bbCourse.getTitle(), serverName,
Utils.encodeArrayOfStrings(sessionGroupPublicIDs)));
setCourseRegistryEntries(sessionGroupIDRegistryKey, sessionGroupPublicIDs);
setCourseRegistryEntries(sessionGroupDisplayNameRegistryKey, sessionGroupDisplayNames);
}
public static boolean HasPanoptoServer(Course bbCourse) {
return getCourseRegistryEntry(bbCourse.getId(), hostnameRegistryKey) != null;
}
public static List<Course> GetAllCourses() {
BbPersistenceManager bbPm = PersistenceServiceFactory.getInstance().getDbPersistenceManager();
try {
CourseDbLoader courseLoader = (CourseDbLoader) bbPm.getLoader(CourseDbLoader.TYPE);
return courseLoader.loadAllCourses();
} catch (Exception e) {
mLog.error("Error getting all courses.", e);
}
return null;
}
// Gets all the members of the course from Blackboard
private static List<CourseMembership> getCourseMemberships(Course bbCourse) {
BbPersistenceManager bbPm = PersistenceServiceFactory.getInstance().getDbPersistenceManager();
// Get the course membership (instructors, students, etc.)
List<CourseMembership> courseMemberships = null;
try {
CourseMembershipDbLoader courseMembershipLoader = (CourseMembershipDbLoader) bbPm
.getLoader(CourseMembershipDbLoader.TYPE);
courseMemberships = courseMembershipLoader.loadByCourseId(bbCourse.getId(), null, true);
} catch (Exception e) {
mLog.error(String.format("Error getting course membership (course ID: %s).", bbCourse.getId()), e);
}
return courseMemberships;
}
    // Gets the user key of all the TAs of the course
public List<String> getTAs() {
ArrayList<String> lstTAs = new ArrayList<String>();
// Get the course membership (instructors, students, etc.)
List<CourseMembership> courseMemberships = getCourseMemberships(bbCourse);
List<CourseMembership> TACourseMemberships = new ArrayList<CourseMembership>();
for (CourseMembership membership : courseMemberships) {
blackboard.data.course.CourseMembership.Role membershipRole = membership.getRole();
if (isTARole(membershipRole)) {
TACourseMemberships.add(membership);
}
}
        for (CourseMembership courseMembership : TACourseMemberships) {
            User courseUser = courseMembership.getUser();
            if (courseUser != null) {
                String courseUserKey = Utils.decorateBlackboardUserName(courseUser.getUserName());
                lstTAs.add(courseUserKey);
            }
        }
return lstTAs;
}
// Gets the user key of all the students of the course
public List<String> getStudents() {
ArrayList<String> lstStudents = new ArrayList<String>();
// Get the course membership (instructors, students, etc.)
List<CourseMembership> courseMemberships = getCourseMemberships(bbCourse);
List<CourseMembership> studentCourseMemberships = new ArrayList<CourseMembership>();
for (CourseMembership membership : courseMemberships) {
blackboard.data.course.CourseMembership.Role membershipRole = membership.getRole();
if (isStudentRole(membershipRole)) {
studentCourseMemberships.add(membership);
}
}
        for (CourseMembership courseMembership : studentCourseMemberships) {
            User courseUser = courseMembership.getUser();
            if (courseUser != null) {
                String courseUserKey = Utils.decorateBlackboardUserName(courseUser.getUserName());
                lstStudents.add(courseUserKey);
            }
        }
return lstStudents;
}
    // Gets the user key of all the instructors of the course
public List<String> getInstructors() {
ArrayList<String> lstInstructors = new ArrayList<String>();
// Get the course membership (instructors, students, etc.)
List<CourseMembership> courseMemberships = getCourseMemberships(bbCourse);
List<CourseMembership> instructorCourseMemberships = new ArrayList<CourseMembership>();
for (CourseMembership membership : courseMemberships) {
blackboard.data.course.CourseMembership.Role membershipRole = membership.getRole();
if (isInstructorRole(membershipRole)) {
instructorCourseMemberships.add(membership);
}
}
        for (CourseMembership courseMembership : instructorCourseMemberships) {
            User courseUser = courseMembership.getUser();
            if (courseUser != null) {
                String courseUserKey = Utils.decorateBlackboardUserName(courseUser.getUserName());
                lstInstructors.add(courseUserKey);
            }
        }
return lstInstructors;
}
/**
* Gets info about all users with no role for the course.
*/
public List<String> getNoRoleUsers() {
ArrayList<String> lstNoRoleUsers = new ArrayList<String>();
// Get the course membership (instructors, students, etc.)
List<CourseMembership> courseMemberships = getCourseMemberships(bbCourse);
List<CourseMembership> noRoleCourseMemberships = new ArrayList<CourseMembership>();
for (CourseMembership membership : courseMemberships) {
blackboard.data.course.CourseMembership.Role membershipRole = membership.getRole();
if (isNoRole(membershipRole)) {
noRoleCourseMemberships.add(membership);
}
}
        for (CourseMembership courseMembership : noRoleCourseMemberships) {
            User courseUser = courseMembership.getUser();
            if (courseUser != null) {
                String courseUserKey = Utils.decorateBlackboardUserName(courseUser.getUserName());
                lstNoRoleUsers.add(courseUserKey);
            }
        }
return lstNoRoleUsers;
}
/*
* Returns true if role should be treated as an Instructor. Instructors get
* creator access in Panopto.
*/
private static boolean isInstructorRole(blackboard.data.course.CourseMembership.Role membershipRole) {
// Role is instructor role if it is the 'Instructor' or 'Course Builder'
// built in blackboard role, or if it is
// in the custom instructor roles list.
return membershipRole.equals(CourseMembership.Role.INSTRUCTOR)
|| membershipRole.equals(CourseMembership.Role.COURSE_BUILDER)
|| getIdsForRole("instructor").contains(membershipRole.getIdentifier().toLowerCase());
}
/*
* Returns true if role should be treated as a Student. Students get viewer
* access in Panopto.
*/
private static boolean isStudentRole(blackboard.data.course.CourseMembership.Role membershipRole) {
// Role is student role if it is not a built in instructor or ta role,
// or a mapped custom role.
return !isInstructorRole(membershipRole) && !isTARole(membershipRole) && !isNoRole(membershipRole);
}
/*
     * Returns true if role should be treated as a TA. TAs get viewer access in
     * Panopto, unless otherwise specified in the Blackboard block settings. Any
     * custom Blackboard roles are treated as TAs, unless they are marked with
* the 'Act As Instructor' flag.
*/
private static boolean isTARole(blackboard.data.course.CourseMembership.Role membershipRole) {
// Role is a TA role if it is the 'Teaching Assistant' built in
// blackboard role or if it is in the list of
// custom ta roles
return membershipRole.equals(Role.TEACHING_ASSISTANT)
|| (getIdsForRole("ta").contains(membershipRole.getIdentifier().toLowerCase()));
}
/**
* Returns true if role should be treated as no privilege on Panopto. Users
* with this role do not get access in Panopto, except publicly or
* organization wide viewable.
*/
private static boolean isNoRole(blackboard.data.course.CourseMembership.Role membershipRole) {
return (getIdsForRole("none").contains(membershipRole.getIdentifier().toLowerCase()));
}
public boolean userMayProvision() {
Utils.pluginSettings = new Settings();
// Admins may provision any course. Instructors may provision their
// own course if the setting is enabled
return Utils.userCanConfigureSystem() || (IsInstructor() && Utils.pluginSettings.getInstructorsCanProvision());
}
// Check for whether a user may add course menu links. This may be done by
// Admins and instructors at any time or by
// TAs if setting is checked
public boolean userMayAddLinks() {
// Admins can add links to any course. Instructors can add links to
// their own courses
return Utils.userCanConfigureSystem() || this.canAddLinks();
}
public boolean userMayConfig() {
// Admins may config any course. Instructors may configure their own
// courses
return Utils.userCanConfigureSystem() || this.IsInstructor();
}
/**
* Determine if there are any copied permissions the user can see
*
* Will be true for admins and instructors if there are any copied folders
* for the course, is not used on any pages that students should be able to
* see.
*
* @return Boolean true if the course has copied Panopto permissions and
* user can see them
*/
public boolean courseHasCopiedPermissionsToBeDisplayed() {
return this.userMayConfig() && (this.getNumberOfCopiedFolders() > 0);
}
// Returns true if the specified user is an instructor of the specified
// course
// If checkTACanCreateLinks is true, a check will be performed if either
// getGrantTAProvision
// or getTAsCanCreateLinks returns true. This is used when checking if TAs
// may add course menu links
public static boolean isUserInstructor(Id bbCourseId, String bbUserName, boolean checkTACanCreateLinks) {
BbPersistenceManager bbPm = PersistenceServiceFactory.getInstance().getDbPersistenceManager();
try {
// Get user's blackboard ID from their username
UserDbLoader userLoader = (UserDbLoader) bbPm.getLoader(UserDbLoader.TYPE);
User user = userLoader.loadByUserName(bbUserName);
Id bbUserId = user.getId();
// Load the user's membership in the current course to get their
// role
CourseMembershipDbLoader membershipLoader = (CourseMembershipDbLoader) bbPm
.getLoader(CourseMembershipDbLoader.TYPE);
CourseMembership usersCourseMembership = membershipLoader.loadByCourseAndUserId(bbCourseId, bbUserId);
Role userRole = usersCourseMembership.getRole();
Utils.pluginSettings = new Settings();
if (isInstructorRole(userRole)) {
return true;
}
// If settings are configured to treat TAs as an instructor, and the
// user is either a Teaching Assistant or
// has a custom role, return true.
else if (Utils.pluginSettings.getGrantTAProvision()
|| (checkTACanCreateLinks && Utils.pluginSettings.getTAsCanCreateLinks())) {
if (isTARole(userRole)) {
return true;
}
}
} catch (Exception e) {
mLog.error(String.format("Error getting user's course membership (course ID: %s, userName: %s).",
bbCourseId, bbUserName), e);
}
// User is not an instructor
return false;
}
// Returns true if the specified user can add a course menu link. Nearly
// identical to isUserInstructor but includes
// a check for TAsCanCreateLinks
public static boolean canUserAddLinks(Id bbCourseId, String bbUserName) {
return isUserInstructor(bbCourseId, bbUserName, true);
}
// Will be used in the future.
@SuppressWarnings("unused")
private static IAccessManagement getPanoptoAccessManagementSOAPService(String serverName) {
IAccessManagement port = null;
try {
URL SOAP_URL = new URL("https://" + serverName + "/Panopto/PublicAPI/4.6/AccessManagement.svc");
// Connect to the SessionManagement SOAP service on the specified
// Panopto server
AccessManagementLocator service = new AccessManagementLocator();
port = (IAccessManagement) service.getBasicHttpBinding_IAccessManagement(SOAP_URL);
} catch (Exception e) {
mLog.error(String.format("Error getting Access Management SOAP service (server: %s).", serverName), e);
}
return port;
}
private static ISessionManagement getPanoptoSessionManagementSOAPService(String serverName) {
ISessionManagement port = null;
try {
URL SOAP_URL = new URL("https://" + serverName + "/Panopto/PublicAPI/4.6/SessionManagement.svc");
// Connect to the SessionManagement SOAP service on the specified
// Panopto server
SessionManagementLocator service = new SessionManagementLocator();
port = (ISessionManagement) service.getBasicHttpBinding_ISessionManagement(SOAP_URL);
} catch (Exception e) {
mLog.error(String.format("Error getting Session Management SOAP service (server: %s).", serverName), e);
}
return port;
}
private static IUserManagement getPanoptoUserManagementSOAPService(String serverName) {
IUserManagement port = null;
try {
URL SOAP_URL = new URL("https://" + serverName + "/Panopto/PublicAPI/4.6/UserManagement.svc");
// Connect to the UserManagement SOAP service on the specified
// Panopto server
UserManagementLocator service = new UserManagementLocator();
port = (IUserManagement) service.getBasicHttpBinding_IUserManagement(SOAP_URL);
} catch (Exception e) {
mLog.error(String.format("Error getting User Management SOAP service (server: %s).", serverName), e);
}
return port;
}
private static IUsageReporting getPanoptoUsageReportingSOAPService(String serverName) {
IUsageReporting port = null;
try {
URL SOAP_URL = new URL("https://" + serverName + "/Panopto/PublicAPI/4.6/UsageReporting.svc");
// Connect to the UsageReporting SOAP service on the specified
// Panopto server
UsageReportingLocator service = new UsageReportingLocator();
port = (IUsageReporting) service.getBasicHttpBinding_IUsageReporting(SOAP_URL);
} catch (Exception e) {
mLog.error(String.format("Error getting Usage Reporting SOAP service (server: %s).", serverName), e);
}
return port;
}
private static IAuth getPanoptoAuthSOAPService(String serverName) {
IAuth port = null;
try {
URL SOAP_URL = new URL("https://" + serverName + "/Panopto/PublicAPI/4.6/Auth.svc");
// Connect to the UserManagement SOAP service on the specified
// Panopto server
AuthLocator service = new AuthLocator();
port = (IAuth) service.getBasicHttpBinding_IAuth(SOAP_URL);
} catch (Exception e) {
mLog.error(String.format("Error getting Auth SOAP service (server: %s).", serverName), e);
}
return port;
}
// Instance method just calls out to static method below
private String getCourseRegistryEntry(String key) {
return getCourseRegistryEntry(bbCourse.getId(), key);
}
// Instance method just calls out to static method below
private void setCourseRegistryEntry(String key, String value) {
setCourseRegistryEntry(bbCourse.getId(), key, value);
}
private static String getCourseRegistryEntry(Id courseId, String key) {
String value = null;
try {
CourseRegistryEntryDbLoader creLoader = CourseRegistryEntryDbLoader.Default.getInstance();
Registry registry = creLoader.loadRegistryByCourseId(courseId);
value = registry.getValue(key);
} catch (Exception e) {
mLog.error(String.format("Error getting course registry entry (key: %s).", key), e);
}
return value;
}
// Blackboard DB values have a limited length so we store each string entry
// as its own value with a
// number-post-fixed key. Here we have to handle legacy values that might be
// stored with just the key name.
// Assumption: only called for keys previously stored with
// setCourseRegistryEntries or the old method with
// double-quoted, comma-separated values.
private String[] getCourseRegistryEntries(String key) {
String[] values = null;
try {
mLog.debug("Getting CourseRegistry for course: " + bbCourse.getId().toExternalString());
CourseRegistryEntryDbLoader creLoader = CourseRegistryEntryDbLoader.Default.getInstance();
Registry registry = creLoader.loadRegistryByCourseId(bbCourse.getId());
String value = registry.getValue(key);
// If there's a value for the plain key, we are dealing with a
// legacy value which should be in the "","",""
// format. We decode the string and return the individual values
if (value != null) {
values = Utils.decodeArrayOfStrings(value);
}
// If no value, then we are dealing with the new case where we split
// values up into individual keys so look
// each of those up until we stop finding them
else {
// Use an ArrayList because we don't know ahead of time how many
// we may have. This is Ok for small
// numbers (e.g., less than 100).
ArrayList<String> list = new ArrayList<String>();
String tempValue = registry.getValue(key + list.size());
while (tempValue != null) {
list.add(tempValue);
tempValue = registry.getValue(key + list.size());
}
// If there were no values at all then 'value' will remain null
// as expected
if (list.size() > 0) {
values = new String[list.size()];
list.toArray(values);
}
}
} catch (Exception e) {
mLog.error(String.format("Error getting course registry entry (key: %s).", key), e);
}
return values;
}
// The Blackboard course registry stores key/values pairs per-course. Static
// setter enables us to store registry
// values for newly-provisioned courses without the cost of instantiating
// and populating an object for each one.
private static void setCourseRegistryEntry(Id courseId, String key, String value) {
try {
CourseRegistryEntryDbPersister crePersister = CourseRegistryEntryDbPersister.Default.getInstance();
DeleteKeyForCourse(crePersister, key, courseId);
if (value != null) {
CourseRegistryEntry cre = new CourseRegistryEntry(key, value);
cre.setCourseId(courseId);
crePersister.persist(cre);
}
} catch (Exception e) {
mLog.error(String.format("Error setting course registry entry (course ID: %s, key: %s, value: %s).",
courseId.toExternalString(), key, value), e);
}
}
// Blackboard DB values have a limited length so we store each string entry
// as its own value with a
// number-post-fixed key.
private void setCourseRegistryEntries(String key, String[] values) {
try {
mLog.debug("Setting CourseRegistry for course: " + bbCourse.getId().toExternalString());
CourseRegistryEntryDbPersister crePersister = CourseRegistryEntryDbPersister.Default.getInstance();
CourseRegistryEntryDbLoader creLoader = CourseRegistryEntryDbLoader.Default.getInstance();
Registry registry = creLoader.loadRegistryByCourseId(bbCourse.getId());
// Delete any legacy key still hanging around
mLog.debug("Checking for legacy key " + key);
RegistryEntry entry = registry.getEntry(key);
if (entry != null) {
mLog.debug("Deleting legacy key " + key);
DeleteKeyForCourse(crePersister, key, bbCourse.getId());
}
// Now delete any per-folder entries that may exist. We don't know
// how many so we just start at 0 and keep
// looking until we find a missing one.
int j = 0;
String keyToDelete = key + j;
entry = registry.getEntry(keyToDelete);
mLog.debug("Checking for key " + keyToDelete);
while (entry != null) {
mLog.debug("Deleting key " + keyToDelete);
DeleteKeyForCourse(crePersister, keyToDelete, bbCourse.getId());
j++;
keyToDelete = key + j;
mLog.debug("Checking for key " + keyToDelete);
entry = registry.getEntry(keyToDelete);
}
// If we have values, save them each in their own key
if (values != null) {
mLog.debug("Adding new values - count: " + values.length);
for (int i = 0; i < values.length; i++) {
mLog.debug("Adding new key: " + key + i + " value: " + values[i]);
CourseRegistryEntry cre = new CourseRegistryEntry(key + i, values[i]);
cre.setCourseId(bbCourse.getId());
crePersister.persist(cre);
}
}
} catch (Exception e) {
mLog.error(String.format("Error setting course registry entry (course ID: %s, key: %s, value: %s).",
bbCourse.getId().toExternalString(), key, Utils.encodeArrayOfStrings(values)), e);
}
}
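    // Illustrative example (the key and values are hypothetical): storing the values
    // {"folderA", "folderB"} under the key "panoptoFolders" persists two registry entries,
    // "panoptoFolders0" = "folderA" and "panoptoFolders1" = "folderB", which
    // getCourseRegistryEntries later reassembles into a single array.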
// Helper method that just makes 6 lines into one in the calling method
private static void DeleteKeyForCourse(CourseRegistryEntryDbPersister crePersister, String keyToDelete,
Id courseId) {
try {
crePersister.deleteByKeyAndCourseId(keyToDelete, courseId);
        } catch (KeyNotFoundException knfe) {
            // The key does not exist for this course; nothing to delete.
        } catch (PersistenceException pe) {
            // Intentionally ignored; deletion is best-effort and callers log subsequent failures.
        }
}
private PanoptoVersion getServerVersion() {
IAuth iAuth = getPanoptoAuthSOAPService(serverName);
return PanoptoVersion.fetchOrEmpty(iAuth);
}
private void addCourseMenuLink() throws ValidationException, PersistenceException {
Id cid;
cid = bbCourse.getId();
Utils.pluginSettings = new Settings();
// Get list of page's current menu links
List<CourseToc> courseTocList = CourseTocDbLoader.Default.getInstance().loadByCourseId(cid);
        // Iterate through each existing link and check whether its label matches the text of
        // the item to be created
        boolean linkExists = false;
        for (CourseToc ct : courseTocList) {
            // If the label matches, set linkExists to true so we don't add a duplicate link
            if (ct.getLabel().equals(Utils.pluginSettings.getMenuLinkText())) {
                linkExists = true;
                break;
            }
        }
        // If a link with the desired text doesn't exist, create a new one
if (!linkExists) {
CourseToc panLink = new CourseToc();
panLink.setCourseId(cid);
            // Create an application-type course link. This directs to the URL set for the
            // target plugin in bb-manifest.xml, with the course's ID appended as an argument.
            // This CourseToc target type allows the link to open within the current course
            // without being wrapped in an external content frame, and the plugin content will
            // always point to the corresponding Panopto content of the course that the link
            // is being accessed from.
panLink.setTargetType(CourseToc.Target.APPLICATION);
panLink.setLabel(Utils.pluginSettings.getMenuLinkText());
panLink.setLaunchInNewWindow(false);
panLink.setIsEntryPoint(false);
            // Set the internal handle for the target application, in this case the handle for
            // the "Panopto course tool" application. The internal handle name is assigned by
            // Blackboard and is outside our control; it is sometimes assigned a suffix other
            // than -1, so try -2 and -3 as fallbacks.
try {
panLink.setInternalHandle("ppto-PanoptoCourseToolApp-nav-1");
CourseTocDbPersister.Default.getInstance().persist(panLink);
} catch (blackboard.persist.PersistenceException e1) {
mLog.error(String.format(
"addCourseMenuLink(%s): ppto-PanoptoCourseToolApp-nav-1 failed. retrying with -2.",
bbCourse.getId().toExternalString()));
try {
panLink.setInternalHandle("ppto-PanoptoCourseToolApp-nav-2");
CourseTocDbPersister.Default.getInstance().persist(panLink);
} catch (blackboard.persist.PersistenceException e2) {
                    mLog.error(String.format(
                            "addCourseMenuLink(%s): ppto-PanoptoCourseToolApp-nav-2 failed. retrying with -3.",
                            bbCourse.getId().toExternalString()));
panLink.setInternalHandle("ppto-PanoptoCourseToolApp-nav-3");
CourseTocDbPersister.Default.getInstance().persist(panLink);
}
}
}
}
    // Retrieves the list of role IDs mapped to the "ta", "instructor", or "none" role in the
    // block settings. If a role name with no configured mappings is passed, an empty list
    // will be returned.
private static List<String> getIdsForRole(String rolename) {
Utils.pluginSettings = new Settings();
List<String> roleIds = new ArrayList<String>();
String roleMappingsString = Utils.pluginSettings.getRoleMappingString();
String[] roleMappingsSplit = roleMappingsString.split(";");
for (String mappingString : roleMappingsSplit) {
String[] mappingArray = mappingString.split(":");
if (mappingArray.length == 2) {
                String roleId = mappingArray[0];
                if (mappingArray[1].trim().equalsIgnoreCase(rolename)) {
                    roleIds.add(roleId.trim().toLowerCase());
}
}
}
return roleIds;
}
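    // Illustrative example (the role IDs are hypothetical): with a role mapping setting of
    // "CustomGrader:ta;GuestLecturer:instructor;Observer:none", getIdsForRole("ta") returns
    // ["customgrader"] and getIdsForRole("none") returns ["observer"].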
// Enum types returned by addBlackboardContentItem, indicating whether a
// Panopto link has been successfully added to
// a course.
public static enum LinkAddedResult {
SUCCESS, // Link was added successfully.
NOTCREATOR, // Link was not added because the session is not available
// and the user does not have creator access
// in order to make it available.
NOTPUBLISHER, // Link was not added because session requires publisher
// approval.
FAILURE; // Link was not added for an unspecified reason.
}
}
|
import numpy as np
import tensorflow as tf
import keras
from tensorflow.python.ops.parallel_for.gradients import batch_jacobian
from deep_boltzmann.networks import IndexLayer
def log_det_jacobian(outputs, inputs):
    """ Computes log|det(J)| of the batch Jacobian d(outputs)/d(inputs) from its singular values. """
    J = batch_jacobian(outputs, inputs, use_pfor=False)
    s = tf.svd(J, compute_uv=False)
    s = tf.abs(s) + 1e-6  # regularize small singular values away from zero
    return tf.reduce_sum(tf.log(s), axis=1, keepdims=True)
def pca(X0, keepdims=None):
if keepdims is None:
keepdims = X0.shape[1]
# pca
X0mean = X0.mean(axis=0)
X0meanfree = X0 - X0mean
C = np.dot(X0meanfree.T, X0meanfree) / (X0meanfree.shape[0] - 1.0)
eigval, eigvec = np.linalg.eigh(C)
# sort in descending order and keep only the wanted eigenpairs
I = np.argsort(eigval)[::-1]
I = I[:keepdims]
eigval = eigval[I]
std = np.sqrt(eigval)
eigvec = eigvec[:, I]
# whiten and unwhiten matrices
X0mean = tf.constant(X0mean)
Twhiten = tf.constant(eigvec.dot(np.diag(1.0 / std)))
Tblacken = tf.constant(np.diag(std).dot(eigvec.T))
return X0mean, Twhiten, Tblacken, std
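# Illustrative sketch (hypothetical data, not part of the original module): whitening a random
# data set with the helper above.
#   X0 = np.random.randn(1000, 10)
#   X0mean, Twhiten, Tblacken, std = pca(X0, keepdims=8)
#   z = tf.matmul(tf.constant(X0) - X0mean, Twhiten)  # whitened, decorrelated coordinates
#   x = tf.matmul(z, Tblacken) + X0mean               # back-transformed, up to the dropped dimensions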
class FixedWhiten(object):
    def __init__(self, X0, keepdims=None):
        """ Fixed whitening transformation computed from a PCA of the initial data.

        Parameters
        ----------
        X0 : array
            Initial data on which the PCA will be computed.
        keepdims : int or None
            Number of dimensions to keep. By default, all dimensions will be kept.
        """
if keepdims is None:
keepdims = X0.shape[1]
self.dim = X0.shape[1]
self.keepdims = keepdims
self.X0mean, self.Twhiten, self.Tblacken, self.std = pca(X0, keepdims=keepdims)
if np.any(self.std <= 0):
raise ValueError('Cannot construct whiten layer because trying to keep nonpositive eigenvalues.')
self.jacobian_xz = -np.sum(np.log(self.std))
@classmethod
def from_dict(cls, D):
dim = D['dim']
keepdims = D['keepdims']
X0mean = D['X0mean']
Twhiten = D['Twhiten']
Tblacken = D['Tblacken']
std = D['std']
c = cls(np.random.randn(2 * dim, dim), keepdims=keepdims)
c.keepdims = keepdims
c.X0mean = tf.constant(X0mean)
c.Twhiten = tf.constant(Twhiten)
c.Tblacken = tf.constant(Tblacken)
c.std = std
return c
def to_dict(self):
D = {}
D['dim'] = self.dim
D['keepdims'] = self.keepdims
D['X0mean'] = keras.backend.eval(self.X0mean)
D['Twhiten'] = keras.backend.eval(self.Twhiten)
D['Tblacken'] = keras.backend.eval(self.Tblacken)
D['std'] = self.std
return D
def connect_xz(self, x):
# Whiten
self.output_z = keras.layers.Lambda(lambda x: tf.matmul(x - self.X0mean, self.Twhiten))(x)
if self.keepdims < self.dim:
junk_dims = self.dim - self.keepdims
self.output_z = keras.layers.Lambda(lambda z: tf.concat([z, tf.random_normal([tf.shape(z)[0], junk_dims], stddev=1.0)], 1))(self.output_z)
# Jacobian
self.log_det_xz = keras.layers.Lambda(lambda x: self.jacobian_xz * keras.backend.ones((tf.shape(x)[0], 1)))(x)
return self.output_z
def connect_zx(self, z):
# if we have reduced the dimension, we ignore the last dimensions from the z-direction.
if self.keepdims < self.dim:
z = IndexLayer(np.arange(0, self.keepdims))(z)
self.output_x = keras.layers.Lambda(lambda z: tf.matmul(z, self.Tblacken) + self.X0mean)(z)
# Jacobian
self.log_det_zx = keras.layers.Lambda(lambda z: -self.jacobian_xz * keras.backend.ones((tf.shape(z)[0], 1)))(z)
return self.output_x
@property
def log_det_Jxz(self):
""" Log of |det(dz/dx)| for the current batch. Format is batchsize x 1 or a number """
return self.log_det_xz
@property
def log_det_Jzx(self):
""" Log of |det(dx/dz)| for the current batch. Format is batchsize x 1 or a number """
return self.log_det_zx
def xyz2ic_np(x, Z_indices, torsion_cut=None):
    """ Computes internal coordinates from Cartesian coordinates.

    Parameters
    ----------
    x : array
        Cartesian coordinates
    Z_indices : array
        Internal coordinate index definition. Use -1 to switch off internal coordinates
        when the coordinate system is not fixed.
    torsion_cut : None or array
        If given, defines at which angle to cut the torsions.
    """
from deep_boltzmann.models.MM import dist, angle, torsion
global_ic = (Z_indices.min() < 0)
if global_ic:
bond_indices = Z_indices[1:, :2]
angle_indices = Z_indices[2:, :3]
torsion_indices = Z_indices[3:, :4]
else:
bond_indices = Z_indices[:, :2]
angle_indices = Z_indices[:, :3]
torsion_indices = Z_indices[:, :4]
atom_indices = np.arange(int(3*(np.max(Z_indices)+1))).reshape((-1, 3))
xbonds = dist(x[:, atom_indices[bond_indices[:, 0]]],
x[:, atom_indices[bond_indices[:, 1]]])
xangles = angle(x[:, atom_indices[angle_indices[:, 0]]],
x[:, atom_indices[angle_indices[:, 1]]],
x[:, atom_indices[angle_indices[:, 2]]])
xtorsions = torsion(x[:, atom_indices[torsion_indices[:, 0]]],
x[:, atom_indices[torsion_indices[:, 1]]],
x[:, atom_indices[torsion_indices[:, 2]]],
x[:, atom_indices[torsion_indices[:, 3]]])
if torsion_cut is not None:
xtorsions = np.where(xtorsions < torsion_cut, xtorsions+360, xtorsions)
# Order ic's by atom
if global_ic:
iclist = [xbonds[:, 0:2], xangles[:, 0:1]]
for i in range(Z_indices.shape[0]-3):
iclist += [xbonds[:, i+2:i+3], xangles[:, i+1:i+2], xtorsions[:, i:i+1]]
else:
iclist = []
for i in range(Z_indices.shape[0]):
iclist += [xbonds[:, i:i+1], xangles[:, i:i+1], xtorsions[:, i:i+1]]
ics = np.concatenate(iclist, axis=-1)
return ics
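# Note on the Z_indices layout, as the indexing above suggests: each row describes one atom as
# [atom, bond_partner, angle_partner, torsion_partner], i.e. the atom is placed by a distance to
# bond_partner, an angle with angle_partner and a torsion with torsion_partner. A negative entry
# marks the global case, where the first three atoms fix the coordinate frame and need fewer
# internal coordinates.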
def xyz2ic_tf(x, Z_indices, torsion_cut=None):
    """ Computes internal coordinates from Cartesian coordinates.

    Parameters
    ----------
    x : array
        Cartesian coordinates
    Z_indices : array
        Internal coordinate index definition. Use -1 to switch off internal coordinates
        when the coordinate system is not fixed.
    torsion_cut : None or array
        If given, defines at which angle to cut the torsions.
    """
from deep_boltzmann.models.MM import dist_tf, angle_tf, torsion_tf
global_ic = (Z_indices.min() < 0)
if global_ic:
bond_indices = Z_indices[1:, :2]
angle_indices = Z_indices[2:, :3]
torsion_indices = Z_indices[3:, :4]
else:
bond_indices = Z_indices[:, :2]
angle_indices = Z_indices[:, :3]
torsion_indices = Z_indices[:, :4]
atom_indices = np.arange(int(3*(np.max(Z_indices)+1))).reshape((-1, 3))
xbonds = dist_tf(tf.gather(x, atom_indices[bond_indices[:, 0]], axis=1),
tf.gather(x, atom_indices[bond_indices[:, 1]], axis=1))
xangles = angle_tf(tf.gather(x, atom_indices[angle_indices[:, 0]], axis=1),
tf.gather(x, atom_indices[angle_indices[:, 1]], axis=1),
tf.gather(x, atom_indices[angle_indices[:, 2]], axis=1))
xtorsions = torsion_tf(tf.gather(x, atom_indices[torsion_indices[:, 0]], axis=1),
tf.gather(x, atom_indices[torsion_indices[:, 1]], axis=1),
tf.gather(x, atom_indices[torsion_indices[:, 2]], axis=1),
tf.gather(x, atom_indices[torsion_indices[:, 3]], axis=1))
if torsion_cut is not None:
xtorsions = tf.where(xtorsions < torsion_cut, xtorsions+360, xtorsions)
# Order ic's by atom
if global_ic:
iclist = [xbonds[:, 0:2], xangles[:, 0:1]]
for i in range(Z_indices.shape[0]-3):
iclist += [xbonds[:, i+2:i+3], xangles[:, i+1:i+2], xtorsions[:, i:i+1]]
else:
iclist = []
for i in range(Z_indices.shape[0]):
iclist += [xbonds[:, i:i+1], xangles[:, i:i+1], xtorsions[:, i:i+1]]
ics = tf.concat(iclist, axis=-1)
return ics
def bestcut(torsion):
cuts = np.linspace(-180, 180, 37)[:-1]
stds = []
for cut in cuts:
torsion_cut = np.where(torsion < cut, torsion+360, torsion)
stds.append(np.std(torsion_cut))
stds = np.array(stds)
stdmin = stds.min()
minindices = np.where(stds == stdmin)[0]
return cuts[minindices[int(0.5*minindices.size)]]
def icmoments(Z_indices, X0=None, torsion_cut=None):
global_ic = (Z_indices.min() < 0)
if global_ic:
dim = 3*Z_indices.shape[0] - 6
ntorsions = Z_indices.shape[0]-3
else:
dim = 3*Z_indices.shape[0]
ntorsions = Z_indices.shape[0]
if X0 is not None:
ics = xyz2ic_np(X0, Z_indices)
if global_ic:
torsions = ics[:, 5::3]
else:
torsions = ics[:, 2::3]
if torsion_cut is None:
torsion_cut = np.array([bestcut(torsions[:, i]) for i in range(ntorsions)])
# apply torsion cut
torsion_cut_row = np.array([torsion_cut])
torsions = np.where(torsions < torsion_cut_row, torsions+360, torsions)
# write torsions back to ics
if global_ic:
ics[:, 5::3] = torsions
else:
ics[:, 2::3] = torsions
means = np.mean(ics, axis=0)
stds = np.sqrt(np.mean((ics-means) ** 2, axis=0))
else:
torsion_cut = -180 * np.ones((1, ntorsions))
means = np.zeros((1, dim-6))
stds = np.ones((1, dim-6))
return means, stds, torsion_cut
def ic2xyz(p1, p2, p3, d14, a124, t1234):
# convert angles to radians
a124 = a124 * np.pi/180.0
t1234 = t1234 * np.pi/180.0
v1 = p1 - p2
v2 = p1 - p3
n = tf.cross(v1, v2)
nn = tf.cross(v1, n)
n /= tf.norm(n, axis=1, keepdims=True)
nn /= tf.norm(nn, axis=1, keepdims=True)
n *= -tf.sin(t1234)
nn *= tf.cos(t1234)
v3 = n + nn
v3 /= tf.norm(v3, axis=1, keepdims=True)
v3 *= d14 * tf.sin(a124)
v1 /= tf.norm(v1, axis=1, keepdims=True)
v1 *= d14 * tf.cos(a124)
position = p1 + v3 - v1
return position
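# Note (inferred from the geometry above): ic2xyz places a new atom at distance d14 from p1,
# forming the bond angle a124 with the direction p1->p2 and the dihedral t1234 relative to the
# plane spanned by p1, p2 and p3. Both angles are expected in degrees and are converted to
# radians internally.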
def ic2xy0(p1, p2, d14, a124):
#t1234 = tf.Variable(np.array([[90.0 * np.pi / 180.0]], dtype=np.float32))
t1234 = tf.Variable(np.array([[90.0]], dtype=np.float32))
p3 = tf.Variable(np.array([[0, 1, 0]], dtype=np.float32))
return ic2xyz(p1, p2, p3, d14, a124, t1234)
def ics2xyz_global(ics, Z_indices):
""" For systems exclusively described in internal coordinates: convert global Z matrix to Cartesian """
batchsize = tf.shape(ics)[0]
index2zorder = np.argsort(Z_indices[:, 0])
# Fix coordinate system by placing first three atoms
xyz = []
# first atom at 0,0,0
xyz.append(tf.zeros((batchsize, 3)))
# second atom at 0,0,d
xyz.append(tf.concat([tf.zeros((batchsize, 2)), ics[:, 0:1]], axis=-1))
# third atom at x,0,z
xyz.append(ic2xy0(xyz[index2zorder[Z_indices[2, 1]]],
xyz[index2zorder[Z_indices[2, 2]]],
ics[:, 1:2], ics[:, 2:3]))
# fill in the rest
ics2xyz_local(ics[:, 3:], Z_indices[3:], index2zorder, xyz)
# reorganize indexes
xyz = [xyz[i] for i in index2zorder]
return tf.concat(xyz, axis=1)
def ics2xyz_local(ics, Z_indices, index2zorder, xyz):
    """ Places atoms from internal coordinates, given reference atoms that have already been placed in xyz.

    Parameters
    ----------
    ics : array (batchsize x dim)
        IC matrix flattened by atom to place (bond1, angle1, torsion1, bond2, angle2, torsion2, ...)
    """
for i in range(Z_indices.shape[0]):
xyz.append(ic2xyz(xyz[index2zorder[Z_indices[i, 1]]],
xyz[index2zorder[Z_indices[i, 2]]],
xyz[index2zorder[Z_indices[i, 3]]],
ics[:, 3*i:3*i+1], ics[:, 3*i+1:3*i+2], ics[:, 3*i+2:3*i+3]))
def ics2xyz_local_log_det_jac(ics, Z_indices, index2zorder, xyz):
batchsize = tf.shape(ics)[0]
log_det_jac = tf.zeros((batchsize,))
for i in range(Z_indices.shape[0]):
args = ics[:, 3*i:3*i+3]
xyz.append(ic2xyz(xyz[index2zorder[Z_indices[i, 1]]],
xyz[index2zorder[Z_indices[i, 2]]],
xyz[index2zorder[Z_indices[i, 3]]],
args[:, 0:1], args[:, 1:2], args[:, 2:3]))
log_det_jac += tf.linalg.slogdet(batch_jacobian(xyz[-1], args))[-1]
return log_det_jac
def log_det_jac_lists(ys, xs):
from tensorflow.python.ops import gradients as gradient_ops
batch_dim = xs[0].shape[0]
output_dim = ys[0].shape[-1]
jacs = []
for y, x in zip(ys, xs):
cols = []
for i in range(output_dim):
cols.append(gradient_ops.gradients(y[:, i], x)[0])
jac = tf.stack(cols, axis=-1)
jacs.append(jac)
log_det = tf.linalg.slogdet(jacs)[-1]
log_det = tf.reduce_sum(log_det, axis=0)
return log_det
def ics2xyz_local_log_det_jac_lists(ics, Z_indices, index2zorder, xyz):
batchsize = tf.shape(ics)[0]
log_det_jac = tf.zeros((batchsize,))
all_args = []
all_outputs = []
for i in range(Z_indices.shape[0]):
all_args.append(tf.concat([
ics[:, 3*i:3*i+1],
ics[:, 3*i+1:3*i+2],
ics[:, 3*i+2:3*i+3]
], axis=-1))
xyz.append(ic2xyz(xyz[index2zorder[Z_indices[i, 1]]],
xyz[index2zorder[Z_indices[i, 2]]],
xyz[index2zorder[Z_indices[i, 3]]],
all_args[-1][..., 0:1], all_args[-1][..., 1:2], all_args[-1][..., 2:3]))
all_outputs.append(xyz[-1])
log_det_jac = log_det_jac_lists(all_outputs, all_args)
return log_det_jac
def decompose_Z_indices(cart_indices, Z_indices):
known_indices = cart_indices
Z_placed = np.zeros(Z_indices.shape[0])
Z_indices_decomposed = []
while np.count_nonzero(Z_placed) < Z_indices.shape[0]:
Z_indices_cur = []
for i in range(Z_indices.shape[0]):
if not Z_placed[i] and np.all([Z_indices[i, j] in known_indices for j in range(1, 4)]):
Z_indices_cur.append(Z_indices[i])
Z_placed[i] = 1
Z_indices_cur = np.array(Z_indices_cur)
known_indices = np.concatenate([known_indices, Z_indices_cur[:, 0]])
Z_indices_decomposed.append(Z_indices_cur)
index2order = np.concatenate([cart_indices] + [Z[:, 0] for Z in Z_indices_decomposed])
return Z_indices_decomposed, index2order
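# Illustrative example (hypothetical indices): with cart_indices = [0, 1, 2] and Z_indices rows
# [3, 0, 1, 2] and [4, 3, 1, 0], the first pass places atom 3 (all of its references are already
# Cartesian) and the second pass places atom 4 (which references atom 3), giving
# Z_indices_decomposed = [[[3, 0, 1, 2]], [[4, 3, 1, 0]]] and index2order = [0, 1, 2, 3, 4].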
def ics2xyz_local_log_det_jac_batchexpand(ics, Z_indices, index2zorder, xyz, eps=1e-10):
    """ Places a batch of atoms from internal coordinates and also returns the log-determinant
    of the placement Jacobian. The atoms to place are folded into the batch dimension so that
    a single batch_jacobian call covers all of them.

    Parameters
    ----------
    ics : array (batchsize x dim)
        IC matrix flattened by atom to place (bond1, angle1, torsion1, bond2, angle2, torsion2, ...)
    """
batchsize = tf.shape(ics)[0]
natoms_to_place = Z_indices.shape[0]
# reshape atoms into the batch
p1s = tf.reshape(tf.gather(xyz, index2zorder[Z_indices[:, 1]], axis=1), (batchsize*natoms_to_place, 3))
p2s = tf.reshape(tf.gather(xyz, index2zorder[Z_indices[:, 2]], axis=1), (batchsize*natoms_to_place, 3))
p3s = tf.reshape(tf.gather(xyz, index2zorder[Z_indices[:, 3]], axis=1), (batchsize*natoms_to_place, 3))
ics_ = tf.reshape(ics, (batchsize*natoms_to_place, 3))
# operation to differentiate: compute new xyz's given distances, angles, torsions
newpos_batchexpand = ic2xyz(p1s, p2s, p3s, ics_[:, 0:1], ics_[:, 1:2], ics_[:, 2:3])
newpos = tf.reshape(newpos_batchexpand, (batchsize, natoms_to_place, 3))
# compute derivatives
log_det_jac_batchexpand = tf.linalg.slogdet(batch_jacobian(newpos_batchexpand, ics_))[-1]
# reshape atoms again out of batch and sum over the log det jacobians
log_det_jac = tf.reshape(log_det_jac_batchexpand, (batchsize, natoms_to_place))
log_det_jac = tf.reduce_sum(log_det_jac, axis=1)
return newpos, log_det_jac
def ics2xyz_local_log_det_jac_decomposed(all_ics, all_Z_indices, cartesian_xyz, index2order, eps=1e-10):
"""
Parameters
----------
all_ics : Tensor (batchsize, 3*nICatoms)
Tensor with all internal coordinates to be placed, in the order as they are placed in all_Z_indices
all_Z_indices : list of Z index arrays.
All atoms in one array are placed independently given the atoms that have been placed before
cartesian_xyz : Tensor (batchsize, nCartAtoms, 3)
Start here with the positions of all Cartesian atoms
index2order : array
map from atom index to the order of placement. The order of placement is first all Cartesian atoms
and then in the order of np.vstack(all_Z_indices)[:, 0]
"""
batchsize = tf.shape(all_ics)[0]
log_det_jac_tot = tf.zeros((batchsize), )
xyz = cartesian_xyz
istart = 0
for Z_indices in all_Z_indices:
ics = all_ics[:, 3*istart:3*(istart+Z_indices.shape[0])]
newpos, log_det_jac = ics2xyz_local_log_det_jac_batchexpand(ics, Z_indices, index2order, xyz, eps=eps)
xyz = tf.concat([xyz, newpos], axis=1)
log_det_jac_tot += log_det_jac
istart += Z_indices.shape[0]
return xyz, log_det_jac_tot
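# Hypothetical usage sketch (shapes only, names are illustrative):
#   all_Z_indices = decompose_Z_indices(cart_atom_indices, Z_indices)[0]
#   index2order = np.argsort(np.concatenate([cart_atom_indices, np.vstack(all_Z_indices)[:, 0]]))
#   xyz, log_det_jac = ics2xyz_local_log_det_jac_decomposed(
#       all_ics,            # (batchsize, 3 * n_ic_atoms), ordered as in np.vstack(all_Z_indices)
#       all_Z_indices,
#       cartesian_xyz,      # (batchsize, n_cart_atoms, 3), already placed Cartesian atoms
#       index2order)
# xyz is returned in placement order; gather with index2order along axis 1 to restore atom order.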
def ics2xyz_global_log_det_jac(ics, Z_indices, global_transform=True):
batchsize = tf.shape(ics)[0]
index2zorder = np.argsort(Z_indices[:, 0])
xyz = []
log_det_jac = tf.zeros((batchsize,))
if global_transform:
# first atom at 0,0,0
xyz.append(tf.zeros((batchsize, 3)))
# second atom at 0,0,d
args = tf.reshape(ics[:, 0:1], (batchsize, 1))
xyz.append(tf.concat([tf.zeros((batchsize, 2)), args], axis=-1))
z = xyz[-1][:, -1:]
log_det_jac += tf.linalg.slogdet(batch_jacobian(z, args))[-1]
# third atom at x,0,z
args = tf.concat([ics[:, 1:2], ics[:, 2:3]], axis=-1)
xyz.append(ic2xy0(xyz[index2zorder[Z_indices[2, 1]]],
xyz[index2zorder[Z_indices[2, 2]]],
args[..., 0:1], args[..., 1:2]))
xz = tf.stack([xyz[-1][:, 0], xyz[-1][:, 2]], axis=-1)
# + 1e-6*tf.eye(2, num_columns=2, batch_shape=(1,)
log_det_jac += tf.linalg.slogdet(batch_jacobian(xz, args))[-1]
# other atoms
log_det_jac += ics2xyz_local_log_det_jac(
ics[:, 3:], Z_indices[3:], index2zorder, xyz)
return log_det_jac
def xyz2ic_log_det_jac(x, Z_indices, eps=1e-10):
from deep_boltzmann.models.MM import dist_tf, angle_tf, torsion_tf
batchsize = tf.shape(x)[0]
atom_indices = np.arange(int(3*(np.max(Z_indices)+1))).reshape((-1, 3))
log_det_jac = tf.zeros((batchsize,))
global_transform = (Z_indices.min() < 0)
if global_transform:
start_rest = 3 # remaining atoms start in row 3
# 1. bond (input: z axis)
reference_atom = tf.gather(x, atom_indices[Z_indices[1, 0]], axis=1)
other_atom = tf.gather(x, atom_indices[Z_indices[1, 1]], axis=1)
x_ = reference_atom[:, 0]
y_ = reference_atom[:, 1]
z_ = reference_atom[:, 2]
arg = tf.expand_dims(z_, axis=1)
reference_atom = tf.stack([x_, y_, arg[:, 0]], axis=-1)
reference_atom = tf.expand_dims(reference_atom, axis=1)
other_atom = tf.expand_dims(other_atom, axis=1)
bond = dist_tf(
reference_atom,
other_atom
)
out = bond
jac = batch_jacobian(out, arg) + eps * tf.eye(3, batch_shape=(1,))
log_det_jac += tf.linalg.slogdet(jac)[-1]
# 2. bond/angle (input: x/z axes)
reference_atom = tf.gather(x, atom_indices[Z_indices[2, 0]], axis=1)
other_atom_1 = tf.gather(x, atom_indices[Z_indices[2, 1]], axis=1)
other_atom_2 = tf.gather(x, atom_indices[Z_indices[2, 2]], axis=1)
x_ = reference_atom[:, 0]
y_ = reference_atom[:, 1]
z_ = reference_atom[:, 2]
arg = tf.stack([x_, z_], axis=-1)
reference_atom = tf.stack([arg[:, 0], y_, arg[:, 1]], axis=-1)
reference_atom = tf.expand_dims(reference_atom, axis=1)
other_atom_1 = tf.expand_dims(other_atom_1, axis=1)
other_atom_2 = tf.expand_dims(other_atom_2, axis=1)
bond = dist_tf(
reference_atom,
other_atom_1
)
angle = angle_tf(
reference_atom,
other_atom_1,
other_atom_2
)
out = tf.stack([bond, angle], axis=-1)
jac = batch_jacobian(out, arg) + eps * tf.eye(3, batch_shape=(1,))
log_det_jac_ = tf.linalg.slogdet(jac)[-1]
log_det_jac_ = tf.reshape(log_det_jac_, [batchsize, -1])
log_det_jac_ = tf.reduce_sum(log_det_jac_, axis=-1)
log_det_jac += log_det_jac_
else:
start_rest = 0 # remaining atoms start now
# 3. everything together
reference_atoms = tf.gather(x, atom_indices[Z_indices[start_rest:, 0]], axis=1)
other_atoms_1 = tf.gather(x, atom_indices[Z_indices[start_rest:, 1]], axis=1)
other_atoms_2 = tf.gather(x, atom_indices[Z_indices[start_rest:, 2]], axis=1)
other_atoms_3 = tf.gather(x, atom_indices[Z_indices[start_rest:, 3]], axis=1)
arg = tf.reshape(reference_atoms, [-1, 3])
reference_atoms = tf.reshape(arg, [batchsize, -1, 3])
bond = dist_tf(
reference_atoms,
other_atoms_1
)
angle = angle_tf(
reference_atoms,
other_atoms_1,
other_atoms_2
)
torsion = torsion_tf(
reference_atoms,
other_atoms_1,
other_atoms_2,
other_atoms_3
)
out = tf.stack([bond, angle, torsion], axis=-1)
out = tf.reshape(out, [-1, 3])
jac = batch_jacobian(out, arg, use_pfor=False) # + eps * tf.eye(3, batch_shape=(1,)
log_det_jac_ = tf.linalg.slogdet(jac)[-1]
log_det_jac_ = tf.reshape(log_det_jac_, [batchsize, -1])
log_det_jac_ = tf.reduce_sum(log_det_jac_, axis=-1)
log_det_jac += log_det_jac_
return log_det_jac
class InternalCoordinatesTransformation(object):
""" Conversion between internal and Cartesian coordinates """
def __init__(self, Z_indices, Xnorm=None, torsion_cut=None):
self.dim = Z_indices.shape[0] * 3
self.Z_indices = Z_indices
# Compute IC moments for normalization
self.ic_means, self.ic_stds, self.torsion_cut = icmoments(Z_indices, X0=Xnorm, torsion_cut=torsion_cut)
@classmethod
def from_dict(cls, D):
ic_means = D['ic_means']
ic_stds = D['ic_stds']
torsion_cut = D['torsion_cut']
Z_indices = D['Z_indices']
c = cls(Z_indices)
c.ic_means = ic_means
c.ic_stds = ic_stds
c.torsion_cut = torsion_cut
return c
def to_dict(self):
D = {}
D['ic_means'] = self.ic_means
D['ic_stds'] = self.ic_stds
D['torsion_cut'] = self.torsion_cut
D['Z_indices'] = self.Z_indices
return D
def x2z(self, x):
# compute and normalize internal coordinates
z_ics = xyz2ic_tf(x, self.Z_indices, torsion_cut=self.torsion_cut)
z_ics_norm = (z_ics - self.ic_means) / self.ic_stds
return z_ics_norm
def z2x(self, z):
# split off Z block
z_ics_unnorm = z * self.ic_stds + self.ic_means
# reconstruct remaining atoms using ICs
x = ics2xyz_global(z_ics_unnorm, self.Z_indices)
return x
def x2z_jacobian(self, x):
log_det_jac = xyz2ic_log_det_jac(x, self.Z_indices)
log_det_jac = tf.reshape(log_det_jac, (-1, 1))
# log_det_jac -= tf.reduce_sum(tf.log(self.ic_stds))
return log_det_jac
def z2x_jacobian(self, z):
log_det_jac = ics2xyz_global_log_det_jac(z, self.Z_indices)
log_det_jac = tf.reshape(log_det_jac, (-1, 1))
# log_det_jac += tf.reduce_sum(tf.log(self.ic_stds))
return log_det_jac
def connect_xz(self, x):
self.input_x = x
self.output_z_only = keras.layers.Lambda(lambda x: self.x2z(x))(x)
junk_dims = 6
self.output_z = keras.layers.Lambda(
lambda z: tf.concat([z, 0. * tf.random_normal([tf.shape(z)[0], junk_dims], stddev=1.)], 1))(self.output_z_only)
# self.log_det_xz = keras.layers.Lambda(lambda x: log_det_jacobian(self.x2z(x), x))(self.input_x)
self.log_det_xz = keras.layers.Lambda(lambda x: self.x2z_jacobian(x))(self.input_x)
return self.output_z
def connect_zx(self, z):
self.input_z = z
z = IndexLayer(np.arange(0, self.dim-6))(z)
self.output_x = keras.layers.Lambda(lambda z: self.z2x(z)[0])(z)
self.angle_loss = keras.layers.Lambda(lambda z: self.z2x(z)[1])(z)
# self.log_det_zx = keras.layers.Lambda(lambda z: log_det_jacobian(self.z2x(z), z))(z)
self.log_det_zx = keras.layers.Lambda(lambda z: self.z2x_jacobian(z))(z)
return self.output_x
@property
def log_det_Jxz(self):
""" Log of |det(dz/dx)| for the current batch. Format is batchsize x 1 or a number """
return self.log_det_xz
@property
def log_det_Jzx(self):
""" Log of |det(dx/dz)| for the current batch. Format is batchsize x 1 or a number """
return self.log_det_zx
class MixedCoordinatesTransformation(InternalCoordinatesTransformation):
""" Conversion between Cartesian coordinates and whitened Cartesian / whitened internal coordinates """
def __init__(self, cart_atom_indices, Z_indices_no_order, X0=None, X0ic=None, remove_dof=6, torsion_cut=None,
jacobian_regularizer=1e-10):
"""
Parameters
----------
mm : energy model
Molecular Model
cart_atom_indices : array
Indices of atoms treated as Cartesian, will be whitened with PCA
ic_atom_indices : list of arrays
Indices of atoms for which internal coordinates will be computed. Each array defines the Z matrix
for that IC group.
X0 : array or None
Initial coordinates to compute whitening transformations on.
remove_dof : int
Number of degrees of freedom to remove from PCA whitening (default is 6 for translation+rotation in 3D)
"""
self.cart_atom_indices = np.array(cart_atom_indices)
self.cart_indices = np.concatenate([[i*3, i*3+1, i*3+2] for i in cart_atom_indices])
self.batchwise_Z_indices, _ = decompose_Z_indices(self.cart_atom_indices, Z_indices_no_order)
self.Z_indices = np.vstack(self.batchwise_Z_indices)
self.dim = 3*(self.cart_atom_indices.size + self.Z_indices.shape[0])
self.atom_order = np.concatenate([cart_atom_indices, self.Z_indices[:, 0]])
self.index2order = np.argsort(self.atom_order)
self.remove_dof = remove_dof
self.jacobian_regularizer = jacobian_regularizer
if X0 is None:
raise ValueError('Need to specify X0')
if X0ic is None:
X0ic = X0
# Compute PCA transformation on initial data
self.cart_X0mean, self.cart_Twhiten, self.cart_Tblacken, self.std = pca(X0[:, self.cart_indices],
keepdims=self.cart_indices.size-remove_dof)
if np.any(self.std <= 0):
raise ValueError('Cannot construct whiten layer because trying to keep nonpositive eigenvalues.')
self.pca_log_det_xz = -np.sum(np.log(self.std))
# Compute IC moments for normalization
self.ic_means, self.ic_stds, self.torsion_cut = icmoments(self.Z_indices, X0=X0ic, torsion_cut=torsion_cut)
@classmethod
def from_dict(cls, D):
ic_means = D['ic_means']
ic_stds = D['ic_stds']
torsion_cut = D['torsion_cut']
cart_atom_indices = D['cart_atom_indices']
cart_X0mean = D['cart_X0mean']
cart_Twhiten = D['cart_Twhiten']
cart_Tblacken = D['cart_Tblacken']
Z_indices = D['Z_indices']
dim = 3 * (cart_atom_indices.size + Z_indices.shape[0])
c = cls(cart_atom_indices, Z_indices, X0=np.random.randn(2*dim, dim))
c.cart_X0mean = tf.constant(cart_X0mean)
c.cart_Twhiten = tf.constant(cart_Twhiten)
c.cart_Tblacken = tf.constant(cart_Tblacken)
c.ic_means = ic_means
c.ic_stds = ic_stds
c.torsion_cut = torsion_cut
# optional
if 'pca_log_det_xz' in D:
pca_log_det_xz = D['pca_log_det_xz']
c.pca_log_det_xz = pca_log_det_xz
else:
print('WARNING: Deprecated BG does not have a PCA log det Jacobian saved. Will set it to 0 and carry on ...')
c.pca_log_det_xz = 0.0
return c
def to_dict(self):
D = {}
D['ic_means'] = self.ic_means
D['ic_stds'] = self.ic_stds
D['torsion_cut'] = self.torsion_cut
D['cart_atom_indices'] = self.cart_atom_indices
D['cart_X0mean'] = keras.backend.eval(self.cart_X0mean)
D['cart_Twhiten'] = keras.backend.eval(self.cart_Twhiten)
D['cart_Tblacken'] = keras.backend.eval(self.cart_Tblacken)
D['pca_log_det_xz'] = self.pca_log_det_xz
D['Z_indices'] = self.Z_indices
return D
def x2z(self, x):
# split off Cartesian coordinates and perform whitening on them
x_cart = tf.gather(x, self.cart_indices, axis=1)
z_cart_signal = tf.matmul(x_cart - self.cart_X0mean, self.cart_Twhiten)
# compute and normalize internal coordinates
z_ics = xyz2ic_tf(x, self.Z_indices, torsion_cut=self.torsion_cut)
z_ics_norm = (z_ics - self.ic_means) / self.ic_stds
# concatenate the output
z = tf.concat([z_cart_signal, z_ics_norm], axis=1)
return z
def z2x(self, z):
# split off Cartesian block and unwhiten it
dim_cart_signal = self.cart_indices.size-self.remove_dof
z_cart_signal = z[:, :dim_cart_signal]
x_cart = tf.matmul(z_cart_signal, self.cart_Tblacken) + self.cart_X0mean
# split by atom
#xyz = [x_cart[:, 3*i:3*(i+1)] for i in range(self.cart_atom_indices.size)]
batchsize = tf.shape(z)[0]
xyz = tf.reshape(x_cart, (batchsize, self.cart_atom_indices.size, 3))
def _angle_loss(angle):
positive_loss = tf.reduce_sum(tf.where(
angle > 180, angle - 180, tf.zeros_like(angle)) ** 2, axis=-1)
negative_loss = tf.reduce_sum(tf.where(
angle < -180, angle + 180, tf.zeros_like(angle)) ** 2, axis=-1)
return positive_loss + negative_loss
# split off Z block
z_ics_norm = z[:, dim_cart_signal:self.dim-self.remove_dof]
z_ics = z_ics_norm * self.ic_stds + self.ic_means
n_internal = self.dim - dim_cart_signal - self.remove_dof
angle_idxs = np.arange(n_internal // 3) * 3 + 1
torsion_idxs = np.arange(n_internal // 3) * 3 + 2
angles = tf.gather(z_ics, angle_idxs, axis=-1)
angle_loss = _angle_loss(angles)
torsions = tf.gather(z_ics, torsion_idxs, axis=-1)
torsions -= 180 + self.torsion_cut
angle_loss += _angle_loss(torsions)
# reconstruct remaining atoms using ICs
#ics2xyz_local(z_ics, self.Z_indices, self.index2order, xyz)
xyz, _ = ics2xyz_local_log_det_jac_decomposed(z_ics, self.batchwise_Z_indices, xyz, self.index2order)
# reorder and concatenate all atom coordinates
x = tf.reshape(tf.gather(xyz, self.index2order, axis=1), (batchsize, -1))
#xyz = [xyz[i] for i in self.index2order]
#x = tf.concat(xyz, axis=1)
return x, angle_loss
def x2z_jacobian(self, x):
# IC part
log_det_jac = xyz2ic_log_det_jac(x, self.Z_indices, eps=self.jacobian_regularizer)
# Add PCA part
log_det_jac += self.pca_log_det_xz
# reshape to (batchsize, 1)
log_det_jac = tf.reshape(log_det_jac, (-1, 1))
return log_det_jac
def z2x_jacobian(self, z):
# split off Cartesian block and unwhiten it
dim_cart_signal = self.cart_indices.size-self.remove_dof
z_cart_signal = z[:, :dim_cart_signal]
x_cart = tf.matmul(z_cart_signal, self.cart_Tblacken) + self.cart_X0mean
# split by atom
#xyz = [x_cart[:, 3*i:3*(i+1)] for i in range(self.cart_atom_indices.size)]
batchsize = tf.shape(z)[0]
xyz = tf.reshape(x_cart, (batchsize, self.cart_atom_indices.size, 3))
# split off Z block
z_ics_norm = z[:, dim_cart_signal:self.dim-self.remove_dof]
z_ics = z_ics_norm * self.ic_stds + self.ic_means
#log_det_jac = ics2xyz_local_log_det_jac(z_ics, self.Z_indices, self.index2order, xyz)
_, log_det_jac = ics2xyz_local_log_det_jac_decomposed(z_ics, self.batchwise_Z_indices, xyz, self.index2order, eps=self.jacobian_regularizer)
# Add PCA part
log_det_jac -= self.pca_log_det_xz
# reshape to (batchsize, 1)
log_det_jac = tf.reshape(log_det_jac, (-1, 1))
return log_det_jac
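# Hypothetical usage sketch (variable names and shapes are illustrative, not from the original code base):
#   mixed = MixedCoordinatesTransformation(cart_atom_indices, Z_indices_no_order, X0=X0)
#   z = mixed.x2z(x_batch)                 # whitened Cartesian signal + normalized internal coordinates
#   x_rec, angle_loss = mixed.z2x(z)       # inverse transform and out-of-range angle penalty
#   log_det_xz = mixed.x2z_jacobian(x_batch)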
|
import { Sequelize, DataTypes, Model } from 'sequelize';
import {
UbiPromoterSocialMedia,
UbiPromoterSocialMediaCreation,
} from '../../../interfaces/ubi/ubiPromoterSocialMedia';
export class UbiPromoterSocialMediaModel extends Model<
UbiPromoterSocialMedia,
UbiPromoterSocialMediaCreation
> {
public id!: number;
    public promoterId!: number;
public mediaType!: string;
public url!: string;
}
export function initializeUbiPromoterSocialMedia(sequelize: Sequelize): void {
UbiPromoterSocialMediaModel.init(
{
id: {
type: DataTypes.INTEGER,
autoIncrement: true,
primaryKey: true,
},
promoterId: {
type: DataTypes.INTEGER,
references: {
model: 'ubi_promoter',
key: 'id',
},
onDelete: 'CASCADE',
allowNull: false,
},
mediaType: {
type: DataTypes.STRING(32),
allowNull: true,
},
url: {
type: DataTypes.STRING(128),
allowNull: true,
},
},
{
tableName: 'ubi_promoter_social_media',
timestamps: false,
sequelize,
}
);
}
|
import re
import time
import json
import os.path
from random import randint
from datetime import datetime
# from InstagramAPI import InstagramAPI
INSTAGRAM_DEFAULT_PROFIL_PIC_URL = "https://scontent-maa2-1.cdninstagram.com/v/t51.2885-19/44884218_345707102882519_2446069589734326272_n.jpg?_nc_ht=scontent-maa2-1.cdninstagram.com&_nc_ohc=OU-5fMy1ffUAX-o-6ty&oh=9438c1dcdb6c5d4150e5f396a64eeadb&oe=5F7A818F&ig_cache_key=<KEY>"
def get_future_time_string(seconds_from_now):
now_in_seconds = time.mktime(time.localtime())
future_in_seconds = now_in_seconds + seconds_from_now
future_in_struct = time.localtime(future_in_seconds)
future_in_string = (
("%02d" % future_in_struct[2])
+ "."
+ ("%02d" % future_in_struct[1])
+ "."
+ ("%04d" % future_in_struct[0])
+ " | "
+ ("%02d" % future_in_struct[3])
+ ":"
+ ("%02d" % future_in_struct[4])
)
future_in_string_detailed = (
("%02d" % future_in_struct[2])
+ "."
+ ("%02d" % future_in_struct[1])
+ "."
+ ("%04d" % future_in_struct[0])
+ " | "
+ ("%02d" % future_in_struct[3])
+ ":"
+ ("%02d" % future_in_struct[4])
+ ":"
+ ("%02d" % future_in_struct[5])
)
return future_in_string, future_in_string_detailed
def delay(min_delay, max_delay, display_text):
seconds_from_now = randint(min_delay, max_delay)
futureTimeString, futureTimeStringDetailed = get_future_time_string(
seconds_from_now
)
print(futureTimeStringDetailed + " : " + display_text)
if seconds_from_now < 5:
time.sleep(seconds_from_now)
else:
for ind in range(seconds_from_now):
time.sleep(1)
def get_userid_from_username(igapi, username):
success = igapi.searchUsername(username)
if success:
userid = igapi.LastJson["user"]["pk"]
return userid
else:
print("User doesnt exist: " + username)
return False
def get_all_followings(igapi, target_userid):
"""
Returns a list of {"username": "", "userid": 0, "is_private": false, "full_name": ""}
"""
following_list = []
current_followings_list = igapi.getTotalFollowings(target_userid)
for following in current_followings_list:
user = {
"username": following["username"],
"userid": following["pk"],
"is_private": following["is_private"],
"full_name": following["full_name"],
}
following_list.append(user)
return following_list
def get_all_followers(igapi, target_userid):
"""
Returns a list of {"username": "", "userid": 0, "is_private": false, "full_name": ""}
"""
follower_list = []
current_followers_list = igapi.getTotalFollowers(target_userid)
for follower in current_followers_list:
user = {
"username": follower["username"],
"userid": follower["pk"],
"is_private": follower["is_private"],
"full_name": follower["full_name"],
}
follower_list.append(user)
return follower_list
def get_following_liked_medias(igapi, following_list, target_username):
"""
each element in following_list should have the form: {"username": "", "userid": 0, "is_private": false, "full_name": ""}
Returns {"full": full_liked_media_list, "clean": clean_liked_media_list}
"""
full_liked_media_list = []
clean_liked_media_list = []
for following in following_list:
if following["is_private"] is False:
try:
delay(5, 10, "will scrap user: " + following["username"])
igapi.getUserFeed(following["userid"])
items = igapi.LastJson["items"]
for item in items:
if target_username in item["top_likers"]:
full_liked_media_list.append(item)
media_url_list = []
try:
carousel_media_list = item["carousel_media"]
for carousel_media in carousel_media_list:
media_url = carousel_media["image_versions2"][
"candidates"
][0]["url"]
media_url_list.append(media_url)
                        except Exception:  # not a carousel post; fall back to the single image
media_url = item["image_versions2"]["candidates"][1]["url"]
media_url_list.append(media_url)
clean_media = {
"media_id": item["pk"],
"username": item["user"]["username"],
"user_id": item["user"]["pk"],
"full_name": item["user"]["full_name"],
"profile_pic_url": item["user"]["profile_pic_url"],
"timestamp": item["taken_at"],
"date": datetime.fromtimestamp(item["taken_at"]).strftime(
"%d/%m/%Y, %H:%M"
),
"like_count": item["like_count"],
"media_url_list": media_url_list,
"media_url": "https://instagram.com/p/" + item["code"],
}
clean_liked_media_list.append(clean_media)
            except Exception:
                # skip users whose feed could not be fetched or parsed
                pass
return {"full": full_liked_media_list, "clean": clean_liked_media_list}
def get_liked_media_list_from_username(igapi, username):
"""
Returns {"full": full_liked_media_list, "clean": clean_liked_media_list}
"""
userid = get_userid_from_username(igapi, username)
following_list = get_all_followings(igapi, userid)
liked_media_dict = get_following_liked_medias(igapi, following_list, username)
return liked_media_dict
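# Example (hypothetical account names; assumes the InstagramAPI client that is imported,
# commented out, at the top of this file):
#   igapi = InstagramAPI("my_username", "my_password")
#   igapi.login()
#   liked_media_dict = get_liked_media_list_from_username(igapi, "my_username")
#   save_json(liked_media_dict, get_liked_media_dict_path("my_username"))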
def load_json(json_path):
with open(json_path) as json_file:
json_dict = json.load(json_file)
return json_dict
def save_json(json_data, json_path):
json_dir = os.path.dirname(json_path)
create_dir(json_dir)
with open(json_path, "w") as outfile:
json.dump(json_data, outfile)
def create_dir(_dir):
"""
Creates given directory if it is not present.
"""
if not os.path.exists(_dir):
os.makedirs(_dir)
def get_liked_media_dict_path(username):
liked_media_dict_path = os.path.join("data", username, "liked_media_dict.json")
return liked_media_dict_path
def get_follower_likes_list_path(username):
follower_likes_list_path = os.path.join(
"data", username, "follower_likes_list.json"
)
return follower_likes_list_path
def get_user_likes_per_account(liked_media_dict):
"""
Returns a list of {"username": "", "number_of_liked_posts": 0, "liked_media_list": [], "profile_pic_url": "", "profile_url": ""}
"""
liked_media_list = liked_media_dict["clean"]
# create dict with structure: {username: {"liked_media_list": []}}
user_likes_per_account = dict()
for liked_media in liked_media_list:
liked_username = liked_media["username"]
if liked_username not in user_likes_per_account.keys():
user_likes_per_account[liked_username] = {"liked_media_list": [liked_media]}
else:
user_likes_per_account[liked_username]["liked_media_list"].append(
liked_media
)
# create dict with structure: {username: {"liked_media_list": [], "number_of_liked_posts": 0}}
for liked_username in user_likes_per_account.keys():
user_likes_per_account[liked_username]["number_of_liked_posts"] = len(
user_likes_per_account[liked_username]["liked_media_list"]
)
# create list with structure: [{"username": "", "number_of_liked_posts": 0}]
user_likes_per_account_list = [
{
"username": liked_username,
"number_of_liked_posts": user_likes_per_account[liked_username][
"number_of_liked_posts"
],
"profile_pic_url": user_likes_per_account[liked_username][
"liked_media_list"
][0]["profile_pic_url"],
"profile_url": "https://www.instagram.com/" + liked_username,
"liked_media_list": user_likes_per_account[liked_username][
"liked_media_list"
],
}
for liked_username in user_likes_per_account
]
return user_likes_per_account_list
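# Illustrative (hypothetical) input/output for get_user_likes_per_account:
#   liked_media_dict = {"clean": [
#       {"username": "alice", "media_id": 1, "profile_pic_url": "http://...", "media_url": "..."},
#       {"username": "alice", "media_id": 2, "profile_pic_url": "http://...", "media_url": "..."},
#   ]}
#   get_user_likes_per_account(liked_media_dict)
#   -> [{"username": "alice", "number_of_liked_posts": 2, "profile_pic_url": "http://...",
#        "profile_url": "https://www.instagram.com/alice", "liked_media_list": [...]}]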
def get_follower_likes_list(igapi, username):
user_id = get_userid_from_username(igapi, username)
follower_list = get_all_followers(igapi, user_id)
follower_dict = dict()
for ind in range(len(follower_list)):
follower_username = follower_list[ind]["username"]
follower_list[ind]["number_of_liked_posts"] = 0
follower_list[ind]["profile_pic_url"] = INSTAGRAM_DEFAULT_PROFIL_PIC_URL
follower_dict[follower_username] = follower_list[ind]
response = igapi.getUserFeed(user_id)
user_medias = igapi.LastJson["items"]
media_id_list = [media["pk"] for media in user_medias]
for media_id in media_id_list:
response = igapi.getMediaLikers(media_id)
media_likers_list = igapi.LastJson["users"]
for media_liker in media_likers_list:
media_liker_username = media_liker["username"]
media_liker_profile_pic_url = media_liker["profile_pic_url"]
if media_liker_username in follower_dict.keys():
follower_dict[media_liker_username]["number_of_liked_posts"] += 1
follower_dict[media_liker_username][
"profile_pic_url"
] = media_liker_profile_pic_url
# create list with structure: [{"username": "", "full_name": "", "number_of_liked_posts": 0, "profile_url": "", "profile_pic_url": ""}]
follower_likes_list = [
{
"username": follower_username,
"full_name": follower_dict[follower_username]["full_name"],
"number_of_liked_posts": follower_dict[follower_username][
"number_of_liked_posts"
],
"profile_url": "https://www.instagram.com/" + follower_username,
"profile_pic_url": follower_dict[follower_username]["profile_pic_url"],
}
for follower_username in follower_dict.keys()
]
return follower_likes_list
def get_hd_profile_pic(igapi, username):
"""
Returns {"user_id": 0, "username": "", full_name": "", "hd_profile_pic_url": "",
"is_private":": false, "is_business": false, "profile_url": ""}
"""
user_id = get_userid_from_username(igapi, username)
response = igapi.getUsernameInfo(user_id)
hd_profile_pic_url = igapi.LastJson["user"]["hd_profile_pic_url_info"]["url"]
full_name = igapi.LastJson["user"]["full_name"]
is_private = igapi.LastJson["user"]["is_private"]
is_business = igapi.LastJson["user"]["is_business"]
profile_url = ("https://www.instagram.com/" + username,)
hd_profile_pic_dict = {
"user_id": user_id,
"hd_profile_pic_url": hd_profile_pic_url,
"full_name": full_name,
"is_private": is_private,
"is_business": is_business,
"profile_url": profile_url,
}
return hd_profile_pic_dict
if __name__ == "__main__":
# from igapi import igapi
username = "fcakyon"
# get_hd_profile_pic(igapi, username)
follower_likes_list_path = get_follower_likes_list_path(username)
follower_likes_list = load_json(follower_likes_list_path)
def sort_function(e):
return e["number_of_liked_posts"]
follower_likes_list.sort(reverse=True, key=sort_function)
    print(follower_likes_list)
|
import numpy as np
import matplotlib.pyplot as plt


def plot_pairwise_image_grid(
A: list, B: list, columns: int = 5, savepath: str = None, show: bool = False
) -> None:
num_pictures = len(A)
rows = 2 * int(np.ceil(num_pictures / columns))
fig, axes = plt.subplots(nrows=rows, ncols=columns, figsize=(columns, rows))
for idx in range(num_pictures):
axes[2 * (idx // columns), idx % columns].imshow(A[idx])
axes[2 * (idx // columns) + 1, idx % columns].imshow(B[idx])
for ax in axes.flatten():
ax.set_axis_off()
fig.tight_layout()
fig.subplots_adjust(wspace=1e-5, hspace=1e-5)
if savepath:
fig.savefig(savepath.format(rows, columns))
if show:
        plt.show()
plt.close(fig)
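# Hypothetical usage: pair up two lists of images (e.g. inputs vs. reconstructions); the file name
# pattern gets the grid size substituted via savepath.format(rows, columns).
#   A = [np.random.rand(32, 32) for _ in range(10)]
#   B = [np.random.rand(32, 32) for _ in range(10)]
#   plot_pairwise_image_grid(A, B, columns=5, savepath="grid_{}x{}.png")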
|
package br.com.satheler.bot.providers;
import java.util.List;
/**
* ICommand
*/
public abstract class CommandProvider {
protected String response;
private boolean isAsynchronous;
    /**
     * Class constructor.
     */
public CommandProvider() {
this.isAsynchronous = false;
}
    /**
     * Constructor to initialize the class as asynchronous or not.
     */
protected CommandProvider(boolean isAsynchronous) {
this.isAsynchronous = isAsynchronous;
}
    /**
     * Method that starts the invocation of this class as a command.
     * @param params List containing the requested command parameters.
     * @return Final response of the command execution for this class.
     */
public abstract String run(List<String> params);
    /**
     * Method that returns information about the class currently being called.
     * @return Content with information about this class's command.
     */
public String usage() {
return "\\" + ServiceProvider.getOnlyNameClass(this.getClass());
}
    /**
     * Method that returns whether the state is asynchronous.
     * @return Whether it is asynchronous.
     */
public boolean isAsynchronous() {
return this.isAsynchronous;
}
}
|
import java.util.LinkedList;
import java.util.Scanner;
public class Main {
public static class Player {
int score;
String name;
boolean SamePlayer(String s) {
return name.equals(s);
}
}
public static void main(String[] args) { // New strat: Brute force!
Scanner sc = new Scanner(System.in);
int n = Integer.parseInt(sc.nextLine());
String[] nameRecord = new String[n]; // Store ALL the lines!
int[] scoreRecord = new int[n];
LinkedList<Player> Players = new LinkedList<Player>();
// Tabulate scores
for (int i = 0 ; i < n; i++) {
// Parse the line
String line = sc.nextLine();
int sp = line.indexOf(' ');
String name = line.substring(0, sp);
int score = Integer.parseInt(line.substring(sp + 1));
nameRecord[i] = name;
scoreRecord[i] = score;
int ni = NameIndex(Players, name);
if (ni == -1) {
Player p = new Player();
p.name = name;
p.score = score;
Players.add(p);
}
else Players.get(ni).score += score;
}
// Find out how many people are tied for first
int MaxScore = 0;
LinkedList<Player> Winners = new LinkedList<Player>();
for (int i = 0; i < Players.size(); i++) {
Player p = Players.get(i);
if (p.score > MaxScore) {
MaxScore = p.score;
while (Winners.size() > 0) Winners.removeFirst();
Winners.add(p);
}
else if (p.score == MaxScore) Winners.add(p);
}
// No ties? Print out winner.
if (Winners.size() == 1) {
System.out.println(Winners.get(0).name);
return;
}
// Tiebreaking process
int leastRound = 1001;
int ip = -1; // Index of best player
for (int i = 0; i < Winners.size(); i++) {
Player p = Winners.get(i);
int curScore = 0;
for (int j = 0; j < n; j++) {
if (p.name.equals(nameRecord[j])) {
curScore += scoreRecord[j];
if (curScore >= MaxScore) { // Reached maxScore on jth round
if (j < leastRound) {
leastRound = j;
ip = i; // ith player is best so far
}
}
}
}
}
System.out.println(Winners.get(ip).name);
}
public static int NameIndex(LinkedList<Player> Players, String name) {
for (int i = 0; i < Players.size(); i++) {
if (Players.get(i).SamePlayer(name)) return i;
}
return -1;
}
}
|
The effect of Functional Electric Stimulation in stroke patients' motor control – a case report
Functional Electric Stimulation (FES) has been studied as a therapeutic resource to reduce spasticity in hemiplegic patients; however, there are no studies on the effects of FES on the motor control of these patients during functional tasks such as balance maintenance. Muscular activation of the gastrocnemius medialis (GM) and semitendinosus (ST) was investigated by surface electromyography in both limbs of a hemiparetic patient during self-disturbed quiet stance, before and after FES of the tibialis anterior. The instant of the maximum activation peak of GM and ST was calculated immediately after a motor self-disturbance, in order to observe the muscular synergy between these two muscles and the balance strategies used (ankle or hip strategy). In the preserved limb a distal-proximal synergy occurred (GM followed by ST), as expected for small perturbations; in the spastic limb, however, this synergy was inverted (proximal-distal) after FES. It is possible that the electrical intervention inhibited synergistic pathways through an antidromic effect, making it difficult to use the ankle strategy in the spastic limb.
Introduction
Stroke is a disease of vascular origin in which a focal disturbance of encephalic function occurs, due to ischemia or bleeding in the encephalic nervous tissue. It is the second leading cause of death in the world, and among survivors there is a large proportion of disabled people who need constant support from their families and from social and health institutions.
Hemiplegia is the most prevalent sequela in patients who have suffered a stroke, and it is commonly characterized by spasticity, deterioration of motor control and, consequently, changes in gait pattern. Usual motor alterations are a decrease of triple flexion of the lower limb, reduction of dorsiflexion range of motion, equinus foot, and knee hyperextension. Thus, these subjects have higher energy expenditure in walking and also a balance deficit.
The main factor that contributes to these alterations is spasticity, due to the loss of central inhibition of the muscle stretch reflex caused by injury to upper motor neurons, which predominates in the extensor muscles of the lower limbs.
Spasticity reduces the ability to regulate voluntary movement. This occurs because patients with stroke sequelae have decreased reciprocal inhibition (RI), since the predominance of hypertonic muscles blocks the interaction between synergistic and antagonistic muscle groups, resulting in a static fixation of the joint - in this case, the equinus foot - instead of dynamic stability during motor tasks.
In post-stroke rehabilitation, neuromuscular facilitation is commonly used, which emphasizes the inhibition of abnormal reflex patterns and tone normalization before training standing and gait. Therefore, a possible treatment would be to interfere with the mechanism that modulates RI and presynaptic inhibition. It is known that FES has the immediate effect of increasing RI of the stimulated muscles' antagonists, so it is possible that stimulating the dorsiflexor muscles can decrease the spasticity of the plantar flexors, favoring a more efficient pattern of muscle activation in spastic hemiplegic patients.
Since 1961, when Liberson et al. described a method of FES applied to the fibular nerve of hemiplegic patients, the induction of RI in spastic muscles by FES has been studied by several researchers, and all of them found significant results such as an increase of RI and, consequently, a decrease of spasticity in these patients. However, it is important to highlight that, with the exception of Alfieri's research, none of them measured the duration of RI in the spastic muscles. Alfieri, on the other hand, showed a reduction of spasticity after FES with a duration of effect ranging from 10 minutes to 3 hours, with an average of 1 hour; but this was measured with the Ashworth Scale, which is subjective and unreliable for measuring spasticity objectively.
Moreover, most of the cited studies evaluated the spasticity of the plantar flexors in a static manner, and did not assess how these muscles behaved in functional motor tasks such as balance and gait.
With this background, FES appears to be a potentially useful therapeutic approach for reducing the spasticity of hemiplegic patients, but it is necessary to study the effects of FES on motor control during functional tasks such as maintaining balance.
Materials and methods
The present study investigated muscle activation of the lower limb in hemiparetic patients during self-disturbed stance before and after FES of tibialis anterior, by surface electromyography. However, we used kinematic variables to enable the division of the electromyography signal in two phases: disturbance and post-disturbance. The postdisturbance, when the subject is recovering from the perturbation, is the phase of interest in this study. The
Subject selection
We assessed a male volunteer, aged 51, who was in a rehabilitation program in the model of Light Hemiplegia in the Estação Especial da Lapa unit of IMREA.
Inclusion criteria were: diagnosis of chronic stroke (more than one year of injury), level of spasticity in lower limbs = 2 (Modified Ashworth), level of dorsiflexion muscle strength ≥ 3, and the presence of functional ambulation without aids or orthoses.
Exclusion criteria were: age above 60 years, previous use of FES as a treatment, botulinum toxin injection in the plantar flexor muscles in the last six months, orthopedic disorder or equinus foot deformity, cognitive disorders, other neurological disorders, and hemodynamic instability.
The volunteer was included in the study after he agreed to participate in the research and signed the INFORMED CONSENT FOR RESEARCH PARTICIPATION, making him aware of the procedures related to the collection of data. Additionally, all of his doubts were clarified as to the procedures to be performed. Participation was voluntary and unpaid. The experimental methods used were non-invasive and pose no hazard to the health of the volunteer. We did not use any type of substance, and no medical/ therapeutic treatment was prescribed.
Subject Preparation
After the explanation of the testing procedures, the individual wore appropriate clothing (swimsuit or shorts) and was then prepared for the attachment of reflective markers at specific anatomical points, defined by the anthropometric model of Dempster (1995). This was necessary because, through these markers, body segments were determined in order to calculate the total center of mass (COM) using the OrthoTrak 6.2 software, which was subsequently used for the separation of task intervals by the method of Costa et al.
To capture the electromyography signal, after abrasion of the skin with alcohol, surface electrodes were fixed at the following locations: the gastrocnemius medialis muscles (at 32% of the distance between the medial popliteal fossa and the medial side of the Achilles tendon), the semitendinosus (at 50% of the distance between the medial epicondyle and the ischial tuberosity), and the tibial tuberosity (ground electrode). These points were chosen following the recommendations of SENIAM and Sacco.
Experimental protocol
The experimental sequence of this study was composed of the following stages: (1) system calibration, (2) execution of the task, (3) intervention using the FES, and (4) postintervention biomechanical analysis, every 45 minutes during a period of 3 hours.
The task consisted of the recovery of balance after a self-perturbation caused by the subject. The volunteer was instructed to position himself standing on the force platform, so that the axis of mediolateral movement was represented by the x coordinate and the anteroposterior by y, in relation to the reference axes adopted on the laboratory platform. The initial position was 60° of trunk flexion (relative to the ground) with the arms resting on the back of a chair - to avoid possible falls, since the subject had a balance deficit - and the feet comfortably parallel to the body (Figure 1). Data collection began after an initial sound stimulus, when the volunteer was asked to make a sudden extension of the trunk to the upright position (Figure 1), causing a motor disturbance. Afterwards, he was told to keep his eyes open and fixed on the horizon and to stay in the upright quiet position for 60 seconds, when a second sound stimulus indicated the end of the first data collection. After 1 minute of rest, the second collection was performed, and then the third, respecting the same resting interval. Of these three collections, the first was considered the task's adjustment period and was discarded. The second was chosen as valid for analysis, but in cases where the subject had made some unwanted movement, like shaking his head or arms, the third collection was used. This task has been used in other experiments with the intention of causing a motor disturbance, and was described in more detail by Castro and Costa.
The intervention with FES was initiated immediately after the initial biomechanical analysis. For this, the volunteer was positioned in a chair with his feet flat on the floor, with hip ischial support, keeping the hip, knee, and ankle in 90°.
For the motor point mapping, two adhesive electrodes were used: the first, with 5 cm of diameter, was used as the reference electrode; the second, with 3.2 cm of diameter, was used to map the motor point. It was considered an indication of proper motor point placement when there was muscle stimulation with FES, gradually increasing the amplitude and causing a pure dorsiflexion. After locating the motor point, two electrodes of the same size (5 cm) were applied for electrical stimulation.
After the intervention with FES, the volunteers were re-evaluated by the biomechanical system according to the procedures described above, and these were repeated four more times with time intervals of 45 minutes, totalizing a period of 3 hours after FES application.
Acquisition and processing of data
After pre-processing of the raw data by the Evart software, the system provides the files with kinematic and EMG data. The OrthoTrak 6.2 software integrates the signals from the cameras and creates a three-dimensional model of the studied subject by photogrammetry, after manual processing by a program operator.
Before processing the EMG signals, it was necessary to determine the following intervals performed by the volunteer: disturbance interval (when the subject performed trunk extension for upright posture) and post-disturbance interval (when he reached the upright position and kept oscillating to maintain postural control). For the division of these intervals, a preprocessing of kinematic data was carried out.
2.4.1. Pre-processing of data
For pre-processing of the data, kinematic and EMG variables captured by the Evart software were used. The imaging system generates a raw data file with information on the markers fixed on the subject during the self-perturbation movement, with the coordinates of the joints, the COM and the centers of mass of each segment at coordinates x, y and z. This file can be read in an Excel spreadsheet and has the extension *.TRBCoord. Another file with extension *.xls contains the values of the electromyography signals of the ST and GM muscles of both legs during the collection interval.
In order to identify the end of self-disturbance, a criterion was established to select the task intervals, and these were calculated by a mathematical routine created in Matlab, which also provides the frame in which each interval began.
The criteria used were, in this order: a) speed of center of mass position vector, b) speed in z, and c) acceleration of center of mass position vector. Both the criteria and the equations used for calculation can be observed in detail in the study of Costa.
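A minimal sketch of this interval-splitting step is given below, assuming the COM trajectory is available as a NumPy array sampled at a known rate; the threshold values, the variable names and the use of Python/NumPy (instead of the original Matlab routine) are illustrative assumptions, not the authors' implementation.

import numpy as np

def end_of_self_disturbance(com, fs, speed_thresh=0.05, acc_thresh=0.5):
    """Estimate the frame where the self-disturbance ends, from the COM trajectory.

    com : array (n_frames, 3), total center-of-mass position; fs : sampling frequency (Hz).
    The thresholds are illustrative stand-ins for the criteria of the original routine
    (speed of the COM position vector, speed in z, and COM acceleration).
    """
    vel = np.gradient(com, 1.0 / fs, axis=0)                 # COM velocity
    speed = np.linalg.norm(vel, axis=1)                      # speed of the position vector
    vz = np.abs(vel[:, 2])                                   # speed along z
    acc = np.linalg.norm(np.gradient(vel, 1.0 / fs, axis=0), axis=1)
    quiet = (speed < speed_thresh) & (vz < speed_thresh) & (acc < acc_thresh)
    start = int(np.argmax(speed > speed_thresh))             # onset of the trunk extension
    after = np.flatnonzero(quiet[start:]) + start            # quiet frames after movement onset
    return int(after[0]) if after.size else len(com) - 1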
2.4.2. Electromyography signal processing
After the *.xls file was created, the linear envelope of the EMG signal was generated in Excel, and the first peak time of the gastrocnemius medialis (GM) and semitendinosus (ST) muscles was calculated in the post-disturbance interval with the same software.
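Purely for illustration, the envelope and first-peak computation can be sketched as follows; the full-wave rectification, the 6 Hz low-pass Butterworth filter and the use of SciPy are assumptions, since the original processing was performed in Excel.

import numpy as np
from scipy.signal import butter, filtfilt

def linear_envelope(emg, fs, cutoff_hz=6.0, order=4):
    """Full-wave rectify the EMG signal and low-pass filter it (linear envelope)."""
    b, a = butter(order, cutoff_hz / (fs / 2.0), btype="low")
    return filtfilt(b, a, np.abs(emg - np.mean(emg)))

def first_peak_time(envelope, fs, start_frame):
    """Instant (in seconds) of the maximum of the envelope within the post-disturbance interval."""
    segment = envelope[start_frame:]
    return (start_frame + int(np.argmax(segment))) / fs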
Results and discussion
The instants of the maximum activation peaks of the GM and ST muscles allow us to observe the synergism between these two muscles in the attempt to maintain balance after a motor disturbance, and the possible strategies used to keep balance (ankle or hip strategy) in each trial (1)(2)(3)(4)(5)(6); they are shown in Figure 2. One can observe that in the preserved limb (Figure 2A) there is a distal-proximal synergy (ankle strategy), in which activation of the GM occurs followed by ST activation in all trials, as expected for small perturbations. In the spastic limb (Figure 2B), this pattern occurs only in the first trial (prior to FES); in all trials after the application of FES (with the exception of trial 4) this pattern is reversed, and proximal-distal activation occurs. This synergistic pattern is related to the hip strategy, which is associated with larger disturbances of balance. One can infer that after the use of FES the spastic limb showed greater difficulty in maintaining balance, since the same task was treated as a larger disturbance.
Figure 2. Instant of the maximum activation peak for the preserved limb (A) and the spastic limb (B).
In 1986, DiFabio performed a study with EMG in hemiparetic subjects and found latencies in distal muscles of their paretic limb that were longer than the contralateral limb, and he commented that patients with hemiparesis become fixed on stereotyped patterns of movement, showing a loss of flexibility and adaptability, as well as delayed responses and disruption of synergistic muscle coordination of the hemiparetic side, therefore they need to activate the proximal muscles before the distal muscles.
It is possible that FES did not cause effective RI in the study subject's GM muscle, or it may even have caused the opposite of the expected effect: the action potentials propagated by the motoneurons may also have travelled along collateral axons that synapse with spinal inhibitory interneurons called Renshaw cells. Antidromic activation of these interneurons, in turn, inhibits the activity of hyperactive agonist and synergist motoneurons. Thus, instead of facilitating the synergy between GM and ST, there may have been an inhibition of these synergistic pathways, leading to a proximal-distal synergy in the spastic limb.
Another plausible hypothesis is that, due to spasticity, the subject bore more weight on the preserved limb (despite having been instructed to distribute his weight equally between both legs), and therefore this limb responded to the self-disturbance of balance with little assistance from the spastic limb.
However, this research is a case report, so we cannot discard the possibility of measurement error or a non-sensitive method to detect changes caused in the synergy of GM and ST by RI. More studies are needed, with a larger sample of subjects to verify the effectiveness of the RI by FES in the synergy of hemiplegic patients, and even different methodologies to detect its effect.
Conclusion
Spastic hemiplegic stroke patients present altered proximal-distal synergy in the spastic limb during maintenance of the balance after a motor self-disturbance. FES is a useful tool in reducing the spasticity of GM by RI, but there is the possibility that the antidromic effect, caused by an electrical intervention, can inhibit certain muscle synergies such as the one that is used in the ankle strategy of balance, so it can be more difficult to maintain balance after a disturbance.
Therefore, further studies are needed with larger samples of subjects and different methodologies in order to determine the benefit or harm caused by FES in these patients' motor control.
|
/* libs/pixelflinger/trap.cpp
**
** Copyright 2006, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include "trap.h"
#include "picker.h"
#include <cutils/log.h>
#include <cutils/memory.h>
namespace android {
// ----------------------------------------------------------------------------
// enable to see triangles edges
#define DEBUG_TRANGLES 0
// ----------------------------------------------------------------------------
static void pointx_validate(void *con, const GGLcoord* c, GGLcoord r);
static void pointx(void *con, const GGLcoord* c, GGLcoord r);
static void aa_pointx(void *con, const GGLcoord* c, GGLcoord r);
static void aa_nice_pointx(void *con, const GGLcoord* c, GGLcoord r);
static void linex_validate(void *con, const GGLcoord* v0, const GGLcoord* v1, GGLcoord w);
static void linex(void *con, const GGLcoord* v0, const GGLcoord* v1, GGLcoord w);
static void aa_linex(void *con, const GGLcoord* v0, const GGLcoord* v1, GGLcoord w);
static void recti_validate(void* c, GGLint l, GGLint t, GGLint r, GGLint b);
static void recti(void* c, GGLint l, GGLint t, GGLint r, GGLint b);
static void trianglex_validate(void*,
const GGLcoord*, const GGLcoord*, const GGLcoord*);
static void trianglex_small(void*,
const GGLcoord*, const GGLcoord*, const GGLcoord*);
static void trianglex_big(void*,
const GGLcoord*, const GGLcoord*, const GGLcoord*);
static void aa_trianglex(void*,
const GGLcoord*, const GGLcoord*, const GGLcoord*);
static void trianglex_debug(void* con,
const GGLcoord*, const GGLcoord*, const GGLcoord*);
static void aapolyx(void* con,
const GGLcoord* pts, int count);
static inline int min(int a, int b) CONST;
static inline int max(int a, int b) CONST;
static inline int min(int a, int b, int c) CONST;
static inline int max(int a, int b, int c) CONST;
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#pragma mark Tools
#endif
inline int min(int a, int b) {
return a<b ? a : b;
}
inline int max(int a, int b) {
return a<b ? b : a;
}
inline int min(int a, int b, int c) {
return min(a,min(b,c));
}
inline int max(int a, int b, int c) {
return max(a,max(b,c));
}
template <typename T>
static inline void swap(T& a, T& b) {
T t(a);
a = b;
b = t;
}
static void
triangle_dump_points( const GGLcoord* v0,
const GGLcoord* v1,
const GGLcoord* v2 )
{
float tri = 1.0f / TRI_ONE;
ALOGD(" P0=(%.3f, %.3f) [%08x, %08x]\n"
" P1=(%.3f, %.3f) [%08x, %08x]\n"
" P2=(%.3f, %.3f) [%08x, %08x]\n",
v0[0]*tri, v0[1]*tri, v0[0], v0[1],
v1[0]*tri, v1[1]*tri, v1[0], v1[1],
v2[0]*tri, v2[1]*tri, v2[0], v2[1] );
}
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#pragma mark Misc
#endif
void ggl_init_trap(context_t* c)
{
ggl_state_changed(c, GGL_PIXEL_PIPELINE_STATE|GGL_TMU_STATE|GGL_CB_STATE);
}
void ggl_state_changed(context_t* c, int flags)
{
if (ggl_likely(!c->dirty)) {
c->procs.pointx = pointx_validate;
c->procs.linex = linex_validate;
c->procs.recti = recti_validate;
c->procs.trianglex = trianglex_validate;
}
c->dirty |= uint32_t(flags);
}
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#pragma mark Point
#endif
void pointx_validate(void *con, const GGLcoord* v, GGLcoord rad)
{
GGL_CONTEXT(c, con);
ggl_pick(c);
if (c->state.needs.p & GGL_NEED_MASK(P_AA)) {
if (c->state.enables & GGL_ENABLE_POINT_AA_NICE) {
c->procs.pointx = aa_nice_pointx;
} else {
c->procs.pointx = aa_pointx;
}
} else {
c->procs.pointx = pointx;
}
c->procs.pointx(con, v, rad);
}
void pointx(void *con, const GGLcoord* v, GGLcoord rad)
{
GGL_CONTEXT(c, con);
GGLcoord halfSize = TRI_ROUND(rad) >> 1;
if (halfSize == 0)
halfSize = TRI_HALF;
GGLcoord xc = v[0];
GGLcoord yc = v[1];
if (halfSize & TRI_HALF) { // size odd
xc = TRI_FLOOR(xc) + TRI_HALF;
yc = TRI_FLOOR(yc) + TRI_HALF;
} else { // size even
xc = TRI_ROUND(xc);
yc = TRI_ROUND(yc);
}
GGLint l = (xc - halfSize) >> TRI_FRACTION_BITS;
GGLint t = (yc - halfSize) >> TRI_FRACTION_BITS;
GGLint r = (xc + halfSize) >> TRI_FRACTION_BITS;
GGLint b = (yc + halfSize) >> TRI_FRACTION_BITS;
recti(c, l, t, r, b);
}
// This way of computing the coverage factor, is more accurate and gives
// better results for small circles, but it is also a lot slower.
// Here we use super-sampling.
static int32_t coverageNice(GGLcoord x, GGLcoord y,
GGLcoord rmin, GGLcoord rmax, GGLcoord rr)
{
const GGLcoord d2 = x*x + y*y;
if (d2 >= rmax) return 0;
if (d2 < rmin) return 0x7FFF;
const int kSamples = 4;
const int kInc = 4; // 1/4 = 0.25
const int kCoverageUnit = 1; // 1/(4^2) = 0.0625
const GGLcoord kCoordOffset = -6; // -0.375
int hits = 0;
int x_sample = x + kCoordOffset;
for (int i=0 ; i<kSamples ; i++, x_sample += kInc) {
const int xval = rr - (x_sample * x_sample);
int y_sample = y + kCoordOffset;
for (int j=0 ; j<kSamples ; j++, y_sample += kInc) {
if (xval - (y_sample * y_sample) > 0)
hits += kCoverageUnit;
}
}
return min(0x7FFF, hits << (15 - kSamples));
}
void aa_nice_pointx(void *con, const GGLcoord* v, GGLcoord size)
{
GGL_CONTEXT(c, con);
GGLcoord rad = ((size + 1)>>1);
GGLint l = (v[0] - rad) >> TRI_FRACTION_BITS;
GGLint t = (v[1] - rad) >> TRI_FRACTION_BITS;
GGLint r = (v[0] + rad + (TRI_ONE-1)) >> TRI_FRACTION_BITS;
GGLint b = (v[1] + rad + (TRI_ONE-1)) >> TRI_FRACTION_BITS;
GGLcoord xstart = TRI_FROM_INT(l) - v[0] + TRI_HALF;
GGLcoord ystart = TRI_FROM_INT(t) - v[1] + TRI_HALF;
// scissor...
if (l < GGLint(c->state.scissor.left)) {
xstart += TRI_FROM_INT(c->state.scissor.left-l);
l = GGLint(c->state.scissor.left);
}
if (t < GGLint(c->state.scissor.top)) {
ystart += TRI_FROM_INT(c->state.scissor.top-t);
t = GGLint(c->state.scissor.top);
}
if (r > GGLint(c->state.scissor.right)) {
r = GGLint(c->state.scissor.right);
}
if (b > GGLint(c->state.scissor.bottom)) {
b = GGLint(c->state.scissor.bottom);
}
int xc = r - l;
int yc = b - t;
if (xc>0 && yc>0) {
int16_t* covPtr = c->state.buffers.coverage;
const int32_t sqr2Over2 = 0xC; // rounded up
GGLcoord rr = rad*rad;
GGLcoord rmin = (rad - sqr2Over2)*(rad - sqr2Over2);
GGLcoord rmax = (rad + sqr2Over2)*(rad + sqr2Over2);
GGLcoord y = ystart;
c->iterators.xl = l;
c->iterators.xr = r;
c->init_y(c, t);
do {
// compute coverage factors for each pixel
GGLcoord x = xstart;
for (int i=l ; i<r ; i++) {
covPtr[i] = coverageNice(x, y, rmin, rmax, rr);
x += TRI_ONE;
}
y += TRI_ONE;
c->scanline(c);
c->step_y(c);
} while (--yc);
}
}
// This is a cheap way of computing the coverage factor for a circle.
// We just lerp between the circles of radii r-sqrt(2)/2 and r+sqrt(2)/2
static inline int32_t coverageFast(GGLcoord x, GGLcoord y,
GGLcoord rmin, GGLcoord rmax, GGLcoord scale)
{
const GGLcoord d2 = x*x + y*y;
if (d2 >= rmax) return 0;
if (d2 < rmin) return 0x7FFF;
return 0x7FFF - (d2-rmin)*scale;
}
void aa_pointx(void *con, const GGLcoord* v, GGLcoord size)
{
GGL_CONTEXT(c, con);
GGLcoord rad = ((size + 1)>>1);
GGLint l = (v[0] - rad) >> TRI_FRACTION_BITS;
GGLint t = (v[1] - rad) >> TRI_FRACTION_BITS;
GGLint r = (v[0] + rad + (TRI_ONE-1)) >> TRI_FRACTION_BITS;
GGLint b = (v[1] + rad + (TRI_ONE-1)) >> TRI_FRACTION_BITS;
GGLcoord xstart = TRI_FROM_INT(l) - v[0] + TRI_HALF;
GGLcoord ystart = TRI_FROM_INT(t) - v[1] + TRI_HALF;
// scissor...
if (l < GGLint(c->state.scissor.left)) {
xstart += TRI_FROM_INT(c->state.scissor.left-l);
l = GGLint(c->state.scissor.left);
}
if (t < GGLint(c->state.scissor.top)) {
ystart += TRI_FROM_INT(c->state.scissor.top-t);
t = GGLint(c->state.scissor.top);
}
if (r > GGLint(c->state.scissor.right)) {
r = GGLint(c->state.scissor.right);
}
if (b > GGLint(c->state.scissor.bottom)) {
b = GGLint(c->state.scissor.bottom);
}
int xc = r - l;
int yc = b - t;
if (xc>0 && yc>0) {
int16_t* covPtr = c->state.buffers.coverage;
rad <<= 4;
const int32_t sqr2Over2 = 0xB5; // fixed-point 24.8
GGLcoord rmin = rad - sqr2Over2;
GGLcoord rmax = rad + sqr2Over2;
GGLcoord scale;
rmin *= rmin;
rmax *= rmax;
scale = 0x800000 / (rmax - rmin);
rmin >>= 8;
rmax >>= 8;
GGLcoord y = ystart;
c->iterators.xl = l;
c->iterators.xr = r;
c->init_y(c, t);
do {
// compute coverage factors for each pixel
GGLcoord x = xstart;
for (int i=l ; i<r ; i++) {
covPtr[i] = coverageFast(x, y, rmin, rmax, scale);
x += TRI_ONE;
}
y += TRI_ONE;
c->scanline(c);
c->step_y(c);
} while (--yc);
}
}
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#pragma mark Line
#endif
void linex_validate(void *con, const GGLcoord* v0, const GGLcoord* v1, GGLcoord w)
{
GGL_CONTEXT(c, con);
ggl_pick(c);
if (c->state.needs.p & GGL_NEED_MASK(P_AA)) {
c->procs.linex = aa_linex;
} else {
c->procs.linex = linex;
}
c->procs.linex(con, v0, v1, w);
}
static void linex(void *con, const GGLcoord* v0, const GGLcoord* v1, GGLcoord width)
{
GGL_CONTEXT(c, con);
GGLcoord v[4][2];
v[0][0] = v0[0]; v[0][1] = v0[1];
v[1][0] = v1[0]; v[1][1] = v1[1];
v0 = v[0];
v1 = v[1];
const GGLcoord dx = abs(v0[0] - v1[0]);
const GGLcoord dy = abs(v0[1] - v1[1]);
GGLcoord nx, ny;
nx = ny = 0;
GGLcoord halfWidth = TRI_ROUND(width) >> 1;
if (halfWidth == 0)
halfWidth = TRI_HALF;
((dx > dy) ? ny : nx) = halfWidth;
v[2][0] = v1[0]; v[2][1] = v1[1];
v[3][0] = v0[0]; v[3][1] = v0[1];
v[0][0] += nx; v[0][1] += ny;
v[1][0] += nx; v[1][1] += ny;
v[2][0] -= nx; v[2][1] -= ny;
v[3][0] -= nx; v[3][1] -= ny;
trianglex_big(con, v[0], v[1], v[2]);
trianglex_big(con, v[0], v[2], v[3]);
}
static void aa_linex(void *con, const GGLcoord* v0, const GGLcoord* v1, GGLcoord width)
{
GGL_CONTEXT(c, con);
GGLcoord v[4][2];
v[0][0] = v0[0]; v[0][1] = v0[1];
v[1][0] = v1[0]; v[1][1] = v1[1];
v0 = v[0];
v1 = v[1];
const GGLcoord dx = v0[0] - v1[0];
const GGLcoord dy = v0[1] - v1[1];
GGLcoord nx = -dy;
GGLcoord ny = dx;
// generally, this will be well below 1.0
const GGLfixed norm = gglMulx(width, gglSqrtRecipx(nx*nx+ny*ny), 4);
nx = gglMulx(nx, norm, 21);
ny = gglMulx(ny, norm, 21);
v[2][0] = v1[0]; v[2][1] = v1[1];
v[3][0] = v0[0]; v[3][1] = v0[1];
v[0][0] += nx; v[0][1] += ny;
v[1][0] += nx; v[1][1] += ny;
v[2][0] -= nx; v[2][1] -= ny;
v[3][0] -= nx; v[3][1] -= ny;
aapolyx(con, v[0], 4);
}
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#pragma mark Rect
#endif
void recti_validate(void *con, GGLint l, GGLint t, GGLint r, GGLint b)
{
GGL_CONTEXT(c, con);
ggl_pick(c);
c->procs.recti = recti;
c->procs.recti(con, l, t, r, b);
}
void recti(void* con, GGLint l, GGLint t, GGLint r, GGLint b)
{
GGL_CONTEXT(c, con);
// scissor...
if (l < GGLint(c->state.scissor.left))
l = GGLint(c->state.scissor.left);
if (t < GGLint(c->state.scissor.top))
t = GGLint(c->state.scissor.top);
if (r > GGLint(c->state.scissor.right))
r = GGLint(c->state.scissor.right);
if (b > GGLint(c->state.scissor.bottom))
b = GGLint(c->state.scissor.bottom);
int xc = r - l;
int yc = b - t;
if (xc>0 && yc>0) {
c->iterators.xl = l;
c->iterators.xr = r;
c->init_y(c, t);
c->rect(c, yc);
}
}
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#pragma mark Triangle / Debugging
#endif
static void scanline_set(context_t* c)
{
int32_t x = c->iterators.xl;
size_t ct = c->iterators.xr - x;
int32_t y = c->iterators.y;
surface_t* cb = &(c->state.buffers.color);
const GGLFormat* fp = &(c->formats[cb->format]);
uint8_t* dst = reinterpret_cast<uint8_t*>(cb->data) +
(x + (cb->stride * y)) * fp->size;
const size_t size = ct * fp->size;
memset(dst, 0xFF, size);
}
static void trianglex_debug(void* con,
const GGLcoord* v0, const GGLcoord* v1, const GGLcoord* v2)
{
GGL_CONTEXT(c, con);
if (c->state.needs.p & GGL_NEED_MASK(P_AA)) {
aa_trianglex(con,v0,v1,v2);
} else {
trianglex_big(con,v0,v1,v2);
}
void (*save_scanline)(context_t*) = c->scanline;
c->scanline = scanline_set;
linex(con, v0, v1, TRI_ONE);
linex(con, v1, v2, TRI_ONE);
linex(con, v2, v0, TRI_ONE);
c->scanline = save_scanline;
}
static void trianglex_xor(void* con,
const GGLcoord* v0, const GGLcoord* v1, const GGLcoord* v2)
{
trianglex_big(con,v0,v1,v2);
trianglex_small(con,v0,v1,v2);
}
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#pragma mark Triangle
#endif
void trianglex_validate(void *con,
const GGLcoord* v0, const GGLcoord* v1, const GGLcoord* v2)
{
GGL_CONTEXT(c, con);
ggl_pick(c);
if (c->state.needs.p & GGL_NEED_MASK(P_AA)) {
c->procs.trianglex = DEBUG_TRANGLES ? trianglex_debug : aa_trianglex;
} else {
c->procs.trianglex = DEBUG_TRANGLES ? trianglex_debug : trianglex_big;
}
c->procs.trianglex(con, v0, v1, v2);
}
// ----------------------------------------------------------------------------
void trianglex_small(void* con,
const GGLcoord* v0, const GGLcoord* v1, const GGLcoord* v2)
{
GGL_CONTEXT(c, con);
    // vertices are in 28.4 fixed point, which allows
    // us to use 32-bit multiplies below.
int32_t x0 = v0[0];
int32_t y0 = v0[1];
int32_t x1 = v1[0];
int32_t y1 = v1[1];
int32_t x2 = v2[0];
int32_t y2 = v2[1];
int32_t dx01 = x0 - x1;
int32_t dy20 = y2 - y0;
int32_t dy01 = y0 - y1;
int32_t dx20 = x2 - x0;
// The code below works only with CCW triangles
// so if we get a CW triangle, we need to swap two of its vertices
if (dx01*dy20 < dy01*dx20) {
swap(x0, x1);
swap(y0, y1);
dx01 = x0 - x1;
dy01 = y0 - y1;
dx20 = x2 - x0;
dy20 = y2 - y0;
}
int32_t dx12 = x1 - x2;
int32_t dy12 = y1 - y2;
// bounding box & scissor
const int32_t bminx = TRI_FLOOR(min(x0, x1, x2)) >> TRI_FRACTION_BITS;
const int32_t bminy = TRI_FLOOR(min(y0, y1, y2)) >> TRI_FRACTION_BITS;
const int32_t bmaxx = TRI_CEIL( max(x0, x1, x2)) >> TRI_FRACTION_BITS;
const int32_t bmaxy = TRI_CEIL( max(y0, y1, y2)) >> TRI_FRACTION_BITS;
const int32_t minx = max(bminx, c->state.scissor.left);
const int32_t miny = max(bminy, c->state.scissor.top);
const int32_t maxx = min(bmaxx, c->state.scissor.right);
const int32_t maxy = min(bmaxy, c->state.scissor.bottom);
if ((minx >= maxx) || (miny >= maxy))
return; // too small or clipped out...
// step equations to the bounding box and snap to pixel center
const int32_t my = (miny << TRI_FRACTION_BITS) + TRI_HALF;
const int32_t mx = (minx << TRI_FRACTION_BITS) + TRI_HALF;
int32_t ey0 = dy01 * (x0 - mx) - dx01 * (y0 - my);
int32_t ey1 = dy12 * (x1 - mx) - dx12 * (y1 - my);
int32_t ey2 = dy20 * (x2 - mx) - dx20 * (y2 - my);
    // right-exclusive fill rule, to avoid rare cases
    // of overdrawing
if (dy01<0 || (dy01 == 0 && dx01>0)) ey0++;
if (dy12<0 || (dy12 == 0 && dx12>0)) ey1++;
if (dy20<0 || (dy20 == 0 && dx20>0)) ey2++;
c->init_y(c, miny);
for (int32_t y = miny; y < maxy; y++) {
register int32_t ex0 = ey0;
register int32_t ex1 = ey1;
register int32_t ex2 = ey2;
register int32_t xl, xr;
for (xl=minx ; xl<maxx ; xl++) {
if (ex0>0 && ex1>0 && ex2>0)
break; // all strictly positive
ex0 -= dy01 << TRI_FRACTION_BITS;
ex1 -= dy12 << TRI_FRACTION_BITS;
ex2 -= dy20 << TRI_FRACTION_BITS;
}
xr = xl;
for ( ; xr<maxx ; xr++) {
if (!(ex0>0 && ex1>0 && ex2>0))
break; // not all strictly positive
ex0 -= dy01 << TRI_FRACTION_BITS;
ex1 -= dy12 << TRI_FRACTION_BITS;
ex2 -= dy20 << TRI_FRACTION_BITS;
}
if (xl < xr) {
c->iterators.xl = xl;
c->iterators.xr = xr;
c->scanline(c);
}
c->step_y(c);
ey0 += dx01 << TRI_FRACTION_BITS;
ey1 += dx12 << TRI_FRACTION_BITS;
ey2 += dx20 << TRI_FRACTION_BITS;
}
}
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#endif
// The following routine fills a triangle via edge stepping, which
// unfortunately requires divisions in the setup phase to get right;
// it should probably only be used for relatively large triangles.
// x = y*DX/DY (where DX and DY are constants, DY > 0, and y >= 0)
//
// for an equation of the type:
// x' = y*K/2^p (with K and p constants "carefully chosen")
//
// We can now do a DDA without precision loss. We define 'e' by:
// x' - x = y*(DX/DY - K/2^p) = y*e
//
// If we choose K = round(DX*2^p/DY) then,
// abs(e) <= 1/2^(p+1) by construction
//
// therefore abs(x'-x) = y*abs(e) <= y/2^(p+1) <= DY/2^(p+1) <= DMAX/2^(p+1)
//
// which means that if DMAX <= 2^p, then abs(x-x') <= 1/2, including
// at the last line. In fact, it's even a strict inequality except in one
// extreme case (DY == DMAX and e = +/- 1/2)
//
// Applying that to our coordinates, we need 2^p >= 4096*16 = 65536
// so p = 16 is enough, we're so lucky!
const int TRI_ITERATORS_BITS = 16;
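// Worked check of the bound above (illustrative addition, assuming 12.4 input
// coordinates so DMAX = 4096*16 = 65536 as stated in the derivation): with
// p = TRI_ITERATORS_BITS = 16 we get 2^p = 65536 >= DMAX, hence
// abs(x - x') <= 1/2 pixel on every scanline. The compile-time assertion
// below encodes that requirement (requires C++11).
static_assert((1 << TRI_ITERATORS_BITS) >= 4096 * 16,
        "TRI_ITERATORS_BITS too small: the DDA bound needs 2^p >= DMAX");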
struct Edge
{
int32_t x; // edge position in 16.16 coordinates
int32_t x_incr; // on each step, increment x by that amount
int32_t y_top; // starting scanline, 16.4 format
int32_t y_bot;
};
static void
edge_dump( Edge* edge )
{
ALOGI( " top=%d (%.3f) bot=%d (%.3f) x=%d (%.3f) ix=%d (%.3f)",
edge->y_top, edge->y_top/float(TRI_ONE),
edge->y_bot, edge->y_bot/float(TRI_ONE),
edge->x, edge->x/float(FIXED_ONE),
edge->x_incr, edge->x_incr/float(FIXED_ONE) );
}
static void
triangle_dump_edges( Edge* edges,
int count )
{
ALOGI( "%d edge%s:\n", count, count == 1 ? "" : "s" );
for ( ; count > 0; count--, edges++ )
edge_dump( edges );
}
// The following function sets up an edge. It assumes
// that ymin and ymax are already in the 'reduced'
// format.
static __attribute__((noinline))
void edge_setup(
Edge* edges,
int* pcount,
const GGLcoord* p1,
const GGLcoord* p2,
int32_t ymin,
int32_t ymax )
{
const GGLfixed* top = p1;
const GGLfixed* bot = p2;
Edge* edge = edges + *pcount;
if (top[1] > bot[1]) {
swap(top, bot);
}
int y1 = top[1] | 1;
int y2 = bot[1] | 1;
int dy = y2 - y1;
if ( dy == 0 || y1 > ymax || y2 < ymin )
return;
if ( y1 > ymin )
ymin = TRI_SNAP_NEXT_HALF(y1);
if ( y2 < ymax )
ymax = TRI_SNAP_PREV_HALF(y2);
if ( ymin > ymax ) // when the edge doesn't cross any scanline
return;
const int x1 = top[0];
const int dx = bot[0] - x1;
const int shift = TRI_ITERATORS_BITS - TRI_FRACTION_BITS;
// setup edge fields
// We add 0.5 to edge->x here because it simplifies the rounding
// in triangle_sweep_edges() -- this doesn't change the ordering of 'x'
edge->x = (x1 << shift) + (1LU << (TRI_ITERATORS_BITS-1));
edge->x_incr = 0;
edge->y_top = ymin;
edge->y_bot = ymax;
if (ggl_likely(ymin <= ymax && dx)) {
edge->x_incr = gglDivQ16(dx, dy);
}
if (ggl_likely(y1 < ymin)) {
int32_t xadjust = (edge->x_incr * (ymin-y1)) >> TRI_FRACTION_BITS;
edge->x += xadjust;
}
++*pcount;
}
static void
triangle_sweep_edges( Edge* left,
Edge* right,
int ytop,
int ybot,
context_t* c )
{
int count = ((ybot - ytop)>>TRI_FRACTION_BITS) + 1;
if (count<=0) return;
// sort the edges horizontally
if ((left->x > right->x) ||
((left->x == right->x) && (left->x_incr > right->x_incr))) {
swap(left, right);
}
int left_x = left->x;
int right_x = right->x;
const int left_xi = left->x_incr;
const int right_xi = right->x_incr;
left->x += left_xi * count;
right->x += right_xi * count;
const int xmin = c->state.scissor.left;
const int xmax = c->state.scissor.right;
do {
// horizontal scissoring
const int32_t xl = max(left_x >> TRI_ITERATORS_BITS, xmin);
const int32_t xr = min(right_x >> TRI_ITERATORS_BITS, xmax);
left_x += left_xi;
right_x += right_xi;
// invoke the scanline rasterizer
if (ggl_likely(xl < xr)) {
c->iterators.xl = xl;
c->iterators.xr = xr;
c->scanline(c);
}
c->step_y(c);
} while (--count);
}
void trianglex_big(void* con,
const GGLcoord* v0, const GGLcoord* v1, const GGLcoord* v2)
{
GGL_CONTEXT(c, con);
Edge edges[3];
int num_edges = 0;
int32_t ymin = TRI_FROM_INT(c->state.scissor.top) + TRI_HALF;
int32_t ymax = TRI_FROM_INT(c->state.scissor.bottom) - TRI_HALF;
edge_setup( edges, &num_edges, v0, v1, ymin, ymax );
edge_setup( edges, &num_edges, v0, v2, ymin, ymax );
edge_setup( edges, &num_edges, v1, v2, ymin, ymax );
if (ggl_unlikely(num_edges<2)) // for really tiny triangles that don't
return; // cross any scanline centers
Edge* left = &edges[0];
Edge* right = &edges[1];
Edge* other = &edges[2];
int32_t y_top = min(left->y_top, right->y_top);
int32_t y_bot = max(left->y_bot, right->y_bot);
if (ggl_likely(num_edges==3)) {
y_top = min(y_top, edges[2].y_top);
y_bot = max(y_bot, edges[2].y_bot);
if (edges[0].y_top > y_top) {
other = &edges[0];
left = &edges[2];
} else if (edges[1].y_top > y_top) {
other = &edges[1];
right = &edges[2];
}
}
c->init_y(c, y_top >> TRI_FRACTION_BITS);
int32_t y_mid = min(left->y_bot, right->y_bot);
triangle_sweep_edges( left, right, y_top, y_mid, c );
// second scanline sweep loop, if necessary
y_mid += TRI_ONE;
if (y_mid <= y_bot) {
((left->y_bot == y_bot) ? right : left) = other;
if (other->y_top < y_mid) {
other->x += other->x_incr;
}
triangle_sweep_edges( left, right, y_mid, y_bot, c );
}
}
void aa_trianglex(void* con,
const GGLcoord* a, const GGLcoord* b, const GGLcoord* c)
{
GGLcoord pts[6] = { a[0], a[1], b[0], b[1], c[0], c[1] };
aapolyx(con, pts, 3);
}
// ----------------------------------------------------------------------------
#if 0
#pragma mark -
#endif
struct AAEdge
{
GGLfixed x; // edge position in 12.16 coordinates
GGLfixed x_incr; // on each y step, increment x by that amount
GGLfixed y_incr; // on each x step, increment y by that amount
int16_t y_top; // starting scanline, 12.4 format
    int16_t y_bot;      // ending scanline, 12.4 format
void dump();
};
void AAEdge::dump()
{
float tri = 1.0f / TRI_ONE;
float iter = 1.0f / (1<<TRI_ITERATORS_BITS);
float fix = 1.0f / FIXED_ONE;
ALOGD( "x=%08x (%.3f), "
"x_incr=%08x (%.3f), y_incr=%08x (%.3f), "
"y_top=%08x (%.3f), y_bot=%08x (%.3f) ",
x, x*fix,
x_incr, x_incr*iter,
y_incr, y_incr*iter,
y_top, y_top*tri,
y_bot, y_bot*tri );
}
// The following function sets up an edge. It assumes
// that ymin and ymax are already in the 'reduced'
// format.
static __attribute__((noinline))
void aa_edge_setup(
AAEdge* edges,
int* pcount,
const GGLcoord* p1,
const GGLcoord* p2,
int32_t ymin,
int32_t ymax )
{
const GGLfixed* top = p1;
const GGLfixed* bot = p2;
AAEdge* edge = edges + *pcount;
if (top[1] > bot[1])
swap(top, bot);
int y1 = top[1];
int y2 = bot[1];
int dy = y2 - y1;
if (dy==0 || y1>ymax || y2<ymin)
return;
if (y1 > ymin)
ymin = y1;
if (y2 < ymax)
ymax = y2;
const int x1 = top[0];
const int dx = bot[0] - x1;
const int shift = FIXED_BITS - TRI_FRACTION_BITS;
// setup edge fields
edge->x = x1 << shift;
edge->x_incr = 0;
edge->y_top = ymin;
edge->y_bot = ymax;
edge->y_incr = 0x7FFFFFFF;
if (ggl_likely(ymin <= ymax && dx)) {
edge->x_incr = gglDivQ16(dx, dy);
if (dx != 0) {
edge->y_incr = abs(gglDivQ16(dy, dx));
}
}
if (ggl_likely(y1 < ymin)) {
int32_t xadjust = (edge->x_incr * (ymin-y1))
>> (TRI_FRACTION_BITS + TRI_ITERATORS_BITS - FIXED_BITS);
edge->x += xadjust;
}
++*pcount;
}
typedef int (*compar_t)(const void*, const void*);
static int compare_edges(const AAEdge *e0, const AAEdge *e1) {
if (e0->y_top > e1->y_top) return 1;
if (e0->y_top < e1->y_top) return -1;
if (e0->x > e1->x) return 1;
if (e0->x < e1->x) return -1;
if (e0->x_incr > e1->x_incr) return 1;
if (e0->x_incr < e1->x_incr) return -1;
return 0; // same edges, should never happen
}
static inline
void SET_COVERAGE(int16_t*& p, int32_t value, ssize_t n)
{
android_memset16((uint16_t*)p, value, n*2);
p += n;
}
static inline
void ADD_COVERAGE(int16_t*& p, int32_t value)
{
value = *p + value;
if (value >= 0x8000)
value = 0x7FFF;
*p++ = value;
}
static inline
void SUB_COVERAGE(int16_t*& p, int32_t value)
{
value = *p - value;
value &= ~(value>>31);
*p++ = value;
}
void aapolyx(void* con,
const GGLcoord* pts, int count)
{
/*
* NOTE: This routine assumes that the polygon has been clipped to the
* viewport already, that is, no vertex lies outside of the framebuffer.
* If this happens, the code below won't corrupt memory but the
* coverage values may not be correct.
*/
GGL_CONTEXT(c, con);
// we do only quads for now (it's used for thick lines)
if ((count>4) || (count<2)) return;
// take scissor into account
const int xmin = c->state.scissor.left;
const int xmax = c->state.scissor.right;
if (xmin >= xmax) return;
// generate edges from the vertices
int32_t ymin = TRI_FROM_INT(c->state.scissor.top);
int32_t ymax = TRI_FROM_INT(c->state.scissor.bottom);
if (ymin >= ymax) return;
AAEdge edges[4];
int num_edges = 0;
GGLcoord const * p = pts;
for (int i=0 ; i<count-1 ; i++, p+=2) {
aa_edge_setup(edges, &num_edges, p, p+2, ymin, ymax);
}
aa_edge_setup(edges, &num_edges, p, pts, ymin, ymax );
if (ggl_unlikely(num_edges<2))
return;
// sort the edge list top to bottom, left to right.
qsort(edges, num_edges, sizeof(AAEdge), (compar_t)compare_edges);
int16_t* const covPtr = c->state.buffers.coverage;
memset(covPtr+xmin, 0, (xmax-xmin)*sizeof(*covPtr));
// now, sweep all edges in order
    // start with the first two edges. We know that they share their top
// vertex, by construction.
int i = 2;
AAEdge* left = &edges[0];
AAEdge* right = &edges[1];
int32_t yt = left->y_top;
GGLfixed l = left->x;
GGLfixed r = right->x;
int retire = 0;
int16_t* coverage;
// at this point we can initialize the rasterizer
c->init_y(c, yt>>TRI_FRACTION_BITS);
c->iterators.xl = xmax;
c->iterators.xr = xmin;
do {
int32_t y = min(min(left->y_bot, right->y_bot), TRI_FLOOR(yt + TRI_ONE));
const int32_t shift = TRI_FRACTION_BITS + TRI_ITERATORS_BITS - FIXED_BITS;
const int cf_shift = (1 + TRI_FRACTION_BITS*2 + TRI_ITERATORS_BITS - 15);
// compute xmin and xmax for the left edge
GGLfixed l_min = gglMulAddx(left->x_incr, y - left->y_top, left->x, shift);
GGLfixed l_max = l;
l = l_min;
if (l_min > l_max)
swap(l_min, l_max);
// compute xmin and xmax for the right edge
GGLfixed r_min = gglMulAddx(right->x_incr, y - right->y_top, right->x, shift);
GGLfixed r_max = r;
r = r_min;
if (r_min > r_max)
swap(r_min, r_max);
// make sure we're not touching coverage values outside of the
// framebuffer
l_min &= ~(l_min>>31);
r_min &= ~(r_min>>31);
l_max &= ~(l_max>>31);
r_max &= ~(r_max>>31);
if (gglFixedToIntFloor(l_min) >= xmax) l_min = gglIntToFixed(xmax)-1;
if (gglFixedToIntFloor(r_min) >= xmax) r_min = gglIntToFixed(xmax)-1;
if (gglFixedToIntCeil(l_max) >= xmax) l_max = gglIntToFixed(xmax)-1;
if (gglFixedToIntCeil(r_max) >= xmax) r_max = gglIntToFixed(xmax)-1;
// compute the integer versions of the above
const GGLfixed l_min_i = gglFloorx(l_min);
const GGLfixed l_max_i = gglCeilx (l_max);
const GGLfixed r_min_i = gglFloorx(r_min);
const GGLfixed r_max_i = gglCeilx (r_max);
// clip horizontally using the scissor
const int xml = max(xmin, gglFixedToIntFloor(l_min_i));
const int xmr = min(xmax, gglFixedToIntFloor(r_max_i));
// if we just stepped to a new scanline, render the previous one.
// and clear the coverage buffer
if (retire) {
if (c->iterators.xl < c->iterators.xr)
c->scanline(c);
c->step_y(c);
memset(covPtr+xmin, 0, (xmax-xmin)*sizeof(*covPtr));
c->iterators.xl = xml;
c->iterators.xr = xmr;
} else {
// update the horizontal range of this scanline
c->iterators.xl = min(c->iterators.xl, xml);
c->iterators.xr = max(c->iterators.xr, xmr);
}
coverage = covPtr + gglFixedToIntFloor(l_min_i);
if (l_min_i == gglFloorx(l_max)) {
/*
* fully traverse this pixel vertically
* l_max
* +-----/--+ yt
* | / |
* | / |
* | / |
* +-/------+ y
* l_min (l_min_i + TRI_ONE)
*/
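            /*
             * Worked numbers for this case (illustrative addition): with
             * l_min = x + 0.25 and l_max = x + 0.75 inside pixel x, and a
             * full band dy = TRI_ONE, the average horizontal distance from
             * the edge to the right pixel boundary is
             *     (dx >> 1) + (l_min_i + FIXED_ONE - l_max) = 0.25 + 0.25 = 0.5,
             * so cf ~= 0.5 * 1.0 ~= 0x4000 in 0.15 fixed point
             * (gglMulx(a, b, s) is roughly (a*b) >> s).
             */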
GGLfixed dx = l_max - l_min;
int32_t dy = y - yt;
int cf = gglMulx((dx >> 1) + (l_min_i + FIXED_ONE - l_max), dy,
FIXED_BITS + TRI_FRACTION_BITS - 15);
ADD_COVERAGE(coverage, cf);
// all pixels on the right have cf = 1.0
} else {
/*
* spans several pixels in one scanline
* l_max
* +--------+--/-----+ yt
* | |/ |
* | /| |
* | / | |
* +---/----+--------+ y
* l_min (l_min_i + TRI_ONE)
*/
// handle the first pixel separately...
const int32_t y_incr = left->y_incr;
int32_t dx = TRI_FROM_FIXED(l_min_i - l_min) + TRI_ONE;
int32_t cf = (dx * dx * y_incr) >> cf_shift;
ADD_COVERAGE(coverage, cf);
// following pixels get covered by y_incr, but we need
// to fix-up the cf to account for previous partial pixel
dx = TRI_FROM_FIXED(l_min - l_min_i);
cf -= (dx * dx * y_incr) >> cf_shift;
for (int x = l_min_i+FIXED_ONE ; x < l_max_i-FIXED_ONE ; x += FIXED_ONE) {
cf += y_incr >> (TRI_ITERATORS_BITS-15);
ADD_COVERAGE(coverage, cf);
}
// and the last pixel
dx = TRI_FROM_FIXED(l_max - l_max_i) - TRI_ONE;
cf += (dx * dx * y_incr) >> cf_shift;
ADD_COVERAGE(coverage, cf);
}
// now, fill up all fully covered pixels
coverage = covPtr + gglFixedToIntFloor(l_max_i);
int cf = ((y - yt) << (15 - TRI_FRACTION_BITS));
if (ggl_likely(cf >= 0x8000)) {
SET_COVERAGE(coverage, 0x7FFF, ((r_max - l_max_i)>>FIXED_BITS)+1);
} else {
for (int x=l_max_i ; x<r_max ; x+=FIXED_ONE) {
ADD_COVERAGE(coverage, cf);
}
}
// subtract the coverage of the right edge
coverage = covPtr + gglFixedToIntFloor(r_min_i);
if (r_min_i == gglFloorx(r_max)) {
GGLfixed dx = r_max - r_min;
int32_t dy = y - yt;
int cf = gglMulx((dx >> 1) + (r_min_i + FIXED_ONE - r_max), dy,
FIXED_BITS + TRI_FRACTION_BITS - 15);
SUB_COVERAGE(coverage, cf);
// all pixels on the right have cf = 1.0
} else {
// handle the first pixel separately...
const int32_t y_incr = right->y_incr;
int32_t dx = TRI_FROM_FIXED(r_min_i - r_min) + TRI_ONE;
int32_t cf = (dx * dx * y_incr) >> cf_shift;
SUB_COVERAGE(coverage, cf);
// following pixels get covered by y_incr, but we need
// to fix-up the cf to account for previous partial pixel
dx = TRI_FROM_FIXED(r_min - r_min_i);
cf -= (dx * dx * y_incr) >> cf_shift;
for (int x = r_min_i+FIXED_ONE ; x < r_max_i-FIXED_ONE ; x += FIXED_ONE) {
cf += y_incr >> (TRI_ITERATORS_BITS-15);
SUB_COVERAGE(coverage, cf);
}
// and the last pixel
dx = TRI_FROM_FIXED(r_max - r_max_i) - TRI_ONE;
cf += (dx * dx * y_incr) >> cf_shift;
SUB_COVERAGE(coverage, cf);
}
// did we reach the end of an edge? if so, get a new one.
if (y == left->y_bot || y == right->y_bot) {
// bail out if we're done
if (i>=num_edges)
break;
if (y == left->y_bot)
left = &edges[i++];
if (y == right->y_bot)
right = &edges[i++];
}
// next scanline
yt = y;
// did we just finish a scanline?
retire = (y << (32-TRI_FRACTION_BITS)) == 0;
} while (true);
// render the last scanline
if (c->iterators.xl < c->iterators.xr)
c->scanline(c);
}
}; // namespace android
|
// executeGoTestRunner uses the "go test" runner to execute a test with an identical
// name in the _supervised_in_test directory and checks expectations about its output.
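//
// Example (illustrative only): assert that the mirrored external test passed
// and did not panic:
//
//	executeGoTestRunner(t, []string{"--- PASS"}, []string{"panic:"})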
func executeGoTestRunner(t *testing.T, expectedLogs []string, unexpectedLogs []string) {
out, _ := exec.Command(
path.Join(runtime.GOROOT(), "bin", "go"),
"test",
"../_supervised_in_test/",
"-v",
"-run",
"^("+t.Name()+")$").CombinedOutput()
goTestOutput := string(out)
ribbon := "------------------ EXTERNAL TEST OUTPUT (" + t.Name() + ") ------------------"
debugMsgOutput := fmt.Sprintln(ribbon, "\n", goTestOutput, "\n", ribbon)
for _, logLine := range expectedLogs {
require.Truef(t, strings.Contains(goTestOutput, logLine), "log should contain: '%s'\n\n%s", logLine, debugMsgOutput)
}
for _, logLine := range unexpectedLogs {
require.Falsef(t, strings.Contains(goTestOutput, logLine), "log should not contain: '%s'\n\n%s", logLine, debugMsgOutput)
}
}
|
    def leave(self, slot):
        try:
            # slot may arrive as a string with a trailing newline; normalize it first
            slot_number = int(str(slot).strip())
            # free the first occupied slot that matches the requested number
            for position, car in enumerate(self.occupancy):
                if slot_number in car:
                    del self.occupancy[position]
                    del self.blocked[position]
                    break
            print("Slot number " + str(slot_number) + " is free")
            return slot
        except ValueError:
            print("Please enter an integer number of slots!")
            return -1
|
def Alyona(start, end, arr, n):
    temp = 0
    for i in range(start - 1, end):
        temp += arr[i]
    return temp


n, s = list(map(int, input().split()))
arr = list(map(int, input().split()))
ans = 0
for i in range(s):
    start, end = list(map(int, input().split()))
    res = Alyona(start, end, arr, n)
    if res > 0:
        ans += res
print(ans)
|
import axios from 'axios';
import type { NextApiRequest, NextApiResponse } from 'next';
export default async (req: NextApiRequest, res: NextApiResponse) => {
try {
await axios.post(
'https://squire-25q7c.ondigitalocean.app/action',
req.body
);
} catch (e) {
console.error('Notification request error: ', e);
}
res.send('Ok');
};
|
def put(self, *args, **kwargs):
self.before_put(*args, **kwargs)
super(DatastoreModel, self).put(*args, **kwargs)
self.after_put(*args, **kwargs)
|
package com.github.lihongjie.hibernate;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
public class StoreData {
public static void main(String[] args) {
//creating configuration object
Configuration cfg=new Configuration();
cfg.configure("hibernate.cfg.xml");//populates the data of the configuration file
        //creating session factory object
SessionFactory factory=cfg.buildSessionFactory();
//creating session object
Session session=factory.openSession();
//creating transaction object
Transaction t=session.beginTransaction();
Employee e1=new Employee();
e1.setId(115);
e1.setFirstName("sonoo");
e1.setLastName("jaiswal");
session.persist(e1);//persisting the object
        t.commit();//transaction is committed
session.close();
System.out.println("successfully saved");
}
}
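/*
 * Minimal hibernate.cfg.xml sketch for the configure() call above
 * (illustrative only; driver, URL, credentials, dialect and mapping file are
 * placeholder assumptions and must match the actual project):
 *
 *   <hibernate-configuration>
 *     <session-factory>
 *       <property name="hibernate.connection.driver_class">com.mysql.cj.jdbc.Driver</property>
 *       <property name="hibernate.connection.url">jdbc:mysql://localhost:3306/test</property>
 *       <property name="hibernate.connection.username">root</property>
 *       <property name="hibernate.connection.password">secret</property>
 *       <property name="hibernate.dialect">org.hibernate.dialect.MySQLDialect</property>
 *       <property name="hibernate.hbm2ddl.auto">update</property>
 *       <mapping resource="employee.hbm.xml"/>
 *     </session-factory>
 *   </hibernate-configuration>
 */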
|
import java.util.*;
/**
* Created by IntelliJ IDEA.
* User: LAPD
* Date: 24.5.2018 г.
* Time: 18:46 ч.
*/
public class _04CountSymbols {
public static void main(String[] args) {
Scanner console = new Scanner(System.in);
char[] input = console.nextLine().toCharArray();
Map<Character, Integer> symbols = new TreeMap<>();
for (char c : input) {
symbols.putIfAbsent(c, 0);
symbols.put(c, symbols.get(c) + 1);
}
for (Map.Entry<Character, Integer> entry : symbols.entrySet()) {
System.out.printf("%s: %d time/s%n",
entry.getKey(), entry.getValue());
}
}
}
|
/**
* A test object for jasmine.
*
* @author Roozbeh Farahbod
* @version $Revision: 9 $, Last modified: $Date: 2009-01-28 10:03:22 +0100 (Mi, 28 Jan 2009) $
*/
public class TestObject {
public Date date;
public String str;
public Double dbl;
public List<? extends Object> list;
public TestObject() {
date = new Date();
}
public TestObject(String s) {
str = s;
}
public TestObject(String s, Double d) {
str = s;
dbl = d;
}
public String toString() {
return "TestObject created (" + getStr() + ")";
}
public void doubleDouble() {
dbl = dbl * 2;
}
public String upperCase() {
return str.toUpperCase();
}
public String concat(String c) {
str = str + c;
return str;
}
public void assignDoubleToString() {
str = dbl.toString();
}
private String getStr() {
String result = "";
if (date != null)
result += "created at " + date;
if (str != null)
result += ", '" + str + "'";
if (dbl != null)
result += ", " + dbl;
        if (!result.isEmpty() && result.charAt(0) == ',')
            return result.substring(1);
        else
            return result;
}
}
|
std::string kernel_str = R"END(
//
)END";
template <typename T>
NonLinearConvolution<T>::NonLinearConvolution( T& convolution_kernel ) {
static_assert( is_stdlib_container< T >::value, "NonLinearConvolution can only accept pointers or container-like objects." );
static_assert( std::is_arithmetic<typename T::value_type>::value, "NonLinearConvolution must be made with arithmetic type" );
std::string source_directory = SOURCE_DIR;
std::string kernel_name = "/nonlinearconvolve.cl";
kernel_path = source_directory + kernel_name;
LoadCLKernel<typename T::value_type>( "NonLinearConvolve" );
uint kernel_size = convolution_kernel.size();
size_t kernel_bytes = kernel_size*sizeof( typename T::value_type );
auto kernel_ptr = convolution_kernel.data();
kernel_buff = cl::Buffer ( context, CL_MEM_READ_ONLY, kernel_bytes );
command_queue.enqueueWriteBuffer( kernel_buff, CL_TRUE, 0, kernel_bytes, kernel_ptr );
cl_int err;
err = kernel.setArg(1, kernel_buff);
OCL_DEBUG( err );
err = kernel.setArg(4, kernel_size);
OCL_DEBUG( err );
}
template <typename T>
NonLinearConvolution<T>::NonLinearConvolution( T* convolution_kernel ) {
static_assert( std::is_arithmetic<T>::value, "NonLinearConvolution must be made with arithmetic pointer type" );
std::string source_directory = SOURCE_DIR;
std::string kernel_name = "/linearconvolve.cl";
kernel_path = source_directory + kernel_name;
LoadCLKernel<T>( "LinearConvolve" );
uint kernel_size = convolution_kernel.size();
size_t kernel_bytes = kernel_size*sizeof( typename T::value_type );
auto kernel_ptr = convolution_kernel.data();
kernel_buff = cl::Buffer ( context, CL_MEM_READ_ONLY, kernel_bytes );
command_queue.enqueueWriteBuffer( kernel_buff, CL_TRUE, 0, kernel_bytes, kernel_ptr );
cl_int err;
err = kernel.setArg(1, kernel_buff);
OCL_DEBUG( err );
err = kernel.setArg(3, kernel_size);
OCL_DEBUG( err );
}
template <typename T>
NonLinearConvolution<T>::~NonLinearConvolution() {}
template <typename T>
void NonLinearConvolution<T>::Trigger() {
cl_int err;
err = command_queue.enqueueNDRangeKernel( kernel,cl::NullRange, cl::NDRange( signal_size ) );
OCL_DEBUG( err );
}
template <typename T>
void NonLinearConvolution<T>::SetSignal( cl::Buffer& signal_buff, uint sig_size ){
signal_size = sig_size;
cl_int* err_ptr = NULL;
scratch_buff = cl::Buffer ( context, CL_MEM_READ_WRITE, sig_size*sizeof(typename T::value_type), err_ptr );
OCL_DEBUG( err_ptr );
cl_int err = kernel.setArg(2, scratch_buff);
OCL_DEBUG( err );
err = kernel.setArg(0, signal_buff);
err = kernel.setArg(3, sig_size);
OCL_DEBUG( err );
}
template <typename T>
cl::Buffer& NonLinearConvolution<T>::ProcessedSignal() {
return scratch_buff;
}
template <typename T>
size_t NonLinearConvolution<T>::ProcessedSignalBytes() {
return signal_size*sizeof( typename T::value_type );
}
template <typename T>
size_t NonLinearConvolution<T>::ProcessedSignalSize(){
return signal_size;
}
|
/**
 * Utility to query the database, taking care of exceptions and transactions.
 * <p>
 * <strong>Requires Hibernate; please add it to your project, otherwise using this class will fail.
 * </strong>
* @author hypfvieh
* @since v11.0.0 - 2020-09-11
*/
public class QueryUtil implements Closeable {
private final Logger logger;
private Session defaultSession;
private final SessionFactory dbFactory;
public QueryUtil(SessionFactory _dbFactory) {
logger = System.getLogger(getClass().getName());
dbFactory = _dbFactory;
defaultSession = _dbFactory.openSession();
}
/**
* Execute the given Consumer in the database session.
* Optionally catch all exceptions which might be thrown.
* <br><br>
* Uses the default internal session to execute query.<br>
* If internal session was closed, a new session will be created and used as new default session
* <br>
* @param _toExecute consumer to execute
* @param _catchAllExceptions true to catch all exception, false to re-throw
*/
public void executeInSession(Consumer<Session> _toExecute, boolean _catchAllExceptions) {
executeSession(defaultSession, _toExecute, _catchAllExceptions);
}
/**
* Execute the given Consumer in the database session.
* Re-throws all exceptions.
*
* <br><br>
* Uses the default internal session to execute query.<br>
* If internal session was closed, a new session will be created and used as new default session
* <br>
* @param _toExecute consumer to execute
*/
public void executeInSession(Consumer<Session> _toExecute) {
executeInSession(_toExecute, false);
}
/**
* Execute the given Consumer in the database session.
* Catches all exceptions.
* <br><br>
* Uses the default internal session to execute query.<br>
* If internal session was closed, a new session will be created and used as new default session
* <br>
*
* @param _toExecute consumer to execute
*/
public void executeInSessionCatchAll(Consumer<Session> _toExecute) {
executeInSession(_toExecute, true);
}
/**
* Execute the given Consumer in the database session.
* Optionally catch all exceptions which might be thrown.
* <br><br>
* Creates a new session, executes the query and closes the session after query.
* <br>
* @param _toExecute consumer to execute
* @param _catchAllExceptions true to catch all exception, false to re-throw
*/
public void executeInNewSession(Consumer<Session> _toExecute, boolean _catchAllExceptions) {
executeSession(null, _toExecute, _catchAllExceptions);
}
/**
* Execute the given Consumer in the database session.
* Re-throws all exceptions.
*
* <br><br>
* Creates a new session, executes the query and closes the session after query.
* <br>
* @param _toExecute consumer to execute
*/
public void executeInNewSession(Consumer<Session> _toExecute) {
executeSession(null, _toExecute, false);
}
/**
* Execute the given Consumer in the database session.
* Catches all exceptions.
* <br><br>
     * Creates a new session, executes the query and closes the session after the query.
* <br>
*
* @param _toExecute consumer to execute
*/
public void executeInNewSessionCatchAll(Consumer<Session> _toExecute) {
executeInNewSession(_toExecute, true);
}
/**
* Execute the given Consumer in the database session.
* Optionally catch all exceptions which might be thrown.
* <br><br>
     * To execute the query in a new session automatically, use null as the _session parameter.
     * The new session will automatically be closed after the query.<br>
     * If _session is non-null, it is used as-is and will not be closed afterwards.
     *
* @param _session session on which the query is executed, if null new session is used
* @param _toExecute consumer to execute
* @param _catchAllExceptions true to catch all exception, false to re-throw
*/
public void executeSession(Session _session, Consumer<Session> _toExecute, boolean _catchAllExceptions) {
Session session = getOrCreateSession(_session);
try {
// reset any transaction which may be pending on this session
fixOpenTransactions(session);
_toExecute.accept(session);
if (_session == null && session.isConnected()) {
session.close();
}
} catch (RuntimeException _ex) {
logger.log(Level.ERROR, "Error while performing database action.", _ex);
fixOpenTransactions(session);
if (!_catchAllExceptions) {
throw _ex;
}
}
}
/**
* Execute the given Function in the database session.
* Optionally catch all exceptions which might be thrown.
*
* @param _toExecute consumer to execute
* @param _catchAllExceptions true to catch all exception, false to re-throw
*
* @return Result of whatever the function should return
* @param <T> type of action result
*/
public <T> T queryInSession(Function<Session, T> _toExecute, boolean _catchAllExceptions) {
return querySession(defaultSession, _toExecute, _catchAllExceptions);
}
/**
* Execute the given Function in the database session.
* Catches all exceptions.
*
* @param _toExecute consumer to execute
*
* @return Result of whatever the function should return
* @param <T> type of action result
*/
public <T> T queryInSessionCatchAll(Function<Session, T> _toExecute) {
return queryInSession(_toExecute, true);
}
/**
* Execute the given Function in the database session.
* Re-throws all exceptions.
*
* @param _toExecute consumer to execute
*
* @return Result of whatever the function should return
* @param <T> type of action result
*/
public <T> T queryInSession(Function<Session, T> _toExecute) {
return queryInSession(_toExecute, false);
}
/**
* Execute the given Function in the database session.
* Optionally catches or re-throws all exceptions.
* <br><br>
* Creates a new session, executes the query and closes the session after query.
* <br>
* @param _toExecute consumer to execute
* @param _catchAllExceptions true to catch all exceptions, false to re-throw
*
* @return Result of whatever the function should return
* @param <T> type of action result
*/
public <T> T queryInNewSession(Function<Session, T> _toExecute, boolean _catchAllExceptions) {
return querySession(null, _toExecute, _catchAllExceptions);
}
/**
* Execute the given Function in the database session.
* Re-throws all exceptions.
* <br><br>
* Creates a new session, executes the query and closes the session after query.
* <br>
* @param _toExecute consumer to execute
*
* @return Result of whatever the function should return
* @param <T> type of action result
*/
public <T> T queryInNewSession(Function<Session, T> _toExecute) {
return queryInNewSession(_toExecute, false);
}
/**
* Execute the given Function in the database session.
* Catches all exceptions.
* <br><br>
* Creates a new session, executes the query and closes the session after query.
* <br>
* @param _toExecute consumer to execute
*
* @return Result of whatever the function should return
* @param <T> type of action result
*/
public <T> T queryInNewSessionCatchAll(Function<Session, T> _toExecute) {
return queryInNewSession(_toExecute, true);
}
/**
* Execute the given Function in the given database session.
*
* @param _session session to execute function with
* @param _toExecute function to execute
* @param _catchAllExceptions true to catch all exceptions, false to re-throw
*
* @return Result of whatever the function should return
* @param <T> type of action result
*/
public <T> T querySession(Session _session, Function<Session, T> _toExecute, boolean _catchAllExceptions) {
Session session = getOrCreateSession(_session);
try {
fixOpenTransactions(session);
T result = _toExecute.apply(session);
if (_session == null && session.isConnected()) {
session.close();
}
return result;
} catch (RuntimeException _ex) {
logger.log(Level.ERROR, "Error while performing database action.", _ex);
fixOpenTransactions(session);
if (!_catchAllExceptions) {
throw _ex;
}
return null;
}
}
/**
     * Close the underlying default session.
     * Subsequent calls will transparently create and use a new default session.
*/
public void closeSession() {
if (defaultSession.isOpen()) {
logger.log(Level.INFO, "Closing DB session {}", defaultSession);
defaultSession.close();
}
}
/**
* Close open session and factory.
*/
@Override
public void close() {
closeSession();
dbFactory.close();
}
/**
* Check if session is valid (open) and reset all transactions which might be pending.
*/
private void fixOpenTransactions(Session _session) {
if (!_session.isOpen()) {
throw new IllegalStateException("Database session already closed");
}
if (_session.getTransaction().isActive()) {
_session.getTransaction().rollback();
}
}
/**
* Checks the given session.
* If session was closed a new session will be created.<br>
* If null is given, a new session will be created.<br>
* If the given session is still valid, this session will be returned.<br>
     * If the given session is not null, closed, and equal to the internal default session,
     * a new session will be created and configured as the new default session.
*
* @param _session session to check
* @return given session or new session, never null
*/
private Session getOrCreateSession(Session _session) {
Session session;
if (_session == null) {
session = dbFactory.openSession();
} else {
if (!_session.isOpen()) { // session was closed before, create new session
session = dbFactory.openSession();
// the given session was the default session and it was closed
// update the default session to the new session (old closed session cannot be used anymore)
if (defaultSession == _session) {
defaultSession = session;
}
} else {
session = _session;
}
}
return session;
}
/**
     * Execute the given query and return the result, or null on failure or no result.
*
* @param <T> type
* @param _query query to execute
*
* @return instance of type or null
*/
public static <T> T getResultOrNull(TypedQuery<T> _query) {
return getResultOrDefault(_query, null);
}
/**
* Execute the given query and return the result or the default on failure or no result.
*
* @param <T> type
* @param _query query to execute
* @param _default default to provide if no result
* @return instance of type or default value of same type
*/
public static <T> T getResultOrDefault(TypedQuery<T> _query, T _default) {
try {
T singleResult = _query.getSingleResult();
return singleResult == null && _default != null ? _default : singleResult;
} catch (NoResultException _ex) {
return _default;
}
}
}
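/*
 * Usage sketch (illustrative only; assumes an existing Hibernate SessionFactory
 * named "factory" and a mapped entity class "Employee"):
 *
 *   try (QueryUtil util = new QueryUtil(factory)) {
 *       // write inside an explicit transaction, re-throwing any exception
 *       util.executeInSession(session -> {
 *           Transaction tx = session.beginTransaction();
 *           session.persist(new Employee());
 *           tx.commit();
 *       });
 *
 *       // read a scalar, falling back to 0 when the query has no result
 *       Long count = util.queryInSession(session -> QueryUtil.getResultOrDefault(
 *               session.createQuery("select count(e) from Employee e", Long.class), 0L));
 *   }
 */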
|
/**
 * DataLoadWorker is used to load data from all kinds of sources.
 * Its input is a data Scanner; the output is usually a List.
*/
public class DataLoadWorker extends AbstractWorkerActor {
private static Logger log = LoggerFactory.getLogger(DataLoadWorker.class);
/**
     * Default splitter used to split input records. A single shared instance avoids creating a new Splitter via Splitter.on for every record.
*/
private static final Splitter DEFAULT_SPLITTER = Splitter.on(CommonConstants.DEFAULT_COLUMN_SEPARATOR);
/**
* Basic input node count for NN model
*/
private int inputNodeCount;
/**
     * {@link #candidateCount} is used to check whether any variable is selected. If {@link #inputNodeCount} equals
     * {@link #candidateCount}, it means either no column is selected or all columns are selected.
*/
private int candidateCount;
public DataLoadWorker(ModelConfig modelConfig, List<ColumnConfig> columnConfigList, ActorRef parentActorRef,
ActorRef nextActorRef) {
super(modelConfig, columnConfigList, parentActorRef, nextActorRef);
int[] inputOutputIndex = DTrainUtils.getInputOutputCandidateCounts(this.modelConfig.getNormalizeType(), this.columnConfigList);
this.inputNodeCount = inputOutputIndex[0] == 0 ? inputOutputIndex[2] : inputOutputIndex[0];
this.candidateCount = inputOutputIndex[2];
}
/*
* (non-Javadoc)
*
* @see akka.actor.UntypedActor#onReceive(java.lang.Object)
*/
@Override
public void handleMsg(Object message) {
if(message instanceof ScanStatsRawDataMessage) {
log.info("DataLoaderActor Starting ...");
ScanStatsRawDataMessage msg = (ScanStatsRawDataMessage) message;
Scanner scanner = msg.getScanner();
int totalMsgCnt = msg.getTotalMsgCnt();
List<String> rawDataList = readDataIntoList(scanner);
log.info("DataLoaderActor Finished: Loaded " + rawDataList.size() + " Records.");
nextActorRef.tell(new StatsPartRawDataMessage(totalMsgCnt, rawDataList), getSelf());
} else if(message instanceof ScanNormInputDataMessage) {
log.info("DataLoaderActor Starting ...");
ScanNormInputDataMessage msg = (ScanNormInputDataMessage) message;
Scanner scanner = msg.getScanner();
int totalMsgCnt = msg.getTotalMsgCnt();
List<String> rawDataList = readDataIntoList(scanner);
log.info("DataLoaderActor Finished: Loaded " + rawDataList.size() + " Records.");
nextActorRef.tell(new NormPartRawDataMessage(totalMsgCnt, rawDataList), getSelf());
} else if(message instanceof ScanTrainDataMessage) {
ScanTrainDataMessage msg = (ScanTrainDataMessage) message;
Scanner scanner = msg.getScanner();
int totalMsgCnt = msg.getTotalMsgCnt();
List<MLDataPair> mlDataPairList = readTrainingData(scanner, msg.isDryRun());
log.info("DataLoaderActor Finished: Loaded " + mlDataPairList.size() + " Records for Training.");
nextActorRef.tell(new TrainPartDataMessage(totalMsgCnt, msg.isDryRun(), mlDataPairList), getSelf());
} else if(message instanceof ScanEvalDataMessage) {
log.info("DataLoaderActor Starting ...");
ScanEvalDataMessage msg = (ScanEvalDataMessage) message;
Scanner scanner = msg.getScanner();
int streamId = msg.getStreamId();
int totalStreamCnt = msg.getTotalStreamCnt();
splitDataIntoMultiMessages(streamId, totalStreamCnt, scanner,
Environment.getInt(Environment.RECORD_CNT_PER_MESSAGE, 100000));
/*
* List<String> evalDataList = readDataIntoList(scanner);
*
* log.info("DataLoaderActor Finished: Loaded " + evalDataList.size() + " Records.");
* nextActorRef.tell( new RunModelDataMessage(totalMsgCnt, evalDataList), getSelf());
*/
} else {
unhandled(message);
}
}
private long splitDataIntoMultiMessages(int streamId, int totalStreamCnt, Scanner scanner, int recordCntPerMsg) {
long recordCnt = 0;
int msgId = 0;
List<String> rawDataList = new LinkedList<String>();
while(scanner.hasNextLine()) {
String raw = scanner.nextLine();
recordCnt++;
rawDataList.add(raw);
if(recordCnt % recordCntPerMsg == 0) {
log.info("Read " + recordCnt + " Records.");
nextActorRef.tell(new RunModelDataMessage(streamId, totalStreamCnt, (++msgId), false, rawDataList),
getSelf());
rawDataList = new LinkedList<String>();
}
}
log.info("Totally read " + recordCnt + " Records.");
        // anyhow, send the last message so the next actor knows it's done
nextActorRef.tell(new RunModelDataMessage(streamId, totalStreamCnt, (++msgId), true, rawDataList), getSelf());
return recordCnt;
}
/**
* Read data into String list
*
* @param scanner
* - input partition
* @return list of data
*/
public List<String> readDataIntoList(Scanner scanner) {
List<String> rawDataList = new LinkedList<String>();
int cntTotal = 0;
while(scanner.hasNextLine()) {
String raw = scanner.nextLine();
rawDataList.add(raw);
cntTotal++;
if(cntTotal % 100000 == 0) {
log.info("Read " + cntTotal + " records.");
}
}
log.info("Totally read " + cntTotal + " records.");
return rawDataList;
}
/**
* Read the normalized training data for model training
*
* @param scanner
* - input partition
* @param isDryRun
     *            - whether this is a dry run (no real training data is parsed)
* @return List of data
*/
public List<MLDataPair> readTrainingData(Scanner scanner, boolean isDryRun) {
List<MLDataPair> mlDataPairList = new ArrayList<MLDataPair>();
int numSelected = 0;
for(ColumnConfig config: columnConfigList) {
if(config.isFinalSelect()) {
numSelected++;
}
}
int cnt = 0;
while(scanner.hasNextLine()) {
if((cnt++) % 100000 == 0) {
log.info("Read " + (cnt) + " Records.");
}
String line = scanner.nextLine();
if(isDryRun) {
MLDataPair dummyPair = new BasicMLDataPair(new BasicMLData(new double[1]), new BasicMLData(
new double[1]));
mlDataPairList.add(dummyPair);
continue;
}
// the normalized training data is separated by | by default
double[] inputs = new double[numSelected];
double[] ideal = new double[1];
double significance = 0.0d;
int index = 0, inputsIndex = 0, outputIndex = 0;
for(String input: DEFAULT_SPLITTER.split(line.trim())) {
double doubleValue = NumberFormatUtils.getDouble(input.trim(), 0.0d);
if(index == this.columnConfigList.size()) {
significance = NumberFormatUtils
.getDouble(input.trim(), CommonConstants.DEFAULT_SIGNIFICANCE_VALUE);
break;
} else {
ColumnConfig columnConfig = this.columnConfigList.get(index);
if(columnConfig != null && columnConfig.isTarget()) {
ideal[outputIndex++] = doubleValue;
} else {
if(this.inputNodeCount == this.candidateCount) {
// all variables are not set final-select
if(CommonUtils.isGoodCandidate(columnConfig, super.hasCandidates)) {
inputs[inputsIndex++] = doubleValue;
}
} else {
// final select some variables
if(columnConfig != null && !columnConfig.isMeta() && !columnConfig.isTarget()
&& columnConfig.isFinalSelect()) {
inputs[inputsIndex++] = doubleValue;
}
}
}
}
index++;
}
MLDataPair pair = new BasicMLDataPair(new BasicMLData(inputs), new BasicMLData(ideal));
pair.setSignificance(significance);
mlDataPairList.add(pair);
}
return mlDataPairList;
}
}
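/*
 * Parsing illustration for readTrainingData (hypothetical record, added for
 * clarity): with a columnConfigList of [target, varA, varB] where varA and varB
 * are final-selected, the normalized line
 *
 *     1|0.25|-0.75|1.0
 *
 * yields ideal = [1.0] and inputs = [0.25, -0.75]; the fourth field lies beyond
 * columnConfigList.size() and is therefore read as the record's significance
 * (weight), defaulting to CommonConstants.DEFAULT_SIGNIFICANCE_VALUE when it
 * cannot be parsed.
 */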
|
def rank_stations_by_nta(
self,
df_rides: pd.DataFrame,
df_station_geo: pd.DataFrame,
df_stations_per_nta: pd.DataFrame,
) -> pd.DataFrame:
df_rides = df_rides.astype({"start_station_id": str})
df_joined = df_rides.merge(
df_station_geo,
how="left",
left_on="start_station_id",
right_on="station_id",
)
stations_ranked_by_nta = (
df_joined[["uuid", "ntacode", "start_station_id"]]
.groupby(["ntacode", "start_station_id"])
.count()
.sort_values(by=["ntacode", "uuid"], ascending=False)
.groupby(["ntacode"])
.rank(method="dense", ascending=False, pct=False)
.reset_index()
.rename({"uuid": "station_rank"}, axis=1)
.merge(df_stations_per_nta, on="ntacode", how="left")
.set_index("start_station_id")
)
return stations_ranked_by_nta
|
// Geometric Tools, Inc.
// http://www.geometrictools.com
// Copyright (c) 1998-2006. All Rights Reserved
//
// The Wild Magic Version 4 Restricted Libraries source code is supplied
// under the terms of the license agreement
// http://www.geometrictools.com/License/Wm4RestrictedLicense.pdf
// and may not be copied or disclosed except in accordance with the terms
// of that agreement.
#include "ConvexHull3D.h"
#include "Wm4ConvexHull3.h"
WM4_WINDOW_APPLICATION(ConvexHull3D);
//----------------------------------------------------------------------------
ConvexHull3D::ConvexHull3D ()
:
WindowApplication3("ConvexHull3D",0,0,640,480,ColorRGBA::WHITE)
{
System::InsertDirectory("Data");
m_iFileQuantity = 46;
m_iCurrentFile = 1;
m_iVQuantity = 0;
m_akVertex = 0;
m_akColor = 0;
m_iLimitedQuantity = 0;
m_eQueryType = Query::QT_INT64;
m_pkHull = 0;
System::Strcpy(m_acHeader,STRING_SIZE,"query type = INT64");
}
//----------------------------------------------------------------------------
bool ConvexHull3D::OnInitialize ()
{
if (!WindowApplication3::OnInitialize())
{
return false;
}
m_spkCamera->SetFrustum(-0.55f,0.55f,-0.4125f,0.4125f,1.0f,10000.0f);
Vector3f kCDir(0.0f,0.0f,1.0f);
Vector3f kCUp(0.0f,1.0f,0.0f);
Vector3f kCRight = kCDir.Cross(kCUp);
m_spkCamera->SetAxes(kCDir,kCUp,kCRight);
m_kCuller.SetCamera(m_spkCamera);
// center-and-fit the scene
CreateScene();
m_spkScene->UpdateGS();
m_spkScene->Local.SetTranslate(-m_spkScene->WorldBound->GetCenter());
Vector3f kCLoc = -3.0f*m_spkScene->WorldBound->GetRadius()*kCDir;
m_spkCamera->SetLocation(kCLoc);
// initial update of objects
m_spkScene->UpdateGS();
m_spkScene->UpdateRS();
// initial culling of scene
m_kCuller.ComputeVisibleSet(m_spkScene);
InitializeCameraMotion(1.0f,0.01f);
InitializeObjectMotion(m_spkScene);
return true;
}
//----------------------------------------------------------------------------
void ConvexHull3D::OnTerminate ()
{
m_spkScene = 0;
m_spkHull = 0;
m_spkSphere = 0;
m_spkWireframe = 0;
WM4_DELETE[] m_akVertex;
WM4_DELETE[] m_akColor;
WM4_DELETE m_pkHull;
WindowApplication3::OnTerminate();
}
//----------------------------------------------------------------------------
void ConvexHull3D::OnIdle ()
{
MeasureTime();
if (MoveCamera())
{
m_kCuller.ComputeVisibleSet(m_spkScene);
}
if (MoveObject())
{
m_spkScene->UpdateGS();
m_kCuller.ComputeVisibleSet(m_spkScene);
}
m_pkRenderer->ClearBuffers();
if (m_pkRenderer->BeginScene())
{
m_pkRenderer->DrawScene(m_kCuller.GetVisibleSet());
m_pkRenderer->Draw(8,16,ColorRGBA::BLACK,m_acHeader);
m_pkRenderer->Draw(8,GetHeight()-8,ColorRGBA::BLACK,m_acFooter);
m_pkRenderer->EndScene();
}
m_pkRenderer->DisplayBackBuffer();
UpdateFrameCount();
}
//----------------------------------------------------------------------------
bool ConvexHull3D::OnKeyDown (unsigned char ucKey, int iX, int iY)
{
    // Intentional bypass of WindowApplication3::OnKeyDown.
if (WindowApplication::OnKeyDown(ucKey,iX,iY))
{
return true;
}
switch (ucKey)
{
// load a new data set
case 'd':
case 'D':
if (++m_iCurrentFile == m_iFileQuantity)
{
m_iCurrentFile = 1;
}
LoadData();
return true;
// query type INT64
case 'n':
case 'N':
m_eQueryType = Query::QT_INT64;
System::Strcpy(m_acHeader,STRING_SIZE,"query type = INT64");
RegenerateHull();
return true;
// query type INTEGER
case 'i':
case 'I':
m_eQueryType = Query::QT_INTEGER;
System::Strcpy(m_acHeader,STRING_SIZE,"query type = INTEGER");
RegenerateHull();
return true;
// query type RATIONAL
case 'r':
case 'R':
m_eQueryType = Query::QT_RATIONAL;
System::Strcpy(m_acHeader,STRING_SIZE,"query type = RATIONAL");
RegenerateHull();
return true;
// query type REAL (float)
case 'f':
case 'F':
m_eQueryType = Query::QT_REAL;
System::Strcpy(m_acHeader,STRING_SIZE,"query type = REAL");
RegenerateHull();
return true;
case 'w':
case 'W':
m_spkWireframe->Enabled = !m_spkWireframe->Enabled;
return true;
    // Read the notes in ConvexHull3D.h about how to use m_iLimitedQuantity.
case '+':
case '=':
if (m_iLimitedQuantity < m_iVQuantity)
{
for (int i = 2; i < m_iLimitedQuantity+2; i++)
{
m_spkScene->DetachChildAt(i);
}
m_iLimitedQuantity++;
CreateHull();
}
return true;
case '-':
case '_':
if (m_iLimitedQuantity > 3)
{
for (int i = 2; i < m_iLimitedQuantity+2; i++)
{
m_spkScene->DetachChildAt(i);
}
m_iLimitedQuantity--;
CreateHull();
}
return true;
}
return false;
}
//----------------------------------------------------------------------------
bool ConvexHull3D::OnMouseClick (int iButton, int iState, int iX, int iY,
unsigned int uiModifiers)
{
WindowApplication3::OnMouseClick(iButton,iState,iX,iY,uiModifiers);
if (iButton == MOUSE_RIGHT_BUTTON)
{
Ray3f kRay;
m_spkCamera->GetPickRay(iX,iY,GetWidth(),GetHeight(),kRay);
Spatial::PickArray kResults;
m_spkScene->DoPick(kRay,kResults);
if (kResults.size() > 0)
{
TriMesh::PickRecord* pkRec = (TriMesh::PickRecord*)kResults[0];
float fTMin = pkRec->T;
int i;
for (i = 1; i < (int)kResults.size(); i++)
{
if (kResults[i]->T < fTMin)
{
fTMin = kResults[i]->T;
pkRec = (TriMesh::PickRecord*)kResults[i];
}
}
TriMeshPtr spkMesh = StaticCast<TriMesh>(pkRec->IObject);
float fMaxBary = pkRec->Bary0;
int iIndex = 0;
if (pkRec->Bary1 > fMaxBary)
{
fMaxBary = pkRec->Bary1;
iIndex = 1;
}
if (pkRec->Bary2 > fMaxBary)
{
fMaxBary = pkRec->Bary2;
iIndex = 2;
}
int* aiIndex = spkMesh->IBuffer->GetData();
System::Sprintf(m_acFooter,STRING_SIZE,
"intr = %d, tri = %d, ver = %d",(int)kResults.size(),
pkRec->Triangle,aiIndex[3*pkRec->Triangle+iIndex]);
for (i = 0; i < (int)kResults.size(); i++)
{
WM4_DELETE kResults[i];
}
}
}
return true;
}
//----------------------------------------------------------------------------
void ConvexHull3D::CreateScene ()
{
m_spkScene = WM4_NEW Node;
m_spkWireframe = WM4_NEW WireframeState;
m_spkScene->AttachGlobalState(m_spkWireframe);
CullState* pkCS = WM4_NEW CullState;
pkCS->Enabled = false;
m_spkScene->AttachGlobalState(pkCS);
Attributes kAttr;
kAttr.SetPChannels(3);
kAttr.SetCChannels(0,3);
TriMesh* pkSphere = StandardMesh(kAttr).Sphere(8,8,0.01f);
pkSphere->AttachEffect(WM4_NEW VertexColor3Effect);
m_spkScene->SetChild(1,pkSphere);
// The current file is "Data/data01.txt".
LoadData();
}
//----------------------------------------------------------------------------
void ConvexHull3D::LoadData ()
{
const size_t uiSize = 32;
char acFilename[uiSize];
if (m_iCurrentFile < 10)
{
System::Sprintf(acFilename,uiSize,"data0%d.txt",m_iCurrentFile);
}
else
{
System::Sprintf(acFilename,uiSize,"data%d.txt",m_iCurrentFile);
}
const char* acPath = System::GetPath(acFilename,System::SM_READ);
assert(acPath);
std::ifstream kIStr(acPath);
assert(kIStr);
kIStr >> m_iVQuantity;
WM4_DELETE[] m_akVertex;
m_akVertex = WM4_NEW Vector3f[m_iVQuantity];
int i;
for (i = 0; i < m_iVQuantity; i++)
{
kIStr >> m_akVertex[i][0];
kIStr >> m_akVertex[i][1];
kIStr >> m_akVertex[i][2];
}
WM4_DELETE[] m_akColor;
m_akColor = WM4_NEW ColorRGB[m_iVQuantity];
for (i = 0; i < m_iVQuantity; i++)
{
m_akColor[i] = ColorRGB(Mathf::UnitRandom(),Mathf::UnitRandom(),
Mathf::UnitRandom());
}
// Discard previous scene spheres.
for (i = 2; i < m_iLimitedQuantity+2; i++)
{
m_spkScene->DetachChildAt(i);
}
m_iLimitedQuantity = m_iVQuantity;
CreateHull();
}
//----------------------------------------------------------------------------
void ConvexHull3D::CreateHull ()
{
int iVQuantity = m_iLimitedQuantity;
Attributes kAttr;
kAttr.SetPChannels(3);
kAttr.SetCChannels(0,3);
VertexBuffer* pkVBuffer = WM4_NEW VertexBuffer(kAttr,iVQuantity);
int i;
for (i = 0; i < iVQuantity; i++)
{
pkVBuffer->Position3(i) = m_akVertex[i];
pkVBuffer->Color3(0,i) = m_akColor[i];
}
RegenerateHull();
int iTQuantity = 0;
TriMesh* pkMesh = 0;
switch (m_pkHull->GetDimension())
{
case 0:
System::Sprintf(m_acFooter,STRING_SIZE,"point: v = %d, t = %d",
iVQuantity,iTQuantity);
return;
case 1:
System::Sprintf(m_acFooter,STRING_SIZE,"linear: v = %d, t = %d",
iVQuantity,iTQuantity);
return;
case 2:
{
iTQuantity = (int)(m_pkHull->GetSimplexQuantity() - 2);
const int* aiHull = m_pkHull->GetIndices();
IndexBuffer* pkIBuffer = WM4_NEW IndexBuffer(3*iTQuantity);
int* piIndex = pkIBuffer->GetData();
int i0 = 1, i1 = 2;
for (int iT = 0; iT < iTQuantity; iT++)
{
*piIndex++ = aiHull[0];
*piIndex++ = aiHull[i0];
*piIndex++ = aiHull[i1];
i0++;
i1++;
}
pkMesh = WM4_NEW TriMesh(pkVBuffer,pkIBuffer);
pkMesh->AttachEffect(WM4_NEW VertexColor3Effect);
System::Sprintf(m_acFooter,STRING_SIZE,"planar: v = %d, t = %d",
iVQuantity,iTQuantity);
break;
}
case 3:
iTQuantity = m_pkHull->GetSimplexQuantity();
const int* aiHullIndex = m_pkHull->GetIndices();
IndexBuffer* pkIBuffer = WM4_NEW IndexBuffer(3*iTQuantity);
int* aiIndex = pkIBuffer->GetData();
memcpy(aiIndex,aiHullIndex,3*iTQuantity*sizeof(int));
pkMesh = WM4_NEW TriMesh(pkVBuffer,pkIBuffer);
pkMesh->AttachEffect(WM4_NEW VertexColor3Effect);
System::Sprintf(m_acFooter,STRING_SIZE,"spatial: v = %d, t = %d",
iVQuantity,iTQuantity);
break;
}
// translate to center of mass
Vector3f kCenter = m_akVertex[0];
for (i = 1; i < m_iLimitedQuantity; i++)
{
kCenter += m_akVertex[i];
}
kCenter /= (float)m_iLimitedQuantity;
pkMesh->Local.SetTranslate(-kCenter);
m_spkScene->SetChild(0,pkMesh);
for (i = 2; i < m_iLimitedQuantity+2; i++)
{
m_spkScene->SetChild(i,CreateSphere(m_akVertex[i-2]-kCenter));
}
TriMesh* pkSphere = StaticCast<TriMesh>(m_spkScene->GetChild(1));
pkSphere->Local.SetTranslate(m_akVertex[m_iLimitedQuantity-1]-kCenter);
// update the scene, center-and-fit to frustum
m_spkScene->UpdateGS(0.0f);
m_spkScene->UpdateRS();
m_spkScene->Local.SetTranslate(-m_spkScene->WorldBound->GetCenter());
Vector3f kCLoc = -3.0f*m_spkScene->WorldBound->GetRadius()*
m_spkCamera->GetDVector();
m_spkCamera->SetLocation(kCLoc);
m_kCuller.ComputeVisibleSet(m_spkScene);
}
//----------------------------------------------------------------------------
void ConvexHull3D::RegenerateHull ()
{
WM4_DELETE m_pkHull;
m_pkHull = WM4_NEW ConvexHull3f(m_iLimitedQuantity,m_akVertex,0.001f,
false,m_eQueryType);
if (m_pkHull->GetDimension() == 2)
{
ConvexHull3f* pkSave = (ConvexHull3f*)m_pkHull;
m_pkHull = pkSave->GetConvexHull2();
WM4_DELETE pkSave;
}
}
//----------------------------------------------------------------------------
TriMesh* ConvexHull3D::CreateSphere (const Vector3f& rkCenter,
ColorRGB kColor, float fRadius)
{
Attributes kAttr;
kAttr.SetPChannels(3);
kAttr.SetCChannels(0,3);
TriMesh* pkSphere = StandardMesh(kAttr).Sphere(8,8,fRadius);
int iVQuantity = pkSphere->VBuffer->GetVertexQuantity();
for (int i = 0; i < iVQuantity; i++)
{
pkSphere->VBuffer->Color3(0,i) = kColor;
}
pkSphere->AttachEffect(WM4_NEW VertexColor3Effect);
return pkSphere;
}
//----------------------------------------------------------------------------
|
// Remove a NumBox n from an array of NumBox ns of size len
// It removes ALL NumBox that are equal to n, not just one
void remove_numbox(NumBox *ns, int *len, NumBox n)
{
int i;
for (i = 0; i != *len; i++)
{
if (eql(ns[i], n))
{
ns[i--] = ns[--(*len)];
}
}
}
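/*
 * Illustration (hypothetical values, treating NumBox as a plain number for the
 * sake of the example): given ns = {3, 7, 3, 5} and len = 4,
 * remove_numbox(ns, &len, 3) leaves len = 2 and ns = {5, 7}. Matching elements
 * are overwritten by the current last element, so element order is not preserved.
 */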
|
# Games: count the matches where the host's home color equals the guest's away color
teams = int(input())
g = 0
t_colors = []
for i in range(teams):
    t_colors.append([int(x) for x in input().split()])
for i in t_colors:
    # number of teams whose away (guest) color matches this host team's home color
    g += [x[1] for x in t_colors].count(i[0])
print(g)
|
// src/constant/index.ts
export enum PorkerKind {
squade = 'squade',
heart = 'heart',
diamond = 'diamond',
club = 'club'
}
|
The latest Associated Press-GfK poll holds bad news for President Barack Obama, but as the November elections draw closer, there are ominous signs for congressional Democrats as well.
A look at the key findings from the March poll on this year’s election and the burgeoning 2016 presidential field.
GOP GAINING GROUND
Preferences for control of Congress are tight, but Republicans have gained on Democrats since January. Thirty-six percent in last month’s poll said they would rather see the Democrats in charge of Congress and 37 percent chose Republicans.
Democrats held a narrow advantage on that question in January, when 39 percent favored the Democrats and 32 percent the Republicans.
The shift stems largely from a change among those most interested in politics.
In the new poll, registered voters who are most strongly interested in politics favored the Republicans by 14 percentage points, 51 percent to 37 percent. In January, this group was about evenly split, with 42 percent preferring Democrats and 45 percent the Republicans.
That’s not the only positive sign in the poll for the Republicans.
Favorable views of the GOP have improved, with 38 percent overall now saying they hold a favorable impression of the Party. Republicans’ positive view of their own party has increased from 57 percent in January to 72 percent now.
Even impressions of the tea party movement have shifted more positive in recent months. GOP favorability still lags behind that of the Democrats, however, with 43 percent holding a favorable view of the Democratic Party.
CONGRESSIONAL APPROVAL STAGNANT
Congressional approval is stagnant and negative, with just 16 percent saying they approve while 82 percent disapprove. Among those who have “a great deal” or “quite a bit” of interest in politics, 90 percent disapprove, including 61 percent who strongly disapprove.
Nearly 4 in 10 (39 percent) would like to see their own member of Congress re-elected, an improvement since January. Among registered voters who say they pay a great deal of attention to politics, 44 percent say they would like to see their current member re-elected, compared with 33 percent in January.
Here, there’s a glimmer of hope for Democrats. Those who consider themselves Democrats are now more likely than Republicans to say their own member of Congress ought to be re-elected. Not all Democrats live in districts represented by Democrats, of course, but it represents a shift in opinion since January.
___
WHO’S IN CHARGE
With control of Congress divided between the parties, most Americans say Obama has a lot or quite a bit of control over what the federal government does, outpacing the share who say the Democrats or Republicans in Congress are in control.
Partisans tend to see the opposition as the controlling force, with Republicans more apt than Democrats to see Obama in charge, and Democrats more likely to say the Republicans have the upper hand.
Six in 10 (62 percent) of those with a great deal or quite a bit of interest in politics say Obama has a lot or quite a bit of control of what the federal government does. Just half (51 percent) of those closely attuned to politics say Democrats in Congress exert a similar influence over what the federal government does and 40 percent say the same about Republicans in Congress.
There’s little change since December in which party Americans trust more to handle major issues.
Democrats’ strong points are on handling social issues, including same-sex marriage (31 percent prefer Democrats, 17 percent the Republicans) and abortion (30 percent prefer Democrats, 22 percent Republicans). Republicans have the edge on protecting the country, 34 percent to 16 percent, a slightly wider margin than they held on the question in December.
___
LOOKING TO 2016? NOT SO MUCH
The poll measured impressions of 19 potential 2016 presidential candidates, and found that a majority of those surveyed offered an opinion about just seven of them. The other 12 have quite a lot of introducing themselves to do if they are to make a run for the White House.
Most people said either they hadn’t heard of them or skipped the question.
Hillary Rodham Clinton generated the most positive response of the bunch, with 46 percent viewing the former secretary of state and first lady favorably and 39 percent unfavorably.
Among potential GOP contenders, none generated a net positive reaction from the public, with 2012 vice presidential nominee Paul Ryan faring best — 27 percent viewed him favorably, 29 percent unfavorably.
Among Republicans, majorities have favorable impressions of Ryan and former Arkansas Gov. Mike Huckabee. But in a sign that the past isn’t always prologue, nearly half of Republicans say they don’t know enough to have an opinion about former Sen. Rick Santorum of Pennsylvania, a large factor in the 2012 nomination fight.
|
How Does the Supplemental Nutrition Assistance Program Affect the U.S. Economy?
The impact of the Supplemental Nutrition Assistance Program (SNAP) on the national economy is examined using a general equilibrium model, comparing measures of the economy from 2010 to a simulation of that economy without SNAP. Without the SNAP program, the overall size of the economy hardly differs—demand for labor increases slightly. However, households that would be eligible for SNAP experience a net loss. They have 5.5 percent less disposable income while ineligible households have approximately 1 percent more income without SNAP, and output of products eligible for purchase with SNAP funds declines by approximately one billion dollars.
|
package usecase
import "github.com/lovung/GoCleanArchitecture/app/usecase/dto"
type SampleUseCase interface {
Create(candidate dto.CreateSampleRequest) (created dto.OneSampleResponse, err error)
GetByID(id interface{}) (exist dto.OneSampleResponse, err error)
}
|
// Copyright information can be found in the file named COPYING
// located in the root directory of this distribution.
#ifndef _SFXFMODPROJECT_H_
#define _SFXFMODPROJECT_H_
#ifndef _SIMDATABLOCK_H_
#include "console/simDatablock.h"
#endif
#ifndef _CONSOLETYPES_H_
#include "console/consoleTypes.h"
#endif
#ifndef _TVECTOR_H_
#include "core/util/tVector.h"
#endif
#ifndef _SFXSYSTEM_H_
#include "sfx/sfxSystem.h"
#endif
#include "fmod_event.h"
class SFXFMODEvent;
class SFXFMODEventGroup;
class SimGroup;
/// Datablock that loads an FMOD Designer project.
///
/// All events in the project are automatically made available as SFXFMODEvent track
/// datablock instances. Each event object is automatically named by substituting
/// the slashes in its fully qualified name with underscores and prepending the project
/// name to this; event 'group1/group2/event' in the SFXFMODProject instance called
/// 'project', for example, will be available as a TorqueScript object called
/// 'project_group1_group2_event'.
///
/// This class also works in a client-server environment where the server is
/// not running FMOD. The event objects are cached in an auto-generated TorqueScript
/// file alongside the .fev project file (x/y.fev -> x/y.fev.cs) which, when available
/// and up-to-date, does not require FMOD for the server-side objects to correctly
/// initialize.
///
/// To establish good loading behavior and for good memory management, it is necessary to
/// wisely distribute events to groups and to manually pre-load groups. The best solution
/// probably is to have one group of common events that is loaded during game startup and
/// then have one event group for each level in the game that is only loaded for the
/// duration of its particular level.
///
/// SFXFMODProject will propagate its networking model to all its contents. This means
/// that if the project is a non-networked datablock, then all event groups, events, and
/// descriptions contained in the project will also be non-networked datablocks.
///
/// It usually makes the most sense to use non-networked ("client-only") datablocks as
/// otherwise the FMOD datablocks will be purged on each mission load.
///
/// @note Only one project's music data can ever be loaded at any one time.
/// Usually you wouldn't want more than a single SFXFMODProject instance in your game
/// data. Also, only a single media path can be set through the designer API so when
/// loading multiple projects, note that each project will set the media path to its
/// own directory. For data loading to work, all projects thus need to be placed in
/// the same directory.
///
class SFXFMODProject : public SimDataBlock
{
public:
typedef SimDataBlock Parent;
friend class SFXFMODEventGroup; // _addGroup
friend class SFXFMODEvent; // _addEvent
protected:
///
String mFileName;
///
String mMediaPath;
///
SFXFMODEventGroup* mRootGroups;
/// A flat list of all the groups in this project.
Vector< SFXFMODEventGroup* > mGroups;
/// A flat list of all the events in the project.
Vector< SFXFMODEvent* > mEvents;
///
FMOD_EVENTPROJECT* mHandle;
///
void _onSystemEvent( SFXSystemEventType event );
///
void _clear();
///
bool _load();
///
void _addEvent( SFXFMODEvent* event );
///
void _addGroup( SFXFMODEventGroup* group );
///
void _removeEvent( SFXFMODEvent* event );
///
void _removeGroup( SFXFMODEventGroup* group );
public:
///
SFXFMODProject();
virtual ~SFXFMODProject();
///
void acquire( bool recursive = false );
///
void release();
///
const String& getFileName() const { return mFileName; }
// SimDataBlock.
virtual bool onAdd();
virtual void onRemove();
virtual bool preload( bool server, String& errorStr );
virtual void packData( BitStream* stream );
virtual void unpackData( BitStream* stream );
static void initPersistFields();
DECLARE_CONOBJECT( SFXFMODProject );
DECLARE_CATEGORY( "SFX FMOD" );
DECLARE_DESCRIPTION( "An FMOD Designer project." );
};
#endif // !_SFXFMODPROJECT_H_
|
/**
* @author Tal Shani
*/
final class PromiseNative<T> extends JavaScriptObject implements Promise<T> {
protected PromiseNative() {
}
public final native Promise<T> nativeThen(PromiseHandler<? super T> onFulfilled, PromiseHandler<Throwable> onRejected) /*-{
var onFulfilledFn, onRejectedFn;
if (onFulfilled) {
onFulfilledFn = $entry(function (value) {
onFulfilled.@noo.promise.PromiseHandler::handle(Ljava/lang/Object;)(value);
});
}
if (onRejected) {
onRejectedFn = $entry(function (value) {
onRejected.@noo.promise.PromiseHandler::handle(Ljava/lang/Object;)(value);
});
}
return this.then(onFulfilledFn, onRejectedFn);
}-*/;
public final native <R> Promise<R> nativeThen(PromiseTransformingHandler<R, T> onFulfilled, PromiseTransformingHandler<R, Throwable> onRejected) /*-{
var onFulfilledFn, onRejectedFn;
if (onFulfilled) {
onFulfilledFn = $entry(function (value) {
return onFulfilled.@noo.promise.PromiseTransformingHandler::handle(Ljava/lang/Object;)(value);
});
}
if (onRejected) {
onRejectedFn = $entry(function (value) {
return onRejected.@noo.promise.PromiseTransformingHandler::handle(Ljava/lang/Object;)(value);
});
}
return this.then(onFulfilledFn, onRejectedFn);
}-*/;
public static native <T> Promise<T> createPromise(final PromiseResolver<T> resolver) /*-{
return new Promise($entry(function (resolve, reject) {
@noo.promise.PromiseNative::resolve(Lcom/google/gwt/core/client/JavaScriptObject;Lcom/google/gwt/core/client/JavaScriptObject;Lnoo/promise/PromiseResolver;)(resolve, reject, resolver);
}));
}-*/;
public static <T> void resolve(JavaScriptObject resolve, JavaScriptObject reject, PromiseResolver<T> resolver) {
resolver.resolve(new NativeCallback<T>(resolve, reject));
}
@Override
public final Promise<T> then(PromiseHandler<? super T> onFulfilled) {
return nativeThen(onFulfilled, null);
}
@Override
public Promise<T> then(PromiseHandler<? super T> onFulfilled, PromiseHandler<Throwable> onRejected) {
return nativeThen(onFulfilled, onRejected);
}
@Override
public final Promise<T> catchIt(PromiseHandler<Throwable> onRejected) {
return nativeThen(null, onRejected);
}
@Override
public final <R> Promise<R> then(PromiseTransformingHandler<R, T> onFulfilled) {
return nativeThen(onFulfilled, null);
}
@Override
public final Promise<T> catchIt(PromiseTransformingHandler<T, Throwable> onRejected) {
return nativeThen(null, onRejected);
}
private static class NativeCallback<T> implements PromiseCallback<T> {
private final JavaScriptObject resolveFn;
private final JavaScriptObject rejectFn;
private NativeCallback(JavaScriptObject resolveFn, JavaScriptObject rejectFn) {
this.resolveFn = resolveFn;
this.rejectFn = rejectFn;
}
private native void nativeResolve(Object o) /*-{
var fn = this.@noo.promise.PromiseNative$NativeCallback::resolveFn;
if (fn) fn(o);
}-*/;
private native void nativeReject(Object o) /*-{
var fn = this.@noo.promise.PromiseNative$NativeCallback::rejectFn;
if (fn) fn(o);
}-*/;
@Override
public void resolveValue(T value) {
nativeResolve(value);
}
@Override
public void resolvePromise(Promise<T> value) {
nativeResolve(value);
}
@Override
public void reject(Throwable reason) {
nativeReject(reason);
}
}
}
|
Jury acquits Highline teacher accused of child rape
Woman cleared after 1 day of jury deliberation
Darcy Smith, pictured in an Office of Superintendent of Public Instruction photo.
A King County jury has acquitted a Highline teacher accused of sexual misconduct with a former student.
Empaneled since February to judge teacher Darcy Smith, the jury returned its verdict after one day of deliberation. Smith was acquitted on all counts.
Smith, 42, had been accused of having sex with a former student who was living in her home. She was charged with third-degree child rape, a crime once known as statutory rape.
Responding to the verdict, defense attorney Brad Meryhew said his client will now begin the hard work of rebuilding her reputation. Smith’s supporters, gathered at the Maleng Regional Justice Center in Kent for the verdict, hugged and celebrated after it was returned just before noon Thursday.
“Darcy and her family want to thank all of those former students, colleagues, and friends who have expressed their unwavering support and belief in her innocence throughout a difficult time,” Meryhew said.
King County prosecutors charged Smith in February 2015 with child rape, claiming she had sex with a boy living in her home more than five years before. The allegations derailed what had been a remarkable career for Smith, a 15-year veteran teacher who had been teaching sixth grade at McMicken Heights Elementary when the allegations surfaced.
Smith has been on administrative leave since August 2014. Highline School District spokeswoman Catherine Carbone Rogers said the district will now complete a review, which was preempted by the police investigation and court proceedings.
"We have not had the opportunity to speak with Ms. Smith since this process began, and that will be our next step," Carbone Rogers said by email.
Meryhew said his client and her supporters were vindicated by the jury’s verdict.
“This unanimous verdict by a jury of her peers allows Mrs. Smith and her friends and family to begin the process of restoring the reputation she built over the last 15 years as an award-winning teacher dedicated to helping students and families,” the defense attorney said.
Prosecutor's Office spokesman Dan Donohoe praised the jurors.
"The jury gave this case careful consideration and we respect the decision of the jury," Donohoe said by email.
Thursday’s acquittal ends a prosecution that would’ve seen Smith forced to register as a sex offender. Smith had not been jailed.
CORRECTION: Defense attorney Brad Meryhew's last name was misspelled in an earlier version of this story.
Seattlepi.com reporter Levi Pulkkinen can be reached at 206-448-8348 or [email protected]. Follow Levi on Twitter at twitter.com/levipulk.
|
from Crypto.Cipher import AES
from scapy.layers.bluetooth import *
from scapy.all import hexdump
class SM():
ia = None
ia_type = None
ra = None
ra_type = None
tk = None
prnd = None
rrnd = None
pcnf = None
preq = None
prsp = None
ltk = None
def __init__(self):
self.tk = '\x00' * 16
self.rrnd = '\x00' * 16
# calculates a confirm
def calc_cfm(self, master=0):
if master:
rand = self.prnd
else:
rand = self.rrnd
return ''.join(bt_crypto_c1(self.tk, rand, self.prsp, self.preq, self.ia_type, self.ia, self.ra_type, self.ra))
def verify_random(self):
confirm = self.calc_cfm(1)
if self.pcnf != confirm:
return False
self.ltk = bt_crypto_s1(self.tk, self.prnd, self.rrnd)
return True
    def __repr__(self):
        # __repr__ must return a string, so build the dump instead of printing it
        return ''.join(self._dump(label) for label in
                       ('ia', 'ra', 'prnd', 'rrnd', 'pcnf', 'prsp', 'preq'))
    def _dump(self, label):
        return "s.%s = '%s'\n" % (label, ''.join("\\x{:02x}".format(ord(c)) for c in self.__dict__[label]))
def u128_xor(a1, a2):
return ''.join(chr(ord(a) ^ ord(b)) for a, b in zip(a1, a2))
def bt_crypto_e(key, plaintext):
aes = AES.new(key)
return aes.encrypt(plaintext)
def bt_crypto_c1(k, r, pres, preq, iat, ia, rat, ra):
p1 = ''.join((pres, preq, chr(rat), chr(iat)))
p2 = ''.join(("\x00\x00\x00\x00", ia, ra))
res = u128_xor(r, p1)
res = bt_crypto_e(k, res)
res = u128_xor(res, p2)
return bt_crypto_e(k, res)
def bt_crypto_s1(k, r1, r2):
res = ''.join((r2[8:16], r1[8:16]))
return bt_crypto_e(k, res)
class SM_Protocol:
stack = None
sm = None
def __init__(self, stack, sm):
self.stack = stack
self.sm = sm
def marshall_command(self, command):
code = command.sm_command
# pairing request
if code == 1:
# save the pairing request, reversed
self.sm.preq = str(command[SM_Hdr])[::-1]
auth = command.authentication
p = SM_Hdr()/SM_Pairing_Response(authentication=auth, initiator_key_distribution=0, responder_key_distribution=0)
# save the response, reversed
self.sm.prsp = str(p[SM_Hdr])[::-1]
self.stack.raw_l2cap(p)
# pairing confirm
elif code == 3:
# save the confirm
self.sm.pcnf = str(command[SM_Confirm])[::-1]
# calculate and send our own confirm
confirm = self.sm.calc_cfm()
p = SM_Hdr()/SM_Confirm(confirm=confirm[::-1])
self.stack.raw_l2cap(p)
# pairing random
elif code == 4:
self.sm.prnd = command.random[::-1]
res = self.sm.verify_random()
if not res:
raise Exception("pairing error")
# send random
self.stack.raw_l2cap(SM_Hdr()/SM_Random(random=self.sm.rrnd))
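# --- illustrative sketch (not part of the module above; every input value is
# made up) ---
# Shows how the c1/s1 primitives fit together: c1 yields the 16-byte pairing
# confirm and s1 derives the key, the same calls SM.calc_cfm/verify_random
# make with the real SMP packet bytes.
if __name__ == '__main__':
    tk = '\x00' * 16                  # Just Works pairing: all-zero TK
    srand = '\x55' * 16               # hypothetical pairing random
    preq = '\x01' * 7                 # hypothetical pairing request (reversed)
    prsp = '\x02' * 7                 # hypothetical pairing response (reversed)
    ia, ra = '\xaa' * 6, '\xbb' * 6   # hypothetical initiator/responder addresses
    confirm = bt_crypto_c1(tk, srand, prsp, preq, 0, ia, 0, ra)
    ltk = bt_crypto_s1(tk, srand, srand)
    assert len(confirm) == 16 and len(ltk) == 16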
|
/**
* Show the differences between two tables.
*
* @param actualResult the actual result from an operation
* @param expectedResult the expected result from the operation
* @param maxDiffLines the maximum number of lines in the output
* @param itemsToSkip classes of changes to ignore
* @return a pair containing an error description String and the first different line
*/
@NotNull
static Pair<String, Long> diffInternal(Table actualResult, Table expectedResult, long maxDiffLines,
EnumSet<DiffItems> itemsToSkip) {
final List<String> issues = new ArrayList<>();
long firstDifferentPosition = Long.MAX_VALUE;
if (expectedResult == null) {
throw new IllegalArgumentException("Can not pass null expected result to TableTools.diff!");
}
if (actualResult == null) {
issues.add("Actual result is null!");
return makeResult(issues, maxDiffLines, firstDifferentPosition);
}
if (actualResult.size() != expectedResult.size()) {
issues.add("Result table has size " + actualResult.size() + " vs. expected " + expectedResult.size());
if (issues.size() >= maxDiffLines) {
return makeResult(issues, maxDiffLines, firstDifferentPosition);
}
}
final Map<String, ? extends ColumnSource> actualNameToColumnSource = actualResult.getColumnSourceMap();
final Map<String, ? extends ColumnSource> expectedNameToColumnSource = expectedResult.getColumnSourceMap();
final String[] actualColumnNames =
actualResult.getDefinition().getColumnNames().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
final String[] expectedColumnNames =
expectedResult.getDefinition().getColumnNames().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
for (final String actualColumnName : actualColumnNames) {
if (!expectedNameToColumnSource.containsKey(actualColumnName)) {
issues.add("Column " + actualColumnName + " not expected");
if (issues.size() >= maxDiffLines) {
return makeResult(issues, maxDiffLines, firstDifferentPosition);
}
}
}
final Set<String> columnNamesForDiff = new LinkedHashSet<>();
for (int ci = 0; ci < expectedColumnNames.length; ci++) {
final String expectedColumnName = expectedColumnNames[ci];
final ColumnSource expectedColumnSource = expectedNameToColumnSource.get(expectedColumnName);
final ColumnSource actualColumnSource = actualNameToColumnSource.get(expectedColumnName);
if (actualColumnSource == null) {
issues.add("Expected column " + expectedColumnName + " not found");
} else {
if (actualColumnNames.length - 1 < ci) {
if (!itemsToSkip.contains(DiffItems.ColumnsOrder)) {
issues.add("Expected column " + expectedColumnName + " is found but not on expected position ("
+ ci + ")");
}
} else {
if (!expectedColumnName.equals(actualColumnNames[ci])) {
if (!itemsToSkip.contains(DiffItems.ColumnsOrder)) {
issues.add("Expected column " + expectedColumnName
+ " is found but not on expected position (" + ci + ")");
}
}
}
final Class<?> expectedType = expectedColumnSource.getType();
final Class<?> actualType = actualColumnSource.getType();
if (actualType != expectedType) {
issues.add("Expected type of " + expectedColumnName + " is " + expectedType + " actual type is "
+ actualType);
} else {
columnNamesForDiff.add(expectedColumnName);
}
}
if (issues.size() >= maxDiffLines) {
return makeResult(issues, maxDiffLines, firstDifferentPosition);
}
}
try (final SafeCloseableList safeCloseables = new SafeCloseableList();
final SharedContext expectedSharedContext = SharedContext.makeSharedContext();
final SharedContext actualSharedContext = SharedContext.makeSharedContext();
final WritableBooleanChunk equalValues = WritableBooleanChunk.makeWritableChunk(chunkSize)) {
final ColumnDiffContext[] columnContexts = columnNamesForDiff.stream()
.map(name -> safeCloseables.add(new ColumnDiffContext(name, expectedNameToColumnSource.get(name),
expectedSharedContext, actualNameToColumnSource.get(name), actualSharedContext)))
.toArray(ColumnDiffContext[]::new);
try (final RowSequence.Iterator expectedIterator = expectedResult.getRowSet().getRowSequenceIterator();
final RowSequence.Iterator actualIterator = actualResult.getRowSet().getRowSequenceIterator()) {
int columnsRemaining = columnContexts.length;
long position = 0;
while (expectedIterator.hasMore() && actualIterator.hasMore() && columnsRemaining > 0) {
final RowSequence expectedChunkOk = expectedIterator.getNextRowSequenceWithLength(chunkSize);
final RowSequence actualChunkOk = actualIterator.getNextRowSequenceWithLength(chunkSize);
for (int ci = 0; ci < columnContexts.length; ++ci) {
final ColumnDiffContext columnContext = columnContexts[ci];
if (columnContext == null) {
continue;
}
final long columnFirstDifferentPosition = columnContext.diffChunk(expectedChunkOk,
actualChunkOk, equalValues, itemsToSkip, issues, position);
if (columnFirstDifferentPosition == -1L) {
continue;
}
--columnsRemaining;
columnContexts[ci] = null;
firstDifferentPosition = Math.min(columnFirstDifferentPosition, firstDifferentPosition);
if (issues.size() >= maxDiffLines) {
return makeResult(issues, maxDiffLines, firstDifferentPosition);
}
}
expectedSharedContext.reset();
actualSharedContext.reset();
position += chunkSize;
}
}
}
return makeResult(issues, maxDiffLines, firstDifferentPosition);
}
|
<reponame>darkjedi9922/graph-web
import { createStore } from 'redux'
import { AbstractCanvasObject, NodeMap, EdgeMap } from './types';
import * as appAPI from './desktop';
import { textWidth } from './libs/gmath';
interface ProjectData {
nodes: NodeMap,
nodeAutoSize: boolean,
edges: EdgeMap,
oriented: boolean,
nextNodeId: number,
nextEdgeId: number,
transparentNodes: boolean
}
export interface AppState {
project: {
file?: string,
data: ProjectData // this is what will be saved in the project file.
},
selectedObject?: AbstractCanvasObject
}
export const ADD_NODE = 'ADD_NODE';
export const ADD_EDGE = 'ADD_EDGE';
export const CURVE_EDGE = 'CURVE_EDGE';
export const END_EDGE = 'END_EDGE';
export const MOVE_NODE = 'MOVE_NODE';
export const OPEN_PROJECT = 'OPEN_PROJECT';
export const SAVE_PROJECT = 'SAVE_PROJECT';
export const SAVE_PROJECT_AS = 'SAVE_PROJECT_AS';
export const SELECT_OBJECT = 'SELECT_OBJECT';
export const SET_NODE_TEXT = 'SET_NODE_TEXT';
export const SET_NODE_AUTOSIZE = 'SET_NODE_AUTOSIZE';
export const SET_EDGE_TEXT = 'SET_EDGE_TEXT';
export const SET_ORIENTED = 'SET_ORIENTED';
export const SET_TRANSPARENT_NODES = 'SET_TRANSPARENT_NODES';
export const REMOVE_NODE = 'REMOVE_NODE';
export const REMOVE_EDGE = 'REMOVE_EDGE';
const initialState: AppState = {
project: {
file: null,
data: {
nodes: {},
nodeAutoSize: false,
edges: {},
oriented: false,
nextNodeId: 1,
nextEdgeId: 1,
transparentNodes: false
}
},
selectedObject: null
};
const appReducer = function(state = initialState, action): AppState {
let newState = {...state};
switch (action.type) {
case ADD_NODE:
var nodes = { ...state.project.data.nodes };
var text = state.project.data.nextNodeId.toString();
nodes[state.project.data.nextNodeId] = {
id: state.project.data.nextNodeId,
text: text,
radius: state.project.data.nodeAutoSize ?
textWidth(text, '16px "Times New Roman"') / 2 + 5 : 25,
x: action.pos.x,
y: action.pos.y,
startEdges: [],
endEdges: []
};
newState.project.data.nodes = nodes;
newState.project.data.nextNodeId += 1;
break;
case ADD_EDGE:
var { startNodeId, endNodeId } = action;
if (startNodeId === endNodeId) break;
const newEdgeId = state.project.data.nextEdgeId;
const newEdge = {
startNodeId: startNodeId,
endNodeId: endNodeId,
text: `Edge ${newEdgeId}`,
curve: 0
};
var { nodes, edges } = { ...state.project.data };
edges[newEdgeId] = newEdge;
nodes[startNodeId].startEdges.push(newEdgeId);
// Если endNodeId = null, то ребро в процессе добавления и у ему пока
// не назначен конечный узел.
if (endNodeId !== null) nodes[endNodeId].endEdges.push(newEdgeId);
newState.project.data.nodes = nodes;
newState.project.data.edges = edges;
newState.project.data.nextEdgeId += 1;
break;
case CURVE_EDGE:
var edges = { ...state.project.data.edges };
edges[action.id].curve = action.curve;
newState.project.data.edges = edges;
break;
case END_EDGE:
var { nodes, edges } = { ...state.project.data };
nodes[action.endNodeId].endEdges.push(action.edgeId);
edges[action.edgeId].endNodeId = action.endNodeId;
newState.project.data.nodes = nodes;
newState.project.data.edges = edges;
break;
case MOVE_NODE:
var nodes = { ...state.project.data.nodes };
nodes[action.id].x = action.pos.x;
nodes[action.id].y = action.pos.y;
newState.project.data.nodes = nodes;
break;
case OPEN_PROJECT:
const openResult = appAPI.open();
// If opening is cancelled, contents is an empty.
if (!openResult) return;
// Мог быть выбран файл неправильного формата.
try {
// Parse can throw an error.
const parsed = JSON.parse(openResult.contents);
// We must be sure that the parsed object is a graph state.
if (!isProjectData(parsed)) throw 'The object is not a graph state.';
// Further everything is ok.
newState.project.file = openResult.file;
newState.project.data = parsed;
} catch (e) {
console.error(e);
}
break;
case SAVE_PROJECT:
if (!state.project.file) {
console.error('There is no project file opened.');
break;
}
var serializedData = serializeSaveData(state.project.data);
appAPI.save(state.project.file, serializedData);
break;
case SAVE_PROJECT_AS:
// If saving is cancelled, savedFile is an empty.
var serializedData = serializeSaveData(state.project.data);
newState.project.file = appAPI.saveAs(serializedData);
break;
case SELECT_OBJECT:
newState.selectedObject = action.object;
break;
case SET_NODE_TEXT:
var nodes = { ...state.project.data.nodes };
nodes[action.id].text = action.text;
if (state.project.data.nodeAutoSize)
nodes[action.id].radius = textWidth(action.text,
'16px "Times New Roman"') / 2 + 5;
newState.project.data.nodes = nodes;
break;
case SET_NODE_AUTOSIZE:
newState.project.data.nodeAutoSize = action.enabled;
var nodes = { ...state.project.data.nodes };
for (const id in nodes) {
if (nodes.hasOwnProperty(id)) {
const node = nodes[id];
if (action.enabled)
node.radius = textWidth(node.text,
'16px "Times New Roman"') / 2 + 5;
else node.radius = 25;
}
}
newState.project.data.nodes = nodes;
break;
case SET_EDGE_TEXT:
var edges = { ...state.project.data.edges };
edges[action.id].text = action.text;
newState.project.data.edges = edges;
break;
case SET_ORIENTED:
newState.project.data.oriented = action.oriented;
break;
case SET_TRANSPARENT_NODES:
newState.project.data.transparentNodes = action.enabled;
break;
case REMOVE_NODE:
var nodes = { ...state.project.data.nodes };
var edges = { ...state.project.data.edges };
// Удаляем ребра, подсоединенные к этому узлу.
const remover = (edgeId) => {
var removeResult = removeEdge(nodes, edges, edgeId);
nodes = removeResult.nodes;
edges = removeResult.edges;
};
nodes[action.id].startEdges.map(remover);
nodes[action.id].endEdges.map(remover);
delete nodes[action.id];
newState.project.data.nodes = nodes;
newState.project.data.edges = edges;
break;
case REMOVE_EDGE:
var {nodes, edges} = removeEdge(
state.project.data.nodes,
state.project.data.edges,
action.id);
newState.project.data.nodes = nodes;
newState.project.data.edges = edges;
break;
}
return newState;
}
function removeEdge(nodes: NodeMap, edges: EdgeMap, id: number): {
nodes: NodeMap,
edges: EdgeMap
} {
var resultEdges = { ...edges };
var resultNodes = { ...nodes };
var startNode = resultNodes[resultEdges[id].startNodeId];
var endNode = resultNodes[resultEdges[id].endNodeId];
// Удаляем это ребро из его начального и конечного узла.
const filter = edgeId => edgeId !== id;
startNode.startEdges = startNode.startEdges.filter(filter);
endNode.endEdges = endNode.endEdges.filter(filter);
delete resultEdges[id];
return { nodes: resultNodes, edges: resultEdges };
}
function serializeSaveData(data: ProjectData): string {
return JSON.stringify({
...data,
// Добавим идентификатор нашего формата, чтобы проверять его при
// открытии файла (вдруг нам подсунули не то).
stateId: 'graphstate'
})
}
function isProjectData(value: object): boolean {
return value['stateId'] === 'graphstate';
}
const store = createStore(appReducer);
export default store;
|
def to_tokens(self, op_token_table):
raise NotImplementedError
|
Sergei Panteleevich Mavrodi (Russian: Серге́й Пантеле́евич Мавро́ди; 11 August 1955 – 26 March 2018) was a Russian financial fraudster, financial criminal and previously a deputy of the State Duma. He was the founder of the МММ, a scheme that defrauded millions of people around the globe.
In 2007 Sergei Mavrodi was convicted in a Russian court of defrauding 10,000 investors out of 110 million rubles ($4.3 million).[1][2][3] Mavrodi claimed he was not the beneficiary of the donations and that he was not used to a flamboyant lifestyle. The charge of which he was later convicted was tax fraud[4] though he claimed that the MMM scheme was not a business, but instead a mutual donation program which there is no law against.[5] There were interviews after his release where he claimed MMM Global was behind the bitcoin price rally.[6]
Early life [ edit ]
Mavrodi was born in Moscow. His mother is Russian. His father is half Greek (therefore his name is Mavrodi), half Ukrainian.[7] During his childhood he was diagnosed with a bilateral heart defect.[8] Mavrodi recalled that in school he was proficient in mathematics and physics.[8] He studied at the Moscow State Institute of Electronics and Mathematics.[8] In 1983 he was arrested for ten days due to illegal economic activity.[8]
MMM activities [ edit ]
In 1989 Mavrodi founded MMM.[9][10][11] He was then imprisoned on charges of financial fraud involving MMM.[12]
In 1994 Mavrodi was elected deputy of State Duma obtaining parliamentary immunity, just three weeks after he was released from prison. He ran in a by-election in the Mytishchinsky District to replace deputy Andrei Aidzerdzis, who was gunned down earlier that year.[12] Mavrodi took nearly 27% of the votes with a turnout of only 30%.[12]
Mavrodi declared MMM bankrupt on 22 December 1997, then disappeared, and was on the run until his arrest in 2003.[13]
On 28 April 2007, a Moscow court sentenced him to four and a half years in a penal colony. The court also fined him 10,000 rubles ($390).[13]
In January 2011, Mavrodi launched another pyramid scheme called MMM-2011, asking investors to buy so-called Mavro currency units. He frankly described it as a pyramid, adding "It is a naked scheme, nothing more ... People interact with each other and give each other money. For no reason!"[14] Mavrodi said that his goal with MMM-2011 was to destroy the current financial system, which he considered unfair, which would allow something new to take its place. MMM-2011 was able to function openly as Ponzi schemes and financial pyramids are not illegal under Russian law.[15] In May 2012 he froze the operation and announced that there would be no more payouts.[16]
In 2011 he launched a similar scheme in India, called MMM India, again stating clearly that the vehicle was a pyramid.[17] He has also launched MMM in China.[18] He was reported to be trying to expand his operations into Western Europe, Canada, and Latin America.[15] As of September 2015 it had spread rapidly in South Africa with a claimed 1% per day or 30% per month interest rate scheme[19] and warnings from both the South African and Russian Communist Parties for people not to participate in it.[20] In early 2016, he continued the same model in Zimbabwe (the accounts were frozen in September 2016), and later, in Nigeria (accounts frozen in December 2016).[21][22] MMM Nigeria resumed activities on 13 January 2017.[23]
Stock Generation [ edit ]
In 1998 Mavrodi created Stock Generation,[24][25] allegedly a classic pyramid scheme presented as a "virtual stock market game".[1] The website ran from 1998 to early 2000. It was based around trading non-existent companies' stocks in a form of the "stock exchange game" on the company's site, stockgeneration.com. Despite a bold-letter warning on the main page that the site was not a real stock exchange, between 20,000 and 275,000 people, according to various estimates, fell for the promised 200% returns and lost their money. According to U.S. Securities and Exchange Commission (SEC), losses of victims were at least US$5.5 million.[26] The Massachusetts district court initially found that U.S. Securities and Exchange Commission was unable to cite Stock Generation's founders and owners for securities violations. However, the United States Court of Appeals reversed this decision in 2001, concluding that the SEC alleged sufficient facts to state a triable claim.[27] In 2003 the SEC obtained permanent injunctions against SG Ltd. and relief defendants SG Perfect and SG Trading, which profited from the disbursement of funds fraudulently gained by SG Ltd.[28][29]
Death [ edit ]
Mavrodi died of heart problems at the age of 62 at a hospital in Moscow.[30][8][31] The funeral was funded by the investors of МММ-2011.[32]
Book [ edit ]
In 2008 Mavrodi published the book Temptation. In May 2008, bailiffs arrested Mavrodi's rights to this book. Seven thousand copies of the book were published.[3][33]
Film [ edit ]
A feature film The PyraMMMid, based on the story of the same name by Mavrodi was released in Russia on 7 April 2011. The role of Sergei Mavrodi (in the film he is called Sergei Mamontov) is played by Aleksei Serebryakov.
Theater [ edit ]
His play was performed in theatres; it was directed by Maria Kuznetsova.[34][35]
|
import numpy as np

def calc_iou(gt_bbox, pred_bbox):
    x_bottomleft_gt, y_bottomleft_gt, x_topright_gt, y_topright_gt = gt_bbox
    x_bottomleft_p, y_bottomleft_p, x_topright_p, y_topright_p = pred_bbox
    if (x_bottomleft_gt > x_topright_gt) or (y_bottomleft_gt > y_topright_gt):
        print(x_bottomleft_gt, y_bottomleft_gt, x_topright_gt, y_topright_gt)
        raise AssertionError("Ground Truth Bounding Box is not correct")
    if (x_bottomleft_p > x_topright_p) or (y_bottomleft_p > y_topright_p):
        raise AssertionError("Predicted Bounding Box is not correct",
                             x_bottomleft_p, x_topright_p, y_bottomleft_p, y_topright_p)
    # The boxes are disjoint along at least one axis, so the IoU is zero.
    if x_topright_gt < x_bottomleft_p:
        return 0.0
    if y_topright_gt < y_bottomleft_p:
        return 0.0
    if x_bottomleft_gt > x_topright_p:
        return 0.0
    if y_bottomleft_gt > y_topright_p:
        return 0.0
    # Areas use the inclusive-pixel (+1) convention.
    GT_bbox_area = (x_topright_gt - x_bottomleft_gt + 1) * (y_topright_gt - y_bottomleft_gt + 1)
    Pred_bbox_area = (x_topright_p - x_bottomleft_p + 1) * (y_topright_p - y_bottomleft_p + 1)
    x_bottom_left = np.max([x_bottomleft_gt, x_bottomleft_p])
    y_bottom_left = np.max([y_bottomleft_gt, y_bottomleft_p])
    x_top_right = np.min([x_topright_gt, x_topright_p])
    y_top_right = np.min([y_topright_gt, y_topright_p])
    intersection_area = (x_top_right - x_bottom_left + 1) * (y_top_right - y_bottom_left + 1)
    union_area = GT_bbox_area + Pred_bbox_area - intersection_area
    return intersection_area / union_area
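# --- illustrative usage sketch (not from the original source; box values are
# made up) ---
# Two 11x11-pixel boxes (inclusive +1 convention) offset by 5 pixels in x:
# each covers 11 * 11 = 121 cells, the overlap is 6 * 11 = 66, so
# IoU = 66 / (121 + 121 - 66) = 0.375 under Python 3 true division.
if __name__ == '__main__':
    gt = (0, 0, 10, 10)
    pred = (5, 0, 15, 10)
    print(calc_iou(gt, pred))  # 0.375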
|
def do_share_server_migration_get_progress(cs, args):
share_server = _find_share_server(cs, args.share_server_id)
result = share_server.migration_get_progress()
cliutils.print_dict(result)
|
// JinshanMa/fenhuo-renren: src/main/java/io/renren/modules/fenhuo/service/impl/FenhuoProjectfileServiceImpl.java
package io.renren.modules.fenhuo.service.impl;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;
import java.util.Map;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import io.renren.common.utils.PageUtils;
import io.renren.common.utils.Query;
import io.renren.modules.fenhuo.dao.FenhuoProjectfileDao;
import io.renren.modules.fenhuo.entity.FenhuoProjectfileEntity;
import io.renren.modules.fenhuo.service.FenhuoProjectfileService;
@Service("fenhuoProjectfileService")
public class FenhuoProjectfileServiceImpl extends ServiceImpl<FenhuoProjectfileDao, FenhuoProjectfileEntity> implements FenhuoProjectfileService {
@Override
public PageUtils queryPage(Map<String, Object> params) {
// Technical file type
String type = (String)params.get("type");
// File creation start date
String startDate = (String)params.get("startDate");
// File creation end date
String endDate = (String)params.get("endDate");
String techcatalogId = (String)params.get("techcatalogid");
String filtypename = (String)params.get("filtypename");
String creatorname = (String)params.get("creatorname");
String filename = (String)params.get("filename");
QueryWrapper<FenhuoProjectfileEntity> queryWrapper = new QueryWrapper<FenhuoProjectfileEntity>();
if(StringUtils.isNotBlank(type)){
queryWrapper.and(wrapper->wrapper.eq("type",Long.parseLong(type)));
}
if(StringUtils.isNotBlank(startDate) && StringUtils.isNotBlank(endDate)){
queryWrapper.and(wrapper->wrapper.ge("date_format(createdatetime,'%Y-%m-%d')",startDate)
.le("date_format(createdatetime,'%Y-%m-%d')", endDate));
}
if(StringUtils.isNotBlank(techcatalogId)){
queryWrapper.and(wrapper->wrapper.eq("techcatalogid", techcatalogId));
}
if(StringUtils.isNotBlank(filtypename)){
queryWrapper.and(wrapper->wrapper.like("filetype", filtypename));
}
if(StringUtils.isNotBlank(creatorname)){
queryWrapper.and(wrapper->wrapper.like("creator", creatorname));
}
if(StringUtils.isNotBlank(filename)){
queryWrapper.and(wrapper->wrapper.like("filename", filename));
}
queryWrapper.orderByDesc("fileid");
IPage<FenhuoProjectfileEntity> page = this.page(
new Query<FenhuoProjectfileEntity>().getPage(params),
queryWrapper
);
return new PageUtils(page);
}
@Override
public PageUtils queryFaultlistPage(Map<String, Object> params) {
String type = (String)params.get("type");
String faultid = (String)params.get("faultid");
QueryWrapper<FenhuoProjectfileEntity> queryWrapper = new QueryWrapper<FenhuoProjectfileEntity>();
if(StringUtils.isNotBlank(type)){
queryWrapper.and(wrapper->wrapper.eq("type",Long.parseLong(type)));
}
if(StringUtils.isNotBlank(faultid)){
queryWrapper.and(wrapper->wrapper.eq("projectid", Integer.valueOf(faultid)));
}
IPage<FenhuoProjectfileEntity> page = this.page(
new Query<FenhuoProjectfileEntity>().getPage(params),
queryWrapper
);
return new PageUtils(page);
}
}
|
def rxxr(factor: float = 1) -> float:
return 15.875 * factor
|
// BenLubar/weblegends
#include "../helpers_event.h"
#include "df/history_event_hist_figure_travelst.h"
void do_event(std::ostream & s, const event_context & context, df::history_event_hist_figure_travelst *event)
{
list_event_link<df::historical_figure>(s, context, event->group);
std::string prefix = " to ";
BEFORE_SWITCH(reason, event->reason);
switch (reason)
{
case df::history_event_hist_figure_travelst::T_reason::Journey:
s << " made a journey";
BREAK(reason);
case df::history_event_hist_figure_travelst::T_reason::Return:
s << " returned";
BREAK(reason);
case df::history_event_hist_figure_travelst::T_reason::Escape:
s << " escaped";
prefix = " from ";
BREAK(reason);
}
AFTER_SWITCH(reason, stl_sprintf("event-%d (HIST_FIGURE_TRAVEL)", event->id));
do_location(s, context, event, prefix, true);
}
|
// nauee/Algo3-TP2
package edu.fiuba.algo3.interfaz.botones;
import edu.fiuba.algo3.interfaz.musica.Cancion;
import edu.fiuba.algo3.interfaz.controladores.ControladorMusica;
import javafx.scene.control.RadioMenuItem;
import edu.fiuba.algo3.interfaz.musica.Reproductor;
public class BotonMusica extends RadioMenuItem {
public BotonMusica(Reproductor reproductor, Cancion cancion){
this.setText(cancion.nombre());
this.setOnAction(new ControladorMusica(reproductor, cancion));
}
}
|
WikiLeaks' Assange Wins Bail But Isn't Free Yet
Dan Kitwood/Getty Images
A British court granted bail Tuesday to WikiLeaks founder Julian Assange, who will nevertheless remain in custody for at least two more days pending the outcome of an appeal.
Assange has spent a week in prison awaiting possible extradition to Sweden for questioning in a sex-crimes investigation. Judge Howard Riddle said he must remain behind bars until Britain's High Court hears the appeal within 48 hours.
Riddle, who had set bail around $380,000, is the same judge who denied bail last week after Assange surrendered to Scotland Yard, calling him a flight risk who had no fixed address. This time around, a former army captain offered his 600-acre country estate as a bail address.
Gemma Lindfield, who represented Swedish authorities, had argued that the allegations against Assange were serious and that he has weak ties to Britain and "the means and ability to abscond." She later announced the decision to appeal.
Even if the 39-year-old Australian is released from London's Wandsworth Prison, he must abide by strict conditions. He will be required to wear an electronic tag, live at an address registered with the authorities, report to the police daily and observe two four-hour curfews each day.
My convictions are unfaltering. ... If anything, this process has increased my determination that they are true and correct.
A number of international figures, including filmmaker Ken Loach and socialite Jemima Khan, collectively offered to post bond, Assange's lawyer, Geoffrey Robertson, told the hearing in London.
Robertson, a former appeals judge at the U.N. Special Court for Sierra Leone who specializes in freedom of speech cases, has also represented such high-profile clients as author Salman Rushdie.
Assange is wanted for questioning in Sweden over allegations of rape, molestation and unlawful coercion stemming from separate encounters with two women over the summer.
Assange and his lawyers have denied the allegations and plan to fight attempts to extradite him to Sweden for questioning. They say the accusations are political, stemming from WikiLeaks' release of hundreds of thousands of secret U.S. diplomatic cables.
A decision on whether to extradite Assange is expected to take several weeks. Both he and the Swedish government are entitled to appeal the ruling if the judge rules against them.
Assange's next scheduled court appearance was set for Jan. 11, ahead of a full hearing on Feb. 7 and 8.
His Swedish lawyer, Bjorn Hurtig, says the courts are stacked against defendants in sex cases in Sweden. However, a 2009 European Commission-funded study found only 10 percent of sex offenses reported in Sweden result in a conviction.
Supporters outside City of Westminster Magistrates' Court erupted in cheers when they heard news of Tuesday's ruling.
Assange's mother, who was flown to Britain by Australian media outlets, watched the hearing nervously from the public gallery, but gave a huge smile as the judge announced his decision.
"I just want to thank everyone who's turned up to show their support and who's taken an interest," Christine Assange said.
For his part, Assange remains defiant. Australia's Seven network said Christine Assange spoke to her son for 10 minutes and asked him, at the network's request, whether it had been worth it.
"My convictions are unfaltering," the network quoted Julian Assange as saying. "I remain true to the ideals I have expressed. This circumstance shall not shake them. If anything, this process has increased my determination that they are true and correct."
In Sweden's capital, Stockholm, many people said they admire what WikiLeaks has done in the name of transparency and accountability. Yet they also balked at the suggestion that Sweden is part of an elaborate plot against Assange.
Anna Lunden, who was part of a crowd milling through Stockholm's old city Monday night, said she thinks Assange should come to Sweden to face his accusers. "Why should he not be guilty because he's famous?" she said. "I mean, bigger men that him have been using women."
Mark Stephens, part of Assange's legal team, said earlier that his client was getting by but not without difficulty. Assange has been placed in solitary confinement in Wandsworth Prison, one of the biggest prisons in Europe.
"Julian is a sort of a bit lost without his computer, although the prison authorities said that they will give him an Internet-disabled computer in due course," Stephens said. "And he's finding it very boring because he hates British daytime television."
Britain's national security adviser said Monday that U.K. government websites could be attacked in retribution if Assange is not released. Online "hacktivists" have already launched cyber attacks on companies that cut ties to WikiLeaks, including MasterCard, Visa and PayPal.
Assange called those companies "instruments of U.S. foreign policy" in his statement Tuesday. "I am calling on the world to protect my work and my people from these illegal and immoral attacks," he was quoted as saying.
The WikiLeaks disclosures, which have continued during Assange's incarceration, have deeply angered U.S. officials, who claim that other countries have already curtailed their dealings with the U.S. government as a result.
Larry Miller and NPR's Philip Reeves contributed to this story, which contains material from The Associated Press.
|
package gfsdb
import (
"github.com/Centny/gwf/util"
"gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
)
const (
ES_RUNNING = "running"
ES_DONE = "done"
ES_ERROR = "error"
ES_IGNORE = "ignore"
ES_NONE = "none"
//
VS_VERIFIED = "verified"
VS_ZERO = "zero"
VS_REDO = "redo"
VS_ERROR = "error"
)
const (
FS_N = "N" //normal
FS_D = "D" //deleted
)
const (
FT_FILE = "file"
FT_FOLDER = "folder"
)
type F struct {
Id string `bson:"_id" json:"id"`
Name string `bson:"name" json:"name"`
Filename string `bson:"filename" json:"filename"` //upload file name
Pub string `bson:"pub" json:"pub"` //public path.
SHA string `bson:"sha" json:"sha"` //file sha
MD5 string `bson:"md5" json:"md5"` //file md5
EXT string `bson:"ext" json:"ext"` //file externd
Size int64 `bson:"size" json:"size"` //file size.
Type string `bson:"type" json:"type"` //mimetype
Path string `bson:"path" json:"-"` //file save path.
Exec string `bson:"exec" json:"exec"` //the exec status
Info util.Map `bson:"info" json:"info"` //the extern info.
Status string `bson:"status" json:"status"` //file status
Time int64 `bson:"time" json:"time"` //upload time.
}
func (f *F) ToBsonM() bson.M {
return bson.M{
"_id": f.Id,
"name": f.Name,
"filename": f.Filename,
"pub": f.Pub,
"sha": f.SHA,
"md5": f.MD5,
"ext": f.EXT,
"size": f.Size,
"type": f.Type,
"path": f.Path,
"exec": f.Exec,
"info": f.Info,
"status": f.Status,
"time": f.Time,
}
}
type Mark struct {
Id string `bson:"_id" json:"id"`
Fid string `bson:"fid" json:"fid"`
}
// func (f *F) AddMark(mark []string) []string {
// var ms = map[string]int{}
// for _, v := range f.Mark {
// ms[v] = 1
// }
// var added = map[string]int{}
// var news = []string{}
// for _, v := range mark {
// if _, ok := ms[v]; ok {
// continue
// }
// if _, ok := added[v]; ok {
// continue
// }
// news = append(news, v)
// added[v] = 1
// }
// return news
// }
type File struct {
Id string `bson:"_id" json:"id"`
Fid string `bson:"fid" json:"fid"`
Pid string `bson:"pid" json:"pid"`
Oid string `bson:"oid" json:"oid"`
Owner string `bson:"owner" json:"owner"`
Name string `bson:"name" json:"name"`
EXT string `bson:"ext" json:"ext"` //file externd
Type string `bson:"type" json:"type"` //type
Tags []string `bson:"tags" json:"tags"`
Desc string `bson:"desc" json:"desc"`
Status string `bson:"status" json:"status"` //file status
Time int64 `bson:"time" json:"time"` //upload time.
}
var Indexes = map[string]map[string]mgo.Index{
CN_F: map[string]mgo.Index{
"f_name": mgo.Index{
Key: []string{"name"},
},
"f_filename": mgo.Index{
Key: []string{"filename"},
},
"f_pub": mgo.Index{
Key: []string{"pub"},
},
"f_sha": mgo.Index{
Key: []string{"sha"},
},
"f_md5": mgo.Index{
Key: []string{"md5"},
},
"f_ext": mgo.Index{
Key: []string{"ext"},
},
"f_size": mgo.Index{
Key: []string{"size"},
},
"f_type": mgo.Index{
Key: []string{"type"},
},
"f_exec": mgo.Index{
Key: []string{"exec"},
},
"f_status": mgo.Index{
Key: []string{"status"},
},
"f_time": mgo.Index{
Key: []string{"time"},
},
},
}
|
def compare_movies_to_json(self, movies, json_path):
if os.path.exists(json_path):
with open(json_path, 'r') as outfile:
saved_movies = json.load(outfile)
new_movies = []
for i in movies:
if i.title not in saved_movies:
new_movies.append(i)
return new_movies
else:
return movies
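# --- illustrative usage sketch (not from the original source) ---
# Assumptions: the function above is available at module level, the movie
# objects expose a .title attribute, and the JSON file stores a flat list of
# already-saved titles. `self` is unused in the body, so None is passed here
# purely for demonstration.
import json, os  # both are also needed by compare_movies_to_json above
from collections import namedtuple

Movie = namedtuple('Movie', 'title')

if __name__ == '__main__':
    with open('seen.json', 'w') as f:
        json.dump(['Alien'], f)
    movies = [Movie('Alien'), Movie('Blade Runner')]
    print(compare_movies_to_json(None, movies, 'seen.json'))
    # -> [Movie(title='Blade Runner')]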
|
Despite the constant development of pharmacotherapy, electroconvulsive therapy (ECT) remains a highly effective form of treatment in psychiatry. Among the noteworthy aspects of ECT are patients' attitudes toward it and their psychic comfort. Because many patients undergoing ECT experience the situation as stressful, the research was situated within a relational stress paradigm. The results confirm the assumed direction of the relationship between the cognitive appraisal of the situation and the emotions experienced: the way patients undergoing ECT perceive their situation substantially influences their level of psychic comfort. Patients who appraise the situation as a challenge cope considerably better emotionally than those who appraise it as a threat. This calls for particular attention to patients' beliefs about ECT. Appropriate psychoeducation, based on building motivation for treatment and fostering the ability to focus on the positive aspects of the situation and its possible benefits, may contribute to a more positive reception of ECT and thus to greater psychic comfort during the treatment process.
|
package juuxel.loomquiltflower.test;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.gradle.testkit.runner.TaskOutcome;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
class RuntimeClasspathTest extends ProjectTest {
@Test
void test() {
// Set up
setupProject("runtime-classpath");
// Run
BuildResult result = GradleRunner.create()
.withPluginClasspath()
.withProjectDir(projectDirectory)
.withArguments("verifyRuntimeClasspath", "genSourcesWithQuiltflower", "--stacktrace")
.forwardOutput()
.withDebug(true)
.build();
assertThat(result.task(":genSourcesWithQuiltflower").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
assertThat(result.task(":verifyRuntimeClasspath").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
}
}
|
/* usenixatc2021/SoftRefresh_Scheduling */
/*
* 440SPe's XOR engines support header file
*
* 2006-2009 (C) DENX Software Engineering.
*
* Author: <NAME> <<EMAIL>>
*
* This file is licensed under the term of the GNU General Public License
* version 2. The program licensed "as is" without any warranty of any
* kind, whether express or implied.
*/
#ifndef _PPC440SPE_XOR_H
#define _PPC440SPE_XOR_H
#include <linux/types.h>
/* Number of XOR engines available on the controller */
#define XOR_ENGINES_NUM 1
/* Number of operands supported in the h/w */
#define XOR_MAX_OPS 16
/*
* XOR Command Block Control Register bits
*/
#define XOR_CBCR_LNK_BIT (1<<31) /* link present */
#define XOR_CBCR_TGT_BIT (1<<30) /* target present */
#define XOR_CBCR_CBCE_BIT (1<<29) /* command block compete enable */
#define XOR_CBCR_RNZE_BIT (1<<28) /* result not zero enable */
#define XOR_CBCR_XNOR_BIT (1<<15) /* XOR/XNOR */
#define XOR_CDCR_OAC_MSK (0x7F) /* operand address count */
/*
* XORCore Status Register bits
*/
#define XOR_SR_XCP_BIT (1<<31) /* core processing */
#define XOR_SR_ICB_BIT (1<<17) /* invalid CB */
#define XOR_SR_IC_BIT (1<<16) /* invalid command */
#define XOR_SR_IPE_BIT (1<<15) /* internal parity error */
#define XOR_SR_RNZ_BIT (1<<2) /* result not Zero */
#define XOR_SR_CBC_BIT (1<<1) /* CB complete */
#define XOR_SR_CBLC_BIT (1<<0) /* CB list complete */
/*
* XORCore Control Set and Reset Register bits
*/
#define XOR_CRSR_XASR_BIT (1<<31) /* soft reset */
#define XOR_CRSR_XAE_BIT (1<<30) /* enable */
#define XOR_CRSR_RCBE_BIT (1<<29) /* refetch CB enable */
#define XOR_CRSR_PAUS_BIT (1<<28) /* pause */
#define XOR_CRSR_64BA_BIT (1<<27) /* 64/32 CB format */
#define XOR_CRSR_CLP_BIT (1<<25) /* continue list processing */
/*
* XORCore Interrupt Enable Register
*/
#define XOR_IE_ICBIE_BIT (1<<17) /* Invalid Command Block IRQ Enable */
#define XOR_IE_ICIE_BIT (1<<16) /* Invalid Command IRQ Enable */
#define XOR_IE_RPTIE_BIT (1<<14) /* Read PLB Timeout Error IRQ Enable */
#define XOR_IE_CBCIE_BIT (1<<1) /* CB complete interrupt enable */
#define XOR_IE_CBLCI_BIT (1<<0) /* CB list complete interrupt enable */
/*
* XOR Accelerator engine Command Block Type
*/
struct xor_cb {
/*
* Basic 64-bit format XOR CB (Table 19-1, p.463, 440spe_um_1_22.pdf)
*/
u32 cbc; /* control */
u32 cbbc; /* byte count */
u32 cbs; /* status */
u8 pad0[4]; /* reserved */
u32 cbtah; /* target address high */
u32 cbtal; /* target address low */
u32 cblah; /* link address high */
u32 cblal; /* link address low */
struct {
u32 h;
u32 l;
} __attribute__ ((packed)) ops[16];
} __attribute__ ((packed));
/*
* XOR hardware registers Table 19-3, UM 1.22
*/
struct xor_regs {
u32 op_ar[16][2]; /* operand address[0]-high,[1]-low registers */
u8 pad0[352]; /* reserved */
u32 cbcr; /* CB control register */
u32 cbbcr; /* CB byte count register */
u32 cbsr; /* CB status register */
u8 pad1[4]; /* reserved */
u32 cbtahr; /* operand target address high register */
u32 cbtalr; /* operand target address low register */
u32 cblahr; /* CB link address high register */
u32 cblalr; /* CB link address low register */
u32 crsr; /* control set register */
u32 crrr; /* control reset register */
u32 ccbahr; /* current CB address high register */
u32 ccbalr; /* current CB address low register */
u32 plbr; /* PLB configuration register */
u32 ier; /* interrupt enable register */
u32 pecr; /* parity error count register */
u32 sr; /* status register */
u32 revidr; /* revision ID register */
};
#endif /* _PPC440SPE_XOR_H */
|
Work continued in the aftermath of the bridge collapse on Interstate 75 in Cincinnati on Jan. 20, 2015. Highway advocates are urging Congress to approve more funding to repair roads and fix aging bridges. (Photo: Liz Dufour, AP)
WASHINGTON – Damaged big-city roads cost motorists as much as $1,000 a year in added maintenance, according to a report obtained by USA TODAY and are being used to urge Congress to approve a new highway bill to replace the one expiring July 31.
More than one-fourth (28%) of urban interstates, freeways and arterial routes with at least two lanes were paved in "poor" condition in 2013, according to the report from TRIP, a non-profit transportation research group funded by industry groups such as construction businesses and unions.
TRIP collated data from the Federal Highway Administration's road ratings. Poor roads have cracked or broken pavement and often show significant distress in the underlying foundation.
The poorly maintained roads cost the average motorist $516 per year in added maintenance, the report calculated.
Among places with at least 500,000 people, the cities with the greatest share of damaged roads are San Francisco (74%), Los Angeles and Long Beach (73%) and Detroit (56%), according to the report "Bumpy Roads Ahead: America's Roughest Rides and Strategies to Make Our Roads Smoother."
Among cities with 250,000 to 500,000 people, the worst roads are in Flint, Mich. (54%); Antioch, Calif. (52%); and Santa Rosa, Calif. (49%), according to the report.
The highest costs for maintenance, fuel and tire wear from bad roads totaled $1,044 per year for motorists in San Francisco and $1,031 for those in Los Angeles. The cost for Flint drivers was $839 per year.
"The nation's rough roads stress nerves and cost billions in unnecessary vehicle replacement, repair and fuel costs," said Jill Ingrassia, AAA's managing director of government relations and traffic safety advocacy. "Full investment in our nation's transportation system will reduce the financial burden on drivers and provide them with a smoother, safer and more efficient ride."
Groups including AAA and the U.S. Chamber of Commerce will hold a conference call at noon Thursday to discuss the report and urge congressional action.
"The deteriorating condition of our nation's urban roads threatens the health of the nation's economy, reducing the efficiency of a region's businesses and employers," said Janet Kavinoky, executive director for transportation at the U.S. Chamber of Commerce.
The TRIP report comes as the Senate debates a six-year highway bill that is only funded for three years, leaving the next Congress to find additional funding. The bill would provide $50 billion beyond what is already collected from the 18.4 cent per gallon gas tax, the additional funding coming from spending cuts or changes in federal policy elsewhere.
In another strategy, the House approved an $8 billion bill to temporarily extend highway policy to Dec. 18 to give lawmakers more time to find permanent funding.
"With state and local governments struggling to fund needed road repairs, and with federal surface transportation funding set to expire this month, road conditions are projected to get even worse," said Will Wilkins, TRIP's executive director.
Bud Wright, executive director of the American Association of State Highway and Transportation Officials, said road maintenance depends on federal investment.
"We can do better than the uncertainty of short-term extensions," Wright said.
|
package decompress_test

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"

	"github.com/kei2100/decompress-roundtripper"
)

func ExampleRoundTripper() {
	cli := http.Client{
		// decompress.RoundTripper automatically decompresses the response body according to the Content-Encoding header
		Transport: &decompress.RoundTripper{},
	}
	svr := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Encoding", "gzip")
		b := gzipBytes([]byte("foobarbaz"))
		w.Write(b)
	}))
	defer svr.Close()

	req, _ := http.NewRequest("GET", svr.URL, nil)
	req.Header.Set("Accept-Encoding", "gzip")
	resp, _ := cli.Do(req)
	b, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println(string(b))
	// Output: foobarbaz
}
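
// gzipBytes is referenced by the example above but not defined in this snippet;
// the following is a minimal sketch of what it presumably does (gzip-compress a
// byte slice). It relies on the "bytes" and "compress/gzip" imports above.
func gzipBytes(b []byte) []byte {
	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)
	if _, err := zw.Write(b); err != nil {
		panic(err)
	}
	if err := zw.Close(); err != nil {
		panic(err)
	}
	return buf.Bytes()
}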
|
def textile(text, **kwargs):
    from django.contrib.markup.templatetags.markup import textile
    return textile(text)
|
def promise(cls, fn, *args, **kwargs):
    task = cls.task(target=fn, args=args, kwargs=kwargs)
    task.start()
    return task
|
/**
 * Keeps waiting; meant to be overridden by child classes for different behaviors.
 * The timeout is validated here.
 */
void keepWaiting() {
    logDebug(String.format("%s runnable waiting, current state is %s.",
            actionName, stateHelper.getCurrentOamState()));
    isTimeout("keepWaiting");
}
|
Three individuals were arrested in Greensboro, North Carolina Thursday on suspicion of robbery. The head of a day center and advocacy group for those experiencing homelessness is calling into question the treatment of one of those individuals, a 25-year-old transgender woman.
Michelle Kennedy, executive director of the Interactive Resource Center, told LGBTQ Nation that her group has worked with Taye Richardson and that she identifies as a trans woman. Kennedy went to the jail in an attempt to have a professional services consultation, but said she was denied, despite the IRC being listed as her address.
Kennedy said a sergeant at the jail only said, “I’m just not going to do that,” when asked to set up a consultation.
“I kept calling her ‘she,’ and he kept calling her ‘he,'” Kennedy added. She said that at the time Richardson was being held in the male section of the jail.
Kennedy wrote about the experience on Facebook.
“The only information I was given was that her ID lists her as male and therefore she is being treated as a male. It is particularly difficult to update ID documents to reflect your gender identity when experiencing poverty/homelessness,” she wrote.
Shortly after visiting, Kennedy said she was informed that Richardson had been moved to a single-occupancy cell with regular visits from guards, which she said was an improvement but was still problematic, as she is still not being held in the female section.
Both the police and the sheriff’s office continued to refer to Richardson by a male name when contacted by LGBTQ Nation. The Greensboro Police Department confirmed the arrest.
The sheriff’s office, in charge of overseeing the jail, gave a longer statement, before referring further questions to the inmate’s lawyer.
Jim Secor, the Sheriff’s Attorney in Guilford County, likewise confirmed the arrest, using the male name.
Secor continued:
Upon completion of the booking process, this individual was immediately assigned a single-person cell in an Administrative Segregation Housing Unit where the individual has 24 hour supervision with at least one Detention Officer stationed on the floor of the Unit. In addition, a Detention Officer makes watch tour rounds past this individual’s cell every 15 minutes. The inmate can ask that the door to their assigned single-person cell be secured to prevent access to the cell by anyone other than Detention Staff. This individual has been treated respectfully by our Detention and Medical Staff and has not voiced any concerns to the Sheriff’s Office concerning the manner in which the individual is being housed or the treatment received from our Staff. This individual has already been provided his initial court appearance before a State District Court Judge.
In North Carolina, inmate placement is decided by a review board, evaluating each individual on a case-by-case basis.
Secor also said that the individual had turned down a request for an interview by Greensboro’s News & Record and that they would respect that decision.
Kennedy reported that Richardson would have a bond hearing Friday. The bond is set at $10,000, plus another $100 for a failure to appear.
She posted again on Facebook Friday, thanking those who are standing up in the community to rally around Richardson. She also said she has been “called all sorts of names and had some really hateful things said” about and to her over the past 24 hours, but that she knows what discomfort she goes through pales in comparison to what the people she fights for go through.
“I know I’m stretching and growing when my privilege can’t fully protect me (and, let’s be honest, it usually protects me pretty well). I know I’m doing right when I’m targeted,” she wrote.
We will update this story if more information becomes available.
|
// applications/TopologyOptimizationApplication/topology_optimization_application.cpp
// ==============================================================================
// KratosTopologyOptimizationApplication
//
// License: BSD License
// license: TopologyOptimizationApplication/license.txt
//
// Main authors: <NAME>, https://github.com/dbaumgaertner
// <NAME>
// <NAME>
//
// ==============================================================================
// System includes
// External includes
// Project includes
#include "includes/define.h"
#include "geometries/triangle_2d_3.h"
#include "geometries/triangle_3d_3.h"
#include "geometries/tetrahedra_3d_4.h"
#include "geometries/hexahedra_3d_8.h"
#include "geometries/line_2d.h"
#include "includes/variables.h"
#include "topology_optimization_application.h"
// Geometries that must be added when more elements are added into the application (SOLID MECHANICS APPLICATION)
//#include "geometries/tetrahedra_3d_10.h"
//#include "geometries/hexahedra_3d_20.h"
//#include "geometries/hexahedra_3d_27.h"
//#include "geometries/prism_3d_6.h"
//#include "geometries/prism_3d_15.h"
namespace Kratos
{
//Create Variables with Python connection
KRATOS_CREATE_VARIABLE( double, E_MIN )
KRATOS_CREATE_VARIABLE( double, E_0 )
KRATOS_CREATE_VARIABLE( double, PENAL )
KRATOS_CREATE_VARIABLE( double, X_PHYS )
KRATOS_CREATE_VARIABLE( double, X_PHYS_OLD )
KRATOS_CREATE_VARIABLE( double, DCDX )
KRATOS_CREATE_VARIABLE( double, DVDX )
KRATOS_CREATE_VARIABLE( double, SOLID_VOID )
KRATOS_CREATE_VARIABLE( double, LOCAL_STRAIN_ENERGY )
KratosTopologyOptimizationApplication::KratosTopologyOptimizationApplication() :
KratosApplication("TopologyOptimizationApplication"),
mSmallDisplacementSIMPElement3D3N( 0, Element::GeometryType::Pointer( new Triangle3D3 <Node<3> >( Element::GeometryType::PointsArrayType( 3 ) ) ) ), // dummy element for surface representation
mSmallDisplacementSIMPElement3D4N( 0, Element::GeometryType::Pointer( new Tetrahedra3D4 <Node<3> >( Element::GeometryType::PointsArrayType( 4 ) ) ) ),
mSmallDisplacementSIMPElement3D8N( 0, Element::GeometryType::Pointer( new Hexahedra3D8 <Node<3> >( Element::GeometryType::PointsArrayType( 8 ) ) ) )
// Extra elements that can be added in the future
// mSmallDisplacementSIMPElement3D6N( 0, Element::GeometryType::Pointer( new Prism3D6 <Node<3> >( Element::GeometryType::PointsArrayType( 6 ) ) ) ),
// mSmallDisplacementSIMPElement3D10N( 0, Element::GeometryType::Pointer( new Tetrahedra3D10 <Node<3> >( Element::GeometryType::PointsArrayType( 10 ) ) ) ),
// mSmallDisplacementSIMPElement3D15N( 0, Element::GeometryType::Pointer( new Prism3D15 <Node<3> >( Element::GeometryType::PointsArrayType( 15 ) ) ) ),
// mSmallDisplacementSIMPElement3D20N( 0, Element::GeometryType::Pointer( new Hexahedra3D20 <Node<3> >( Element::GeometryType::PointsArrayType( 20 ) ) ) ),
// mSmallDisplacementSIMPElement3D27N( 0, Element::GeometryType::Pointer( new Hexahedra3D27 <Node<3> >( Element::GeometryType::PointsArrayType( 27 ) ) ) )
{}
void KratosTopologyOptimizationApplication::Register()
{
// calling base class register to register Kratos components
KratosApplication::Register();
std::cout << " KRATOS|_ _/_ \\| _ \\ _ \\| | / _ \\/ __\\ \\ / / " << std::endl;
std::cout << " | | (_) | _/(_) | |_| (_) |(_ |\\ V / " << std::endl;
std::cout << " |_|\\___/|_| \\___/|____\\___/ ___| |_|OPTIMIZATION " << std::endl;
std::cout << "Initializing KratosTopologyOptimizationApplication... " << std::endl;
//Register small displacement elements
KRATOS_REGISTER_ELEMENT( "SmallDisplacementSIMPElement3D3N", mSmallDisplacementSIMPElement3D3N ) // dummy element for surface representation
KRATOS_REGISTER_ELEMENT( "SmallDisplacementSIMPElement3D4N", mSmallDisplacementSIMPElement3D4N )
KRATOS_REGISTER_ELEMENT( "SmallDisplacementSIMPElement3D8N", mSmallDisplacementSIMPElement3D8N )
// Extra elements that can be added in the future
// KRATOS_REGISTER_ELEMENT( "SmallDisplacementSIMPElement3D6N", mSmallDisplacementSIMPElement3D6N )
// KRATOS_REGISTER_ELEMENT( "SmallDisplacementSIMPElement3D10N", mSmallDisplacementSIMPElement3D10N )
// KRATOS_REGISTER_ELEMENT( "SmallDisplacementSIMPElement3D15N", mSmallDisplacementSIMPElement3D15N )
// KRATOS_REGISTER_ELEMENT( "SmallDisplacementSIMPElement3D20N", mSmallDisplacementSIMPElement3D20N )
// KRATOS_REGISTER_ELEMENT( "SmallDisplacementSIMPElement3D27N", mSmallDisplacementSIMPElement3D27N )
//Register Variables with Python connection
KRATOS_REGISTER_VARIABLE( E_MIN )
KRATOS_REGISTER_VARIABLE( E_0 )
KRATOS_REGISTER_VARIABLE( PENAL )
KRATOS_REGISTER_VARIABLE( X_PHYS )
KRATOS_REGISTER_VARIABLE( X_PHYS_OLD )
KRATOS_REGISTER_VARIABLE( DCDX )
KRATOS_REGISTER_VARIABLE( DVDX )
KRATOS_REGISTER_VARIABLE( SOLID_VOID )
KRATOS_REGISTER_VARIABLE( LOCAL_STRAIN_ENERGY )
}
} // namespace Kratos.
|
Presidential hopeful Ron Paul takes aim at the three candidates getting the most attention in the GOP race, blasting Mitt Romney, Rick Perry and Michele Bachmann as "smooth-talking politicians" in the same company as President Obama.
The Texas congressman, who finished second to Bachmann in last weekend's Iowa straw poll, highlights his votes against "every tax increase, every unbalanced budget" and proclaims he is the candidate who "will stop the spending, save the dollar, create jobs" and "bring peace."
Images of Perry, Romney and Bachmann -- along with Obama and House Minority Leader Nancy Pelosi -- roll by throughout the ad. The script begins by describing a story of "smooth-talking politicians" and "games of he said, she said."
The ad will air in Iowa and New Hampshire, where the first presidential nominating contests will be held in February.
The intensity and direction of the race for the GOP nomination shifted after the Iowa straw poll Aug. 13, and much of the focus is now on Romney, Perry and Bachmann.
Paul's supporters have been ardent behind his message of no government intrusion and helped him do well in Ames over the weekend. He has also been able to deploy the Internet as a fundraising tool.
Romney, a former Massachusetts governor, has led in national polls by Gallup and other organizations and is the clear front-runner in New Hampshire. Perry, the Texas governor, shot up to second behind Romney in national surveys even before he formally entered the race Saturday.
Bachmann has been doing well in Iowa, according to polls in the Hawkeye State. Paul fared better than she did in the latest CNN national survey (12% vs. 7%), but Romney and then Perry were at the top of that poll.
|
Future petitions to the White House to build a Death Star, have Piers Morgan deported, or fire Aaron Swartz's prosecutor will have to attract more support to merit the White House's attention.
Beginning today, petitions filed on WhiteHouse.gov's We the People platform will need to log 100,000 signatures in 30 days to receive an official response from the Obama Administration, quadrupling the previous minimum of 25,000.
The higher threshold will "ensure we're able to continue to give the most popular ideas the time they deserve," Macon Phillips, the White House's director of digital strategy, wrote in a blog post today. "This new threshold applies only to petitions created from this point forward and is not retroactively applied to ones that already exist."
Activity on the petitions platform skyrocketed in late 2012, with the average time petitions took to cross the 25,000-signature threshold slashed from 18 days during the first 10 months of the year to 9 days for the last two months. More than 60 percent of the petitions receiving 25,000 signatures last year did so in November and December.
"It's wonderful to see so many people using We the People to add their voices to important policy debates here in Washington and bring attention to issues that might not get the attention they deserve," Phillips wrote.
Launched in September 2011, the Obama Administration's online petition platform has become a venue for citizens to make serious policy suggestions, as well as air disapproval of recent events and enjoy a little whimsy. Recent petitions have sought to have the Westboro Baptist Church legally recognized as a hate group and persuade the government to build a Death Star -- a proposal that was met with an equally light-hearted response from the White House on Friday.
More about participation on the platform: [infographic via WhiteHouse.gov]
|
It has been, and still is, one of D-Day’s most recurring hypothetical scenarios, for historians and wargamers alike: ‘what if’ the Germans had unleashed their Panzers against the Allied beachhead in the very first hours of June 6th?
And in each and every such scenario, the main German protagonist of such a swift counter-offensive, the one division on whose shoulders all of Germany’s hopes rested, was the strangest collection of equipment ever seen in a Panzerdivision: the 21. Panzer.
‘PANZER ROLLEN IN AFRIKA VOR …’
The original 21. Panzerdivision can trace its origins to the 5. Leichte Division, an ad hoc battlegroup made up of elements from 3. Panzerdivision and other detachments hastily dispatched to Libya, then an Italian colony, to help Mussolini’s troops repel the British after the disastrous battle of Beda Fomm. Barely two weeks following its arrival in Africa, the 5. Leichte went on the offensive under Rommel’s leadership, initiating a series of victories over the British Army which drove the Axis from Tripoli to Tobruk in just two months.
On August 1st, 1941, the 5. Leichte Division was converted into the 21. Panzerdivision. From then on, its fame would be inseparable from that of Rommel’s legendary Deutsches Afrika Korps (DAK), being present for all its battles: Tobruk, Marsa Maruth, Gazala, Bir Hakeim, Mersa Matruh, First El Alamein, … It would share its fate too: after the Second Battle of El Alamein, in late 1942, reduced to four tanks and with its commander killed, the 21. Panzer retreated toward Tunisia with the DAK.
There, what was left of the division was reorganized with some reinforcements into a Kampfgruppe (battlegroup), which was soon engaged against the Americans at Kasserine Pass. Although a shadow of its former self, the battle-hardened 21. Panzer inflicted a major defeat on the still-inexperienced Americans, yet one Rommel couldn’t exploit due to lack of troops. Soon, the fast-recovering American & French troops from Algeria, and the British & Commonwealth troops from Libya, were on the offensive again. What remained of the first 21. Panzerdivision, trapped around Tunis, surrendered on May 13th, 1943.
RISING FROM ITS ASHES
On July 15th, 1943, 21. Panzerdivision was recreated in Rennes (France), using Schnelle-Brigade 931 (or West) as its basis and absorbing all surviving personnel from the original division: wounded men who had been evacuated before the showdown in Afrika, soldiers on leave at the time, etc… Still, with everyone accounted for, that amounted to fewer than 1,000 men in fighting condition.
For almost a year, under the command of Generalmajor Edgar Feuchtinger, 21. Panzer reorganized and trained new personnel to bring itself back up to true Panzerdivision status. On April 26th, 1944, it was moved to Caen and received its new mission: stand ready to counter-attack, at a moment’s notice, any Allied landing in the sector.
It was just a month before D-Day, and 21. Panzerdivision was hardly ready for that mission. It was under-equipped: old Panzer 35S(f) (French Somua tanks from 1940 used as training machines) still accounted for one third of its tank fleet on June 1st, and a few obsolete Panzer III & short-barreled Panzer IV could still be found among the regular long-barreled Panzer IV.
Another issue was its commander, Edgar Feuchtinger. As organizer of the military portion of the Nuremberg rallies before the war, he had displayed great talent at ingratiating himself with the higher circles of the Nazi party, and it was to those connections that he owed command of a prestigious Panzerdivision, despite having built his entire career in the artillery and knowing nothing about mechanized warfare. A socialite, Feuchtinger would spend as much time as possible in Paris or other major cities instead of at his headquarters, letting his subordinates run the division in his place. As a matter of fact, on the fateful night of June 5-6th, he had slipped away to Paris without telling anyone, for a night in Parisian clubs with his mistress. When news of the airborne landings reached his HQ, Feuchtinger could not be found until the morning …
BECKER’S FUNNIES
But 21. Panzerdivision had assets of its own. One of Feuchtinger’s qualities was realizing he needed good subordinates to compensate for his own lack of experience in armored warfare, so he used his connections again to get very good regimental commanders under his command, experts in armored warfare such as von Luck and von Oppeln-B., … Those were the officers really in charge of training and organizing the division.
Another asset was the unsung genius Major Alfred Becker. A civilian engineer and WW1 reserve officer, he was called back into service in the artillery during the campaigns in France & the Netherlands. With his unit (then 227. Infanterie-Division) staying in France as part of the occupation force, he started collecting British equipment abandoned at Dunkirk and converting it into self-propelled guns with the addition of howitzers or anti-tank guns. After a short tour in Russia, he set up a workshop in France in 1942 and started converting French vehicles into SPGs. The German army owes his craftsmanship many famous vehicles: Hummel, Wespe, Marder I, Lorraine-based carriers, etc…
In 1943-44, now a Major, he refurbished the 21. Panzerdivision with hundreds of his vehicles: in the recon battalion, all armored personnel carriers were modified French Unic P107s, as were all the specialized versions: anti-aircraft, mortar, command, anti-tank, etc… Self-propelled guns were all based on French Lorraine & Hotchkiss chassis, and artillery observation vehicles on the H-38 tank. Unique vehicles built from the Somua MGC half-track were added to the division’s armory: MLRS, multi-mortars, and even a self-propelled PaK 40, months before the first SdKfz 251/22.
Dubbed Rommel’s Zirkus because of its motley assortment of vehicles, 21. Panzer was at the same time under-equipped yet more mechanized than most of its sister Panzerdivisionen, thanks to Becker’s SPGs and half-tracks, which it had in the greatest numbers. Major Becker took command of the division’s StuG battalion himself and led it into action in Normandy.
D-DAY AND THE NORMANDY CAMPAIGN
On the night of June 5-6th, British paratroopers landed in 21. Panzer’s perimeter. But due to Feuchtinger’s disappearance, the division didn’t move immediately. Only late in the morning did the latter hurry back to his HQ and, refusing to coordinate with his neighbor and superior, General Erich Marcks (reputedly the only senior German officer who had anticipated and warned against an Allied landing in Normandy), launch an attack against the paratroopers instead of the British beachhead, as Marcks had suggested. Yet, unable to concentrate his whole division, he attacked erratically … and failed. Meanwhile, one of 21. Panzer’s regimental commanders, Oberst Joseph Rauch, had counter-attacked on his own against the beachhead and even reached the sea, effectively separating the British from the Canadians. But without any support, and with more paratroopers landing behind him, Rauch had to order a retreat.
From then on, Feuchtinger is said to have “allowed his subordinate commanders a great deal of latitude“; others commented that he left them completely to fend for themselves. 21. Panzerdivision remained operational as long as possible by absorbing the remnants of destroyed units, and fought all summer to prevent the Allies from capturing Caen. It was ultimately destroyed in the Falaise pocket, losing all its vehicles and all but 300 personnel … although Feuchtinger himself had managed to escape earlier.
WHAT IS DEAD MAY NEVER DIE
Once again rebuilt, 21. Panzerdivision was engaged during the Battle of the Bulge, only to be, once again, decimated. Pulled back, it was hurried to the Eastern Front, where it surrendered to the Red Army on April 29th, 1945.
As for Feuchtinger, still the division’s commander at the outbreak of the Battle of the Bulge, he was arrested on Christmas Eve for being away from his post without leave, as well as on several embezzlement charges. Stripped of all ranks & awards and condemned to death, he was pardoned by Hitler himself and sent back to the front as a Kanonier (artillery private) … but he immediately deserted, went into hiding, and surrendered to the Americans as a general, passing himself off as a victim of the Nazis. Later, after the war, he was coerced by the KGB into spying for the Soviet Union until his death in 1960.
THE 21. PANZERDIVISION INGAME
Note: On June 1st, 1944, 21. Panzer still had one third of its Panzer-Regiment equipped with French Somua tanks. Yet there is no historical evidence that those were ever actually used in combat: their crews were hastily sent to the rear to receive new Panzer IV before going back into combat. For gameplay and diversity reasons, we have chosen to represent the division as it was on June 1st, had it engaged the Allies with everything it had, including its obsolete Somua tanks.
21. Panzer is the panzer equivalent of a light cavalry division, immediately unleashed on the Allies. With plenty of old French cavalry tanks at its disposal, it is very strong in phase A: the Panzer 35S(f) (a.k.a. Somua) might be obsolete by 1944’s standards, but when faced only with Stuarts or other light tanks, it can hold its own.
21. Panzer can also rely on a wide array of specialized and unique vehicles, such as the S307(f) R-Vielfachwerfer MLRS, the odd S307(f) Reihenwerfer with its sixteen combined mortar tubes, and the deadly S307(f) PaK, a self-propelled PaK 40. All much-needed assets to survive through phase B which, with the Panzer IV as its best tank, isn’t when 21. Panzer is at its best …
Things tend to get a bit better in phase C, with the welcome reinforcement of some tank-borne Wurfrahmen 35H(f) heavy rocket launchers, or a handful of Königstiger from s.Panzer-Abteilung 503.
Bonus feature: German newsreel about Rommel reviewing the 21. Panzer and its strange vehicles on May 8th, 1944, one month before D-Day.
|
/**
 * Removes a log writer, either globally or from a particular <code>logService</code>.
 * <p>
 * Note that for a writer to be removed globally, it must have been added globally before.
*
* @param logService name of the log service of which the writer will be removed; to
* remove the writer global, use an empty string or <code>null</code>
* @param writer log writer to remove
* @see LogService#removeWriter(LogWriter)
*/
public void removeWriter(final String logService, final LogWriter writer)
{
if (logService != null && logService.length() > 0) {
final LogService l = (LogService) loggers.get(logService);
if (l != null)
l.removeWriter(writer);
}
else
synchronized (loggers) {
if (writers.remove(writer))
for (final Iterator i = loggers.values().iterator(); i.hasNext();)
((LogService) i.next()).removeWriter(writer);
}
}
|
import factory


class WalletFactory(factory.django.DjangoModelFactory):
    owner = factory.SubFactory('users.User', wallet=None)

    class Meta:
        model = 'users.Wallet'


class UserFactory(factory.django.DjangoModelFactory):
    username = factory.Sequence(lambda n: 'user-{0}'.format(n))
    email = factory.Sequence(lambda n: '<EMAIL>'.format(n))
    password = factory.PostGenerationMethodCall('set_password', 'password')
    wallet = factory.RelatedFactory(WalletFactory, 'owner')

    class Meta:
        model = 'users.User'
        django_get_or_create = ('username', )

    @classmethod
    def get_user(cls, user):
        from ..models import User
        return User.g_objects.filter(pk=user.pk).with_transaction_and_booking().first()


def val(n):
    v = 'ACDESFFESG2{0}'.format(n)
    if len(v) > 12:
        return v[1:13]
    return v


class BookingFactory(factory.django.DjangoModelFactory):
    user = factory.SubFactory(UserFactory)
    order = factory.Sequence(val)

    class Meta:
        model = 'users.Booking'


class WalletTransactionFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = 'users.WalletTransaction'
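

# Usage sketch (an assumption about the surrounding test suite, not part of the
# original module). Field and relation names follow the factory declarations
# above; the reverse accessor `user.wallet` assumes the Wallet.owner relation
# exposes a `wallet` attribute on User, as the RelatedFactory wiring suggests.
def _example_factory_usage():
    user = UserFactory(username='alice')   # also creates a Wallet via RelatedFactory
    booking = BookingFactory(user=user)    # order code generated by val()
    txn = WalletTransactionFactory()       # bare transaction instance
    reloaded = UserFactory.get_user(user)  # user reloaded with transactions/bookings
    return user, booking, txn, reloaded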
|
/*!
* Copyright (c) Microsoft. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project.
*/
import { Pivot, PivotItem } from '@fluentui/react'
import { FC } from 'react'
import { ColorPalette } from '../ColorPalette'
import { CoolerPicker } from '../CoolerPicker'
import { FluentViewer } from '../FluentViewer'
import { GimpEditor } from '../GIMP'
import { JSONPane } from '../JSON'
import { MarkGrid } from '../MarkGrid'
import { Office } from '../Office'
import { PowerBiEditor } from '../PowerBI'
import './index.css'
export interface ThemeEditorProps {
scaleItemCount: number
}
export const ThemeEditor: FC<ThemeEditorProps> = ({ scaleItemCount }) => {
return (
<div className="editor-wrapper">
<Pivot>
<PivotItem className="tab" headerText="Color Picker">
<CoolerPicker />
</PivotItem>
<PivotItem className="tab" headerText="Marks">
<MarkGrid />
</PivotItem>
<PivotItem className="tab" headerText="Fluent UI">
<FluentViewer />
</PivotItem>
<PivotItem className="tab" headerText="Office">
<Office />
</PivotItem>
<PivotItem className="tab" headerText="Power BI">
<PowerBiEditor />
</PivotItem>
<PivotItem className="tab" headerText="GIMP">
<GimpEditor />
</PivotItem>
<PivotItem className="tab" headerText="JSON">
<JSONPane />
</PivotItem>
</Pivot>
<ColorPalette scaleItemCount={scaleItemCount} />
<div className="footer">
<div className="privacy">
This site does not collect any personal information or use
cookies.
<a
target="_blank"
rel="noreferrer"
href="https://privacy.microsoft.com/en-us/privacystatement/"
>
Read Microsoft's statement on Privacy and Cookies
</a>
.
</div>
<div className="github">
Contribute at
<a
target="_blank"
rel="noreferrer"
href="https://github.com/microsoft/thematic"
>
GitHub
</a>
.
</div>
</div>
</div>
)
}
|
# Read the array length l and window size k, then the l array values.
z = input().split()
l = int(z[0])
k = int(z[1])
a = [int(x) for x in input().split()]

least = 0
sum = 0
lowest = 0

# Sum of the first window of length k.
for i in range(k):
    sum += a[i]
least = sum

# Slide the window across the array, tracking the start index of the
# window with the smallest sum.
for i in range(k, l):
    sum = sum - a[i-k] + a[i]
    if sum < least:
        least = sum
        lowest = i-k+1

# Print the 1-based start position of the minimal-sum window.
print(lowest+1)
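
# Example run (sketch): for the input
#   8 3
#   1 2 7 5 2 4 1 6
# the window sums of length 3 are 10, 14, 14, 11, 7, 11, so the minimal-sum
# window starts at 1-based position 5 (elements 2 4 1) and the script prints 5.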
|
/* @(#)steim.c 1.8 02/03/97 */
/*======================================================================
*
* steim.c
*
* Steim 1 compression/decompression. Here we force the output to be
* in big endian byte order, as it looks like many of the big endian
* decompressors out there assume that the data are that way.
*
* The decompressor is meant to handle data from both systems.
*
* Requires 4 byte longs.
*
* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
* Copyright (c) 1997 Regents of the University of California.
* All rights reserved.
*====================================================================*/
#include <assert.h>
#include <memory.h>
#include <string.h>
#include <errno.h>
#include "util.h"
#define FLEN 64 /* number of bytes per frame */
#define NSEQ 16 /* number of sequences per frame */
#define WLEN FLEN / NSEQ /* number of bytes per sequence */
/* local globals */
long *out; /* output buffer */
long *frame; /* current frame */
long fi; /* index of current frame */
long maxfrm; /* number of frames that will fit into output buffer */
int si; /* sequence index in current frame */
long nc; /* number of compressed samples in output */
long ocount; /* number of bytes used in output buffer */
int swap; /* swap flag, set if host is not big endian */
static int ns = 0; /* number of diffs currently saved */
/* internal function: load one compressed word into output */
int _util_csteim1_addword(d, ndiff)
long *d;
int ndiff;
{
union {
char *c;
short *s;
long *l;
} ptr;
static char key[5] = {0, 3, 2, 0, 1};
if (ndiff == 0) return 0;
/* Load in the differences and associated descriptor */
ptr.l = frame + si;
if (ndiff == 4) {
ptr.c[0] = (char) (d[0] & 0xff);
ptr.c[1] = (char) (d[1] & 0xff);
ptr.c[2] = (char) (d[2] & 0xff);
ptr.c[3] = (char) (d[3] & 0xff);
} else if (ndiff == 2) {
ptr.s[0] = (short) (d[0] & 0xffff);
ptr.s[1] = (short) (d[1] & 0xffff);
if (swap) util_sswap(ptr.s, 2);
} else if (ndiff == 1) {
ptr.l[0] = d[0];
if (swap) util_lswap(ptr.l, 1);
}
*frame |= (key[ndiff] << (2 * (15 - si)));
ocount += 4;
nc += ndiff;
/* Increment the sequence counter */
if (++si < NSEQ) return 0;
/* At this point the frame has been completed. */
if (swap) util_lswap(frame, 1);
if (++fi == maxfrm) return 1; /* No more frames available, done */
/* Initialize new frame */
frame = out + (fi * NSEQ);
*frame = 0;
ocount += 4;
si = 1;
return 0;
}
/* internal function: build a frame */
int _util_csteim1_frm(diff, flush)
long diff;
int flush;
{
/* Possible states */
#define STATE_NUL 0 /* no differences saved so far */
#define STATE_1D1 1 /* 1 1-byte difference saved so far */
#define STATE_2D1 2 /* 2 1-byte difference saved so far */
#define STATE_3D1 3 /* 3 1-byte difference saved so far */
#define STATE_1D2 4 /* 1 2-byte difference saved so far */
#define STATE_BAD -1 /* will never occur */
struct activity {
int flush1; /* no. diffs to flush right away */
int flush2; /* no. diffs to flush after that and before saving crnt */
int flush3; /* no. diffs to flush after saving crnt diff */
};
static struct activity action_table[5][5] = {
/* size of difference */
/* 0 1 2 3 4 */
/* STATE_NUL */ { {0,0,0}, {0,0,0}, {0,0,0}, {0,0,0}, {0,0,1} },
/* STATE_1D1 */ { {1,0,0}, {0,0,0}, {0,0,2}, {0,0,0}, {1,0,1} },
/* STATE_2D1 */ { {2,0,0}, {0,0,0}, {2,0,0}, {0,0,0}, {2,0,1} },
/* STATE_3D1 */ { {2,1,0}, {0,0,4}, {2,0,2}, {0,0,0}, {2,1,1} },
/* STATE_1D2 */ { {1,0,0}, {0,0,2}, {0,0,2}, {0,0,0}, {1,0,1} }
};
static int state_table[5][5] = {
/* size of difference */
/* 0 1 2 3 4 */
/* STATE_NUL */ {STATE_NUL, STATE_1D1, STATE_1D2, STATE_BAD, STATE_NUL},
/* STATE_1D1 */ {STATE_NUL, STATE_2D1, STATE_NUL, STATE_BAD, STATE_NUL},
/* STATE_2D1 */ {STATE_NUL, STATE_3D1, STATE_1D2, STATE_BAD, STATE_NUL},
/* STATE_3D1 */ {STATE_NUL, STATE_NUL, STATE_NUL, STATE_BAD, STATE_NUL},
/* STATE_1D2 */ {STATE_NUL, STATE_NUL, STATE_NUL, STATE_BAD, STATE_NUL}
};
int i, j, shift, size, nf, done;
struct activity action;
static long d[4];
static int state = STATE_NUL;
/* Figure out how many bytes are needed for this difference */
if (flush) {
size = 0;
} else if (diff >= -128 && diff <= 127) {
size = 1;
} else if (diff > -32768 && diff <= 32767) {
size = 2;
} else {
size = 4;
}
/* Figure out what to do with this difference */
action = action_table[state][size];
state = state_table[state][size]; /* new state, after action */
if (nf = action.flush1) {
done = _util_csteim1_addword(d, nf);
for (i = 0, j = nf; j < ns; i++, j++) d[i] = d[j];
ns -= nf;
if (done) return 1;
}
if (nf = action.flush2) {
done = _util_csteim1_addword(d, nf);
for (i = 0, j = nf; j < ns; i++, j++) d[i] = d[j];
ns -= nf;
if (done) return 1;
}
if (size) d[ns++] = diff;
if (nf = action.flush3) {
done = _util_csteim1_addword(d, nf);
for (i = 0, j = nf; j < ns; i++, j++) d[i] = d[j];
ns -= nf;
if (done) return 1;
}
return 0;
}
/* Steim 1 compression */
/* Given a fixed length array to store the output, this routine
* will compress as many samples as possible into this array,
* starting at offset 0. The number of samples which were
* compressed (which may be less that the input count) is
* returned.
*/
long util_csteim1(dest, destlen, src, count, used)
char *dest; /* output array */
long destlen; /* size of dest, in bytes */
long *src; /* input array */
long count; /* number of entries in src */
long *used; /* number of output bytes actually used */
{
long s, diff;
if (sizeof(long) != WLEN) {
errno = EINVAL;
return -1;
}
/* Initialize everything */
memset(dest, 0, destlen);
swap = (util_order() == LTL_ENDIAN_ORDER);
out = (long *) dest;
out[1] = src[0];
out[2] = src[0];
ocount = 12; /* W0, X0, Xn to start */
maxfrm = destlen / FLEN; /* maximum number of output frames */
frame = out;
*frame = 0;
fi = 0;
nc = 0;
si = 3;
/* Loop for each sample (will bail when we run out of output memory) */
for (s = 0; s < count; s++) {
diff = src[s] - out[2];
if (_util_csteim1_frm(diff, 0) != 0) {
if (used != (long *) NULL) *used = ocount;
if (swap) {
util_lswap(out+1, 2);
}
return nc;
}
out[2] = src[s];
}
/* All the input fit into the output. Flush accumulated differences. */
_util_csteim1_frm(0, 1);
if (used != (long *) NULL) *used = ocount;
if (swap) {
util_lswap(frame, 1);
util_lswap(out+1, 2);
}
return nc;
}
/* Steim 1 decompression */
int util_dsteim1(dest, destlen, src, srclen, order, count)
long *dest; /* output array */
long destlen; /* dimension of output array */
char *src; /* input Steim 1 compressed data */
long srclen; /* size of src, in bytes */
u_long order; /* byte order (as per util_order()) of src */
long count; /* number of uncompressed samples in src */
{
int i, j, k, nsamp, nfrm;
long ltmp, val, beg, end, key, code[NSEQ];
short stmp;
char *frm;
union {
char *c;
short *s;
long *l;
} ptr;
if (sizeof(long) != WLEN) {
errno = EINVAL;
return -1;
}
swap = (order != util_order());
nfrm = srclen / FLEN;
/* Get the block start/stop values */
ptr.c = src;
memcpy(&beg, ptr.c + 4, 4);
if (swap) util_lswap(&beg, 1);
memcpy(&end, ptr.c + 8, 4);
if (swap) util_lswap(&end, 1);
/* Loop over each frame */
/* We do not verify that the 0x00 codes are where they should be */
val = dest[0] = beg;
nsamp = 1;
for (i = 0; i < nfrm; i++) {
frm = src + (i * FLEN); /* point to start of current frame */
key = *((long *) frm); /* codes are in first 4 bytes */
if (swap) util_lswap(&key, 1);
for (j = NSEQ - 1; j >= 0; j--) {
code[j] = key & 0x03;
key >>= 2;
}
for (j = 1; j < NSEQ; j++) {
if (nsamp >= destlen) {
return -2;
}
ptr.c = frm + (j * 4); /* point to current 4 byte sequence */
switch (code[j]) {
case 0:
break;
case 1:
for (k = (nsamp == 1) ? 1 : 0; k < 4; k++) {
dest[nsamp++] = (val += (long) ptr.c[k]);
}
break;
case 2:
for (k = (nsamp == 1) ? 1 : 0; k < 2; k++) {
stmp = ptr.s[k];
if (swap) util_sswap(&stmp, 1);
dest[nsamp++] = (val += (long) stmp);
}
break;
case 3:
if (nsamp > 1) {
ltmp = ptr.l[0];
if (swap) util_lswap(&ltmp, 1);
dest[nsamp++] = (val += ltmp);
}
break;
default:
return -3;
}
}
}
/* Sanity checks */
if (count != nsamp) return -4;
if (dest[nsamp-1] != end) return -5;
return 0;
}
#ifdef DEBUG_TEST
#define NSAMP 17
#define OUTLEN 64
#define NEWLEN 512 /* way bigger than required! */
main()
{
int i;
static long raw[NSAMP] =
{306, 306, 301, 298, 297, 1024, 3000, 3100, 2000, 0, -100, -200, 0, 15, 80111, 80111, -80123};
u_long order;
int retval;
long nc;
char output[OUTLEN];
long new[NEWLEN], used;
order = BIG_ENDIAN_ORDER;
printf("beg data = ");
for (i = 0; i < NSAMP; i++) printf("%3ld ", raw[i]); printf("\n");
nc = util_csteim1(output, OUTLEN, raw, NSAMP, &used);
printf("util_csteim1 returns %ld (%ld bytes used)\n", nc, used);
printf("\n");
util_hexdmp((unsigned char *) output, OUTLEN, 0, 'd');
printf("\n");
retval = util_dsteim1(new, NEWLEN, output, OUTLEN, order, NSAMP);
printf("util_dsteim1 returns %d\n", retval);
printf("beg data = ");
for (i = 0; i < NSAMP; i++) printf("%3ld ", raw[i]); printf("\n");
printf("end data = ");
for (i = 0; i < NSAMP; i++) printf("%3ld ", new[i]); printf("\n");
}
#endif /* DEBUG_TEST */
|
package Controllers
import (
"github.com/kataras/iris"
"github.com/kataras/iris/context"
)
func GetIndexHandler(ctx iris.Context) {
ctx.JSON(context.Map{
"code": 200,
"message": "hello Marisa~",
})
}
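
// RegisterIndexRoutes is a usage sketch, not part of the original package: it
// shows how GetIndexHandler might be attached to an iris application. The route
// path "/" is an assumption.
func RegisterIndexRoutes(app *iris.Application) {
	app.Get("/", GetIndexHandler)
}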
|
/// Computes unordered set of pending hashes.
///
/// Since strict nonce-checking is not required, you may get some false positive future transactions as well.
pub fn pending_hashes<N>(
&self,
nonce: N,
) -> BTreeSet<H256> where
N: Fn(&Address) -> Option<U256>,
{
let ready = ready::OptionalState::new(nonce);
self.pool.read().unordered_pending(ready).map(|tx| tx.hash).collect()
}
|
// third_party/blink/renderer/core/animation/animation_utils_test.cc
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/core/animation/animation_utils.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/renderer/core/animation/animation_input_helpers.h"
#include "third_party/blink/renderer/core/animation/invalidatable_interpolation.h"
#include "third_party/blink/renderer/core/animation/keyframe_effect_model.h"
#include "third_party/blink/renderer/core/animation/string_keyframe.h"
#include "third_party/blink/renderer/core/css/properties/css_property_ref.h"
#include "third_party/blink/renderer/core/css/resolver/style_resolver.h"
#include "third_party/blink/renderer/core/dom/document.h"
#include "third_party/blink/renderer/core/style/computed_style.h"
#include "third_party/blink/renderer/core/testing/core_unit_test_helper.h"
#include "third_party/googletest/src/googletest/include/gtest/gtest.h"
namespace blink {
class AnimationUtilsTest : public RenderingTest {
public:
AnimationUtilsTest() = default;
StringKeyframe* AddKeyframe(StringKeyframeVector& keyframes, double offset) {
StringKeyframe* keyframe = MakeGarbageCollected<StringKeyframe>();
keyframe->SetOffset(offset);
keyframes.push_back(keyframe);
return keyframe;
}
void AddProperty(StringKeyframe* keyframe,
CSSPropertyID property_id,
String value) {
keyframe->SetCSSPropertyValue(property_id, value,
SecureContextMode::kInsecureContext,
/*style_sheet_contents=*/nullptr);
}
void AddInterpolation(ActiveInterpolationsMap& interpolations_map,
const StringKeyframeVector& keyframes,
PropertyHandle property_handle) {
ActiveInterpolationsMap::AddResult entry = interpolations_map.insert(
property_handle, MakeGarbageCollected<ActiveInterpolations>());
ActiveInterpolations* active_interpolations = entry.stored_value->value;
PropertySpecificKeyframe* from_keyframe =
CreatePropertySpecificKeyframe(keyframes[0], property_handle, 0);
PropertySpecificKeyframe* to_keyframe =
CreatePropertySpecificKeyframe(keyframes[1], property_handle, 1);
Interpolation* interpolation =
MakeGarbageCollected<InvalidatableInterpolation>(
property_handle, from_keyframe, to_keyframe);
interpolation->Interpolate(/*iteration=*/0, /*progress=*/1);
active_interpolations->push_back(interpolation);
}
PropertySpecificKeyframe* CreatePropertySpecificKeyframe(
Keyframe* keyframe,
PropertyHandle property_handle,
double offset) {
return keyframe->CreatePropertySpecificKeyframe(
property_handle, EffectModel::kCompositeReplace, offset);
}
};
TEST_F(AnimationUtilsTest, ForEachInterpolatedPropertyValue) {
SetBodyInnerHTML("<div id='target' style='left:10px'></div>");
Element* target = GetElementById("target");
PropertyHandleSet properties;
properties.insert(PropertyHandle(GetCSSPropertyLeft()));
properties.insert(PropertyHandle(GetCSSPropertyTop()));
HashMap<String, String> map;
ActiveInterpolationsMap interpolations_map;
base::RepeatingCallback<void(PropertyHandle, const CSSValue*)> callback =
WTF::BindRepeating(
[](HashMap<String, String>* map, PropertyHandle property,
const CSSValue* value) {
String property_name =
AnimationInputHelpers::PropertyHandleToKeyframeAttribute(
property);
map->Set(property_name, value->CssText());
},
WTF::Unretained(&map));
AnimationUtils::ForEachInterpolatedPropertyValue(
target, properties, interpolations_map, callback);
EXPECT_EQ(2U, map.size());
EXPECT_EQ("10px", map.at("left"));
EXPECT_EQ("auto", map.at("top"));
map.clear();
StringKeyframeVector keyframes;
StringKeyframe* fromKeyframe = AddKeyframe(keyframes, 0);
AddProperty(fromKeyframe, CSSPropertyID::kLeft, "10px");
AddProperty(fromKeyframe, CSSPropertyID::kTop, "auto");
StringKeyframe* toKeyframe = AddKeyframe(keyframes, 1);
AddProperty(toKeyframe, CSSPropertyID::kLeft, "20px");
AddProperty(toKeyframe, CSSPropertyID::kTop, "40px");
AddInterpolation(interpolations_map, keyframes,
PropertyHandle(GetCSSPropertyLeft()));
AddInterpolation(interpolations_map, keyframes,
PropertyHandle(GetCSSPropertyTop()));
AnimationUtils::ForEachInterpolatedPropertyValue(
target, properties, interpolations_map, callback);
EXPECT_EQ(2U, map.size());
EXPECT_EQ("20px", map.at("left"));
EXPECT_EQ("40px", map.at("top"));
}
} // namespace blink
|
import requests


def raise_403(response):
    http_error_msg = f"403 Client Error: Forbidden for url: {response.url}"
    raise requests.exceptions.HTTPError(http_error_msg, response=response)
|
def parse(self, parser: jinja2_parser.Parser) -> nodes.CallBlock:
    next(parser.stream)
    lineno = parser.stream.current.lineno
    body = parser.parse_statements([f"name:end{tagname}" for tagname in self.tags], True)
    method = self.call_method("strip_spaces")
    call_block = nodes.CallBlock(method, [], [], body)
    call_block.set_lineno(lineno)
    return call_block
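

# A minimal, self-contained sketch (an assumption, not the original source) of the
# kind of Jinja2 Extension class a parse() method like the one above could belong
# to. The tag name "spaceless" and the whitespace-collapsing strip_spaces behaviour
# are hypothetical examples.
import re

from jinja2 import Environment, nodes
from jinja2.ext import Extension


class SpacelessExtension(Extension):
    tags = {"spaceless"}

    def parse(self, parser):
        # Same shape as the parse() above: consume the tag token, collect the body
        # up to the end tag, and wrap it in a CallBlock routed through strip_spaces().
        lineno = next(parser.stream).lineno
        body = parser.parse_statements(["name:endspaceless"], drop_needle=True)
        return nodes.CallBlock(
            self.call_method("strip_spaces"), [], [], body
        ).set_lineno(lineno)

    def strip_spaces(self, caller):
        # Collapse whitespace between rendered tags inside the block.
        return re.sub(r">\s+<", "><", caller().strip())


# Usage sketch.
env = Environment(extensions=[SpacelessExtension])
out = env.from_string(
    "{% spaceless %}<p> a </p>   <p> b </p>{% endspaceless %}"
).render()
print(out)  # <p> a </p><p> b </p>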
|
// default_image_test.go
package brightbox
import (
"github.com/brightbox/gobrightbox"
"testing"
)
func TestEmptyImages(t *testing.T) {
emptyImages := []brightbox.Image{}
if _, err := GetDefaultImage(emptyImages); err == nil {
t.Error("Missing default image not detected in empty list")
}
}
func TestSingleImageFound(t *testing.T) {
singleImage := []brightbox.Image{
{
Resource: brightbox.Resource{
Id: "img-upwxc",
},
Name: "CoreOS 766.4.0",
Owner: "brightbox",
Arch: "x86_64",
Description: "ID: com.brightbox:test/net.core-os.release:amd64-usr/766.4.0/disk1.img, Release: stable",
Username: "core",
Official: true,
Public: true,
CompatibilityMode: false,
},
}
image, err := GetDefaultImage(singleImage)
if err != nil {
t.Fatal(err)
}
if image.Id != "img-upwxc" {
t.Error("Failed to select correct image")
}
}
func TestSingleImageNotFound(t *testing.T) {
singleImage := []brightbox.Image{
{
Resource: brightbox.Resource{
Id: "img-abcde",
},
Name: "ubuntu-wily-daily-amd64-server",
Owner: "brightbox",
Arch: "x86_64",
Description: "ID: com.ubuntu.cloud:daily:download/com.ubuntu.cloud.daily:server:15.10:amd64/20151026/disk1.img, Release: daily",
Username: "ubuntu",
Official: true,
Public: true,
CompatibilityMode: false,
},
}
image, err := GetDefaultImage(singleImage)
if err == nil {
t.Error("Expected no image")
}
if image != nil {
t.Errorf("Received image reference %s when not expected", image.Id)
}
}
func TestFilterAndSort(t *testing.T) {
multipleImages := []brightbox.Image{
{
Resource: brightbox.Resource{
Id: "img-upwxc",
},
Name: "CoreOS 766.4.0",
Owner: "brightbox",
Arch: "x86_64",
Description: "ID: com.brightbox:test/net.core-os.release:amd64-usr/766.4.0/disk1.img, Release: stable",
Username: "core",
Official: true,
Public: true,
CompatibilityMode: false,
},
{
Resource: brightbox.Resource{
Id: "img-gnhsz",
},
Name: "CoreOS 845.0.0",
Owner: "brightbox",
Arch: "x86_64",
Description: "ID: com.brightbox:test/net.core-os.release:amd64-usr/845.0.0/disk1.img, Release: alpha",
Username: "core",
Official: true,
Public: true,
CompatibilityMode: false,
},
{
Resource: brightbox.Resource{
Id: "img-77dmp",
},
Name: "ubuntu-wily-15.10-amd64-server-uefi1",
Owner: "brightbox",
Arch: "x86_64",
Description: "ID: com.ubuntu.cloud:released:download/com.ubuntu.cloud:server:15.10:amd64/20151021/uefi1.img, Release: release",
Username: "ubuntu",
Official: true,
Public: true,
CompatibilityMode: false,
},
{
Resource: brightbox.Resource{
Id: "img-b0ieg",
},
Name: "ubuntu-wily-15.10-i386-server",
Owner: "brightbox",
Arch: "i686",
Description: "ID: com.ubuntu.cloud:released:download/com.ubuntu.cloud:server:15.10:i386/20151026/disk1.img, Release: release",
Username: "ubuntu",
Official: true,
Public: true,
CompatibilityMode: false,
},
{
Resource: brightbox.Resource{
Id: "img-5atge",
},
Name: "ubuntu-wily-15.10-amd64-server",
Owner: "brightbox",
Arch: "x86_64",
Description: "ID: com.ubuntu.cloud:released:download/com.ubuntu.cloud:server:15.10:amd64/20151021/disk1.img, Release: release",
Username: "ubuntu",
Official: true,
Public: true,
CompatibilityMode: false,
},
{
Resource: brightbox.Resource{
Id: "img-abcde",
},
Name: "ubuntu-wily-daily-amd64-server",
Owner: "acc-7wy80",
Arch: "x86_64",
Description: "ID: com.ubuntu.cloud:daily:download/com.ubuntu.cloud.daily:server:15.10:amd64/20151026/disk1.img, Release: daily",
Username: "ubuntu",
Official: false,
Public: true,
CompatibilityMode: false,
},
}
image, err := GetDefaultImage(multipleImages)
if err != nil {
t.Fatal(err)
}
if image.Id != "img-gnhsz" {
t.Errorf("Received image reference %s - expecting img-77dmp", image.Id)
}
}
|
/*
* Hisilicon ISP Sensor Driver
* Name : sensor.c
* Copyright (c) 2018- Hisilicon Technologies CO., Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <linux/kernel.h>
#include <linux/platform_device.h>
#include <linux/compiler.h>
#include <linux/list.h>
#include <linux/gpio.h>
#include <uapi/linux/histarisp.h>
#include <linux/platform_data/remoteproc-hisi.h>
#include "sensor_commom.h"
enum hisp_gpio_type_e {
RESET = 0,
POWERDOWN,
DPHY_TXRXZ,
DPHY_RSTZCAL,
CAM_1V05_EN,
CAM_1V2_EN,
CAM_1V8_EN,
CAM_2V85_EN,
CAM_VCM_2V85_EN,
CAM_VCM_POWER,
MAX_HISP_GPIO
};
enum hisp_level_type_e {
LOW = 0,
HIGH,
MAX_HISP_LEVEL
};
struct hisp_gpio_s {
unsigned int type;
unsigned int level;
};
struct hisp_sensor_info {
struct device *dev;
sensor_t *sensor;
struct list_head link;
};
static LIST_HEAD(g_sinfo);
int __weak hisi_is_clt_flag(void)
{
pr_err("[%s] Not Supported Now!\n", __func__);
return 0;
}
int __weak hw_is_fpga_board(void)
{
pr_err("[%s] Not Supported Now!\n", __func__);
return 1;
}
int __weak hw_sensor_power_up_config(struct device *dev,
hwsensor_board_info_t *sensor_info)
{
pr_err("[%s] Not Supported Now!\n", __func__);
return 0;
}
void __weak hw_sensor_power_down_config(hwsensor_board_info_t *sensor_info)
{
pr_err("[%s] Not Supported Now!\n", __func__);
}
int __weak hw_sensor_power_up(sensor_t *s_ctrl)
{
pr_err("[%s] Not Supported Now!\n", __func__);
return 0;
}
static int hw_sensor_gpio_config(struct hisp_sensor_info *hsi,
struct hisp_gpio_s *gpio)
{
hwsensor_board_info_t *bi = NULL;
sensor_t *sensor = NULL;
int ret = 0;
sensor = hsi->sensor;
if (sensor == NULL) {
pr_err("[%s] Failed : sensor.%pK\n", __func__, sensor);
return -EINVAL;
}
bi = sensor->board_info;
if (bi == NULL) {
pr_err("[%s] Failed : board_info.%pK\n",
__func__, sensor->board_info);
return -EINVAL;
}
if (gpio == NULL) {
pr_err("[%s] Failed : gpio.%pK\n", __func__, gpio);
return -EINVAL;
}
if (gpio->type >= MAX_HISP_GPIO || gpio->level >= MAX_HISP_LEVEL) {
pr_err("[%s] Failed : Invalid gpio. %d, %d\n",
__func__, gpio->type, gpio->level);
return -EINVAL;
}
ret = hisi_is_clt_flag();
if (ret == 0) {
pr_err("[%s] Failed : hisi_is_clt_flag.%d\n", __func__, ret);
return 0;
}
if (!bi->gpios[gpio->type].gpio) {
pr_err("[%s] Failed : GPIO.%d Not Actived\n",
__func__, gpio->type);
return -EINVAL;
}
ret = gpio_request(bi->gpios[gpio->type].gpio, NULL);
if (ret != 0) {
pr_err("[%s] Failed : gpio_request.%d, type.%d\n",
__func__, ret, gpio->type);
return ret;
}
ret = gpio_direction_output(bi->gpios[gpio->type].gpio, gpio->level);
if (ret != 0) {
pr_err("[%s] Failed : gpio_direction_output.%d, type.%d\n",
__func__, ret, gpio->type);
return ret;
}
gpio_free(bi->gpios[gpio->type].gpio);
return 0;
}
static int check_sensor_name(int sensorid, const char *name,
struct hisp_sensor_info **ret_hsi)
{
struct hisp_sensor_info *hsi = NULL;
hwsensor_board_info_t *bi = NULL;
sensor_t *sensor = NULL;
int i = 0;
pr_info("[%s] sensorid.%d, name.%s +\n", __func__, sensorid, name);
if (list_empty_careful(&g_sinfo)) {
pr_err("[%s] Failed : Sensor Info List Empty\n", __func__);
return -ENOMEM;
}
list_for_each_entry(hsi, &g_sinfo, link) {
if (hsi == NULL) {
pr_err("[%s] Failed : hsi.%pK\n", __func__, hsi);
return -EINVAL;
}
sensor = hsi->sensor;
if (sensor == NULL) {
pr_err("[%s] Failed : sensor.%pK\n", __func__, sensor);
return -EINVAL;
}
bi = sensor->board_info;
if (bi == NULL) {
pr_err("[%s] Failed : board_info.%pK\n",
__func__, sensor->board_info);
return -EINVAL;
}
if (bi->name == NULL) {
pr_err("[%s] Failed : name.%pK\n", __func__, bi->name);
return -EINVAL;
}
pr_info("[%s][%d][(%s, %d) = (%s, %d)]\n", __func__,
i++, name, sensorid, bi->name, bi->sensor_index);
if (!strncmp(bi->name, name, strlen(name)) &&
(bi->sensor_index == sensorid)) {
pr_info("[%s] %s @ %d Found\n", __func__,
bi->name, bi->sensor_index);
*ret_hsi = hsi;
return 0;
}
}
pr_err("[%s] Failed : %s Not Found %d in All\n", __func__, name, i);
return -EINVAL;
}
/*lint -save -e429*/
int rpmsg_sensor_register(struct platform_device *pdev, void *psensor)
{
struct hisp_sensor_info *hsi = NULL;
if (!pdev || !psensor) {
pr_err("[%s] Failed : pdev.%pK, psensor.%pK\n",
__func__, pdev, psensor);
return -ENOMEM;
}
hsi = kzalloc(sizeof(struct hisp_sensor_info), GFP_KERNEL);
if (hsi == NULL) {
pr_err("[%s] Failed : kzalloc.%pK\n", __func__, hsi);
return -ENOMEM;
}
hsi->dev = &pdev->dev;
hsi->sensor = (sensor_t *)psensor;
list_add_tail(&hsi->link, &g_sinfo);
return 0;
}
/*lint -restore */
void rpmsg_sensor_unregister(void *psensor)
{
struct hisp_sensor_info *hsi = NULL;
struct list_head *pos = NULL;
if (psensor == NULL) {
pr_err("[%s] Failed : si.%pK\n", __func__, psensor);
return;
}
hsi = container_of(psensor, struct hisp_sensor_info, sensor);
if (hsi == NULL) {
pr_err("[%s] Failed : container_of.%pK\n", __func__, hsi);
return;
}
list_for_each(pos, &g_sinfo) {
if (pos == &hsi->link) {
list_del(&hsi->link);
kfree(hsi);
return;
}
}
pr_err("[%s] Failed : Can't find sensor!\n", __func__);
}
static int do_gpio_config_on(struct hisp_sensor_info *hsi)
{
unsigned int i = 0;
int ret = 0;
struct hisp_gpio_s *gpio = NULL;
struct hisp_gpio_s on_sequence[] = {
{RESET, HIGH},
{POWERDOWN, LOW},
{CAM_VCM_POWER, HIGH},
{DPHY_TXRXZ, LOW},
{DPHY_RSTZCAL, HIGH},
{CAM_1V05_EN, HIGH},
{CAM_1V2_EN, HIGH},
{CAM_1V8_EN, HIGH},
{CAM_2V85_EN, HIGH},
{CAM_VCM_2V85_EN, HIGH},
};
for (gpio = &on_sequence[0], i = 0;
i < ARRAY_SIZE(on_sequence); gpio++, i++) {
ret = hw_sensor_gpio_config(hsi, gpio);
if (ret != 0) {
pr_err("[%s] Failed : hw_sensor_gpio_config.%d.(%pK, %pK)\n",
__func__, ret, hsi, gpio);
return ret;
}
}
return 0;
}
static int do_gpio_config_off(struct hisp_sensor_info *hsi)
{
unsigned int i = 0;
int ret = 0;
struct hisp_gpio_s *gpio = NULL;
struct hisp_gpio_s off_sequence[] = {
{CAM_VCM_POWER, LOW},
{POWERDOWN, HIGH},
{RESET, LOW},
{DPHY_TXRXZ, HIGH},
{DPHY_RSTZCAL, LOW},
{CAM_1V05_EN, LOW},
{CAM_1V2_EN, LOW},
{CAM_1V8_EN, LOW},
{CAM_2V85_EN, LOW},
{CAM_VCM_2V85_EN, LOW},
};
for (gpio = &off_sequence[0], i = 0;
i < ARRAY_SIZE(off_sequence); gpio++, i++) {
ret = hw_sensor_gpio_config(hsi, gpio);
if (ret != 0) {
pr_err("[%s] Failed : hw_sensor_gpio_config.%d, (%pK, %pK)\n",
__func__, ret, hsi, gpio);
return ret;
}
}
return 0;
}
static int all_sensor_power_on(int index)
{
struct hisp_sensor_info *hsi = NULL;
hwsensor_board_info_t *bi = NULL;
sensor_t *sensor = NULL;
int ret = 0;
if (list_empty_careful(&g_sinfo)) {
pr_err("[%s] Failed : Sensor Info List Empty\n", __func__);
return -ENOMEM;
}
list_for_each_entry(hsi, &g_sinfo, link) {
sensor = hsi->sensor;
if (sensor == NULL) {
pr_err("[%s] Failed : sensor.%pK\n", __func__, sensor);
return -EINVAL;
}
bi = sensor->board_info;
if (bi == NULL) {
pr_err("[%s] Failed : board_info.%pK\n",
__func__, sensor->board_info);
return -EINVAL;
}
if (index != bi->sensor_index)
continue;
pr_info("[%s] %s@%d\n", __func__, bi->name, index);
ret = do_gpio_config_on(hsi);
if (ret != 0) {
pr_err("[%s] Failed : do_gpio_config_on.%d, %s@%d\n",
__func__, ret, bi->name, index);
return ret;
}
}
return 0;
}
static int all_sensor_power_off(int index)
{
struct hisp_sensor_info *hsi = NULL;
hwsensor_board_info_t *bi = NULL;
sensor_t *sensor = NULL;
int ret = 0;
if (list_empty_careful(&g_sinfo)) {
pr_err("[%s] Failed : Sensor Info List Empty\n", __func__);
return -ENOMEM;
}
list_for_each_entry(hsi, &g_sinfo, link) {
sensor = hsi->sensor;
if (sensor == NULL) {
pr_err("[%s] Failed : sensor.%pK\n", __func__, sensor);
return -EINVAL;
}
bi = sensor->board_info;
if (bi == NULL) {
pr_err("[%s] Failed : board_info.%pK\n",
__func__, sensor->board_info);
return -EINVAL;
}
if (index != bi->sensor_index)
continue;
pr_info("[%s] %s@%d\n", __func__, bi->name, index);
ret = do_gpio_config_off(hsi);
if (ret != 0) {
pr_err("[%s] Failed : do_gpio_config_off.%d, %s@%d\n",
__func__, ret, bi->name, index);
return ret;
}
}
return 0;
}
int do_sensor_power_on(int index, const char *name)
{
struct hisp_sensor_info *hsi = NULL;
int ret = 0;
pr_info("[%s] %s@%d\n", __func__, name, index);
ret = check_sensor_name(index, name, &hsi);
if (ret != 0) {
pr_err("[%s] Failed : check_sensor_name.%d. %d, %s, %pK\n",
__func__, ret, index, name, hsi);
return ret;
}
ret = hw_sensor_power_up_config(hsi->dev, hsi->sensor->board_info);
if (ret != 0) {
pr_err("[%s] Failed : hw_sensor_power_up_config.%d\n",
__func__, ret);
return ret;
}
if (hw_is_fpga_board()) {
ret = do_gpio_config_on(hsi);
if (ret != 0)
pr_err("[%s] Failed : do_gpio_config_on.%d, %s@%d\n",
__func__, ret, name, index);
return ret;
}
ret = hw_sensor_power_up(hsi->sensor);
if (ret != 0)
pr_err("[%s] Failed : hw_sensor_power_up.%d, %s@%d\n",
__func__, ret, name, index);
return 0;
}
int do_sensor_power_off(int index, const char *name)
{
struct hisp_sensor_info *hsi = NULL;
int ret = 0;
pr_info("[%s] %s@%d\n", __func__, name, index);
ret = check_sensor_name(index, name, &hsi);
if (ret != 0) {
pr_err("[%s] Failed : check_sensor_name.%d. %d, %s, %pK\n",
__func__, ret, index, name, hsi);
return ret;
}
if (hw_is_fpga_board()) {
ret = do_gpio_config_off(hsi);
if (ret != 0)
pr_err("[%s] Failed : do_gpio_config_off.%d, %s@%d\n",
__func__, ret, name, index);
return ret;
}
hw_sensor_power_down_config(hsi->sensor->board_info);
return 0;
}
int rpmsg_sensor_ioctl(unsigned int cmd, int index, char *name)
{
int ret = -EINVAL;
switch (cmd) {
case HWSENSOR_IOCTL_POWER_UP:
if (strlen(name) == 0) {
ret = all_sensor_power_on(index);
if (ret != 0)
pr_err("[%s] Failed : ret.%d, index.%d, name.%s\n",
__func__, ret, index, name);
break;
}
ret = hisi_rproc_select_def();
if (ret != 0) {
pr_err("[%s] Failed : ret.%d, index.%d, name.%s\n",
__func__, ret, index, name);
break;
}
ret = do_sensor_power_on(index, name);
if (ret != 0)
pr_err("[%s] Failed : ret.%d, index.%d, name.%s\n",
__func__, ret, index, name);
break;
case HWSENSOR_IOCTL_POWER_DOWN:
if (strlen(name) == 0) {
ret = all_sensor_power_off(index);
if (ret != 0)
pr_err("[%s] Failed : ret.%d, index.%d, name.%s\n",
__func__, ret, index, name);
break;
}
ret = do_sensor_power_off(index, name);
if (ret != 0) {
pr_err("[%s] Failed : ret.%d, index.%d, name.%s\n",
__func__, ret, index, name);
break;
}
ret = hisi_rproc_select_idle();
if (ret != 0)
pr_err("[%s] Failed : ret.%d, index.%d, name.%s\n",
__func__, ret, index, name);
break;
default:
pr_err("[%s] Failed : cmd.%d, index.%d, name.%s\n",
__func__, cmd, index, name);
break;
}
pr_info("[%s] cmd.%d, index.%d, name.%s\n",
__func__, cmd, index, name);
return ret;
}
MODULE_AUTHOR("<NAME> <<EMAIL>>");
MODULE_DESCRIPTION("Hisilicon ISP Sensor Driver");
MODULE_LICENSE("GPL v2");
|
/**
* Defines the slide size (trigger frequency) for the windowed data stream.
* This controls how often the user defined function will be triggered on
 * the window. <br><br> For example to get a window of 5 elements with a
 * slide of 2 seconds use: <br><br>
 * {@code ds.window(Count.of(5)).every(Time.of(2,TimeUnit.SECONDS))}
 * <br><br> The user function in this case will be called on the 5 most
* recent elements every 2 seconds
*
* @param policyHelpers
* The policies that define the triggering frequency
*
* @return The windowed data stream with triggering set
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public WindowedDataStream<OUT> every(WindowingHelper... policyHelpers) {
WindowedDataStream<OUT> ret = this.copy();
if (ret.evictionHelpers == null) {
ret.evictionHelpers = ret.triggerHelpers;
ret.triggerHelpers = new ArrayList<WindowingHelper<OUT>>();
}
for (WindowingHelper<OUT> helper : policyHelpers) {
ret.triggerHelpers.add(helper);
}
return ret;
}
|