content
stringlengths 10
4.9M
|
---|
// equivalent steps through each node in the given linked list and checks that it
// is forward connected, backward connected, and each node is equivalent to the same index
// in the slice.
//
// For each index it advances the cursor, compares value and index, steps
// backward (expecting failure at the first element) and forward again.
// After the loop it verifies the cursor walks off the end (nil value/index),
// can step back onto the last element when the list is non-empty, and that
// Len/ToSlice agree with s.  Returns true when no assertion failed.
func equivalent(t *testing.T, ll LinkedList[int], s []int) bool {
	cursor := ll.CursorGhost()
	for i := 0; i < len(s); i++ {
		// Advance onto element i and check its value and reported index.
		assert.True(t, cursor.Next())
		assert.Equal(t, s[i], *cursor.Current())
		assert.Equal(t, i, *cursor.Index())
		if i == 0 {
			// NOTE(review): Prev at the first element must return false and
			// leave the cursor before the list, since the Next below and the
			// next iteration both succeed -- confirm against the cursor contract.
			assert.False(t, cursor.Prev())
		} else {
			// Backward link: stepping back lands on the previous element.
			assert.True(t, cursor.Prev())
			assert.Equal(t, s[i-1], *cursor.Current())
		}
		// Return to element i before the next iteration.
		assert.True(t, cursor.Next())
	}
	// Past the end: no further element, and no current value or index.
	assert.False(t, cursor.Next())
	assert.Nil(t, cursor.Current())
	assert.Nil(t, cursor.Index())
	if len(s) > 0 {
		// A non-empty list can step back onto its last element.
		assert.True(t, cursor.Prev())
		assert.Equal(t, s[len(s)-1], *cursor.Current())
	} else {
		assert.False(t, cursor.Prev())
		assert.Nil(t, cursor.Current())
		assert.Nil(t, cursor.Index())
	}
	assert.Equal(t, len(s), ll.Len())
	assert.Equal(t, s, ll.ToSlice())
	return !t.Failed()
}
<reponame>ffshen/go-istio-api
package main
import (
"fmt"
log "github.com/sirupsen/logrus"
"net/http"
)
// main wires the logging middleware in front of the info handler and serves
// HTTP on port 9528.  ListenAndServe only returns on failure, so a non-nil
// error is fatal (the original silently dropped it).
func main() {
	http.Handle("/repo/v1/info", loggingMiddleware(http.HandlerFunc(handler)))
	if err := http.ListenAndServe(":9528", nil); err != nil {
		log.Fatalf("server error: %v", err)
	}
}
func handler(w http.ResponseWriter, req *http.Request) {
//fmt.Println(string(body))
fmt.Fprintf(w, "this is go istio repo : 9528 version : v1 ")
}
// loggingMiddleware wraps next so that every request's URI is logged
// before the request is forwarded.
func loggingMiddleware(next http.Handler) http.Handler {
	logged := func(w http.ResponseWriter, req *http.Request) {
		log.Infof("uri: %s", req.RequestURI)
		next.ServeHTTP(w, req)
	}
	return http.HandlerFunc(logged)
}
|
import React, { ReactElement } from "react";
import { Box, styled } from "@mui/material";
// Props for one row of the term-hierarchy view.
interface HierarchyItemProps {
  // Nesting depth; multiplied by the spacing unit for the left margin.
  level: number;
  // Connector graphic rendered behind the row content.
  connector?: ReactElement;
}

// 16px-wide box in the connector color; each usage picks which of its
// four borders are visible via style.borderWidth to draw tree lines.
const ConnectorBox = styled(Box)({
  width: "16px",
  height: "100%",
  border: "solid #E7E7E7",
});
export const NormalEnd: React.FC = () => {
return <ConnectorBox ml={4} style={{ borderWidth: "0px 0px 4px 4px" }} />;
};
export const ChildrenEnd: React.FC = () => {
return <ConnectorBox style={{ borderWidth: "0px 0px 4px 4px" }} />;
};
export const ParentEnd: React.FC = () => {
return <ConnectorBox style={{ borderWidth: "4px 0px 0px 4px" }} />;
};
export const ParentsEnd: React.FC = () => {
return (
<>
<ConnectorBox style={{ borderWidth: "4px 0px 4px 4px" }} />
<ConnectorBox style={{ borderWidth: "0px 0px 4px 0px" }} />
<ConnectorBox style={{ borderWidth: "0px 0px 4px 0px" }} />
</>
);
};
// Renders one hierarchy row: indents by `level`, draws the supplied
// connector absolutely positioned behind the content (zIndex -1), and
// lays the row content out to the right of the 16px connector column.
//
// NOTE(review): this relies on React.FC providing an implicit `children`
// prop (pre-React-18 typings); `children` is not declared on
// HierarchyItemProps -- add it there if the React types are upgraded.
const HierarchyItem: React.FC<HierarchyItemProps> = (props) => {
  //TODO: Fix responsiveness, when Term has foreign vocabulary or text is wrapped => connectors are not aligned in the center
  return (
    <Box display="flex" ml={props.level * 4} mt={2}>
      <Box style={{ position: "relative", minWidth: "16px" }}>
        <Box
          display="flex"
          flexDirection="row"
          style={{
            position: "absolute",
            top: "28px",
            zIndex: -1,
            height: `calc(100% + 16px)`,
          }}
        >
          {props.connector}
        </Box>
      </Box>
      <Box style={{ flex: 1 }}>{props.children}</Box>
    </Box>
  );
};
export default HierarchyItem;
|
Jane Shore, Edward IV, and the Politics of Publicity
In 1614, the ghost of Richard III gleefully recalled the “peece of Iustice” he had inflicted on “Mistresse Shore,” the mistress of his brother Edward IV. “Shore’s wife,” as she was also known, first appeared in Thomas More’s History of King Richard III. She featured as the only female exemplar in the second edition of the Mirror for Magistrates (1563), and through a spate of verse complaints, she continued to tell her story in the 1590s. All versions follow roughly the same outline: Shore’s wife rises to power as Edward’s favorite mistress and then falls precipitously once Richard seizes the throne. Richard’s ghost, in the 1614 narrative poem by Christopher Brooke, revels in his hypocrisy “when (with a fained hate / To vnchast Life) I forced her to goe / Bare-foote, on penance, with deiected State.” But this “peece of Iustice” seems to have backfired. Shifting from medieval England to early modern London, Richard’s ghost bitterly complains, |
"""
It requires only 3 wires to connect it to the Pi.
One is +5V which can be grabbed directly from pin 2 of the Raspberry Pi,
one is Ground, which is provided on pin 4,
and the third is pin 11
ToDo: catch the error caused by a discontinued measurement
"""
from PyEdgeIoTFramework.pyedgeiotframework.core.EdgeService import EdgeService
import time
from pubsub import pub
import RPi.GPIO as GPIO
# Pub/sub topic on which measured distances are published.
RADAR_DISTANCE_TOPIC = "radar_distance_topic"
# Use board based pin numbering
# GPIO.setmode(GPIO.BOARD)
sigGpio = 17      # sensor signal pin; presumably BCM numbering since setmode(BOARD) is commented out -- TODO confirm
sigInterval = .2  # seconds between measurements
maxTime = 0.04    # per-edge timeout (seconds) while waiting for the echo
distance = -1     # module-level default; read_distance uses its own local
def read_distance(pin):
    """Trigger the ultrasonic sensor on *pin* and return the distance in cm.

    The pin is driven low, pulsed high for ~5 microseconds as the trigger,
    then switched to input to time the echo pulse -- assumes a sensor where
    trigger and echo share one signal pin (see module docstring; TODO confirm).

    NOTE(review): if either wait loop hits the ``maxTime`` timeout, the
    returned value is computed from a truncated pulse and is not a valid
    measurement; callers cannot distinguish this case (module ToDo).
    """
    # print("read")
    GPIO.setup(pin, GPIO.OUT)
    GPIO.output(pin, 0)
    time.sleep(0.000002)
    # send trigger signal
    GPIO.output(pin, 1)
    time.sleep(0.000005)
    GPIO.output(pin, 0)
    # Switch the same pin to input to time the echo pulse.
    GPIO.setup(pin, GPIO.IN)
    starttime = time.time()
    timeout = starttime + maxTime
    # Wait (bounded by maxTime) for the echo line to go high.
    # while GPIO.input(pin) == 0:
    while GPIO.input(pin) == 0 and starttime < timeout:
        starttime = time.time()
    endtime = time.time()
    timeout = endtime + maxTime
    # Wait (bounded by maxTime) for the echo line to drop low again.
    # while GPIO.input(pin) == 1:
    while GPIO.input(pin) == 1 and endtime < timeout:
        endtime = time.time()
    duration = endtime - starttime
    # Distance is defined as time/2 (there and back) * speed of sound 34000 cm/s
    distance = duration * 34000 / 2
    return distance
class PyPing(EdgeService):
    """Edge service that polls the ultrasonic sensor and publishes each
    reading on RADAR_DISTANCE_TOPIC every ``sigInterval`` seconds."""

    def __init__(self):
        EdgeService.__init__(self)
        # ----
        # Last measured distance in cm; -1 means "no reading yet".
        self.distance = -1
        # ----

    def run(self):
        # ----
        EdgeService.run(self)
        # ----
        # Poll forever; this method never returns.
        while True:
            try:
                self.distance = read_distance(sigGpio)
            except Exception as e:
                # Sensor errors are logged but do not stop the loop; the
                # previous distance value is re-published in that case.
                print("{} error: {}".format(self.__class__.__name__, e))
            # print("Distance to object is ", distance, " cm or ", distance * .3937, " inches")
            # ----
            self.dispatch_event(
                topic=RADAR_DISTANCE_TOPIC,
                payload=str(self.distance)
            )
            # ----
            time.sleep(sigInterval)
|
#pragma once
#include <boost/assert.hpp>
#define _WINSOCK_DEPRECATED_NO_WARNINGS
#include <WinSock2.h>
#pragma comment(lib, "WS2_32.lib")
#include <chrono>
#include <iostream>
#include "reader_statistics.hpp"
using namespace reader;
/*
 * Prints every (guid, value) pair of `map` to stdout, one per line.
 * When `dictionary` contains a readable name for a guid, the name is
 * printed instead of the raw numeric key.
 */
template<typename T>
void print(const std::map<unsigned int, T>& map,
    const std::map<unsigned int, std::string>& dictionary = std::map<unsigned int, std::string>())
{
    for (const auto& entry : map)
    {
        // Look the key up once instead of calling find() twice.
        const auto name_iter = dictionary.find(entry.first);
        if (name_iter != dictionary.cend())
            std::cout << name_iter->second << "\t=\t" << entry.second << std::endl;
        else
            std::cout << entry.first << "\t=\t" << entry.second << std::endl;
    }
}
/*
 * Runs the screenshot-based regression tests: each test_image() call loads a
 * screenshot, runs the statistics recognition on it and diffs the recognized
 * assets against the expected values.  Mismatches are reported as
 *   [FP]   recognized but not expected (and amount != 0)
 *   [MISS] expected property not recognized
 *   [DIFF] recognized with a different value
 *   [FN]   expected asset not recognized at all
 * Failures are only printed, not counted -- the final "all tests passed!"
 * line is emitted unconditionally, so inspect the console output.
 */
void unit_tests(image_recognition& recog, statistics& image_recog)
{
    auto start = std::chrono::high_resolution_clock::now();

    // Compares the recognition result for one screenshot against `expected`.
    auto test_image = [&](const std::string& language, const std::string& path, std::map<unsigned int, statistics_screen::properties> expected)
    {
        // Resolves a GUID to a readable name via the language dictionary,
        // falling back to the numeric GUID.
        auto get_name = [&](unsigned int guid)
        {
            const auto& dict = recog.dictionaries.at(language);
            auto iter = dict.population_levels.find(guid);
            if (iter != dict.population_levels.end())
            {
                return iter->second;
            }
            iter = dict.factories.find(guid);
            if (iter != dict.factories.end())
            {
                return iter->second;
            }
            iter = dict.products.find(guid);
            if (iter != dict.products.end())
            {
                return iter->second;
            }
            return std::to_string(guid);
        };

        image_recog.update(language, image_recognition::load_image(path));
        const auto result = image_recog.get_all();
        for (const auto& asset : result)
        {
            auto expected_iter = expected.find(asset.first);
            if (expected_iter == expected.end())
            {
                // Braces added here: the original relied on dangling-else
                // binding, which parsed correctly but was brittle.
                if (asset.second.find(statistics_screen::KEY_AMOUNT) != asset.second.end() && asset.second.at(statistics_screen::KEY_AMOUNT) == 0)
                {
                    // Zero amounts are treated as "not present": no report.
                    continue;
                }
                else
                {
                    std::cout << path << " [FP] " << get_name(asset.first);
                    for (const auto& entry : asset.second)
                        std::cout << "\t" << entry.first << ": " << entry.second;
                    std::cout << std::endl;
                }
            }
            else
            {
                for (const auto& entry_expected : expected_iter->second) {
                    const auto& iter_actual = asset.second.find(entry_expected.first);
                    if (iter_actual == asset.second.end())
                        std::cout << path << " [MISS] " << get_name(asset.first) << "." << entry_expected.first << " expected " << entry_expected.second << std::endl;
                    else if (iter_actual->second != entry_expected.second)
                        std::cout << path << " [DIFF] " << get_name(asset.first) << "." << entry_expected.first << " expected " << entry_expected.second << " got " << iter_actual->second << std::endl;
                }
                // Remove matched assets; leftovers become [FN] reports below.
                expected.erase(expected_iter);
            }
        }
        for (const auto& asset : expected)
        {
            std::cout << path << " [FN] " << get_name(asset.first);
            for (const auto& entry : asset.second)
                std::cout << "\t" << entry.first << ": " << entry.second;
            std::cout << std::endl;
        }
    };

    using properties = statistics_screen::properties;
    {
        test_image("german", "test_screenshots/Anno 1800 Res 2560x1080.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 4510}}) },
                { 15000001, properties({{statistics_screen::KEY_AMOUNT, 6140}}) },
                { 15000002, properties({{statistics_screen::KEY_AMOUNT, 960}}) },
                { 15000003, properties({{statistics_screen::KEY_AMOUNT, 0}}) },
                { 15000004, properties({{statistics_screen::KEY_AMOUNT, 0}}) },
                { 15000005, properties({{statistics_screen::KEY_AMOUNT, 0}}) },
                { 15000006, properties({{statistics_screen::KEY_AMOUNT, 0}}) }
            }));
    }
    {
        test_image("english", "test_screenshots/pop_global_bright_1920.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 1345}})},
                {15000001, properties({{statistics_screen::KEY_AMOUNT, 4236}})},
                {15000002, properties({{statistics_screen::KEY_AMOUNT, 4073}})},
                {15000003, properties({{statistics_screen::KEY_AMOUNT, 11214}})},
                {15000004, properties({{statistics_screen::KEY_AMOUNT, 174699}})},
                {15000005, properties({{statistics_screen::KEY_AMOUNT, 2922}})},
                {15000006, properties({{statistics_screen::KEY_AMOUNT, 8615}})}
            }));
    }
    {
        test_image("english", "test_screenshots/pop_global_dark_1680.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 1345}})},
                {15000001, properties({{statistics_screen::KEY_AMOUNT, 4236}})},
                {15000002, properties({{statistics_screen::KEY_AMOUNT, 4073}})},
                {15000003, properties({{statistics_screen::KEY_AMOUNT, 11275}})},
                {15000004, properties({{statistics_screen::KEY_AMOUNT, 174815}})},
                {15000005, properties({{statistics_screen::KEY_AMOUNT, 2922}})},
                {15000006, properties({{statistics_screen::KEY_AMOUNT, 8615}})}
            }));
    }
    {
        test_image("english", "test_screenshots/pop_global_dark_1920.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 1307}})},
                {15000001, properties({{statistics_screen::KEY_AMOUNT, 4166}})},
                {15000002, properties({{statistics_screen::KEY_AMOUNT, 4040}})},
                {15000003, properties({{statistics_screen::KEY_AMOUNT, 10775}})},
                {15000004, properties({{statistics_screen::KEY_AMOUNT, 167805}})},
                {15000005, properties({{statistics_screen::KEY_AMOUNT, 2856}})},
                {15000006, properties({{statistics_screen::KEY_AMOUNT, 8477}})} }));
    }
    {
        test_image("english", "test_screenshots/pop_island_artisans_1920.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 1460}})},
                {15000001, properties({{statistics_screen::KEY_AMOUNT, 2476}})},
                {15000002, properties({{statistics_screen::KEY_AMOUNT, 24}})},
                {15000003, properties({{statistics_screen::KEY_AMOUNT, 0}})},
                {15000004, properties({{statistics_screen::KEY_AMOUNT, 0}})},
                {15000005, properties({{statistics_screen::KEY_AMOUNT, 0}})},
                {15000006, properties({{statistics_screen::KEY_AMOUNT, 0}})} }));
    }
    {
        test_image("english", "test_screenshots/stat_pop_island_1.png",
            std::map<unsigned int, properties>({
                {15000005, properties({{statistics_screen::KEY_AMOUNT, 790},{statistics_screen::KEY_EXISTING_BUILDINGS, 79}})},
                {15000006, properties({{statistics_screen::KEY_AMOUNT, 518},{statistics_screen::KEY_EXISTING_BUILDINGS, 37}})} }));
    }
    {
        test_image("english", "test_screenshots/stat_pop_island_2.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 2097},{statistics_screen::KEY_EXISTING_BUILDINGS, 210}})},
                {15000001, properties({{statistics_screen::KEY_AMOUNT, 2480},{statistics_screen::KEY_EXISTING_BUILDINGS, 124}})},
                {15000002, properties({{statistics_screen::KEY_AMOUNT, 2100},{statistics_screen::KEY_EXISTING_BUILDINGS, 70}})},
                {15000003, properties({{statistics_screen::KEY_AMOUNT, 3040},{statistics_screen::KEY_EXISTING_BUILDINGS, 76}})},
                {15000004, properties({{statistics_screen::KEY_AMOUNT, 42},{statistics_screen::KEY_EXISTING_BUILDINGS, 1}})} }));
    }
    {
        test_image("english", "test_screenshots/stat_pop_island_3.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 1460},{statistics_screen::KEY_EXISTING_BUILDINGS, 146}})},
                {15000001, properties({{statistics_screen::KEY_AMOUNT, 2480},{statistics_screen::KEY_EXISTING_BUILDINGS, 124}})},
                {15000002, properties({{statistics_screen::KEY_AMOUNT, 24},{statistics_screen::KEY_EXISTING_BUILDINGS, 1}})},
                {15000003, properties({{statistics_screen::KEY_AMOUNT, 3040},{statistics_screen::KEY_EXISTING_BUILDINGS, 76}})},
                {15000004, properties({{statistics_screen::KEY_AMOUNT, 42},{statistics_screen::KEY_EXISTING_BUILDINGS, 1}})} }));
    }
    {
        test_image("english", "test_screenshots/stat_pop_island_4.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 1450},{statistics_screen::KEY_EXISTING_BUILDINGS, 145}})},
                {15000001, properties({{statistics_screen::KEY_AMOUNT, 2474},{statistics_screen::KEY_EXISTING_BUILDINGS, 124}})},
                {15000002, properties({{statistics_screen::KEY_AMOUNT, 47},{statistics_screen::KEY_EXISTING_BUILDINGS, 2}})} }));
    }
    {
        test_image("english", "test_screenshots/stat_pop_island_5.png",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_AMOUNT, 1430},{statistics_screen::KEY_EXISTING_BUILDINGS, 143}})},
                {15000001, properties({{statistics_screen::KEY_AMOUNT, 2443},{statistics_screen::KEY_EXISTING_BUILDINGS, 123}})},
                {15000002, properties({{statistics_screen::KEY_AMOUNT, 114},{statistics_screen::KEY_EXISTING_BUILDINGS, 5}})},
                {15000003, properties({{statistics_screen::KEY_AMOUNT, 3040},{statistics_screen::KEY_EXISTING_BUILDINGS, 76}})},
                {15000004, properties({{statistics_screen::KEY_AMOUNT, 42},{statistics_screen::KEY_EXISTING_BUILDINGS, 1}})} }));
    }
    {
        test_image("english", "test_screenshots/stat_pop_global_widescreen.png",
            std::map<unsigned int, properties>({
                {15000001, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 335},{statistics_screen::KEY_AMOUNT, 6514}})},
                {15000002, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 200},{statistics_screen::KEY_AMOUNT, 5337}})},
                {15000003, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 32},{statistics_screen::KEY_AMOUNT, 1069}})},
                {15000004, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})},
                {15000005, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 86},{statistics_screen::KEY_AMOUNT, 568}})},
                {15000006, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 59},{statistics_screen::KEY_AMOUNT, 1003}})},
                {112642, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})},
                {112643, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})} }));
    }
    {
        test_image("german", "test_screenshots/stat_pop_global_3_16_10.jpg",
            std::map<unsigned int, properties>({
                {15000000, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 145},{statistics_screen::KEY_AMOUNT, 1440}})},
                {15000001, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 89},{statistics_screen::KEY_AMOUNT, 1754}})},
                {15000002, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})},
                {15000003, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})},
                {15000004, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})},
                {15000005, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})},
                {112642, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})},
                {112643, properties({{statistics_screen::KEY_EXISTING_BUILDINGS, 0},{statistics_screen::KEY_AMOUNT, 0}})} }));
    }
    // NOTE: printed regardless of whether any [FP]/[MISS]/[DIFF]/[FN] lines
    // appeared above -- check the output, not just this line.
    std::cout << "all tests passed!" << std::endl;
    auto end = std::chrono::high_resolution_clock::now();
    std::chrono::duration<double, std::milli> duration = end - start;
    std::cout << duration.count() << " ms" << std::endl;
}
/*
 * Manual test driver: loads a screenshot (first CLI argument, or a default
 * test image), runs the statistics-screen recognition on it and dumps the
 * selected island, the island list and every recognized asset to stdout.
 */
int main(int argc, char** argv) {
    // true: presumably a verbose/debug flag -- confirm against the
    // image_recognition constructor.
    image_recognition recog(true);
    statistics image_recog(recog);
    //unit_tests(recog, image_recog);
    // cv::Mat src = image_recognition::load_image("C:/Users/Nico/Documents/Anno 1800/screenshot/screenshot_2019-12-31-13-03-20.jpg");
    // cv::Mat src = image_recognition::load_image("C:/Users/Nico/Pictures/Uplay/Anno 1800/Anno 18002020-1-6-0-32-3.png");
    // cv::Mat src = image_recognition::load_image("C:/Users/Nico/Documents/Dokumente/Computer/Softwareentwicklung/AnnoCalculatorServer/calculator-recognition-issues/population_number_slash_issue/screenshot6.png");
    // cv::Mat src = image_recognition::load_image("J:/Pictures/Uplay/Anno 1800/Anno 18002020-1-6-0-32-3.png");
    cv::Mat src = argc >= 2 ? image_recognition::load_image(argv[1]) : image_recognition::load_image("test_screenshots/stat_prod_global_3_16_10.jpg");
    image_recog.update("english", src);
    //image_recog.update("german", image_recognition::load_image("C:/Users/Nico/Documents/Dokumente/Computer/Softwareentwicklung/AnnoCalculatorServer/calculator-recognition-issues/island_name_mua/screenshot.png"));
    std::string island = image_recog.get_selected_island();
    std::cout << "Island: " << island << std::endl;
    std::cout << std::endl;
    // Print the detected islands; a failing ui_texts lookup skips the entry.
    const auto& islands = image_recog.get_islands();
    std::cout << "Island list: " << std::endl;
    for (const auto& entry : islands)
        try {
            std::cout << entry.first << "\t" << image_recog.get_dictionary().ui_texts.at(entry.second) << std::endl;
        }
        catch (const std::exception& e) {}
    std::cout << std::endl;
    // Dump each recognized asset; the at() name lookups intentionally
    // swallow exceptions when the GUID has no entry in either dictionary.
    const auto& dict = recog.dictionaries.at("english");
    for (const auto& asset : image_recog.get_all())
    {
        try { std::cout << dict.population_levels.at(asset.first); }
        catch (...) {}
        try { std::cout << dict.factories.at(asset.first); }
        catch (...) {}
        std::cout << ": { ";
        for (const auto& entry : asset.second)
            std::cout << entry.first << ": " << entry.second << ", ";
        std::cout << "}" << std::endl;
    }
    return 0;
}
def share_history_with_user(self, user_id=None, user_email=None, assert_valid=False, screenshot=False):
    """Share the current history with another user via the sharing page.

    :param user_id: identifier typed into the share-with multiselect
        (used in preference to ``user_email`` when both are given)
    :param user_email: email typed into the multiselect when no id is given
    :param assert_valid: when True, assert no error message appeared and
        wait until the shared user's email is visible on the page.
        NOTE(review): the validation xpath interpolates ``user_email`` even
        when the share was performed via ``user_id`` -- confirm callers
        always pass ``user_email`` together with ``assert_valid``.
    :param screenshot: when True, capture a "history_sharing_user"
        screenshot before submitting
    """
    self.navigate_to_history_share_page()
    self.components.histories.sharing.share_with_collapse.wait_for_and_click()
    multiselect = self.components.histories.sharing.share_with_multiselect.wait_for_and_click()
    self.components.histories.sharing.share_with_input.wait_for_and_send_keys(user_id or user_email)
    # Enter confirms the highlighted entry in the multiselect dropdown.
    self.send_enter(multiselect)
    if screenshot:
        self.screenshot("history_sharing_user")
    self.components.histories.sharing.submit_sharing_with.wait_for_and_click()
    if assert_valid:
        self.assert_no_error_message()
        xpath = f'//span[contains(text(), "{user_email}")]'
        self.wait_for_xpath_visible(xpath)
#include <stdio.h>
/*
 * Reads two arrays of n values, finds the minimum of each, and sums,
 * per index, the larger of the two distances to the respective minimum.
 * Input is consumed from stdin; the result is printed on its own line.
 */
void solve(){
    int n;
    long long a[50], b[50], minA, minB, sum = 0;
    scanf("%d", &n);
    scanf("%I64d", &a[0]);
    minA = a[0];
    for( int i = 1; i < n; i++ ){
        scanf("%I64d", &a[i]);
        if( a[i] < minA ) minA = a[i];
    }
    scanf("%I64d", &b[0]);
    minB = b[0];
    for( int i = 1; i < n; i++ ){
        scanf("%I64d", &b[i]);
        if( b[i] < minB ) minB = b[i];
    }
    for( int i = 0; i < n; i++ ){
        /* Distances to the minima are non-negative by construction. */
        long long s1 = ( a[i] > minA ) ? a[i] - minA : 0;
        long long s2 = ( b[i] > minB ) ? b[i] - minB : 0;
        sum += ( s1 > s2 ) ? s1 : s2;
    }
    printf("%I64d\n", sum);
}
/* Reads the number of test cases and runs solve() once per case. */
int main(){
    int t;
    scanf("%d", &t);
    for( ; t > 0; t-- ){
        solve();
    }
    return 0;
}
def match_title(event_title):
    """Return True when *event_title* begins with a known bill-status phrase.

    Matching is case-insensitive and ignores surrounding whitespace; the
    title only needs to *start* with one of the phrases, so trailing detail
    (dates, bill names, etc.) is allowed.

    :param event_title: raw event title string
    :return: bool
    """
    # Known lowercase prefixes of bill-status event titles.
    # (The original list contained "bill revived on this date" twice; the
    # duplicate has been removed -- it had no effect on the result.)
    bill_titles = (
        "bill amended and passed by ncop",
        "bill passed and amended by ncop",
        "bill passed and amended by the ncop",
        "bill passed and referred to the ncop",
        "bill passed and sent to the president for assent",
        "bill passed and submitted to the ncop",
        "bill passed by both",
        "bill passed by na",
        "bill passed by national assembly",
        "bill passed by national council of provinces",
        "bill passed by ncop",
        "bill passed by parliament",
        "bill passed by the national assembly",
        "bill passed by the national council of provinces",
        "bill passed by the ncop",
        "bill passed with proposed amendments",
        "bill revived on this date",
        "the ncop rescinded",
        "bill remitted",
    )
    event_title_lower = event_title.strip().lower()
    return any(event_title_lower.startswith(title) for title in bill_titles)
/**
 * Changes to the project that the selected document belongs to.
 * <p>
 * Looks the parent of the currently selected document up in the stored
 * project list and, when a matching entry exists, switches to that
 * project.  Does nothing when no match is found.
 */
public void change() {
    // NOTE(review): the meaning of the boolean argument is not visible
    // here -- confirm against selectFromList's contract.
    ProjectCommands inList = selectFromList(edtDoc[iDoc].fileParent(), true);
    if (inList != null) {
        change(inList);
    }
}
/**
 * xmlXIncludeFreeRef:
 * @ref: the XInclude reference
 *
 * Free an XInclude reference and everything it owns (its document,
 * URI, fragment and XPointer result).  Safe to call with NULL.
 */
static void
xmlXIncludeFreeRef(xmlXIncludeRefPtr ref) {
    if (ref == NULL)
        return;
#ifdef DEBUG_XINCLUDE
    xmlGenericError(xmlGenericErrorContext, "Freeing ref\n");
#endif
    /* The referenced document is owned by the ref and freed with it. */
    if (ref->doc != NULL) {
#ifdef DEBUG_XINCLUDE
        xmlGenericError(xmlGenericErrorContext, "Freeing doc %s\n", ref->URI);
#endif
        xmlFreeDoc(ref->doc);
    }
    if (ref->URI != NULL)
        xmlFree(ref->URI);
    if (ref->fragment != NULL)
        xmlFree(ref->fragment);
    if (ref->xptr != NULL)
        xmlXPathFreeObject(ref->xptr);
    xmlFree(ref);
}
<reponame>rabiavarol/METU-CENG
#ifndef VIDEOSHARE_HPP
#define VIDEOSHARE_HPP

#include <string>
#include <iostream>
#include "User.hpp"
#include "Video.hpp"
#include "LinkedList.hpp"

using namespace std;

/*
 * Video sharing network: keeps linked lists of users and videos and
 * manages friendships between users and their video subscriptions.
 * Assignment skeleton -- the public interface below must not be edited.
 */
class VideoShare {
private:
    LinkedList<User> users;   // all registered users
    LinkedList<Video> videos; // all shared videos
    /* TO-DO: you can private members(functions/variables) below */
    /* end of private member declaration */
public:
    /*....DO NOT EDIT BELOW....*/
    VideoShare();
    ~VideoShare();
    void createUser(const string & userName, const string & name = "", const string & surname = "");
    void loadUsers(const string & fileName);
    void createVideo(const string & title, const string & genre);
    void loadVideos(const string & fileName);
    void addFriendship(const string & userName1, const string & userName2);
    void removeFriendship(const string & userName1, const string & userName2);
    void updateUserStatus(const string & userName, Status newStatus);
    void subscribe(const string & userName, const string & videoTitle);
    void unSubscribe(const string & userName, const string & videoTitle);
    void deleteUser(const string & userName);
    void sortUserSubscriptions(const string & userName);
    void printAllUsers();
    void printAllVideos();
    void printUserSubscriptions(const string & userName);
    void printFriendsOfUser(const string & userName);
    void printCommonSubscriptions(const string & userName1, const string & userName2);
    void printFriendSubscriptions(const string & userName);
    bool isConnected(const string & userName1, const string & userName2);
};

#endif /* VIDEOSHARE_HPP */
|
// Silenced filters based on role's datacenters
func (u *Uchiwa) Silenced(data *[]interface{}, token *jwt.Token) []interface{} {
silenced := make([]interface{}, len(*data))
copy(silenced, *data)
return silenced
} |
Laura Jane Grace Covers The Mountain Goats' 'Best Ever Death Metal Band in Denton'
Enlarge this image toggle caption Ryan Russell, DL Andersoni/Courtesy of the artists Ryan Russell, DL Andersoni/Courtesy of the artists
John Darnielle tells stories that make you care so deeply about the people in them that when Darnielle begins to scrape away the layers of grit and glory, you sink deeply, helplessly into their psyche and hope things turn out fine, knowing they probably won't. You find them in his songs as The Mountain Goats, and his novels Wolf In White Van and Universal Harvester. It's not hard to be a John Darnielle fan — and once you're in, you never leave.
I Only Listen To The Mountain Goats is a new podcast from Night Vale Presents that, in this season, sees novelist and Welcome To Night Vale creator Joseph Fink and Darnielle tell the stories behind each song from the band's 2002 breakout, All Hail West Texas. Today they released the first episode, featuring "The Best Ever Death Metal Band Out Of Denton," a song that's almost become a statement of purpose for Darnielle — not just because yelling along to "hail Satan... tonight!" is fun as hell (it is), but because you feel the characters' flesh and blood and triumph and potential failure, all under three minutes.
The podcast also features a track-by-track cover album. You'll hear a new recording each week by the likes of Andrew Bird, Dessa, Carrie Elkin, Craig Finn, Holy Sons, Ibibio Sound Machine, Julian Koster, Loamlands, Erin McKeown, Mothers, Nana Grizol, Amanda Palmer, Eliza Rickman & Jherek Bischoff.
Laura Jane Grace handles the first cover and it's not hard to hear a younger version of the Against Me! singer-songwriter banging out "The Best Ever Death Metal Band Out Of Denton" in her bedroom.
Take a listen to the first episode of I Only Listen To The Mountain Goats. |
<gh_stars>0
# -*- coding: utf-8 -*-
# Command-line entry point (Python 2 syntax): builds a RedditUser profile
# for the username given as the first argument, prints it, and reports
# the elapsed processing time.
import sys
import datetime
import getopt
from reddit_user import RedditUser, UserNotFoundError, NoDataError
# NOTE(review): sys.argv[1] is read without a length check -- running with
# no argument raises IndexError before the error handling below applies.
print "Processing user %s" % sys.argv[1]
start = datetime.datetime.now()
try:
    u = RedditUser(sys.argv[1])
    print u
except UserNotFoundError:
    print "User %s not found" % sys.argv[1]
except NoDataError:
    print "No data available for user %s" % sys.argv[1]
print "Processing complete... %s" % (datetime.datetime.now() - start)
Well, this would be interesting.
This Sunday will be a homecoming of sorts for one player on the Miami roster: QB J.P. Losman, former Buffalo first-round draft bust turned Miami backup. Losman’s mostly held a clipboard this season, but he’s looking likely to get the start this weekend against the Bills. Dolphins starter Matt Moore suffered a concussion last week against Philadelphia and has yet to return to practice. Interim Miami head coach Todd Bowles confirmed that Losman took the starter’s reps in practice Wednesday, but did not say that Moore has been ruled out of Sunday’s game.
J.P. back in Buffalo? Oh, boy.
I have to admit, I’m a little afraid. After all, the Bills have already been burned by one former draft bust this season. That #@$#@$ Aaron Maybin managed to get two sacks in a Jets win earlier this year, and I don’t think I’ve ever been more angry about something in a Bills game. If freaking J.P. Losman torches the Bills as well? I might just have a heart attack.
Of course, it would be sort of fun to monitor the crowd if Losman does go off and have a big game. That crowd might turn UGLY. It’s already going to be an unpleasant place due to that whole “six game losing streak” thing the Bills are doing right now. A Losman-led Dolphins victory might just lead to a full-scale riot.
Fortunately, Losman didn’t look great in relief duty last week against the Eagles. He finished 6-f0r-10 passing for a paltry 60 yards. He was also sacked for a safety late in the game. |
/// Add an image to the `Glyph`.
///
/// Errors when format version 1 is set or the function is called more than once.
pub fn image(&mut self, image: Image) -> Result<&mut Self, ErrorKind> {
if self.glyph.format == GlifVersion::V1 {
return Err(ErrorKind::UnexpectedTag);
}
if self.glyph.image.is_some() {
return Err(ErrorKind::UnexpectedDuplicate);
}
self.glyph.image.replace(image);
Ok(self)
} |
<filename>bin/MongoToolbeltCLI/CLI.hs<gh_stars>0
-- | Command-line interface definition for the Mongo toolbelt executable.
module CLI
  ( opts
  , Command(..)
  , SearchIdOpts(..)
  ) where

import Data.Bson (ObjectId)
import Options.Applicative
import Safe (readMay)

-- | Top-level parser info: dispatches to the subcommand parsers and
-- wires in @--help@ support.
opts :: ParserInfo Command
opts = info (actionParser <**> helper)
  ( fullDesc
  <> progDesc "Set of helpers to keep your sanity when using MongoDB"
  )

-- | Parser for the supported subcommands.  Currently only @search-id@,
-- which takes a single 'ObjectId' positional argument parsed via 'readMay'.
actionParser :: Parser Command
actionParser =
  let parser = SearchId . SearchIdOpts <$> argument (maybeReader readMay) idm
      desc = progDesc "Search usage of an ID across the database"
  in subparser (command "search-id" $ info parser desc)

-- | A CLI action to execute.
data Command
  = SearchId SearchIdOpts

-- | Options for the @search-id@ subcommand.
data SearchIdOpts
  = SearchIdOpts { idToSearch :: ObjectId
                 }
|
// chown.c
//
// Time-stamp: <18/02/01 11:07:50 keuchel@w2k>

#include "celib.h"

/*
 * chown() replacement stub for Windows CE.
 *
 * Windows CE has no notion of file owners or groups, so this is a
 * deliberate no-op that always reports success; all parameters are
 * ignored.
 */
int
xcechown(const char *filename, int owner, int group)
{
    return 0;
}
|
/**
 * Output an object that is not an array. If <code>toString()</code> (other than the base definition in Object) is declared then
 * use that otherwise make a recursive call.
 * @param sb where the output is to be placed.
 * @param obj to be output.
 * @param prefix to be prepended to each line
 */
private static void objectToString(final StringBuilder sb, final Object obj, final String prefix) {
    // Iterables are formatted element-wise by the dedicated helper.
    if (obj instanceof Iterable) {
        iterableToString(sb, (Iterable<?>) obj, prefix);
        return;
    }
    final Class<?> clazz = obj.getClass();
    if (toStringDeclared(clazz)) {
        // The class overrides toString() (beyond Object's default), so
        // trust it to render a readable representation on one line.
        sb.append(prefix);
        sb.append(obj);
    } else {
        // No custom toString(): recurse via the generic formatter with
        // one extra space of indentation.
        toString(obj, sb, prefix + " ");
    }
}
/**
 * TransactionLifecycleErrorEventListener: destroys the current transaction
 * context when a {@link TransactionLifecycleErrorEvent} (e.g. a rollback)
 * is received.
 * <p>
 * NOTE(review): the previous comment said this "creates a new transaction
 * context", which contradicts the destroy call below.
 */
@SuppressWarnings("rawtypes")
public static class TransactionLifecycleErrorEventListener extends TransactionContextLifecycleAwareAdapter
        implements ApplicationListener<TransactionLifecycleErrorEvent> {

    @Override
    public void onApplicationEvent(final TransactionLifecycleErrorEvent transactionLifecycleErrorEvent) {
        LOGGER.debug("[START] onApplicationEvent(TransactionLifecycleErrorEvent)");
        // transaction rollbacked: destroy its context from the manager
        // instance
        transactionContextLifecycle.destroyTransactionContext();
        LOGGER.debug("[END] onApplicationEvent(TransactionLifecycleErrorEvent)");
    }
}
import * as React from 'react'
import classNames from 'classnames'
import Icon from '../icon'
import TimelineItem from './TimelineItem'
import ConfigContext from '../config-provider/ConfigContext'
import { getCompProps } from '../_utils'
// Public props of the Timeline component.
export interface TimelineProps {
  reverse?: boolean // render items newest-first and put the pending item on top
  prefixCls?: string // custom CSS class prefix
  className?: string
  labelWidth?: number // horizontal space (px) reserved for item labels
  lineHeight?: number // forwarded to each item
  pending?: React.ReactNode // truthy adds a trailing "pending" item; non-boolean values become its content
  style?: React.CSSProperties
  pendingDot?: React.ReactNode // custom dot for the pending item (defaults to a spinning icon)
  mode?: 'left' | 'alternate' | 'right' // which side labels/content appear on
}

interface TimelineType extends React.FC<TimelineProps> {
  Item: typeof TimelineItem
}
// Timeline: renders its TimelineItem children as a vertical list,
// optionally reversed and/or terminated by a "pending" item.
const Timeline: TimelineType = (props) => {
  const { getPrefixCls, prefixCls: pkgPrefixCls, compDefaultProps: userDefaultProps } = React.useContext(ConfigContext)
  const {
    prefixCls: customizePrefixCls,
    pending = null,
    pendingDot,
    children,
    className,
    reverse,
    mode,
    style,
    lineHeight,
    labelWidth,
    ...restProps
  } = getCompProps('Timeline', userDefaultProps, props)
  const prefixCls = getPrefixCls!(pkgPrefixCls, 'timeline', customizePrefixCls)
  // `pending === true` shows the pending item without content; any other
  // truthy value is used as the pending item's content.
  const pendingNode = typeof pending === 'boolean' ? null : pending
  // NOTE(review): "loadding" looks misspelled, but it must match the icon
  // name registered in the icon set -- verify before "fixing" it.
  const pendingItem = pending ? (
    <TimelineItem pending={!!pending} dot={pendingDot || <Icon type="loadding" spin />}>
      {pendingNode}
    </TimelineItem>
  ) : null
  // Pending item goes first when reversed, last otherwise.
  const timeLineItems = reverse
    ? [pendingItem, ...React.Children.toArray(children).reverse()]
    : [...React.Children.toArray(children), pendingItem]
  const truthyItems = timeLineItems.filter((item) => !!item)
  const itemsCount = React.Children.count(truthyItems)
  // Tag the visually-last real item with the "last" class; with a pending
  // item appended (not reversed) that is the second-to-last element.
  const items = React.Children.map(truthyItems, (ele: React.ReactElement<any>, index) => {
    const pendingClass = index === itemsCount - 2 ? 'last' : ''
    const readyClass = index === itemsCount - 1 ? 'last' : ''
    return React.cloneElement(ele, {
      className: classNames([ele.props.className, !reverse && !!pending ? pendingClass : readyClass]),
    })
  })
  // Label layout is only activated when at least one item carries a label.
  const hasLabelItem = truthyItems.some((item: React.ReactElement<any>) => !!item.props.label)
  const classString = classNames(
    prefixCls,
    {
      pending: !!pending,
      reverse: !!reverse,
      [mode]: mode,
      label: hasLabelItem,
    },
    className,
  )
  // Reserve labelWidth on the label side so labels don't overlap content.
  const styleString = {
    ...style,
    paddingLeft: hasLabelItem && mode === 'left' ? `${labelWidth}px` : undefined,
    paddingRight: hasLabelItem && mode === 'right' ? `${labelWidth}px` : undefined,
  }
  return (
    <ul {...restProps} className={classString} style={styleString}>
      {items?.map((item: React.ReactElement) => {
        return React.cloneElement(item, {
          mode,
          labelWidth,
          lineHeight,
          // A single space keeps label alignment for unlabeled items.
          label: item.props.label || (hasLabelItem && ' '),
        })
      })}
    </ul>
  )
}
Timeline.displayName = 'Timeline'
Timeline.Item = TimelineItem
export default Timeline
|
package events
import (
v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/client-go/tools/record"
)
const (
	// EventRecorderName is the name under which events are recorded.
	EventRecorderName = "varnish"
	// EventReasonReloadError is the reason attached to reload-failure events.
	EventReasonReloadError EventReason = "ReloadError"
	// EventReasonVCLCompilationError is the reason attached to VCL compilation failures.
	EventReasonVCLCompilationError EventReason = "VCLCompilationError"
	// annotationSourcePod is the annotation key that carries the name of the
	// pod which produced the event (see EventHandler.Warning / Normal).
	annotationSourcePod string = "sourcePod"
)
// EventReason is the reason why the event was created. The value appears in
// the 'Reason' column of the events list.
type EventReason string
// NewEventHandler creates a new event handler that will use the specified
// recorder. podName identifies the pod emitting the events; it is attached
// to every event as the "sourcePod" annotation.
func NewEventHandler(recorder record.EventRecorder, podName string) *EventHandler {
	return &EventHandler{
		Recorder: recorder,
		podName: podName,
	}
}
// EventHandler handles the operations for events
type EventHandler struct {
	// Recorder performs the actual event recording against the API server.
	Recorder record.EventRecorder
	// podName is attached to every event via the "sourcePod" annotation.
	podName string
}
// Warning creates a 'warning' type event for the given object, annotated
// with the name of the pod that produced it.
func (e *EventHandler) Warning(object runtime.Object, reason EventReason, message string) {
	annotations := map[string]string{annotationSourcePod: e.podName}
	e.Recorder.AnnotatedEventf(object, annotations, v1.EventTypeWarning, string(reason), message)
}
// Normal creates a 'normal' type event for the given object, annotated
// with the name of the pod that produced it.
func (e *EventHandler) Normal(object runtime.Object, reason EventReason, message string) {
	annotations := map[string]string{annotationSourcePod: e.podName}
	e.Recorder.AnnotatedEventf(object, annotations, v1.EventTypeNormal, string(reason), message)
}
|
import * as fs from 'fs'
import * as path from 'path'
/**
 * Resolve a path inside the current working directory by joining the given
 * segments onto process.cwd().
 */
// eslint-disable-next-line import/prefer-default-export
export function getContentDirectory(...paths: string[]) {
  const root = process.cwd()
  return path.join(root, ...paths)
}
/**
 * List the entries of a content directory (relative to the project root),
 * dropping any falsy names.
 */
export function getContentFiles(contentPath: string) {
  const dir = getContentDirectory(contentPath)
  const entries = fs.readdirSync(dir)
  return entries.filter((entry) => Boolean(entry))
}
|
Three people are stranded on a small island. One is a physicist, one is a circus strongman, and one is an economist. After a few days of surviving on fruit, they discover a cache of canned food, and they have to decide how to open it. The physicist says to the strongman "Why don't you climb that tree, and smash the cans down on the rocks, and burst them open?"
The strongman says, "No, that would spatter the stuff all over. I can open the cans with my teeth!"
The economist says "First, we must assume that we have a can opener."
A man takes a balloon ride at a local country fair. A fierce wind suddenly kicks up, causing the balloon to violently leave the fair and carry its occupant out into the countryside. The man has no idea where he is, so he goes down to five meters above ground and asks a passing wanderer: "Excuse me, sir, can you tell me where I am?" Eyeing the man in the balloon the passer-by says: "You are in a downed red balloon, five meters above ground." The balloon's unhappy resident replied, "You must be an economist." an engineer." an accountant." a consultant." a mathematician." a Microsoft tutorial writer." an IBM sales representative." "How could you possible know that?" asked the passer-by.
"Because your answer is technically correct but absolutely useless, and the fact is I am still lost." "How could you possibly know that?" asked the passer-by. "Because your answer is technically correct but absolutely useless, and the fact is I am still lost." "Then you must be in management," said the passer-by.
"That's right! How did you know?"
"You have such a good view from where you are, and yet you don't know where you are and you don't know where you are going. The fact is you are in the exact same position you were in before we met, but now your problem is somehow my fault!"
An economist and an accountant are walking along a large puddle. They come across a frog jumping on the mud. The economist says: "If you eat the frog I'll give you $20,000!" The accountant checks his budget and figures out he's better off eating it, so he does and collects the money. Continuing along the same puddle they almost step into yet another frog. The accountant says: "Now, if you eat this frog I'll give you $20,000."
After evaluating the proposal the economist eats the frog and gets the money. They go on. The accountant starts thinking: "Listen, we both have the same amount of money we had before, but we both ate frogs. I don't see us being better off."
The economist: "Well, that's true, but you overlooked the fact that we've been just involved in $40,000 of trade."
Why has astrology been invented? So that economy could be an accurate science. An economist returns to visit his old school. He's interested in the current exam questions and asks his old professor to show some. To his surprise they are exactly the same ones to which he had answered 10 years ago! When he asks about this the professor answers: "the questions are always the same - only the answers change!" An economic forecaster was known to have an horseshoe prominently displayed above the doorframe of his office. Asked what it was for, he replied "it is a good luck charm that helps my forecasts".
"But do you believe in that superstition?" he was asked.
"Of course not!" he said, "but it works whether you believe in it or not."
An economist was leaving his office building and saw a little boy sitting on the curb with a dog. The boy yelled at the economist, "Hey, how would you like to buy a dog."The man was intrigued by this sales approach and asked the boy, "How much do you want for your dog."The boy told him, "Fifty thousand dollars.""Fifty thousand dollars!" the man repeated in astonishment. "What special tricks does this dog do that he can earn enough money to be worth fifty thousand dollars?" the man asked the boy.The boy replied, "Mister, this dog never made a nickel in his life. Matter of fact, count what he eats I guess you could say you lose money on him every year."
The economist felt this was a good time to explain economics to the young man and expounded on how an item had to produce more income than it consumed to equal a purchase price ending with he might get five dollars from someone who just wanted a companion. Feeling he had imparted a very valuable lesson to the young man, the economist went on his way.
A few weeks later, the economist came out of his office building and the small boy was again sitting on the curb minus the dog. The man said to him, "I see you took my advice and sold the dog for five dollars."
The boy said, "No, I got fifty thousand dollars for him."
The businessman was completely flabbergasted. "How did you ever get fifty thousand dollars for that dog?" he asked.
"It was easy," said the boy. "I traded him for two twenty five thousand dollar cats."
There are two types of economists:
- those who cannot forecast interest rates, and
- those who do not know that they cannot forecast interest rates.
George T. Milkovich and Jerry M. Newman, "Compensation"
Ronald Reagan
Feudalism:
Socialism:
Communism:
Bureaucratic Communism:
Bureaucracy:
Fascism:
Nazism:
Liberalism:
Capitalism:
How economists do it... Economists do it cyclically.
Economists do it on demand.
Economists do it with models.
Economists do it with crystal balls.
How many economists does it take to change a light bulb?
An economics professor and a student were strolling through the campus."Look," the student cried, "there's a $100 bill on the path!""No, you are mistaken," the wiser head replied. "That cannot be. If there were actually a $100 bill, someone would have picked it up." Inflation allows you to live in a more expensive neigbourhood without moving. Government's view of the economy could be summed up in a few short phrases: If it moves, tax it. If it keeps moving, regulate it. And if it stops moving, subsidise it.You have two cows. Your lord takes some of the milk.You have two cows. State takes one and gives it to someone else.You have two cows. State takes both of them and gives you as much milk as you need.You have two cows. State takes both of them and gives you as much milk as the regulations say you should need.You have two cows. State regulates what you can feed them and when you can milk them. Then it pays you not to milk them. After that it takes both cows, shoots one, milks the other and pours the milk down the drain. Then it requires you to fill out forms accounting for the missing cows.You have two cows. State takes both of them and sells you milk.You have two cows. State takes both of them and shoots you.You have two cows. State doesn't care whether you exist, let alone your cows.You have two cows. You sell one and buy a bull.None. If it really needed changing, market forces would have caused it to happen.None. If the government would just leave it alone, it would screw itself in.None. The invisible hand does it.Two. One to assume the existence of ladder and one to change the bulb.Eight. One to change it and seven to hold everything else constant.One to prepare the proposal, an econometrician to run the model, one each MS and PhD students to write the theses and dissertations, two more to prepare the journal article (senior authorship not assigned), four to review it, and at least as many to refine the model and replicate the results.
Senior management had collected a lot of operations data but did not know what to do with it. They knew they needed a numbers person and decided to interview an accountant, an engineer and an economist. During the interview they assessed their math skills.
First was the accountant.
Interview: What is 1+1?
Accountant: 1+1 = 2.
Interview: Are you sure?
Accountant: Absolutely. 1+1 equals 2 and only 2.
Next the engineer.
Interview: What is 1+1?
Engineer: 1+1 = 2.
Interview: Are you sure?
Engineer: Well, within acceptable tolerance levels yes, 1+1 is 2.
Last the economist.
Interview: What is 1+1?
The economist got up, closed the door, drew the blinds, leaned across the table and replied "What do you want it to equal?"
Peter |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: <NAME>
"""
# ROS imports
import rospy
from geometry_msgs.msg import Twist
from geometry_msgs.msg import PoseStamped
from std_msgs.msg import UInt16
from tf.transformations import euler_from_quaternion
from nav_msgs.msg import Path
# Python imports
import numpy as np
# HAWS imports
from ha_model import *
from hasimpy import *
import ha_model
#global variables
# robot pose and inputs, updated asynchronously by ps_callback and read
# by the main loop in start()
x = 1.0  # x position
y = 1.0  # y position
h = 0.0  # Theta: heading (yaw) angle extracted from the pose quaternion
def ps_callback(ps):
    """Update the global pose estimate (x, y, h) from a PoseStamped message."""
    global x, y, h
    x = ps.pose.position.x
    y = ps.pose.position.y
    #PoseStamped orientation is a quaternion
    #needs to be converted to theta (h); only yaw is kept
    (roll,pitch,yaw) = euler_from_quaternion([ps.pose.orientation.x,
                                        ps.pose.orientation.y,
                                        ps.pose.orientation.z,
                                        ps.pose.orientation.w])
    h = yaw
def start():
global x, y, h
rospy.Subscriber('turtle1/pose', PoseStamped, ps_callback)
pub = rospy.Publisher('q', UInt16, queue_size = 1000)
# starts the node
rospy.init_node('mode_tracker')
r = rospy.Rate(200)
# before the loops, grabs the initial values for qID
qID = ha_model.h.Init_qID
#ROS main loop
while not rospy.is_shutdown():
# update state from ps_callback
X = np.array([x, y, h])
print 'qID:', qID , 'X:', X
# update the current mode
qID = ha_model.h.mode_tracker_guard_check(qID, X)
#Publish message to topic
pub.publish(qID)
r.sleep()
# Entry point: start the node's main loop when executed as a script
if __name__ == '__main__':
    start()
|
<reponame>leftiness/hex_math
// Submodules implementing the traversal items re-exported below.
mod ray;
mod through;
// Re-export the public items at this module's root.
pub use self::ray::Ray;
pub use self::through::Through;
|
<filename>packages/api-console-gui/src/app/app.component.ts
import { Component, ElementRef } from '@angular/core';
import { DuiApp } from '@deepkit/desktop-ui';
import { ControllerClient } from './client';
@Component({
    selector: 'app-root',
    template: `
        <dui-window>
            <dui-window-header size="small">
                <dui-window-toolbar>
                    <deepkit-header-logo title="API Console"></deepkit-header-logo>
                    <dui-window-toolbar-container name="main"></dui-window-toolbar-container>
                    <div class="top-right">
                        <div>
                            <a routerLink="/api">OVERVIEW</a>
                        </div>
                        <deepkit-header-status-bar></deepkit-header-status-bar>
                    </div>
                </dui-window-toolbar>
            </dui-window-header>
            <dui-window-content [sidebarVisible]="sidebarVisible" class="no-padding">
                <router-outlet></router-outlet>
            </dui-window-content>
        </dui-window>
    `,
    styleUrls: ['./app.component.scss']
})
/**
 * Root shell of the API Console: window chrome, header toolbar, and a
 * router outlet for the routed pages.
 */
export class AppComponent {
    // Controls the [sidebarVisible] binding of the window content.
    sidebarVisible: boolean = true;

    constructor(
        public duiApp: DuiApp,
        public client: ControllerClient,
        public host: ElementRef<HTMLElement>,
    ) {
        // The hosting page may inject the RPC controller name via a
        // `controller` attribute on the root element. 'APP_CONTROLLER_NAME'
        // is the untouched build-time placeholder and is ignored.
        const controller = host.nativeElement.getAttribute('controller');
        if (controller && controller !== 'APP_CONTROLLER_NAME') {
            this.client.setController(controller);
        }
    }
}
|
//============================================================================
// update the plane rate depending on the occupancy

void
OctreePlanarState::updateRate(int occupancy, int numSiblings)
{
  // occupancy is an 8-bit mask of occupied child sub-nodes.  The node is
  // "planar" along an axis when all occupied children fall within a single
  // half of the node along that axis, i.e. at most one of the two
  // complementary 4-bit groups contains set bits.
  bool isPlanarX = !((occupancy & 0xf0) && (occupancy & 0x0f));
  bool isPlanarY = !((occupancy & 0xcc) && (occupancy & 0x33));
  bool isPlanarZ = !((occupancy & 0x55) && (occupancy & 0xaa));

  // Exponential moving average with decay 255/256: pull each rate toward
  // 8*256 when the node is planar along that axis, toward 0 otherwise.
  // The +128 term rounds the >>8 division.
  _rate[0] = (255 * _rate[0] + (isPlanarX ? 256 * 8 : 0) + 128) >> 8;
  _rate[1] = (255 * _rate[1] + (isPlanarY ? 256 * 8 : 0) + 128) >> 8;
  _rate[2] = (255 * _rate[2] + (isPlanarZ ? 256 * 8 : 0) + 128) >> 8;
  // Same moving-average scheme for the local occupancy density estimate.
  _localDensity = (255 * _localDensity + 1024 * numSiblings) >> 8;
}
def load_splits(
    self, splits_dir: str
) -> Dict[str, Tuple[List[str], List[int]]]:
    """Load dataset splits from CSV files.

    Reads every regular file under ``<data_dir>/processed/<splits_dir>``;
    each row contributes the value in column 2 as a data item and the
    value in column 1 as its label.  The file name's stem (text before the
    first '.') becomes the split name.

    Returns:
        Mapping from split name to a ``(data, labels)`` pair.  An empty
        file yields a pair of empty sequences (the previous code raised
        ``ValueError`` from ``zip(*[])`` in that case).

    NOTE(review): the annotation promises ``List[int]`` labels, but values
    are returned exactly as read from the CSV (sequences of strings) —
    confirm callers' expectations before tightening either side.
    """
    splits = {}
    splits_path = os.path.join(self._data_dir, 'processed', splits_dir)
    for fname in os.listdir(splits_path):
        fpath = os.path.join(splits_path, fname)
        # Skip sub-directories; only flat files are treated as splits.
        if os.path.isdir(fpath):
            continue
        with open(fpath) as fp:
            rows = [(row[2], row[1]) for row in csv.reader(fp)]
        # zip(*rows) fails on an empty file; substitute empty sequences.
        data, labels = zip(*rows) if rows else ((), ())
        splits[fname.split('.')[0]] = (data, labels)
    return splits
/**
* Reactive operations that really belong to {@link EntityType}
*
* @author Gavin King
*/
public class EntityTypes {
/**
* Replacement for {@link EntityType#resolve(Object, SharedSessionContractImplementor, Object, Boolean)}
*/
public static CompletionStage<Object> resolve(EntityType entityType, Object idOrUniqueKey, Object owner,
SharedSessionContractImplementor session) {
if ( idOrUniqueKey != null && !isNull( entityType, owner, session ) ) {
if ( entityType.isReferenceToPrimaryKey() ) {
return ((ReactiveQueryExecutor) session).reactiveInternalLoad(
entityType.getAssociatedEntityName(),
(Serializable) idOrUniqueKey,
true,
entityType.isNullable()
);
}
else {
return loadByUniqueKey( entityType, idOrUniqueKey, session );
}
}
else {
return null;
}
}
/**
* @see OneToOneType#isNull(Object, SharedSessionContractImplementor)
*/
static boolean isNull(EntityType entityType, Object owner,
SharedSessionContractImplementor session) {
if ( entityType instanceof OneToOneType) {
OneToOneType type = (OneToOneType) entityType;
String propertyName = type.getPropertyName();
if ( propertyName != null ) {
EntityPersister ownerPersister =
session.getFactory().getMetamodel()
.entityPersister( entityType.getAssociatedEntityName() );
Serializable id = session.getContextEntityIdentifier(owner);
EntityKey entityKey = session.generateEntityKey(id, ownerPersister);
return session.getPersistenceContextInternal().isPropertyNull( entityKey, propertyName);
}
else {
return false;
}
}
else {
return false;
}
}
/**
* Load an instance by a unique key that is not the primary key.
*
* @param entityType The {@link EntityType} of the association
* @param key The unique key property value.
* @param session The originating session.
*
* @return The loaded entity
*
* @throws HibernateException generally indicates problems performing the load.
*/
static CompletionStage<Object> loadByUniqueKey(
EntityType entityType,
Object key,
SharedSessionContractImplementor session) throws HibernateException {
SessionFactoryImplementor factory = session.getFactory();
String entityName = entityType.getAssociatedEntityName();
String uniqueKeyPropertyName = entityType.getRHSUniqueKeyPropertyName();
ReactiveEntityPersister persister =
(ReactiveEntityPersister) factory.getMetamodel().entityPersister( entityName );
//TODO: implement 2nd level caching?! natural id caching ?! proxies?!
EntityUniqueKey euk = new EntityUniqueKey(
entityName,
uniqueKeyPropertyName,
key,
entityType.getIdentifierOrUniqueKeyType( factory ),
persister.getEntityMode(),
factory
);
PersistenceContext persistenceContext = session.getPersistenceContextInternal();
Object result = persistenceContext.getEntity( euk );
if ( result != null ) {
return completedFuture( persistenceContext.proxyFor( result ) );
}
else {
return persister.reactiveLoadByUniqueKey( uniqueKeyPropertyName, key, session )
.thenApply( loaded -> {
// If the entity was not in the Persistence Context, but was found now,
// add it to the Persistence Context
if ( loaded != null ) {
persistenceContext.addEntity(euk, loaded);
}
return loaded;
} );
}
}
/**
* @see org.hibernate.type.TypeHelper#replace(Object[], Object[], Type[], SharedSessionContractImplementor, Object, Map)
*/
public static CompletionStage<Object[]> replace(
final Object[] original,
final Object[] target,
final Type[] types,
final SessionImplementor session,
final Object owner,
final Map copyCache) {
Object[] copied = new Object[original.length];
for ( int i=0; i<types.length; i++ ) {
if ( original[i] == UNFETCHED_PROPERTY || original[i] == UNKNOWN ) {
copied[i] = target[i];
}
else {
if ( !(types[i] instanceof EntityType) ) {
copied[i] = types[i].replace(
original[i],
target[i] == UNFETCHED_PROPERTY ? null : target[i],
session,
owner,
copyCache
);
}
}
}
return loop(0, types.length,
i -> original[i] != UNFETCHED_PROPERTY && original[i] != UNKNOWN
&& types[i] instanceof EntityType,
i -> replace(
(EntityType) types[i],
original[i],
target[i] == UNFETCHED_PROPERTY ? null : target[i],
session,
owner,
copyCache
).thenAccept( copy -> copied[i] = copy )
).thenApply( v -> copied );
}
/**
* @see org.hibernate.type.TypeHelper#replace(Object[], Object[], Type[], SharedSessionContractImplementor, Object, Map, ForeignKeyDirection)
*/
public static CompletionStage<Object[]> replace(
final Object[] original,
final Object[] target,
final Type[] types,
final SessionImplementor session,
final Object owner,
final Map copyCache,
final ForeignKeyDirection foreignKeyDirection) {
Object[] copied = new Object[original.length];
for ( int i=0; i<types.length; i++ ) {
if ( original[i] == UNFETCHED_PROPERTY || original[i] == UNKNOWN ) {
copied[i] = target[i];
}
else {
if ( !(types[i] instanceof EntityType) ) {
copied[i] = types[i].replace(
original[i],
target[i] == UNFETCHED_PROPERTY ? null : target[i],
session,
owner,
copyCache,
foreignKeyDirection
);
}
}
}
return loop(0, types.length,
i -> original[i] != UNFETCHED_PROPERTY && original[i] != UNKNOWN
&& types[i] instanceof EntityType,
i -> replace(
(EntityType) types[i],
original[i],
target[i] == UNFETCHED_PROPERTY ? null : target[i],
session,
owner,
copyCache,
foreignKeyDirection
).thenAccept( copy -> copied[i] = copy )
).thenApply( v -> copied );
}
/**
* @see org.hibernate.type.AbstractType#replace(Object, Object, SharedSessionContractImplementor, Object, Map, ForeignKeyDirection)
*/
private static CompletionStage<Object> replace(
EntityType entityType,
Object original,
Object target,
SessionImplementor session,
Object owner,
Map copyCache,
ForeignKeyDirection foreignKeyDirection)
throws HibernateException {
boolean include = entityType.isAssociationType()
? entityType.getForeignKeyDirection() == foreignKeyDirection
: ForeignKeyDirection.FROM_PARENT == foreignKeyDirection;
return include
? replace( entityType, original, target, session, owner, copyCache )
: completedFuture(target);
}
/**
* @see EntityType#replace(Object, Object, SharedSessionContractImplementor, Object, Map)
*/
private static CompletionStage<Object> replace(
EntityType entityType,
Object original,
Object target,
SessionImplementor session,
Object owner,
Map copyCache) {
if ( original == null ) {
return nullFuture();
}
Object cached = copyCache.get( original );
if ( cached != null ) {
return completedFuture(cached);
}
else {
if ( original == target ) {
return completedFuture(target);
}
if ( session.getContextEntityIdentifier( original ) == null ) {
return ForeignKeys.isTransient( entityType.getAssociatedEntityName(), original, false, session )
.thenCompose( isTransient -> {
if ( isTransient ) {
// original is transient; it is possible that original is a "managed" entity that has
// not been made persistent yet, so check if copyCache contains original as a "managed" value
// that corresponds with some "merge" value.
if ( copyCache.containsValue( original ) ) {
return completedFuture(original);
}
else {
// the transient entity is not "managed"; add the merge/managed pair to copyCache
final Object copy = session.getEntityPersister( entityType.getAssociatedEntityName(), original )
.instantiate( null, session );
copyCache.put( original, copy );
return completedFuture(copy);
}
}
else {
return resolveIdOrUniqueKey( entityType, original, session, owner, copyCache );
}
} );
}
else {
return resolveIdOrUniqueKey( entityType, original, session, owner, copyCache );
}
}
}
private static CompletionStage<Object> resolveIdOrUniqueKey(
EntityType entityType,
Object original,
SessionImplementor session,
Object owner,
Map copyCache) {
return getIdentifier( entityType, original, session )
.thenCompose( id -> {
if ( id == null ) {
throw new AssertionFailure(
"non-transient entity has a null id: "
+ original.getClass().getName()
);
}
// For the special case of a @ManyToOne joined on a (non-primary) unique key,
// the "id" class is actually the associated entity object itself, but treated
// as a ComponentType. In the case that the entity is unfetched, we need to
// explicitly fetch it here before calling replace(). (Note that in Hibernate
// ORM this is unnecessary due to transparent lazy fetching.)
return ((ReactiveSessionImpl) session).reactiveFetch( id, true )
.thenCompose( fetched -> {
Object idOrUniqueKey =
entityType.getIdentifierOrUniqueKeyType( session.getFactory() )
.replace( fetched, null, session, owner, copyCache);
return resolve( entityType, idOrUniqueKey, owner, session );
} );
} );
}
/**
* see EntityType#getIdentifier(Object, SharedSessionContractImplementor)
*/
private static CompletionStage<Object> getIdentifier(EntityType entityType, Object value, SessionImplementor session) {
if ( entityType.isReferenceToIdentifierProperty() ) {
return ForeignKeys.getEntityIdentifierIfNotUnsaved(
entityType.getAssociatedEntityName(),
value,
session
); //tolerates nulls
}
else if ( value == null ) {
return nullFuture();
}
else {
EntityPersister entityPersister = entityType.getAssociatedEntityPersister( session.getFactory() );
String uniqueKeyPropertyName = entityType.getRHSUniqueKeyPropertyName();
Object propertyValue = entityPersister.getPropertyValue( value, uniqueKeyPropertyName );
// We now have the value of the property-ref we reference. However,
// we need to dig a little deeper, as that property might also be
// an entity type, in which case we need to resolve its identifier
Type type = entityPersister.getPropertyType( uniqueKeyPropertyName );
if ( type.isEntityType() ) {
propertyValue = getIdentifier( (EntityType) type, propertyValue, session );
}
return completedFuture( propertyValue );
}
}
} |
<filename>grape/parallel/parallel_engine.h
/** Copyright 2020 Alibaba Group Holding Limited.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef GRAPE_PARALLEL_PARALLEL_ENGINE_H_
#define GRAPE_PARALLEL_PARALLEL_ENGINE_H_
#include <algorithm>
#include <atomic>
#include <memory>
#include <thread>
#include <vector>
#include "grape/communication/sync_comm.h"
#include "grape/parallel/parallel_engine_spec.h"
#include "grape/utils/thread_pool.h"
#include "grape/utils/vertex_set.h"
#include "grape/worker/comm_spec.h"
namespace grape {
class ParallelEngine {
public:
  // A ParallelEngine starts out with a single thread; call
  // InitParallelEngine to size and start the worker pool.
  ParallelEngine() : thread_num_(1) {}
  virtual ~ParallelEngine() {}
  // Configure the number of worker threads and initialize the backing
  // thread pool from the given spec.
  void InitParallelEngine(
      const ParallelEngineSpec& spec = DefaultParallelEngineSpec()) {
    thread_num_ = spec.thread_num;
    thread_pool_.InitThreadPool(spec);
  }
  // Expose the underlying thread pool.
  inline ThreadPool& GetThreadPool() { return thread_pool_; }
  /**
   * @brief Iterate on vertexes of a VertexRange concurrently.
   *
   * @tparam ITER_FUNC_T Type of vertex program.
   * @tparam VID_T Type of vertex id.
   * @param range The vertex range to be iterated.
   * @param iter_func Vertex program to be applied on each vertex.
   * @param chunk_size Vertices granularity to be scheduled by threads.
   */
  template <typename ITER_FUNC_T, typename VID_T>
  inline void ForEach(const VertexRange<VID_T>& range,
                      const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
    // Delegate to the init/finalize overload with no-op per-thread hooks.
    auto dummy_func = [](int tid) {};
    ForEach(range, dummy_func, iter_func, dummy_func, chunk_size);
  }
  // Overload of the above for a DualVertexRange (a range made of head and
  // tail sub-ranges).
  template <typename ITER_FUNC_T, typename VID_T>
  inline void ForEach(const DualVertexRange<VID_T>& range,
                      const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
    auto dummy_func = [](int tid) {};
    ForEach(range, dummy_func, iter_func, dummy_func, chunk_size);
  }
  /**
   * @brief Iterate on discontinuous vertices concurrently.
   *
   * @tparam ITER_FUNC_T Type of vertex program.
   * @tparam VID_T Type of vertex id.
   * @param vertices The vertex array to be iterated.
   * @param iter_func Vertex program to be applied on each vertex.
   * @param chunk_size Vertices granularity to be scheduled by threads.
   */
  template <typename ITER_FUNC_T, typename VID_T>
  inline void ForEach(const VertexVector<VID_T>& vertices,
                      const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
    auto dummy_func = [](int tid) {};
    ForEach(vertices, dummy_func, iter_func, dummy_func, chunk_size);
  }
  // Overload iterating the half-open range [begin, end) given by an
  // iterator pair.
  template <typename ITERATOR_T, typename ITER_FUNC_T>
  inline void ForEach(const ITERATOR_T& begin, const ITERATOR_T& end,
                      const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
    auto dummy_func = [](int tid) {};
    ForEach(begin, end, dummy_func, iter_func, dummy_func, chunk_size);
  }
  /**
   * @brief Iterate on vertexes of a VertexRange concurrently, initialize
   * function and finalize function can be provided to each thread.
   *
   * @tparam INIT_FUNC_T Type of thread init program.
   * @tparam ITER_FUNC_T Type of vertex program.
   * @tparam FINALIZE_FUNC_T Type of thread finalize program.
   * @tparam VID_T Type of vertex id.
   * @param range The vertex range to be iterated.
   * @param init_func Initializing function to be invoked by each thread before
   * iterating on vertexes.
   * @param iter_func Vertex program to be applied on each vertex.
   * @param finalize_func Finalizing function to be invoked by each thread after
   * iterating on vertexes.
   * @param chunk_size Vertices granularity to be scheduled by threads.
   */
  template <typename INIT_FUNC_T, typename ITER_FUNC_T,
            typename FINALIZE_FUNC_T, typename VID_T>
  inline void ForEach(const VertexRange<VID_T>& range,
                      const INIT_FUNC_T& init_func,
                      const ITER_FUNC_T& iter_func,
                      const FINALIZE_FUNC_T& finalize_func,
                      int chunk_size = 1024) {
    // Shared cursor: workers claim disjoint chunks by atomically advancing it.
    std::atomic<VID_T> cur(range.begin_value());
    VID_T end = range.end_value();
    std::vector<std::future<void>> results(thread_num_);
    for (uint32_t tid = 0; tid < thread_num_; ++tid) {
      results[tid] = thread_pool_.enqueue(
          [&cur, chunk_size, &init_func, &iter_func, &finalize_func, end, tid] {
            init_func(tid);
            while (true) {
              // Claim the next chunk; both ends are clamped to the range end,
              // so the last chunk may be short.  An empty chunk means the
              // range is exhausted.
              VID_T cur_beg = std::min(cur.fetch_add(chunk_size), end);
              VID_T cur_end = std::min(cur_beg + chunk_size, end);
              if (cur_beg == cur_end) {
                break;
              }
              VertexRange<VID_T> cur_range(cur_beg, cur_end);
              for (auto u : cur_range) {
                iter_func(tid, u);
              }
            }
            finalize_func(tid);
          });
    }
    // Block until every worker has drained the range.
    thread_pool_.WaitEnd(results);
  }
  // Same as the VertexRange overload above, but for a DualVertexRange: the
  // head sub-range is drained first by all workers, then the tail sub-range.
  template <typename INIT_FUNC_T, typename ITER_FUNC_T,
            typename FINALIZE_FUNC_T, typename VID_T>
  inline void ForEach(const DualVertexRange<VID_T>& range,
                      const INIT_FUNC_T& init_func,
                      const ITER_FUNC_T& iter_func,
                      const FINALIZE_FUNC_T& finalize_func,
                      int chunk_size = 1024) {
    VertexRange<VID_T> head = range.head();
    VertexRange<VID_T> tail = range.tail();
    // One shared chunk cursor per sub-range.
    std::atomic<VID_T> head_cur(head.begin_value());
    VID_T head_end = head.end_value();
    std::atomic<VID_T> tail_cur(tail.begin_value());
    VID_T tail_end = tail.end_value();
    std::vector<std::future<void>> results(thread_num_);
    for (uint32_t tid = 0; tid < thread_num_; ++tid) {
      results[tid] = thread_pool_.enqueue([&head_cur, &tail_cur, chunk_size,
                                           &iter_func, head_end, tail_end, tid,
                                           &init_func, &finalize_func] {
        init_func(tid);
        // Phase 1: consume chunks from the head sub-range.
        while (true) {
          VID_T cur_beg = std::min(head_cur.fetch_add(chunk_size), head_end);
          VID_T cur_end = std::min(cur_beg + chunk_size, head_end);
          if (cur_beg == cur_end) {
            break;
          }
          VertexRange<VID_T> cur_range(cur_beg, cur_end);
          for (auto& u : cur_range) {
            iter_func(tid, u);
          }
        }
        // Phase 2: consume chunks from the tail sub-range.
        while (true) {
          VID_T cur_beg = std::min(tail_cur.fetch_add(chunk_size), tail_end);
          VID_T cur_end = std::min(cur_beg + chunk_size, tail_end);
          if (cur_beg == cur_end) {
            break;
          }
          VertexRange<VID_T> cur_range(cur_beg, cur_end);
          for (auto& u : cur_range) {
            iter_func(tid, u);
          }
        }
        finalize_func(tid);
      });
    }
    thread_pool_.WaitEnd(results);
  }
  /**
   * @brief Iterate on discontinuous vertices concurrently, initialize
   * function and finalize function can be provided to each thread.
   *
   * @tparam INIT_FUNC_T Type of thread init program.
   * @tparam ITER_FUNC_T Type of vertex program.
   * @tparam FINALIZE_FUNC_T Type of thread finalize program.
   * @tparam VID_T Type of vertex id.
   * @param vertices The vertex array to be iterated.
   * @param init_func Initializing function to be invoked by each thread before
   * iterating on vertexes.
   * @param iter_func Vertex program to be applied on each vertex.
   * @param finalize_func Finalizing function to be invoked by each thread after
   * iterating on vertexes.
   * @param chunk_size Vertices granularity to be scheduled by threads.
   */
  template <typename INIT_FUNC_T, typename ITER_FUNC_T,
            typename FINALIZE_FUNC_T, typename VID_T>
  inline void ForEach(const VertexVector<VID_T>& vertices,
                      const INIT_FUNC_T& init_func,
                      const ITER_FUNC_T& iter_func,
                      const FINALIZE_FUNC_T& finalize_func,
                      int chunk_size = 1024) {
    // Shared index cursor into the vertex vector; workers claim chunks of
    // indices by atomically advancing it.
    std::atomic<size_t> cur(0);
    auto end = vertices.size();
    std::vector<std::future<void>> results(thread_num_);
    for (uint32_t tid = 0; tid < thread_num_; ++tid) {
      results[tid] =
          thread_pool_.enqueue([&cur, chunk_size, &init_func, &vertices,
                                &iter_func, &finalize_func, end, tid] {
            init_func(tid);
            while (true) {
              // Claim a chunk of indices; an empty chunk means done.
              auto cur_beg = std::min(cur.fetch_add(chunk_size), end);
              auto cur_end = std::min(cur_beg + chunk_size, end);
              if (cur_beg == cur_end) {
                break;
              }
              for (auto idx = cur_beg; idx < cur_end; idx++) {
                iter_func(tid, vertices[idx]);
              }
            }
            finalize_func(tid);
          });
    }
    thread_pool_.WaitEnd(results);
  }
  /**
   * @brief Iterate a range specified by iterator pair concurrently.
   *
   * @tparam ITERATOR_T Type of range iterator.
   * @tparam INIT_FUNC_T Type of thread init program.
   * @tparam ITER_FUNC_T Type of vertex program.
   * @tparam FINALIZE_FUNC_T Type of thread finalize program.
   * @param begin The begin iterator of range.
   * @param end The end iterator of range.
   * @param init_func Initializing function to be invoked by each thread before
   * iterating on vertexes.
   * @param iter_func Vertex program to be applied on each vertex.
   * @param finalize_func Finalizing function to be invoked by each thread after
   * iterating on vertexes.
   * @param chunk_size Vertices granularity to be scheduled by threads.
   */
  template <typename ITERATOR_T, typename INIT_FUNC_T,
            typename ITER_FUNC_T, typename FINALIZE_FUNC_T>
  inline void ForEach(const ITERATOR_T& begin, const ITERATOR_T& end,
                      const INIT_FUNC_T& init_func,
                      const ITER_FUNC_T& iter_func,
                      const FINALIZE_FUNC_T& finalize_func,
                      int chunk_size = 1024) {
    // Shared offset from `begin`; workers claim chunks by atomically
    // advancing it.  Requires a random-access iterator (uses `begin + n`).
    std::atomic<size_t> offset(0);
    std::vector<std::future<void>> results(thread_num_);
    for (uint32_t tid = 0; tid < thread_num_; ++tid) {
      results[tid] = thread_pool_.enqueue(
          [&offset, chunk_size, &init_func, &iter_func, &finalize_func, begin,
           end, tid] {
            init_func(tid);
            while (true) {
              // NOTE(review): once the range is exhausted, `begin + offset`
              // is computed past `end` before being clamped by std::min;
              // for raw pointers/iterators that arithmetic is formally UB —
              // confirm the iterator types used here tolerate it.
              const ITERATOR_T cur_beg =
                  std::min(begin + offset.fetch_add(chunk_size), end);
              const ITERATOR_T cur_end = std::min(cur_beg + chunk_size, end);
              if (cur_beg == cur_end) {
                break;
              }
              for (auto iter = cur_beg; iter != cur_end; ++iter) {
                iter_func(tid, *iter);
              }
            }
            finalize_func(tid);
          });
    }
    thread_pool_.WaitEnd(results);
  }
/**
* @brief Iterate on vertexes of a DenseVertexSet concurrently.
*
* @tparam ITER_FUNC_T Type of vertex program.
* @tparam VID_T Type of vertex id.
* @param dense_set The vertex set to be iterated.
* @param iter_func Vertex program to be applied on each vertex.
* @param chunk_size Vertices granularity to be scheduled by threads.
*/
// Convenience overload: iterate a VertexRange-backed DenseVertexSet without
// per-thread setup or teardown hooks.
template <typename ITER_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<VertexRange<VID_T>>& dense_set,
const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
auto noop = [](int) {};
ForEach(dense_set, noop, iter_func, noop, chunk_size);
}
// Convenience overload: iterate a VertexVector-backed DenseVertexSet without
// per-thread setup or teardown hooks.
template <typename ITER_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<VertexVector<VID_T>>& dense_set,
const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
auto noop = [](int) {};
ForEach(dense_set, noop, iter_func, noop, chunk_size);
}
// Convenience overload: iterate a DualVertexRange-backed DenseVertexSet
// without per-thread setup or teardown hooks.
template <typename ITER_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<DualVertexRange<VID_T>>& dense_set,
const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
auto noop = [](int) {};
ForEach(dense_set, noop, iter_func, noop, chunk_size);
}
template <typename ITER_FUNC_T, typename VID_T>
inline void bitwise_iterate(VID_T begin, VID_T end, const Bitset& bitset,
VID_T offset, int tid,
const ITER_FUNC_T& iter_func) {
Vertex<VID_T> v(begin);
Vertex<VID_T> v_end(end);
while (v != v_end) {
if (bitset.get_bit(v.GetValue() - offset)) {
iter_func(tid, v);
}
++v;
}
}
template <typename ITER_FUNC_T, typename VID_T>
inline void wordwise_iterate(VID_T begin, VID_T end, const Bitset& bitset,
VID_T offset, int tid,
const ITER_FUNC_T& iter_func) {
// Scan 64 vertices per step: fetch the bitset word covering [vid, vid+64)
// and peel set bits off the low end. NOTE(review): assumes begin/end are
// 64-aligned relative to `offset` (parallel_iterate arranges this) —
// confirm before reusing from new call sites.
for (VID_T vid = begin; vid < end; vid += 64) {
Vertex<VID_T> v(vid);
uint64_t word = bitset.get_word(vid - offset);
while (word != 0) {
if (word & 1) {
iter_func(tid, v);
}
++v;
word = word >> 1;
}
}
}
template <typename ITER_FUNC_T, typename VID_T>
inline void parallel_iterate(VID_T begin, VID_T end, const Bitset& bitset,
VID_T offset, const ITER_FUNC_T& iter_func,
int chunk_size) {
// Round the interior of [begin, end) to 64-bit word boundaries relative to
// `offset`: [batch_begin, batch_end) is processed word-wise in parallel,
// the ragged edges bit-wise.
VID_T batch_begin = (begin - offset + 63) / 64 * 64 + offset;
VID_T batch_end = (end - offset) / 64 * 64 + offset;
if (batch_begin >= end || batch_end <= begin) {
// Range does not span a full word: iterate serially, bit by bit.
bitwise_iterate(begin, end, bitset, offset, 0, iter_func);
return;
}
std::atomic<VID_T> cur(batch_begin);
std::vector<std::future<void>> results(thread_num_);
for (uint32_t tid = 0; tid < thread_num_; ++tid) {
results[tid] = thread_pool_.enqueue([&iter_func, &cur, chunk_size,
&bitset, batch_begin, batch_end,
begin, end, offset, this, tid] {
// Thread 0 takes the unaligned head, the last thread the unaligned
// tail; all threads then compete for aligned chunks via `cur`.
// NOTE(review): chunk boundaries stay 64-aligned only if chunk_size is
// a multiple of 64 — confirm for callers overriding the default.
if (tid == 0 && begin < batch_begin) {
bitwise_iterate(begin, batch_begin, bitset, offset, tid, iter_func);
}
if (tid == (thread_num_ - 1) && batch_end < end) {
bitwise_iterate(batch_end, end, bitset, offset, tid, iter_func);
}
if (batch_begin < batch_end) {
while (true) {
VID_T cur_beg = std::min(cur.fetch_add(chunk_size), batch_end);
VID_T cur_end = std::min(cur_beg + chunk_size, batch_end);
if (cur_beg == cur_end) {
break;
}
wordwise_iterate(cur_beg, cur_end, bitset, offset, tid, iter_func);
}
}
});
}
thread_pool_.WaitEnd(results);
}
/**
* @brief Iterate on vertexes of a DenseVertexSet concurrently.
*
* @tparam ITER_FUNC_T Type of vertex program.
* @tparam VID_T Type of vertex id.
* @param dense_set The vertex set to be iterated.
* @param range The vertex range to be iterated.
* @param iter_func Vertex program to be applied on each vertex.
* @param chunk_size Vertices granularity to be scheduled by threads.
*/
template <typename ITER_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<VertexRange<VID_T>>& dense_set,
const VertexRange<VID_T>& range,
const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
auto& bitset = dense_set.GetBitset();
VertexRange<VID_T> complete_range = dense_set.Range();
VID_T begin = std::max(range.begin_value(), complete_range.begin_value());
VID_T end = std::min(range.end_value(), complete_range.end_value());
if (begin < end) {
parallel_iterate(begin, end, bitset, complete_range.begin_value(),
iter_func, chunk_size);
}
}
template <typename ITER_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<VertexVector<VID_T>>& dense_set,
const VertexRange<VID_T>& range,
const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
auto& bitset = dense_set.GetBitset();
VertexRange<VID_T> complete_range = dense_set.Range();
VID_T begin = std::max(range.begin_value(), complete_range.begin_value());
VID_T end = std::min(range.end_value(), complete_range.end_value());
if (begin < end) {
parallel_iterate(begin, end, bitset, complete_range.begin_value(),
iter_func, chunk_size);
}
}
template <typename ITER_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<DualVertexRange<VID_T>>& dense_set,
const VertexRange<VID_T>& range,
const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
VertexRange<VID_T> head_range = dense_set.Range().head();
VertexRange<VID_T> tail_range = dense_set.Range().tail();
VID_T head_begin = std::max(range.begin_value(), head_range.begin_value());
VID_T head_end = std::min(range.end_value(), head_range.end_value());
VID_T tail_begin = std::max(range.begin_value(), tail_range.begin_value());
VID_T tail_end = std::min(range.end_value(), tail_range.end_value());
auto& head_bitset = dense_set.GetHeadBitset();
auto& tail_bitset = dense_set.GetTailBitset();
if (head_begin < head_end) {
parallel_iterate(head_begin, head_end, head_bitset,
head_range.begin_value(), iter_func, chunk_size);
}
if (tail_begin < tail_end) {
parallel_iterate(tail_begin, tail_end, tail_bitset,
tail_range.begin_value(), iter_func, chunk_size);
}
}
/**
* @brief Iterate on vertexes of a DenseVertexSet concurrently.
*
* @tparam ITER_FUNC_T Type of vertex program.
* @tparam VID_T Type of vertex id.
* @param dense_set The vertex set to be iterated.
* @param vertices The vertices to be iterated.
* @param iter_func Vertex program to be applied on each vertex.
* @param chunk_size Vertices granularity to be scheduled by threads.
*/
template <typename ITER_FUNC_T, typename VID_T, typename VERTEX_SET_T>
inline void ForEach(const DenseVertexSet<VERTEX_SET_T>& dense_set,
const VertexVector<VID_T>& vertices,
const ITER_FUNC_T& iter_func, int chunk_size = 1024) {
// Workers claim index chunks of `vertices` via the shared atomic `cur`;
// iter_func is applied only to vertices present in `dense_set`.
std::atomic<size_t> cur(0);
auto end = vertices.size();
std::vector<std::future<void>> results(thread_num_);
for (uint32_t tid = 0; tid < thread_num_; ++tid) {
results[tid] =
thread_pool_.enqueue([&iter_func, &cur, chunk_size, &dense_set,
&vertices, end, this, tid] {
while (true) {
auto cur_beg = std::min(cur.fetch_add(chunk_size), end);
auto cur_end = std::min(cur_beg + chunk_size, end);
if (cur_beg == cur_end) {
break;  // all indices consumed
}
for (auto idx = cur_beg; idx < cur_end; idx++) {
auto v = vertices[idx];
// Membership filter: skip vertices not in the dense set.
if (dense_set.Exist(v)) {
iter_func(tid, v);
}
}
}
});
}
thread_pool_.WaitEnd(results);
}
/**
* @brief Iterate on vertexes of a DenseVertexSet concurrently, initialize
* function and finalize function can be provided to each thread.
*
* @tparam INIT_FUNC_T Type of thread init program.
* @tparam ITER_FUNC_T Type of vertex program.
* @tparam FINALIZE_FUNC_T Type of thread finalize program.
* @tparam VID_T Type of vertex id.
* @param dense_set The vertex set to be iterated.
* @param init_func Initializing function to be invoked by each thread before
* iterating on vertexes.
* @param iter_func Vertex program to be applied on each vertex.
* @param finalize_func Finalizing function to be invoked by each thread after
* iterating on vertexes.
* @param chunk_size Vertices granularity to be scheduled by threads.
*/
template <typename INIT_FUNC_T, typename ITER_FUNC_T,
typename FINALIZE_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<VertexRange<VID_T>>& dense_set,
const INIT_FUNC_T& init_func,
const ITER_FUNC_T& iter_func,
const FINALIZE_FUNC_T& finalize_func,
int chunk_size = 10 * 1024) {
VertexRange<VID_T> range = dense_set.Range();
std::atomic<VID_T> cur(range.begin_value());
VID_T beg = range.begin_value();
VID_T end = range.end_value();
const Bitset& bs = dense_set.GetBitset();
// Round chunk_size up to a multiple of 64 so chunk boundaries stay aligned
// with the bitset's 64-bit words.
chunk_size = ((chunk_size + 63) / 64) * 64;
std::vector<std::future<void>> results(thread_num_);
for (uint32_t tid = 0; tid < thread_num_; ++tid) {
results[tid] =
thread_pool_.enqueue([&init_func, &finalize_func, &iter_func, &cur,
chunk_size, &bs, beg, end, tid] {
init_func(tid);
while (true) {
VID_T cur_beg = std::min(cur.fetch_add(chunk_size), end);
VID_T cur_end = std::min(cur_beg + chunk_size, end);
if (cur_beg == cur_end) {
break;
}
// Word-wise scan: peel set bits off the low end of each word.
for (VID_T vid = cur_beg; vid < cur_end; vid += 64) {
Vertex<VID_T> v(vid);
uint64_t word = bs.get_word(vid - beg);
while (word != 0) {
if (word & 1) {
iter_func(tid, v);
}
++v;
word = word >> 1;
}
}
}
finalize_func(tid);
});
}
thread_pool_.WaitEnd(results);
}
template <typename INIT_FUNC_T, typename ITER_FUNC_T,
typename FINALIZE_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<VertexVector<VID_T>>& dense_set,
const INIT_FUNC_T& init_func,
const ITER_FUNC_T& iter_func,
const FINALIZE_FUNC_T& finalize_func,
int chunk_size = 10 * 1024) {
// Same word-wise chunked scan as the VertexRange overload; kept as a
// separate specialization for the VertexVector-backed set type.
VertexRange<VID_T> range = dense_set.Range();
std::atomic<VID_T> cur(range.begin_value());
VID_T beg = range.begin_value();
VID_T end = range.end_value();
const Bitset& bs = dense_set.GetBitset();
// Keep chunk boundaries aligned with 64-bit bitset words.
chunk_size = ((chunk_size + 63) / 64) * 64;
std::vector<std::future<void>> results(thread_num_);
for (uint32_t tid = 0; tid < thread_num_; ++tid) {
results[tid] =
thread_pool_.enqueue([&init_func, &finalize_func, &iter_func, &cur,
chunk_size, &bs, beg, end, tid] {
init_func(tid);
while (true) {
VID_T cur_beg = std::min(cur.fetch_add(chunk_size), end);
VID_T cur_end = std::min(cur_beg + chunk_size, end);
if (cur_beg == cur_end) {
break;
}
for (VID_T vid = cur_beg; vid < cur_end; vid += 64) {
Vertex<VID_T> v(vid);
uint64_t word = bs.get_word(vid - beg);
while (word != 0) {
if (word & 1) {
iter_func(tid, v);
}
++v;
word = word >> 1;
}
}
}
finalize_func(tid);
});
}
thread_pool_.WaitEnd(results);
}
template <typename INIT_FUNC_T, typename ITER_FUNC_T,
typename FINALIZE_FUNC_T, typename VID_T>
inline void ForEach(const DenseVertexSet<DualVertexRange<VID_T>>& dense_set,
const INIT_FUNC_T& init_func,
const ITER_FUNC_T& iter_func,
const FINALIZE_FUNC_T& finalize_func,
int chunk_size = 10 * 1024) {
// The dual set has two independent ranges (head and tail), each with its
// own bitset and its own atomic chunk counter. Every worker drains the
// head range first, then the tail range.
VertexRange<VID_T> head = dense_set.head();
VertexRange<VID_T> tail = dense_set.tail();
VID_T head_beg = head.begin_value();
std::atomic<VID_T> head_cur(head_beg);
VID_T head_end = head.end_value();
VID_T tail_beg = tail.begin_value();
std::atomic<VID_T> tail_cur(tail_beg);
VID_T tail_end = tail.end_value();
const Bitset& head_bs = dense_set.GetHeadBitset();
const Bitset& tail_bs = dense_set.GetTailBitset();
// Keep chunk boundaries aligned with 64-bit bitset words.
chunk_size = ((chunk_size + 63) / 64) * 64;
std::vector<std::future<void>> results(thread_num_);
for (uint32_t tid = 0; tid < thread_num_; ++tid) {
results[tid] = thread_pool_.enqueue([&init_func, &finalize_func,
&iter_func, &head_cur, &tail_cur,
chunk_size, &head_bs, &tail_bs,
head_beg, tail_beg, head_end,
tail_end, tid] {
init_func(tid);
// Drain the head range in word-wise chunks.
while (true) {
VID_T cur_beg = std::min(head_cur.fetch_add(chunk_size), head_end);
VID_T cur_end = std::min(cur_beg + chunk_size, head_end);
if (cur_beg == cur_end) {
break;
}
for (VID_T vid = cur_beg; vid < cur_end; vid += 64) {
Vertex<VID_T> v(vid);
uint64_t word = head_bs.get_word(vid - head_beg);
while (word != 0) {
if (word & 1) {
iter_func(tid, v);
}
++v;
word = word >> 1;
}
}
}
// Then drain the tail range the same way.
while (true) {
VID_T cur_beg = std::min(tail_cur.fetch_add(chunk_size), tail_end);
VID_T cur_end = std::min(cur_beg + chunk_size, tail_end);
if (cur_beg == cur_end) {
break;
}
for (VID_T vid = cur_beg; vid < cur_end; vid += 64) {
Vertex<VID_T> v(vid);
uint64_t word = tail_bs.get_word(vid - tail_beg);
while (word != 0) {
if (word & 1) {
iter_func(tid, v);
}
++v;
word = word >> 1;
}
}
}
finalize_func(tid);
});
}
thread_pool_.WaitEnd(results);
}
// Returns the number of worker threads used by this engine.
uint32_t thread_num() { return thread_num_; }
private:
ThreadPool thread_pool_;  // worker pool shared by all ForEach variants
uint32_t thread_num_;  // number of workers enqueued per ForEach call
};
// SFINAE-selected overload: when APP_T derives from ParallelEngine, forward
// the spec to the app's InitParallelEngine.
template <typename APP_T>
typename std::enable_if<std::is_base_of<ParallelEngine, APP_T>::value>::type
InitParallelEngine(std::shared_ptr<APP_T> app, const ParallelEngineSpec& spec) {
app->InitParallelEngine(spec);
}
// Intentionally empty overload for apps that are not ParallelEngine-based,
// so callers can invoke InitParallelEngine unconditionally.
template <typename APP_T>
typename std::enable_if<!std::is_base_of<ParallelEngine, APP_T>::value>::type
InitParallelEngine(std::shared_ptr<APP_T> app, const ParallelEngineSpec& spec) {
}
} // namespace grape
#endif // GRAPE_PARALLEL_PARALLEL_ENGINE_H_
|
/**
 * Request payload describing a modification to a todo item.
 *
 * Originally created by alsutton on 10/09/2013.
 */
public class ModifyTodoRequest extends ServerRequestData {
private Double completionTimestamp;
private String eventId;
private String nfcId;
private String todoId;
private String userId;
public ModifyTodoRequest()
{
}
public Double getCompletionTimestamp()
{
return this.completionTimestamp;
}
public ModifyTodoRequest setCompletionTimestamp(Double completionTimestamp)
{
this.completionTimestamp = completionTimestamp;
return this;
}
public String getEventId()
{
return this.eventId;
}
public ModifyTodoRequest setEventId(String eventId)
{
this.eventId = eventId;
return this;
}
public String getNfcId()
{
return this.nfcId;
}
public ModifyTodoRequest setNfcId(String nfcId)
{
this.nfcId = nfcId;
return this;
}
public String getTodoId()
{
return this.todoId;
}
public ModifyTodoRequest setTodoId(String todoId)
{
this.todoId = todoId;
return this;
}
public String getUserId()
{
return this.userId;
}
public ModifyTodoRequest setUserId(String userId)
{
this.userId = userId;
return this;
}
@Override
public String toJSON() throws JSONException {
return new JSONObject()
.put("completionTimestamp", completionTimestamp)
.put("eventId", eventId)
.put("nfcId", nfcId)
.put("todoId", todoId)
.put("userId", userId)
.toString();
}
} |
// fcrepo-http-api/src/test/java/org/fcrepo/integration/http/api/ExternalContentHandlerIT.java
/*
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree.
*/
package org.fcrepo.integration.http.api;
import static java.nio.charset.StandardCharsets.UTF_8;
import static javax.ws.rs.core.HttpHeaders.CONTENT_DISPOSITION;
import static javax.ws.rs.core.HttpHeaders.CONTENT_TYPE;
import static javax.ws.rs.core.HttpHeaders.LINK;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static javax.ws.rs.core.Response.Status.CONFLICT;
import static javax.ws.rs.core.Response.Status.CREATED;
import static javax.ws.rs.core.Response.Status.NO_CONTENT;
import static javax.ws.rs.core.Response.Status.OK;
import static javax.ws.rs.core.Response.Status.TEMPORARY_REDIRECT;
import static org.apache.http.HttpHeaders.CONTENT_LENGTH;
import static org.apache.http.HttpStatus.SC_CREATED;
import static org.apache.http.HttpStatus.SC_OK;
import static org.fcrepo.kernel.api.RdfLexicon.NON_RDF_SOURCE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.fcrepo.kernel.api.FedoraTypes;
import org.glassfish.jersey.media.multipart.ContentDisposition;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.springframework.test.context.TestExecutionListeners;
/**
* @author whikloj
* @since 2018-07-10
*/
@TestExecutionListeners(
listeners = { TestIsolationExecutionListener.class },
mergeMode = TestExecutionListeners.MergeMode.MERGE_WITH_DEFAULTS)
public class ExternalContentHandlerIT extends AbstractResourceIT {
// Per-test scratch directory used to create file:// external content.
@Rule
public TemporaryFolder tempFolder = new TemporaryFolder();
// Link header marking a resource as an LDP NonRDFSource.
private static final String NON_RDF_SOURCE_LINK_HEADER = "<" + NON_RDF_SOURCE.getURI() + ">;rel=\"type\"";
private static final String WANT_DIGEST = "Want-Digest";
private static final String DIGEST = "Digest";
private static final String TEST_BINARY_CONTENT = "01234567890123456789012345678901234567890123456789";
// Expected Digest header values (SHA-1 / MD5) for TEST_BINARY_CONTENT.
private static final String TEST_SHA_DIGEST_HEADER_VALUE = "sha=9578f951955d37f20b601c26591e260c1e5389bf";
private static final String TEST_MD5_DIGEST_HEADER_VALUE = "md5=baed005300234f3d1503c50a48ce8e6f";
// Client that does NOT follow redirects, for asserting 307 responses directly.
private static final CloseableHttpClient noFollowClient = HttpClientBuilder.create()
.disableRedirectHandling().build();
// Create the scratch directory before each test; the rule handles cleanup.
@Before
public void setup() throws Exception {
tempFolder.create();
}
/**
 * Proxy mode over HTTP: with no client-supplied type, the remote server's
 * Content-Type is passed through on retrieval.
 */
@Test
public void testProxyRemoteContentTypeForHttpUri() throws Exception {
final var externalLocation = createHttpResource("audio/ogg", "xyz");
final String finalLocation = getRandomUniqueId();
// Make an external content resource proxying the above URI.
final HttpPut put = putObjMethod(finalLocation);
put.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "proxy", null));
assertEquals(CREATED.getStatusCode(), getStatus(put));
// Get the external content proxy resource.
try (final CloseableHttpResponse response = execute(getObjMethod(finalLocation))) {
assertEquals(SC_OK, getStatus(response));
assertContentType(response, "audio/ogg");
assertContentLocation(response, externalLocation);
assertContentLength(response, 3);
}
}
/**
 * Proxy mode over HTTP: a content type supplied in the external-content Link
 * header overrides the type reported by the remote server.
 */
@Test
public void testProxyClientContentTypeOverridesRemoteForHttpUri() throws Exception {
final var externalLocation = createHttpResource("audio/ogg", "vxyz");
final String finalLocation = getRandomUniqueId();
// Make an external content resource proxying the above URI.
final HttpPut put = putObjMethod(finalLocation);
put.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "proxy", "audio/mp3"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
// Get the external content proxy resource.
try (final CloseableHttpResponse response = execute(getObjMethod(finalLocation))) {
assertEquals(SC_OK, getStatus(response));
assertContentType(response, "audio/mp3");
assertContentLocation(response, externalLocation);
assertContentLength(response, 4);
}
}
/**
 * Copy mode over HTTP: with no client-supplied type, the remote server's
 * Content-Type is preserved on the copied binary (no Content-Location).
 */
@Test
public void testCopyRemoteContentTypeForHttpUri() throws Exception {
final var externalLocation = createHttpResource("audio/ogg", "xyz");
final String finalLocation = getRandomUniqueId();
// Make an external content resource proxying the above URI.
final HttpPut put = putObjMethod(finalLocation);
put.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "copy", null));
assertEquals(CREATED.getStatusCode(), getStatus(put));
// Get the external content proxy resource.
try (final CloseableHttpResponse response = execute(getObjMethod(finalLocation))) {
assertEquals(SC_OK, getStatus(response));
assertContentType(response, "audio/ogg");
assertContentLength(response, 3);
}
}
/**
 * Copy mode over HTTP: a content type supplied in the external-content Link
 * header overrides the type reported by the remote server.
 */
@Test
public void testCopyClientContentTypeOverridesRemoteForHttpUri() throws Exception {
final var externalLocation = createHttpResource("audio/ogg", "xyz");
final String finalLocation = getRandomUniqueId();
// Make an external content resource proxying the above URI.
final HttpPut put = putObjMethod(finalLocation);
put.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "copy", "audio/mp3"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
// Get the external content proxy resource.
try (final CloseableHttpResponse response = execute(getObjMethod(finalLocation))) {
assertEquals(SC_OK, getStatus(response));
assertContentType(response, "audio/mp3");
assertContentLength(response, 3);
}
}
/**
 * Proxy mode over file://: HEAD and GET with a Want-Digest header return the
 * expected SHA-1 Digest plus a Content-Location pointing at the file URI.
 */
@Test
public void testProxyWithWantDigestForLocalFile() throws IOException {
final File externalFile = createExternalLocalFile(TEST_BINARY_CONTENT);
final String fileUri = externalFile.toURI().toString();
final String id = getRandomUniqueId();
final HttpPut put = putObjMethod(id);
put.addHeader(LINK, getExternalContentLinkHeader(fileUri, "proxy", "text/plain"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
final String expectedDigestHeaderValue = TEST_SHA_DIGEST_HEADER_VALUE;
// HEAD request with Want-Digest
final HttpHead headObjMethod = headObjMethod(id);
headObjMethod.addHeader(WANT_DIGEST, "sha");
checkExternalDataStreamResponseHeader(headObjMethod, fileUri, expectedDigestHeaderValue);
// GET request with Want-Digest
final HttpGet getObjMethod = getObjMethod(id);
getObjMethod.addHeader(WANT_DIGEST, "sha");
checkExternalDataStreamResponseHeader(getObjMethod, fileUri, expectedDigestHeaderValue);
}
/**
 * Copy mode over file://: HEAD and GET with Want-Digest return the expected
 * SHA-1 Digest; no Content-Location is expected since content was copied in.
 */
@Test
public void testCopyWithWantDigestForLocalFile() throws IOException {
final File externalFile = createExternalLocalFile(TEST_BINARY_CONTENT);
final String fileUri = externalFile.toURI().toString();
final String id = getRandomUniqueId();
final HttpPut put = putObjMethod(id);
put.addHeader(LINK, getExternalContentLinkHeader(fileUri, "copy", "text/plain"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
final String expectedDigestHeaderValue = TEST_SHA_DIGEST_HEADER_VALUE;
// HEAD request with Want-Digest
final HttpHead headObjMethod = headObjMethod(id);
headObjMethod.addHeader(WANT_DIGEST, "sha");
checkExternalDataStreamResponseHeader(headObjMethod, null, expectedDigestHeaderValue);
// GET request with Want-Digest
final HttpGet getObjMethod = getObjMethod(id);
getObjMethod.addHeader(WANT_DIGEST, "sha");
checkExternalDataStreamResponseHeader(getObjMethod, null, expectedDigestHeaderValue);
}
/**
 * Proxy mode over HTTP: HEAD and GET with Want-Digest return the expected
 * SHA-1 Digest plus a Content-Location pointing at the remote URI.
 */
@Test
public void testProxyWithWantDigestForHttpUri() throws Exception {
final String dsUrl = createHttpResource(TEST_BINARY_CONTENT);
final String id = getRandomUniqueId();
final HttpPut put = putObjMethod(id);
put.addHeader(LINK, getExternalContentLinkHeader(dsUrl, "proxy", "text/plain"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
final String expectedDigestHeaderValue = TEST_SHA_DIGEST_HEADER_VALUE;
// HEAD request with Want-Digest
final HttpHead headObjMethod = headObjMethod(id);
headObjMethod.addHeader(WANT_DIGEST, "sha");
checkExternalDataStreamResponseHeader(headObjMethod, dsUrl, expectedDigestHeaderValue);
// GET request with Want-Digest
final HttpGet getObjMethod = getObjMethod(id);
getObjMethod.addHeader(WANT_DIGEST, "sha");
checkExternalDataStreamResponseHeader(getObjMethod, dsUrl, expectedDigestHeaderValue);
}
/**
 * Copy mode over HTTP: HEAD and GET with Want-Digest return the expected
 * SHA-1 Digest; no Content-Location is expected since content was copied in.
 */
@Test
public void testCopyWithWantDigestForHttpUri() throws Exception {
final String dsUrl = createHttpResource(TEST_BINARY_CONTENT);
final String id = getRandomUniqueId();
final HttpPut put = putObjMethod(id);
put.addHeader(LINK, getExternalContentLinkHeader(dsUrl, "copy", "text/plain"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
final String expectedDigestHeaderValue = TEST_SHA_DIGEST_HEADER_VALUE;
// HEAD request with Want-Digest
final HttpHead headObjMethod = headObjMethod(id);
headObjMethod.addHeader(WANT_DIGEST, "sha");
checkExternalDataStreamResponseHeader(headObjMethod, null, expectedDigestHeaderValue);
// GET request with Want-Digest
final HttpGet getObjMethod = getObjMethod(id);
getObjMethod.addHeader(WANT_DIGEST, "sha");
checkExternalDataStreamResponseHeader(getObjMethod, null, expectedDigestHeaderValue);
}
/**
 * Proxy mode over file:// with multiple Want-Digest algorithms: the Digest
 * header must contain both the SHA-1 and MD5 values on HEAD and GET.
 */
@Test
public void testProxyWithWantDigestMultipleForLocalFile() throws IOException {
final File externalFile = createExternalLocalFile(TEST_BINARY_CONTENT);
final String fileUri = externalFile.toURI().toString();
final String id = getRandomUniqueId();
final HttpPut put = putObjMethod(id);
put.addHeader(LINK, getExternalContentLinkHeader(fileUri, "proxy", "text/plain"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
// HEAD request with Want-Digest
final HttpHead headObjMethod = headObjMethod(id);
headObjMethod.addHeader(WANT_DIGEST, "sha, md5;q=0.3");
try (final CloseableHttpResponse response = execute(headObjMethod)) {
assertEquals(OK.getStatusCode(), response.getStatusLine().getStatusCode());
assertContentLocation(response, fileUri);
assertTrue(response.getHeaders(DIGEST).length > 0);
final String digesterHeaderValue = response.getHeaders(DIGEST)[0].getValue();
assertTrue("SHA-1 Fixity Checksum doesn't match",
digesterHeaderValue.contains(TEST_SHA_DIGEST_HEADER_VALUE));
assertTrue("MD5 fixity checksum doesn't match",
digesterHeaderValue.contains(TEST_MD5_DIGEST_HEADER_VALUE));
}
// GET request with Want-Digest
final HttpGet getObjMethod = getObjMethod(id);
getObjMethod.addHeader(WANT_DIGEST, "sha, md5;q=0.3");
try (final CloseableHttpResponse response = execute(getObjMethod)) {
assertEquals(OK.getStatusCode(), response.getStatusLine().getStatusCode());
assertContentLocation(response, fileUri);
assertTrue(response.getHeaders(DIGEST).length > 0);
final String digesterHeaderValue = response.getHeaders(DIGEST)[0].getValue();
assertTrue("SHA-1 Fixity Checksum doesn't match",
digesterHeaderValue.contains(TEST_SHA_DIGEST_HEADER_VALUE));
assertTrue("MD5 fixity checksum doesn't match",
digesterHeaderValue.contains(TEST_MD5_DIGEST_HEADER_VALUE));
}
}
/**
 * Writes {@code content} to a new file in the per-test temp folder.
 *
 * @param content file body, written as UTF-8
 * @return the newly created file
 * @throws IOException if the file cannot be created or written
 */
private File createExternalLocalFile(final String content) throws IOException {
final File externalFile = tempFolder.newFile();
// Write with an explicit charset; the previous bare FileWriter used the
// platform default encoding, which varies across test hosts.
Files.write(externalFile.toPath(), content.getBytes(UTF_8));
return externalFile;
}
/**
 * Executes the request and verifies the response: 200 OK, a Digest header
 * equal to {@code shaValue}, and (when provided) a Content-Location header
 * equal to {@code contentLocation}.
 *
 * @param req request to execute
 * @param contentLocation expected Content-Location, or null/blank to skip
 * @param shaValue expected Digest header value
 * @throws IOException on request failure
 */
private void checkExternalDataStreamResponseHeader(final HttpUriRequest req, final String contentLocation,
final String shaValue) throws IOException {
try (final CloseableHttpResponse response = execute(req)) {
assertEquals(OK.getStatusCode(), response.getStatusLine().getStatusCode());
assertTrue(response.getHeaders(DIGEST).length > 0);
if (StringUtils.isNoneBlank(contentLocation)) {
assertEquals(contentLocation, getContentLocation(response));
}
// assertEquals reports expected vs. actual on failure, unlike the
// previous assertTrue(value.equals(expected)).
assertEquals("Fixity Checksum doesn't match", shaValue,
response.getHeaders(DIGEST)[0].getValue());
}
}
/**
 * Redirect mode, HEAD: without following redirects, the response is a 307
 * with Location, Accept-Ranges, the client-supplied type, zero length, and
 * an attachment Content-Disposition.
 */
@Test
public void testHeadExternalDatastreamRedirectForHttpUri() throws Exception {
final String externalLocation = createHttpResource(TEST_BINARY_CONTENT);
final String id = getRandomUniqueId();
final HttpPut put = putObjMethod(id);
put.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "redirect", "image/jpeg"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
// Configure HEAD request to NOT follow redirects
final HttpHead headObjMethod = headObjMethod(id);
final RequestConfig.Builder requestConfig = RequestConfig.custom();
requestConfig.setRedirectsEnabled(false);
headObjMethod.setConfig(requestConfig.build());
try (final CloseableHttpResponse response = execute(headObjMethod)) {
assertEquals(TEMPORARY_REDIRECT.getStatusCode(), response.getStatusLine().getStatusCode());
assertLocation(response, externalLocation);
assertEquals("bytes", response.getFirstHeader("Accept-Ranges").getValue());
assertContentLength(response, 0);
assertContentType(response, "image/jpeg");
final ContentDisposition disposition =
new ContentDisposition(response.getFirstHeader(CONTENT_DISPOSITION).getValue());
assertEquals("attachment", disposition.getType());
}
}
/**
 * Redirect mode, GET: without following redirects, the response is a 307
 * with Location, the client-supplied type, Accept-Ranges, and an attachment
 * Content-Disposition.
 */
@Test
public void testGetExternalDatastreamForHttpUri() throws Exception {
final String externalLocation = createHttpResource(TEST_BINARY_CONTENT);
final String id = getRandomUniqueId();
final HttpPut put = putObjMethod(id);
put.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "redirect", "image/jpeg"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
// Configure HEAD request to NOT follow redirects
final HttpGet getObjMethod = getObjMethod(id);
final RequestConfig.Builder requestConfig = RequestConfig.custom();
requestConfig.setRedirectsEnabled(false);
getObjMethod.setConfig(requestConfig.build());
try (final CloseableHttpResponse response = execute(getObjMethod)) {
assertEquals(TEMPORARY_REDIRECT.getStatusCode(), response.getStatusLine().getStatusCode());
assertLocation(response, externalLocation);
assertContentType(response, "image/jpeg");
assertEquals("bytes", response.getFirstHeader("Accept-Ranges").getValue());
final ContentDisposition disposition =
new ContentDisposition(response.getFirstHeader(CONTENT_DISPOSITION).getValue());
assertEquals("attachment", disposition.getType());
}
}
/**
 * Executes a non-redirect-following request against a redirect-mode binary
 * and verifies: 307 status, Location equal to {@code dsUrl}, and a Digest
 * header containing {@code sha1} and, when non-null, {@code md5}.
 */
private void checkRedirectWantDigestResult(final HttpRequestBase request, final String dsUrl, final String sha1,
final String md5) throws IOException {
try (final CloseableHttpResponse response = execute(request)) {
assertEquals(TEMPORARY_REDIRECT.getStatusCode(), response.getStatusLine().getStatusCode());
assertLocation(response, dsUrl);
assertTrue(response.getHeaders(DIGEST).length > 0);
final String digesterHeaderValue = response.getHeaders(DIGEST)[0].getValue();
assertTrue("SHA-1 Fixity Checksum doesn't match",
digesterHeaderValue.contains(sha1));
if (md5 != null) {
assertTrue("MD5 fixity checksum doesn't match",
digesterHeaderValue.contains(md5));
}
}
}
/**
 * Redirect mode with Want-Digest: HEAD and GET, with single and multiple
 * requested algorithms, must return the computed Digest values alongside
 * the 307 redirect.
 */
@Test
public void testRedirectWithWantDigest() throws Exception {
final String dsUrl = createHttpResource(TEST_BINARY_CONTENT);
final String id = getRandomUniqueId();
final HttpPut put = putObjMethod(id);
put.addHeader(LINK, getExternalContentLinkHeader(dsUrl, "redirect", "image/jpeg"));
assertEquals(CREATED.getStatusCode(), getStatus(put));
// Configure request to NOT follow redirects
final RequestConfig.Builder requestConfig = RequestConfig.custom();
requestConfig.setRedirectsEnabled(false);
// Verify HEAD request behavior with single Want-Digest
final HttpHead headObjMethod = headObjMethod(id);
headObjMethod.addHeader(WANT_DIGEST, "sha");
headObjMethod.setConfig(requestConfig.build());
checkRedirectWantDigestResult(headObjMethod, dsUrl,
TEST_SHA_DIGEST_HEADER_VALUE, null);
// Verify HEAD request behavior with multiple Want-Digest
final HttpHead headObjMethodMulti = headObjMethod(id);
headObjMethodMulti.addHeader(WANT_DIGEST, "sha, md5;q=0.3");
headObjMethodMulti.setConfig(requestConfig.build());
checkRedirectWantDigestResult(headObjMethodMulti, dsUrl,
TEST_SHA_DIGEST_HEADER_VALUE, TEST_MD5_DIGEST_HEADER_VALUE);
// Verify GET request behavior with Want-Digest
final HttpGet getObjMethod = getObjMethod(id);
getObjMethod.addHeader(WANT_DIGEST, "sha");
getObjMethod.setConfig(requestConfig.build());
checkRedirectWantDigestResult(getObjMethod, dsUrl,
TEST_SHA_DIGEST_HEADER_VALUE, null);
// Verify GET with multiple Want-Digest
final HttpGet getObjMethodMulti = getObjMethod(id);
getObjMethodMulti.addHeader(WANT_DIGEST, "sha, md5;q=0.3");
getObjMethodMulti.setConfig(requestConfig.build());
checkRedirectWantDigestResult(getObjMethodMulti, dsUrl,
TEST_SHA_DIGEST_HEADER_VALUE, TEST_MD5_DIGEST_HEADER_VALUE);
}
/**
 * Redirect mode created via PUT with an explicit NonRDFSource type link:
 * fetching the resource with the non-following client yields a 307 whose
 * Location is the external URI.
 */
@Test
public void testRedirectForHttpUri() throws Exception {
final String externalLocation = createHttpResource(TEST_BINARY_CONTENT);
final String id = getRandomUniqueId();
final HttpPut httpPut = putObjMethod(id);
httpPut.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
httpPut.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "redirect", null));
try (final CloseableHttpResponse response = execute(httpPut)) {
assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(response));
final HttpGet get = new HttpGet(getLocation(response));
try (final CloseableHttpResponse getResponse = noFollowClient.execute(get)) {
assertEquals(TEMPORARY_REDIRECT.getStatusCode(), getStatus(getResponse));
assertLocation(getResponse, externalLocation);
}
}
}
/**
 * Proxy mode over file://: GET returns 200 with Content-Location pointing at
 * the file URI and the file's exact bytes and length in the body.
 */
@Test
public void testProxyLocalFile() throws Exception {
final File localFile = createExternalLocalFile(TEST_BINARY_CONTENT);
final String id = getRandomUniqueId();
final HttpPut httpPut = putObjMethod(id);
httpPut.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
final String fileUri = localFile.toURI().toString();
httpPut.addHeader(LINK, getExternalContentLinkHeader(fileUri, "proxy", "text/plain"));
try (final CloseableHttpResponse response = execute(httpPut)) {
assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(response));
final HttpGet get = new HttpGet(getLocation(response));
try (final CloseableHttpResponse getResponse = execute(get)) {
assertEquals(OK.getStatusCode(), getStatus(getResponse));
assertContentLocation(getResponse, fileUri);
assertContentLength(getResponse, TEST_BINARY_CONTENT.length());
assertBodyMatches(getResponse, TEST_BINARY_CONTENT);
}
}
}
@Test
public void testCopyLocalFile() throws Exception {
    // Copy handling for a local file: content is ingested and served directly.
    final String entityStr = "Hello there, this is the original object speaking.";
    final String localUri = createExternalLocalFile(entityStr).toURI().toString();
    final HttpPut put = putObjMethod(getRandomUniqueId());
    put.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    put.addHeader(LINK, getExternalContentLinkHeader(localUri, "copy", "text/plain"));
    final String rescLoc;
    try (final CloseableHttpResponse putResp = execute(put)) {
        assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(putResp));
        rescLoc = getLocation(putResp);
    }
    // fetch the copy of the object
    try (final CloseableHttpResponse getResp = execute(new HttpGet(rescLoc))) {
        assertEquals(OK.getStatusCode(), getStatus(getResp));
        assertContentType(getResp, "text/plain");
        assertBodyMatches(getResp, entityStr);
    }
}
@Test
public void testCopyForHttpUri() throws Exception {
    // Copy handling for an HTTP source: a random binary is duplicated into the repo.
    final String entityStr = "Hello there, this is the original object speaking.";
    final String copyLocation = createHttpResource(entityStr);
    final HttpPut put = putObjMethod(getRandomUniqueId());
    put.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    put.addHeader(LINK, getExternalContentLinkHeader(copyLocation, "copy", "text/plain"));
    final String rescLoc;
    try (final CloseableHttpResponse putResp = execute(put)) {
        assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(putResp));
        rescLoc = getLocation(putResp);
    }
    // fetch the copy of the object
    try (final CloseableHttpResponse getResp = execute(new HttpGet(rescLoc))) {
        assertEquals(OK.getStatusCode(), getStatus(getResp));
        assertContentType(getResp, "text/plain");
        assertBodyMatches(getResp, entityStr);
    }
}
@Test
public void testProxyForHttpUri() throws Exception {
    // Proxy handling for an HTTP source: GET streams the remote body with Content-Location.
    final String entityStr = "Hello there, this is the original object speaking.";
    final String origLocation = createHttpResource(entityStr);
    final HttpPut put = putObjMethod(getRandomUniqueId());
    put.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    put.addHeader(LINK, getExternalContentLinkHeader(origLocation, "proxy", null));
    final String rescLoc;
    try (final CloseableHttpResponse putResp = execute(put)) {
        assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(putResp));
        rescLoc = getLocation(putResp);
    }
    try (final CloseableHttpResponse getResp = execute(new HttpGet(rescLoc))) {
        assertEquals(OK.getStatusCode(), getStatus(getResp));
        assertContentLocation(getResp, origLocation);
        assertBodyMatches(getResp, entityStr);
    }
}
@Test
public void testPostExternalContentProxyForHttpUri() throws Exception {
    // Same as the PUT-based proxy test, but created with POST + Slug.
    final String entityStr = "Hello there, this is the original object speaking.";
    final String origLocation = createHttpResource(entityStr);
    final HttpPost post = postObjMethod();
    post.addHeader("Slug", getRandomUniqueId());
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(origLocation, "proxy", "text/plain"));
    final String rescLoc;
    try (final CloseableHttpResponse postResp = execute(post)) {
        assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(postResp));
        rescLoc = getLocation(postResp);
    }
    try (final CloseableHttpResponse getResp = execute(new HttpGet(rescLoc))) {
        assertEquals(OK.getStatusCode(), getStatus(getResp));
        assertContentLocation(getResp, origLocation);
        assertContentType(getResp, "text/plain");
        assertBodyMatches(getResp, entityStr);
    }
}
@Test
public void testUnsupportedHandlingTypeInExternalMessagePUT() throws IOException {
    // An unrecognized handling attribute ("junk") must be rejected with 400.
    // NOTE(review): the asserted message speaks of a malformed URL rather than an
    // unsupported handling type — confirm that is the intended server response.
    final HttpPut put = putObjMethod(getRandomUniqueId());
    put.addHeader(LINK, getExternalContentLinkHeader("http://example.com/test", "junk", "image/jpeg"));
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals("Didn't get a BAD REQUEST error!", BAD_REQUEST.getStatusCode(), getStatus(resp));
        assertBodyContains(resp, "External content link header url is malformed");
    }
}
@Test
public void testUnsupportedHandlingTypeInExternalMessagePOST() throws IOException {
    // POST variant of the unsupported-handling rejection.
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, getExternalContentLinkHeader("http://example.com/junk", "junk", "image/jpeg"));
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals("Didn't get a BAD_REQUEST response!", BAD_REQUEST.getStatusCode(), getStatus(resp));
        assertBodyContains(resp, "External content link header url is malformed");
    }
}
@Test
public void testMissingHandlingTypeInExternalMessage() throws IOException {
    // Omitting the handling attribute entirely must also be rejected with 400.
    final HttpPut put = putObjMethod(getRandomUniqueId());
    put.addHeader(LINK, getExternalContentLinkHeader("http://example.com/junk", null, "image/jpeg"));
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals("Didn't get a BAD_REQUEST response!", BAD_REQUEST.getStatusCode(), getStatus(resp));
        assertBodyContains(resp, "External content link header url is malformed");
    }
}
@Test
public void testCopyNotFoundHttpContent() throws Exception {
    // Copying from an HTTP URL that 404s must fail at creation time.
    final String nonexistentPath = serverAddress + getRandomUniqueId();
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(nonexistentPath, "copy", null));
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals("Didn't get a BAD_REQUEST response!", BAD_REQUEST.getStatusCode(), getStatus(resp));
        assertBodyContains(resp, "Unable to access external binary");
    }
}
@Test
public void testCopyUnreachableHttpContent() throws Exception {
    // Copying from an unresolvable host must fail at creation time.
    final String nonexistentPath = "http://" + getRandomUniqueId() + ".example.com/";
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(nonexistentPath, "copy", null));
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals("Didn't get a BAD_REQUEST response!", BAD_REQUEST.getStatusCode(), getStatus(resp));
        assertBodyContains(resp, "Unable to access external binary");
    }
}
@Test
public void testProxyNotFoundHttpContent() throws Exception {
    // Proxying an HTTP URL that 404s must fail at creation time.
    final String nonexistentPath = serverAddress + getRandomUniqueId();
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(nonexistentPath, "proxy", null));
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals("Expected failure on creation", BAD_REQUEST.getStatusCode(), getStatus(resp));
        assertBodyContains(resp, "Unable to access external binary");
    }
}
@Test
public void testProxyUnreachableHttpContent() throws Exception {
    // Proxying an unresolvable host must fail at creation time.
    final String nonexistentPath = "http://" + getRandomUniqueId() + ".example.com/";
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(nonexistentPath, "proxy", null));
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals("Expected failure on creation", BAD_REQUEST.getStatusCode(), getStatus(resp));
        assertBodyContains(resp, "Unable to access external binary");
    }
}
@Test
public void testRedirectUnreachableHttpContent() throws Exception {
    // Even for redirect handling, an unresolvable host is rejected at creation time.
    final String nonexistentPath = "http://" + getRandomUniqueId() + ".example.com/";
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(nonexistentPath, "redirect", null));
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals("Expected failure on creation", BAD_REQUEST.getStatusCode(), getStatus(resp));
        assertBodyContains(resp, "Unable to access external binary");
    }
}
@Test
public void testProxyNotFoundLocalFile() throws Exception {
    // Proxying a local file that does not exist must be rejected at creation time.
    verifyNotFoundLocalFile("proxy");
}
@Test
public void testRedirectNotFoundLocalFile() throws Exception {
    // Redirecting to a local file that does not exist must be rejected at creation time.
    verifyNotFoundLocalFile("redirect");
}
@Test
public void testCopyNotFoundLocalFile() throws Exception {
    // Copying a local file that does not exist must be rejected at creation time.
    verifyNotFoundLocalFile("copy");
}
/**
 * Verifies that creating external content pointing at a nonexistent local file
 * fails with 400 Bad Request for the given handling type.
 *
 * @param handling external content handling attribute ("proxy", "redirect" or "copy")
 */
private void verifyNotFoundLocalFile(final String handling) throws Exception {
    final File nonexistentFile = tempFolder.newFile();
    // Delete the temp file so the URI points at a path that no longer exists.
    // Assert the deletion rather than ignoring its return value, so a failed
    // delete does not silently turn this into a test of the wrong condition.
    assertTrue("Unable to delete temp file for test setup", nonexistentFile.delete());
    final String nonexistentUri = nonexistentFile.toURI().toString();
    final HttpPost httpPost = postObjMethod();
    httpPost.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    httpPost.addHeader(LINK, getExternalContentLinkHeader(nonexistentUri, handling, null));
    try (final CloseableHttpResponse response = execute(httpPost)) {
        assertEquals("Expected failure on creation", BAD_REQUEST.getStatusCode(),
                getStatus(response));
        assertBodyContains(response, "Path did not match any allowed external content paths");
    }
}
@Test
public void testCopyWithTransmissionFixityForLocalFile() throws Exception {
    // A Digest header matching the file content allows the copy to succeed.
    final String localUri = createExternalLocalFile(TEST_BINARY_CONTENT).toURI().toString();
    final HttpPut put = setupExternalContentPut(localUri, "copy", "text/plain");
    put.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    final String rescLoc;
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(resp));
        rescLoc = getLocation(resp);
    }
    // fetch the copy of the object
    try (final CloseableHttpResponse getResp = execute(new HttpGet(rescLoc))) {
        assertEquals(OK.getStatusCode(), getStatus(getResp));
        assertContentType(getResp, "text/plain");
        assertBodyMatches(getResp, TEST_BINARY_CONTENT);
    }
}
@Test
public void testCopyWithInvalidTransmissionFixityForLocalFile() throws Exception {
    // A Digest that does not match the actual content must yield 409 CONFLICT.
    final String localUri = createExternalLocalFile("Not the expected content").toURI().toString();
    final HttpPut put = setupExternalContentPut(localUri, "copy", "text/plain");
    put.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals(CONFLICT.getStatusCode(), getStatus(resp));
    }
}
@Test
public void testCopyWithTransmissionFixityForHttpUri() throws Exception {
    // A matching Digest header allows copying from an HTTP source.
    final String externalLocation = createHttpResource("text/plain", TEST_BINARY_CONTENT);
    final HttpPut put = setupExternalContentPut(externalLocation, "copy", "text/plain");
    put.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    final String rescLoc;
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(resp));
        rescLoc = getLocation(resp);
    }
    // fetch the copy of the object
    try (final CloseableHttpResponse getResp = execute(new HttpGet(rescLoc))) {
        assertEquals(OK.getStatusCode(), getStatus(getResp));
        assertContentType(getResp, "text/plain");
        assertBodyMatches(getResp, TEST_BINARY_CONTENT);
    }
}
@Test
public void testProxyWithTransmissionFixityForLocalFile() throws Exception {
    // A matching Digest header allows proxy creation for a local file.
    final String externalLocation = createExternalLocalFile(TEST_BINARY_CONTENT).toURI().toString();
    final HttpPut put = setupExternalContentPut(externalLocation, "proxy", "text/plain");
    put.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals("Didn't get a CREATED response!", CREATED.getStatusCode(), getStatus(resp));
        assertIsProxyBinary(getLocation(resp), externalLocation, TEST_BINARY_CONTENT, "text/plain");
    }
}
@Test
public void testProxyWithInvalidTransmissionFixityForLocalFile() throws Exception {
    // A bogus digest value must cause the proxy creation to fail with 409.
    final String externalLocation = createExternalLocalFile(TEST_BINARY_CONTENT).toURI().toString();
    final HttpPut put = setupExternalContentPut(externalLocation, "proxy", "text/plain");
    put.addHeader("Digest", "sha=12345678910");
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals(CONFLICT.getStatusCode(), getStatus(resp));
    }
}
@Test
public void testRedirectWithTransmissionFixityForHttpUri() throws Exception {
    // A matching Digest header allows redirect creation for an HTTP source.
    final String externalLocation = createHttpResource("text/plain", TEST_BINARY_CONTENT);
    final HttpPut put = setupExternalContentPut(externalLocation, "redirect", "text/plain");
    put.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals(CREATED.getStatusCode(), getStatus(resp));
        assertIsRedirectBinary(getLocation(resp), externalLocation, TEST_BINARY_CONTENT, "text/plain");
    }
}
@Test
public void testRedirectWithInvalidTransmissionFixityForHttpUri() throws Exception {
    // Digest mismatch against the external content must yield 409 CONFLICT.
    final String externalLocation = createHttpResource("text/plain", "bad content");
    final HttpPut put = setupExternalContentPut(externalLocation, "redirect", "text/plain");
    put.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals(CONFLICT.getStatusCode(), getStatus(resp));
    }
}
@Test
public void testProxyPostWithTransmissionFixityForHttpUri() throws Exception {
    // POST-created proxy with a matching Digest header succeeds.
    final String externalLocation = createHttpResource("text/plain", TEST_BINARY_CONTENT);
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "proxy", "text/plain"));
    post.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals(CREATED.getStatusCode(), getStatus(resp));
        assertIsProxyBinary(getLocation(resp), externalLocation, TEST_BINARY_CONTENT, "text/plain");
    }
}
@Test
public void testProxyPostWithInvalidTransmissionFixityForHttpUri() throws Exception {
    // POST-created proxy with a mismatched Digest must yield 409 CONFLICT.
    final String externalLocation = createHttpResource("text/plain", "bad content");
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "proxy", "text/plain"));
    post.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals(CONFLICT.getStatusCode(), getStatus(resp));
    }
}
@Test
public void testRedirectPostWithTransmissionFixityForHttpUri() throws Exception {
    // POST-created redirect with a matching Digest header succeeds.
    final String externalLocation = createHttpResource("text/plain", TEST_BINARY_CONTENT);
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "redirect", "text/plain"));
    post.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals(CREATED.getStatusCode(), getStatus(resp));
        assertIsRedirectBinary(getLocation(resp), externalLocation, TEST_BINARY_CONTENT, "text/plain");
    }
}
@Test
public void testRedirectPostWithInvalidTransmissionFixityForHttpUri() throws Exception {
    // POST-created redirect with a mismatched Digest must yield 409 CONFLICT.
    final String externalLocation = createHttpResource("text/plain", "bad content");
    final HttpPost post = postObjMethod();
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.addHeader(LINK, getExternalContentLinkHeader(externalLocation, "redirect", "text/plain"));
    post.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(post)) {
        assertEquals(CONFLICT.getStatusCode(), getStatus(resp));
    }
}
@Test
public void testProxyPutWithTransmissionFixityForHttpUri() throws Exception {
    // PUT-created proxy with a matching Digest header succeeds.
    final String externalLocation = createHttpResource("text/plain", TEST_BINARY_CONTENT);
    final HttpPut put = setupExternalContentPut(externalLocation, "proxy", "text/plain");
    put.addHeader("Digest", TEST_SHA_DIGEST_HEADER_VALUE);
    try (final CloseableHttpResponse resp = execute(put)) {
        assertEquals(CREATED.getStatusCode(), getStatus(resp));
        assertIsProxyBinary(getLocation(resp), externalLocation, TEST_BINARY_CONTENT, "text/plain");
    }
}
@Test
public void testUpdateProxyHttpUri() throws Exception {
    // Re-PUT with a new proxy URI swaps the external location in place.
    final String content2 = "<doc>some more content</doc>";
    final String loc1 = createHttpResource("text/plain", TEST_BINARY_CONTENT);
    final String loc2 = createHttpResource("text/xml", content2);
    final String rescLoc = createExternalContentResource(loc1, "proxy", null);
    assertIsProxyBinary(rescLoc, loc1, TEST_BINARY_CONTENT, "text/plain");
    updateExternalContentResource(rescLoc, loc2, "proxy", null);
    assertIsProxyBinary(rescLoc, loc2, content2, "text/xml");
}
@Test
public void testUpdateProxyLocalFile() throws Exception {
    // Repointing a proxied binary from one local file to another, adding a media type.
    final String content2 = "<doc>some more content</doc>";
    final String uri1 = createExternalLocalFile(TEST_BINARY_CONTENT).toURI().toString();
    final String uri2 = createExternalLocalFile(content2).toURI().toString();
    final String rescLoc = createExternalContentResource(uri1, "proxy", null);
    assertIsProxyBinary(rescLoc, uri1, TEST_BINARY_CONTENT, "application/octet-stream");
    updateExternalContentResource(rescLoc, uri2, "proxy", "text/xml");
    assertIsProxyBinary(rescLoc, uri2, content2, "text/xml");
}
@Test
public void testUpdateRedirectHttpUri() throws Exception {
    // Repointing a redirected binary from one HTTP URI to another.
    final String content2 = "<doc>some more content</doc>";
    final String loc1 = createHttpResource("text/plain", TEST_BINARY_CONTENT);
    final String loc2 = createHttpResource(content2);
    final String rescLoc = createExternalContentResource(loc1, "redirect", null);
    assertIsRedirectBinary(rescLoc, loc1, TEST_BINARY_CONTENT, "text/plain");
    updateExternalContentResource(rescLoc, loc2, "redirect", "text/xml");
    assertIsRedirectBinary(rescLoc, loc2, content2, "text/xml");
}
@Test
public void testUpdateProxyToRedirectForHttpUri() throws Exception {
    // Switching handling from proxy to redirect while keeping the same external URI.
    final String externalLocation = createHttpResource(TEST_BINARY_CONTENT);
    final String rescLoc = createExternalContentResource(externalLocation, "proxy", null);
    assertIsProxyBinary(rescLoc, externalLocation, TEST_BINARY_CONTENT, null);
    updateExternalContentResource(rescLoc, externalLocation, "redirect", null);
    assertIsRedirectBinary(rescLoc, externalLocation, TEST_BINARY_CONTENT, null);
}
@Test
public void testUpdateRedirectToProxyForHttpUri() throws Exception {
    // Switching handling from redirect to proxy while keeping the same external URI.
    final String externalLocation = createHttpResource(TEST_BINARY_CONTENT);
    final String rescLoc = createExternalContentResource(externalLocation, "redirect", null);
    assertIsRedirectBinary(rescLoc, externalLocation, TEST_BINARY_CONTENT, null);
    updateExternalContentResource(rescLoc, externalLocation, "proxy", null);
    assertIsProxyBinary(rescLoc, externalLocation, TEST_BINARY_CONTENT, null);
}
@Test
public void testUpdateProxyToRedirectForLocalFile() throws Exception {
    // Switching a local-file binary from proxy to redirect handling.
    final String content2 = "<doc>some more content</doc>";
    final String uri1 = createExternalLocalFile(TEST_BINARY_CONTENT).toURI().toString();
    final String uri2 = createExternalLocalFile(content2).toURI().toString();
    final String rescLoc = createExternalContentResource(uri1, "proxy", "text/plain");
    assertIsProxyBinary(rescLoc, uri1, TEST_BINARY_CONTENT, "text/plain");
    updateExternalContentResource(rescLoc, uri2, "redirect", "text/xml");
    // Not checking on the content, since following a redirect on a file is unlikely to work
    assertIsRedirectBinary(rescLoc, uri2, null, "text/xml");
}
@Test
public void testUpdateHttpUriToLocalFile() throws Exception {
    // Repointing a proxied binary from an HTTP URI to a local file.
    final String content2 = "some more content";
    final String httpLoc = createHttpResource(TEST_BINARY_CONTENT);
    final String fileLoc = createExternalLocalFile(content2).toURI().toString();
    final String rescLoc = createExternalContentResource(httpLoc, "proxy", null);
    assertIsProxyBinary(rescLoc, httpLoc, TEST_BINARY_CONTENT, "text/plain");
    updateExternalContentResource(rescLoc, fileLoc, "proxy", null);
    assertIsProxyBinary(rescLoc, fileLoc, content2, "application/octet-stream");
}
@Test
public void testUpdateInternalToLocalFile() throws Exception {
    // Converting an internally-stored binary into a proxied local file.
    final String rescLoc;
    try (final CloseableHttpResponse response =
            execute(putDSMethod(getRandomUniqueId(), "x", TEST_BINARY_CONTENT))) {
        assertEquals(CREATED.getStatusCode(), getStatus(response));
        rescLoc = getLocation(response);
    }
    final String content2 = "<doc>some more content</doc>";
    final String externalLocation = createExternalLocalFile(content2).toURI().toString();
    updateExternalContentResource(rescLoc, externalLocation, "proxy", "text/xml");
    assertIsProxyBinary(rescLoc, externalLocation, content2, "text/xml");
}
@Test
public void testUpdateLocalFileToInternal() throws Exception {
    // Converting a proxied local file into internally-stored content.
    final String externalLocation = createExternalLocalFile(TEST_BINARY_CONTENT).toURI().toString();
    final String rescLoc = createExternalContentResource(externalLocation, "proxy", "text/plain");
    assertIsProxyBinary(rescLoc, externalLocation, TEST_BINARY_CONTENT, "text/plain");
    // Overwrite with a regular (internal) binary PUT
    final String content2 = "<doc>some more content</doc>";
    final HttpPut replacePut = new HttpPut(rescLoc);
    replacePut.setEntity(new StringEntity(content2));
    replacePut.setHeader(CONTENT_TYPE, "text/xml");
    assertEquals(NO_CONTENT.getStatusCode(), getStatus(replacePut));
    // The resource now serves internal content: no Content-Location header.
    try (final CloseableHttpResponse resp = execute(new HttpGet(rescLoc))) {
        assertEquals(OK.getStatusCode(), getStatus(resp));
        assertNull(resp.getFirstHeader("Content-Location"));
        assertContentLength(resp, content2.length());
        assertBodyMatches(resp, content2);
        assertContentType(resp, "text/xml");
    }
}
@Test
public void testUpdateInternalToHttpUri() throws Exception {
    // Converting an internally-stored binary into a redirected external URI.
    final String rescLoc;
    try (final CloseableHttpResponse response =
            execute(putDSMethod(getRandomUniqueId(), "x", TEST_BINARY_CONTENT))) {
        assertEquals(CREATED.getStatusCode(), getStatus(response));
        rescLoc = getLocation(response);
    }
    final String content2 = "<doc>some more content</doc>";
    final String externalLocation = createHttpResource(content2);
    updateExternalContentResource(rescLoc, externalLocation, "redirect", "text/xml");
    assertIsRedirectBinary(rescLoc, externalLocation, content2, "text/xml");
}
@Test
public void testUpdateHttpUriToInternal() throws Exception {
    // Converting a proxied HTTP binary back into internally-stored content.
    final String externalLocation = createHttpResource(TEST_BINARY_CONTENT);
    final String rescLoc = createExternalContentResource(externalLocation, "proxy", null);
    assertIsProxyBinary(rescLoc, externalLocation, TEST_BINARY_CONTENT, "text/plain");
    // Overwrite with a regular (internal) binary PUT
    final String content2 = "<doc>some more content</doc>";
    final HttpPut replacePut = new HttpPut(rescLoc);
    replacePut.setEntity(new StringEntity(content2));
    replacePut.setHeader(CONTENT_TYPE, "text/xml");
    assertEquals(NO_CONTENT.getStatusCode(), getStatus(replacePut));
    // The resource now serves internal content: no Content-Location header.
    try (final CloseableHttpResponse resp = execute(new HttpGet(rescLoc))) {
        assertEquals(OK.getStatusCode(), getStatus(resp));
        assertNull(resp.getFirstHeader("Content-Location"));
        assertContentLength(resp, content2.length());
        assertBodyMatches(resp, content2);
        assertContentType(resp, "text/xml");
    }
}
@Test
public void testLocalFileNotDeleted() throws Exception {
    // Deleting the repository resource (and its tombstone) must not touch the external file.
    final File localFile = createExternalLocalFile(TEST_BINARY_CONTENT);
    final String externalLocation = localFile.toURI().toString();
    final String rescLoc = createExternalContentResource(externalLocation, "proxy", "text/plain");
    final String id = StringUtils.substringAfterLast(rescLoc, "/");
    assertIsProxyBinary(rescLoc, externalLocation, TEST_BINARY_CONTENT, "text/plain");
    assertEquals(NO_CONTENT.getStatusCode(), getStatus(new HttpDelete(rescLoc)));
    assertDeleted(id);
    assertTrue("External binary must exist after resource deletion", localFile.exists());
    assertEquals(NO_CONTENT.getStatusCode(),
            getStatus(new HttpDelete(rescLoc + "/" + FedoraTypes.FCR_TOMBSTONE)));
    assertTrue("External binary must exist after deleting tombstone", localFile.exists());
}
/**
 * Builds a PUT that creates a new NonRDFSource backed by the given external content.
 */
private HttpPut setupExternalContentPut(final String externalLocation, final String handling,
        final String contentType) {
    final HttpPut put = putObjMethod(getRandomUniqueId());
    put.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    put.addHeader(LINK, getExternalContentLinkHeader(externalLocation, handling, contentType));
    return put;
}
/**
 * Creates an external content resource via PUT and returns its location.
 */
private String createExternalContentResource(final String externalLocation, final String handling,
        final String contentType) throws IOException {
    try (final CloseableHttpResponse response = execute(
            setupExternalContentPut(externalLocation, handling, contentType))) {
        assertEquals(CREATED.getStatusCode(), getStatus(response));
        return getLocation(response);
    }
}
/**
 * Repoints an existing resource at new external content via PUT, expecting 204.
 */
private void updateExternalContentResource(final String rescLoc, final String externalLocation,
        final String handling, final String contentType) throws IOException {
    final HttpPut put = new HttpPut(rescLoc);
    put.addHeader(LINK, getExternalContentLinkHeader(externalLocation, handling, contentType));
    try (final CloseableHttpResponse response = execute(put)) {
        assertEquals(NO_CONTENT.getStatusCode(), getStatus(response));
    }
}
/**
 * Asserts the resource behaves as a proxied binary: 200 OK, Content-Location
 * pointing at the external URI, and the external content as the body.
 * Content type is only checked when expectedType is non-null.
 */
private void assertIsProxyBinary(final String rescLocation, final String expectedLocation,
        final String expectedContent, final String expectedType) throws IOException {
    try (final CloseableHttpResponse resp = execute(new HttpGet(rescLocation))) {
        assertEquals(OK.getStatusCode(), getStatus(resp));
        assertContentLocation(resp, expectedLocation);
        assertContentLength(resp, expectedContent.length());
        assertBodyMatches(resp, expectedContent);
        if (expectedType != null) {
            assertContentType(resp, expectedType);
        }
    }
}
/**
 * Asserts the resource behaves as a redirected binary: a non-following GET yields
 * 307 with a Location header pointing at the external URI. When expectedContent is
 * non-null, the redirect is additionally followed and the body verified.
 * Content type is only checked when expectedType is non-null.
 */
private void assertIsRedirectBinary(final String rescLocation, final String expectedLocation,
        final String expectedContent, final String expectedType) throws IOException {
    final HttpGet get = new HttpGet(rescLocation);
    // First request uses the non-redirecting client to observe the 307 itself.
    try (final CloseableHttpResponse resp = noFollowClient.execute(get)) {
        assertEquals(TEMPORARY_REDIRECT.getStatusCode(), getStatus(resp));
        assertLocation(resp, expectedLocation);
        if (expectedType != null) {
            assertContentType(resp, expectedType);
        }
    }
    if (expectedContent != null) {
        // Follow redirect to the content
        try (final CloseableHttpResponse resp = execute(get)) {
            assertBodyMatches(resp, expectedContent);
            assertContentLength(resp, expectedContent.length());
        }
    }
}
/**
 * Creates a plain-text HTTP-accessible binary holding the given content and
 * returns its URI, for use as an external content source.
 */
private String createHttpResource(final String content) throws Exception {
    return createHttpResource("text/plain", content);
}
/**
 * Creates an HTTP-accessible binary with the given content type and content,
 * returning its URI for use as an external content source.
 */
private String createHttpResource(final String contentType, final String content) throws Exception {
    // Make an external remote URI.
    final HttpPost post = postObjMethod();
    post.addHeader(CONTENT_TYPE, contentType);
    post.addHeader(LINK, NON_RDF_SOURCE_LINK_HEADER);
    post.setEntity(new StringEntity(content));
    try (final CloseableHttpResponse response = execute(post)) {
        assertEquals(SC_CREATED, getStatus(response));
        return getLocation(response);
    }
}
/** Asserts that the response body contains the expected substring. */
private void assertBodyContains(final CloseableHttpResponse response, final String expected) throws IOException {
    final String body = IOUtils.toString(response.getEntity().getContent(), UTF_8);
    assertTrue("Expected response to contain '" + expected + "' but was '" + body + "'",
            body.contains(expected));
}
/** Asserts that the response body equals the expected value exactly. */
private void assertBodyMatches(final CloseableHttpResponse response, final String expected) throws IOException {
    final String body = IOUtils.toString(response.getEntity().getContent(), UTF_8);
    assertEquals("Response body did not match the expected value", expected, body);
}
/** Asserts that the Content-Length header matches the expected byte count. */
private void assertContentLength(final CloseableHttpResponse response, final long expectedLength) {
    assertEquals("Content-length header did not match", expectedLength, Long.parseLong(response
            .getFirstHeader(CONTENT_LENGTH).getValue()));
}
/** Asserts that the Content-Type header equals the expected value exactly. */
private void assertContentType(final CloseableHttpResponse response, final String expected) {
    assertEquals("Content-type header did not match", expected, response.getFirstHeader(CONTENT_TYPE).getValue());
}
/** Asserts that the Content-Location header points at the expected URI. */
private void assertContentLocation(final CloseableHttpResponse response, final String expectedLoc) {
    assertEquals("Content location header did not match", expectedLoc, getContentLocation(response));
}
/** Asserts that the Location header points at the expected URI. */
private void assertLocation(final CloseableHttpResponse response, final String expectedLoc) {
    assertEquals("Location header did not match", expectedLoc, getLocation(response));
}
}
|
<filename>src/main/java/com/forte/config/ConfigurationHelper.java
/*
* Copyright (c) 2020. ForteScarlet All rights reserved.
* Project simple-robot-core
* File ConfigurationHelper.java
*
* You can contact the author through the following channels:
* github https://github.com/ForteScarlet
* gitee https://gitee.com/ForteScarlet
* email <EMAIL>
* QQ 1149159218
*
*/
package com.forte.config;
import com.forte.config.resolve.ConfigResolvor;
import java.util.Map;
import java.util.Properties;
/**
*
* 工具入口
*
* @author ForteScarlet <[email]<EMAIL>>
* @since JDK1.8
**/
public class ConfigurationHelper {

    /**
     * Resolves a class into an injectable configuration wrapper.
     * If the class carries no relevant configuration annotations, the
     * resulting wrapper simply performs no injection.
     *
     * @param type the configuration class
     * @param <T>  configuration type
     * @return an injectable view of the configuration class
     */
    public static <T> InjectableConfig<T> toInjectable(Class<T> type){
        return ConfigResolvor.toInjectable(type);
    }

    /**
     * Injects configuration values into a config object from a {@link Properties} instance.
     *
     * @param config           the configuration object to populate
     * @param configProperties the configuration values
     * @param <T>              configuration type
     */
    public static <T> void inject(T config, Properties configProperties){
        toInjectable(getClass(config)).inject(config, configProperties);
    }

    /**
     * Injects configuration values into a config object from a map.
     *
     * @param config    the configuration object to populate
     * @param configMap the configuration values
     * @param <T>       configuration type
     */
    public static <T> void inject(T config, Map<String, Object> configMap){
        toInjectable(getClass(config)).inject(config, configMap);
    }

    /**
     * Injects a single configuration value into a config object.
     *
     * @param config the configuration object to populate
     * @param key    configuration key
     * @param value  configuration value
     * @param <T>    configuration type
     */
    public static <T> void inject(T config, String key, Object value){
        toInjectable(getClass(config)).inject(config, key, value);
    }

    /**
     * Returns the runtime class of the given object typed as {@code Class<T>}.
     * The unchecked cast is safe because {@code obj.getClass()} is by definition
     * the runtime class of {@code obj}.
     */
    @SuppressWarnings("unchecked")
    private static <T> Class<T> getClass(T obj){
        return (Class<T>) obj.getClass();
    }
}
|
namespace JVM {
  /** Outcome of testing a JVM connection. */
  export interface ConnectionTestResult {
    /** True when the connection attempt succeeded. */
    ok: boolean;
    /** Status or error detail accompanying the result. */
    message: string;
  }
}
|
package com.roadrunner.panoengine.panorama;
import org.json.JSONArray;
import org.json.JSONObject;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import com.roadrunner.panoengine.opengl.OpenGLTexture;
/**
 * Builds a {@link Panorama} (cubic or cylindrical) from a JSON description and
 * applies it to a {@link PanoramaView}, together with the camera limits,
 * hotspots, gyro and touch-scrolling settings found in the document.
 * <p>
 * Tile textures are decoded on background threads via {@link AsyncTask};
 * missing or malformed optional sections are logged and skipped, while a
 * missing "type" or "images" property aborts loading with a RuntimeException.
 */
public class JSONPanoramaLoader extends PanoramaLoader {

    /** Geometry of the panorama, derived from the JSON "type" property. */
    public enum PanoramaType {
        Unknown, Cubic, Cylindrical
    }

    /** Resolves an asset identifier from the JSON document into a {@link Bitmap}. */
    public interface BitmapLoader {
        public Bitmap loadBitmap(String assetID);
    }

    private BitmapLoader bitmapLoader;
    private JSONObject json;

    /**
     * @param bitmapLoader resolves asset identifiers referenced by the document
     * @param json         the panorama description
     */
    public JSONPanoramaLoader(BitmapLoader bitmapLoader, JSONObject json) {
        this.bitmapLoader = bitmapLoader;
        this.json = json;
    }

    /**
     * Parses the JSON document and configures the given view.
     *
     * @param view the view that will display the panorama
     * @throws RuntimeException if the "type" or "images" property is missing
     *                          or the panorama cannot be created
     */
    @Override
    public void load(PanoramaView view) {
        Panorama panorama = null;
        String type = json.optString("type");
        PanoramaType panoramaType = PanoramaType.Unknown;
        // NOTE(review): optString never returns null (it returns ""), so a
        // missing "type" surfaces as "not recognized" rather than this branch.
        if (type != null) {
            try {
                if (type.equals("cubic")) {
                    panoramaType = PanoramaType.Cubic;
                    int subdivisionX = json.getInt("subdivisionX");
                    int subdivisionY = json.getInt("subdivisionY");
                    panorama = new CubicPanorama(subdivisionX, subdivisionY);
                } else if (type.equals("cylindrical")) {
                    panoramaType = PanoramaType.Cylindrical;
                    int subdivisionX = json.getInt("subdivisionX");
                    int subdivisionY = json.getInt("subdivisionY");
                    panorama = new CylindricalPanorama(subdivisionX, subdivisionY);
                } else {
                    throw new RuntimeException("Panorama type is not recognized");
                }
            } catch (Exception e) {
                throw new RuntimeException("Panorama could not be created", e);
            }
        } else {
            throw new RuntimeException("type property not exists");
        }
        JSONObject images = json.optJSONObject("images");
        if (images != null) {
            // Optional low-resolution preview applied to every tile slot,
            // shown until the full-resolution tiles finish loading.
            if (!images.isNull("preview")) {
                try {
                    String assetID = images.getString("preview");
                    OpenGLTexture texture = createTexture(bitmapLoader, assetID);
                    for (int i = 0; i < panorama.getTexturesCount(); i++) {
                        panorama.setTexture(texture, i);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if (panoramaType == PanoramaType.Cubic) {
                CubicPanorama cPanorama = (CubicPanorama) panorama;
                // One tile array per cube face. Each face is loaded
                // independently; a missing or malformed face is logged and
                // skipped (same behaviour as the original per-face try/catch
                // blocks, deduplicated into a data-driven loop).
                String[] faceKeys = {"front", "back", "left", "right", "up", "down"};
                CubicPanorama.CubeFaceOrientation[] faces = {
                        CubicPanorama.CubeFaceOrientation.FRONT,
                        CubicPanorama.CubeFaceOrientation.BACK,
                        CubicPanorama.CubeFaceOrientation.LEFT,
                        CubicPanorama.CubeFaceOrientation.RIGHT,
                        CubicPanorama.CubeFaceOrientation.UP,
                        CubicPanorama.CubeFaceOrientation.DOWN};
                for (int f = 0; f < faceKeys.length; f++) {
                    try {
                        JSONArray imageArr = images.getJSONArray(faceKeys[f]);
                        jsonForCubicPanorama(bitmapLoader, imageArr, faces[f], cPanorama);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            } else if (panoramaType == PanoramaType.Cylindrical) {
                CylindricalPanorama cPanorama = (CylindricalPanorama) panorama;
                JSONArray imageArr = images.optJSONArray("images");
                if (imageArr != null) {
                    for (int i = 0; i < imageArr.length(); i++) {
                        try {
                            JSONObject image = imageArr.getJSONObject(i);
                            int divX = image.getInt("divX");
                            int divY = image.getInt("divY");
                            String assetID = image.getString("assetID");
                            CylindrialLoader loader = new CylindrialLoader(bitmapLoader,
                                    cPanorama, assetID, divX, divY);
                            loader.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        } else {
            throw new RuntimeException("images property not exists");
        }
        // Optional camera limits and initial orientation; all angles are
        // given in degrees in the JSON and converted to radians here.
        JSONObject camera = json.optJSONObject("camera");
        if (camera != null) {
            PanoramaCamera currentCamera = view.getCamera();
            int athmin = camera.optInt("athmin");
            int athmax = camera.optInt("athmax");
            int atvmin = camera.optInt("atvmin");
            int atvmax = camera.optInt("atvmax");
            int hlookat = camera.optInt("hlookat");
            int vlookat = camera.optInt("vlookat");
            currentCamera.setPitchRange((float) Math.toRadians(atvmin), (float) Math.toRadians(atvmax));
            currentCamera.setYawRange((float) Math.toRadians(athmin), (float) Math.toRadians(athmax));
            currentCamera.setLookAt((float) Math.toRadians(vlookat), (float) Math.toRadians(hlookat));
        }
        // Optional hotspots; a malformed hotspot entry is logged and skipped.
        JSONArray hotspots = json.optJSONArray("hotspots");
        if (hotspots != null) {
            for (int i = 0; i < hotspots.length(); i++) {
                try {
                    JSONObject hotspot = hotspots.getJSONObject(i);
                    String assetId = hotspot.getString("image");
                    OpenGLTexture hotspotTexture = createTexture(bitmapLoader,
                            assetId);
                    int identifier = hotspot.getInt("id");
                    int atv = hotspot.getInt("atv");
                    int ath = hotspot.getInt("ath");
                    float width = (float) hotspot.getDouble("width");
                    float height = (float) hotspot.getDouble("height");
                    String data = hotspot.getString("data");
                    PanoramaHotspot currentHotspot = new PanoramaHotspot(
                            identifier, hotspotTexture, (float) Math.toRadians(atv), (float) Math.toRadians(ath), width,
                            height, data);
                    panorama.addHotspot(currentHotspot);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        JSONObject gyro = json.optJSONObject("gyro");
        if (gyro != null) {
            view.setSensorEnabled(gyro.optBoolean("enabled", false));
        }
        JSONObject scrolling = json.optJSONObject("scrolling");
        if (scrolling != null) {
            view.setTouchScrollingEnabled(scrolling.optBoolean("enabled", true));
        }
        view.setPanorama(panorama);
    }

    /**
     * Loads the bitmap for {@code assetID} and wraps it in an OpenGL texture.
     *
     * @throws Exception if the bitmap cannot be loaded or the texture created
     */
    private static OpenGLTexture createTexture(BitmapLoader bitmapLoader, String assetID) throws Exception {
        Bitmap bm = bitmapLoader.loadBitmap(assetID);
        OpenGLTexture texture = new OpenGLTexture(bm);
        return texture;
    }

    /**
     * Background task that decodes one cube-face tile and installs it on the
     * panorama once decoding completes (on the UI thread).
     */
    private static class CubicLoader extends
            AsyncTask<Void, Void, OpenGLTexture> {

        private BitmapLoader bitmapLoader;
        private CubicPanorama panorama;
        private String assetID;
        private CubicPanorama.CubeFaceOrientation face;
        private int sX;
        private int sY;

        public CubicLoader(BitmapLoader bitmapLoader, CubicPanorama panorama,
                           String assetID,
                           CubicPanorama.CubeFaceOrientation face, int sX, int sY) {
            this.bitmapLoader = bitmapLoader;
            this.panorama = panorama;
            this.assetID = assetID;
            this.face = face;
            this.sX = sX;
            this.sY = sY;
        }

        @Override
        protected OpenGLTexture doInBackground(Void... params) {
            try {
                return createTexture(bitmapLoader, assetID);
            } catch (Exception e) {
                // A failed decode yields null; onPostExecute skips it.
                return null;
            }
        }

        @Override
        protected void onPostExecute(OpenGLTexture result) {
            // Guard against a failed background load (fix: previously a null
            // texture was installed here, unlike CylindrialLoader).
            if (result != null) {
                panorama.setTexture(result, face, sX, sY);
            }
        }
    }

    /**
     * Schedules one {@link CubicLoader} per tile entry of a cube face.
     * Malformed tile entries are logged and skipped.
     */
    private static void jsonForCubicPanorama(BitmapLoader bitmapLoader,
                                             JSONArray jsonArray, CubicPanorama.CubeFaceOrientation face,
                                             CubicPanorama panorama) {
        int count = jsonArray.length();
        for (int i = 0; i < count; i++) {
            JSONObject image = jsonArray.optJSONObject(i);
            if (image != null) {
                try {
                    int divX = image.getInt("divX");
                    int divY = image.getInt("divY");
                    String assetID = image.getString("assetID");
                    CubicLoader loader = new CubicLoader(bitmapLoader, panorama,
                            assetID, face, divX, divY);
                    loader.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Background task that decodes one cylindrical tile and installs it on
     * the panorama once decoding completes (on the UI thread).
     */
    private static class CylindrialLoader extends
            AsyncTask<Void, Void, OpenGLTexture> {

        private BitmapLoader bitmapLoader;
        private CylindricalPanorama panorama;
        private String assetID;
        private int sX;
        private int sY;

        public CylindrialLoader(BitmapLoader bitmapLoader,
                                CylindricalPanorama panorama, String assetID,
                                int sX, int sY) {
            this.bitmapLoader = bitmapLoader;
            this.panorama = panorama;
            this.assetID = assetID;
            this.sX = sX;
            this.sY = sY;
        }

        @Override
        protected OpenGLTexture doInBackground(Void... params) {
            try {
                return createTexture(bitmapLoader, assetID);
            } catch (Throwable t) {
                return null;
            }
        }

        @Override
        protected void onPostExecute(OpenGLTexture result) {
            if (result != null) {
                panorama.setTexture(result, sX, sY);
            }
        }
    }
}
|
Life strategies of people with deafblindness due to Usher syndrome type 2a - a qualitative study
ABSTRACT Purpose: To explore life strategies in people with Usher syndrome type 2a. Background: There are no studies on life strategies in people with Usher syndrome. People with deafblindness are often described in terms of poor health and low quality of life, or as being vulnerable. From a clinical point of view, it is of importance to balance this picture, with an increased knowledge of life strategies. Methods: The study had a qualitative explorative design. Fourteen people aged 20–64 years (4 women, 10 men) with USH2a in Sweden participated in focus group interviews, which were transcribed and analysed by qualitative content analysis. Results: The content analysis resulted in seven categories; remaining active, using devices, using support, sharing knowledge, appreciating the present, maintaining a positive image and alleviating emotional pain. Two sub-themes: resolve or prevent challenges and comforting oneself was abstracted forming a theme “being at the helm”. Conclusion: The findings show that people with USH2a have a variety of life strategies that can be interpreted as highlighting different aspects of psychological flexibility in a life adjustment process. The study demonstrates that people with USH2a manage in many ways, and metaphorically, by “taking the helm”, they strive to actively navigate towards their own chosen values.
Introduction
Deafblindness, also known as dual sensory loss, varies in diagnosis, onset, auditory and visual abilities. The Nordic definition of deafblindness states: "Deafblindness is a combined vision and hearing impairment of such severity that it is hard for the impaired senses to compensate for each other. Thus, deafblindness is a distinct disability. To varying degrees, deafblindness limits activities and restricts full participation in society. It affects social life, communication, access to information, orientation and the ability to move around freely and safely. To help compensate for the combined vision and hearing impairment, especially the tactile sense becomes important …" (Nordic Centre for Welfare and Social Issues, 2018) The situation of persons with deafblindness has been studied, showing psychological distress, unmet needs and lack of formal support (Bodsworth, Clare, Simblett, & Deafblind, UK 2011). People who become deafblind have been described in terms of interactional powerlessness, vulnerability and struggling hard to adapt in a world that is sometimes perceived as hostile (Schneider, 2006). Stigmatization has been reported in relation to the use of mobility and communication aids (Hersh, 2013b). The situation of persons with deafblindness has also been referred to as a constant ontological insecurity (Danermark & Möller, 2008). Möller (2008) states that the functional limitations in hearing and vision present in deafblindness lead to a vulnerable situation, due to the difficulties in accessing information and in face-toface interaction with other people. Isolation and social exclusion are a common consequence, as are restrictions in terms of activity and increased risk of physical harm (Möller, 2008).
Despite the challenges described above, a systematic review revealed that persons with deafblindness do not view themselves as permanently vulnerable, but research tends to focus on negative outcomes instead of exploring positive risk taking, coping capacity and resilience (Simcock, 2017). Similarly, Schneider (2006) emphasized that despite their vulnerability, people with deafblindness have a range of strategies to adapt to the challenges inherent in their situation (Schneider, 2006). They have also been found to be interested in being involved and contributing to society (Hersh, 2013a). A scoping review revealed that participation among people with deafblindness is due to the interaction of personal and environmental factors, but called for studies focusing on lived experiences in order to increase understanding and improve services to enhance participation (Jaiswal, Aldersey, Wittich, Mirza, & Finlayson, 2018).
The importance of understanding deafblindness from a process oriented perspective has been emphasized through a life adjustment model (Gullacksen, Göransson, Rönnblom, Koppen, & Rud Jörgensen, 2011). There, the progressive course that is common when living with deafblindness is described as an ongoing process, in which personal and social changes to handle the progression of hearing and vision loss are emphasized (Gullacksen et al., 2011).
Among the syndromes leading to deafblindness, Usher syndrome (USH) is the most common (Möller, 2003). USH is an autosomal recessive inheritance disorder that affects hearing, vision and in some cases vestibular function (Möller, 2003). The estimated prevalence of USH is 6 per 100,000 individuals (Kimberling & Möller, 2013). It can be divided into three clinical subgroups, USH 1-3 (Millan et al., 2011). The clinical sub-groups differ in degree of hearing loss, vision and balance problems and to date, 13 genes have been detected in USH (Mathur & Yang, 2015).
Most studies of the psychosocial situation of those with USH include participants with great differences in hearing loss, balance problems and vision loss. Some use sign-language and others oral communication (Ellis & Hodges, 2013;Evans, 2017;Högner, 2015). Such diversity could pose a threat to conclusive study results. Nevertheless, depression and loneliness have been shown to be common and strongly related to perceived poor quality of life in persons with USH, where the importance of receiving social support has been emphasized (Dean, Orford, Staines, McGee, & Smith, 2017). High levels of stress are common (Högner, 2015). Ellis and Hodges (2013) interviewed people with USH in the UK and their narratives reveal a situation of ongoing change due to the diagnosis and the constant challenge of trying to create predictability in an unpredictable situation. However, they also emphasize the great diversity within the USH group. The informants in their study described increased dependence and problems with the unequal distribution of deafblind-related services (Ellis & Hodges, 2013). In a thesis by Evans (2017), the lived experiences of people with USH focused on the diagnosis, family relationships during the life span, sense of belonging and experiences of professional support. The study highlights that although USH is not life threatening, it is certainly life altering, affecting the whole family. Deafblind culture and technology could endow a sense of belonging, but Evans also reveals the urgent need for specialized support and increased awareness among professionals and the public.
To the best of our knowledge, no studies on psychosocial aspects have specifically controlled for genetic variations of USH2a. However, a few studies focused on the situation of persons with USH2, revealing poor physical and psychological health with an increased risk of fatigue and suicidal behaviour (Wahlqvist, Möller, Möller, & Danermark, 2013). A study of perceived independence in USH2 showed that the need of support increases with higher age (Damen, Krabbe, Kilsby, & Mylanus, 2005).
To date, most studies on USH have focused on various problems or challenges and to the best of our knowledge, there is no study that exclusively explored the strategies employed by people with USH to manage life. The clinical subgroups of people with USH differ in genetics, hearing and vision progression and there is a need for studies that focus on the specific situation of various clinical and genetic sub-groups of people with USH, for example people with USH2a. Such specific knowledge is important for the planning of rehabilitation interventions and as a foundation for evidencebased support guidelines. From an ethical perspective, it is important that people with deafblindness, in this study represented by people with USH2a, not only are described as vulnerable, but rather as people with competences and resources.
Purpose
The aim of the study was to explore life strategies in people with Usher syndrome type 2a.
Design
The study had a qualitative explorative design (Patton, 2002) focusing on the participants' own perspective of their everyday life. Data were collected through focus group interviews (Kitzinger, 1994), and an inductive qualitative content analysis was employed (Graneheim & Lundman, 2004;Krippendorff, 2012).
Participants and setting
The participants with USH2a were recruited in 2013 from a research register of persons with USH that includes approximately 50% of the total USH 2a population in Sweden. At the time of the study, 58 persons of all ages with a genetically confirmed diagnosis of USH2a had been registered, from whom a purposeful sample was selected. Persons were regarded as eligible if they were of working age (18-65 years) and capable of participating in a focus group using verbal communication.
The sampling strived to achieve heterogeneity in terms of age and sex, as well as geographic distribution. Thirtytwo persons were invited to participate. An invitation letter in large print was sent by post. The decision to only invite people with USH2a was mainly logistic, as the data collection was conducted at a research event for people with USH2a, where the participants also took part in research focusing on cognitive assessments and were offered social activities.
Of the 32 persons invited, 14 agreed to participate, 5 declined, 1 person initially accepted, but was unable to attend the research event and 12 persons did not respond to the invitation. We have no further information about the non-participants. All 14 individuals (4 women and 10 men) who agreed to participate were included in the study. They ranged in age from 20 to 64 years and lived in different parts of Sweden. All participants travelled to take part in the interviews, which were conducted at an audiological research centre in Sweden. Hearing was assessed at the time of the study and all participants met the criterion of moderate to severe hearing loss. All visual assessments were conducted at different low vision clinics at a time close to the data collection. No study-specific assessments were performed and instead, medical reports including the above-mentioned assessments were retrieved after authorization by the participants. Data on age, hearing and vision are presented in Table I.
Data collection
Data were collected by means of focus group interviews. Focus group interviews enable the use of group interaction to elicit data, where the participants are encouraged to talk, ask questions and comment on each other's statements in order to obtain a range of perceptions (Kitzinger, 1994). The interaction between the group members can reveal and develop perceptions and attitudes that would not be found in individual interviews (Jamieson & Williams, 2003).
The participants were divided into three focus groups: One with younger men (21-44 years, n = 5), one with older men (41-61 years, n = 5) and one consisting of women (23-61 years, n = 4). The total sample size limited the variability of the grouping and the three focus groups were based on a balance between homogeneity in sex that could promote open discussion and heterogeneity to allow contrasting opinions (cf. Jamieson & Williams, 2003). Any siblings were separated to make sure that everyone could speak freely and increased confidentiality.
The focus groups were moderated by one of the authors (MW), who made sure that all participants had the vision and hearing prerequisites necessary to participate. The moderator is very familiar with persons with deafblindness and was assisted by a senior researcher experienced in research about deafblindness. The moderator introduced each focus group session by informing about the aim of the study and that she would not direct the interview, only facilitate it with minimal intervention in order to promote the participants' own initiatives in the dialogue (Kitzinger, 1994). Based on the aim of the study the following questions were posed in order to start the focus group discussion: "How do you cope with your life situation and how do you manage difficulties? If you were not successful in dealing with the situation, in what way would you have liked to do things?" Thereafter, the moderator only asked for clarification when something was unclear and facilitated the participants to take turns to contribute to the discussion by verbally indicating when she noticed that someone wanted to say something, thus helping the group to shift attention towards quieter participants. The focus group interviews were held in Swedish oral language and lasted for about two hours with a 20 minute break. They were digitally filmed and audio recorded. During the interviews, all participants used their hearing aids connected to a hearing loop. The focus group interview room was adjusted to reduce noise and had proper illumination. Furthermore, window blinds were used to
Demographics
Table I. Demographics (N = 14). Women, n (%): 4 (29%). Age, mean (SD): 41 y (13 y), range 20–64 y. Hearing loss diagnosis age, mean (SD): 4 y (2 y), range 0.5–10 y. Vision loss diagnosis age, mean (SD): 22 y (9 y), range 7–42 y. Hearing loss PTA4*, mean (SD): 65 dB (9 dB), range 48–80 dB. Visual field category**, median: 4 (<10 degrees), range 2–5. Best corrected visual acuity***, mean (SD): 0.4 (0.3), range 0.05–1.0. *Hearing was assessed by Pure-tone audiometry with calculation of the pure tone average for the frequencies 0.5, 1, 2, and 4 kHz (PTA4). Thresholds were classified from mild to profound hearing loss. **Visual field tests were performed by Goldmann perimetry and categorized into five phenotypes (1-5): category 1 was a normal visual field; category 2 the presence of a partial or complete ring scotoma (the latter either extending or not extending into the periphery); category 3 concentric central field loss with a remaining peripheral island less than one-half of the field circumference; category 4 marked concentric loss <10 degrees; and category 5, no visual field at all (blind) (Grover, Fishman, Anderson, Alexander, & Derlacki, 1997). ***Best corrected visual acuity (VA) (the ability to discriminate details) was measured by Snellen chart-based standard tests, given in decimals, 1.0 indicating normal VA whereas 0.05 is a severely reduced VA (in the US VA ≤ 0.1 is defined as legal blindness). minimize glare. The participants had the opportunity to be assisted by a significant person, guide or interpreter during the interview. One of the participants had an interpreter and another chose to have a family member at hand.
Analysis
The focus group interviews were analysed by qualitative content analysis with the aim of understanding the meaning of the text of the interviews and gaining new insights into the particular phenomenon; i.e., life strategies of persons with deafblindness. An inductive analysis was conducted due to the exploratory design of the study (Krippendorff, 2012). All recordings were transcribed verbatim by an experienced secretary and the transcripts were compared to the recordings. All interviews were listened to and read through several times by the first author in order to obtain a sense of the whole. Thereafter meaning units were identified, condensed and labelled with a code (Graneheim & Lundman, 2004). The codes were also labelled with the individuals' initials in order to follow the statements of each participant during the analysis. A comparison of the codes was made in order to find similarities and differences, after which they were abstracted to manifest categories and sub-categories. The first author was responsible for the analysis in collaboration with the other authors. All authors actively took part in the coding process during research seminars. All categories were evaluated in a reflective interpretative process, where all authors moved back and forth between meaning units, codes and categories. When discrepancies in interpretation occurred, they were discussed until consensus was obtained. In addition to the manifest categories, a more abstract, latent thematic content running through the entire dataset was found, resulting in two sub-themes and one theme (Graneheim & Lundman, 2004). The video recordings were not used in the analysis, but could be used if any ambiguity was raised when analysing the text based on the audio recordings, such as turn taking or if needing to analyse a non-verbal behaviour. No member checking or audit trail was conducted.
The authors represent different professions with extensive clinical and research experience that enabled active reflection that was valuable in the data analysis. ME is a clinical psychologist with 15 years of experience of patients with USH, CM is a professor in audiology with 30 years of clinical experience and research in USH, AAC is an RN and associate professor specialized in qualitative methods who has been active in deafblindness research for several years. Finally, MW is a PhD and social worker with experience of working with people with hearing loss, deafness and deafblindness, as well as experience of research on USH and health.
Ethical considerations
USH 2a is a rare diagnosis and therefore extra consideration of confidentiality is necessary. The recruitment of participants from different parts of Sweden reduced the risk of identification and data that could disclose identities (names, places) were removed from the focusgroup interviews. Prior to the interviews, all participants received verbal and written information about the study, that participation was voluntary and that they had the right to withdraw from the study at any time without giving reasons. All participants signed an informed consent form prior to the focus group interviews.
The study conformed to the criteria of the Helsinki declaration for medical research in human subjects. The study was approved by the Regional Ethics Committee in Uppsala (Nr. 2012/515).
Results
The data analysis resulted in seven categories and a total of 17 sub-categories that are presented below. Most sub-categories were represented by statements in all three focus groups. However, the sub-category Business as usual was only represented by statements of the younger men and the sub-categories Escapism and Hope were only represented by statements in the two groups of men. Besides the manifest content analysis, two latent sub-themes and a superordinate theme emerged. For an overview of the theme, sub-themes, categories and sub-categories see Table II The theme, sub-themes and categories illustrated by quotations will be presented below.
Theme being at the helm
The analysis formed an overarching theme "Being at the helm" that emerged in the abstraction process of working through the manifest content of the categories. The theme encompasses the participants' active, cognitive and emotional striving to be the person in control of life domains that they considered of importance. The theme was divided into two sub-themes Resolving or preventing challenges and Comforting oneself.
Sub-theme: resolving or preventing challenges
The first four categories, Remaining active, Using devices, Using support and Sharing knowledge formed the sub-theme Resolving or preventing challenges, which reflects the participants' ability to manage practical aspects of their life situation.
Remaining active
Being able to maintain an active life was discussed and emphasized by all three groups. The content of this category was divided into business as usual, adapting activities and using memory and attention.
Business as usual was only discussed among the younger men and included not restricting or changing their way of life due to limited vision and hearing. Although the participants were aware of their limitations they had not changed their way of performing activities. The participants valued acting as if there were no restrictions and emphasized that their vision and hearing loss should not change or restrict their ability to be active. One of the participants stated: I just want to be with my friends. I have said from day one that I will refuse to let it affect my life and prevent me from living the life I want. So then I just go with the flow, so to speak. And sometimes one just sits there and can hear nothing because the music is so loud. But at the same time one doesn't want to miss out on anything. (Person 6) However, one of the older participants in the group of young men considered that he had also acted in accordance with the principle of business as usual some years ago, but had gradually changed his mindset and could no longer ignore the fact that the condition sometimes affected his life.
Adapting activities was discussed by all three groups. It entailed practical adaptations, as well as adapting the pace, timing and duration of activities. It often included the use of assistive aids, but most of all a change towards thinking "outside the box". "It is about being able to have the imagination to find new ways, not being blind to the fact that there is more than one way … for example you may not need to hammer in nails when you can use a screw driver." (Person 14) The experiences of the participants were at times contradictory. Some highlighted the fact that the fairly slow progression of their condition had given them time to gradually adapt and find new ways to deal with everyday situations. For others, adaptation was sometimes difficult as small changes in situational conditions could make a great difference: For me it is like a situational disability. In many situations … … I'm just as much a part as everybody else, while in other situations it doesn't work at all. And finding a balance is very difficult. (Person 3) Adaptation of working life was important for several of the participants. One participant had decided to change career when he became aware that his previous profession would not be possible in the long term due to visual problems. For others, re-education had opened up the possibility for a positive change of career, thus increasing their ability to remain active. Others tried to achieve a sustainable working life by reducing assignments or limiting visually demanding elements of duties, for example travelling. Selfemployment also facilitated remaining active: I have chosen my job and can even choose the working hours that suit me. After all, I'm my own boss so I can decide that I want to work exactly those hours. And now I want to work from home for a few days. (Person 3) Devoting a great deal of effort to organizing things in the home enabled an independent and active life at home. 
By organizing things in an individualized way the perception of being isolated and locked up at home was changed to a feeling of being active at home.
Being able to move around safely and independently was important for remaining active and mentioned by all groups. Many of the participants had adapted their way of moving around, being more careful when walking, such as walking slowly or holding someone's shoulder, especially in unfamiliar surroundings or in the dark. Public transport was necessary and functioned well for some of the participants who had given up driving and cycling. Among the younger participants cycling was still possible in the daytime or in summer when visual conditions were more optimal: "But in the summer I cycle as much as I can" (Person 7). One person highlighted the extreme end-points and complexity of the disability, sometimes being able to cycle and sometimes needing to use a cane for mobility. The participants described adapting their way of exercising to activities that were less visually demanding. For example, playing football was replaced by training at a gym. However, sometimes it was still possible to perform demanding activities for fun, but not on a competitive level. Yet another adaptation of activities was remaining in contact with old friends, despite no longer being active in the sport they used to take part in together.
Adaptation of activities was described in terms of being selective in the choice of activities and company in order to save energy. "Then you have sufficient strength to have fun and enjoy yourself. Otherwise you just waste energy to show that you are independent.
you have to use your energy where it is most needed." (Person 2) Adaptation also included optimizing communication during social events. Some of the participants decided to only see a few friends at a time and others refrained from attending noisy parties, but instead choose to attend quiet events. One participant always tried to approach strenuous activities in as relaxed and calm a manner as possible as it had a positive impact on the visual and hearing perception "If you make too great an effort, then your sight and hearing become worse, or at least they do in my case. So if I'm relaxed and yes calm I can also hear and see better." (Person 3)
Using memory and attention
Several participants expressed that their vision and hearing limitations were to some extent compensated by different cognitive strategies. Memorizing where objects were located facilitated finding them, while spatial information such as what a specific room looked like or how an outdoor environment was organized enhanced the ability to move around safely. Increased attention when moving about was also emphasized. "I feel that I have to constantly scan the periphery, because all of a sudden somebody may decide to cross my path and perhaps I'm not aware of it …" (Person 1)
Using devices
This category includes different kinds of devices that the participants described using in their daily life to compensate for vision and hearing problems. The category reveals both the importance of specialized aids and the need to be inventive in using items that are at hand. The devices were divided into accessibility aids and everyday tools.
Accessibility aids were products mainly aimed for use by a person with a disability, for example a white cane, a safety alarm and computer screen reader, as well as visually or auditory accessible signs. All participants mentioned a variety of devices that compensated for their hearing and visual problems, but it was the white cane that gave rise to the most interest in the discussions. The experiences of using the white cane differed; some found it unproblematic, while others considered it more controversial and related to negative emotions. Using it could lead to increased security and at the same time be something unwanted: "I was mainly thinking about the cane, as sometimes you want it and at other times you don't even want to see it. But you still have it with you and carry it around and that's what makes it difficult." (Person 6) Likewise, one of the participants had experienced that the white cane had an impact on how he was treated by the surrounding environment and facilitated obtaining help from others when necessary. Moreover, if their behaviour deviated from the norm the white cane helped to normalize the situation. When travelling abroad it limited the need to explain the disability in a foreign language. One participant suggested new items that could complement the white cane in situations where it could not be used, e.g., a marked bathing cap for use in a swimming pool. Despite the positive experiences of using the white cane, there were also accounts of not using it in one's own neighbourhood due to risk of meeting an acquaintance who was not aware of the severity of a person's vision problems. Some only used the cane in demanding situations, such as in the dark, when there was a great deal of snow or when using public transport.
Everyday products include any other device that the participants could use to compensate for their vision and hearing loss. Two persons described using an umbrella instead of the white cane to facilitate mobility, not making their visual problems obvious: "But a tip is to carry an umbrella, I usually do if I don't want people to notice" (Person 9) Smartphones and tablets were commonly used by most participants and easily adapted by means of magnification, adjusted fonts and high contrast modes or by adjusting the auditory settings. The tablets and smartphones facilitated communication, access to information and were also used for playing interactive games. Other everyday tools used were increased illumination and larger computer displays.
The limited vision made it difficult to search for small items that were missing and some described using a box to store important items in a designated place at home. All participants were very positive about the use of everyday devices and described that it enabled them to participate in activities, increased their well-being and at the same time made them feel that they could do things in just the same way as anyone else.
Using support This category describes the experiences of using different kinds of support provided by others. The support was both formal, given by different agents in society and informal when received from family, friends and others.
The participants had used the formal support from healthcare providers, including low vision or audiological clinics, and some had also received services from specialized counselling and support teams for persons with deafblindness. Formal support also included doctor's certificates informing authorities about their diagnosis and disability when applying for social services. Although formal support from the social services (for example sick-leave, disability pension, assistive aids and professional guides) was highly valued, it was at times difficult to obtain. The participants thus needed assistance to apply for services, or to lodge an appeal against refusals from various social services. Thus, having regular contact with the healthcare system was regarded as helpful. Such contact also provided the participants with information about the disease, which was valued. To understand genetics for future family planning was regarded as supportive by some. One of the participants had benefited from psychiatric services but urged that it should not be associated with stigma.
To emotionally connect with the professionals providing personal support was described as important by some. One of the participants especially appreciated the dedicated professionals in the specialized counselling and support teams who not only provided services, but also had an interest in the entire person and sometimes just popped in for a chat.
The personal chemistry was more pronounced when receiving personal support outside healthcare situations. One of the participants had good experiences of one guide, but another guide did not work at all. Others had only positive experiences of receiving support and preferred a guide instead of their spouse when going shopping for clothes. One participant described a positive first experience: "The first time I got this guide from to accompany me, it was great fun. I could relax". (Person 4) Formal personal support such as a guide increased independence and safety, thus reducing the exposure to demanding situations or avoidance. I can be independent when I have a guide or interpreter or whatever, then I'm independent. But if I don't have one and want to go shopping then I become dependent on finding some shop assistant who'll help me. And that's when I'm not independent. (Person 3) Receiving support was described as sometimes natural, yet sometimes challenging. For one of the participants, requesting formal support in the form of a guide was perceived as difficult for reasons of integrity. Although needs would be identified, the risk that everyone in the small village would know about them, with the risk of increased stigmatization, prevented this participant from applying. The experience of using a deafblind interpreter could also be situational; it was described as positive at a healthcare visit, but more complex in informal situations, such as at a party where the interpreter was too much in focus for the participant to feel comfortable.
Informal support was support provided by friends or family members. Many of the participants knew that spouses, parents or children were persons who would always stand by their side. The participants had differing experiences of receiving support from friends. The importance of having friends who were aware of the consequences of deafblindness was emphasized. Support, such as holding someone's shoulder, could facilitate orientation in the dark and a friend driving the participant's car increased mobility. For some of the younger participants, going to a club, travelling and taking part in sporting events was natural and facilitated by the company of friends, thus regarded as no big issue.
However, being dependent on help from friends was not always easy. Repeated offers of support could be experienced as intrusive and although the participants were aware of their need for assistance, receiving support reminded them of their lost independence. One participant described the ambivalence towards support as follows: "I feel like a queen sitting there waiting for my husband to bring me the food. You have to try to turn it into something positive. But it is a bit difficult." (Person 4) One strategy for dealing with one's own functional deficits was to search for other persons with USH who could serve as role models. Such a person was good to talk to, as they shared the same experiences. For some, participation in the focus group was the first occasion on which they met others with the same diagnosis and they were very interested in creating a network to share experiences.
Sharing knowledge
The category sharing knowledge reveals an almost never ending need for informing others about USH and its consequences in order to make life easier for themselves. The content was divided into informing in everyday situations and educating professionals.
Informing in everyday situations included sharing their knowledge about their diagnosis or needs to family and friends. All participants stressed the importance of informing, although at times it was frustrating and emotionally demanding, especially when repeated information seemed to make no difference with regard to adapting behaviour or encounters. Others reported having no problem with repeatedly giving a short description of their condition. As the hearing and vision problems were often invisible to those in the participants' surroundings, taking the initiative to share information had become even more important as the condition worsened. One participant remarked: "It is clear that you have to […], as it makes it easier for others to comprehend if told how to handle it. It is not easy for them to understand, because it doesn't show." (Person 3) The experience was that early information could limit the risk of misunderstandings and increase the possibility of a helping hand. However, information was not equally shared with everyone. One of the participants pointed out the importance of being able to differentiate between levels of narratives appropriate for the given situation, duration and proximity: … I have maybe three or four really close friends to whom I can tell everything. So I suppose I have told them … … but you may not bother to tell people who you might meet once a month or so … … It depends on when or where you meet them … But of course I tell those who are closest to me. (Person 5) Openness to the family, especially one's own children, was highlighted, as it made deafblindness uncontroversial and a natural part of family life. The children then learned to adjust their communication. "My youngest daughter usually tells me that here comes somebody called so and so, then there is no problem whatsoever". (Person 4).
However, even children sometimes forget and then more concrete "informative" actions were a helpful strategy: My children have always […], for instance when their friends visit them, do not leave your shoes like that because mum will trip over them. It has always been like that, if they are thrown in a heap, then they will be thrown out the door. (Person 3) Although informing had many benefits, it could take a great deal of energy, something the participants did not always have. In such cases the strategies varied; some stressed the importance of always being polite and informative to everyone. Another participant appreciated the support of a friend who told others about his condition after receiving the relevant information, while yet another participant stressed the importance of not leaving the responsibility for informing to anyone else in order to ensure the accuracy of the information. Another tentative strategy was to ask the local newspaper to conduct an interview and report about what it is like to live with deafblindness, as a means of increasing public awareness. Practicing informing others about USH made it easier to know what to say in different situations and led to increased confidence.
Two of the older participants revealed their experiences of the symbolic effect of the white cane in indicating their situation instead of actually telling people about their visual problems. One of them stated: "I'm using my white cane now. I have started using it more and more this last year. It is my way of telling others that I cannot see … I bring it to make others aware" (Person 11).
Educating professionals
The participants had experiences of meeting professionals who had no, or only very limited knowledge about their condition. The experience of educating those who were assigned to help them was very mixed, where the participants in greatest need of expertise found that the only expert they could rely on was themselves. One of the participants exemplified the never-ending need to educate professionals: "Then after six or twelve months there is a new official. Which means that you have to educate an endless number of people. And it is the same when you meet doctors. It is so bloody hard". (Person 2). The lack of professional continuity within the healthcare system and among the authorities had made one of the participants think about making a power-point presentation to show to each new professional they encountered.
Sub-theme comforting oneself
The categories Appreciating the present, Maintaining a positive image and Alleviating emotional pain represent different sides of the sub-theme Comforting oneself that reflects the emotional aspects of the participants' struggle to manage their life situation.
Appreciating the present
The category appreciating the present includes the participants' way of handling an uncertain future. Regardless of what the future might bring, the participants had different strategies that helped them to focus on their present situation and not be governed by the future. The category encompasses the ability to Seize the moment and the importance of being able to Enjoy meaningful activities Seizing the moment Some of the participants, especially younger persons, stressed the importance of not focusing on the diagnosis and its future consequences, but instead concentrating on positive moments in the present. They described an attitude of taking one thing at the time, handling problems when they occur and not allowing the condition to rule their thoughts. Some hoped that research might come up with a cure in the future, but meanwhile they continued to live a good life. Fulfilling one's wishes instead of postponing them was described as important. One such example was learning to drive while being aware of only being able to drive for a few years, but prioritizing it for as long as possible: "So I feel that you have to do what you are capable of when you are capable of it, then you get the most out of activities you want to do. That's what I believe anyway. "(Person 1) Besides fulfilling wishes, the importance of appreciating small things in life was stressed and also to try to live in the here and now. Small things could be getting up in the morning and seizing the day, enjoying the beautiful weather, a cup of coffee, or a micro pause with a spoon of honey when energy levels were low. Another example was lighting a fire in the fireplace. One participant said: It is like when I share a bath with my daughter, then we light a lot of candles, which she really loves. And then you relax and sort of feel content and in the here and now. Because then you are blissfully unaware of your problems, even though they still exist. 
(Person 2) Enjoying meaningful activities All participants highlighted activities as an important factor for their well-being. For some, working life was of importance both in terms of performance and for the opportunity of job-related social networks. Others stressed the importance of leisure activities for their well-being, such as drawing, painting, reading or playing an instrument. The importance of discovering something new when forced to refrain from familiar leisure activities and the sustainability of the new choices were highlighted. Spending time gardening had generated positive attention from neighbours, which increased their self-esteem. Some of the younger men stressed the importance of exercising or spending time after work having coffee with friends to relax and boost their energy levels. Playing computer games and watching films were also ways to relax or distract themselves from undesired thoughts. One of the participants concluded: One important thing that becomes clear here is that if you manage to do something, whatever it is, it gives you a sense of satisfaction, no matter how nice or unattractive the result turns out. It sort of refreshes me" (person 10)
Maintaining a positive image
The category maintaining a positive image includes aspects such as self-confidence and identity despite the challenges related to the diagnosis. The category was divided into Negotiating who I am, Standing up for myself and Boosting self-confidence.
Negotiating who I am outlines how the participants strived to keep their identity and not be defined by a progressive disease. Strategies for how to keep the initiative, remain independent and accept the consequences of the diagnosis were discussed. The younger men expressed that the hearing impairment was something they had always lived with and accepted, but they did not identify themselves as being deafblind.
My identity … first and foremost I have always had impaired hearing and not deafblindness … I have my interests and yours is music and so on. … I guess it is our identities and our way of spending time together with our friends. Then it is made more difficult by the fact that we have disabilities that are specific to the situation (Person 8) Some of the participants reported that they had no contact with others who have a hearing impairment or deafblindness because they did not want to identify themselves with this group. Those who were more affected by their pronounced visual symptoms had accepted their diagnosis, but not the future deterioration. "There are different kinds of acceptance. I accept the fact that I have Usher 2. But I still find it difficult to accept that my vision will deteriorate" (Person 10) Another aspect of negotiating the self was to show themselves and others that they could manage their everyday life. Some of the participants stated that being able to perform activities as good as, or even better, than anyone else was of importance for how they viewed themselves: "I can manage myself, and then one places extremely high demands on oneself. I mean, I place much higher demands on myself compared to other people." (Person 2) For some, being independent was an important aspect of how they viewed themselves, thus revealing the challenges involved in balancing an increased need of help from others and at the same time trying to remain independent: "Because it is a huge struggle trying to find strategies for living as independently as possible when having a visual impairment. It´s not easy. It's terribly difficult. And then when you have become used to managing on your own it is difficult to know when you can or should ask for help." 
(Person 3) One of the participants who had children highlighted the importance of striving to be a good parent despite the disability: "Of course I have to try to be a good mum, despite my impairments, just like every other mum. I don't want to be less capable than anybody else. I do my best." (Person 4). Other participants planned to have children in the near future and had an ongoing discussion with their partner about how they would be able to take full responsibility for raising children and wanted to be considered a good parent despite their limitations in vision and hearing.
Standing up for myself. The participants encountered a variety of challenging situations where they did not only have to solve problems in a different way to others, but also needed to stand up for themselves.
Standing up for oneself was closely related to the participants' perception of themselves in relation to their diagnosis.
Finding one's own way, not thinking too much about what others think or do and realizing that life does not have to be lived in exactly the same way as everyone else was described as helpful. The participants had to overcome feelings of shame when they encountered persons who pitied them. One participant emphasized not regarding oneself as a victim and was proud of never taking advantage of the diagnosis. Another participant had gradually learned to no longer blame herself for setbacks, but instead to attribute them to circumstances beyond her control and to finally stand up for her need of assistance.
There is no reason to feel ashamed about being in need of some help. I say you must not feel sorry for me … I said it is not me who is the problem, it is my eyesight that is so trying. Because we are just as capable as everybody else, we just have our own prerequisites. (Person 4) Fighting for their right to services and eventually being successful in appeals despite initial rejections was one way of standing up for oneself and one's needs: But it is the lack of knowledge, lack of understanding and the unwillingness that you are up against all the time. And it is really very frustrating when you have to do it yourself, submitting appeals and things like that. (Person 2) This could strengthen the participants in the continuing strive to achieve their own goals. Furthermore, raising one´s voice at the authorities could be a way to cope with the inequalities caused by a lack of competence. However, sometimes situations came to a point where frustration was expressed as: "You just want to tell them to get lost …" (Person 7). However, to only attribute problems to the environment could sometimes delay the process of personal development: What you are saying now, NN, is that it is easier to blame somebody else or some item. That bloody chair, why is it standing in my way? Then you need not confront yourself. You get the aggression out of your system by swearing a bit. But it is part of the process, sort of. I recognise myself in it and have done exactly the same. (Person 2) Boosting self-confidence. For some of the participants the positive self-image could be boosted by external attributes. For example, two of the participants agreed that they felt more confident when they were well dressed: "When I dress up … … when I am going out and wear some nice clothes. It is a feeling that one has managed to make one's life a bit better (Person 13) … I agree with you." (Person 10).
Driving a car was also of symbolic importance and associated with feelings of confidence. Keeping the car or driving license after being obliged to quit driving was important for some: Today, being able to get a driving license is a privilege. … … I have retained mine and use it as an identity card. I suppose I should have handed it in but I keep it for my own sake because it feels good to have it. And it is important for my own self-esteem. (Person 2) Alleviating emotional pain The category Alleviating emotional pain comprises the participants' way of protecting themselves from intrusive thoughts and emotions related to situations in the present or the future. It includes the participants' self-distancing ability, but also encompasses different aspects of escapism and the importance of hope.
Self-distancing, which involves trying to look at oneself, the environment or a situation from a distance and sometimes viewing it from a humoristic angle was a strategy for coping with difficult situations and reducing emotional pain:"For me, laughing is very important, I mean seeing things in a different light … Some situations may not be a laughing matter but you still laugh." (Person 10). One of the participants could see the fun in "having a very close relationship to road signs" due to visual problems, which had resulted in being given a humorous nickname. Humour was also used to distance oneself from being emotionally affected by authorities. One participant told about a humorous episode with an ignorant insurance official who went on asking stupid questions. Instead of getting angry, the participant could laugh at the ignorant behaviour.
Escapism
Both groups of men discussed how they escaped from thinking about deafblindness. Some of the younger participants had developed good strategies for preventing negative thoughts and emotions. They simply avoided situations where deafblindness was in focus and did not spend more time than necessary talking about it. Instead, they looked for friends in order to have fun. Different activities were helpful for distracting them and preventing negative thoughts: "I am also quite good at switching off. … … Then I let go, watch a film or play a TV game or whatever. That's my way of relaxing. I just switch off. (Person 6) Another way to ease pain by means of the strategy of escapism was to avoid old friends in order not to be reminded about their previous life: "It is rather that I distanced myself, not wishing to be in contact with my old friends. They remind me of my previous life, so to speak." (Person 11). Symptoms could also partially be reduced by anxiolytic medication and sleeping pills, and sometimes alcohol was used as means to escape from the negative emotions.
Hope for the future was discussed by both groups of men and included wishes for a possible cure of the disease in the future and preparations to handle or reduce the uncertainty. A younger participant had thought about the risk that he could become blind in the future but hoped that his vision loss would not deteriorate to that extent: "I don't think it will happen to me … … I hope that my health and my body will remain." (Person 7). Trying not to worry about the future was a way of not confronting negative emotions.
Discussion
The aim of the study was to explore life strategies from the perspective of persons with USH 2a. The content analysis resulted in seven categories, where the categories Appreciating the present, Maintaining a positive self-image and Alleviating emotional pain formed an emotion-focused sub-theme, Comforting oneself, while Remaining active, Using devices, Using support and Sharing knowledge constituted a practically-focused sub-theme, Resolving or preventing challenges. During the process of working through the content of the categories a common overarching theme emerged: Being at the helm. Below, the results will be discussed in relation to previous literature and from a theoretical perspective.
One way to discuss the life strategies of people with deaf blindness from a psychological perspective is to apply the psychological flexibility model (Hayes, Strosahl, & Wilson, 2012). Psychological flexibility (PF) embraces a variety of human abilities that facilitate identifying and adapting to situational demands. Behaviour or mind-sets need to shift when social or personal functioning is compromised. Balance in important life domains is maintained by awareness, openness and commitment to behaviours in line with deeply held values (Kashdan & Rottenberg, 2010). PF has been described as a process of being fully in contact with the present moment as a conscious human being and maintaining or changing behaviour in line with one´s own chosen values (Hayes et al., 2012). PF entails six core flexibility/inflexibility processes: • Cognitive defusion: the ability to change one's relation to unwanted thoughts • Acceptance: the ability to embrace private events without experiential avoidance • Being present the ability to be in contact with the present moment • Self as context: the ability to experience events and be able to separate experiences from content • Value: qualities and desires that can be manifested in purposive action • Committed action: An ongoing increase in effective behaviour patterns towards chosen values (Hayes et al., 2012).
The six interrelated core processes form the two aspects of PF, commitment-behaviour activating processes and mindfulness-acceptance processes (Hayes et al., 2012). These aspects are similar to the two subthemes identified in the present study. The first subtheme in our study, Resolving or preventing challenges, has similarities to the commitment and behaviour activating aspect and the second sub-theme Comforting oneself, relates to the latter aspect of mindfulness and acceptance. The two categories Remaining active and Appreciating the present both comprises different, sometimes overlapping, perspectives on the participants' experiences of various activities of importance for the well-being. The participants enjoyed their activities from a goalorientated perspective, for the pleasure of performing and for social well-being. The health related importance of employment has previously been studied in USH2, where employed people exhibited significantly better health than those who had a disability pension (Ehn, Möller, Danermark, & Möller, 2016). The negative effects of long-term sick leave have similarly been shown by Ellis and Hodges (2013). Our participants, especially those who were no longer in paid employment, stressed the importance of meaningful leisure activities. No studies have previously focused on the importance of leisure activities for persons with USH. The psychological flexibility model (Hayes et al., 2012), as mentioned above, stresses the importance of activity and commitment to one's values for growth and well-being. In line with this, our results show that the participants highly valued being active, where maintaining working-life and leisure activities seems to play an important role. To obtain the value of being active, the participants described continuing in line with business as usual on some occasions, but in many cases they needed to adapt the activities. 
Some adaptations can be interpreted as more behaviour oriented, while others are related to the use of memory and attention. Cognitive and behaviour adaptation ability can be interpreted in terms of committed action with an ongoing expansion of effective behaviours (cf. Hayes et al., 2012). The results showed that younger participants often acted in accordance with business as usual, continuing to do things in the same way as always despite having USH, as compared to the somewhat older persons. This difference could partly be explained by the progressive vision loss of USH, where younger people have a less pronounced visual problem and can remain active without any adaptations (Mathur & Yang, 2015).
Being able to appreciate the present rather than focusing on an uncertain future was helpful for the participants. Similar strategies have been shown to be successful among people with USH in Great Britain (Ellis & Hodges, 2013), where interviewees reported handling their diagnosis by focusing on the present. The participants' ability to appreciate concrete experiences in the here and now seems to protect them when managing situations that have been described in other deafblind-related studies as characterized by a lack of predictability associated with ontological insecurity (Danermark & Möller, 2008). Acceptance and mindfulness processes as a means to achieve contact with the present, instead of being occupied by the past or a feared future, have been associated with psychological flexibility and well-being (Hayes et al., 2012).
The use of support entailed aspects such as the complexity of how to accept the support required, and the focus group discussions revealed that using support and sharing knowledge was something the participants gradually learned to handle. The process of finding one's own way included processes of inner negotiation and at times standing up for oneself. The process of using support indicates overlapping psychological flexibility processes of committed action and acceptance but also exemplifies that psychological flexibility is a process that occurs in a social context (cf. Hayes et al., 2012). However, the process of using support was delicate, especially in situations where the participants had finally overcome their own inner conflict about accepting support, only to find the authorities questioning their need for it. Research has shown that lack of support entails a significant risk of loneliness and low quality of life among people with USH (Dean et al., 2017). At the same time, increasing levels of social support predicted high levels of mental health-related quality of life (Dean et al., 2017). The lack of and unequal distribution of support among people with USH is unfortunately one of the most common themes reported in research on persons with USH (e.g., Dean et al., 2017;Ellis & Hodges, 2013;Evans, 2017;Schneider, 2006).
The use of devices was something that the participants discussed. However, it is interesting to note that no one talked about hearing aids, although all fourteen participants were hearing aid users. This might be due to the participants' familiarity with and wellbeing in a stable hearing situation. Devices related to the progressive visual impairment evoked more attention, indicating the participants' need for visual compensation adjustments. The white cane was focused on and some, especially younger participants, limited their use of the cane, or did not use it at all. Sometimes this was because it was not expedient, but on other occasions, it was to avoid potentially negative social consequences. However, older participants who had frequently used the cane revealed only positive experiences. The ambivalence concerning the cane has previously been reported by Ellis and Hodges (2013) and Hersh (2013b), who found that people with deafblindness often perceive the white cane as stigmatizing. The discussion among the participants showed the importance of finding one's own way to approach controversial accessibility aids. Some participants revealed that by using an umbrella they had found a way to avail of the benefits of an accessibility aid without disclosing their visual impairment. Similar "umbrella-compromises" were also found by Ellis and Hodges (2013). The importance of adjustable everyday devices cannot be underestimated, as they not only practically facilitate the participants' lives, but also help them to gradually approach situations with a reduced risk of unwanted emotions and a preserved self-image. This can be interpreted as an elaboration of behaviour activation towards important values, where emotional avoidance still prevents a more pronounced change in behaviour that would call for a higher degree of acceptance (cf. Hayes et al., 2012).
Balancing activity with limited energy was something the participants regarded as important. Wahlqvist (2013) has shown that among people with USH2 fatigue was by far the most reported health problem. The narratives of the participants in our study reveal a process of committed action in terms of selecting activities, which was sometimes facilitated by acceptance processes and restricted by denial or avoidance. This life adjustment process among people with progressive deafblindness has previously been described by Gullacksen et al., (2011). Gullacksen's life adjustment model illustrates the process of initial denial, where people with deafblindness tend to hang on to old strategies. At a certain point in life they have to recognize their situation and thereafter gradually explore and become more rooted in themselves in order to live a good life (Gullacksen et al., 2011). The categories maintaining a positive self-image and alleviating emotional pain indicate a process where the participants try to accept, or sometimes avoid experiencing, their condition. This is done by means of a stepwise inner negotiation and striving to receive recognition from others by standing up for oneself. The process also shows more avoidant sides, i.e., escapism that could be interpreted in terms of psychological inflexibility (cf. Hayes et al., 2012). However, avoidant emotional coping has been shown to be a strategy that at least in a short-term perspective can be regarded as protective when more problem focused strategies are not at hand (Lazarus, 2006). More adaptive strategies such as self-distancing in the form of humour were also revealed. Humour has previously been shown to be an effective way for people with USH to cope with stressful situations (Högner, 2016) and can be interpreted as an example of cognitive defusion as a means of acceptance (cf. Hayes et al., 2012).
Strengths and limitations
The qualitative explorative design of the study was regarded as appropriate in view of the fact that there was no previous research on life strategies among people with USH2a. To the best of our knowledge, this is the first focus group interview study with people with USH2a. The use of focus group interviews enabled interaction between the participants that elicited a range of perceptions. The trustworthiness of the study was enhanced through audio and video recording, as well as professional transcription. Furthermore, the involvement of all authors in the content analysis strengthened the credibility and confirmability (Graneheim & Lundman, 2004). The trustworthiness was increased by all participants having a genetically verified USH2a diagnosis and that the background hearing and vision data were not only self-reported. The group of persons with USH2a is relatively small and ensuring the attendance of persons of different ages and gender from various parts of Sweden was challenging. Thus, one limitation of the study is the small number of women who participated. The ability to assess transferability to other groups of people with deafblindness is enhanced through the rich presentation of results and background data (Graneheim & Lundman, 2004). As USH2a represents about 70-80% of the total population of persons with USH2 and as other genetic forms of USH2 involve similar vision and hearing problems, it is likely that the results are representative of people with USH2.
Conclusion
Our results show that people with USH2a have a variety of life strategies to handle deafblindness-related challenges with a high degree of psychological flexibility. This contradicts the common description of people with deafblindness as vulnerable. Our results show that by Being at the helm the participants are committed agents in a process of striving to live an active life in accordance with their own values.
Clinical implications and future research
The experiences shared by our participants could be used by professionals as an inspiration or starting point to draw attention to important life areas that might need to be addressed in counselling. Furthermore, the findings could be used to identify areas of psychological inflexibility that need attention, but just as important, they could also help people with USH2 to recognize their own personal resources and to emphasize psychological flexibility. The findings underline the importance of being offered early intervention as well as psychological, medical and technical support.
The findings could also form a base for designing research exploring life strategies in other clinical or genetic sub-groups of USH to attain a more comprehensive picture of the experiences of living with USH. Future studies concerning how to support people with USH2 to continue striving towards important values in life are also of significance.
Funding
This study was funded by the Swedish Institute for Disability Research, Örebro University and The Swedish Research Council.
Notes on contributors
Mattias Ehn is active as a clinical psychologist and doctoral student with a PhD project focusing on health-related biopsychosocial factors in adults with Usher syndrome. His special interest is the association between working life and strategies for managing everyday situations.
Agneta Anderzén-Carlsson is an RN and associate professor in healthcare sciences. She currently holds the position of researcher and research supervisor in the Örebro County Region, Sweden and is affiliated to The Swedish Institute for Disability Research at Örebro University.
Claes Möller MD, PhD is professor em at Örebro University. He has been active in deafblind research for more than 30 years. Claes Möller was a member of the research group that localized the first Usher genes. He has been supervisor for 9 doctoral theses on deafblindness and has some 200 scientific publications on ENT, audiology and genetics Moa Wahlqvist, PhD in Disability science with a background as a social worker, is a researcher at the Audiological Research centre, University hospital Örebro, Sweden and an affiliated researcher at the Swedish Institute for Disability Research, School of Health sciences, Örebro University. She also works as a coordinator at the Swedish National Resource Centre for Deafblindness. Her research focuses on health, wellbeing and rehabilitation in a biopsychosocial context for people with deafblindness and their families. |
def maskBySigmas(self, sigma=2):
    """Sigma-clip low flux outliers and drop the masked samples everywhere.

    Flux values more than ``sigma`` standard deviations below the mean are
    treated as bad samples: they are zeroed in ``self.smoothFlux`` and the
    corresponding entries are removed from ``wave``, ``flux``, ``noise``
    and ``sky``. Samples whose flux is already <= 0 are removed as well.

    Parameters
    ----------
    sigma : float, optional
        Clipping threshold in standard deviations below the mean (default 2).

    Side effects
    ------------
    Sets ``self.avgFlux``, ``self.stdFlux``, ``self.smoothFlux`` and
    ``self.mask`` (1-D array of the removed indices), and shortens
    ``self.wave``, ``self.flux``, ``self.noise`` and ``self.sky``.
    """
    self.avgFlux = np.mean(self.flux)
    self.stdFlux = np.std(self.flux)
    # Work on a copy: the previous code aliased self.flux, so zeroing the
    # smoothed array silently clobbered the raw flux array in place.
    self.smoothFlux = self.flux.copy()
    self.smoothFlux[self.smoothFlux <= self.avgFlux - sigma * self.stdFlux] = 0
    # np.where on a 1-D condition returns a 1-tuple; keep only the index array.
    self.mask = np.where(self.smoothFlux <= 0)[0]
    self.wave = np.delete(self.wave, self.mask)
    self.flux = np.delete(self.flux, self.mask)
    self.noise = np.delete(self.noise, self.mask)
    self.sky = np.delete(self.sky, self.mask)
Characterization of Staphylococcus xylosus isolated from broiler chicken barn bioaerosol.
In this study we isolated and characterized Staphylococcus xylosus, a coagulase-negative staphylococcal species considered as commensal and one of the prevalent staphylococcal species found in poultry bioaerosol. Isolates were obtained using air samplers and selective phenylethyl alcohol agar for gram-positive bacteria during 35-d periods at different times of the day. A total of 200 colonies were recovered and after basic biochemical tests were performed, presumptive staphylococci were subsequently identified by API Staph strips. A total of 153 (76.5%) staphylococci were found, among which 84 were S. xylosus (46 and 38 isolated inside and outside, respectively). Biofilm formation was observed in 86.9% of S. xylosus isolates, whereas 79.8% of them showed hemolytic activity. There was a strong correlation (92.5%) between biofilm formation and hemolytic activity. All 84 S. xylosus isolates were susceptible to amikacin, ampicillin/sulbactam, chloramphenicol, ciprofloxacin, gentamycin, kanamycin, linezolid, trimethoprim/sulfamethoxazole, and vancomycin. Resistance to nalidixic acid (86.9%), novobiocin (85.7%), penicillin (70.2%), lincomycin (46.4%), oxacillin (42.9%), ampicillin (27.4%), tetracycline (21.4%), erythromycin (11.9%), bacitracin (10.7%), and streptomycin (2.4%) was observed among the isolates. Resistance to tetracycline, lincomycin, erythromycin, and β-lactam antibiotics was occasionally linked to the tetK, linA, ermB, and blaZ genes, respectively. Random amplification of polymorphic DNA results showed similarity of 15 to 99% between isolates collected outside and inside the barn, indicating genetic diversity of these isolates. Our study indicates that characterization of poultry bioaerosol coagulase-negative staphylococcal species such as S. xylosus is necessary for assessing their safety status for both poultry and humans. |
def validation_lineplot(model, X, y, target):
    """Render a validation chart comparing observed values with predictions.

    Parameters
    ----------
    model : fitted estimator exposing ``predict``
    X : feature matrix handed straight to ``model.predict``
    y : observed series; its ``.values`` are plotted as the "TRUE" line
    target : label used for the y-axis title
    """
    predictions = model.predict(X)
    chart = pygal.Line(
        fill=False,
        height=500,
        width=1000,
        title="[Validation] Model Fit Time Series",
        x_title="Day",
        y_title=f"{target}",
        explicit_size=True,
        show_legend=True,
        legend_at_bottom=False,
    )
    chart.add('TRUE', y.values)
    chart.add("PREDICTION", predictions)
    show(chart)
/**
 * Return the text of the first item in an xQ's collection.
 *
 * Returns a pointer to the string copy on success, or 0 on failure. The
 * caller is responsible for freeing any returned string by calling
 * xmlFree().
 */
xmlChar* xQ_getText(xQ* self) {
    /* An empty context, a null head node, or a childless head node all
       yield a fresh empty string rather than a null pointer. */
    xmlNodePtr first = self->context.size ? self->context.list[0] : 0;

    if (first && first->children)
        return xmlNodeGetContent(first);

    return xmlCharStrdup("");
}
<filename>src/test/java/wise86/socialNetworkKata/repository/onMemory/MemoryFollowingRepositoryTest.java
package wise86.socialNetworkKata.repository.onMemory;
import org.junit.Test;
import wise86.socialNetworkKata.data.User;
import wise86.socialNetworkKata.repository.FollowingRepository;
import java.util.List;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.collection.IsEmptyCollection.empty;
import static org.junit.Assert.assertThat;
import static wise86.socialNetworkKata.testUtil.UserMockUtil.mockUserWithName;
public class MemoryFollowingRepositoryTest {
private static final User USER_A = mockUserWithName("UserA");
private static final User USER_B = mockUserWithName("UserB");
private static final User USER_C = mockUserWithName("UserC");
private FollowingRepository repository = new MemoryFollowingRepository();
@Test
public void aFallingRelationCanBeAddAndRetrieved() {
repository.addFollowingRelation(USER_A, USER_B);
List<User> followedByA = repository.getUsersFollowedBy(USER_A);
assertThat(followedByA, hasSize(1));
assertThat(followedByA, contains(USER_B));
}
@Test
public void anUserCanFollowMoreUsers() {
repository.addFollowingRelation(USER_A, USER_B);
repository.addFollowingRelation(USER_A, USER_C);
List<User> followedByA = repository.getUsersFollowedBy(USER_A);
assertThat(followedByA, hasSize(2));
assertThat(followedByA, contains(USER_B, USER_C));
}
@Test
public void onlyTheFollowerOfTheUserAreReturned() {
repository.addFollowingRelation(USER_A, USER_B);
repository.addFollowingRelation(USER_A, USER_C);
repository.addFollowingRelation(USER_B, USER_C);
repository.addFollowingRelation(USER_C, USER_A);
List<User> followedByB = repository.getUsersFollowedBy(USER_B);
assertThat(followedByB, hasSize(1));
assertThat(followedByB, contains(USER_C));
}
@Test(expected = UnsupportedOperationException.class)
public void theListReturnedIsReadOnly() {
repository.addFollowingRelation(USER_A, USER_B);
repository.getUsersFollowedBy(USER_A).add(USER_C);
}
@Test
public void theListIsEmptyTheUserDoesNotFollowAnyone() {
repository.addFollowingRelation(USER_A, USER_B);
assertThat(repository.getUsersFollowedBy(USER_B), is(empty()));
}
} |
<reponame>fsspec/ipfsspec
import pytest
from ipfsspec.async_ipfs import AsyncIPFSGateway, MultiGateway, AsyncIPFSFileSystem
import aiohttp
# CID of the pinned IPFS test directory that every test below reads from.
TEST_ROOT = "QmW3CrGFuFyF3VH1wvrap4Jend5NRTgtESDjuQ7QhHD5dd"
# Every file under TEST_ROOT stores exactly this payload.
REF_CONTENT = b'ipfsspec test data'
# Names of the files stored under TEST_ROOT.
TEST_FILENAMES = ["default", "multi", "raw", "raw_multi", "write"]
@pytest.fixture
async def session():
    # Shared aiohttp client session; closed automatically at fixture teardown.
    async with aiohttp.ClientSession() as session:
        yield session
@pytest.mark.local_gw
@pytest.mark.parametrize("gw_host", ["http://127.0.0.1:8080"])
@pytest.mark.parametrize("filename", TEST_FILENAMES)
@pytest.mark.asyncio
async def test_different_file_representations(filename, gw_host, session):
    # The same payload is stored in several representations; file_info and cat
    # must agree on size, type and content for each of them.
    gw = AsyncIPFSGateway(gw_host)
    path = TEST_ROOT + "/" + filename
    info = await gw.file_info(path, session)
    assert info["size"] == len(REF_CONTENT)
    assert info["type"] == "file"
    content = await gw.cat(path, session)
    assert content == REF_CONTENT
@pytest.mark.local_gw
@pytest.mark.parametrize("gw_host", ["http://127.0.0.1:8080"])
@pytest.mark.asyncio
async def test_get_cid_of_folder(gw_host, session):
    # file_info on a directory must report the directory's own CID.
    gw = AsyncIPFSGateway(gw_host)
    info = await gw.file_info(TEST_ROOT, session)
    assert info["CID"] == TEST_ROOT
@pytest.mark.local_gw
@pytest.mark.parametrize("gw_hosts", [
    ["http://127.0.0.1:8080"],
    ["http://127.0.0.1:9999", "http://127.0.0.1:8080"],
    ["http://127.0.0.1:8080", "http://127.0.0.1:9999"],
    ["https://ipfs.io", "http://127.0.0.1:8080"],
    ["http://127.0.0.1:8080", "https://ipfs.io"],
])
@pytest.mark.asyncio
async def test_multi_gw_cat(gw_hosts, session):
    # MultiGateway must succeed as long as one gateway in the list is
    # reachable (port 9999 is presumably a dead endpoint — in any order).
    gws = [AsyncIPFSGateway(gw_host) for gw_host in gw_hosts]
    gw = MultiGateway(gws)
    res = await gw.cat(TEST_ROOT + "/default", session)
    assert res == REF_CONTENT
@pytest.mark.asyncio
async def test_ls(event_loop):
    AsyncIPFSFileSystem.clear_instance_cache() # avoid reusing old event loop
    fs = AsyncIPFSFileSystem(asynchronous=True, loop=event_loop)
    # Non-detailed listing returns bare paths; detailed returns dicts with
    # at least "name" and "size" keys.
    res = await fs._ls(TEST_ROOT, detail=False)
    assert res == [TEST_ROOT + fs.sep + fn for fn in TEST_FILENAMES]
    res = await fs._ls(TEST_ROOT, detail=True)
    assert [r["name"] for r in res] == [TEST_ROOT + fs.sep + fn for fn in TEST_FILENAMES]
    assert all([r["size"] == len(REF_CONTENT) for r in res])
@pytest.mark.asyncio
async def test_cat_file(event_loop):
    AsyncIPFSFileSystem.clear_instance_cache() # avoid reusing old event loop
    fs = AsyncIPFSFileSystem(asynchronous=True, loop=event_loop)
    # Whole-file read and byte-range read (start/end are byte offsets).
    res = await fs._cat_file(TEST_ROOT + "/default")
    assert res == REF_CONTENT
    res = await fs._cat_file(TEST_ROOT + "/default", start=3, end=7)
    assert res == REF_CONTENT[3:7]
@pytest.mark.asyncio
async def test_exists(event_loop):
    AsyncIPFSFileSystem.clear_instance_cache() # avoid reusing old event loop
    fs = AsyncIPFSFileSystem(asynchronous=True, loop=event_loop)
    res = await fs._exists(TEST_ROOT + "/default")
    assert res is True
    res = await fs._exists(TEST_ROOT + "/missing")
    assert res is False
    res = await fs._exists("/missing")
    assert res is False
@pytest.mark.asyncio
async def test_isfile(event_loop):
    AsyncIPFSFileSystem.clear_instance_cache() # avoid reusing old event loop
    fs = AsyncIPFSFileSystem(asynchronous=True, loop=event_loop)
    # Directories must not be reported as files.
    res = await fs._isfile(TEST_ROOT + "/default")
    assert res is True
    res = await fs._isfile(TEST_ROOT)
    assert res is False
|
import { Injectable } from '@angular/core'
import { AngularFireStorage } from '@angular/fire/storage'
import { Storage } from '../constants/storage'
@Injectable({
  providedIn: 'root',
})
export class ImageService {
  constructor(private readonly afStorage: AngularFireStorage) {}

  /**
   * Uploads a question image under
   * `<QUESTION_IMAGES>/<quizId>__<timestamp>` with the quiz id attached as
   * custom metadata. Throws when either argument is missing.
   */
  addImage(image: File, quizId: string) {
    if (!image || !quizId) {
      throw new Error('Need non-null parameters to upload an image')
    }
    // TODO: Add security rules to limit upload size and file type
    const uploadedAt = new Date().getTime()
    const storagePath = `${Storage.QUESTION_IMAGES}/${quizId}__${uploadedAt}`
    return this.afStorage.upload(storagePath, image, {
      customMetadata: { quizId: quizId },
    })
  }

  /** Deletes the stored image that the given download URL points at. */
  deleteImage(url: string) {
    if (!url) {
      throw new Error(`'url' is undefined`)
    }
    return this.afStorage.refFromURL(url).delete().toPromise()
  }
}
|
<reponame>mkorman9/spigot-plugin-test
package com.github.mkorman9.spigotplugintest.commands;
import com.github.mkorman9.spigotplugintest.Entrypoint;
import com.github.mkorman9.spigotplugintest.events.PoweroffWhenEmptyEvent;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
/**
 * Command handler that broadcasts a {@link PoweroffWhenEmptyEvent}; the
 * actual shutdown behaviour lives in the listeners of that event.
 */
public class PoweroffWhenEmptyCommand implements CommandExecutor {

    private final Entrypoint plugin;

    public PoweroffWhenEmptyCommand(Entrypoint plugin) {
        this.plugin = plugin;
    }

    @Override
    public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
        // Fire-and-forget: the command always reports success.
        plugin.getServer().getPluginManager().callEvent(new PoweroffWhenEmptyEvent());
        return true;
    }
}
|
# Suffix automaton (SAM)
# The SAM of a string s is the minimal DFA (deterministic finite automaton)
# that accepts exactly the set of suffixes of s.
# https://oi-wiki.org/string/sam/
# TODO: not yet implemented
from typing import Optional
from fastapi import Header, HTTPException
from carbonserver.database.database import SessionLocal
async def get_token_header(
    x_token: Optional[str] = Header(
        "fake-super-secret-token", convert_underscores=False
    )
):
    """FastAPI dependency that validates the ``X-Token`` request header.

    NOTE(review): the header's default value equals the expected token, so a
    request that omits the header entirely also passes — confirm intended.

    Raises:
        HTTPException: 400 when the header value is not the expected token.
    """
    expected = "fake-super-secret-token"
    if x_token != expected:
        raise HTTPException(status_code=400, detail="X-Token header invalid")
async def get_query_token(token: Optional[str] = "jessica"):
    """FastAPI dependency that validates the ``token`` query parameter.

    Raises:
        HTTPException: 400 when the token is not the expected value.
    """
    if token == "jessica":
        return
    raise HTTPException(status_code=400, detail="No Jessica token provided")
# Dependency
def get_db():
    """Yield a database session and guarantee it is closed afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
/*----------------------------------------------------------------
 *
 * Function: OS_TaskCreate_Impl
 *
 * Purpose: Implemented per internal OSAL API
 *          See prototype for argument/return detail
 *
 *-----------------------------------------------------------------*/
int32 OS_TaskCreate_Impl (uint32 task_id, uint32 flags)
{
    rtems_status_code status;
    rtems_name r_name;
    rtems_mode r_mode;
    rtems_attribute r_attributes;

    /* The OSAL-assigned active id doubles as the RTEMS object name. */
    r_name = OS_global_task_table[task_id].active_id;
    r_mode = RTEMS_PREEMPT | RTEMS_NO_ASR | RTEMS_NO_TIMESLICE | RTEMS_INTERRUPT_LEVEL(0);
    r_attributes = RTEMS_LOCAL;
    /* Enable the floating-point context only when the caller requested it. */
    if (flags & OS_FP_ENABLED)
    {
        r_attributes |= RTEMS_FLOATING_POINT;
    }
    /* Create the RTEMS task using the priority/stack recorded in the OSAL
       task table; the resulting RTEMS id is stored in the impl table. */
    status = rtems_task_create(
            r_name,
            OS_task_table[task_id].priority,
            OS_task_table[task_id].stack_size,
            r_mode,
            r_attributes,
            &OS_impl_task_table[task_id].id);
    if (status != RTEMS_SUCCESSFUL )
    {
        OS_printf("rtems_task_create failed: %s\n", rtems_status_text(status));
        return OS_ERROR;
    }
    /* Start the task at the common OSAL entry shim, passing the active id
       so the shim can locate this task's entry point. */
    status = rtems_task_start (OS_impl_task_table[task_id].id,
            (rtems_task_entry) OS_RtemsEntry,
            (rtems_task_argument) OS_global_task_table[task_id].active_id );
    if (status != RTEMS_SUCCESSFUL )
    {
        OS_printf("rtems_task_start failed: %s\n", rtems_status_text(status));
        /* Roll back: delete the just-created task so no orphan remains. */
        rtems_task_delete(OS_impl_task_table[task_id].id);
        return OS_ERROR;
    }
    return OS_SUCCESS;
}
/**
*
* selects the appropriate parsing method
* a tree is returned where each node is a list with the first element being the string representation and the remaining entries being the parsed values (java types or subtrees)
*
* @throws IllegalArgumentException
* value does not conform cachedType, see error message for more
* detail
*/
public static List<Object> parse(final String value, final CachedType cachedType) throws IllegalArgumentException {
if (cachedType instanceof CachedAtomicType) {
return parse(value, (CachedAtomicType) cachedType);
}
if (cachedType instanceof CachedListType) {
return parse(value, (CachedListType) cachedType);
}
if (cachedType instanceof CachedCompoundType) {
return parse(value, (CachedCompoundType) cachedType);
}
return null;
} |
def step(self, action):
    """Advance the environment by one time step.

    The pose moves by ``0.05 * action`` and is clamped to [-1, 1]; the
    episode terminates once ``max_steps`` steps have elapsed.

    Returns:
        tuple: (pose, reward, done, info) where info carries ``rew_area``.
    """
    self.pose = np.clip(self.pose + 0.05 * action, -1, 1)
    self.t += 1
    reward = self.calculate_reward()
    done = self.t == self.max_steps
    return self.pose, reward, done, {'rew_area': self.reward_area}
package com.xxmassdeveloper.mpchartexample;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.view.WindowManager;
import com.github.mikephil.charting.charts.LineChart;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.LineData;
import com.github.mikephil.charting.data.LineDataSet;
import com.xxmassdeveloper.mpchartexample.notimportant.DemoBase;
import java.util.ArrayList;
/**
 * Demo activity rendering four identically shaped line charts, each drawn in
 * white on a differently coloured background.
 */
public class LineChartActivityColored extends DemoBase {
    // One chart view per entry in mColors.
    private LineChart[] mCharts = new LineChart[4];
    // Typeface applied to the charts' value labels.
    private Typeface mTf;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_colored_lines);
        mCharts[0] = (LineChart) findViewById(R.id.chart1);
        mCharts[1] = (LineChart) findViewById(R.id.chart2);
        mCharts[2] = (LineChart) findViewById(R.id.chart3);
        mCharts[3] = (LineChart) findViewById(R.id.chart4);
        mTf = Typeface.createFromAsset(getAssets(), "OpenSans-Bold.ttf");
        // Fill each chart with fresh random data and its own background colour.
        for (int i = 0; i < mCharts.length; i++) {
            LineData data = getData(36, 100);
            data.setValueTypeface(mTf);
            // add some transparency to the color with "& 0x90FFFFFF"
            setupChart(mCharts[i], data, mColors[i % mColors.length]);
        }
    }
    // Background colours for the four charts (green, yellow, blue, red).
    private int[] mColors = new int[] {
            Color.rgb(137, 230, 81),
            Color.rgb(240, 240, 30),
            Color.rgb(89, 199, 250),
            Color.rgb(250, 104, 104)
    };
    /**
     * Applies the shared look-and-feel to one chart: coloured background,
     * hidden axes/legend/description, touch gestures enabled, fixed viewport
     * offsets, and an entry animation.
     */
    private void setupChart(LineChart chart, LineData data, int color) {
        ((LineDataSet) data.getDataSetByIndex(0)).setCircleHoleColor(color);
        // no description text
        chart.getDescription().setEnabled(false);
        // mChart.setDrawHorizontalGrid(false);
        //
        // enable / disable grid background
        chart.setDrawGridBackground(false);
        // chart.getRenderer().getGridPaint().setGridColor(Color.WHITE & 0x70FFFFFF);
        // enable touch gestures
        chart.setTouchEnabled(true);
        // enable scaling and dragging
        chart.setDragEnabled(true);
        chart.setScaleEnabled(true);
        // if disabled, scaling can be done on x- and y-axis separately
        chart.setPinchZoom(false);
        chart.setBackgroundColor(color);
        // set custom chart offsets (automatic offset calculation is hereby disabled)
        chart.setViewPortOffsets(10, 0, 10, 0);
        // add data
        chart.setData(data);
        // get the legend (only possible after setting data)
        Legend l = chart.getLegend();
        l.setEnabled(false);
        chart.getAxisLeft().setEnabled(false);
        chart.getAxisLeft().setSpaceTop(40);
        chart.getAxisLeft().setSpaceBottom(40);
        chart.getAxisRight().setEnabled(false);
        chart.getXAxis().setEnabled(false);
        // animate calls invalidate()...
        chart.animateX(2500);
    }
    /**
     * Builds a dataset of {@code count} random values in [3, range + 3),
     * styled as a white line with white circles and hidden value labels.
     */
    private LineData getData(int count, float range) {
        ArrayList<Entry> yVals = new ArrayList<Entry>();
        for (int i = 0; i < count; i++) {
            float val = (float) (Math.random() * range) + 3;
            yVals.add(new Entry(i, val));
        }
        // create a dataset and give it a type
        LineDataSet set1 = new LineDataSet(yVals, "DataSet 1");
        // set1.setFillAlpha(110);
        // set1.setFillColor(Color.RED);
        set1.setLineWidth(1.75f);
        set1.setCircleRadius(5f);
        set1.setCircleHoleRadius(2.5f);
        set1.setColor(Color.WHITE);
        set1.setCircleColor(Color.WHITE);
        set1.setHighLightColor(Color.WHITE);
        set1.setDrawValues(false);
        // create a data object with the datasets
        LineData data = new LineData(set1);
        return data;
    }
}
|
// Execute prepared statement. If statement requires parameters you may bind
// them first or specify directly. After this command you may use GetRow to
// retrieve data.
func (stmt *Stmt) Run(params ...interface{}) (res mysql.Result, err error) {
defer catchError(&err)
if stmt.my.net_conn == nil {
return nil, mysql.ErrNotConn
}
if stmt.my.unreaded_reply {
return nil, mysql.ErrUnreadedReply
}
if len(params) != 0 {
stmt.Bind(params...)
} else if stmt.param_count != 0 && !stmt.binded {
panic(mysql.ErrBindCount)
}
stmt.sendCmdExec()
r := stmt.my.getResponse()
r.binary = true
res = r
return
} |
/** Object keyed by arbitrary strings whose values are regular expressions. */
export interface StringKeyRegEx {
    [key: string]: RegExp
}
/** Object keyed by arbitrary strings whose values are strings. */
export interface StringKeyString {
    [key: string]: string
}
/** Object keyed by arbitrary strings whose values are numbers. */
export interface StringKeyNumber {
    [key: string]: number
}
|
To celebrate the one year anniversary of Ben Affleck being cast as Batman, I thought I’d share a small Batman v Superman: Dawn of Justice scoop with you. Minor spoilers follow…
Advertisement
Ben Affleck shot a Batman v Superman: Dawn of Justice scene inside the Masonic Temple in Detroit last week, and a source told Batman News exactly what went down.
The Masonic Temple is a reception hall for weddings, parties, banquets, etc. and they transformed a large kitchen into an underground MMA fighting arena and bar. Kind of like in the movie Fight Club. People from all types of backgrounds were down there betting on which fighter would win. And in walks our hero…
Bruce Wayne shows up to bet, which makes people wonder “what’s this billionaire doing here?”. Wayne replies to that by saying “I’m kind of a night person” and ends up having a discussion with someone at the bar.
Now that you’ve had a year to think it over, do you feel any differently about Ben Affleck as Batman? What do you think about the scene above? Sound off in the comments below! |
<filename>src/com/littlechoc/leetcode/algorithms/convert_sorted_list_to_binary_search_tree/Solution.java
package com.littlechoc.leetcode.algorithms.convert_sorted_list_to_binary_search_tree;
import com.littlechoc.leetcode.datastructure.ListNode;
import com.littlechoc.leetcode.datastructure.TreeNode;
/**
 * Problem 109. See in <a href="https://leetcode.com/problems/convert-sorted-list-to-binary-search-tree/">LeetCode</a>
 *
 * Converts a sorted singly linked list into a height-balanced BST by
 * recursively picking the middle node as the subtree root.
 *
 * @author 周俊皓.
 * 2016-07-21 10:53
 */
public class Solution {
    public TreeNode sortedListToBST(ListNode head) {
        if (head == null) {
            return null;
        }
        // Slow/fast pointer scan: "middle" advances one node per iteration
        // while "tail" advances two, leaving "middle" at the list's midpoint
        // and "pre" at the node immediately before it.
        ListNode middle = head;
        ListNode tail = head.next;
        ListNode pre = null;
        if (tail == null) {
            // Single-node list becomes a leaf.
            return new TreeNode(head.val);
        }
        while (tail.next != null && tail.next.next != null) {
            pre = middle;
            middle = middle.next;
            tail = tail.next.next;
        }
        if (tail.next != null) {
            // Even-length remainder: shift the midpoint one step right.
            pre = middle;
            middle = middle.next;
        }
        TreeNode treeNode = new TreeNode(middle.val);
        if (pre != null) {
            // Sever the left half so the recursion sees a terminated list.
            // NOTE: this mutates (consumes) the caller's input list.
            pre.next = null;
            treeNode.left = sortedListToBST(head);
        }
        treeNode.right = sortedListToBST(middle.next);
        return treeNode;
    }
}
|
The complexity of empathy during medical school training: evidence for positive changes
Empathy is an essential aspect of clinical care, associated with improved patient satisfaction, increased adherence to treatment, and fewer malpractice complaints. Previous studies suggest that empathy declines during medical training. However, past research relied on a single narrowly operationalised, self‐report measure of empathy. As empathy is a complex socio‐emotional construct, it is critical to assess changes across its distinct components using multiple measures in order to better understand how it is influenced by medical training. |
/*
 * Copyright (c) 2018 IOTA Stiftung
 * https://github.com/iotaledger/iota.c
 *
 * Refer to the LICENSE file for licensing information
 */
/**
 * @ingroup cclient
 *
 * @{
 *
 * @file
 * @brief CClient service definitions: HTTP endpoint description and the
 *        client service object combining it with a serializer.
 *
 */
#ifndef CCLIENT_SERVICE_H_
#define CCLIENT_SERVICE_H_
#include <stdlib.h>
#include "cclient/serialization/serializer.h"
#include "common/errors.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
 * @brief HTTP request information
 *
 */
typedef struct {
  char const* host; /**< Host name */
  char const* path; /**< The path of HTTP/HTTPS request */
  char const* content_type; /**< Content type of request */
  char const* accept; /**< Accept content type of response */
  char const* ca_pem; /**< String of root ca */
  uint16_t port; /**< Port number of the host*/
  int api_version; /**< Number of IOTA API version */
} http_info_t;
/**
 * @brief client service
 *
 */
typedef struct {
  http_info_t http; /**< The http request information */
  serializer_t serializer; /**< The client serializer */
  serializer_type_t serializer_type; /**< The type of serialization */
} iota_client_service_t;
/**
 * @brief init CClient service
 *
 * @param serv service object
 * @return error code
 */
retcode_t iota_client_service_init(iota_client_service_t* serv);
/**
 * @brief destroy and clean up CClient service
 *
 * @param serv service object
 */
void iota_client_service_destroy(iota_client_service_t* serv);
#ifdef __cplusplus
}
#endif
#endif  // CCLIENT_SERVICE_H_
/** @} */
// Clone returns a deep copy of the tree.
func (t *Tree) Clone() *Tree {
return &Tree{
nonce: t.nonce,
maxDepth: t.maxDepth,
memDepth: t.memDepth,
root: t.root.Clone(),
context: t.context,
factory: t.factory,
}
} |
def custom_data(coco, target, path=data_path+'/sub_sets/split_train_test_val_data/', nr_of_users=2, img_per_user=80,
                super_cat=True, toggle_folder_creation=False, split=False, split_name=""):
    """Create a population of synthetic users over the COCO super-categories.

    For the ``target`` super-category, ``nr_of_users * (categories - 1)``
    users are created; every other super-category gets ``nr_of_users`` users
    with ``target`` passed as their anti-category.

    Returns:
        list: the per-user dicts produced by ``UserSampling.create_user``.
    """
    # Normalise to a trailing slash (empty paths still raise, as before).
    if path[-1] != '/':
        path += '/'
    flush(path)
    sampler = UserSampling(coco=coco, labels_path=path)
    start = time.time()
    created_users = []
    for category in tqdm(COCO_SUPER_CATEGORIES, desc=target):
        if category == target:
            # Over-sample the target so it matches the combined size of all
            # other categories.
            for i in range(nr_of_users * (len(COCO_SUPER_CATEGORIES) - 1)):
                created_users.append(sampler.create_user(
                    name=f"user_{category}_{i}", category=category, super_cat=super_cat,
                    size=img_per_user, toggle_folder_creation=toggle_folder_creation,
                    dump=True, suppress_print=True, split=split, split_name=split_name))
        else:
            for i in range(nr_of_users):
                created_users.append(sampler.create_user(
                    name=f"user_{category}_{i}", category=category, super_cat=super_cat,
                    size=img_per_user, toggle_folder_creation=toggle_folder_creation,
                    dump=True, suppress_print=True, split=split, split_name=split_name,
                    anti_category=target, super_cat_anti=True))
    time_elapsed = time.time() - start
    print('Creation of data complete in {:.0f}m {:.0f}s'.format(
        time_elapsed // 60, time_elapsed % 60))
    return created_users
If "Boston Uprising" turns out like another local team owned by Robert Kraft, it should be a solid competitor.
The Overwatch League, an international electronic sports "ecosystem" managed by Blizzard Entertainment Inc., is made up of 12 teams that will compete for the first time this year. The e-sports entertainment industry is said to have more than 30 million players around the world.
Patriots owner Robert Kraft, chairman and CEO of the Kraft Group, and his son Jonathan Kraft, President of the Kraft Group, co-own the Boston Overwatch League team franchise.
On Wednesday the company unveiled the franchise's team name and logos.
According to a statement from the Kraft Group, the team name "Boston Uprising" was chosen "to represent the resilience of the region's revolutionary forefathers, whose indomitable legacy laid the foundation for the great city of Boston."
The franchise says the blue, yellow and black team colors to represent Boston's official flag. The logo is an emboldened letter "B" to represent Boston.
Team members on Boston Uprising, who will be announced later this week, will be pitted against e-sports teams in Asia, Europe and North America. The Overwatch League's preseason kicks off on Dec. 6, and the regular season will run from Jan. 10, 2018 until June, before playoffs in July.
Robert Kraft said in a statement his company is "eager to use the Boston Uprising to introduce Boston and our franchise" to e-sports viewers around the world.
"We are currently assembling a team that we anticipate will compete for championships, as Boston sports fans have come to expect from all of their professional teams," Kraft added. |
Evaluation of laser radiation regimes at thermal tissue destruction
The existing methods of laser destruction of biotissues, widely spread in surgery and coagulation action, are based on local heat emission in the tissues after light absorption. Here we present the results of the simulation of tissues heat destruction, taking into account the influence of blood and lymph circulation on the processes of heat transfer. The problem is adapted to the case of liver tissue with tumor. A liver is considered as a capillary-porous body with internal blood circulation. Heatconductivity and tissue-blood heat transfer are considered. Heat action is assumed to be implemented with contact laser scalpel. The mathematical model consists of two inhomogeneous nonlinear equations of heatconductivity with spherical symmetry. Nonstationary temperature fields of tissue and blood are determined and the main parameters are: (1) coefficients of heatconductivity and capacitance of blood and tissue, (2) blood and tissue density, (3) total metabolic energy, (4) volume coefficient accounting for heat-exchange between tissue and blood, and (5) blood circulation velocity. The power of laser radiation was taken into account in boundary conditions set for the center of coagulated tissue volume. We also took into account the process connected with changing of substance phase (vaporization). The original computer programs allow one to solve the problem varying in a wide range of the main parameters. Reasonable agreement was found between the calculation results and the experimental data for operations on microsamples and on test animals. It was demonstrated, in particular, that liver tissue coagulation regime is achieved at 10 W laser power during 25 s. The coagulation radius of 0.7 cm with the given tumor radius of 0.5 cm corresponds to the real clinical situation in case of metastasis liver affection. |
<filename>advancedbot/components/storage_managing/storage.py
from abc import ABC, abstractmethod
from .exceptions.storageexception import StorageException
class Storage(ABC):
'''
Base class for storing data
'''
#---------------------------------------------------------------------------
# Getting methods
#---------------------------------------------------------------------------
@abstractmethod
def get_data(self, collection: str, columns: list = [], doc: dict = {}, count: int = 0) -> list:
'''
Getting list of documents from the certain collection
Keyword arguments:
---
collection : str, required
collection name from the storage
columns : list, optional (default [])
list of columns to get values from
'''
pass
@abstractmethod
def get_data_by_column(self, collection: str, by: str, value: str, columns: list = [], count: int = 0) -> list:
'''
Getting list of documents from the certain collection by requested column: value
Keyword arguments:
---
collection : str, required
collection name from the storage
columns : list, optional (default [])
list of columns to get values from
by: str, required
column name to search by
value: str, required
value from the column
'''
pass
#---------------------------------------------------------------------------
# Insertion methods
#---------------------------------------------------------------------------
@abstractmethod
def insert_one_doc(self, collection: str, doc: dict) -> None:
'''
Inserts document to the collection
Keyword arguments:
---
collection : str, required
collection name from the storage
doc : dict, required
document to insert to the storage
'''
pass
@abstractmethod
def insert_many_docs(self, collection: str, docs: list) -> None:
'''
Inserts list of documents to the collection
Keyword arguments:
---
collection : str, required
collection name from the storage
docs : list<dict>, required
listr of documents to insert to the storage
'''
pass
#---------------------------------------------------------------------------
# Removal methods
#---------------------------------------------------------------------------
@abstractmethod
def remove_one_doc_by_column(self, collection: str, column: str, value: str) -> None:
'''
Removes first found document with specified value from the specified column
Keyword arguments:
---
collection : str, required
collection name from the storage
column : str, required
column name
value : str, required
value from the column
'''
pass
@abstractmethod
def remove_one_doc_by_dict(self, collection: str, doc: dict) -> None:
'''
Removes first found document containg values from specified dictionary
Keyword arguments:
---
collection : str, required
collection name from the storage
doc : dict, required
"collection_column_name - value" dictionary
'''
pass
@abstractmethod
def remove_many_docs_by_column(self, collection: str, column: str, value: str):
'''
Removes all documents with specified value from the specified column
Keyword arguments:
---
collection : str, required
collection name from the storage
column : str, required
column name
value : str, required
value from the column
'''
pass
@abstractmethod
def remove_many_docs_by_dict(self, collection: str, doc: dict) -> None:
'''
Removes all documents containg values from specified dictionary
Keyword arguments:
---
collection : str, required
collection name from the storage
doc : dict, required
"collection_column_name - value" dictionary
'''
pass
#---------------------------------------------------------------------------
# Update methods
#---------------------------------------------------------------------------
@abstractmethod
def update_one_doc(self, collection: str, id_column: str, id: str, doc: dict) -> None:
'''
Updates first found doc in certain collection by chosen column by dictionary from args
Keyword arguments:
---
collection : str, required
collection name from the storage
id_column : str, required
name of the id column
id : str, required
id value itself
doc : dict, required
dictionay with values to update for the found doc by id
'''
pass
@abstractmethod
def update_many_docs(self, collection: str, id_column: str, id: str, doc: dict) -> None:
'''
Updates all documents in certain collection by chosen column by dictionary from args
Keyword arguments:
---
collection : str, required
collection name from the storage
id_column : str, required
name of the id column
id : str, required
id value itself
doc : dict, required
dictionay with values to update for every found document by id
'''
pass
|
#include <bits/stdc++.h>
using namespace std;

typedef pair<int, int> pii;

// Reads n (a, b) pairs, sorts them, then runs a greedy left-to-right scan
// and prints the final "last" value.
int main() {  // explicit return type: implicit-int main() is ill-formed C++
    int n;
    scanf("%d", &n);
    vector<pii> arr(n);
    for (int i = 0; i < n; ++i) {
        int a, b;
        scanf("%d %d", &a, &b);
        arr[i] = pii(a, b);
    }
    // std::pair already orders by .first then .second, so the hand-written
    // comparator was redundant and has been dropped.
    sort(arr.begin(), arr.end());
    // NOTE(review): this mirrors the original selection logic exactly; the
    // intent of the "second >= last" condition is undocumented upstream.
    int last = 0;
    for (int i = 0; i < n; ++i) {
        int candidate = arr[i].first;
        if (arr[i].second >= last) candidate = min(candidate, arr[i].second);
        last = candidate;
    }
    cout << last << endl;
}
<gh_stars>0
from pypandoc import convert_text
from archivy.extensions import get_elastic_client
def add_to_index(index, model):
    """Index a dataobj's searchable fields into Elasticsearch.

    No-ops silently when no Elasticsearch client is configured.
    """
    es = get_elastic_client()
    if not es:
        return
    payload = {field: getattr(model, field) for field in model.__searchable__}
    es.index(index=index, id=model.id, body=payload)
def remove_from_index(index, dataobj_id):
    """Delete a document from the index; no-op without an Elasticsearch client."""
    es = get_elastic_client()
    if es:
        es.delete(index=index, id=dataobj_id)
def query_index(index, query):
    """Run a full-text query against ``index`` and return a list of hits.

    Each hit is a dict with keys ``id``, ``title`` and ``highlight``
    (a list of HTML fragments with matches wrapped in a highlighted span).
    Returns [] when the search backend is unavailable.
    """
    es = get_elastic_client()
    if not es:
        return []
    # Match the query against every field; highlight matches in `content`.
    search = es.search(
        index=index,
        body={
            "query": {
                "multi_match": {
                    "query": query,
                    "fields": ["*"],
                    "analyzer": "rebuilt_standard"
                }
            },
            "highlight": {
                "fields": {
                    "content": {
                        "pre_tags": "<span style='background-color: #f6efa6'>",
                        "post_tags": "</span>",
                        "boundary_max_scan": 200,
                        "fragment_size": 0
                    }
                }
            }
        }
    )
    hits = []
    for hit in search["hits"]["hits"]:
        formatted_hit = {"id": hit["_id"], "title": hit["_source"]["title"], "highlight": []}
        if "highlight" in hit:
            # FIXME: find a way to make this less hacky and
            # yet still conserve logical separations
            # hack to make pandoc faster by converting highlights in one go
            # join highlights into string with symbolic separator
            SEPARATOR = "SEPARATOR.m.m.m.m.m.m.m.m.m.SEPARATOR"
            concatenated_highlight = SEPARATOR.join(
                [highlight for highlight in hit["highlight"]["content"]])
            # re split highlights
            formatted_hit["highlight"] = convert_text(concatenated_highlight,
                                                      "html",
                                                      format="md").split(SEPARATOR)
        hits.append(formatted_hit)
    return hits
|
<reponame>christianspecht/tasko-androidclient
package de.christianspecht.tasko.androidclient;
import android.content.Context;
import android.content.Intent;
/**
* Open menu items
*/
public class MenuOpener {
private Context context;
private MessageText message;
public MenuOpener(Context context, MessageText message){
this.context = context;
this.message = message;
}
/**
* Opens the menu item with the given ID
* @param itemId ID of the menu item
* @return True if it exists
*/
public boolean Open(int itemId) {
switch(itemId) {
case R.id.action_newtask:
this.message.Show("New Task");
return true;
case R.id.action_settings:
Intent settingsIntent = new Intent(this.context, SettingsActivity.class);
this.context.startActivity(settingsIntent);
return true;
case R.id.action_about:
Intent aboutIntent = new Intent(this.context, AboutActivity.class);
this.context.startActivity(aboutIntent);
return true;
}
return false;
}
}
|
/*
 * Clean up a cache manager call.
 */
static void afs_cm_destructor(struct afs_call *call)
{
	kfree(call->buffer);
	/* Clear the pointer so any later cleanup cannot double-free it. */
	call->buffer = NULL;
}
package br.com.ead.curso.dtos;
import br.com.ead.curso.models.UsuarioModel;
import org.springframework.beans.BeanUtils;
import java.util.UUID;
/**
 * DTO carrying user event data. Field names mirror {@code UsuarioModel}
 * so {@link BeanUtils#copyProperties} can map them directly.
 */
public class UsuarioEventDTO {

    private Long idUsuario;
    private UUID idExterno;
    private String nomeCompleto;
    private String cpf;
    private String email;
    private String telefone;
    private String usuario;
    private String imagemUrl;
    private String tipo;
    private String situacao;
    private String tipoOperacao;

    public UsuarioEventDTO() {
    }

    /**
     * Copies every matching property of this DTO into a fresh
     * {@link UsuarioModel} instance.
     */
    public UsuarioModel convertToUsuarioModel() {
        UsuarioModel usuarioModel = new UsuarioModel();
        BeanUtils.copyProperties(this, usuarioModel);
        return usuarioModel;
    }

    public Long getIdUsuario() {
        return this.idUsuario;
    }

    public void setIdUsuario(Long idUsuario) {
        this.idUsuario = idUsuario;
    }

    public UUID getIdExterno() {
        return this.idExterno;
    }

    public void setIdExterno(UUID idExterno) {
        this.idExterno = idExterno;
    }

    public String getNomeCompleto() {
        return this.nomeCompleto;
    }

    public void setNomeCompleto(String nomeCompleto) {
        this.nomeCompleto = nomeCompleto;
    }

    public String getCpf() {
        return this.cpf;
    }

    public void setCpf(String cpf) {
        this.cpf = cpf;
    }

    public String getEmail() {
        return this.email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getTelefone() {
        return this.telefone;
    }

    public void setTelefone(String telefone) {
        this.telefone = telefone;
    }

    public String getUsuario() {
        return this.usuario;
    }

    public void setUsuario(String usuario) {
        this.usuario = usuario;
    }

    public String getImagemUrl() {
        return this.imagemUrl;
    }

    public void setImagemUrl(String imagemUrl) {
        this.imagemUrl = imagemUrl;
    }

    public String getTipo() {
        return this.tipo;
    }

    public void setTipo(String tipo) {
        this.tipo = tipo;
    }

    public String getSituacao() {
        return this.situacao;
    }

    public void setSituacao(String situacao) {
        this.situacao = situacao;
    }

    public String getTipoOperacao() {
        return this.tipoOperacao;
    }

    public void setTipoOperacao(String tipoOperacao) {
        this.tipoOperacao = tipoOperacao;
    }
}
|
package gl_tls
import (
	"crypto/tls"
	"crypto/x509"
	"fmt"
	"io"
	go_http "net/http"
	go_time "time"
)
// tlsVerifier bundles TLS certificate verification helpers.
// It carries no state, so a single instance can be shared freely.
type tlsVerifier struct {
}

// NewTLSVerifier returns a ready-to-use tlsVerifier.
func NewTLSVerifier() *tlsVerifier {
	return &tlsVerifier{}
}
// VerifyHostCertificate dials host:port over TLS and checks that the server
// presents a certificate that matches the hostname and has not expired.
// Returns nil on success, or an error describing the first failed check.
func (tv *tlsVerifier) VerifyHostCertificate(host string, port int) error {
	conn, err := tls.Dial("tcp", fmt.Sprintf("%s:%d", host, port), nil)
	if err != nil {
		return fmt.Errorf("server does not support tls certificate: %s", err.Error())
	}
	// FIX: the connection was previously leaked; close it on every path.
	defer conn.Close()

	if err := conn.VerifyHostname(host); err != nil {
		return fmt.Errorf("hostname does not match with the certificate: %s", err.Error())
	}

	// FIX: guard against an empty certificate chain before indexing.
	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		return fmt.Errorf("server presented no certificates")
	}
	expiry := certs[0].NotAfter
	if go_time.Now().UTC().After(expiry) {
		certExpiryInfo := fmt.Sprintf("Issuer: %s\nExpiry: %v\n", certs[0].Issuer, expiry.Format(go_time.RFC850))
		return fmt.Errorf("certificate is expired:\n%s", certExpiryInfo)
	}
	return nil
}
// VerifySelfSignedCertificate performs a GET against hostAddr using a client
// that trusts the system roots plus the PEM certificate in certPem; it fails
// when the TLS handshake (or the request itself) fails.
func (tv *tlsVerifier) VerifySelfSignedCertificate(hostAddr string, certPem []byte) error {
	rootCAs, _ := x509.SystemCertPool()
	if rootCAs == nil {
		// No system pool available (e.g. restricted platform); start empty.
		rootCAs = x509.NewCertPool()
	}
	if ok := rootCAs.AppendCertsFromPEM(certPem); !ok {
		return fmt.Errorf("unable to append certificate")
	}
	config := &tls.Config{
		RootCAs: rootCAs,
	}
	transport := &go_http.Transport{TLSClientConfig: config}
	httpCli := &go_http.Client{Transport: transport}
	// FIX: the request-construction error was previously discarded.
	req, err := go_http.NewRequest(go_http.MethodGet, hostAddr, nil)
	if err != nil {
		return fmt.Errorf("building request for %s failed: %s", hostAddr, err.Error())
	}
	resp, err := httpCli.Do(req)
	if err != nil {
		return fmt.Errorf("call to %s failed: %s", hostAddr, err.Error())
	}
	// FIX: the response body was previously leaked; drain and close it so
	// the underlying connection can be reused.
	defer resp.Body.Close()
	_, _ = io.Copy(io.Discard, resp.Body)
	return nil
}
|
/**
 * A wrapper to call the Site API.
 */
public class ITSiteApi {
    // Retrofit service implementing the HTTP endpoint declarations below.
    private Service mService;

    // Builds the Retrofit-backed service from the shared retrofit instance.
    public ITSiteApi(Context context) {
        mService = ITRetrofitUtils.getRetrofitInstance(context).create(Service.class);
    }

    /**
     * Retrieves the site with the given reference.
     *
     * @param siteRef the site reference
     */
    public void get(String siteRef, ITApiCallback<ITSite> callback) {
        mService.get(siteRef, Locale.getDefault().getLanguage()).enqueue(new ProxyCallback<>(callback));
    }

    /**
     * Retrieves the sites of the user's domain.
     *
     * @param page the results are paginated; specify the page you want to load (page index starts at 1)
     */
    public void get(int page, ITApiCallback<ITSiteList> callback) {
        mService.get(null, page, Service.MAX_COUNT, Locale.getDefault().getLanguage()).enqueue(new ProxyCallback<>(callback));
    }

    /**
     * Retrieves the sites of the user's domain, matching the given address criteria.
     * NOTE(review): requests page 1 with count 0 — presumably "server default
     * page size"; confirm against the API contract.
     *
     * @param postalCode the postal code of the searched city
     * @param city the name of the searched city
     * @param countryCode the code of the searched country (format: "FR")
     */
    public void getByAddress(String postalCode, String city, String countryCode, ITApiCallback<ITSiteList> callback) {
        mService.getByAddress(postalCode, city, countryCode, 1, 0, Locale.getDefault().getLanguage()).enqueue(new ProxyCallback<>(callback));
    }

    /**
     * Retrieves the sites of the user's domain, matching the given query.
     *
     * @param query a query string to filter the results
     */
    public void search(String query, ITApiCallback<ITSiteList> callback) {
        mService.get(query, 1, 0, Locale.getDefault().getLanguage()).enqueue(new ProxyCallback<>(callback));
    }

    /**
     * Retrieves the IDs of the sites around the given location.
     *
     * @param lat latitude
     * @param lng longitude
     * @param maxDistance search the sites in this maximum distance around the given location, in meters
     */
    public void getSiteIdsAroundLocation(double lat, double lng, int maxDistance, ITApiCallback<List<String>> callback) {
        mService.getSiteIdsAroundLocation(lat, lng, maxDistance).enqueue(new ProxyCallback<>(callback));
    }

    /**
     * Retrieves the sites around the given location.
     *
     * @param lat latitude
     * @param lng longitude
     * @param maxDistance search the sites in this maximum distance around the given location, in meters
     */
    public void getSitesAroundLocation(double lat, double lng, int maxDistance, ITApiCallback<List<ITSite>> callback) {
        mService.getSitesAroundLocation(lat, lng, maxDistance).enqueue(new ProxyCallback<>(callback));
    }

    // Retrofit endpoint declarations for the datahub Site API.
    private interface Service {
        // Page size used when listing the domain's sites.
        int MAX_COUNT = 50;

        @GET("datahub/v1/sites/near?onlyIds=true")
        Call<List<String>> getSiteIdsAroundLocation(@Query("lat") double lat, @Query("lng") double lng, @Query("maxDistance") int maxDistance);

        @GET("datahub/v1/sites/near")
        Call<List<ITSite>> getSitesAroundLocation(@Query("lat") double lat, @Query("lng") double lng, @Query("maxDistance") int maxDistance);

        @GET("datahub/v1/sites/{externalRef}")
        Call<ITSite> get(@Path("externalRef") String siteRef, @Query("lang") String lang);

        @GET("datahub/v1/sites")
        Call<ITSiteList> get(@Query("query") String query, @Query("page") int page, @Query("countByPage") int count, @Query("lang") String lang);

        @GET("datahub/v1/sites")
        Call<ITSiteList> getByAddress(@Query("filters[zip]") String postalCode, @Query("filters[city]") String city, @Query("filters[country]") String countryCode, @Query("page") int page, @Query("countByPage") int count, @Query("lang") String lang);
    }
}
// Update updates the OauthToken in the database.
func (ot *OauthToken) Update(db XODB) error {
var err error
if !ot._exists {
return errors.New("update failed: does not exist")
}
if ot._deleted {
return errors.New("update failed: marked for deletion")
}
const sqlstr = `UPDATE oauth_tokens SET ` +
`app = ?, type = ?, client_id = ?, client_secret = ?, original_refresh_token = ?, refresh_token = ?, access_token = ?, expires_at = ?, created_at = ?, updated_at = ?, code_exchange_response_body = ?` +
` WHERE id = ?`
XOLog(sqlstr, ot.App, ot.Type, ot.ClientID, ot.ClientSecret, ot.OriginalRefreshToken, ot.RefreshToken, ot.AccessToken, ot.ExpiresAt, ot.CreatedAt, ot.UpdatedAt, ot.CodeExchangeResponseBody, ot.ID)
_, err = db.Exec(sqlstr, ot.App, ot.Type, ot.ClientID, ot.ClientSecret, ot.OriginalRefreshToken, ot.RefreshToken, ot.AccessToken, ot.ExpiresAt, ot.CreatedAt, ot.UpdatedAt, ot.CodeExchangeResponseBody, ot.ID)
return err
} |
<reponame>hir12111/pvAccessCPP
#ifndef SECURITYIMPL_H
#define SECURITYIMPL_H
#include <pv/remote.h>
#include "security.h"
namespace epics {
namespace pvAccess {
// Response handler for authentication/authorization (authNZ) messages
// received by a pvAccess context.
class AuthNZHandler :
    public ResponseHandler
{
    EPICS_NOT_COPYABLE(AuthNZHandler)
public:
    AuthNZHandler(Context* context) :
        ResponseHandler(context, "authNZ message")
    {
    }

    virtual ~AuthNZHandler() {}

    // Processes one incoming authNZ message from the wire; declared here,
    // defined in the corresponding implementation file.
    virtual void handleResponse(osiSockAddr* responseFrom,
                                Transport::shared_pointer const & transport,
                                epics::pvData::int8 version,
                                epics::pvData::int8 command,
                                size_t payloadSize,
                                epics::pvData::ByteBuffer* payloadBuffer);
};
}}
#endif // SECURITYIMPL_H
|
<gh_stars>0
import * as vscode from 'vscode';
import { TreeItem } from 'vscode';
import * as path from 'path';
const reach_icon = path.join(__filename, '..', '..', '..', 'images', 'reach-icon.svg');
const reach_icon_red = path.join(__filename, '..', '..', '..', 'images', 'reach-icon-red.svg');
const discord_icon = path.join(__filename, '..', '..', '..', 'images', 'discord-icon-small.png');
const github_icon = path.join(__filename, '..', '..', '..', 'images', 'github-icon-red.png');
const gist_icon = path.join(__filename, '..', '..', '..', 'images', 'github-icon-blue.png');
// Convenience wrapper: builds a tree item whose visible title is the same
// string as its command label.
const makeTreeItem = (label, command, icon = reach_icon) =>
    makeLabeledTreeItem(label, label, command, icon);
// Builds a TreeItem that shows `title`, runs `command` when activated, and
// uses `icon` for both the light and dark themes.
const makeLabeledTreeItem = (label: string, title: string, command: string, icon = reach_icon) => {
    const item: TreeItem = new TreeItem(title, 0);
    item.command = { command, title: label, arguments: [] };
    item.iconPath = {
        light: vscode.Uri.parse(icon),
        dark: vscode.Uri.parse(icon)
    };
    return item;
};
// Command metadata loaded once at module load time.
const COMMANDS = require('../../data/commands.json');

// Items for the three tree views, populated by the loop below.
const COMMANDS_TREE_DATA = [];
const HELP_TREE_DATA = [];
const DOCUMENTATION_TREE_DATA = [];

// Partition each command entry into exactly one of the three views,
// based on which *TreeDataProvider flag it carries.
COMMANDS.forEach(commandObject => {
    // Extract all the properties we might need from
    // the parameter.
    const {
        label, title, command,
        commandsTreeDataProvider,
        helpTreeDataProvider,
        documentationTreeDataProvider
    } = commandObject;

    if (commandsTreeDataProvider) {
        COMMANDS_TREE_DATA.push(
            makeTreeItem(label, command)
        );
    }
    else if (helpTreeDataProvider) {
        // NOTE(review): when no case below matches, the raw error string is
        // passed through as the icon path and will render as a broken icon —
        // presumably intentional as a visible sentinel; confirm.
        let icon = `Error: Icon needed for ${label}`;
        switch (label) {
            case 'discord':
                icon = discord_icon;
                break;
            case 'gist':
                icon = gist_icon;
                break;
            case 'issue':
                icon = github_icon
                break;
        };
        HELP_TREE_DATA.push(
            makeLabeledTreeItem(
                label, title, command, icon
            )
        );
    }
    else if (documentationTreeDataProvider) {
        // All documentation tree items will have the
        // same icon.
        DOCUMENTATION_TREE_DATA.push(
            makeLabeledTreeItem(
                label, title, command, reach_icon_red
            )
        );
    }
});
// Static provider for the Commands view: serves the flat list of items
// built at module load.
export class CommandsTreeDataProvider implements vscode.TreeDataProvider<TreeItem> {
    data: vscode.TreeItem[] = COMMANDS_TREE_DATA;

    getTreeItem(element: TreeItem) {
        return element;
    }

    getChildren(_?: TreeItem | undefined) {
        return this.data;
    }
}
// Static provider for the Help view: serves the flat list of items built
// at module load.
export class HelpTreeDataProvider implements vscode.TreeDataProvider<TreeItem> {
    data: vscode.TreeItem[] = HELP_TREE_DATA;

    getTreeItem(element: TreeItem) {
        return element;
    }

    getChildren(_?: TreeItem | undefined) {
        return this.data;
    }
}
export class DocumentationTreeDataProvider implements vscode.TreeDataProvider<TreeItem> {
data: vscode.TreeItem[];
constructor() {
this.data = DOCUMENTATION_TREE_DATA;
}
getTreeItem(element: TreeItem) {
return element;
}
getChildren(_?: TreeItem|undefined) {
return this.data;
}
} |
// String-valued action identifiers; each member's value equals its name,
// so members can be used directly as action `type` strings.
export enum ActionTypes {
    SET_TOKEN = "SET_TOKEN",
    SET_USERID = "SET_USERID"
}
|
<filename>pkg/fanal/handler/gomod/gomod_test.go
package gomod
import (
"context"
"testing"
"golang.org/x/exp/slices"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
// Test_gomodMergeHook_Hook verifies the hook's merge behaviour: when the
// go.mod analysis already carries indirect dependencies ("Go 1.17" case)
// the separate go.sum application is simply dropped; otherwise packages
// that appear only in go.sum are folded into the go.mod application as
// indirect dependencies, and packages already present in go.mod are not
// overwritten. Non-Go applications (e.g. Pipenv) pass through untouched.
func Test_gomodMergeHook_Hook(t *testing.T) {
	tests := []struct {
		name string
		blob *types.BlobInfo
		want *types.BlobInfo
	}{
		{
			// go.mod lists an indirect dep => go.sum entry is discarded.
			name: "Go 1.17",
			blob: &types.BlobInfo{
				Applications: []types.Application{
					{
						Type: types.Pipenv,
						FilePath: "app/Pipfile.lock",
						Libraries: []types.Package{
							{
								Name: "django",
								Version: "3.1.2",
							},
						},
					},
					{
						Type: types.GoModule,
						FilePath: "/app/go.mod",
						Libraries: []types.Package{
							{
								Name: "github.com/aquasecurity/go-dep-parser",
								Version: "v0.0.0-20220412145205-d0501f906d90",
							},
							{
								Name: "github.com/aws/aws-sdk-go",
								Version: "v1.43.31",
							},
							{
								Name: "gopkg.in/yaml.v2",
								Version: "v2.4.0",
								Indirect: true,
							},
						},
					},
					{
						Type: types.GoModule,
						FilePath: "/app/go.sum",
						Libraries: []types.Package{
							{
								Name: "modernc.org/libc",
								Version: "v0.0.0-20220412145205-d0501f906d90",
							},
						},
					},
				},
			},
			want: &types.BlobInfo{
				Applications: []types.Application{
					{
						Type: types.Pipenv,
						FilePath: "app/Pipfile.lock",
						Libraries: []types.Package{
							{
								Name: "django",
								Version: "3.1.2",
							},
						},
					},
					{
						Type: types.GoModule,
						FilePath: "/app/go.mod",
						Libraries: []types.Package{
							{
								Name: "github.com/aquasecurity/go-dep-parser",
								Version: "v0.0.0-20220412145205-d0501f906d90",
							},
							{
								Name: "github.com/aws/aws-sdk-go",
								Version: "v1.43.31",
							},
							{
								Name: "gopkg.in/yaml.v2",
								Version: "v2.4.0",
								Indirect: true,
							},
						},
					},
				},
			},
		},
		{
			// No indirect deps in go.mod => go.sum-only packages are merged
			// in as indirect; duplicates keep the go.mod version.
			name: "Go 1.16",
			blob: &types.BlobInfo{
				Applications: []types.Application{
					{
						Type: types.GoModule,
						FilePath: "/app/go.mod",
						Libraries: []types.Package{
							{
								Name: "github.com/aquasecurity/go-dep-parser",
								Version: "v0.0.0-20220412145205-d0501f906d90",
							},
							{
								Name: "github.com/aws/aws-sdk-go",
								Version: "v1.43.31",
							},
						},
					},
					{
						Type: types.GoModule,
						FilePath: "/app/go.sum",
						Libraries: []types.Package{
							{
								Name: "modernc.org/libc",
								Version: "v0.0.0-20220412145205-d0501f906d90",
							},
							{
								Name: "github.com/aws/aws-sdk-go",
								Version: "v1.45.0",
							},
						},
					},
				},
			},
			want: &types.BlobInfo{
				Applications: []types.Application{
					{
						Type: types.GoModule,
						FilePath: "/app/go.mod",
						Libraries: []types.Package{
							{
								Name: "github.com/aquasecurity/go-dep-parser",
								Version: "v0.0.0-20220412145205-d0501f906d90",
							},
							{
								Name: "github.com/aws/aws-sdk-go",
								Version: "v1.43.31",
							},
							{
								Name: "modernc.org/libc",
								Version: "v0.0.0-20220412145205-d0501f906d90",
								Indirect: true,
							},
						},
					},
				},
			},
		},
		{
			// Without a go.sum result the go.mod application is unchanged.
			name: "Go 1.16 and go.sum is not found",
			blob: &types.BlobInfo{
				Applications: []types.Application{
					{
						Type: types.GoModule,
						FilePath: "/app/go.mod",
						Libraries: []types.Package{
							{
								Name: "github.com/aquasecurity/go-dep-parser",
								Version: "v0.0.0-20220412145205-d0501f906d90",
							},
							{
								Name: "github.com/aws/aws-sdk-go",
								Version: "v1.43.31",
							},
						},
					},
				},
			},
			want: &types.BlobInfo{
				Applications: []types.Application{
					{
						Type: types.GoModule,
						FilePath: "/app/go.mod",
						Libraries: []types.Package{
							{
								Name: "github.com/aquasecurity/go-dep-parser",
								Version: "v0.0.0-20220412145205-d0501f906d90",
							},
							{
								Name: "github.com/aws/aws-sdk-go",
								Version: "v1.43.31",
							},
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			h := gomodMergeHook{}
			err := h.Handle(context.Background(), nil, tt.blob)
			require.NoError(t, err)
			// Sort libraries by name so the comparison is order-independent.
			for i := range tt.blob.Applications {
				slices.SortFunc(tt.blob.Applications[i].Libraries, func(a, b types.Package) bool {
					return a.Name < b.Name
				})
			}
			assert.Equal(t, tt.want, tt.blob)
		})
	}
}
|
/** toDays correctly converts sample values in different units to days */
public void testToDays() {
    // Step by 100 to keep the runtime low while still sweeping a wide range.
    for (long t = 0; t < 88888; t += 100) {
        assertEquals(t, TimeUnit.DAYS.toDays(t));
        assertEquals(t, TimeUnit.HOURS.toDays(t * 24));
        assertEquals(t, TimeUnit.MINUTES.toDays(t * SECS_IN_MIN * 24));
        assertEquals(t, TimeUnit.SECONDS.toDays(t * SECS_IN_DAY));
        assertEquals(t, TimeUnit.MILLISECONDS.toDays(t * MILLIS_IN_DAY));
        assertEquals(t, TimeUnit.MICROSECONDS.toDays(t * MICROS_IN_DAY));
        assertEquals(t, TimeUnit.NANOSECONDS.toDays(t * NANOS_IN_DAY));
    }
}
// PrefixedNumerics returns the ids formed by appending the integers 1..max
// to prefix (the prefix is used verbatim; include any separator in it).
// Naming starts at '1' not '0'. A max below 1 yields an empty, non-nil slice.
func PrefixedNumerics(prefix string, max int) []string {
	if max < 1 {
		// Preserve the original contract: empty but non-nil result.
		return []string{}
	}
	// Pre-size the slice: the result length is known up front.
	ids := make([]string, 0, max)
	for i := 1; i <= max; i++ {
		ids = append(ids, fmt.Sprintf("%s%d", prefix, i))
	}
	return ids
}
from functools import reduce
import numpy as np
import pandas as pd
from utils import get_country_ts, read_data
def get_mortality_data(country, confirmed, dead):
    """
    Build a two-column frame (Date, <country>) holding the mortality rate
    over time for one country: deaths / confirmed, expressed as a
    percentage and rounded to two decimals. Rows with zero confirmed
    cases are dropped to avoid division by zero.
    """
    ts = get_country_ts(country=country,
                        dataframes=[confirmed, dead],
                        columns=['Confirmed', 'Deaths'])
    ts = ts[ts['Confirmed'] > 0]
    # Same operation order as before: ratio first, then scale, then round.
    rate = ts['Deaths'] / ts['Confirmed']
    ts['Mortality'] = np.round(rate * 100, 2)
    ts = ts[['Date', 'Mortality']]
    ts.columns = ['Date', country]
    return ts
def make_mortality(in_path, out_path):
    """
    Compute the per-country mortality-rate time series from the processed
    data under ``in_path`` and write it to ``out_path``/mortality_rate.csv.
    """
    conf, _, dead = read_data(in_path)
    countries = sorted(set(conf.drop('Date', axis=1).columns))
    frames = [get_mortality_data(country=c, confirmed=conf, dead=dead)
              for c in countries]
    # Successively join all per-country frames on the shared Date column.
    merged = reduce(lambda left, right: pd.merge(left, right, on='Date'), frames)
    merged.to_csv(f'{out_path}/mortality_rate.csv', index=False)
if __name__ == '__main__':
    # Read the processed inputs and write mortality_rate.csv alongside them.
    in_path = './data/processed'
    out_path = './data/processed'
    make_mortality(in_path=in_path,
                   out_path=out_path)
|
<reponame>ploukareas/Design-Patterns
// ˅
#include "behavioral_patterns/visitor/File.h"
#include "behavioral_patterns/visitor/Visitor.h"
#include "behavioral_patterns/visitor/FileSystemElement.h"
// ˄
// Constructs a file node with the given name and size.
// (The "˅"/"˄" markers delimit hand-edited regions in generated code.)
File::File(const string& name, const int size)
    : name(name)
    , size(size)
    // ˅
    // ˄
{
    // ˅
    // ˄
}

File::~File()
{
    // ˅
    // ˄
}

// Visitor entry point: double-dispatches back to the concrete visitor.
void File::accept(Visitor* visitor)
{
    // ˅
    visitor->visit(this);
    // ˄
}

// Returns the file name.
const string File::getName() const
{
    // ˅
    return name;
    // ˄
}

// Returns the file size.
const int File::getSize() const
{
    // ˅
    return size;
    // ˄
}
// ˅
// ˄
|
<reponame>srghma/reflex-platform<filename>haskell-overlays/reflex-packages/dep/ghcjs-dom/ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/RTCIceCandidate.hs
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE JavaScriptFFI #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
module GHCJS.DOM.JSFFI.Generated.RTCIceCandidate
(js_newRTCIceCandidate, newRTCIceCandidate, js_getCandidate,
getCandidate, js_getSdpMid, getSdpMid, getSdpMidUnsafe,
getSdpMidUnchecked, js_getSdpMLineIndex, getSdpMLineIndex,
getSdpMLineIndexUnsafe, getSdpMLineIndexUnchecked,
RTCIceCandidate(..), gTypeRTCIceCandidate)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Generated FFI bindings: each js_* foreign import wraps a single JS
-- property access or constructor call; the paired Haskell functions lift
-- the call into MonadIO and translate nullable results.
foreign import javascript safe
        "new window[\"RTCIceCandidate\"]($1)" js_newRTCIceCandidate ::
        RTCIceCandidateInit -> IO RTCIceCandidate

-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate Mozilla RTCIceCandidate documentation>
newRTCIceCandidate ::
                   (MonadIO m) => RTCIceCandidateInit -> m RTCIceCandidate
newRTCIceCandidate candidateInitDict
  = liftIO (js_newRTCIceCandidate candidateInitDict)

foreign import javascript unsafe "$1[\"candidate\"]"
        js_getCandidate :: RTCIceCandidate -> IO JSString

-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate.candidate Mozilla RTCIceCandidate.candidate documentation>
getCandidate ::
             (MonadIO m, FromJSString result) => RTCIceCandidate -> m result
getCandidate self
  = liftIO (fromJSString <$> (js_getCandidate self))

foreign import javascript unsafe "$1[\"sdpMid\"]" js_getSdpMid ::
        RTCIceCandidate -> IO (Nullable JSString)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate.sdpMid Mozilla RTCIceCandidate.sdpMid documentation>
getSdpMid ::
          (MonadIO m, FromJSString result) =>
            RTCIceCandidate -> m (Maybe result)
getSdpMid self = liftIO (fromMaybeJSString <$> (js_getSdpMid self))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate.sdpMid Mozilla RTCIceCandidate.sdpMid documentation>
getSdpMidUnsafe ::
                (MonadIO m, HasCallStack, FromJSString result) =>
                  RTCIceCandidate -> m result
getSdpMidUnsafe self
  = liftIO
      ((fromMaybeJSString <$> (js_getSdpMid self)) >>=
         maybe (Prelude.error "Nothing to return") return)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate.sdpMid Mozilla RTCIceCandidate.sdpMid documentation>
getSdpMidUnchecked ::
                   (MonadIO m, FromJSString result) => RTCIceCandidate -> m result
getSdpMidUnchecked self
  = liftIO (fromJust . fromMaybeJSString <$> (js_getSdpMid self))

foreign import javascript unsafe "$1[\"sdpMLineIndex\"]"
        js_getSdpMLineIndex :: RTCIceCandidate -> IO (Nullable Word)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate.sdpMLineIndex Mozilla RTCIceCandidate.sdpMLineIndex documentation>
getSdpMLineIndex ::
                 (MonadIO m) => RTCIceCandidate -> m (Maybe Word)
getSdpMLineIndex self
  = liftIO (nullableToMaybe <$> (js_getSdpMLineIndex self))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate.sdpMLineIndex Mozilla RTCIceCandidate.sdpMLineIndex documentation>
getSdpMLineIndexUnsafe ::
                       (MonadIO m, HasCallStack) => RTCIceCandidate -> m Word
getSdpMLineIndexUnsafe self
  = liftIO
      ((nullableToMaybe <$> (js_getSdpMLineIndex self)) >>=
         maybe (Prelude.error "Nothing to return") return)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidate.sdpMLineIndex Mozilla RTCIceCandidate.sdpMLineIndex documentation>
getSdpMLineIndexUnchecked ::
                          (MonadIO m) => RTCIceCandidate -> m Word
getSdpMLineIndexUnchecked self
  = liftIO
      (fromJust . nullableToMaybe <$> (js_getSdpMLineIndex self))
/**
* UGENE - Integrated Bioinformatics Tools.
* Copyright (C) 2008-2012 UniPro <<EMAIL>>
* http://ugene.unipro.ru
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
#ifndef _U2_REMOTE_MACHINE_H_
#define _U2_REMOTE_MACHINE_H_
#include "ProtocolInfo.h"
#include "Serializable.h"
#include <U2Core/Task.h>
#include <QtCore/QSharedPointer>
namespace U2 {
class RemoteMachineSettings;
typedef QSharedPointer<RemoteMachineSettings> RemoteMachineSettingsPtr;
/*
 * Base class for RemoteMachines. Should be implemented for each transport
 * protocol. Stores all required remote machine info, performs remote
 * requests. All calls report progress/errors through the TaskStateInfo.
 */
class U2REMOTE_EXPORT RemoteMachine {
public:
    virtual ~RemoteMachine();

    // Runs task on remote machine. Returns remote taskId
    virtual qint64 runTask( TaskStateInfo& si, const QString & taskFactoryId, const QVariant & settings) = 0;
    // Cancels remote task
    virtual void cancelTask(TaskStateInfo& si, qint64 taskId ) = 0;

    // Retrieves remote task state
    virtual Task::State getTaskState(TaskStateInfo& si, qint64 taskId) = 0;
    // Retrieves remote task progress [0..100%]
    virtual int getTaskProgress(TaskStateInfo& si, qint64 taskId) = 0;
    // Retrieves remote task result
    virtual void getTaskResult(TaskStateInfo& si, qint64 taskId, const QStringList& resultUrls, const QString& urlPrefix ) = 0;
    // Retrieves remote task error message
    virtual QString getTaskErrorMessage(TaskStateInfo& si, qint64 taskId) = 0;

    // Returns remote machine server name. TODO: refactor
    virtual QString getServerName(TaskStateInfo& si) = 0;
    // Lists active tasks ids on remote machine
    virtual QList<qint64> getActiveTasks(TaskStateInfo& si) = 0;
    // Pings remote machine
    virtual void ping(TaskStateInfo& si) = 0;

    // Returns the settings this machine instance was created with.
    virtual RemoteMachineSettingsPtr getSettings() = 0;
}; // RemoteMachine
/*
 * The type determines how to treat remote machine.
 * In case of remote service the remote tasks are launched via UGENE remote
 * service protocol.
 */
enum RemoteMachineType {
    RemoteMachineType_SimpleMachine,
    RemoteMachineType_RemoteService
};
/*
 * Authentication information. Default-constructed credentials are
 * non-permanent and not yet validated.
 */
struct U2REMOTE_EXPORT UserCredentials {
    UserCredentials() : permanent(false), valid(false) {}
    QString name;   // login name
    QString passwd; // password
    bool permanent; // NOTE(review): presumably "remember across sessions" — confirm
    bool valid;     // NOTE(review): presumably set once credentials were accepted — confirm
};
/*
 * Base class for remote machine settings: protocol id, machine type and
 * user credentials, plus (de)serialization hooks for subclasses.
 */
class U2REMOTE_EXPORT RemoteMachineSettings : public HumanSerializable {
public:
    RemoteMachineSettings( ProtocolInfo * proto, RemoteMachineType type );
    virtual ~RemoteMachineSettings();

    RemoteMachineType getMachineType() const { return machineType; }
    const QString& getProtocolId() const {return protoId;}

    const UserCredentials& getUserCredentials() const { return credentials; }
    // Drops any stored credentials.
    void flushCredentials();
    // Replaces the stored credentials with the given ones.
    void setupCredentials(const QString& userName, const QString& passwd, bool permanent );

    virtual QString serialize() const = 0;
    virtual QString getName() const = 0;
    virtual bool usesGuestAccount() const = 0;
    virtual bool operator==( const RemoteMachineSettings& ) const = 0;

protected:
    UserCredentials credentials;
    QString protoId;
    RemoteMachineType machineType;
}; // RemoteMachineSettings
/*
 * Base class for RemoteMachineFactories: creates machines and settings
 * either from serialized text or from an existing settings object.
 */
class U2REMOTE_EXPORT RemoteMachineFactory {
public:
    virtual ~RemoteMachineFactory();

    virtual RemoteMachine * createInstance( const QString& serializedSettings ) const = 0;
    virtual RemoteMachine * createInstance( const RemoteMachineSettingsPtr& settings ) const = 0;
    virtual RemoteMachineSettingsPtr createSettings( const QString & serializedSettings ) const = 0;
}; // RemoteMachineFactory
} // U2
#endif // U2_REMOTE_MACHINE_H_
|
/**
* Represents the component that manages {@link PojoMapperEndpoint}.
*/
public class PojoMapperComponent extends DefaultComponent {
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
PojoMapperEndpoint endpoint = new PojoMapperEndpoint(uri, this);
setProperties(endpoint, parameters);
validateConfiguration(endpoint);
return endpoint;
}
private void validateConfiguration(PojoMapperEndpoint endpoint) {
if (endpoint.getPojoDirection() == PojoDirection.FROM) {
if (endpoint.getFieldLocation() == FieldLocation.HEADER_PROPERTY || endpoint.getFieldLocation() == FieldLocation.PROPERTY_HEADER) {
throw new UnsupportedOperationException("pojoDirection FROM cannot be used in combination with FieldLocation=" + endpoint.getFieldLocation().name());
}
}
}
} |
Distributed Sensing for High Quality Structural Health Monitoring Using Wireless Sensor Networks
In recent years, using wireless sensor networks (WSNs) for structural health monitoring (SHM) has attracted increasing attention. Traditional centralized SHM algorithms developed by civil engineers can achieve the highest damage detection quality since they have the raw data from all the sensor nodes. However, directly implementing these algorithms in a typical WSN is impractical considering the large amount of data transmission and the extensive computation required. Correspondingly, many SHM algorithms have been tailored for WSNs to become distributed and less complicated. However, the modified algorithms usually cannot achieve the same damage detection quality as their original centralized counterparts. In this paper, we select a classical SHM algorithm, the eigensystem realization algorithm (ERA), and propose a distributed version for WSNs. In this approach, the computations required by the ERA are updated incrementally along a path constructed from the deployed sensor nodes. This distributed version achieves the same quality as the original ERA while requiring far fewer wireless transmissions and much less computation. The efficacy of the proposed approach is demonstrated through both simulation and experiment.
Generic Style Music Preferences of Fifth-Grade Students
The primary intent of this study was to find fifth-graders' most preferred generic music style and identify the critical competitors of that style, if any existed. A short listening test was developed to measure preference for different generic styles of music. Ambient sound was employed as a reference point to anchor the response scale. The test was administered to 278 students of varying socioeconomic status and ethnic background in 11 fifth-grade classrooms in the greater St. Louis area. Test reliability was evaluated in terms of stability of preference responses over time. Naturalistic behavioral observation was employed during test administration to secure a rough confirmation or denial of the truthfulness of student preference responses. Easy-listening pop music was the most preferred generic style and five other generic styles earned preference ratings that would qualify them as critical competitors. An exploratory factor analysis was conducted on preference responses and four factors were obtained and interpreted in an oblique solution. |
def image_required(func):
    """Decorator for views that serve a patch image.

    Selects the content key by ``image_type`` ("0" -> ``patch.content_key``,
    "1" -> ``patch.patched_content_key``), loads it, and redirects to a blank
    placeholder image when the content is missing/empty or when the patch
    filename does not resolve to an ``image/*`` MIME type. On success it
    attaches ``request.mime_type`` and ``request.content`` before calling
    the wrapped view.
    """
    @patch_required
    def image_wrapper(request, image_type, *args, **kwds):
        content_key = None
        content = None
        if image_type == "0":
            content_key = request.patch.content_key
        elif image_type == "1":
            content_key = request.patch.patched_content_key
        if content_key:
            content = content_key.get()
        if not content or not content.data:
            # No stored image data: fall back to the placeholder image.
            return HttpResponseRedirect(django_settings.MEDIA_URL + "blank.jpg")
        request.mime_type = mimetypes.guess_type(request.patch.filename)[0]
        if not request.mime_type or not request.mime_type.startswith('image/'):
            # The patched file is not an image: serve the placeholder.
            return HttpResponseRedirect(django_settings.MEDIA_URL + "blank.jpg")
        request.content = content
        return func(request, *args, **kwds)
    return image_wrapper
<filename>src/configuration/configuration.service.ts
import { Injectable } from '@nestjs/common';
import { ConfigurationInterface } from './interface/configuration.interface';
@Injectable()
export class ConfigurationService {
    /**
     * Builds the runtime configuration for the requested environment.
     *
     * Sets NODE_ENV (which the "config" package uses to pick its config
     * file), evicts the cached "config" module so the new NODE_ENV takes
     * effect, and assembles the server and database settings.
     *
     * @param environment 'production' selects the production config;
     *                    anything else falls back to 'development'.
     * @returns the resolved configuration object.
     */
    async getConfiguration(environment: string): Promise<ConfigurationInterface> {
        process.env.NODE_ENV = environment === 'production' ? 'production' : 'development';
        // The "config" package reads NODE_ENV only at require time, so the
        // cached module must be dropped before re-requiring it.
        delete require.cache[require.resolve('config')];
        const config = require('config');
        const data: ConfigurationInterface = {
            server: {
                port: config.get('server.port')
            },
            db: {
                type: config.get('db.type'),
                port: config.get('db.port'),
                database: config.get('db.database'),
                host: config.get('db.host'),
                username: config.get('db.username'),
                // Restored: this value was a redacted placeholder.
                password: config.get('db.password'),
                synchronize: config.get('db.synchronize')
            }
        };
        return data;
    }
}
|
/**
 * JAXB-serializable configuration for a Hydro Time Series SHEF issue.
 * Holds the internal directory settings plus flags controlling whether the
 * generated product is distributed and whether it is copied to a directory.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Mar 28, 2011            mpduff      Initial creation
 *
 * </pre>
 *
 * @author mpduff
 * @version 1.0
 */
@XmlRootElement(name = "ShefIssue")
@XmlAccessorType(XmlAccessType.NONE)
public class ShefIssueXML implements ISerializableObject {
    // Directory configuration for the issued product.
    @XmlElement(name = "internalDirectory")
    private InternalDirectoryXML internalDirectory;
    // Whether the generated product should be distributed.
    @XmlElement(name = "distributeProduct")
    private boolean distributeProduct;
    // Whether the product is copied to the directory; defaults to true.
    @XmlElement(name = "dirCopy")
    private boolean dirCopy = true;
    /**
     * Returns the internal directory configuration, lazily creating an empty
     * one on first access so callers never receive {@code null}.
     *
     * @return the internalDirectory, never {@code null}
     */
    public InternalDirectoryXML getInternalDirectory() {
        if (internalDirectory == null) {
            internalDirectory = new InternalDirectoryXML();
        }
        return internalDirectory;
    }
    /**
     * @param internalDirectory the internalDirectory to set
     */
    public void setInternalDirectory(InternalDirectoryXML internalDirectory) {
        this.internalDirectory = internalDirectory;
    }
    /**
     * @return the distributeProduct flag
     */
    public boolean isDistributeProduct() {
        return distributeProduct;
    }
    /**
     * @param distributeProduct the distributeProduct to set
     */
    public void setDistributeProduct(boolean distributeProduct) {
        this.distributeProduct = distributeProduct;
    }
    /**
     * @param dirCopy the dirCopy to set
     */
    public void setDirCopy(boolean dirCopy) {
        this.dirCopy = dirCopy;
    }
    /**
     * @return the dirCopy flag (defaults to {@code true})
     */
    public boolean isDirCopy() {
        return dirCopy;
    }
}
import sys
input=sys.stdin.readline
#for _ in range(int(input().strip())):
# Read n (array length), q (query count) and k (value upper bound).
n, q, k = map(int, input().strip().split(" "))
# Array values are answered against 1-based query positions l..r.
a = list(map(int, input().strip().split(" ")))
for _ in range(q):
    l, r = map(int, input().strip().split(" "))
    lo = a[l - 1]
    hi = a[r - 1]
    # Values below lo, values above hi, plus twice the gaps strictly
    # inside [lo, hi] that positions l..r do not cover.
    print((lo - 1) + (k - hi) + 2 * ((hi - lo) - (r - l)))
#!/usr/bin/env python3
import sys
from helpers import *
# Filter a .conllulex corpus down to the sentences whose sent_id appears in
# the given UD split file; the filtered corpus is written to stdout and a
# kept/total summary to stderr.
if len(sys.argv) < 3:
    print("usage: python split.py streusle.conllulex ud_{train, dev, test}_sent_ids.txt", file=sys.stderr)
    sys.exit(1)

wanted_ids = set()
with open(sys.argv[2]) as split_file:
    for raw in split_file:
        raw = raw.strip()
        if not raw:
            continue
        # Lines look like "# sent_id = <id>"; keep just the id part.
        wanted_ids.add(raw.strip("# ").split(" = ")[1])

kept = 0
total = 0
for sent in sentences(sys.argv[1]):
    total += 1
    if sent.meta_dict["sent_id"] not in wanted_ids:
        continue
    for meta_line in sent.meta:
        print(meta_line)
    for token in sent.tokens:
        print(token.orig)
    print()
    kept += 1
print("{}/{}".format(kept, total), file=sys.stderr)
<reponame>dangxuanvuong98/pineapples_harvester<gh_stars>0
import cv2
import os
import xml.etree.ElementTree as ET
from tqdm import tqdm
data_directory_path = '/media/trivu/data/DataScience/ComputerVision/dua/new_data/train'
result_path = '/media/trivu/data/DataScience/ComputerVision/dua/new_data/cropped_pineapple'

# Collect only the Pascal-VOC annotation files from the data directory.
labels_path = [label_path for label_path in os.listdir(data_directory_path)
               if label_path.endswith('.xml')]

_id = 0
for label_path in tqdm(labels_path):
    # Each XML file describes one image and its labelled objects.
    tree = ET.parse(os.path.join(data_directory_path, label_path))
    root = tree.getroot()
    filename = root.find('filename').text
    img_path = os.path.join(data_directory_path, filename)
    if not os.path.exists(img_path):
        continue
    img = cv2.imread(img_path)
    if img is None:
        # cv2.imread returns None (it does not raise) for unreadable or
        # corrupt files; skip them so the slicing below cannot crash.
        continue
    for obj in root.findall('object'):
        obj_name = obj.find('name').text
        # Crop only ripe pineapples (body or full).
        if not obj_name in ['body ripe pineapple', 'full ripe pineapple']:
            continue
        bndbox = obj.find('bndbox')
        xmin = int(bndbox.find('xmin').text)
        ymin = int(bndbox.find('ymin').text)
        ymax = int(bndbox.find('ymax').text)
        xmax = int(bndbox.find('xmax').text)
        # VOC boxes are (xmin, ymin, xmax, ymax); image is indexed [row, col].
        img_cropped = img[ymin:ymax, xmin:xmax]
        _id += 1
        cv2.imwrite(os.path.join(result_path, obj_name + str(_id) + '.jpg'), img_cropped)
Oral Manifestations in SARS-CoV-2 Positive Patients: A Systematic Review
The COVID-19 pandemic has severely affected the human population by revealing many health problems, including within the oral cavity. This systematic review was designed to answer the question "Is there a relationship between oral manifestations and SARS-CoV-2 infection?". Following the inclusion and exclusion criteria, twenty-seven studies were included (according to PRISMA statement guidelines). Based on the meta-analysis, nearly two-thirds of the SARS-CoV-2 positive patients reported oral symptoms, in particular taste alterations, xerostomia and ulcerations (54.73%, 37.58%, and 21.43%, respectively). In conclusion, despite the conducted systematic review, the increased prevalence of oral manifestations in SARS-CoV-2 infection cannot be clearly established due to the possible association of other factors, e.g., individual or environmental factors.
Introduction
Multiple cases of pneumonia of unknown etiology were reported in medical facilities in Wuhan city at the end of 2019. Researchers confirmed that the acute respiratory infection was caused by a novel coronavirus. On 7 January 2020 the Chinese Centre for Disease Control and Prevention (CCDC) identified the causative agent and named its Severe Acute Respiratory Syndrome Coronavirus 2 (SARS-CoV-2) . The disease was named Coronavirus disease 2019 (COVID- 19) by Director-General of The World Health Organization (WHO). The disease quickly spread among many parts of the world . WHO declared the outbreak of a global pandemic of COVID-19 on 11 March 2020 .
SARS-CoV-2 is the seventh known coronavirus able to infect humans and is capable of causing severe infection alongside SARS-CoV and MERS-CoV (Middle East Respiratory Syndrome Coronavirus) . Transmission of the virus is based on direct contact of respiratory droplets with mucous membrane . Studies show that aerosol and airborne methods of transmission of SARS-CoV-2 are a crucial pathway of disease spreading . SARS-CoV-2 RNA (ribonucleic acid) was present in air samples from hospitals in Wuhan city . It is also possible to become infected through contact with contaminated objects .
The asymptomatic incubation period of SARS-CoV-2 was estimated to be between 2.2 and 12.5 days . The most common systemic symptoms of COVID-19 are fever, cough, diarrhea and fatigue . However, some of the patients with a severe course of disease have developed fatal complications such as organ failure, septic shock, pulmonary edema, severe pneumonia, and Acute Respiratory Distress Syndrome (ARDS) . Antiviral agents such as lopinavir or remdesivir are helpful in suppressing the progression of COVID-19. Furthermore, an improvement in survivability was found in patients treated with plasma and hyperimmune immunoglobulins . Social isolation, washing and disinfecting hands, and wearing a mask in public proved effective in avoiding disease and reducing transmission of the virus .
The outbreak of the pandemic also made a huge impact on human mental health. Each individual dealt with various stressors such as fear of death or health complications, worry about one's financial status related to the massive loss of workplaces due to restrictions, and social isolation . People also started to experience more negative emotions such as loneliness, unhappiness, depression, anger, emotional exhaustion and more . Despite all of these changes people adapted well to the situation and seemed to find a way of fulfilling their social and emotional needs .
The COVID-19 pandemic had its challenging outcome on dental care. The shutdown of academic institutions had its impact on the quality of education of dental students and subsequently led to a growth in the number of patients in need of urgent dental care . The transmission of SARS-CoV-2 through aerosol is a main problem during dental treatment, associating it with a high risk of infection. In order to prevent the transmission of the virus, guidelines recommend wearing an N95 mask, protective clothing, eye protector, latex gloves, surgical cap and shoe covers . The increasing number of COVID-19 survivors forced dentists to come out with solutions and find ways to provide patients in need of dental care with a multi-professional approach . The factors discussed above regarding psychological isolation and limited access to dental care may have influenced the occurrence and development of changes in the oral mucosa in the course of SARS-CoV-2 infection.
Our systematic review was designed in order to answer the question: "Is there a relationship between oral manifestations and SARS-CoV-2 infection?"; formulated according to PICO ("Population", "Intervention", "Comparison" and "Outcome").
Search Strategy and Data Extraction
A systematic review was conducted up to 5th March 2022, according to the Preferred Reporting Items for Systematic Reviews and Meta-Analyses (PRISMA) statement guidelines , using the databases PubMed, Scopus and Web of Science. The search formulas included: -For PubMed: COVID AND (oral AND ((manifestation*) OR (lesion*))) -For Scopus: TITLE-ABS-KEY(COVID* AND (oral AND (manifestation* OR lesion*))) -For Web of Science: TS = (COVID* AND (oral AND (manifestation* OR lesion*))).
Records were screened by the title, abstract and full text by two independent investigators. Studies included in this review matched all the predefined criteria according to PICOS ("Population", "Intervention", "Comparison", "Outcomes", and "Study design"), as shown in Table 1. A detailed search flowchart is presented in the Results section. The study protocol was registered in the International prospective register of systematic reviews PROSPERO (CRD42022315223). The results of the meta-analysis are presented in forest plots using MedCalc Statistical Software version 19.5.3 (MedCalc Software Ltd., Ostend, Belgium).
Quality Assessment and Critical Appraisal for the Systematic Review of Included Studies
The risk of bias in each individual study was assessed according to the "Study Quality Assessment Tool" issued by the National Heart, Lung, and Blood Institute within the National Institute of Health . These questionnaires were answered by two independent investigators, and any disagreements were resolved by a discussion between them.
The summarized quality assessment for every single study is reported in Figure 1. The most frequently encountered risks of bias were the absence of data regarding sample size justification (except two studies), and sufficient timeframe (all studies). Critical appraisal was summarized by adding up the points for each criterion of potential risk (points: 1-low, 0.5-unspecified, 0-high). A total of 23 studies (85.2%) were classified as having "good" quality (≥80% total score) and four (14.8%) as "intermediate" (≥60% total score).
Results
Following the search criteria, our systematic review included twenty-seven studies, demonstrating data collected in twelve different countries from a total of 6722 participants with diagnosed SARS-CoV-2 infection (including 2476 females and 2990 males, and 1256 patients without reported gender). Figure 2 shows the detailed selection strategy of the articles. The level of evidence was assessed using the classification of the Oxford Centre for Evidence-Based Medicine levels for diagnosis. All of the included studies have the third or fourth level of evidence (on this 5-graded scale).
Results
Following the search criteria, our systematic review included twenty-seven studies, demonstrating data collected in twelve different countries from a total of 6722 participants with diagnosed SARS-CoV-2 infection (including 2476 females and 2990 males, and 1256 patients without reported gender). Figure 2 shows the detailed selection strategy of the articles. From each eligible study included in the present systematic review, we collected data about its general characteristics, such as the year of publication and setting, involved participants (gender, age, co-morbidities), COVID-19 treatment and hospitalization, inclusion and exclusion criteria, general manifestations, and smoking status (Table 2). Table 3 presents the detailed characteristics considering oral manifestations, including investigation, type, location and frequency of oral lesions. Most studies (involving at least 20 patients) were included in the meta-analysis, the results of which are presented in the forest plots (Figures 3-6).
Discussion
In our discussion, the literature reviewed in regards to oral manifestations of SARS-CoV-2 infection was divided into two subgroups: descriptive studies (surveys, case series) and analytical studies (observational: cross-sectional, prospective or retrospective).
Discussion
In our discussion, the literature reviewed in regards to oral manifestations of SARS-CoV-2 infection were divided into two subgroups: descriptive studies (surveys, case series) and analytical studies (observational: cross-sectional, prospective or retrospective).
Descriptive Studies
A study conducted by Abubakr et al. surveyed Egyptian patients who presented with mild-moderate cases of COVID-19 and suffered from one or more oral manifestations due to the virus. As such, 47.6% of the participants experienced xerostomia, 23% suffered from oral or dental pain, 20.4% reported the appearance of ulcerations in the oral cavity with a significant occurrence in males when compared to females, 12% complained of pain in the jaw bones or joints, and 10.5% of patients felt they had halitosis. Moreover, 28.3% of COVID-19 patients simultaneously experienced two or three of these oral manifestations. This study also examined oral hygiene status among these infected patients. It confirmed that patients with poor oral hygiene had ulcerations and oral/dental pain more often than those with good oral hygiene. The most prevalent oral manifestation was xerostomia, and the least pervasive was halitosis.
Similarly, Biadsee et al. examined a web-based questionnaire distributed to 128 patients with COVID-19 who were quarantining in designated hotels in Israel. The following oral manifestations reported are olfactory dysfunction, taste alterations, dry mouth, facial pain, and masticatory muscle pain. As such, eighty-six patients (67%) had olfactory dys-function, with 19.5% of patients who felt it was heightened from day three to five of the infection. A total of sixty-seven patients (52%) experienced changes in their taste sensation, specifically toward spicy, salty, sour, and sweet foods, and women were impacted more than men. Furthermore, seventy-two patients reported dry mouth, strongly correlated to burning mouth and taste alterations. Facial pain was reported by eighteen patients (26%), and fifteen patients (11%) experienced masticatory muscle pain. Additionally, twenty patients described changes to their tongue sensation, nine patients reported the appearance of plaque on their tongue, and three patients reported oral bleeding. Lastly, oral hygiene did not contribute to these manifestations.
In a study by El Kady et al. , a pilot survey on Google Form was conducted on SARS-CoV-2 positive patients, asking the patients to report any oral manifestations listed in the questionnaire, and four categories were created: 1. Gustatory disorders (loss of salt sensation, loss of sweetness, and altered food taste); 2. Symptoms of salivary gland infection (dry mouth, difficulty swallowing, pain or swelling in the submandibular gland area, and pain or swelling in the parotid gland area); 3. Oral mucosal changes (oral ulcers, spots on mouth or lips, tongue redness, gingival bleeding, and burning sensation); 4. Category for patients with no symptoms related to the oral cavity and salivary gland. The results reported that 67.2% patients had at least one oral manifestation, with the highest prevalence symptom of dry mouth at 39.7%. Other symptoms of gustatory dysfunction were 34.5% loss of salt sensation, 29.3% loss of sweetness and 25.9% altered food taste. In this study, the highest percentage of occurrence of taste alterations among all the reports included in the meta-analysis was found. Symptoms of salivary gland infection included difficulty swallowing (22.4%), pain or swellings in the salivary gland or cheek (13.8%), and pain or swelling below the mandible (10.3%). Oral mucosal changes were less prevalent. Loss of salt and sweetness were most associated (27.6%), and dry mouth and gustatory impairment symptoms were often associated (27.6%). The high prevalence of salivary gland related symptoms highlights the protective role and value of saliva against viruses.
Moreover, in a survey study by Bulut et al. , the possible effects of SARS-CoV-2 on the oral tissues were investigated. A total of 200 volunteers, who survived COVID-19, filled out a questionnaire after routine clinical and radiographic examinations, regarding their demographic characteristics, general health, oral habits and symptoms in the oral cavity. During the active period of SARS-CoV-2 infection the following observations of the oral cavity were recorded; taste loss (53%), halitosis (21%), oropharyngeal wound and pain (18%), pain in temporomandibular joint (17.5%), pain in the chewing muscles (16%), aphthous ulcer (14.5%), sensitivity and/or pain in teeth (12%), herpes labialis (8.5%), and burning in the tongue (7.5%). Xerostomia was observed in 38% of the patients during the active period, and of those, 27.6% continued to have xerostomia after. The issue with halitosis can be due to the wound and pain in the oropharynx region, infection of tonsils and the soft palate leading to the collection of bacteria and fungi.
In contrast, Riad et al. conducted a case series on patients who sought care in their hospital for pain in the tongue and had a confirmed SARS-CoV-2 laboratory test. General symptoms experienced by these patients included only one oral manifestation, which is ageusia. The onset of tongue ulcers was after five days for 53.8% of patients, 26.9% after four days, and 7.7% after two days. The number of ulcers varied considerably among these individuals, and their size ranged from 1 to 5 mm. The location of these ulcers was predominately on the dorsum of the tongue, while a few patients had them on the ventral surface of the tongue.
In another study by Riad et al. , thirteen patients with oral mucositis were examined. These patients complained of generalized pain and soreness within the oral cavity, focused on the nonkeratinized mucosa. The onset of mucositis was recorded at 0-2 days after a positive PCR SARS-CoV-2 test was done, and the mean duration was 7-14 days. Intraoral examinations found depapillation of the tongue in all cases, sporadic erythema (53.8%) on the buccal mucosa (30.8%), palate (15.4%) and gingiva (7.7%). Significant associations of COVID-19 severity and duration of mucositis with pain, and the loss of sense of taste associated with mucositis durations was disclosed. These results support the role of oral mucosa in being the entryway for the virus. Patients with COVID-19 may easily present with mucositis due to cellular damage triggered by the virus, or as an opportunistic infection due to immune deregulation.
Furthermore, Riad et al. reported eighteen patients who presented a new-onset halitosis during the course of COVID-19 infection. The issue was brought up due to an offensive oral malodor that precipitated notable psychosocial distress, in which the Halimeter Plus was used for quantitative assessment. Oral hygiene states were assessed with the Oral Health Assessment Tool finding that most of the patients had a fair level of oral hygiene, therefore suggesting that the epithelial alterations by SARS-CoV-2 on the dorsum of the tongue is the probable reason for the halitosis.
Interestingly, Said Ahmed et al. present a series of fourteen cases that discuss the prevalence of maxillary mucormycosis osteomyelitis in post-COVID-19 patients. In this study, nine patients had diabetes before contracting COVID-19, and five patients did not, but both groups showed signs of hyperglycemia after their quarantine. This case report concluded that patients with diabetes are at a higher risk of maxillary mucormycosis osteomyelitis due to drugs used to treat COVID-19. It is suggested that these medications can cause patients to become immunocompromised, primarily due to their diabetic status.
Rafałowicz et al. studied 1256 patients, and of those presented 6 cases of patients with the most common long COVID-19 oral cavity symptoms. Amidst these patients the following oral cavity changes were reported: 32% discolouration, ulcerations and hemorrhagic changes on the oral mucosa, 29.69% mycosis on the tongue, 25.79% aphthouslike lesion on hard palate, and 12.5% atrophic cheilitis.
Based on the included descriptive studies, xerostomia, olfactory dysfunction and changes in taste sensation were the most prevalent oral manifestation in SARS-CoV-2 positive patients. Halitosis, masticatory muscle pain, and burning sensation were the least prevalent and were rarely observed.
Analytical Studies
In an observational study by Sinjari et al. , patients hospitalized due to COVID-19 participated in a questionnaire to better understand the oral manifestation of these patients. Health status, oral hygiene habits, and symptoms in the oral cavity before and during the hospitalization/COVID-19 manifestation were collected. Additionally, questions to the Unit of Internal Medicine of the hospital, in regards to the clinical conditions of the patients, were collected. As such, 30% of patients reported xerostomia during hospitalization, 25% impaired taste, 15% burning sensation, and 20% difficulty in swallowing. Nuno-Gonzalez et al. examined 666 patients with COVID-19 in a field hospital in Spain to determine the prevalence of oral and palmoplantar mucocutaneous lesions in infected individuals. The authors established the following results: 25.7% had oral manifestations, including 11.5% transient lingual papillitis, 6.6% aphthous stomatitis, 3.9% glossitis with patchy depapillation, and 3.9% with mucositis. Other common adjuncts to these oral manifestations are burning sensation, which was felt among 5.3% of patients, and taste disturbances.
Gherlone et al. completed a retrospective and prospective cohort study that included COVID-19 patients admitted to San Raffaele University Hospital in Milan. The most frequent oral manifestations were salivary gland ectasia, dry mouth, TMJ abnormalities, and masticatory muscle weakness. In total, 38% of patients suffered from salivary gland ectasia, making it this study's most common oral manifestation. The majority of cases were found among the male patients, individuals who had a severe case of COVID-19, and patients who received antibiotics during hospitalization. At follow-up evaluations, dry mouth was found within 30% of the patients. Diabetes mellitus and COPD are associated with dry mouth; therefore, these medical conditions significantly increase the likelihood of dry mouth within these individuals. Masticatory muscle weakness was found among 19% of patients, 17% suffered from dysgeusia, 14% anosmia, 7% TMJ abnormalities, 3% facial tingling, and 2% trigeminal neuralgia.
Moreover, Fantozzi et al. conducted a retrospective study that included patients admitted to the emergency department in Italy and had a confirmed diagnosis of COVID-19. This study determined the prevalence of xerostomia and gustatory/olfactory dysfunction among these individuals. Olfactory alterations were the most severe, followed by dysgeusia and xerostomia. A total of 60% of patients reported taste alterations, 45.9% suffered from xerostomia, and 41.4% reported smell dysfunctions, and 70% of patients suggested that these symptoms occurred before their COVID-19 diagnosis.
In a study by Villarroel-Dorrego et al. , 55 hospitalized patients, of which 19 were admitted to the intensive care unit, were examined for oral lesions. A total of 40% of patients presented with at least one lesion or variation. Ulcers, both hemorrhagic and aphthous-like, were the most common, and it is believed this manifestation is due to the virus directly causing damage to the oral tissue. Secondary manifestations on the buccal mucosa are commonly seen, specifically candidiasis and recurrent herpetic infection. Effects on the loss of taste (60%), pain or burning in mouth (36.4%), and xerostomia (27.3%) were also reported.
Similarly, Eduardo et al. conducted a retrospective study on COVID-19 patients in the intensive care unit, in which 51.3% presented oral lesions and/or saliva alterations. Majority of the lesions in this study are due to the intubation process by an orotracheal tube, causing traumatic injuries (such as intraoral and extra-oral ulcerations) and the medications used in treatment, such as anticoagulants and corticosteroid-induced immunosuppression. Petechial and bruising can be from anticoagulants, however it is believed that it can also be possibly linked with SARS-CoV-2 s role in the microvasculature of the oral cavity. Dryness of the oral cavity is reported frequently as well.
In contrast, Bardellini et al. completed a retrospective study among children infected with COVID-19 at a Pediatric Hospital in Brescia, a city in Italy with the highest number of COVID-19 cases. The authors reported the following findings among the examined cohort: hyperaemic pharynx, oral pseudomembranous candidiasis, coated tongue, geographic tongue which developed while the individual had a high fever, and lastly, alterations in taste and loss of appetite. Some children also presented with cutaneous lesions such as non-itchy confluent flat papular lesions of the face and limb but did not have any oral manifestations. Hyperaemic pharynx was the more frequent oral manifestation presented among the patients. The authors have suggested no consistent oral manifestations associated with COVID-19 in children, but instead typically found these clinical presentations in influenza virus infection.
Furthermore, Halepas et al. conducted a cross-sectional study of pediatric patients admitted at Morgan Stanley Children's Hospital of New York-Presbyterian and tested positive for SARS-CoV-2. This study aimed to determine if multisystem inflammatory syndrome increased the incidence of oral manifestations among positive individuals. Findings of this study are: 48.9% of patients had red or swollen lips, and 10.6% had strawberry tongue. In conclusion, these clinical appearances were present alongside systemic conditions such as full-body rash and conjunctivitis.
In a study by Favia et al. , 123 patients were observed for oral lesions, in which 65.9% of the oral lesions took place in the early stage of COVID-19. The early stage is defined as lesions that "appeared together with the onset of general symptoms or within one week and always before the beginning of COVID-19-specific therapies". The most frequent lesions were single and multiple ulcers (52.8%) with a likelihood of developing into a larger necrotic area. Blisters were identified in 15.4% cases and of those, about 10% were mentioned as early ulcerative lesions in the medical record and exam of the patients. Several kinds of candidiasis were identified in 28 patients.
Ganesan et al. conducted an observational cross-sectional study in a facility dedicated to SARS-CoV-2 positive patients. The yielded results are: 51.2% of patients experienced an alteration in taste sensation followed by complete ageusia, 28% of patients had xerostomia, and 15.4% of patients had mucosal, hard tissue, or bony lesions. Erythematous macules were the most common soft tissue lesions observed, followed by non-specific solitary ulcers, atrophic glossitis, and candida-like lesions.
Furthermore, in a study by Elamrousy et al. , 124 patients were admitted to explore if oral lesions affect the tongue mainly due to the greater number of cells expressing angiotensin-converting enzyme 2 than in other oral tissues. A total of 90.3% of patients presented with oral manifestations, of which 62% were asymptomatic. Dry mouth was examined in about 84% of the patients. Those that were symptomatic described a painful or burning sensation. Multiple types of oral lesions were identified, however oral ulcers (mostly aphthous-like ulcers covered with pseudomembrane) counted for the highest amount in 92.8% of patients. The most common sites for oral ulcers were the lip (with hemorrhagic ulcers with crust), tongue, and labial mucosa. Overall, the tongue presented with the highest amount of oral lesions at 85.7%, such as ulcers, atrophy, or combination of lesions with Candida infection. Among all the included studies above, the highest prevalence of xerostomia and ulcerations were observed in this study.
A cross-sectional study was conducted by Natto et al. to determine the prevalence of oral manifestations in SARS-CoV-2 positive patients. 109 patients were examined in a single medical center with the following results: 43.4% lost their taste, 7.3% had erythema/desquamated gingivitis and coated tongue, lastly, 6.4% had visible ulcers/blisters. Loss of taste was the most common oral manifestation, and patients reported that it persisted for ten days. Other symptoms appeared for seven days or less, and 79.3% appeared as a single symptom. The most common site for these oral diseases to occur was the dorsum of the tongue (72.4%), followed by vestibules (12.1%) and gingiva, lips, buccal mucosa (8.6%).
Fidan et al. examined 74 SARS-CoV-2 positive patients at their clinic and included them in their observational study. The results suggested that aphthous-like ulcers were the most common oral manifestation in 27 patients. Other findings were also recognized, such as erythema, which occurred in 19 patients, and lichen planus among 12 patients. The most common site for these clinical appearances was on the tongue (n = 27), and other areas included the buccal mucosa (n = 20), gingiva (n = 11), and palate (n = 4). In a study by Naser et al. , 338 patients were observed for identifying the most common oral and maxillofacial lesions. The most common oral conditions were the presence of a white coat on the tongue, pain related to the oral cavity, multiple aphthous ulceration in the oral cavity, dryness of the oral cavity, white coat of the gingiva and cheek, and white coat of the palate.
Surprisingly, in a study by Subramaniam et al. , only nine patients out of 713 reported oral discomfort due to some form of oral lesions. After oral examination, these oral lesions were found to range from herpes simplex ulcers to angular cheilitis. The authors conclude that they support that oral manifestations could be secondary lesions resulting from the deterioration of systemic health, or due to treatments for COVID-19, or could be just coexisting conditions.
In contrast, in a study by Soares et al. , fourteen patients were studied who presented oral lesions due to COVID-19. The identified oral lesions were divided into two groups, and a combination of the two groups also occurred: 1. Lesions presenting as ecchymosis, purplish areas, and petechial. These alterations were most commonly observed in the palate and tongue; 2. Vesiculobullous lesions or ulcerations with ischemic aspects, occurring in any location of the oral mucosa, but mainly on the lips, buccal mucosa, and tongue. Furthermore, eight of the fourteen patients presented with lesions only located on the palate (57.1%), four patients with tongue lesions, and two patients (14.3%) with lesions on lip or palate (14.3%). Clinical features consisted of petechial, ecchymosis, reddish macules, chronic ulcers, vesiculobullous eruptions in the lip, palate and buccal mucosa. In other studies by these authors, clinically and histopathologically it is confirmed that ulcers associated with COVID-19 have a distinguishing appearance, where ischemic borders and central area with a fibrinous pseudomembrane is identifiable, and evolves in about 21-28 days.
A brief report presented by Katz and Yue examined patients with COVID-19 in Florida that included outpatients and inpatients at different health centers across the state. The study determined if there was an increased odds ratio for COVID-19 patients with recurrent aphthous stomatitis. The hospital population served as the control group, and the prevalence of recurrent aphthous stomatitis was 0.148% compared with 0.64% in the COVID-19 group. The odds ratio was adjusted based on demographics and comorbidities. The authors suggest a strong association between COVID-19 and recurrent aphthous stomatitis; however, more data is required.
Interestingly, Zarpellon et al. conducted a post-mortem study on twenty-six deceased patients due to COVID-19. The autopsies included a thorough examination of the hard palate, tongue, jugal and gingival mucosa, anterior tonsillar pillar, and inner lips. Five patients displayed ulcerative lesions in the lower lip and vesiculobullous and ulcerative lesions on the tongue and jugal mucosa. Two patients underwent immunohistochemical staining, which indicated the presence of herpes simplex virus (HSV-1). One patient underwent a histopathological analysis that concluded the existence of Sarcina ventriculi colonies. The authors concluded that these oral findings are typical for immunocompromised patients and are secondary lesions related to traumatic events or co-infections. Most of these examined patients were hospitalized due to severe SARS-CoV-2 infection and received mechanical ventilation and, as a consequence, developed oral injuries.
Based on the included analytical studies, alterations in taste sensations, xerostomia, and aphthous-like ulcers were the most prevalent oral manifestation in COVID-19 patients. The most common site for these manifestations is the dorsum of the tongue, in which it is believed to be due to epithelial alterations caused by SARS-CoV-2.
Study Limitations
The main limitations of our systematic review should be considered to be the heterogeneity of the included studies. Sources of heterogeneity include individual factors, e.g., age group, gender, race and comorbidities, as well as environmental factors such as hospitalization, treatment implemented or pandemic period. Furthermore, the variety of selected study design and the methods for the evaluation of determined oral manifestations makes it difficult to compare the meta-analysis results with the individual studies. Unfortunately, the studies did not have adequate timeframes for long-term observation of the evolution of changes in the oral mucosa with the development of SARS-CoV-2 infection or improvement of health condition after the hospitalization caused by COVID-19. However, it must be emphasized that the subject is very topical, and at the same time dynamic.
Conclusions
Our systematic review suggests higher prevalence of oral manifestations in SARS-CoV-2 positive patients, especially xerostomia, ulcerations and taste alterations. However, the relationship between oral health status and SARS-CoV-2 infection cannot be clearly defined due to confounders such as individual or environmental factors.
Conflicts of Interest:
The authors declare no conflict of interest. |
<gh_stars>100-1000
/*++
Module Name:
Util.cpp
Abstract:
Generic support routines that don't seem to belong elsewhere.
Authors:
<NAME>, March, 2013
Environment:
User mode service.
Revision History:
Factored from other places
--*/
#include "stdafx.h"
#include "Util.h"
#include "Error.h"
#include "GenericFile.h"
//
// Returns the smallest power of two that is greater than or equal to value.
//
// Fixes over the previous version:
//  - value <= 0 used to compute a shift by a negative count (undefined
//    behavior); such inputs (and value == 1) now return 1, i.e. 2^0.
//  - value > 2^62 used to shift into/past the sign bit (also undefined
//    behavior); there is no representable power of two for such inputs in a
//    signed 64-bit integer, so we now return 0 as a detectable sentinel.
//
_int64 FirstPowerOf2GreaterThanOrEqualTo(_int64 value)
{
    if (value <= 1) {
        return 1;   // 2^0 is the smallest power we hand out
    }
    for (int bit = 1; bit <= 62; bit++) {   // bit 63 is the sign bit, so stop at 62
        _int64 power = ((_int64)1) << bit;
        if (power >= value) {
            return power;
        }
    }
    return 0;   // value > 2^62: overflow sentinel (previously undefined behavior)
}
//
// cheezyLogBase2: floor(log2(value)) computed by repeated halving.
// By construction the result is 0 for any value <= 1 (including zero and
// negative inputs), matching the convention documented in the original.
//
int cheezyLogBase2(_int64 value)
{
    int log2Floor = 0;
    for (_int64 remaining = value / 2; remaining > 0; remaining /= 2) {
        log2Floor++;
    }
    return log2Floor;
}
//
// memrevcpy: copies 'bytes' bytes from src to dst with the byte order
// reversed, i.e. dst[i] == src[bytes - 1 - i].  The regions must not overlap.
//
// The bulk of the work is done eight bytes at a time: each 64-bit word is
// read from the tail end of src, byte-swapped, and written to the front of
// dst.  Any remaining 1-7 bytes are then copied individually.
//
void
util::memrevcpy(
    void* dst,
    const void* src,
    size_t bytes)
{
    size_t dwords = bytes >> 3;     // number of whole 64-bit words to swap
    _uint64* p = (_uint64*) dst;
    // q starts at the last whole word of src and walks backwards.
    // NOTE(review): when bytes < 8 this forms a pointer before src; it is
    // never dereferenced (dwords == 0), but merely computing it is
    // technically undefined behavior -- confirm acceptable for the targets.
    const _uint64* q = (const _uint64*) ((const char*)src + bytes - 8);
    for (size_t i = 0; i < dwords; i++) {
        *p++ = ByteSwapUI64(*q--);  // reversing bytes within the word completes the reversal
    }
    int left = (int) (bytes & 7);   // leftover bytes (0-7)
    if (left > 0) {
        char* p2 = (char*) p;                               // first unwritten byte of dst
        const char* q2 = (left - 1) + (const char*) src;    // last uncopied byte of src
        for (int i = 0; i < left; i++) {
            *p2++ = *q2--;
        }
    }
}
//
// NWaiter: a simple countdown latch.  wait() blocks callers until signal()
// has been invoked at least 'n' times in total (across any threads).
//
NWaiter::NWaiter(size_t n)
{
    _signalsRequired = n;   // number of signal() calls that releases waiters
    _signalsReceived = 0;
    InitializeExclusiveLock(&_lock);
    CreateEventObject(&_waiter);
}
//
// Destroys the underlying lock and event.  No thread may still be blocked
// in wait() when the destructor runs.
//
NWaiter::~NWaiter()
{
    DestroyExclusiveLock(&_lock);
    DestroyEventObject(&_waiter);
}
//
// Blocks the caller until the required number of signal() calls has been
// received.  The count is re-read under the lock after every wakeup, so a
// spurious wakeup of the event object is harmless -- the loop just waits
// again.
// NOTE(review): freedom from missed wakeups depends on the semantics of
// WaitForEvent/AllowEventWaitersToProceed (declared elsewhere); this assumes
// a signal delivered before WaitForEvent leaves the event in a set state.
//
void NWaiter::wait()
{
    while (true) {
        bool done;
        AcquireExclusiveLock(&_lock);
        done = (_signalsReceived >= _signalsRequired);
        ReleaseExclusiveLock(&_lock);
        if (done)
            return;
        else {
            WaitForEvent(&_waiter);
        }
    }
}
//
// Records one signal under the lock, then wakes any threads blocked in
// wait() so they can re-check whether the required count has been reached.
//
void NWaiter::signal()
{
    AcquireExclusiveLock(&_lock);
    _signalsReceived += 1;
    ReleaseExclusiveLock(&_lock);
    AllowEventWaitersToProceed(&_waiter);
}
//
// FormatUIntWithCommas: renders 'val' in decimal with a comma every three
// digits (e.g. 1234567 -> "1,234,567") into outputBuffer, then right-pads
// with spaces up to 'desiredLength' characters.  Returns outputBuffer so a
// call can be used inline as a printf-style argument.
//
char *FormatUIntWithCommas(_uint64 val, char *outputBuffer, size_t outputBufferSize, size_t desiredLength)
{
    _ASSERT(desiredLength < outputBufferSize);
    //
    // First, figure out the number of digits.
    //
    unsigned nDigits = 0;
    _uint64 x = val;
    while (x > 0) {
        nDigits++;
        x = x / 10;
    }
    if (0 == nDigits) {
        //
        // Special case for the value 0 (which, I suppose if the world was rational, would be represented by the empty string. :-))
        //
        _ASSERT(0 == val);
        nDigits = 1;
    }
    int nCommas = (nDigits - 1) / 3;
    // Need room for digits, commas, and the terminating NUL.
    if (outputBufferSize < (size_t)nDigits + nCommas + 1) {
        WriteErrorMessage("Internal error: too small buffer for FormatUIntWithCommas, value %lld, outputBufferSize %lld\n", val, outputBufferSize);
        if (outputBufferSize > 0) {
            *outputBuffer = 0;      // at least hand back a valid empty string
        } else {
            soft_exit(1);           // can't even NUL-terminate: give up
        }
        return outputBuffer;
    }
    //
    // Now build up the string backwards.
    //
    size_t offset = (size_t)nDigits + nCommas;  // index of the terminating NUL
    outputBuffer[offset] = '\0';
    if (0 == val) {
        outputBuffer[0] = '0';
        return outputBuffer;
    }
    x = val;
    while (x > 0) {
        // Format the lowest group of (up to) three digits, with a leading
        // comma when more digits remain above it.
        // NOTE(review): "%lld" is a signed specifier applied to a _uint64
        // value; the groups are < 1000 so it works, but "%llu" would be the
        // matching one -- confirm _uint64 is long-long sized on all targets.
        char tempBuf[5];
        if (x > 999) {
            sprintf(tempBuf, ",%03lld", x % 1000);
            _ASSERT(strlen(tempBuf) == 4);
        } else {
            sprintf(tempBuf, "%lld", x);
        }
        _ASSERT(offset >= strlen(tempBuf));
        offset -= strlen(tempBuf);
        memcpy(outputBuffer + offset, tempBuf, strlen(tempBuf));
        x /= 1000;
    }
    // Right-pad with spaces to the requested display width.  (strcat in a
    // loop is quadratic, but the pad is at most a handful of characters.)
    for (size_t i = strlen(outputBuffer); i < desiredLength; i++) {
        strcat(outputBuffer, " ");
    }
    return outputBuffer;
}
//
// Version of fgets that dynamically (re-)allocates the buffer to be big enough to fit the whole line
//
// Abstraction over an fgets-style line source, so the reallocating reader
// below can work against either a stdio FILE* or a GenericFile.
class FgetsObject
{
public:
    // Reads at most size-1 characters (stopping after a newline) into s and
    // NUL-terminates.  Returns s, or NULL on EOF/error -- fgets semantics.
    virtual char *fgets(char *s, int size) = 0;
};
//
// genericReallocatingFgets: reads one whole line (through its '\n') from the
// supplied FgetsObject, doubling *buffer until the line fits.
//
// Protocol: *buffer/*io_bufferSize describe a caller-owned new[] buffer; pass
// *io_bufferSize == 0 on first use and an initial buffer is allocated.
// Returns *buffer on success, NULL on EOF or read error.
//
char *genericReallocatingFgets(char **buffer, int *io_bufferSize, FgetsObject *getsObject)
{
    if (*io_bufferSize == 0) {
        //
        // Just pick a decent size to start out with.
        //
        *io_bufferSize = 128;
        *buffer = new char[*io_bufferSize];
    }
    int offset = 0;     // how much of the current line is already in *buffer
    for (;;) { // loop with middle exit
        if (NULL == getsObject->fgets((*buffer) + offset, *io_bufferSize - offset)) { // Recall that if this clips, then the next time it just keeps reading from the clip.
            //
            // EOF or error.
            // NOTE(review): if the input's final line exactly fills the buffer
            // and lacks a trailing '\n', the retry after growing hits EOF and
            // the text already read is discarded -- confirm inputs are
            // newline-terminated.
            //
            return NULL;
        }
        int len = (int)strlen(*buffer);
        // The line fit iff the buffer wasn't filled to capacity, or the last
        // character read is the newline itself.
        if (len != *io_bufferSize - 1 || (*buffer)[len - 1] == '\n') {
            //
            // It fit.
            //
            return *buffer;
        }
        //
        // Double the buffer and retry.
        //
        if (((_int64)*io_bufferSize) * 2 > 0x7fffffff) {
            WriteErrorMessage("Trying to fgets() a string bigger than 2^31 bytes. Perhaps you've supplied an incorrect input file.");
            soft_exit(1);
        }
        int newBufferSize = *io_bufferSize * 2;
        char *newBuffer = new char[newBufferSize];
        offset = len;                       // keep the partial line we already read
        memcpy(newBuffer, *buffer, offset);
        delete[] *buffer;
        *buffer = newBuffer;
        *io_bufferSize = newBufferSize;
    }
    //NOTREACHED
    return NULL; // Just to keep the compiler happy
}
// FgetsObject adapter over a stdio FILE*.
class StdioFGetsObject : public FgetsObject
{
public:
    StdioFGetsObject(FILE *_file) : file(_file) {}
    // Delegates straight to the C library fgets on the wrapped stream.
    virtual char *fgets(char *s, int size) {
        return ::fgets(s, size, file);
    }
private:
    FILE *file;
};
// reallocatingFgets: reads one whole line from 'stream', growing the
// caller-owned *buffer as needed (see genericReallocatingFgets for the
// ownership protocol).  Returns *buffer, or NULL on EOF/error.
char *reallocatingFgets(char **buffer, int *io_bufferSize, FILE *stream)
{
    StdioFGetsObject fgetsObject(stream);
    return genericReallocatingFgets(buffer, io_bufferSize, &fgetsObject);
}
// FgetsObject adapter over the project's GenericFile abstraction.
class GenericFileFGetsObject : public FgetsObject
{
public:
    GenericFileFGetsObject(GenericFile *_file) : file(_file) {}
    // GenericFile::gets has fgets semantics (NULL on EOF/error).
    virtual char *fgets(char *s, int size) {
        return file->gets(s, size);
    }
private:
    GenericFile *file;
};
// reallocatingFgetsGenericFile: as reallocatingFgets, but reads from a
// GenericFile instead of a stdio stream.
char *reallocatingFgetsGenericFile(char **buffer, int *io_bufferSize, GenericFile *file)
{
    GenericFileFGetsObject fgetsObject(file);
    return genericReallocatingFgets(buffer, io_bufferSize, &fgetsObject);
}
|
/**
 * Tests getting and setting fields.
 *
 * <p>Each option field is assigned a randomized value and immediately read
 * back, so every assertion compares a getter against the value just set.
 * The randomness only widens coverage across runs; the test is
 * self-consistent regardless of the values drawn.</p>
 */
@Test
public void fields() {
    Random random = new Random();
    // Randomized inputs covering every settable field of CreateOptions.
    boolean createParent = random.nextBoolean();
    boolean ensureAtomic = random.nextBoolean();
    String owner = CommonUtils.randomAlphaNumString(10);
    String group = CommonUtils.randomAlphaNumString(10);
    Mode mode = new Mode((short) random.nextInt());
    CreateOptions options = CreateOptions.defaults(mConfiguration);
    options.setCreateParent(createParent);
    options.setEnsureAtomic(ensureAtomic);
    options.setOwner(owner);
    options.setGroup(group);
    options.setMode(mode);
    // Verify each setter round-trips through its getter.
    assertEquals(createParent, options.getCreateParent());
    assertEquals(ensureAtomic, options.isEnsureAtomic());
    assertEquals(owner, options.getOwner());
    assertEquals(group, options.getGroup());
    assertEquals(mode, options.getMode());
}
.
A case of metastatic renal cell carcinoma in which complete remission was achieved using Interferon therapy is reported. A 67-year-old male was found to have extensive paraaortic lymph node metastasis while undergoing left nephrectomy following a diagnosis of left renal cancer. Pathological diagnosis of the node was metastasis of clear cell type renal cell carcinoma. Interferon (Hu IFN-alpha 2: Sch 30500; Yamanouchi Pharmaceutical Co.) therapy was started 34 days after surgery. The Interferon was administered i.m. at a dose of 1 X 10(6) units per injection, 3 times a week, and was continued up to a total dose of 236 X 10(6) units. Response of the metastatic lymph nodes was examined using CT scan. Marked response was observed 4 weeks after starting the Interferon therapy, and complete disappearance of the tumors was obtained 5 months after commencement of therapy. The duration of complete response has lasted 4 months. The toxic manifestations of the Interferon therapy were fever and loss of appetite.
import { Vec3 } from "./vec3";
import { Color } from "./color";
import { CameraRay } from "./cameraray";
/**
 * An RGBA render target plus the camera-ray generator for a simple raytracer.
 * The pixel buffer is laid out row-major, 4 bytes (r, g, b, a) per pixel.
 */
export class Viewport {
    private width: number
    private height: number
    private buffer: Uint8ClampedArray

    /** Allocates a width*height RGBA buffer, pre-filled with the clear color. */
    constructor(width: number, height: number) {
        this.width = width;
        this.height = height;
        this.buffer = new Uint8ClampedArray(width * height * 4);
        this.Clear();
    }

    /** Returns the raw RGBA pixel buffer (not a copy). */
    GetBuffer() {
        return this.buffer;
    }

    Width(): number {
        return this.width;
    }

    Height(): number {
        return this.height;
    }

    /** Aspect ratio (width / height). */
    Ratio(): number {
        return this.width / this.height;
    }

    /** The color used by Clear(); magenta makes unrendered areas obvious. */
    ClearColor(): Color {
        return Color.Magenta();
    }

    /** Fills every pixel with the clear color at full opacity. */
    Clear() {
        let clearColor = this.ClearColor();
        for (let i = 0; i < this.width * this.height * 4; i += 4) {
            this.buffer[i + 0] = clearColor.r;
            this.buffer[i + 1] = clearColor.g;
            this.buffer[i + 2] = clearColor.b;
            this.buffer[i + 3] = 255;
        }
    }

    /** Fills every pixel with random RGB noise at full opacity (debug aid). */
    FillRandom() {
        for (let i = 0; i < this.width * this.height * 4; i += 4) {
            this.buffer[i + 0] = Math.random() * 255;
            this.buffer[i + 1] = Math.random() * 255;
            this.buffer[i + 2] = Math.random() * 255;
            this.buffer[i + 3] = 255;
        }
    }

    /**
     * Returns count*count camera rays through pixel (x, y), one per cell of a
     * count-by-count sub-pixel grid, each sampled at its cell center.
     *
     * Fixes over the previous version: rays now start at sub-pixel centers
     * (resolving the old TODO), and integer loop counters replace the
     * fractional `j += 1/count` steps, whose floating-point accumulation
     * could emit an extra row/column of rays (e.g. count = 3 produced 4 steps).
     */
    GetRays(x: number, y: number, count: number): Array<CameraRay> {
        let res = new Array<CameraRay>();
        for (let sy = 0; sy < count; sy++) {
            for (let sx = 0; sx < count; sx++) {
                // Center of sub-pixel cell (sx, sy), in [0, 1) pixel space.
                const u = (sx + 0.5) / count;
                const v = (sy + 0.5) / count;
                // Map to camera space: x spans [-ratio/2, ratio/2], y spans
                // [-1/2, 1/2] with the image's top row at +1/2.
                const dirX: number = (this.Ratio() / this.width) * (x + u) - this.Ratio() / 2;
                const dirY: number = 1 / 2 - (1 / this.height) * (y + v);
                res.push(new CameraRay(new Vec3(0, 0, 0), new Vec3(dirX, dirY, 1)));
            }
        }
        return res;
    }

    /**
     * Writes one opaque pixel.  No bounds checking is performed; out-of-range
     * coordinates write into neighboring rows or past the buffer.
     */
    DrawPixel(x: number, y: number, color: Color) {
        let offset = this.width * y + x;
        this.buffer[offset * 4 + 0] = color.r;
        this.buffer[offset * 4 + 1] = color.g;
        this.buffer[offset * 4 + 2] = color.b;
        this.buffer[offset * 4 + 3] = 255;
    }
}
|
package com.avaje.ebean.dbmigration.ddlgeneration.platform;
import com.avaje.ebean.config.DbConstraintNaming;
import com.avaje.ebean.config.ServerConfig;
import com.avaje.ebean.config.dbplatform.DbIdentity;
import com.avaje.ebean.config.dbplatform.DbTypeMap;
import com.avaje.ebean.config.dbplatform.IdType;
import com.avaje.ebean.dbmigration.ddlgeneration.BaseDdlHandler;
import com.avaje.ebean.dbmigration.ddlgeneration.DdlBuffer;
import com.avaje.ebean.dbmigration.ddlgeneration.DdlHandler;
import com.avaje.ebean.dbmigration.ddlgeneration.DdlWrite;
import com.avaje.ebean.dbmigration.ddlgeneration.platform.util.PlatformTypeConverter;
import com.avaje.ebean.dbmigration.migration.AddHistoryTable;
import com.avaje.ebean.dbmigration.migration.AlterColumn;
import com.avaje.ebean.dbmigration.migration.Column;
import com.avaje.ebean.dbmigration.migration.DropHistoryTable;
import com.avaje.ebean.dbmigration.migration.IdentityType;
import com.avaje.ebean.dbmigration.model.MTable;
import java.io.IOException;
import java.util.List;
/**
 * Controls the DDL generation for a specific database platform.
 * <p>
 * Database-specific subclasses adapt the generated DDL by overriding the
 * protected SQL-fragment fields and selected methods below.
 */
public class PlatformDdl {

  // History (temporal table) support; defaults to a no-op implementation.
  protected PlatformHistoryDdl historyDdl = new NoHistorySupportDdl();

  /**
   * Converter for logical/standard types to platform specific types. (eg. clob -> text)
   */
  private final PlatformTypeConverter typeConverter;

  /**
   * For handling support of sequences and autoincrement.
   */
  private final DbIdentity dbIdentity;

  /**
   * Default assumes if exists is supported.
   */
  protected String dropTableIfExists = "drop table if exists ";

  // Suffix appended to drop table statements (e.g. " cascade" on some platforms).
  protected String dropTableCascade = "";

  /**
   * Default assumes if exists is supported.
   */
  protected String dropSequenceIfExists = "drop sequence if exists ";

  // Referential action clause appended to generated foreign keys.
  protected String foreignKeyRestrict = "on delete restrict on update restrict";

  // Suffix that turns a column definition into an identity/autoincrement column.
  protected String identitySuffix = " auto_increment";

  // Platform-specific SQL fragments used when composing ALTER statements;
  // subclasses override these to match their dialect.
  protected String dropConstraintIfExists = "drop constraint if exists";

  protected String dropIndexIfExists = "drop index if exists ";

  protected String alterColumn = "alter column";

  protected String dropUniqueConstraint = "drop constraint";

  // Keyword (if any) placed between the column name and the new type in an
  // alter-column-type statement; empty by default.
  protected String columnSetType = "";

  protected String columnSetDefault = "set default";

  protected String columnDropDefault = "drop default";

  protected String columnSetNotnull = "set not null";

  protected String columnSetNull = "set null";

  /**
   * Set false for MsSqlServer to allow multiple nulls for OneToOne mapping.
   */
  protected boolean inlineUniqueOneToOne = true;

  // Naming convention used to normalise table/column names; set in configure().
  protected DbConstraintNaming naming;

  public PlatformDdl(DbTypeMap platformTypes, DbIdentity dbIdentity) {
    this.dbIdentity = dbIdentity;
    this.typeConverter = new PlatformTypeConverter(platformTypes);
  }

  /**
   * Set configuration options.
   */
  public void configure(ServerConfig serverConfig) {
    historyDdl.configure(serverConfig, this);
    naming = serverConfig.getConstraintNaming();
  }

  /**
   * Create a DdlHandler for the specific database platform.
   */
  public DdlHandler createDdlHandler(ServerConfig serverConfig) {
    return new BaseDdlHandler(serverConfig, this);
  }

  /**
   * Return the identity type to use given the support in the underlying database
   * platform for sequences and identity/autoincrement.
   */
  public IdType useIdentityType(IdentityType modelIdentityType) {
    return dbIdentity.useIdentityType(modelIdentityType);
  }

  /**
   * Modify and return the column definition for autoincrement or identity definition.
   */
  public String asIdentityColumn(String columnDefn) {
    return columnDefn + identitySuffix;
  }

  /**
   * Write all the table columns converting to platform types as necessary.
   */
  public void writeTableColumns(DdlBuffer apply, List<Column> columns, boolean useIdentity) throws IOException {
    for (int i = 0; i < columns.size(); i++) {
      apply.newLine();
      writeColumnDefinition(apply, columns.get(i), useIdentity);
      if (i < columns.size() - 1) {
        // comma between column definitions, but not after the last one
        apply.append(",");
      }
    }
  }

  /**
   * Write the column definition to the create table statement.
   */
  protected void writeColumnDefinition(DdlBuffer buffer, Column column, boolean useIdentity) throws IOException {

    boolean identityColumn = useIdentity && isTrue(column.isPrimaryKey());
    String platformType = convert(column.getType(), identityColumn);

    buffer.append("  ");
    // pad the column name to a width of 29 so the definitions line up
    buffer.append(lowerColumnName(column.getName()), 29);
    buffer.append(platformType);
    if (isTrue(column.isNotnull()) || isTrue(column.isPrimaryKey())) {
      buffer.append(" not null");
    }

    // add check constraints later as we really want to give them a nice name
    // so that the database can potentially provide a nice SQL error
  }

  /**
   * Return the drop foreign key clause.
   */
  public String alterTableDropForeignKey(String tableName, String fkName) {
    return "alter table " + tableName + " " + dropConstraintIfExists + " " + fkName;
  }

  /**
   * Convert the standard type to the platform specific type.
   */
  public String convert(String type, boolean identity) {
    String platformType = typeConverter.convert(type);
    return identity ? asIdentityColumn(platformType) : platformType;
  }

  /**
   * Add history support to this table using the platform specific mechanism.
   */
  public void createWithHistory(DdlWrite writer, MTable table) throws IOException {
    historyDdl.createWithHistory(writer, table);
  }

  /**
   * Drop history support for a given table.
   */
  public void dropHistoryTable(DdlWrite writer, DropHistoryTable dropHistoryTable) throws IOException {
    historyDdl.dropHistoryTable(writer, dropHistoryTable);
  }

  /**
   * Add history support to an existing table.
   */
  public void addHistoryTable(DdlWrite writer, AddHistoryTable addHistoryTable) throws IOException {
    historyDdl.addHistoryTable(writer, addHistoryTable);
  }

  /**
   * Regenerate the history triggers (or function) due to a column being added/dropped/excluded or included.
   */
  public void regenerateHistoryTriggers(DdlWrite write, HistoryTableUpdate update) throws IOException {
    historyDdl.regenerateHistoryTriggers(write, update);
  }

  /**
   * Generate and return the create sequence DDL.
   */
  public String createSequence(String sequenceName, int initialValue, int allocationSize) {

    StringBuilder sb = new StringBuilder("create sequence ");
    sb.append(sequenceName);
    if (initialValue > 1) {
      sb.append(" start with ").append(initialValue);
    }
    if (allocationSize > 0 && allocationSize != 50) {
      // at this stage ignoring allocationSize 50 as this is the 'default' and
      // not consistent with the way Ebean batch fetches sequence values
      sb.append(" increment by ").append(allocationSize);
    }
    sb.append(";");
    return sb.toString();
  }

  /**
   * Return the drop sequence statement (potentially with if exists clause).
   */
  public String dropSequence(String sequenceName) {
    return dropSequenceIfExists + sequenceName;
  }

  /**
   * Return the drop table statement (potentially with if exists clause).
   */
  public String dropTable(String tableName) {
    return dropTableIfExists + tableName + dropTableCascade;
  }

  /**
   * Return the drop index statement.
   * <p>
   * tableName is unused by this default implementation; it is part of the
   * signature for platforms whose drop-index syntax is scoped to a table.
   */
  public String dropIndex(String indexName, String tableName) {
    return dropIndexIfExists + indexName;
  }

  /**
   * Return the create index statement.
   */
  public String createIndex(String indexName, String tableName, String[] columns) {

    StringBuilder buffer = new StringBuilder();
    buffer.append("create index ").append(indexName).append(" on ").append(tableName);
    appendColumns(columns, buffer);
    return buffer.toString();
  }

  /**
   * Add foreign key.
   */
  public String alterTableAddForeignKey(String tableName, String fkName, String[] columns, String refTable, String[] refColumns) {

    StringBuilder buffer = new StringBuilder(90);
    buffer
        .append("alter table ").append(tableName)
        .append(" add constraint ").append(fkName)
        .append(" foreign key");
    appendColumns(columns, buffer);
    buffer
        .append(" references ")
        .append(lowerTableName(refTable));
    appendColumns(refColumns, buffer);
    appendWithSpace(foreignKeyRestrict, buffer);

    return buffer.toString();
  }

  /**
   * Drop a unique constraint from the table.
   */
  public String alterTableDropUniqueConstraint(String tableName, String uniqueConstraintName) {
    return "alter table " + tableName + " " + dropUniqueConstraint + " " + uniqueConstraintName;
  }

  /**
   * Add a unique constraint to the table.
   * <p>
   * Overridden by MsSqlServer for specific null handling on unique constraints.
   */
  public String alterTableAddUniqueConstraint(String tableName, String uqName, String[] columns) {

    StringBuilder buffer = new StringBuilder(90);
    buffer.append("alter table ").append(tableName).append(" add constraint ").append(uqName).append(" unique ");
    appendColumns(columns, buffer);
    return buffer.toString();
  }

  /**
   * Return true if unique constraints for OneToOne can be inlined as normal.
   * Returns false for MsSqlServer due to it's null handling for unique constraints.
   */
  public boolean isInlineUniqueOneToOne() {
    return inlineUniqueOneToOne;
  }

  /**
   * Alter a column type.
   * <p>
   * Note that that MySql and SQL Server instead use alterColumnBaseAttributes()
   * </p>
   */
  public String alterColumnType(String tableName, String columnName, String type) {
    // columnSetType is empty by default; platforms that need a keyword here
    // include a trailing space in their override.
    return "alter table " + tableName + " " + alterColumn + " " + columnName + " " + columnSetType + type;
  }

  /**
   * Alter a column adding or removing the not null constraint.
   * <p>
   * Note that that MySql and SQL Server instead use alterColumnBaseAttributes()
   * </p>
   */
  public String alterColumnNotnull(String tableName, String columnName, boolean notnull) {

    String suffix = notnull ? columnSetNotnull : columnSetNull;
    return "alter table " + tableName + " " + alterColumn + " " + columnName + " " + suffix;
  }

  /**
   * Return true if the default value is the special DROP DEFAULT value.
   */
  public boolean isDropDefault(String defaultValue) {
    return "DROP DEFAULT".equals(defaultValue);
  }

  /**
   * Alter column setting the default value.
   */
  public String alterColumnDefaultValue(String tableName, String columnName, String defaultValue) {

    String suffix = isDropDefault(defaultValue) ? columnDropDefault : columnSetDefault + " " + defaultValue;
    return "alter table " + tableName + " " + alterColumn + " " + columnName + " " + suffix;
  }

  /**
   * Alter column setting both the type and not null constraint.
   * <p>
   * Used by MySql and SQL Server as these require both column attributes to be set together.
   * </p>
   */
  public String alterColumnBaseAttributes(AlterColumn alter) {
    // by default do nothing, only used by mysql and sql server as they can only
    // modify the column with the full column definition
    return null;
  }

  // Appends " (col1,col2,...)" with each name normalised and trimmed.
  protected void appendColumns(String[] columns, StringBuilder buffer) {
    buffer.append(" (");
    for (int i = 0; i < columns.length; i++) {
      if (i > 0) {
        buffer.append(",");
      }
      buffer.append(lowerColumnName(columns[i].trim()));
    }
    buffer.append(")");
  }

  // Appends the content preceded by a single space, skipping null/empty.
  protected void appendWithSpace(String content, StringBuilder buffer) {
    if (content != null && !content.isEmpty()) {
      buffer.append(" ").append(content);
    }
  }

  /**
   * Convert the table to lower case.
   * <p>
   * Override as desired. Generally lower case with underscore is a good cross database
   * choice for column/table names.
   */
  protected String lowerTableName(String name) {
    return naming.lowerTableName(name);
  }

  /**
   * Convert the column name to lower case.
   * <p>
   * Override as desired. Generally lower case with underscore is a good cross database
   * choice for column/table names.
   */
  protected String lowerColumnName(String name) {
    return naming.lowerColumnName(name);
  }

  /**
   * Null safe Boolean true test.
   */
  protected boolean isTrue(Boolean value) {
    return Boolean.TRUE.equals(value);
  }

}
|
# Discord python bot for Konziis!
#
# Python discord bot:
# https://realpython.com/how-to-make-a-discord-bot-python/
# https://discordpy.readthedocs.io/en/latest/index.html
# https://github.com/Rapptz/discord.py/tree/v1.7.2/examples
#
# Wolfram-API:
# https://products.wolframalpha.com/api/documentation/#getting-started
import os
import random
import asyncio
import discord
from discord.ext import commands
from discord.ext import tasks
from mcstatus import MinecraftServer
import wolframalpha
from src.mycommands import MyCommands

# Required credentials, read from the environment (validated below).
TOKEN = os.getenv('DISCORD_TOKEN')
GUILD = os.getenv('DISCORD_GUILD')
WOLFRAM_APPID = os.getenv('WOLFRAM_APPID')

# NOTE(review): MC_SERVER_CHECK_TIME (seconds) and MC_SERVER_STATUS_INT appear
# unused -- the polling interval is hard-coded as @tasks.loop(minutes=10)
# below.  Confirm whether these constants can be removed or should be wired in.
MC_SERVER_CHECK_TIME = 10 * 60
MC_SERVER_ADDRESS = "ratius99.aternos.me"
MC_SERVER_STATUS_INT = 0

# Text channel where "someone is alone in voice" announcements are posted.
MESSAGE_CHANNEL = "📯mitteilungen"

# Random flavour lines appended to the voice-channel announcement.
TXT_VOICE_UPDATE = ["is needy and wait's for academic trash talk",
                    "is lonely and want's to talk",
                    "is waiting for you ",
                    "is sitting alone here",
                    "<put here some random text stuff>"
                    ]

# Default presence text shown when the Minecraft server has no players.
basic_activity_name =" in der Cloud! ☁"

bot = commands.Bot(command_prefix="!", activity= discord.Game(name=basic_activity_name))
wolframclient = wolframalpha.Client(WOLFRAM_APPID)

# Initialization errors: fail fast if any credential is missing.
if not (TOKEN and GUILD and WOLFRAM_APPID):
    raise RuntimeError("Missing environmental variable.")
# Tasks
@tasks.loop(minutes=10)
async def check_mc_status():
    """Poll the Minecraft server every 10 minutes and mirror its state in the
    bot's Discord presence: player count when occupied, the default activity
    text when empty, and a short error note when the server is unreachable."""
    print("loopmc")
    activity_text = basic_activity_name
    player_count = 0
    try:
        server_status = MinecraftServer.lookup(MC_SERVER_ADDRESS).status()
        player_count = server_status.players.online
    except ConnectionRefusedError:
        activity_text = " mit Errors ..."
    except Exception:
        activity_text = " mit \"bad status error\" :-("
    if player_count:
        # Only advertise a player count when someone is actually online.
        player_text = "einem Spieler" if player_count == 1 else str(player_count) + " Spielern"
        activity_text = " mit " + player_text + " MC!"
    await bot.change_presence(activity=discord.Game(name=activity_text))
# Events
@bot.event
async def on_voice_state_update(member, before, after):
    """When a member joins a voice channel (from no channel), wait a moment
    and then, if they are still the only person in that channel, post an
    invitation message to MESSAGE_CHANNEL so others can join them."""
    if before.channel is None and after.channel is not None:
        channel_name=after.channel.name
        print("lonely state")
        await asyncio.sleep(10) # wait to see if the user is shy / has misclicked
        # NOTE(review): 'after' is a snapshot from the original event, so this
        # check does not reflect whether the member is still connected after
        # the sleep; the voice_states length check below is what actually
        # guards the announcement.  Confirm this is the intended logic.
        if after.channel is not None:
            print("trigger")
            guild = discord.utils.get(bot.guilds, name=GUILD)
            voice_channel = discord.utils.get(guild.voice_channels, name=channel_name)
            print(voice_channel.voice_states)
            # Announce only if the joiner is (still) alone in the channel.
            if len(voice_channel.voice_states)==1:
                print("t2")
                text_channel = discord.utils.get(guild.text_channels, name=MESSAGE_CHANNEL)
                await text_channel.send(f"Moin! {member.name} "+random.choice(TXT_VOICE_UPDATE)+". Visit him at #"+after.channel.name+".")
@bot.event
async def on_ready():
    """Runs once the gateway connection is up: log it and start the
    Minecraft status polling loop."""
    print(f'{bot.user.name} has connected to Discord!')
    check_mc_status.start()
@bot.event
async def on_member_join(member):
    """Send a short (German) greeting via direct message to every member
    who joins the guild."""
    await member.create_dm()
    await member.dm_channel.send(
        f'Hi {member.name}, hier ist der nerfffiger Diiscordbot aus Konziis!'
    )
@bot.event
async def on_message(message):
    """Message hook: ignore the bot's own messages, then forward everything
    to the command processor (overriding on_message suppresses the default
    command handling, so this call is required for !commands to work)."""
    if message.author == bot.user:
        return
    # here read messageses
    await bot.process_commands(message)
@bot.event
async def on_command_error(ctx: commands.Context, error):
    """Log the root cause of a failed command and report it back to the
    channel the command was invoked from."""
    print(error.__cause__)
    await ctx.send(">> Error: "+str(error.__cause__))
# Register the command cog (Wolfram Alpha etc.), then start the bot.
# bot.run() blocks and drives the event loop until shutdown.
bot.add_cog(MyCommands(bot, wolframclient))
bot.run(TOKEN)
# Custom event example:
#
# bot.dispatch("custom_event", arg1, arg2)
#
# @bot.event
# async def on_custom_event(arg1, arg2):
# print("Custom event")
|
<gh_stars>1-10
package com.todoist.mediaparser.mediaentity;
import com.todoist.mediaparser.util.HttpStack;
import java.util.regex.Pattern;
/**
 * Holds various information about the associated media. It needs to be configured through a call to
 * {@link #configure(HttpStack)}, which can be a blocking call depending on {@link #isConfigurationBlocking()}.
 * No information is available before this is called, except {@link #getUrl()}.
 */
public abstract class MediaEntity {

	// The original url this entity was created from; set once in the constructor.
	protected String mUrl;

	// True only after doConfigure() has completed without throwing.
	protected boolean mConfigured;

	// The following are populated by doConfigure() and are invalid until
	// mConfigured is true.
	protected String mContentUrl;
	protected String mContentType;
	protected String mUnderlyingContentType;

	protected MediaEntity(String url) {
		mUrl = url;
	}

	/**
	 * Returns the original url.
	 */
	public final String getUrl() {
		return mUrl;
	}

	/**
	 * Configure this media entity.
	 *
	 * @param httpStack the HTTP stack to use, falls back to the default one if {@code null}.
	 * @see #isConfigured()
	 * @see #isConfigurationBlocking()
	 */
	public synchronized void configure(HttpStack httpStack) {
		if (!mConfigured) {
			try {
				doConfigure(httpStack);
				mConfigured = true;
			} catch (Exception e) {
				// NOTE(review): failures are swallowed here, leaving the entity
				// unconfigured; any later getter will then throw from
				// ensureConfigured() even though configure() *was* called.
				// Confirm this silent-failure contract is intentional.
				e.printStackTrace();
			}
		}
	}

	public boolean isConfigured() {
		return mConfigured;
	}

	public abstract boolean isConfigurationBlocking();

	/**
	 * Returns true if this parser is appropriate for {@code mUrl} passed in during instantiation, or false if not.
	 */
	public boolean matches() {
		return getMatchingPattern().matcher(mUrl).lookingAt();
	}

	/**
	 * Returns the url for this media. Redirects might need to be followed.
	 */
	public String getContentUrl() {
		ensureConfigured();
		return mContentUrl;
	}

	/**
	 * Returns the content type of the content, in the form of type/subtype. The subtype can be unknown, hence be "*".
	 */
	public String getContentType() {
		ensureConfigured();
		return mContentType;
	}

	/**
	 * Returns the underlying content type. Useful for non-direct urls which are associated with certain types of
	 * media,
	 * eg. YouTube URLs will always point to videos.
	 */
	public String getUnderlyingContentType() {
		ensureConfigured();
		return mUnderlyingContentType;
	}

	/**
	 * Returns an image thumbnail for this media. Redirects might need to be followed.
	 *
	 * Subclasses should override and provide a thumbnail whose smallest side is equal or larger than
	 * {@code smallestSide}, unless it's not available or the size is negative. In those cases, the largest possible
	 * thumbnail image should be returned.
	 */
	public abstract String getThumbnailUrl(int smallestSide);

	/**
	 * Returns a pattern that matches valid urls. Used by {@link #matches()}.
	 */
	protected abstract Pattern getMatchingPattern();

	/**
	 * Perform the setup. {@code #mContentUrl}, {@code #mContentType} and {@code #mUnderlyingContentType} should all
	 * be set.
	 *
	 * The passed-in {@code httpStack} can be null. If so, and it's needed, use
	 * {@link com.todoist.mediaparser.util.SimpleHttpStack}.
	 */
	protected abstract void doConfigure(HttpStack httpStack) throws Exception;

	// Guards every getter that depends on doConfigure() having run.
	// NOTE(review): the message is inaccurate when configure() was called but
	// doConfigure() threw (see configure()); the state is the same either way.
	private void ensureConfigured() {
		if (!mConfigured) {
			throw new IllegalStateException("configure() was never called");
		}
	}
}
|
Kevin Smith Presents Slamdance Award Winning 'The Dirties' Trailer
"The most important movie you will see all year," says Kevin Smith. One of the first films debuting through Kevin Smith's new "Movie Club" distribution company (aka SModcast Pictures with Phase 4) is a Slamdance award-winning indie titled The Dirties. Somewhat controversial, the documentary-like story follows high school kids making a film to get revenge on bullies, but it seems to almost become more than that. While this is only just a trailer, it looks to balance the discussion of a school shooting and a documentary about bullying in a rather bold but smart way. I'm intrigued. This may be worth checking out, give it a look below.
Here's the official release trailer for Matthew Johnson's The Dirties, uploaded via YouTube:
Synopsis: When two best friends team up to film a comedy about getting revenge on bullies, the exercise takes a devastating turn when one of them begins to think of it as more than a joke. The Dirties is directed by Matthew Johnson, from a screenplay by Johnson & Evan Morgan, starring Matthew Johnson & Owen Williams. This first premiered at the Slamdance Film Festival in January and won the Grand Jury Prize and Spirit of Slamdance Award. Kevin Smith's Movie Club will be releasing the independent film in theaters and on demand starting October 4th later this fall. For more info, visit their official website or Facebook page.
1 Guy who comments on things on Jul 22, 2013
2 DAVIDPD on Jul 22, 2013
3 Carpola on Jul 22, 2013
4 Danimal on Jul 23, 2013
5 tommyturner on Jul 25, 2013
Sorry, no commenting is allowed at this time. |
#include <stdio.h>
int countfunc(int n, int x);
/*
 * Reads pairs (n, x) from stdin until the terminating pair "0 0", and for
 * each prints the number of combinations of three distinct integers from
 * 1..n that sum to x (see countfunc).
 *
 * NOTE(review): the scanf return value is unchecked; malformed input leaves
 * n and x indeterminate and can loop forever -- confirm input is trusted.
 */
int main(void){
	int n, x, cou;
	while(1){
		scanf("%d %d", &n, &x);
		if((n == 0) && (x == 0)) break;
		cou = countfunc(n, x);
		printf("%d\n", cou);
	}
	return 0;
}
/*
 * countfunc: returns the number of combinations of three distinct integers
 * (i, j, k) with 1 <= i < j < k <= n whose sum equals x.
 *
 * Improvement over the original triple loop: for each pair (i, j) the third
 * value is forced to be k = x - i - j, so it is tested directly instead of
 * scanning all candidates.  This drops the cost from O(n^3) to O(n^2) while
 * producing identical results.
 */
int countfunc(int n, int x)
{
	int i, j;
	int count = 0;
	for(i = 1;i <= n;i++){
		for(j = i + 1;j <= n;j++){
			int k = x - i - j;       /* the only possible third term */
			if(k > j && k <= n){     /* enforce j < k <= n (distinct, ascending) */
				count = count + 1;
			}
		}
	}
	return count;
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.