content
stringlengths
10
4.9M
/**
 * Stores all the conversations belonging to one flowType.
 *
 * Created by wangqi on 2019/9/5.
 */
@ConfigurationProperties("conversationmanager")
public class ConversationsFlow {

    /** The flowType this container serves. */
    private String flowType;

    /** Conversations keyed by id; concurrent so lookups stay safe during updates. */
    private Map<String, ConversationImpl> conversationMap = new ConcurrentHashMap<>();

    /**
     * Looks up a conversation by its id.
     *
     * @param id the conversation id; may be null or empty
     * @return the matching conversation, or null when the id is blank or unknown
     */
    public Conversation getConversation(String id) {
        final boolean blankId = (id == null) || id.isEmpty();
        return blankId ? null : conversationMap.get(id);
    }

    public Map<String, ConversationImpl> getConversationMap() {
        return conversationMap;
    }

    public void setConversationMap(Map<String, ConversationImpl> conversationMap) {
        this.conversationMap = conversationMap;
    }

    public String getFlowType() {
        return flowType;
    }

    public void setFlowType(String flowType) {
        this.flowType = flowType;
    }
}
// Copyright 1998-2015 Epic Games, Inc. All Rights Reserved.

/**
 * Commandlet to allow diff in P4V, and expose that functionality to the editor.
 */

#pragma once
#include "Commandlets/Commandlet.h"
#include "DiffAssetsCommandlet.generated.h"

UCLASS()
class UDiffAssetsCommandlet : public UCommandlet
{
	GENERATED_UCLASS_BODY()

	// Begin UCommandlet Interface
	// Commandlets return 0 on success, hence the inverted bool result.
	virtual int32 Main(const FString& Params) override
	{
		return !ExportFilesToTextAndDiff(Params);
	}
	// End UCommandlet Interface

	/**
	 * The meat of the commandlet; this can be called from the editor.
	 * Format of the command line is as follows:
	 *   File1.uasset File2.uasset -DiffCmd="C:/Program Files/Araxis/Araxis Merge/AraxisP4Diff.exe {1} {2}"
	 * @param Params Command line
	 * @return true if success
	 **/
	static bool ExportFilesToTextAndDiff(const FString& Params);

	/**
	 * Copies a uasset file or map to a temp location so it can be loaded without disruption to anything.
	 * @param InOutFilename Both input and output. The original filename as input, output as the temp filename
	 * @return true if success
	 **/
	static bool CopyFileToTempLocation(FString& InOutFilename);

	/**
	 * Loads a uasset file or map and provides a sorted list of contained objects
	 * (but not subobjects, as those will get exported anyway).
	 * @param Filename File to load
	 * @param LoadedObjects Sorted list of objects
	 * @return true if success
	 **/
	static bool LoadFile(const FString& Filename, TArray<UObject *>& LoadedObjects);

	/**
	 * Exports the given objects as a text dump to the named file.
	 * (Fixed copy-pasted description: this writes the export, it does not load.)
	 * @param Filename Name to save the text export as
	 * @param LoadedObjects List of objects to export
	 * @return true if success
	 **/
	static bool ExportFile(const FString& Filename, const TArray<UObject *>& LoadedObjects);

	/**
	 * Runs an external diff utility.
	 * @param Filename1 First filename
	 * @param Filename2 Second filename
	 * @param DiffCommand Diff command, with {1} {2} in it; for example:
	 *        C:/Program Files/Araxis/Araxis Merge/AraxisP4Diff.exe {1} {2}
	 * @return true if success
	 **/
	static bool ExportFilesToTextAndDiff(const FString& Filename1, const FString& Filename2, const FString& DiffCommand);
};
/**
 * @brief Cleanup some crap in here.
 *
 * Tears down daemon connection state: logs a debug message, clears the
 * donky connection list, and zeroes the daemon fd_set so no stale
 * descriptors remain registered.
 */
static void clean_dis_shiz(void)
{
        DEBUGF(("Cleaning up some daemon junk... ;[\n"));
        donky_conn_clear();
        FD_ZERO(&donky_fds);
}
def stack(self, new_column_name=None, drop_na=False, new_column_type=None):
    """
    Stack the values of this SArray into a single column by delegating to
    SFrame.stack on a one-column frame named "SArray".

    Parameters are passed straight through to SFrame.stack:
    new_column_name, drop_na, new_column_type.
    """
    # Imported locally — presumably to avoid a circular import between the
    # SArray and SFrame modules; TODO confirm.
    from .sframe import SFrame as _SFrame

    return _SFrame({"SArray": self}).stack(
        "SArray",
        new_column_name=new_column_name,
        drop_na=drop_na,
        new_column_type=new_column_type,
    )
def check_all(self, *args, **kwargs):
    """
    Run the base class's check_all and return the errors collected during
    the run.

    self.errors is reset to an empty list first, then returned after the
    superclass finishes; presumably the base run appends to it via a
    reporting callback — TODO confirm against the base Checker class.
    """
    self.errors = []
    super(Checker, self).check_all(*args, **kwargs)
    return self.errors
/**
 * A composite unary functor yielding no result: calling it forwards the
 * value to every composed consumer, in iteration order.
 *
 * @param <E> the consumed element type
 * @author rferranti
 */
public class PipelinedConsumer<E> implements Consumer<E> {

    private final Iterable<Consumer<E>> consumers;

    public PipelinedConsumer(Iterable<Consumer<E>> consumers) {
        dbc.precondition(consumers != null, "cannot create a pipeline from a null iterable of consumers");
        this.consumers = consumers;
    }

    /**
     * Feeds the value to every composed consumer.
     *
     * @param value the value to hand to each delegate
     */
    @Override
    public void accept(E value) {
        for (Consumer<E> delegate : consumers) {
            delegate.accept(value);
        }
    }
}
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { Reservation } from './reservations.entity';

/**
 * CRUD service for Reservation entities, backed by a TypeORM repository.
 */
@Injectable()
export class ReservationsService {
  constructor(
    @InjectRepository(Reservation)
    private readonly cardsRepository: Repository<Reservation>,
  ) {}

  /** Persists the given (partial) reservation and returns the saved entity. */
  async create(entity: Partial<Reservation>) {
    return await this.cardsRepository.save(entity);
  }

  /** Returns every reservation. */
  async findAll() {
    return await this.cardsRepository.find();
  }

  /**
   * Returns one reservation by id (undefined when not found).
   *
   * FIX: `id` previously had an implicit `any` type; it is now typed as
   * string to match update()/delete(), and the method is async for
   * consistency with its siblings (the returned promise is unchanged).
   */
  async findOne(id: string) {
    return await this.cardsRepository.findOne(id);
  }

  /** Applies a partial update, then re-reads and returns the fresh entity. */
  async update(id: string, entity: Partial<Reservation>) {
    await this.cardsRepository.update(id, entity);
    return await this.cardsRepository.findOne(id);
  }

  /** Deletes by id and echoes the id back to the caller. */
  async delete(id: string) {
    await this.cardsRepository.delete({ id });
    return { id };
  }
}
import type { TWorksTab, TBlogTab } from '@/spec'

// Tab keys for the "works" page.
// NOTE(review): the `as Record<Uppercase<TWorksTab>, TWorksTab>` assertion is
// unchecked — if TWorksTab gains a member, a missing key here will NOT be
// flagged by the compiler; consider `satisfies` once TWorksTab is confirmed.
export const WORKS_TAB = {
  STORY: 'story',
  BASIC: 'basic',
  TECHSTACKS: 'techstacks',
  COMMUNITY: 'community',
  MILESTONE: 'milestone',
  INTERVIEW: 'interview',
} as Record<Uppercase<TWorksTab>, TWorksTab>

// Items actually rendered in the works tab bar (titles are Chinese UI labels).
// Several tabs are parked below as comments, presumably pending content.
export const WORKS_TAB_ITEMS = [
  {
    title: '简介',
    raw: WORKS_TAB.STORY,
  },
  {
    title: '概况',
    raw: WORKS_TAB.BASIC,
  },
  {
    title: '技术栈',
    raw: WORKS_TAB.TECHSTACKS,
  },
  // {
  //   title: '作者访谈',
  //   raw: 'interview',
  // },
  // {
  //   title: '更新',
  //   raw: WORKS_TAB.MILESTONE,
  // },
  // {
  //   title: '作者访谈',
  //   raw: WORKS_TAB.INTERVIEW,
  // },
  // {
  //   title: '社区',
  //   raw: WORKS_TAB.COMMUNITY,
  // },
]

// Tab keys for the blog page; same unchecked-assertion caveat as WORKS_TAB.
export const BLOG_TAB = {
  DIGEST: 'digest',
  AUTHOR: 'author',
  FEEDS: 'feeds',
} as Record<Uppercase<TBlogTab>, TBlogTab>

// Items rendered in the blog tab bar.
export const BLOG_TAB_ITEMS = [
  {
    title: '摘要',
    raw: BLOG_TAB.DIGEST,
  },
  {
    title: '历史文章',
    raw: BLOG_TAB.FEEDS,
  },
  {
    title: '博客作者',
    raw: BLOG_TAB.AUTHOR,
  },
]
import styled from 'styled-components'; import moment from 'moment'; const St = { CommentWrapper: styled.div` //padding: 1rem; // border-radius: 8px; // border: 1px solid; margin: 1rem 0; `, UserInfoContainer: styled.div` padding: 1rem 0; .rating { font-size: 2rem; font-weight: 800; padding-right: 1rem; } .user-name { font-weight: 800; } .date { color: grey; } `, IconContainer: styled.div` width: 40px; height: 40px; border-radius: 50%; background: grey; `, CommentTextContainer: styled.div` text-align: left; ` }; type CommentProps = { commentInfo: { USER_ID: string; ITEM_SCORE: number; ITEM_COMMENT: string; INSERT_DATE: string; UPDATE_DATE: string; }; }; const Comment = ({ commentInfo }: CommentProps) => { return ( <St.CommentWrapper> <St.UserInfoContainer className="flex"> <div className="flex-center"> <St.IconContainer /> </div> <div style={{ padding: '0 1rem' }}> <div className="flex"> <span className="rating">{commentInfo?.ITEM_SCORE}</span> </div> <div className="flex"> <div className="user-name">{commentInfo?.USER_ID}</div> <div className="date"> {moment(commentInfo?.UPDATE_DATE).format('YYYY년 MM월 DD일 ')} </div> </div> </div> </St.UserInfoContainer> <St.CommentTextContainer> <span>{commentInfo?.ITEM_COMMENT}</span> </St.CommentTextContainer> </St.CommentWrapper> ); }; export default Comment;
//========================================================================
//
// Parser.h
//
// Copyright 1996-2003 Glyph & Cog, LLC
//
//========================================================================

//========================================================================
//
// Modified under the Poppler project - http://poppler.freedesktop.org
//
// All changes made under the Poppler project to this file are licensed
// under GPL version 2 or later
//
// Copyright (C) 2006, 2010 Albert Astals Cid <[email protected]>
//
// To see a description of the changes please see the Changelog file that
// came with your tarball or type make ChangeLog if you are building from git
//
//========================================================================

#ifndef PARSER_H
#define PARSER_H

#ifdef USE_GCC_PRAGMAS
#pragma interface
#endif

#include "Lexer.h"

//------------------------------------------------------------------------
// Parser
//------------------------------------------------------------------------

class Parser {
public:

  // Constructor.
  Parser(XRef *xrefA, Lexer *lexerA, GBool allowStreamsA);

  // Destructor.
  ~Parser();

  // Get the next object from the input stream. The fileKey/encAlgorithm
  // parameters are used to decrypt strings and streams when present.
  Object *getObj(Object *obj, Guchar *fileKey = NULL,
		 CryptAlgorithm encAlgorithm = cryptRC4, int keyLength = 0,
		 int objNum = 0, int objGen = 0);
  // Overload taking fetchOriginatorNums — presumably the set of object
  // numbers already being fetched, used to break reference cycles; confirm
  // against the implementation.
  Object *getObj(Object *obj, Guchar *fileKey,
		 CryptAlgorithm encAlgorithm, int keyLength,
		 int objNum, int objGen, std::set<int> *fetchOriginatorNums);
  Object *getObj(Object *obj, std::set<int> *fetchOriginatorNums);

  // Get stream.
  Stream *getStream() { return lexer->getStream(); }

  // Get current position in file.
  int getPos() { return lexer->getPos(); }

private:

  XRef *xref;			// the xref table for this PDF file
  Lexer *lexer;			// input stream
  GBool allowStreams;		// parse stream objects?
  Object buf1, buf2;		// next two tokens
  int inlineImg;		// set when inline image data is encountered

  Stream *makeStream(Object *dict, Guchar *fileKey,
		     CryptAlgorithm encAlgorithm, int keyLength,
		     int objNum, int objGen, std::set<int> *fetchOriginatorNums);
  void shift(int objNum = -1);
};

#endif
# test1 = [['c','b','a'],['b','c','d'],['c','b','c']]
test1 = ['cba', 'bcd', 'cbc']


def african_crossword(n, m, grid):
    """
    Decode an "African crossword": keep a letter only if it is unique in
    both its row and its column, reading survivors row by row, left to
    right (Codeforces 90B).

    :param n: number of rows
    :param m: number of columns
    :param grid: list of n strings (or char lists) of length m
    :return: the decoded word as a string
    """
    kept = []
    for i in range(n):
        for j in range(m):
            ch = grid[i][j]
            # A letter survives only when no duplicate exists in its row or column.
            unique_in_row = all(grid[i][h] != ch for h in range(m) if h != j)
            unique_in_col = all(grid[k][j] != ch for k in range(n) if k != i)
            if unique_in_row and unique_in_col:
                kept.append(ch)
    # Join once at the end instead of repeated string concatenation.
    return "".join(kept)


# Guarding the interactive part makes the module importable (e.g. for tests)
# without blocking on stdin; running it as a script behaves as before.
if __name__ == "__main__":
    n, m = map(int, input().split())
    grid = [input() for _ in range(n)]
    print(african_crossword(n, m, grid))
#include<bits/stdc++.h>
using namespace std;

// Prints the elements of a small sample vector, concatenated with no
// separator ("12345").
int main(){
    vector<int> v = {1, 2, 3, 4, 5};
    int n = v.size();
    // BUG FIX: removed the stray fragment "cout<<arr[i] +" that referenced an
    // undefined array 'arr' outside any loop and left a dangling '+' — it did
    // not compile.
    for (int i = 0; i < n; i++) {
        cout << v[i];
    }
    return 0;
}
def __get_type(self, expectation, options): if "is_custom_func" in options.keys(): setattr(self, "mtest", expectation) return "CUSTOMFUNC" elif "is_substring" in options.keys(): return "SUBSTRING" elif "is_regex" in options.keys(): return "REGEX" elif isinstance(expectation, type): return "TYPE" else: return "VALUE"
The Sister Wives premiere on TLC opened Kody Brown and his four wives up to a mix of voyeuristic interest and blatant disgust. So how are the real life sister wives of the Brown family taking their new-found fame? They've set some ground rules, and Janelle, aka Wife Number 2 (at the far left in the photo above), explained to The Stir why sex is off the table, what it's like fearing the law, and why you won't be calling it a real life Big Love much longer. How does it feel knowing people in America are looking at a life that's totally normal to you with a voyeuristic approach? We knew that was a byproduct of what we were doing. We really wanted people to see there was more to the story, as far as our life, than the current stereotype. We were willing -- and we discussed this as a family -- to kind of open our home so we can show people. I have this amazing family, and I'm really glad to show them so people can see we're happy, and our children are well-adjusted. There was a trade-off to having someone come in our home, but we're hoping that it helps dispel some of those stereotypes that are currently in the media. You are being compared to the show Big Love, kind of like the real life Big Love. Do you like that comparison, not like that comparison? I actually haven't ever seen the show. Laughs. It's about a polygamist family, that's maybe the only thing. It's a soap opera; it's completely acted. What you'll see with our family, we made a very conscious decision to be very real on the show. You'll see us being real, and the emotions are real. So as far as the reality versus something that's acted, I don't know how much similarity there will really be. I have watched Big Love, and you're right, it's acting. Yeah, I saw that guy on Twister! But I haven't seen the show. But one of the big themes of that show is that it's scary to talk about their lifestyle because they're afraid they'll get in some sort of legal trouble. 
Is that something you had to deal with or is that blown up for Hollywood? There was a risk. We had to accept several risks before we decided to go public. We are law-abiding citizens; we just have a faith. We're not lawbreakers. We just have a faith that prescribes a family that's a little bit different. I'm not even allowed to drive when we go places because I don't speak -- we're really boring! But, you know, we had to assess those risks. And that was one of them. We decided as a family that our chance to show the world there's more going on here than is currently known was worth it for the sake of our children to be more free, to have more choices. Those benefits far outweigh the risks. One of the things that's most fascinating about your story in particular is you were raised Mormon but you weren't familiar at all with polygamy before you met Kody? No! Isn't that crazy? I really didn't know they existed. I think I was like in my 20s before I realized. I mean, I lived in the area my whole life, but I wasn't even aware there was such a thing as polygamists growing up. Was it something your parents didn't talk about because your childhood Mormon faith was trying to move away from it? Or was there a shame to it? No, it just wasn't discussed. The Mormon church gave up polygamy 120 years ago. Would you say yours was the hardest journey into the family because you had to make that jump? I can speak for my sister wives. We all chose this as adults. So they all had to make their own decision. They grew up in homes like that where it was their family life, and they were happy. But you still have to make the choice of whether you're going to marry into a family like this. I think my choice was probably similar. It was a faith-based decision. It didn't hurt that Kody was such an awesome guy either! Being a mom, you talk about how you just kind of like going to work! Is it easier to do those kinds of things having sister wives? Oh yeah! Are you kidding? It's great. 
I have so much freedom. I've always been able to work, and I like working. Trust me, I was never stay-at-home mom material. I knew that from the beginning. I like going to work. I don't have to miss work generally for sick days for my kids. I've been able to travel through the years, knowing that there was someone at home who was genuinely interested in the welfare of my children and with watching them and with rescuing me from last-minute science projects that hadn't been mentioned to me until the day before they were due. And doing the doctor's appointments and the soccer runs. It's nice; I get to come home and do those things, but my children have so much more opportunity because they have somebody at home to do those things with them. And I get to work, and that makes me a much nicer Mommy. For sure! Everyone is fascinated by your sex life. What's the advantage of not having your husband in your bed every night? Laughs. We really ... we have opened our home to the camera, to show our family, but we've really tried to cut it off with bedroom questions. It's a very private thing for us, and we aren't really discussing that. I'm very happy in my relationship. I'm very happy in my life. I love my choices I've made. I would make those choices again. I have a tremendous support network, and I have a husband who's amazing. He's getting his edges knocked off all the time; he's really always in trouble with somebody. And it makes him so much more sensitive, I think. He's really fabulous. I think he's the best man I know. The take-home for other relationships seems to be make good use of the time you have together. I think we really capitalize on the time. We do so much as a family. Our life is our family, and the whole family. It's healthy for relationships to have that individual time, so we really have to make sure that when we have that individual time that he and I -- my husband and I -- are really engaging. 
We block off everything, and we really talk to each other and spend some time together. That's a healthy part of our relationship; that helps keep it healthy. Are there any other things you've decided as a family to keep private? We really are representing only our family. What you're seeing here is our family, and we've chosen to open up our home to show our family. That's all I can really speak for. Are you watching Sister Wives? How would a sister wife help out in your house? Images via Discovery
TAMPA, Fla. -- Evel Knievel has sued Kanye West, taking issue with a music video in which the rapper takes on the persona of "Evel Kanyevel" and tries to jump a rocket-powered motorcycle over a canyon. Knievel, whose real name is Robert Craig Knievel, filed a lawsuit in federal court in Tampa on Monday claiming infringement on his trademark name and likeness. He also claims the "vulgar and offensive" images depicted in the video damage his reputation. "That video that Kanye West put out is the most worthless piece of crap I've ever seen in my life, and he uses my image to catapult himself on the public," the 68-year-old daredevil said Tuesday. A spokesman for West said the 28-year-old rapper no comment. The lawsuit seeks damages and to halt distribution of the video. In the video for "Touch the Sky," released earlier this year, West dons the familiar Knievel star-studded jumpsuit and jumps a canyon in a vehicle "visually indistinguishable" from the one used by Knievel in his failed attempt to jump the Snake River Canyon in Idaho in 1974, the lawsuit said. The video, which features Pamela Anderson as West's girlfriend, contains "vulgar and offensive sexual images, language and conduct involving `Evel Kanyevel' and women apparently trying to gain his sexual interest," according to the lawsuit. "The guy just went too far using me to promote his filth to the world," said Knievel, who lives in Clearwater and has been in poor health in recent years. "I'm not in any way that kind of a person." The lawsuit also names Roc-A-Fella Records, video director Chris Milk and AOL for distributing it. West was so disappointed at not winning best video for "Touch the Sky" at the MTV Europe Music Awards last month that he crashed the stage when it was presented to Justice and Simian for "We Are Your Friends." In a tirade riddled with expletives, West said he should have won the prize because it "cost a million dollars, Pamela Anderson was in it. I was jumping across canyons." 
Knievel failed spectacularly in the 1974 jump. He was secured inside the cockpit and the Sky-Cycle was fired up. But his parachute opened just as he cleared the ramp. After soaring about 1,000 feet, he wound up landing about 20 feet from the river on the rocky south bank. He sustained only scrapes and bruises. In the video, West's vehicle crashes to the bottom of the canyon in flames. ___ On the Net: Evel Knievel: http://www.evelknievel.com/ Kanye West: http://www.kanyewest.com/
<reponame>magimenez/jatytaweb /** * */ package com.crawljax.web.jatyta.plugins.util.http; /** * Enum for common HTTP Errors Code. * @author mgimenez * */ public enum HttpError { UNAUTHORIZED("HTTP ERROR 401"), BAD_REQUEST("HTTP ERROR 400"), FORBIDDEN("HTTP ERROR 403"), NOT_FOUND("HTTP ERROR 404"), INTERNAL_SERVER_ERROR("HTTP ERROR 500"); private final String value; private HttpError(final String value) { this.value = value; } public String getValue() { return value; } @Override public String toString() { return getValue(); } }
/**
 * Returns the weighting value for a given grey value, using the weighting
 * mode selected in the constructor.
 *
 * @param z grey value
 * @return weighting value
 */
@Override
protected double w(double z) {
    // Parabolic weighting first; exact arithmetic kept unchanged.
    if (this.weightMode == WeightMode.PARABEL) {
        return -(1.0 / 129) * z * z + (127.0 / 64) * z;
    }
    // NONE weights everything equally; any other mode defers to the base class.
    return (this.weightMode == WeightMode.NONE) ? 1 : super.w(z);
}
Diversity of endophytic fungi of single Norway spruce needles and their role as pioneer decomposers The diversity of endophytic fungi within single symptomless Norway spruce needles is described and their possible role as pioneer decomposers after needle detachment is investigated. The majority (90%) of all 182 isolates from green intact needles were identified as Lophodermium piceae. Up to 34 isolates were obtained from single needles. Generally, all isolates within single needles had distinct randomly amplified microsatellite (RAMS) patterns. Single trees may thus contain a higher number of L. piceae individuals than the number of their needles. To investigate the ability of needle endophytes to act as pioneer decomposers, surface‐sterilized needles were incubated on sterile sand inoculated with autoclaved or live spruce forest humus layer. The dry weight loss of 13–17% found in needles after a 20‐week incubation did not significantly differ between the sterilized and live treatments. Hence, fungi surviving the surface sterilization of needles can act as pioneer decomposers. A considerable portion of the needles remained green during the incubation. Brown and black needles, in which the weight loss had presumably taken place, were invaded throughout by single haplotypes different from L. piceae. Instead, Tiarasporella parca, a less common needle endophyte, occurred among these invaders of brown needles. Needle endophytes of Norway spruce seem thus to have different abilities to decompose host tissues after needle cast. L. piceae is obviously not an important pioneer decomposer of Norway spruce needles. The diversity of fungal individuals drops sharply when needles start to decompose. Thus, in single needles the decomposing mycota is considerably less diverse than the endophytic mycota.
Doctors Once Thought Bananas Cured Celiac Disease. They Saved Kids' Lives — At A Cost Enlarge this image toggle caption AP AP The year was 1945, and 2-year-old Lindy Thomson had been given a few weeks to live. She suffered from diarrhea and projectile vomiting, and she was so thin and weak, she could no longer walk. Her parents had taken her from doctor to doctor. Finally, Dr. Douglas Arnold in Buffalo, N.Y., offered a most unusual prescription: She was to eat bananas. "At least seven bananas a day," recalls the patient, who now goes by her married name, Lindy Redmond. "To whom it may concern," the doctor wrote on a prescription pad that Lindy still has as a keepsake. Lindy Thomson "has celiac disease (a nutritional disorder)." Enlarge this image toggle caption Courtesy of Lindy Redmond Courtesy of Lindy Redmond Arnold recommended that Lindy move to the clean mountain air in California and follow a high-calorie, banana-based diet invented by Dr. Sidney Haas in 1924. The diet forbade starches but included numerous daily bananas, along with milk, cottage cheese, meat and vegetables. It was so effective in patients with celiac disease that in the 1930s, the University of Maryland endorsed the diet, according to pediatric gastroenterologist Alessio Fasano, chair of pediatrics at Harvard Medical School and a specialist in celiac disease. "At that time, around 30 percent of children with celiac died. Parents were instructed to drop their children off at the hospital for six months," says Fasano. If the children survived and thrived on the banana-based diet, the parents could then "pick them up and take them home." We now know that celiac is an autoimmune disorder that strikes genetically predisposed people. It's triggered by gluten in grains such as wheat, barley and rye. In the presence of gluten, the immune system of people with celiac disease attacks the small intestine, damaging the precious, fingerlike projections called villi that line it. 
This damage can lead to malnutrition, as well as a panoply of problems — from gas and bloating to fatigue, anemia, osteoporosis and an increased risk of certain cancers. The disease is estimated to affect 1 in 100 people worldwide. Enlarge this image toggle caption Courtesy of Lindy Redmond Courtesy of Lindy Redmond But in 1924, decades before gluten was discovered to be the culprit, celiac disease was a black box of mystery. "The diet was unintentionally gluten-free and also incredibly high in calories," explains Tricia Thompson, founder of Gluten Free Watchdog. "It is incredible what the mothers and fathers did, going down to the docks to meet the ships and buy multiple bananas hanging on branches. So many people were so very grateful to him," she says of Haas. "He saved their lives." Haas arrived at his banana diet through an honest error — one that, unfortunately, had serious repercussions for people with celiac disease. In his 1924 paper, he wrote of a town in Puerto Rico where "dwellers who eat much bread suffer from [celiac] sprue while the farmers who live largely on bananas never." Haas skipped over the role of wheat and focused instead on the exotic bananas, which he thought held curative powers. (Not unlike the esteem in which exotic "superfruits" such as mangosteen and acai berry are held today.) "Dr. Haas' approach," says Fasano, "was based on the fact that bananas had the best characteristics to counterbalance the purging diarrhea that was the typical clinical presentation of celiac disease at that time." Parents and children came to Haas from all over the U.S. He eventually treated over 600 people who had celiac disease. One of his "banana babies" wrote down her memories for Gluten Free Watchdog's site, recalling how Haas' "office was filled with children of all ages and many I remember looked like they came from the concentration camps ... with their sunken eyes and swollen stomachs." Once on the diet, the children recovered. 
For a time, belief in the healing properties of the banana was widespread and extended beyond celiac disease. Mothers were told to feed their infants bananas starting at 4 weeks. And at Johns Hopkins University, a doctor named George Harrop tried a version of the banana diet on people with diabetes and found that it helped them lose weight. "The public went bananas," says Alan Levinovitz, a religion professor at James Madison University in Harrisonburg, Va., and author of The Gluten Lie: And Other Myths About What You Eat. But Haas' honest error led to serious consequences. As the children recovered, wheat was reintroduced. "All my life I have told doctors I had celiac as a child," says Lindy Redmond, "and that I grew out of it. And all my life I have eaten wheat." It was only when she was 66 that her doctor gave her a test and took seven intestinal biopsies. "My intestine was very damaged," she reports. "My doctor said she didn't know if it would ever recover." It was then that Redmond wondered about the possible connection between lifelong, untreated celiac disease and her two miscarriages, frequent bouts of colds and bronchitis, and interminable constipation. Now 74 and off gluten, Redmond says the colds and constipation are gone. It was a Dutch pediatrician, Willem Karel Dicke, who first realized that wheat might be linked to celiac disease. He noticed that in the last few years of World War II, when bread was unavailable in the Netherlands, the mortality rate from celiac disease dropped to zero. In 1952, Dicke and his colleagues identified gluten as the trigger for celiac disease, and the gluten-free diet was born. But Haas railed against the gluten-free diet and went on promoting his banana-based cure, according to Levinovitz. "Haas saw these miraculous reversals," explains Levinovitz, "and didn't want to give up his status as a trailblazing savior." Only the banana diet, Haas claimed, could achieve "a cure which is permanent." 
As a result, says Levinovitz, celiac disease was taken more seriously in Europe and continued to be "massively underdiagnosed here in the U.S." Jill Neimark is an award-winning science journalist and an author of adult and children's books.
def handle_email_message(event, context):
    """
    Pub/Sub-triggered handler that sends a plain-text email via SendGrid.

    :param event: dict with base64-encoded 'data' holding a JSON message with
                  "from", "to", "subject", and "text" keys
    :param context: event context (unused)
    :return: 200 after the send attempt, 500 when SENDGRID_API_KEY is unset
    """
    pubsub_message = base64.b64decode(event['data']).decode('utf-8')
    # Wrapping in [...] and taking element 0 tolerates a trailing-comma
    # payload — kept as-is to preserve existing parsing behavior.
    message_dict = json.loads("[" + pubsub_message + "]")[0]

    api_key = os.environ.get('SENDGRID_API_KEY')
    if api_key is None:  # idiom fix: 'is None' instead of '!= None'
        return 500

    sg = sendgrid.SendGridAPIClient(api_key=api_key)
    from_email = Email(message_dict["from"])
    to_email = To(message_dict["to"])
    subject = message_dict["subject"]
    content = Content("text/plain", message_dict["text"])
    mail = Mail(from_email, to_email, subject, content)
    sg.client.mail.send.post(request_body=mail.get())
    return 200
#ifndef PACKAGE_H
#define PACKAGE_H

#include "subpackage.h"

// A named package entry describing where its payload lives in both the
// raw segment and the decompressed stream, plus its subpackages.
class Package{
    public:
        uint32_t name_len;              // length of 'name' in bytes — presumably not NUL-terminated; confirm in read()
        char* name;                     // package name; allocation handled by read() — TODO confirm
        struct Info{
            uint64_t decompressed_offset; // Location in Decompressed Data
            uint64_t decompressed_size;
            uint64_t start_offset;
            uint64_t segment_size;
            uint32_t number_of_subpackages;
        } __attribute__((packed)) info; // packed: struct mirrors the on-disk layout byte-for-byte
        unsigned int id;
        SubPackage* subpackages;        // presumably an array of info.number_of_subpackages entries
        void read(std::ifstream&);      // populates this Package from a stream
        friend std::ostream &operator<<(std::ostream&, Package);
};
#endif
import os.path
import json


def read_user_config():
    """
    Load the per-user gensite configuration from ~/.gensite.

    :return: the parsed JSON configuration as a dict
    :raises CommandError: when the config file does not exist (user has not
            run ``gensite init``)
    """
    user_config_file = os.path.join(os.path.expanduser("~"), ".gensite")
    if not os.path.exists(user_config_file):
        raise CommandError("No user file exists, use gensite init first : " + user_config_file)
    # FIX: removed the dead 'user_config = {}' initialization that was
    # immediately overwritten; return the parsed JSON directly.
    with open(user_config_file, "r", encoding="utf-8") as f:
        return json.load(f)
PUBLIC DISTRIBUTION SYSTEM IN INDIA FROM A WAR-TIME RATIONING MEASURE TO LEGAL ENTITLEMENT Public Distribution System (PDS) is an Indian food security system established under the Ministry of Consumer Affairs, Food, and Public Distribution. PDS developed as a strategy of managing shortages through the inexpensive distribution of foodgrains at reliable rates. PDS has been in use in India for more than eighty years. The present paper is an attempt to trace the development of the Indian public distribution system, a mechanism for ensuring food security in India. The changing functions of the public distribution system over a long span of time since its inception are discussed in the present paper. This paper is descriptive and exploratory in nature and is based on secondary data collected from various authentic sources such as research papers, government websites, newspapers, and other published materials. KEY WORDS: PDS; National Food Security Act 2013; COVID-19; 2nd World War
package def

import "github.com/jumper86/jumper_error"

// Error codes, grouped by subsystem: 11xxx for connection lifecycle,
// 12xxx for host/network information lookups.
const (
	ErrConnClosedCode           = 11011
	ErrConnUnexpectedClosedCode = 11012
	ErrInvalidConnParamCode     = 11013

	ErrGetExternalIpCode = 12011
	ErrGetMacAddrCode    = 12012
)

// Pre-built sentinel errors pairing each code with its message.
var (
	ErrConnClosed           = jumper_error.New(ErrConnClosedCode, "conn is closed.")
	ErrConnUnexpectedClosed = jumper_error.New(ErrConnUnexpectedClosedCode, "conn is unexpected closed.")
	ErrInvalidConnParam     = jumper_error.New(ErrInvalidConnParamCode, "create conn invalid param.")

	ErrGetExternalIp = jumper_error.New(ErrGetExternalIpCode, "get external ip failed.")
	ErrGetMacAddr    = jumper_error.New(ErrGetMacAddrCode, "get mac addr failed.")
)
import tw from 'twin.macro'; import { useMousePosition } from 'lib/hooks'; export const Mouse = ({ disabled = false }: { disabled?: boolean }) => { const { x, y } = useMousePosition(); return ( <span css={tw`fixed top-0 bottom-0 left-0 right-0 z-10 h-screen pointer-events-none`} > <span hidden={disabled} css={[ tw`absolute top-0 left-0 z-10 pointer-events-none`, tw`w-72px h-72px bg-mouse opacity-75 border-mouse border-3px rounded-full`, tw`transition-all duration-75 ease-out motion-reduce:transform-none`, ]} style={{ left: x - 72 / 2, top: y - 72 / 2, }} /> </span> ); };
CSIRO (the Commonwealth Scientific and Industrial Research Organisation) is facing another round of job losses to basic public research, with the news that the organisation is making deep staffing cuts to areas such as Oceans and Atmosphere and Land and Water. Internally, there are signals that Oceans and Atmosphere will be cut substantially, amid 350 job losses over two years across the organisation. In a letter to staff, CSIRO chief executive Larry Marshall said: CSIRO pioneered climate research … But we cannot rest on our laurels as that is the path to mediocrity. Our climate models are among the best in the world and our measurements honed those models to prove global climate change. That question has been answered, and the new question is what do we do about it, and how can we find solutions for the climate we will be living with? This letter reveals a lack of insight about what climate models are for and how they can be used. Their job was not to “prove” that the climate was likely to change and that we had to respond. Their main role is to understand how the climate system works and then to use that knowledge to manage risk, make decisions and improve productivity. Of course the question of whether humans are changing the climate has been unequivocally answered in the affirmative. But that doesn’t mean there aren’t plenty more questions to answer. After the federal government was so widely criticised under Tony Abbott for frequently calling climate science into question, it is ironic now to make those same climatologists redundant on the basis that their work is done and dusted. Expect the unexpected One thing we know about climate change is that the unexpected will occur and while we try to minimise that through better science, at times we will be surprised. So how do we make sure we do better next time? 
Recent examples of useful outputs from the CSIRO climate research program include: The floods of 2011 were more severe than we expected they might be, following on the heels of a record-breaking drought. Both events contained a climate change signal. Groundbreaking research from Wenju Cai and his colleagues at CSIRO have given us a much better idea of how the Pacific and Indian Oceans combine under climate change to intensify extreme events. Fire danger in southeastern Australia is higher than projected a decade ago for 2030 to 2050. Now we need to understand why the fire danger is higher than expected and where it might lead, especially if climate will change the way vegetation responds to fire. CSIRO research has highlighted the role of the Southern Ocean as a carbon sink, and in combining with other oceans to influence our weather, not least its substantial role in producing the rainfall that sustains production in the wheat belt. CSIRO recently provided a comprehensive set of projections of Australia’s future climate based on the latest climate modelling and related science, tailored for a broad range of uses. CSIRO has long been a global leader in projecting climate at the regional scale and presenting the information in a form that suits decision makers, and thus Australia has been very well served in this vital input into national adaptation and mitigation planning. There is little doubt that the funds invested in climate research to date, not to mention land and water research, have been returned many times over in higher production, avoided costs and healthier people and environments. Australian climate research has a global reputation for punching above its weight. To think that it can be cut and to expect that Australia would be better off for it, shows a radical misunderstanding of what public good research is, and what it can do.
def datetime_to_mjd_years(date: datetime) -> float:
    """Express a datetime as years elapsed since the Modified Julian Date epoch.

    The offset of ``date`` from ``MJD_EPOCH`` is converted to SI years via
    ``timedelta_to_si_years``.
    """
    elapsed = date - MJD_EPOCH
    return timedelta_to_si_years(elapsed)
Update #4: Less than half an hour to go! Update #3: More Enemy Types Revealed - Happy Holidays! Update #2: WE'RE FUNDED! :D Update #1: 4-Pack details, Demo update, and Chef Penguino Planet Io Entertainment, who brought players the memory-stretching game Chef Penguino, present Ancient Axes: Heroes on Paper, an action-packed puzzler for solo players and an action-stuffed thrill ride for 2-4 players. Planet Io hopes to bring back the successful couch and arcade co-op from the lost 1980's legends like Gauntlet and Teenage Mutant Ninja Turtles, and at the same time inject some unique paper-style art influenced from the likes of the Paper Mario series. The first concept of Ancient Axes was created in 2008 by a collaboration of game design students from the University of Baltimore and the Image Campus of Argentina. The unique single player idea has been refined and the entire game expanded to include multiplayer. Our hope for Ancient Axes: Heroes on Paper is that it can be listed among the titles available at launch for the OUYA! Shortly after the game is released on the OUYA, we plan for it to be available for PC, Mac, and Linux gamers. (We expect the delay to only be about 2 weeks. Exact dates will be announced ASAP.) To make this a reality, we need your help! Hmmm... this seems familiar. Where have I seen this before? This is the second time Ancient Axes: Heroes on Paper has appeared on Kickstarter. We had a good run, learned a lot, and were even selected as Technically Baltimore's Kickstarter of the Week, but were ultimately unable to meet our goal. So, we've decided to switch gears, cut out all of the unnecessary parts, and just leave the unique gameplay. With this new vision, development time is also cut down, and our aim is to be listed in the history books as an OUYA launch title. (Gaming history books will be written, we're sure of that.) Check out the original here. So, what's changed? Firstly, AA: HoP will no longer be a $10 game.
Now, gamers will only have to pay $1.99... unless they're backers - for you guys it's only $1.00! Rewards have also changed slightly. We've also cut the reward tier for getting Ancient Axes swag in half (you don't have to call it swag if you don't want to). Originally, we thought Ancient Axes: Heroes on Paper would be a collection-type game, where players could collect various hats and weapon designs. We've cut that out. That single feature added a tremendous amount of work to the project, so removing just that feature has made it possible for us to shorten the expected development time. With the exception of Animal Crossing and possibly Team Fortress 2, not many games are based heavily around collecting hats, so we're hoping the gameplay in AA: HoP can speak for itself. We have also removed the "Endurance" game mode - allowing us to focus all of our energy on making the separate single/multi-player campaigns great. Like the famous writer and pioneering aviator Antoine de Saint-Exupéry expertly said, "a designer knows he has achieved perfection not when there is nothing left to add, but when there is nothing left to take away." We hope to cut all the "fat" out and just leave the good stuff. In return, gamers can expect a much more affordable game, and be able to play it the day the OUYA arrives! (Or shortly thereafter for non-OUYA fans!) Lastly, the goal is now more than $7,000 lower. (Oh, also: zombies.) Why is the goal only $130? Here is our incredibly detailed cost breakdown: $119 - 1 Scirra Construct 2 License $10 - Approximate Kickstarter & Amazon fees $1 - 1 Double Cheeseburger "But, Mr. Planet Io, won't that mean you might not make any money on this project?" you are surely asking yourself. Yes, that is a possibility, but that is okay, because: the dream is to be a launch title for a system. Since we're a very young company, we probably won't be able to do that for the PS4 or the Xbox 720.
The OUYA gives us a chance to launch a game with a system without paying $6 billion in licensing fees (approx. cost)! AWESOME. I heard pledging to AA:HoP can give backers incredible powers. Is that true? That is absolutely true. Specifically, they are... +1 POWER OF NOSTALGIA & CAMARADERIE Taking inspiration from the classic Gauntlet series, as well as many other multiplayer adventures from the 80's, we've put together a unique 2-4 player quest that will be completely separate from the single player campaign. Each player will receive an infinite amount of throwing axes, swords, arrows, or magic missiles to decimate the enemies on the battlefield (of which there will be many). Find power-ups to infuse your weapons with fire or ice. Search for keys to open a new way. Ignore the new way to find a secret area. +2 INTELLIGENCE IN MIND-EXPANDING PUZZLES While multiplayer will offer a frenzied kill-everything eat-pizza then move on mindset, playing the game single-player style will reward you for taking things a little slower. Controlling two characters at once (each with unique movement), you will need to overcome various lever switching and button pressing puzzles, all the while keeping two characters alive while throngs of ghosts, goblins, and ghouls attempt to gobble them up. +3 CHARISMA FOR YOUR CHARACTERS Warriors throwing axes! Knights throwing swords! Archers shooting arrows! Wizards shooting magic missiles! Each player will be able to select their own hero. Want a team of four wizards? Yep, you can do that. Player 1 is red, player 2 is blue, player 3 is green, and player 4 is purple. +4 MUSICAL WISDOM To complete the experience, the game will be equipped with a fitting soundtrack. Musical tracks will be composed by various artists, including eclectic desert rocker Mosno Al-Moseeki, techno-dub rasta-man Rod Gnarley, and that guy who created the music for Chef Penguino (game designer Dan Jorquera). Did you enjoy the music presented in our Kickstarter video? 
Download it for free. +5 FAITH IN OUR PROMISES What we promise to deliver with Ancient Axes: Heroes on Paper... We plan to use Steam's Greenlight program to make it onto Steam and add in Steam achievements. In-game achievements will be available to all systems regardless. +6 LUCK... OH, IT'S TIME TO TALK ABOUT REWARDS! Here's an easy-to-read breakdown showing what you can get for supporting the development of Ancient Axes: Heroes on Paper: Forum Access. Once the Kickstarter is completed, private forums will be available to all backers! The Full Game. Pledge only $1 or more to get the entire game for a system of your choice! Soundtrack. Pledge $2 or more to grab a digital download of the Ancient Axes: Heroes on Paper soundtrack! The Full Game for All Systems. Pledge $5 or more to grab the game for the OUYA, PC, Mac, and Linux. If you don't need them all for yourself, give them to your friends! The number of each can be whatever you want... do you need 4 OUYA copies to complete your OUYA basement arcade? Do you have Windows but your 3 best buds run Linux? We'll send a survey out at the completion of the Kickstarter asking which systems you'd like the game for. T-Shirts and Stickers. Pledge $30 or more to grab a one-of-a-kind T-shirt so you can take the Ancient Axes heroes with you wherever you go. Available in hunter green for dudes or violet for girl gamers. In addition, you will receive a sticker pack that will have those famous Heroes on Paper placed on a clear background, ready to be assigned to any post that might require a brave adventurer. WHAT!? YOU'RE AWESOME. To reward your awesomeness for funding this project in one fell swoop, your name will be featured in the game's credits as well as on our website. +7 STRENGTH FROM OUR STRETCH GOALS What could be in those treasure chests? We won't know until we meet our initial goal! UPDATE: AAAAHHH We've met our first stretch goal! Let's see what's in those treasure chests!
$150 Stretch Goal: UNLOCKED: Text-based menus are BORING. Instead, launching Ancient Axes will reveal your very own home. From here, you can walk around, select your character, get some friends to join in, and choose which game type you are going to play. Walk outside the house to choose which level to play. $300 Stretch Goal: Mini-games! Challenge your friends to see who can hit the most targets in the time limit or jump into a go-kart and race around a race track (yep, really). Unlock more mini-games by playing through the campaigns! $500 Stretch Goal: Gameplay settings! Unlock "cheats" by playing through the game - want to run through the game while invincible? How about with only 1 health point? You move twice as fast? Your enemies move twice as fast? Let's unlock this option for more ways to play the game. $1000 Stretch Goal: Pets! Want a friendly slime monster to keep you company in your new menu-house? Would that dungeon be less terrifying if you had a pet bat to follow you around? Unlock this stretch goal to find plenty of furry, slimy, or scaly creatures, just waiting to befriend you. (Some are more manly looking than others.) ~ Watch out for ink monsters! How can I help make this game a reality?
/*
Package bgp implements the BGP-4 protocol as described in RFC 4271 and
subsequent RFCs. It is able to parse all BGP messages and deals with 32 bit
ASNs (support for 16 bit ASNs is not implemented).
*/
package bgp
def st_dev(self, population=False):
    """Return the standard deviation: the square root of the variance.

    Args:
        population: forwarded unchanged to ``self.variance`` — when True the
            population variance is used, otherwise the sample variance.
    """
    variance = self.variance(population=population)
    return sqrt(variance)
package picam_test import ( "image/png" "log" "os" "github.com/cgxeiji/picam" ) func Example_save() { cam, err := picam.New(640, 480, picam.YUV) if err != nil { log.Fatal(err) } defer cam.Close() img := cam.Read() f, err := os.Create("./image.png") if err != nil { log.Fatal(err) } defer f.Close() err = png.Encode(f, img) if err != nil { log.Fatal(err) } }
-- Copyright (c) Facebook, Inc. and its affiliates.

-- | Bindings to native helpers that emit Thrift compact-protocol integer
-- encodings (LEB128 "varint" and zigzag) directly into a buffer.
module Glean.RTS.Foreign.Thrift
  ( encodeVarint
  , encodeZigZag
  ) where

import Control.Monad.ST.Unsafe (unsafeIOToST)
import Data.Int (Int64)
import Data.Word
import Foreign.C.Types
import Foreign.Ptr

import qualified Util.Buffer as Buffer

-- | Append the varint encoding of @x@ to the buffer. A 64-bit varint needs
-- at most 10 bytes, hence the allocation size; the C helper returns the
-- number of bytes it actually wrote.
encodeVarint :: Word64 -> Buffer.Fill s ()
encodeVarint !x = Buffer.alloc 10 $ unsafeIOToST . fmap fromIntegral . glean_thrift_encode_varint x

-- | Append the zigzag encoding of signed @x@ (same 10-byte worst case).
encodeZigZag :: Int64 -> Buffer.Fill s ()
encodeZigZag !x = Buffer.alloc 10 $ unsafeIOToST . fmap fromIntegral . glean_thrift_encode_zigzag x

-- Native encoders: each writes into the supplied pointer and returns the
-- byte count written.
foreign import ccall unsafe glean_thrift_encode_varint :: Word64 -> Ptr Word8 -> IO CSize
foreign import ccall unsafe glean_thrift_encode_zigzag :: Int64 -> Ptr Word8 -> IO CSize
import { Tree } from '@angular-devkit/schematics';
import { getNpmScope, toClassName, toFileName } from '@nrwl/workspace';
import { libsDir } from '@nrwl/workspace/src/utils/ast-utils';
import { Schema } from '../schema';
import { NormalizedSchema } from './normalized-schema';

/**
 * Expand the raw schematic options into the fully-resolved form used by the
 * generator: project name/root, module name/path, parsed tags and prefix.
 */
export function normalizeOptions(
  host: Tree,
  options: Schema
): NormalizedSchema {
  // Normalise the raw name and optional directory into a project path.
  const baseName = toFileName(options.name);
  const projectDirectory = options.directory
    ? `${toFileName(options.directory)}/${baseName}`
    : baseName;

  // The project name is the directory path with separators flattened to dashes.
  const projectName = projectDirectory.split('/').join('-');
  const fileName = options.simpleModuleName ? baseName : projectName;

  const projectRoot = `${libsDir(host)}/${projectDirectory}`;
  const modulePath = `${projectRoot}/src/lib/${fileName}.module.ts`;
  const moduleName = `${toClassName(fileName)}Module`;

  // Comma-separated tags become a trimmed list; absent tags become [].
  const parsedTags = options.tags
    ? options.tags.split(',').map((tag) => tag.trim())
    : [];

  return {
    ...options,
    prefix: options.prefix || getNpmScope(host),
    name: projectName,
    projectRoot,
    entryFile: 'index',
    moduleName,
    projectDirectory,
    modulePath,
    parsedTags,
    fileName,
  };
}
def main():
    """Read a sequence of 1/2/3 values and print disjoint index triples.

    Output: the number k of complete (1, 2, 3) groups that can be formed,
    followed by k lines, each giving the 1-based positions of one value-1,
    one value-2 and one value-3 element.
    """
    input()  # element count; implied by the sequence itself, so discarded
    values = list(map(int, input().split()))

    # Collect the 1-based positions of each value.
    positions = {1: [], 2: [], 3: []}
    for index, value in enumerate(values, start=1):
        positions[value].append(index)

    # Only as many triples as the scarcest value allows.
    groups = min(len(positions[1]), len(positions[2]), len(positions[3]))
    print(groups)
    for i in range(groups):
        print(positions[1][i], positions[2][i], positions[3][i])


if __name__ == '__main__':
    main()
/**
 * Connector mapping property availability: describes how often the mapped
 * connector data should be refreshed, as an interval of a given frequency
 * (e.g. every N hours).
 */
@Fluent
public final class ConnectorMappingAvailability {
    // Present for parity with other generated models; not used by this class.
    @JsonIgnore private final ClientLogger logger = new ClientLogger(ConnectorMappingAvailability.class);

    /*
     * The frequency to update.
     */
    @JsonProperty(value = "frequency")
    private FrequencyTypes frequency;

    /*
     * The interval of the given frequency to use.
     */
    @JsonProperty(value = "interval", required = true)
    private int interval;

    /**
     * Get the frequency property: The frequency to update.
     *
     * @return the frequency value.
     */
    public FrequencyTypes frequency() {
        return this.frequency;
    }

    /**
     * Set the frequency property: The frequency to update.
     *
     * @param frequency the frequency value to set.
     * @return the ConnectorMappingAvailability object itself.
     */
    public ConnectorMappingAvailability withFrequency(FrequencyTypes frequency) {
        this.frequency = frequency;
        return this;
    }

    /**
     * Get the interval property: The interval of the given frequency to use.
     *
     * @return the interval value.
     */
    public int interval() {
        return this.interval;
    }

    /**
     * Set the interval property: The interval of the given frequency to use.
     *
     * @param interval the interval value to set.
     * @return the ConnectorMappingAvailability object itself.
     */
    public ConnectorMappingAvailability withInterval(int interval) {
        this.interval = interval;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // Nothing to check: 'interval' is a primitive (always present) and
        // 'frequency' is optional.
    }
}
package com.jordanluyke.reversi;

import com.google.inject.AbstractModule;
import com.jordanluyke.reversi.account.AccountModule;
import com.jordanluyke.reversi.db.DbModule;
import com.jordanluyke.reversi.lobby.LobbyModule;
import com.jordanluyke.reversi.match.MatchModule;
import com.jordanluyke.reversi.session.SessionModule;
import com.jordanluyke.reversi.web.WebModule;

/**
 * Root Guice module: binds the application's top-level manager and installs
 * the feature modules (web, database, accounts, matches, sessions, lobbies).
 *
 * @author <NAME> <<EMAIL>>
 */
public class MainModule extends AbstractModule {

    @Override
    protected void configure() {
        // Application entry-point coordinator.
        bind(MainManager.class).to(MainManagerImpl.class);

        // Feature modules, each contributing its own bindings.
        install(new WebModule());
        install(new DbModule());
        install(new AccountModule());
        install(new MatchModule());
        install(new SessionModule());
        install(new LobbyModule());
    }
}
/** Component options; every field is optional. */
export type Props = {
  /** Start in the open state when first rendered. */
  openOnInit?: boolean;
  /** Prefix for the generated CSS class names. */
  classPrefix?: string;
  /** Transition duration (presumably milliseconds — confirm with the consumer). */
  transTime?: number;
  /** Transition timing curve (CSS easing keyword or function). */
  transCurve?: string;
  /** Invoked when the open/closed state toggles. */
  onToggle?: () => void;
};
/**
 * Builds the URL-encoded form body for a third-party POST request from the
 * supplied key/value data and attaches it to the request.
 *
 * @author GS-1629
 * @param post the request to attach the form entity to
 * @param thirdPartyServiceData form fields; may be {@code null} or empty, in
 *        which case the request is returned unchanged
 * @return the same {@link HttpPost} instance, with its entity set when there
 *         was data to encode
 * @throws IOException Signals that an I/O exception has occurred.
 */
HttpPost iterateThirdPartyBodyData(HttpPost post, HashMap<String, Object> thirdPartyServiceData) throws IOException {
    if (thirdPartyServiceData != null && !thirdPartyServiceData.isEmpty()) {
        List<BasicNameValuePair> urlParameters = new ArrayList<>(thirdPartyServiceData.size());
        for (Map.Entry<String, Object> entry : thirdPartyServiceData.entrySet()) {
            // Keys are already Strings (no toString() needed); values are
            // stringified. NOTE(review): a null value throws NPE here, as in
            // the original contract — confirm callers never pass null values.
            urlParameters.add(new BasicNameValuePair(entry.getKey().trim(), entry.getValue().toString().trim()));
        }
        // Uses the entity's default charset (ISO-8859-1) — unchanged behavior.
        post.setEntity(new UrlEncodedFormEntity(urlParameters));
    }
    return post;
}
// Cast web socket client and server in terms of common process interfaces.

/**
 * A WebSocket *server* viewed as a process. Design notes:
 * - has fixed properties (address)
 * - has variable state (clients, i.e. the contingent)
 * - is a state machine (created -> listening -> closed)
 * - has common process-related events (die, error?)
 * - has common subsystem-related events (changes to immediate children) —
 *   though this may concern the contingent only
 * - has events related to itself (headers, listening?)
 * - has a set of contingent processes
 * - has an INGRESS announcing client connections
 */
export interface WebSocketServerWrapper {
}

/**
 * A WebSocket *connection* viewed as a process. Design notes:
 * - has fixed properties (server address)
 * - is a state machine (readyState: connected -> open -> closing -> closed)
 * - has common process-related events (die, error?)
 * - has events related to itself (ping/pong)
 * - is contingent on another process
 * - has an EGRESS as its (main/only?) output port
 * - has an INGRESS as its (main/only?) input port
 * - the associated server MAY be an instance in this system
 */
export interface WebSocketWrapper {
}
// Sets default values for this component's properties UFastNoiseComponent::UFastNoiseComponent(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) { PrimaryComponentTick.bCanEverTick = false; NoiseGenerator = CreateDefaultSubobject<UFastNoise>(UFastNoiseComponent::NoiseGeneratorName); }
def _rm_all_Sc(self): l = [] for i in range(self.wodscompleted): l.append(self.df.loc[:, self.scorel[i]].values.tolist()) ii = np.empty(shape=(self.wodscompleted, len(self.df)), dtype=int) ii[:] = -1 for i in range(self.wodscompleted): for j in range(len(self.df)): if l[i][j].endswith('- s'): ii[i,j] = j for i in range(self.wodscompleted): tmp = ii[i,:] _tmp = tmp[tmp >= 0] self.df.iloc[_tmp,self.ci[i]] = np.nan _ind = pd.isnull(self.df.loc[:,self.scorel]).all(axis=1) _in2 = _ind[_ind == True].index.values self.df.iloc[_in2,:] = np.nan self.df = self.df.dropna(axis=0, how='all').reset_index(drop=True) return self.df
/**
 * Utility methods for working with views: elevation, show/hide with
 * scale/alpha reset, layout sizing, background color inspection and ripple
 * drawable construction.
 */
@SuppressWarnings("unused")
public class ViewUtils {

    /** Applies a default elevation of 4dp to the given views (no-op below API 21). */
    public static void setZ(Context contexts, View... views) {
        setZ(contexts, 4, TypedValue.COMPLEX_UNIT_DIP, views);
    }

    /**
     * Sets the Z translation of the given views, converting {@code value}
     * from the given {@link TypedValue} unit to pixels.
     * Does nothing on devices below API 21, where {@code View#setZ} is unavailable.
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public static void setZ(Context contexts, float value, int unit, View... views) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            for (View view : views) {
                view.setZ(TypedValue.applyDimension(unit, value, contexts.getResources().getDisplayMetrics()));
            }
        }
    }

    /** Hides each of the given views (GONE plus scale/alpha reset). */
    public static void hideViews(View... views) {
        for (View view : views) {
            hideView(view);
        }
    }

    /** Makes the view GONE and zeroes scale/alpha so it can animate back in later. */
    public static void hideView(View view) {
        view.setVisibility(View.GONE);
        view.setScaleX(0f);
        view.setScaleY(0f);
        view.setAlpha(0f);
    }

    /** Shows each of the given views (VISIBLE plus full scale/alpha). */
    public static void showViews(View... views) {
        for (View view : views) {
            showView(view);
        }
    }

    /** Restores full scale/alpha and makes the view VISIBLE. */
    public static void showView(View view) {
        view.setScaleX(1f);
        view.setScaleY(1f);
        view.setAlpha(1f);
        view.setVisibility(View.VISIBLE);
    }

    // NOTE(review): mutates the LayoutParams in place; callers may need a
    // subsequent requestLayout() for the change to take effect — confirm.
    public static void setWidth(View view, int width) {
        view.getLayoutParams().width = width;
    }

    // NOTE(review): same caveat as setWidth above.
    public static void setHeight(View view, int height) {
        view.getLayoutParams().height = height;
    }

    /**
     * @return view color,
     * if view has not ColorDrawable - Color.WHITE
     */
    public static int getColor(View view) {
        int color = Color.WHITE;
        Drawable background = view.getBackground();
        if (background instanceof ColorDrawable) {
            color = ((ColorDrawable) background).getColor();
        }
        return color;
    }

    /** Resolves the drawable resource and applies it as the view's background. */
    public static void setBackground(Context context, View view, @DrawableRes int backgroundResId) {
        setBackground(view, ContextCompat.getDrawable(context, backgroundResId));
    }

    public static void setBackground(View view, Drawable background) {
        view.setBackground(background);
    }

    /**
     * Builds a ripple from a single color with the given alpha. When
     * {@code bounded} is true a white mask confines the ripple to the view's
     * bounds; otherwise it spreads unbounded.
     */
    public static RippleDrawable createRipple(@ColorInt int color,
                                              @FloatRange(from = 0f, to = 1f) float alpha,
                                              boolean bounded) {
        color = ColorUtils.modifyAlpha(color, alpha);
        return new RippleDrawable(ColorStateList.valueOf(color), null, bounded ?
                new ColorDrawable(Color.WHITE) : null);
    }

    /**
     * Builds a ripple colored from a {@link Palette}, preferring vibrant
     * swatches, then muted ones; dark swatches use {@code darkAlpha}, light
     * ones {@code lightAlpha}. Falls back to {@code fallbackColor} when the
     * palette has no usable swatch.
     */
    public static RippleDrawable createRipple(@NonNull Palette palette,
                                              @FloatRange(from = 0f, to = 1f) float darkAlpha,
                                              @FloatRange(from = 0f, to = 1f) float lightAlpha,
                                              @ColorInt int fallbackColor,
                                              boolean bounded) {
        int rippleColor = fallbackColor;
        if (palette.getVibrantSwatch() != null) {
            rippleColor = ColorUtils.modifyAlpha(palette.getVibrantSwatch().getRgb(), darkAlpha);
        } else if (palette.getLightVibrantSwatch() != null) {
            rippleColor = ColorUtils.modifyAlpha(palette.getLightVibrantSwatch().getRgb(), lightAlpha);
        } else if (palette.getDarkVibrantSwatch() != null) {
            rippleColor = ColorUtils.modifyAlpha(palette.getDarkVibrantSwatch().getRgb(), darkAlpha);
        } else if (palette.getMutedSwatch() != null) {
            rippleColor = ColorUtils.modifyAlpha(palette.getMutedSwatch().getRgb(), darkAlpha);
        } else if (palette.getLightMutedSwatch() != null) {
            rippleColor = ColorUtils.modifyAlpha(palette.getLightMutedSwatch().getRgb(), lightAlpha);
        } else if (palette.getDarkMutedSwatch() != null) {
            rippleColor = ColorUtils.modifyAlpha(palette.getDarkMutedSwatch().getRgb(), darkAlpha);
        }
        return new RippleDrawable(ColorStateList.valueOf(rippleColor), null, bounded ?
                new ColorDrawable(Color.WHITE) : null);
    }
}
/*
 * Copyright 2017 LunaMC.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.lunamc.plugins.gamebase;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.List;

/**
 * JAXB model for the {@code <games>} configuration document
 * (namespace {@code http://lunamc.io/game/1.0}).
 */
@XmlRootElement(name = "games", namespace = "http://lunamc.io/game/1.0")
@XmlAccessorType(XmlAccessType.FIELD)
public class GamesConfiguration {

    // One <game> element per configured game.
    @XmlElement(name = "game", namespace = "http://lunamc.io/game/1.0")
    private List<Game> games;

    public List<Game> getGames() {
        return games;
    }

    public void setGames(List<Game> games) {
        this.games = games;
    }

    /** A single {@code <game>} entry: virtual hosts, status provider and blocks. */
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class Game {

        // <virtualHosts><virtualHost>...</virtualHost>...</virtualHosts>
        @XmlElementWrapper(name = "virtualHosts", namespace = "http://lunamc.io/game/1.0")
        @XmlElement(name = "virtualHost", namespace = "http://lunamc.io/game/1.0")
        private List<String> virtualHosts;

        @XmlElement(name = "statusProvider", namespace = "http://lunamc.io/game/1.0")
        private StatusProvider statusProvider;

        // <blocks><block>...</block>...</blocks>
        @XmlElementWrapper(name = "blocks", namespace = "http://lunamc.io/game/1.0")
        @XmlElement(name = "block", namespace = "http://lunamc.io/game/1.0")
        private List<Block> blocks;

        public List<String> getVirtualHosts() {
            return virtualHosts;
        }

        public void setVirtualHosts(List<String> virtualHosts) {
            this.virtualHosts = virtualHosts;
        }

        public StatusProvider getStatusProvider() {
            return statusProvider;
        }

        public void setStatusProvider(StatusProvider statusProvider) {
            this.statusProvider = statusProvider;
        }

        public List<Block> getBlocks() {
            return blocks;
        }

        public void setBlocks(List<Block> blocks) {
            this.blocks = blocks;
        }
    }

    /** Server-status settings: message of the day and player cap. */
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class StatusProvider {

        @XmlElement(name = "motd", namespace = "http://lunamc.io/game/1.0")
        private String motd;

        @XmlElement(name = "maxPlayers", namespace = "http://lunamc.io/game/1.0")
        private int maxPlayers;

        public String getMotd() {
            return motd;
        }

        public void setMotd(String motd) {
            this.motd = motd;
        }

        public int getMaxPlayers() {
            return maxPlayers;
        }

        public void setMaxPlayers(int maxPlayers) {
            this.maxPlayers = maxPlayers;
        }
    }

    /** A named block mapped to a palette id (both XML attributes). */
    @XmlAccessorType(XmlAccessType.FIELD)
    public static class Block {

        @XmlAttribute(name = "name")
        private String name;

        @XmlAttribute(name = "paletteId")
        private String paletteId;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getPaletteId() {
            return paletteId;
        }

        public void setPaletteId(String paletteId) {
            this.paletteId = paletteId;
        }
    }
}
#ifndef MEMORY_H
#define MEMORY_H

/* Kernel virtual addresses occupy the upper half of the address space. */
#define VA_START            0xffff000000000000

/* Physical memory assumed present: 1 GiB. */
#define PHYS_MEMORY_SIZE    0x40000000

/* 4 KiB pages; 512 (2^9) entries per translation table; 2 MiB sections. */
#define PAGE_SHIFT          12
#define TABLE_SHIFT         9
#define SECTION_SHIFT       (PAGE_SHIFT + TABLE_SHIFT)

#define PAGE_SIZE           (1 << PAGE_SHIFT)       //2^12, 4096
#define SECTION_SIZE        (1 << SECTION_SHIFT)    //2^21

#define EL1_LOW_MEMORY      (2 * SECTION_SIZE)
#define EL0_LOW_MEMORY      (4 * SECTION_SIZE)
/* NOTE(review): presumably the start of the device/MMIO region on this
   board — confirm against the platform memory map. */
#define HIGH_MEMORY         0x3F000000              // 0x3F000000
#define LOW_MEMORY          (3 * SECTION_SIZE)      // 3 * 2^21 = 6M = 0x600000

#define PTRS_PER_TABLE      (1 << TABLE_SHIFT)

/* Bit offsets of each translation level within a virtual address
   (4-level, 4 KiB granule: PGD -> PUD -> PMD -> PTE). */
#define PGD_SHIFT           (PAGE_SHIFT + 3*TABLE_SHIFT)    //39
#define PUD_SHIFT           (PAGE_SHIFT + 2*TABLE_SHIFT)    //30
#define PMD_SHIFT           (PAGE_SHIFT + TABLE_SHIFT)      //21
#define PTE_SHIFT           (PAGE_SHIFT)                    //12

/* Number of page frames tracked, and the first frame past the reserved
   low-memory region.  (Spelling of FIRST_AVAILIBLE_PAGE kept for
   compatibility with existing users.) */
#define PAGE_ENTRY          (HIGH_MEMORY / PAGE_SIZE)
#define FIRST_AVAILIBLE_PAGE (LOW_MEMORY / PAGE_SIZE)

/* Descriptor type and attribute bits for translation entries. */
#define MM_TYPE_PAGE_TABLE  0b11
#define MM_TYPE_PAGE        0b11
#define MM_TYPE_BLOCK       0b01
#define MM_ACCESS           (1 << 10)
#define MM_ACCESS_PERMISSION (1 << 6)
#define MM_READONLY         (1 << 7)
#define MM_NON_EXEC_EL0     (1UL << 54)

/* Allocation states for a physical page frame. */
#define PAGE_NOT_USED       0
#define PAGE_USED           1
#define PAGE_PRESERVE       2

#ifndef __ASSEMBLER__

/* Per-frame bookkeeping record. */
typedef struct page {
    int used;   /* one of PAGE_NOT_USED / PAGE_USED / PAGE_PRESERVE */
} Page;

void paging_init();
unsigned long get_free_page();
unsigned long allocate_kernel_page();
void free_page(unsigned long p);
unsigned long virtual_to_physical(unsigned long);
unsigned long physical_to_pfn(unsigned long);
unsigned long virtual_to_pfn(unsigned long);
// unsigned long allocate_user_page(struct task *task, unsigned long va);

#endif

#endif
Nashville police are looking for a man who had a shocking reaction to a mixed-up drive-thru order. Demetri Johnson, 21, of Nashville, Tenn., was apparently so angry that his McDonald's order was missing a McDouble cheeseburger that he returned to the restaurant and brandished a gun at the employees there, USA Today reported. Man accused of pulling gun at Nashville McDonald's because his order was incomplete. http://t.co/bqwov6DxTt pic.twitter.com/Mh2ph8A0rc — NewsChannel 5 (@NC5) December 23, 2014 According to a news release from the Metropolitan Nashville Police Department, Johnson ordered a McDouble on Thursday last week, received his food and pulled away. After realizing his sandwich was missing, he returned to the McDonald's drive-thru to complain. A manager asked him to park so he could bring Johnson his sandwich. After waiting a few minutes, it seems Johnson lost patience. He reportedly walked into the restaurant with a pistol, racked it, and demanded the staff fix the order. Three women entered with him demanding fresh french fries and sodas. The group left after receiving their food. Police have been unable to track Johnson down, and are asking the public for information on his whereabouts, according to Inquisitr. Johnson is being sought for a charge of felony aggravated assault, The Smoking Gun reported. He has been arrested previously for weapons possession and theft charges. Apparently this type of reaction to an incorrect drive-thru order has happened before this year. Inquisitr notes that a woman in Michigan became so furious when she and a friend received the wrong order twice that she fired a shot through the drive-thru window. No employees were injured.
<reponame>heaptracetechnology/cron<filename>cron/cron_test.go package cron import ( "bytes" "encoding/json" "net/http" "net/http/httptest" "log" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" ) var _ = Describe("Cron event subscribe negative time interval", func() { var argumentData Subscribe argumentData.IsTesting = true var data Data data.Interval = -2 data.InitialDelay = -10 argumentData.Data = data argumentData.Endpoint = "http://webhook.site/bfd1aea6-0562-4087-90a3-68efab7d0302" requestBody := new(bytes.Buffer) errr := json.NewEncoder(requestBody).Encode(argumentData) if errr != nil { log.Fatal(errr) } request, err := http.NewRequest("POST", "/subscribe", requestBody) if err != nil { log.Fatal(err) } recorder := httptest.NewRecorder() handler := http.HandlerFunc(TriggerCron) handler.ServeHTTP(recorder, request) Describe("Cron", func() { Context("Negative time interval", func() { It("Should result http.StatusBadRequest", func() { Expect(http.StatusBadRequest).To(Equal(recorder.Code)) }) }) }) }) var _ = Describe("Cron event subscribe", func() { var argumentData Subscribe argumentData.IsTesting = true var data Data data.Interval = 2 data.InitialDelay = 1 argumentData.Data = data argumentData.Endpoint = "http://webhook.site/bfd1aea6-0562-4087-90a3-68efab7d0302" requestBody := new(bytes.Buffer) errr := json.NewEncoder(requestBody).Encode(argumentData) if errr != nil { log.Fatal(errr) } request, err := http.NewRequest("POST", "/subscribe", requestBody) if err != nil { log.Fatal(err) } recorder := httptest.NewRecorder() handler := http.HandlerFunc(TriggerCron) handler.ServeHTTP(recorder, request) Describe("Send Message", func() { Context("SendMessage", func() { It("Should result http.StatusOK", func() { Expect(http.StatusOK).To(Equal(recorder.Code)) }) }) }) })
/** Used to create {@link DeserializationRuntimeConverterFactory} specified to MySQL. */
public class MySqlDeserializationConverterFactory {

    /**
     * Returns a factory producing the MySQL-specific converters: string
     * conversion (including geometry-to-GeoJSON) and SET-to-ARRAY mapping.
     * All other types fall back to the default converters.
     */
    public static DeserializationRuntimeConverterFactory instance() {
        return new DeserializationRuntimeConverterFactory() {

            private static final long serialVersionUID = 1L;

            @Override
            public Optional<DeserializationRuntimeConverter> createUserDefinedConverter(
                    LogicalType logicalType, ZoneId serverTimeZone) {
                switch (logicalType.getTypeRoot()) {
                    case CHAR:
                    case VARCHAR:
                        return createStringConverter();
                    case ARRAY:
                        return createArrayConverter((ArrayType) logicalType);
                    default:
                        // fallback to default converter
                        return Optional.empty();
                }
            }
        };
    }

    /**
     * Creates the string converter.  Geometry-typed Debezium structs are
     * rendered as a GeoJSON-like document (type, coordinates/geometries,
     * srid); anything else is converted via {@code toString()}.
     */
    private static Optional<DeserializationRuntimeConverter> createStringConverter() {
        final ObjectMapper objectMapper = new ObjectMapper();
        final ObjectWriter objectWriter = objectMapper.writer();
        return Optional.of(
                new DeserializationRuntimeConverter() {

                    private static final long serialVersionUID = 1L;

                    @Override
                    public Object convert(Object dbzObj, Schema schema) throws Exception {
                        // the Geometry datatype in MySQL will be converted to
                        // a String with Json format
                        if (Point.LOGICAL_NAME.equals(schema.name())
                                || Geometry.LOGICAL_NAME.equals(schema.name())) {
                            try {
                                Struct geometryStruct = (Struct) dbzObj;
                                byte[] wkb = geometryStruct.getBytes("wkb");
                                // Decode the well-known-binary payload to GeoJSON.
                                String geoJson =
                                        OGCGeometry.fromBinary(ByteBuffer.wrap(wkb)).asGeoJson();
                                JsonNode originGeoNode = objectMapper.readTree(geoJson);
                                Optional<Integer> srid =
                                        Optional.ofNullable(geometryStruct.getInt32("srid"));
                                Map<String, Object> geometryInfo = new HashMap<>();
                                String geometryType = originGeoNode.get("type").asText();
                                geometryInfo.put("type", geometryType);
                                // GeometryCollection carries "geometries";
                                // all other types carry "coordinates".
                                if (geometryType.equals("GeometryCollection")) {
                                    geometryInfo.put("geometries", originGeoNode.get("geometries"));
                                } else {
                                    geometryInfo.put(
                                            "coordinates", originGeoNode.get("coordinates"));
                                }
                                // Missing SRID is reported as 0.
                                geometryInfo.put("srid", srid.orElse(0));
                                return StringData.fromString(
                                        objectWriter.writeValueAsString(geometryInfo));
                            } catch (Exception e) {
                                throw new IllegalArgumentException(
                                        String.format(
                                                "Failed to convert %s to geometry JSON.", dbzObj),
                                        e);
                            }
                        } else {
                            return StringData.fromString(dbzObj.toString());
                        }
                    }
                });
    }

    /**
     * Creates the array converter.  Only MySQL SET columns (Debezium's
     * comma-separated string representation) are mapped to Flink
     * ARRAY&lt;STRING&gt;; other element types fall back to the defaults.
     */
    private static Optional<DeserializationRuntimeConverter> createArrayConverter(
            ArrayType arrayType) {
        if (LogicalTypeChecks.hasFamily(
                arrayType.getElementType(), LogicalTypeFamily.CHARACTER_STRING)) {
            // only map MySQL SET type to Flink ARRAY<STRING> type
            return Optional.of(
                    new DeserializationRuntimeConverter() {

                        private static final long serialVersionUID = 1L;

                        @Override
                        public Object convert(Object dbzObj, Schema schema) throws Exception {
                            if (EnumSet.LOGICAL_NAME.equals(schema.name())
                                    && dbzObj instanceof String) {
                                // for SET datatype in mysql, debezium will always
                                // return a string split by comma like "a,b,c"
                                String[] enums = ((String) dbzObj).split(",");
                                StringData[] elements = new StringData[enums.length];
                                for (int i = 0; i < enums.length; i++) {
                                    elements[i] = StringData.fromString(enums[i]);
                                }
                                return new GenericArrayData(elements);
                            } else {
                                throw new IllegalArgumentException(
                                        String.format(
                                                "Unable convert to Flink ARRAY type from unexpected value '%s', "
                                                        + "only SET type could be converted to ARRAY type for MySQL",
                                                dbzObj));
                            }
                        }
                    });
        } else {
            // otherwise, fallback to default converter
            return Optional.empty();
        }
    }
}
<filename>packages/serverless-api/src/utils/package-info.ts import fs from 'fs'; import path from 'path'; import { PackageJson } from 'type-fest'; const pkgJson: PackageJson = JSON.parse( fs.readFileSync(path.resolve(__dirname, '../../package.json'), 'utf8') ); export default pkgJson;
/*----------------------------------------- */
/* Update cursor position display on screen */
/*----------------------------------------- */
void update_indicators() {
    int i;
    /* scrollIndicator's initial 1.0 is never read: it is reassigned below
       whenever the vertical scrollbar is active, and unused otherwise. */
    float scrollIndicator=1.0, positionY, displayLength;
    float percentage, scrollBar;

    /* Status bar: template, then cursor column and line number. */
    write_str(columns - 24, rows, "| L: C: ", STATUSBAR, STATUSMSG);
    write_num(columns - 10, rows, editScroll.bufferX, 3, STATUSBAR, STATUSMSG);
    write_num(columns - 20, rows, editScroll.bufferY, 4, STATUSBAR, STATUSMSG);

    if (editScroll.scrollActiveV == VSCROLL_ON){
        /* Clear the scrollbar track (rows 4 .. rows-3). */
        for(i = 4; i < rows-2; i++) {
            write_ch(columns, i, ' ', SCROLLBAR_BACK, SCROLLBAR_FORE);
        }
        positionY = editScroll.bufferY;
        displayLength = editScroll.displayLength;

        /* Map the current buffer line to a percentage of the document,
           then to a position on the (displayLength-3)-cell track. */
        scrollIndicator = positionY / editScroll.scrollRatio;
        percentage = (scrollIndicator * 100) / displayLength;
        scrollBar = ((displayLength-3) * percentage)/100;

        /* NOTE(review): percentage is forced to 0 for the first line only
           AFTER scrollBar was derived from it, so the displayed "0%" may not
           match the indicator position — confirm this ordering is intended. */
        if (positionY == 1) percentage = 0;

        /* Indicator cell; scrollBar (float) is truncated to an int column. */
        write_ch(columns, 4+scrollBar, '*', SCROLLBAR_SEL, SCROLLBAR_FORE);

        /* Percentage readout on the status bar. */
        write_str(columns-5,rows, " ", STATUSBAR,F_BLACK);
        i=write_num(columns-5,rows, percentage, 3, STATUSBAR,F_YELLOW);
        write_ch(columns-5+i,rows, '%', STATUSBAR,F_YELLOW);
    }
}
package user

import "baseservice/middleware/authenticate"

type (
	// UpdateAccountBalanceRequest is the payload for adjusting a user's
	// account balance. It embeds the common authenticated-request fields.
	UpdateAccountBalanceRequest struct {
		authenticate.Request
		Amount     int64  `json:"amount"`      // amount of the change
		Describe   string `json:"describe"`    // human-readable description of the change
		UpdateType int    `json:"update_type"` // change type: 0=deposit 1=withdrawal 2=game change 3=activity reward
	}

	// UpdateAccountBalanceResponse is the reply for a balance update,
	// embedding the common authenticated-response fields.
	UpdateAccountBalanceResponse struct {
		authenticate.Response
		AfterAmount int64 `json:"after_amount"` // balance after the update was applied
	}
)
/*******************************************************************************
 * \file mutation.cpp
 * \brief Stylus Genome class (mutation methods)
 *
 * Stylus, Copyright 2006-2009 Biologic Institute
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/

// Includes ---------------------------------------------------------------------
#include "headers.hpp"

using namespace std;
using namespace stylus;

//--------------------------------------------------------------------------------
//
// ModificationStack
//
//--------------------------------------------------------------------------------

/*
 * Function: undo
 *
 * Roll back every modification on the stack, newest first (reverse
 * iteration), so the genome returns to its pre-mutation state.
 */
void ModificationStack::undo()
{
	ENTER(GENOME,undo);

	for (MODIFICATIONARRAY::reverse_iterator ritModification = _vecModifications.rbegin();
		ritModification != _vecModifications.rend();
		++ritModification)
	{
		TDATA(GENOME,L3,(LLTRACE, "Removing modification %s", (*ritModification)->toString().c_str()));
		(*ritModification)->undo();
	}

	TFLOW(GENOME,L3,(LLTRACE, "Removed the effect of %ld modifications", _vecModifications.size()));
}

/*
 * Function: toXML
 *
 * Serialize each recorded modification, in application order, to the stream.
 */
void ModificationStack::toXML(XMLStream& xs, STFLAGS grfRecordDetail) const
{
	for (size_t iModification=0; iModification < _vecModifications.size(); ++iModification)
		_vecModifications[iModification]->toXML(xs, grfRecordDetail);
}

//--------------------------------------------------------------------------------
//
// ChangeModification
//
//--------------------------------------------------------------------------------

/*
 * Function: ChangeModification
 *
 * Constructing the object APPLIES the change: the bases at _iTarget are
 * replaced with pbBasesAfter via Genome::changeBases. The "before" bases
 * are retained (in _strBases, via the base class) so undo() can restore them.
 */
ChangeModification::ChangeModification(size_t iGene, size_t iTarget, const char* pbBasesBefore, const char* pbBasesAfter, bool fSilent) :
	MutationModification(iGene, iTarget, pbBasesBefore), _strBasesAfter(pbBasesAfter)
{
	ASSERT(!Genome::isState(STGS_ROLLBACK) && !Genome::isState(STGS_RESTORING));
	_fSilent = fSilent;
	Genome::changeBases(_iGene, _iTarget, _strBasesAfter, _fSilent);
}

/*
 * Function: undo
 *
 * Restore the original bases; statistics are only unwound during rollback
 * (not while restoring from a record).
 */
void ChangeModification::undo()
{
	ENTER(GENOME,undo);
	ASSERT(Genome::isState(STGS_ROLLBACK) || Genome::isState(STGS_RESTORING));
	if (Genome::isState(STGS_ROLLBACK))
		Genome::undoStatistics(MT_CHANGE, _strBases.length(), _fSilent);
	Genome::changeBases(_iGene, _iTarget, _strBases, _fSilent);
}

/*
 * Function: toString
 *
 * Human-readable description (1-based position) for trace output.
 */
string ChangeModification::toString() const
{
	ENTER(MUTATION,toString);
	ostringstream ostr;
	ostr << "Change mutation at " << (_iTarget+1) << " from " << _strBases << " to " << _strBasesAfter;
	return ostr.str();
}

/*
 * Function: toXML
 */
void ChangeModification::toXML(XMLStream& xs, STFLAGS grfRecordDetail) const
{
	ENTER(GENOME,toXML);
	xs.openStart(xmlTag(XT_CHANGED));
	xs.writeAttribute(xmlTag(XT_TARGETINDEX), (_iTarget+1));
	xs.writeAttribute(xmlTag(XT_COUNTBASES), _strBases.length());
	xs.writeAttribute(xmlTag(XT_BASES), _strBases);
	xs.writeAttribute(xmlTag(XT_BASESAFTER), _strBasesAfter);
	xs.closeStart(false);
}

//--------------------------------------------------------------------------------
//
// CopyModification
//
//--------------------------------------------------------------------------------

/*
 * Function: CopyModification
 *
 * Constructing the object APPLIES the copy: the (already captured) bases
 * are inserted at _iTarget. The source bases themselves are untouched.
 */
CopyModification::CopyModification(size_t iGene, size_t iSource, size_t iTarget, const char* pbBases) :
	MutationModification(iGene, iTarget, pbBases), _iSource(iSource)
{
	ASSERT(!Genome::isState(STGS_ROLLBACK) && !Genome::isState(STGS_RESTORING));
	Genome::insertBases(_iGene, _iTarget, _strBases);
}

/*
 * Function: undo
 *
 * Remove the inserted copy; statistics are unwound only during rollback.
 */
void CopyModification::undo()
{
	ENTER(GENOME,undo);
	ASSERT(Genome::isState(STGS_ROLLBACK) || Genome::isState(STGS_RESTORING));
	if (Genome::isState(STGS_ROLLBACK))
		Genome::undoStatistics(MT_COPY, _strBases.length(), false);
	Genome::deleteBases(_iGene, _iTarget, _strBases.length());
}

/*
 * Function: toString
 */
string CopyModification::toString() const
{
	ENTER(MUTATION,toString);
	ostringstream ostr;
	ostr << "Copy mutation from " << (_iSource+1) << " to " << (_iTarget+1) << " of " << _strBases.length() << " bases (" << _strBases << ")";
	return ostr.str();
}

/*
 * Function: toXML
 */
void CopyModification::toXML(XMLStream& xs, STFLAGS grfRecordDetail) const
{
	ENTER(GENOME,toXML);
	xs.openStart(xmlTag(XT_COPIED));
	xs.writeAttribute(xmlTag(XT_SOURCEINDEX), (_iSource+1));
	xs.writeAttribute(xmlTag(XT_TARGETINDEX), (_iTarget+1));
	xs.writeAttribute(xmlTag(XT_COUNTBASES), _strBases.length());
	xs.writeAttribute(xmlTag(XT_BASES), _strBases);
	xs.closeStart(false);
}

//--------------------------------------------------------------------------------
//
// DeleteModification
//
//--------------------------------------------------------------------------------

/*
 * Function: DeleteModification
 *
 * Constructing the object APPLIES the deletion; the removed bases were
 * captured into _strBases by the base class so undo() can reinsert them.
 */
DeleteModification::DeleteModification(size_t iGene, size_t iTarget, const char* pbBases) :
	MutationModification(iGene, iTarget, pbBases)
{
	ASSERT(!Genome::isState(STGS_ROLLBACK) && !Genome::isState(STGS_RESTORING));
	Genome::deleteBases(_iGene, _iTarget, _strBases.length());
}

/*
 * Function: undo
 *
 * Reinsert the deleted bases; statistics are unwound only during rollback.
 */
void DeleteModification::undo()
{
	ENTER(GENOME,undo);
	ASSERT(Genome::isState(STGS_ROLLBACK) || Genome::isState(STGS_RESTORING));
	if (Genome::isState(STGS_ROLLBACK))
		Genome::undoStatistics(MT_DELETE, _strBases.length(), false);
	Genome::insertBases(_iGene, _iTarget, _strBases);
}

/*
 * Function: toString
 */
string DeleteModification::toString() const
{
	ENTER(MUTATION,toString);
	ostringstream ostr;
	ostr << "Delete mutation from " << (_iTarget+1) << " of " << _strBases.length() << " bases (" << _strBases << ")";
	return ostr.str();
}

/*
 * Function: toXML
 */
void DeleteModification::toXML(XMLStream& xs, STFLAGS grfRecordDetail) const
{
	ENTER(GENOME,toXML);
	xs.openStart(xmlTag(XT_DELETED));
	xs.writeAttribute(xmlTag(XT_TARGETINDEX), (_iTarget+1));
	xs.writeAttribute(xmlTag(XT_COUNTBASES), _strBases.length());
	xs.writeAttribute(xmlTag(XT_BASES), _strBases);
	xs.closeStart(false);
}

//--------------------------------------------------------------------------------
//
// InsertModification
//
//--------------------------------------------------------------------------------

/*
 * Function: InsertModification
 *
 * Constructing the object APPLIES the insertion of pbBases at _iTarget.
 */
InsertModification::InsertModification(size_t iGene, size_t iTarget, const char* pbBases) :
	MutationModification(iGene, iTarget, pbBases)
{
	ASSERT(!Genome::isState(STGS_ROLLBACK) && !Genome::isState(STGS_RESTORING));
	Genome::insertBases(_iGene, _iTarget, _strBases);
}

/*
 * Function: undo
 *
 * Remove the inserted bases; statistics are unwound only during rollback.
 */
void InsertModification::undo()
{
	ENTER(GENOME,undo);
	ASSERT(Genome::isState(STGS_ROLLBACK) || Genome::isState(STGS_RESTORING));
	if (Genome::isState(STGS_ROLLBACK))
		Genome::undoStatistics(MT_INSERT, _strBases.length(), false);
	Genome::deleteBases(_iGene, _iTarget, _strBases.length());
}

/*
 * Function: toString
 */
string InsertModification::toString() const
{
	ENTER(MUTATION,toString);
	ostringstream ostr;
	ostr << "Insert mutation to " << (_iTarget+1) << " of " << _strBases.length() << " bases (" << _strBases << ")";
	return ostr.str();
}

/*
 * Function: toXML
 */
void InsertModification::toXML(XMLStream& xs, STFLAGS grfRecordDetail) const
{
	ENTER(GENOME,toXML);
	xs.openStart(xmlTag(XT_INSERTED));
	xs.writeAttribute(xmlTag(XT_TARGETINDEX), (_iTarget+1));
	xs.writeAttribute(xmlTag(XT_COUNTBASES), _strBases.length());
	xs.writeAttribute(xmlTag(XT_BASES), _strBases);
	xs.closeStart(false);
}

//--------------------------------------------------------------------------------
//
// TransposeModification
//
//--------------------------------------------------------------------------------

/*
 * Function: TransposeModification
 *
 * Constructing the object APPLIES the transposition: the bases are deleted
 * from their source location and inserted at the target. When the source
 * precedes the target, the deletion shifts the target left by the moved
 * length, so _iTarget is adjusted before the insert.
 */
TransposeModification::TransposeModification(size_t iGeneSource, size_t iGeneTarget, size_t iSource, size_t iTarget, const char* pbBases) :
	MutationModification(iGeneTarget, iTarget, pbBases), _iGeneSource(iGeneSource), _iSource(iSource)
{
	ASSERT(!Genome::isState(STGS_ROLLBACK) && !Genome::isState(STGS_RESTORING));
	// Source and target ranges must not overlap
	ASSERT(_iSource >= _iTarget || (_iSource+_strBases.length() <= _iTarget));
	if (_iSource < _iTarget)
	{
		// Deleting the source range shifts a downstream target leftward
		_iTarget -= _strBases.length();
		ASSERT(_iSource <= _iTarget);
	}
	Genome::deleteBases(_iGeneSource, _iSource, _strBases.length());
	ASSERT(_iGene == Genome::indexToGene(_iTarget));
	Genome::insertBases(_iGene, _iTarget, _strBases);
}

/*
 * Function: undo
 *
 * Reverse the transposition: remove the bases from the target and put them
 * back at the source. Statistics are unwound only during rollback.
 */
void TransposeModification::undo()
{
	ENTER(GENOME,undoChange);
	ASSERT(Genome::isState(STGS_ROLLBACK) || Genome::isState(STGS_RESTORING));
	if (Genome::isState(STGS_ROLLBACK))
		Genome::undoStatistics(MT_TRANSPOSE, _strBases.length(), false);
	Genome::deleteBases(_iGene, _iTarget, _strBases.length());
	Genome::insertBases(_iGeneSource, _iSource, _strBases);
}

/*
 * Function: toString
 */
string TransposeModification::toString() const
{
	ENTER(MUTATION,toString);
	ostringstream ostr;
	ostr << "Transpose mutation from " << (_iSource+1) << " to " << (_iTarget+1) << " of " << _strBases.length() << " bases (" << _strBases << ")";
	return ostr.str();
}

/*
 * Function: toXML
 */
void TransposeModification::toXML(XMLStream& xs, STFLAGS grfRecordDetail) const
{
	ENTER(GENOME,toXML);
	xs.openStart(xmlTag(XT_TRANSPOSED));
	xs.writeAttribute(xmlTag(XT_SOURCEINDEX), (_iSource+1));
	xs.writeAttribute(xmlTag(XT_TARGETINDEX), (_iTarget+1));
	xs.writeAttribute(xmlTag(XT_COUNTBASES), _strBases.length());
	xs.writeAttribute(xmlTag(XT_BASES), _strBases);
	xs.closeStart(false);
}

//--------------------------------------------------------------------------------
//
// Genome
//
//--------------------------------------------------------------------------------

// Record one accepted attempt of cbBases into an attempt counter block.
// Attempted/considered tallies are gated by the genome's rollback mode.
void Genome::recordStatistics(ST_ATTEMPTS & attempts, size_t cbBases)
{
	if( _rollbackType & RT_ATTEMPT )
		attempts._cAttempted += 1;
	if( _rollbackType & RT_CONSIDERATION )
		attempts._cConsidered += 1;
	attempts._cAccepted += 1;
	attempts._cbBases += cbBases;
}

// Record one accepted mutation of the given type into a statistics block,
// updating both the per-type attempt counters and the base tallies.
void Genome::recordStatistics(ST_STATISTICS & stats, MUTATIONTYPE mt, size_t cbBases, bool fSilent)
{
	if( _rollbackType & RT_ATTEMPT )
		stats._cAttempted += 1;
	if( _rollbackType & RT_CONSIDERATION )
		stats._cConsidered += 1;
	stats._cAccepted += 1;
	switch (mt)
	{
	case MT_CHANGE:
		if (fSilent)
		{
			stats._cSilent += 1;
		}
		stats._cbBasesChanged += cbBases;
		recordStatistics(stats._atChanged, cbBases);
		break;
	case MT_COPY:
		stats._cbBasesInserted += cbBases;
		recordStatistics(stats._atCopied, cbBases);
		break;
	case MT_DELETE:
		stats._cbBasesDeleted += cbBases;
		recordStatistics(stats._atDeleted, cbBases);
		break;
	case MT_INSERT:
		stats._cbBasesInserted += cbBases;
		recordStatistics(stats._atInserted, cbBases);
		break;
	case MT_TRANSPOSE:
		// A transposition is a delete at the source plus an insert at the target
		stats._cbBasesDeleted += cbBases;
		stats._cbBasesInserted += cbBases;
		recordStatistics(stats._atTransposed, cbBases);
		break;
	case MT_ILLEGAL:
	case MT_MAX:
		break;
	}
}

// Record an accepted mutation into both the rate-window statistics and the
// cumulative statistics.
void Genome::recordStatistics(MUTATIONTYPE mt, size_t cbBases, bool fSilent)
{
	ENTER(MUTATION,recordStatistics);
	ASSERT(mt < MT_ILLEGAL);
	recordStatistics(Genome::_statsRecordRate, mt, cbBases, fSilent);
	recordStatistics(Genome::_stats, mt, cbBases, fSilent);
}

// Reverse an accepted attempt. NOTE: _cAttempted/_cConsidered are NOT
// decremented here — rejected/rolled-back attempts remain counted as
// attempts (mirrors undoStatistics below).
void Genome::undoAttempts(ST_ATTEMPTS & attempts, size_t cbBases)
{
	attempts._cAccepted -= 1;
	attempts._cbBases -= cbBases;
}

// Reverse the effect of recordStatistics for one mutation. Only the
// "accepted" tallies are unwound; attempted/considered counts persist.
void Genome::undoStatistics(ST_STATISTICS & stats, MUTATIONTYPE mt, size_t cbBases, bool fSilent)
{
	stats._cAccepted -= 1;
	switch (mt)
	{
	case MT_CHANGE:
		if (fSilent)
		{
			stats._cSilent -= 1;
		}
		stats._cbBasesChanged -= cbBases;
		undoAttempts(stats._atChanged, cbBases);
		break;
	case MT_COPY:
		stats._cbBasesInserted -= cbBases;
		undoAttempts(stats._atCopied, cbBases);
		break;
	case MT_DELETE:
		stats._cbBasesDeleted -= cbBases;
		undoAttempts(stats._atDeleted, cbBases);
		break;
	case MT_INSERT:
		stats._cbBasesInserted -= cbBases;
		undoAttempts(stats._atInserted, cbBases);
		break;
	case MT_TRANSPOSE:
		stats._cbBasesDeleted -= cbBases;
		stats._cbBasesInserted -= cbBases;
		undoAttempts(stats._atTransposed, cbBases);
		break;
	case MT_ILLEGAL:
	case MT_MAX:
		break;
	}
}

// Reverse a recorded mutation in both the rate-window and cumulative stats.
void Genome::undoStatistics(MUTATIONTYPE mt, size_t cbBases, bool fSilent)
{
	ENTER(MUTATION,undoStatistics);
	ASSERT(mt < MT_ILLEGAL);
	undoStatistics(Genome::_statsRecordRate, mt, cbBases, fSilent);
	undoStatistics(Genome::_stats, mt, cbBases, fSilent);
}

/*
 * Function: executePlan
 *
 * Load a mutation plan from XML and run it for cTrials trials starting at
 * iTrialFirst, optionally recording the initial/final genome, the plan,
 * and a trial history to the record directory.
 * NOTE(review): iTrialFirst is size_t, so "iTrialFirst <= 0" can only be
 * true at 0 — confirm callers never intend negative sentinels.
 */
void Genome::executePlan(const char* pxmlPlan, size_t iTrialFirst, size_t cTrials, ST_PFNSTATUS pfnStatus, size_t cStatusRate)
{
	ENTER(MUTATION,executePlan);
	THROWIFEXECUTING(executePlan);
	REQUIRENOTDEAD(executePlan);

	ASSERT(pxmlPlan);

	if (!isState(STGS_ALIVE) && !isState(STGS_INVALID))
		THROWRC((RC(INVALIDSTATE), "Attempt to mutate genome from incorrect state (%s)", stateToString()));

	// If the trial to associate with the plan was not specified, make it the current trial plus one
	// - The current trial is already completed, so the plan begins with the next trial
	if (iTrialFirst <= 0)
		iTrialFirst = getTrial() + 1;

	// Load the supplied plan
	_plan.load(pxmlPlan);

	// Save the initial genome and plan
	if (isRecording())
	{
		// Write the global constants alongside the recording, if supplied
		if (Globals::isSupplied())
		{
			ostringstream ostrGlobals;
			ostrGlobals << _strRecordDirectory << Constants::s_strGLOBALS << Constants::s_strXMLEXTENSION;
			ofstream ofstr(ostrGlobals.str().c_str(), ios::out | ios::trunc);
			if (!ofstr || !ofstr.is_open())
				THROWRC((RC(ERROR), "Unable to create global constants file %s", ostrGlobals.str().c_str()));
			XMLStream xs(ofstr);
			Globals::toXML(xs);
		}

		record(RT_INITIAL);

		// Persist the plan itself next to the recording
		ostringstream ostrPlan;
		ostrPlan << _strRecordDirectory << Constants::s_strPLAN << Constants::s_strXMLEXTENSION;
		ofstream ofstr(ostrPlan.str().c_str(), ios::out | ios::trunc);
		if (!ofstr || !ofstr.is_open())
			THROWRC((RC(ERROR), "Unable to create plan file %s", ostrPlan.str().c_str()));
		ImpreciseMode impreciseMode;
		XMLStream xs(ofstr);
		_plan.toXML(xs);
	}

	// Initialize a history file if recording history
	if (isRecordingHistory())
		recordHistory(RT_INITIAL);

	// Execute the loaded plan
	_plan.execute(iTrialFirst, cTrials, pfnStatus, cStatusRate);

	// Save the final genome and close the history file
	if (isRecording())
		record(RT_FINAL);
	if (isRecordingHistory())
		recordHistory(RT_FINAL);
}

/*
 * Function: handleChange
 *
 * Validate and apply a single-base or whole-codon change. Statistics are
 * recorded up front and unwound at REJECT if any validation fails.
 * Returns true if the change was applied.
 */
bool Genome::handleChange(const Mutation& mt, bool fPreserveGenes, bool fRejectSilent)
{
	ENTER(MUTATION,handleChange);

	size_t iTarget = mt.targetIndex();
	size_t cbBases = mt.countBases();
	string strBases(_strBases.substr(iTarget, cbBases));
	const char* pbBasesBefore = strBases.c_str();
	const char* pbBasesAfter = mt.bases().c_str();

	ASSERT(mt.isChange());
	ASSERT(cbBases == 1 || cbBases == Codon::s_cchCODON);
	ASSERT(cbBases == 1 || Codon::onCodonBoundary(iTarget));
	ASSERT(mt.bases().length() == cbBases);
	ASSERT(fPreserveGenes);

	// Determine the containing gene, if any
	size_t iGene = indexToGene(iTarget);

	// Determine if the change is silent
	bool fSilent = Codon::isSilentChange(_strBases, iTarget, mt.bases());

	// Record mutational statistics (unwound on rejection)
	recordStatistics(MT_CHANGE, cbBases, fSilent);

	// Reject silent mutations if requested
	// - This only prevents silent, single base changes
	if (fRejectSilent && fSilent)
		goto REJECT1;

	// Preserve existing genes and prevent introduction of new start/stop codons
	if (fPreserveGenes)
	{
		// Ensure that the change does not alter the start/stop codon for an existing gene
		if (iGene < _vecGenes.size())
		{
			const Range& rgGene = _vecGenes[iGene].getRange();
			if ( (rgGene.getStart()+Codon::s_cchCODON) > iTarget
				|| (rgGene.getEnd()-Codon::s_cchCODON) < (iTarget + cbBases - 1))
				goto REJECT2;
		}

		// Check each whole codon within the affected range to ensure no new stop (anywhere)
		// or start (between genes) codons are introduced.
		// For a single-base change, build the resulting codon in aryCodon first.
		char aryCodon[Codon::s_cchCODON];
		const char* pbCodon = (cbBases == 1 ? aryCodon : pbBasesAfter);
		if (cbBases == 1)
		{
			::memcpy(aryCodon, (_strBases.c_str()+Codon::toCodonBoundary(iTarget)), Codon::s_cchCODON);
			aryCodon[Codon::toCodonOffset(iTarget)] = *pbBasesAfter;
		}
		if (iGene >= _vecGenes.size() && Codon::isStart(pbCodon))
			goto REJECT3;
		if (Codon::isStop(pbCodon))
			goto REJECT4;
	}

	// Make and record the mutation (the ctor applies it to the genome)
	recordModification(::new ChangeModification(iGene, iTarget, pbBasesBefore, pbBasesAfter, fSilent));
	TFLOW(MUTATION,L2,(LLTRACE, "Mutating %ld from %s to %s in trial %ld", (iTarget+1), pbBasesBefore, pbBasesAfter, getTrial()));
	return true;

REJECT1:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected change at %ld from %s to %s - silent changes disallowed",
		(iTarget+1), pbBasesBefore, pbBasesAfter);
	recordTermination(NULL, STGT_MUTATION, STGR_CHANGE, "Rejected change at %ld from %s to %s - silent changes disallowed",
		(iTarget+1), pbBasesBefore, pbBasesAfter);
	goto REJECT;

REJECT2:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected change at %ld from %s to %s to preserve genes - gene start or stop codon affected",
		(iTarget+1), pbBasesBefore, pbBasesAfter);
	recordTermination(NULL, STGT_MUTATION, STGR_CHANGE, "Rejected change at %ld from %s to %s to preserve genes - gene start or stop codon affected",
		(iTarget+1), pbBasesBefore, pbBasesAfter);
	goto REJECT;

REJECT3:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected change at %ld from %s to %s to preserve genes - illegal start codon created",
		(iTarget+1), pbBasesBefore, pbBasesAfter);
	recordTermination(NULL, STGT_MUTATION, STGR_CHANGE, "Rejected change at %ld from %s to %s to preserve genes - illegal start codon created",
		(iTarget+1), pbBasesBefore, pbBasesAfter);
	goto REJECT;

REJECT4:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected change at %ld from %s to %s to preserve genes - illegal stop codon created",
		(iTarget+1), pbBasesBefore, pbBasesAfter);
	recordTermination(NULL, STGT_MUTATION, STGR_CHANGE, "Rejected change at %ld from %s to %s to preserve genes - illegal stop codon created",
		(iTarget+1), pbBasesBefore, pbBasesAfter);
	goto REJECT;

REJECT:
	TFLOW(MUTATION,L4,(LLTRACE, "Rejected change at %ld from %s to %s", iTarget, pbBasesBefore, pbBasesAfter));
	undoStatistics(MT_CHANGE, cbBases, fSilent);
	return false;
}

/*
 * Function: handleCopy
 *
 * Validate and apply a copy mutation (duplicate cbBases from iSource to
 * iTarget). With gene preservation, both endpoints must be whole codons
 * strictly inside the body of the same gene. Returns true if applied.
 */
bool Genome::handleCopy(const Mutation& mt, bool fPreserveGenes)
{
	ENTER(MUTATION,handleCopy);

	size_t iSource = mt.sourceIndex();
	size_t iTarget = mt.targetIndex();
	size_t cbBases = mt.countBases();
#if defined(ST_TRACE)
	const char* pbBases = _strBases.c_str() + iTarget;
#endif

	ASSERT(mt.isCopy());
	ASSERT(cbBases < (max<size_t>(_strBases.length(),iTarget)-iSource));
	ASSERT(fPreserveGenes);
	ASSERT(Codon::onCodonBoundary(iSource));
	ASSERT(Codon::onCodonBoundary(iTarget));
	ASSERT(Codon::hasWholeCodons(cbBases));

	// Determine the containing gene, if any
	size_t iGene = indexToGene(iTarget);

	// Record mutational statistics (unwound on rejection)
	recordStatistics(MT_COPY, cbBases, false);

	// Preserve any existing genes
	if (fPreserveGenes)
	{
		size_t iGeneSource = indexToGene(iSource);

		// Only allow copies of whole codons and from within the same gene
		if (!Codon::hasWholeCodons(cbBases))
			goto REJECT1;
		if ( iGene >= _vecGenes.size()
			|| iGeneSource >= _vecGenes.size()
			|| iGene != iGeneSource
			|| iGeneSource != indexToGene(iSource+cbBases-1))
			goto REJECT2;

		// Both source range and insertion point must avoid the start/stop codons
		const Range& rgGene = _vecGenes[iGene].getRange();
		if ( (rgGene.getStart()+Codon::s_cchCODON) > iSource
			|| (rgGene.getEnd()-Codon::s_cchCODON) < (iSource + cbBases - 1)
			|| (rgGene.getStart()+Codon::s_cchCODON) > iTarget
			|| (rgGene.getEnd()-Codon::s_cchCODON+1) < iTarget)
			goto REJECT3;
	}

	// Make and record the mutation (the ctor applies it to the genome)
	recordModification(::new CopyModification(iGene, iSource, iTarget, _strBases.substr(iSource,cbBases).c_str()));
	TFLOW(MUTATION,L2,(LLTRACE, "Copying %ld bases from %ld to %ld in trial %ld", cbBases, (iSource+1), (iTarget+1), getTrial()));
	TDATA(MUTATION,L4,(LLTRACE, "Copied %s", _strBases.substr(iSource,cbBases).c_str()));
	return true;

REJECT1:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected copy to %ld of %ld bases from %ld to preserve genes - frame-shift caused",
		(iTarget+1), cbBases, (iSource+1));
	recordTermination(NULL, STGT_MUTATION, STGR_COPY, "Rejected copy to %ld of %ld bases from %ld to preserve genes - frame-shift caused",
		(iTarget+1), cbBases, (iSource+1));
	goto REJECT;

REJECT2:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected copy to %ld of %ld bases from %ld to preserve genes - not contained within a single gene",
		(iTarget+1), cbBases, (iSource+1));
	recordTermination(NULL, STGT_MUTATION, STGR_COPY, "Rejected copy to %ld of %ld bases from %ld to preserve genes - not contained within a single gene",
		(iTarget+1), cbBases, (iSource+1));
	goto REJECT;

REJECT3:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected copy to %ld of %ld bases from %ld to preserve genes - gene start or stop codon affected",
		(iTarget+1), cbBases, (iSource+1));
	recordTermination(NULL, STGT_MUTATION, STGR_COPY, "Rejected copy to %ld of %ld bases from %ld to preserve genes - gene start or stop codon affected",
		(iTarget+1), cbBases, (iSource+1));
	goto REJECT;

REJECT:
	TFLOW(MUTATION,L4,(LLTRACE, "Rejected copy of %s", pbBases));
	undoStatistics(MT_COPY, cbBases, false);
	return false;
}

/*
 * Function: handleDelete
 *
 * Validate and apply a deletion of cbBases at iTarget. Returns true if
 * applied.
 * NOTE(review): the out-of-bounds case jumps straight to REJECT, whose
 * trace calls _strBases.substr(iTarget,cbBases) — std::string::substr
 * throws when iTarget exceeds the genome length. Confirm callers
 * guarantee iTarget <= length.
 */
bool Genome::handleDelete(const Mutation& mt, bool fPreserveGenes)
{
	ENTER(MUTATION,handleDelete);

	size_t iTarget = mt.targetIndex();
	size_t cbBases = mt.countBases();

	ASSERT(mt.isDelete());
	ASSERT(fPreserveGenes);
	ASSERT(Codon::onCodonBoundary(iTarget));
	ASSERT(Codon::hasWholeCodons(cbBases));

	// Determine the affected genes, if any
	size_t iGeneFirst = indexToGene(iTarget);
	size_t iGeneLast = indexToGene(iTarget+cbBases-1);

	// Record mutational statistics (unwound on rejection)
	recordStatistics(MT_DELETE, cbBases, false);

	// Ensure the deletion remains within the boundaries of the genome
	if ((iTarget+cbBases) > _strBases.length())
		goto REJECT;

	// Preserve any existing genes
	if (fPreserveGenes)
	{
		// If the deletion overlaps multiple genes or causes a frame-shift, reject it
		if (iGeneFirst != iGeneLast)
			goto REJECT1;
		if (!Codon::hasWholeCodons(cbBases))
			goto REJECT2;

		// If the deletion removes the start or stop codon of any one gene, reject it
		if (iGeneFirst < _vecGenes.size())
		{
			const Range& rgGene = _vecGenes[iGeneFirst].getRange();
			if ( (rgGene.getStart()+Codon::s_cchCODON) > iTarget
				|| (rgGene.getEnd()-Codon::s_cchCODON) < (iTarget + cbBases - 1))
				goto REJECT3;
		}
	}

	// Make and record the mutation (the ctor applies it to the genome)
	recordModification(::new DeleteModification(iGeneFirst, iTarget, _strBases.substr(iTarget,cbBases).c_str()));
	TFLOW(MUTATION,L2,(LLTRACE, "Deleting %ld bases from %ld in trial %ld", cbBases, (iTarget+1), getTrial()));
	TDATA(MUTATION,L4,(LLTRACE, "Deleted %s", _strBases.substr(iTarget,cbBases).c_str()));
	return true;

REJECT1:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected deletion at %ld of %ld bases to preserve genes - multiple genes affected",
		(iTarget+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_DELETE, "Rejected deletion at %ld of %ld bases to preserve genes - multiple genes affected",
		(iTarget+1), cbBases);
	goto REJECT;

REJECT2:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected deletion at %ld of %ld bases to preserve genes - frame-shift caused",
		(iTarget+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_DELETE, "Rejected deletion at %ld of %ld bases to preserve genes - frame-shift caused",
		(iTarget+1), cbBases);
	goto REJECT;

REJECT3:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected deletion at %ld of %ld bases to preserve genes - gene start or stop codon affected",
		(iTarget+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_DELETE, "Rejected deletion at %ld of %ld bases to preserve genes - gene start or stop codon affected",
		(iTarget+1), cbBases);
	goto REJECT;

REJECT:
	TFLOW(MUTATION,L4,(LLTRACE, "Rejected deletion of %s", _strBases.substr(iTarget,cbBases).c_str()));
	undoStatistics(MT_DELETE, cbBases, false);
	return false;
}

/*
 * Function: handleInsert
 *
 * Validate and apply an insertion of the mutation's bases at iTarget.
 * Returns true if applied.
 * NOTE(review): the rgGene access below is unconditional, but iGene may be
 * >= _vecGenes.size() when the insertion point lies outside any gene (the
 * start-codon loop above explicitly tolerates that case) — looks like a
 * possible out-of-bounds access; confirm callers only insert inside genes.
 */
bool Genome::handleInsert(const Mutation& mt, bool fPreserveGenes)
{
	ENTER(MUTATION,handleInsert);

	size_t iTarget = mt.targetIndex();
	size_t cbBases = mt.countBases();
	const char* pbBases = mt.bases().c_str();

	ASSERT(mt.isInsert());
	ASSERT(VALID(pbBases));
	ASSERT(::strlen(pbBases) == cbBases);
	ASSERT(fPreserveGenes);
	ASSERT(Codon::onCodonBoundary(iTarget));
	ASSERT(Codon::hasWholeCodons(cbBases));

	// Determine the containing gene, if any
	size_t iGene = indexToGene(iTarget);

	// Record mutational statistics (unwound on rejection)
	recordStatistics(MT_INSERT, cbBases, false);

	// Preserve any existing genes
	if (fPreserveGenes)
	{
		// Only allow insertions of whole codons
		if (!Codon::hasWholeCodons(cbBases))
			goto REJECT1;

		// Check each whole codon within the inserted bases
		for (size_t iBase=0; iBase < cbBases; iBase += Codon::s_cchCODON)
		{
			if (iGene >= _vecGenes.size() && Codon::isStart(pbBases+iBase))
				goto REJECT2;
			if (Codon::isStop(pbBases+iBase))
				goto REJECT3;
		}

		// Ensure the insertion occurs between the start and end codons
		// - Insertions are *at* the target index, so an insertion *may* occur at the gene's stop codon
		const Range& rgGene = _vecGenes[iGene].getRange();
		if ( (rgGene.getStart()+Codon::s_cchCODON) > iTarget
			|| (rgGene.getEnd()-Codon::s_cchCODON+1) < iTarget)
			goto REJECT4;
	}

	// Make and record the mutation (the ctor applies it to the genome)
	recordModification(::new InsertModification(iGene, iTarget, pbBases));
	TFLOW(MUTATION,L2,(LLTRACE, "Inserting %ld bases at %ld in trial %ld", cbBases, (iTarget+1), getTrial()));
	TDATA(MUTATION,L4,(LLTRACE, "Inserted %s", string(pbBases,cbBases).c_str()));
	return true;

REJECT1:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected insertion at %ld of %ld bases to preserve genes - frame-shift caused",
		(iTarget+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_INSERT, "Rejected insertion at %ld of %ld bases to preserve genes - frame-shift caused",
		(iTarget+1), cbBases);
	goto REJECT;

REJECT2:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected insertion at %ld of %ld bases to preserve genes - illegal start codon added",
		(iTarget+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_INSERT, "Rejected insertion at %ld of %ld bases to preserve genes - illegal start codon added",
		(iTarget+1), cbBases);
	goto REJECT;

REJECT3:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected insertion at %ld of %ld bases to preserve genes - illegal stop codon added",
		(iTarget+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_INSERT, "Rejected insertion at %ld of %ld bases to preserve genes - illegal stop codon added",
		(iTarget+1), cbBases);
	goto REJECT;

REJECT4:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected insertion at %ld of %ld bases to preserve genes - gene start or stop codon affected",
		(iTarget+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_INSERT, "Rejected insertion at %ld of %ld bases to preserve genes - gene start or stop codon affected",
		(iTarget+1), cbBases);
	goto REJECT;

REJECT:
	TFLOW(MUTATION,L4,(LLTRACE, "Rejected insertion of %s", pbBases));
	undoStatistics(MT_INSERT, cbBases, false);
	return false;
}

/*
 * Function: handleTranspose
 *
 * Validate and apply a transposition (move cbBases from iSource to
 * iTarget). Returns true if applied.
 * NOTE(review): the REJECT paths below use STGR_COPY (not a transpose
 * reason code) and pass (iTarget+1) before (iSource+1) although the
 * message reads "from ... to ..." — both look like copy/paste slips from
 * handleCopy; confirm intent before relying on these diagnostics.
 */
bool Genome::handleTranspose(const Mutation& mt, bool fPreserveGenes)
{
	ENTER(MUTATION,handleTranspose);

	size_t iSource = mt.sourceIndex();
	size_t iTarget = mt.targetIndex();
	size_t cbBases = mt.countBases();

	ASSERT(mt.isTranspose());
	ASSERT((iSource+cbBases) < _strBases.length());
	ASSERT(fPreserveGenes);
	ASSERT(Codon::onCodonBoundary(iSource));
	ASSERT(Codon::onCodonBoundary(iTarget));
	ASSERT(Codon::hasWholeCodons(cbBases));

	// First, determine and copy the bases from the source location
	// (the copy must be taken before the modification deletes them)
	size_t iGeneTarget = indexToGene(iTarget);
	size_t iGeneSource = indexToGene(iSource);
	string strBasesSource(_strBases.c_str(), iSource, cbBases);

	// Record mutational statistics (unwound on rejection)
	recordStatistics(MT_TRANSPOSE, cbBases, false);

	// Preserve any existing genes
	if (fPreserveGenes)
	{
		// Only allow transposition of whole codons that do not cross gene boundaries
		if (!Codon::hasWholeCodons(cbBases))
			goto REJECT1;
		if ( iGeneTarget >= _vecGenes.size()
			|| iGeneSource >= _vecGenes.size()
			|| iGeneTarget != iGeneSource
			|| iGeneSource != indexToGene(iSource+cbBases-1))
			goto REJECT2;

		// Both the source range and the insertion point must avoid start/stop codons
		const Range& rgGene = _vecGenes[iGeneSource].getRange();
		if ( (rgGene.getStart()+Codon::s_cchCODON) > iSource
			|| (rgGene.getEnd()-Codon::s_cchCODON) < (iSource + cbBases - 1)
			|| (rgGene.getStart()+Codon::s_cchCODON) > iTarget
			|| (rgGene.getEnd()-Codon::s_cchCODON+1) < iTarget)
			goto REJECT3;
	}

	// Make and record the mutation (the ctor applies it to the genome)
	recordModification(::new TransposeModification(iGeneSource, iGeneTarget, iSource, iTarget, strBasesSource.c_str()));
	TFLOW(MUTATION,L2,(LLTRACE, "Transposing %ld bases from %ld to %ld in trial %ld", cbBases, (iSource+1), (iTarget+1), getTrial()));
	TDATA(MUTATION,L4,(LLTRACE, "Transposed %s", strBasesSource.c_str()));
	return true;

REJECT1:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected transposition from %ld to %ld of %ld bases to preserve genes - frame-shift caused",
		(iTarget+1), (iSource+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_COPY, "Rejected transposition from %ld to %ld of %ld bases to preserve genes - frame-shift caused",
		(iTarget+1), (iSource+1), cbBases);
	goto REJECT;

REJECT2:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected transposition from %ld to %ld of %ld bases to preserve genes - not contained within a single gene",
		(iTarget+1), (iSource+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_COPY, "Rejected transposition from %ld to %ld of %ld bases to preserve genes - not contained within a single gene",
		(iTarget+1), (iSource+1), cbBases);
	goto REJECT;

REJECT3:
	recordAttempt(ST_FILELINE, STTR_MUTATION, "Rejected transposition from %ld to %ld of %ld bases to preserve genes - gene start or stop codon affected",
		(iTarget+1), (iSource+1), cbBases);
	recordTermination(NULL, STGT_MUTATION, STGR_COPY, "Rejected transposition from %ld to %ld of %ld bases to preserve genes - gene start or stop codon affected",
		(iTarget+1), (iSource+1), cbBases);
	goto REJECT;

REJECT:
	TFLOW(MUTATION,L4,(LLTRACE, "Rejected transposition of %s", strBasesSource.c_str()));
	undoStatistics(MT_TRANSPOSE, cbBases, false);
	return false;
}

/*
 * Function: changeBases
 *
 * Overwrite bases in place (length-preserving) and mark the containing
 * gene, if any, invalid over the affected range.
 */
void Genome::changeBases(size_t iGene, size_t iTarget, const std::string& strBases, bool fSilent)
{
	ENTER(MUTATION,changeBases);
	TFLOW(MUTATION,L3,(LLTRACE, "Making a %s change in genome at %lu", (fSilent ? "silent" : "non-silent"), iTarget));

	// Change the bases
	_strBases.replace(iTarget, strBases.length(), strBases);

	// Mark the affected gene invalid
	if (iGene < _vecGenes.size())
	{
		_vecGenes[iGene].markInvalid(Gene::GC_CHANGE, Range(iTarget,iTarget+strBases.length()-1), fSilent);
		_grfGenesInvalid.set(iGene);
	}
}

/*
 * Function: deleteBases
 *
 * Remove cbBases at iTarget, refresh the length statistics, invalidate the
 * affected gene, and shift the ranges of all genes after it.
 * NOTE(review): cbBases is size_t, so -cbBases relies on moveRange's
 * parameter being signed (or on modular wraparound) — confirm moveRange's
 * signature treats this as a negative offset.
 */
void Genome::deleteBases(size_t iGene, size_t iTarget, size_t cbBases)
{
	ENTER(MUTATION,deleteBases);
	TFLOW(MUTATION,L3,(LLTRACE, "Deleting %lu bases from genome at %lu", cbBases, (iTarget+1)));

	// Remove the bases
	_strBases.erase(iTarget,cbBases);

	// Update the statistics
	_statsRecordRate._cbBases = _strBases.length();
	_stats._cbBases = _statsRecordRate._cbBases;

	// Shrink the affected gene and mark it invalid, if any
	if (iGene < _vecGenes.size())
	{
		_vecGenes[iGene].markInvalid(Gene::GC_DELETE, Range(iTarget,iTarget+cbBases-1), false);
		_grfGenesInvalid.set(iGene);
	}

	// Re-align all subsequent genes
	for (++iGene; iGene < _vecGenes.size(); ++iGene)
		_vecGenes[iGene].moveRange(-cbBases);
}

/*
 * Function: insertBases
 *
 * Insert strBases at iTarget, refresh the length statistics, extend and
 * invalidate the affected gene, and shift the ranges of all genes after it.
 */
void Genome::insertBases(size_t iGene, size_t iTarget, const string& strBases)
{
	ENTER(MUTATION,insertBases);
	TFLOW(MUTATION,L3,(LLTRACE, "Inserting %lu bases to the genome at %lu", strBases.length(), (iTarget+1)));

	size_t cbBases = strBases.length();

	// Insert the new bases
	_strBases.insert(iTarget, strBases);

	// Update the statistics
	_statsRecordRate._cbBases = _strBases.length();
	_stats._cbBases = _statsRecordRate._cbBases;

	// Extend the affected gene, if any, to encompass the inserted bases and mark it invalid
	if (iGene < _vecGenes.size())
	{
		_vecGenes[iGene].markInvalid(Gene::GC_INSERT, Range(iTarget,iTarget+cbBases-1), false);
		_grfGenesInvalid.set(iGene);
	}

	// Re-align all subsequent genes
	for (++iGene; iGene < _vecGenes.size(); ++iGene)
		_vecGenes[iGene].moveRange(cbBases);
}
<filename>lib/epdiy/epdiy/src/epd_driver/pca9555.h #ifndef PCA9555_H #define PCA9555_H #include <esp_err.h> #include <driver/i2c.h> #define PCA_PIN_P00 0x0001 #define PCA_PIN_P01 0x0002 #define PCA_PIN_P02 0x0004 #define PCA_PIN_P03 0x0008 #define PCA_PIN_P04 0x0010 #define PCA_PIN_P05 0x0020 #define PCA_PIN_P06 0x0040 #define PCA_PIN_P07 0x0080 #define PCA_PIN_PC10 0x0100 #define PCA_PIN_PC11 0x0200 #define PCA_PIN_PC12 0x0400 #define PCA_PIN_PC13 0x0800 #define PCA_PIN_PC14 0x1000 #define PCA_PIN_PC15 0x2000 #define PCA_PIN_PC16 0x4000 #define PCA_PIN_PC17 0x8000 #define PCA_PIN_P_ALL 0x00FF #define PCA_PIN_PC_ALL 0xFF00 #define PCA_PIN_ALL 0xFFFF #define PCA_PIN_NULL 0x0000 static const int EPDIY_PCA9555_ADDR = 0x20; uint8_t pca9555_read_input(i2c_port_t port, int high_port); esp_err_t pca9555_set_value(i2c_port_t port, uint8_t config_value, int high_port); esp_err_t pca9555_set_inversion(i2c_port_t port, uint8_t config_value, int high_port); esp_err_t pca9555_set_config(i2c_port_t port, uint8_t config_value, int high_port); #endif // PCA9555_H
Uganda welcoming millions of South Sudanese refugees, earning praise of United Nations Updated The African nation of Uganda is receiving praise from the United Nations for welcoming more than 1 million refugees from South Sudan, and the UN is urging other nations to follow Uganda's example and open their doors to those in need. Every day, hundreds of refugees from South Sudan are pouring across the border into neighbouring Uganda, covered in dust as they step down from the trucks delivering them to Omugo. The civilians escaping war and hunger in their homeland are seeking safety. Aid workers with loud hailers direct the refugees to their temporary homes. "It is a huge number of people and the government has been very welcoming to let people in, they have the right to work, they can move where ever they like," Carly Sheehan of the relief agency CARE said. "It's definitely a very interesting way to respond to a refugee influx that we have not seen in too many places before." More than a million South Sudanese refugees have arrived in Uganda. Last year, more refugees crossed the border here than crossed the Mediterranean by boat to reach Europe. The United Nations has described Uganda as one of the best places in the world to be a refugee. Morish Lowuya, one of the new arrivals, worked for a radio station in the South Sudanese town of Yei. He was forced to flee his home when armed rebels arrived. "We heard gunshots at our neighbourhood. Then we started running to the bush," Mr Lowuya said. "So they reached our home. They burnt everything of mine, including my documents. "Then they killed my sister-in-law and the child of three months. They were all killed. "They burnt even my grandfather in the house." Rival rebel groups and government troops continue to engage in a brutal conflict in South Sudan — there is no end in sight to the bloodshed. As a result, the South Sudanese refugees are expected to be in Uganda for several years. 
The Ugandan Government is giving the new arrivals small plots of land so they can farm and feed themselves. There has been some unrest between locals and refugees, but the Ugandan Government ensures that up to 30 per cent of the international aid goes to local communities to build roads, clinics and improve the water supply. Sunday Anguyo is a community leader in the district of Ocea, where the local population is 31,000 — but they have welcomed more than 65,000 refugees. "Of course people are happy. Because the refugee brought here is also helping the community within this area," Mr Anguyo said. "They improve the water facilities and give us some access roads. "The community is also enjoying it. And taking the safe water and the treated water in their community … because of the refugees coming here. "No fighting here. People are just in peace." But Uganda is warning it cannot continue to carry the refugee burden on its own. Ms Sheehan says the international community needs to provide support. "Absolutely, this response is hugely underfunded," she said. "The UN has an appeal and has only received about 25 per cent of the funding it needs. "So, while we do as much as we can as the international community, there is still lots more that needs to be done as people keep coming in." Topics: refugees, immigration, community-and-society, uganda, south-sudan First posted
package com.es.data;

/**
 * Created by perfection on 15-12-25.
 *
 * Simple holder for iOS-related string data.
 */
public class IosData {
    // Currently empty; presumably meant to be populated with entries later — TODO confirm intended contents.
    String[] iosDatas = {

    };
}
use crate::common::BitMatrix;
use crate::{Error, ResultError};

use super::FormatInformation;
use super::{Version, Versions};

/// Parses the function patterns (format and version information) out of a
/// QR Code's raw `BitMatrix`. Caches parsed results so repeated reads are cheap.
pub struct BitMatrixParser {
    bitMatrix: BitMatrix,
    versions: Versions,
    // Cached results of readVersion()/readFormatInformation(); None until first parse.
    parsedVersion: Option<Version>,
    parsedFormatInfo: Option<FormatInformation>,
    // When true, i/j coordinates are swapped on every bit read (mirrored symbol).
    mirror: bool,
}

impl BitMatrixParser {
    /**
     * @param bitMatrix {@link BitMatrix} to parse
     * @throws FormatException if dimension is not >= 21 and 1 mod 4
     */
    fn new(bitMatrix: &BitMatrix) -> ResultError<Self> {
        let dimension = bitMatrix.getHeight();
        // Valid QR symbols are square with side 21 + 4*k, hence dimension % 4 == 1.
        if dimension < 21 || (dimension & 0x03) != 1 {
            return Err(Error::FormatException(String::from("")));
        }
        Ok(BitMatrixParser {
            bitMatrix: bitMatrix.clone(),
            parsedVersion: None,
            parsedFormatInfo: None,
            mirror: false,
            versions: Versions::new(),
        })
    }

    /**
     * <p>Reads format information from one of its two locations within the QR Code.</p>
     *
     * @return {@link FormatInformation} encapsulating the QR Code's format info
     * @throws FormatException if both format information locations cannot be parsed as
     * the valid encoding of format information
     */
    fn readFormatInformation(&mut self) -> ResultError<FormatInformation> {
        // Return the cached value on repeat calls.
        if let Some(parsedFormatInfo) = &self.parsedFormatInfo {
            return Ok(parsedFormatInfo.clone());
        }

        // Read top-left format info bits
        let mut formatInfoBits1 = 0;
        for i in 0..6 {
            formatInfoBits1 = self.copyBit(i, 8, formatInfoBits1);
        }
        // .. and skip a bit in the timing pattern ...
        formatInfoBits1 = self.copyBit(7, 8, formatInfoBits1);
        formatInfoBits1 = self.copyBit(8, 8, formatInfoBits1);
        formatInfoBits1 = self.copyBit(8, 7, formatInfoBits1);
        // .. and skip a bit in the timing pattern ...
        for j in (0..6).rev() {
            formatInfoBits1 = self.copyBit(8, j, formatInfoBits1);
        }

        // Read the top-right/bottom-left pattern too (the redundant second copy).
        let dimension = self.bitMatrix.getHeight();
        let mut formatInfoBits2 = 0;
        let jMin = dimension - 7;
        for j in (jMin..dimension).rev() {
            formatInfoBits2 = self.copyBit(8, j, formatInfoBits2);
        }
        for i in dimension - 8..dimension {
            formatInfoBits2 = self.copyBit(i, 8, formatInfoBits2);
        }

        // decodeFormatInformation error-corrects across both copies; `?` propagates failure.
        self.parsedFormatInfo =
            FormatInformation::decodeFormatInformation(formatInfoBits1, formatInfoBits2)?;
        if let Some(formatInfo) = &self.parsedFormatInfo {
            return Ok(formatInfo.clone());
        }
        Err(Error::FormatException(String::from("")))
    }

    /**
     * <p>Reads version information from one of its two locations within the QR Code.</p>
     *
     * @return {@link Version} encapsulating the QR Code's version
     * @throws FormatException if both version information locations cannot be parsed as
     * the valid encoding of version information
     */
    fn readVersion(&self) -> ResultError<Version> {
        if let Some(version) = &self.parsedVersion {
            return Ok(version.clone());
        }

        let dimension = self.bitMatrix.getHeight();

        // Versions 1..=6 have no encoded version info; derive directly from the size.
        let provisionalVersion = (dimension - 17) / 4;
        if provisionalVersion <= 6 {
            return Ok(self
                .versions
                .get_version_for_number(provisionalVersion)?
                .clone());
        }

        // Read top-right version info: 3 wide by 6 tall
        let mut versionBits = 0;
        let ijMin = dimension - 11;
        for j in (0..6).rev() {
            for i in (ijMin..dimension - 8).rev() {
                versionBits = self.copyBit(i, j, versionBits);
            }
        }

        // NOTE(review): decoding of versionBits (and the bottom-left copy) is not yet implemented.
        todo!()
    }

    // Shift the accumulator left one bit and append the matrix bit at (i, j);
    // coordinates are swapped when parsing a mirrored symbol.
    fn copyBit(&self, i: i32, j: i32, versionBits: i32) -> i32 {
        let bit = if self.mirror {
            self.bitMatrix.get(j as u32, i as u32)
        } else {
            self.bitMatrix.get(i as u32, j as u32)
        };
        if bit {
            (versionBits << 1) | 0x1
        } else {
            versionBits << 1
        }
    }
}
// Cypress end-to-end test: walks every subpage of the Printables site,
// returning to the home page (via the banner title) between sections.
it('can visit all subpages', () => {
  cy.visitHome();
  cy.contains('Printables');

  // Calendar page
  cy.findByRole('link', { name: /calendar/i }).click();
  cy.findByRole('button', { name: /print calendar/i });

  // Back home
  cy.findByRole('banner').within(() => {
    cy.findByText('Printables').click();
  });
  cy.contains(/Printable Materials for Education/i);

  // Addition Worksheets
  cy.findByRole('list', { name: /worksheets/i }).within(() => {
    cy.findByRole('link', { name: /addition.+fill.+blank/i }).click();
  });
  cy.findByRole('region', { name: /customize form/i }).within(() => {
    cy.findByRole('heading', { name: /addition.+fill.+blank/i });
  });

  // Back home
  cy.findByRole('banner').within(() => {
    cy.findByText('Printables').click();
  });

  // Pattern Worksheets
  cy.findByRole('list', { name: /worksheets/i }).within(() => {
    cy.findByRole('link', { name: /patterns/i }).click();
  });
  cy.findByRole('region', { name: /customize form/i }).within(() => {
    cy.findByRole('heading', { name: /patterns/i });
  });
});
<filename>clients/rust/src/models/timezone_schema.rs /* * Location API * * Geolocation, Geocoding and Maps * * OpenAPI spec version: 2.0.0 * * Generated by: https://openapi-generator.tech */ /// TimezoneSchema : Timezone object found for the location. #[allow(unused_imports)] use serde_json::Value; #[derive(Debug, Serialize, Deserialize)] pub struct TimezoneSchema { /// Short name of the Timezone #[serde(rename = "short_name")] short_name: Option<String>, /// The offset from UTC (in seconds) for the given location. Considers DST savings. #[serde(rename = "offset_sec")] offset_sec: Option<f32>, /// Represents whether the zone currently observing DST or not #[serde(rename = "now_in_dst")] now_in_dst: Option<String>, /// Timezone name of the Location #[serde(rename = "name")] name: Option<String> } impl TimezoneSchema { /// Timezone object found for the location. pub fn new() -> TimezoneSchema { TimezoneSchema { short_name: None, offset_sec: None, now_in_dst: None, name: None } } pub fn set_short_name(&mut self, short_name: String) { self.short_name = Some(short_name); } pub fn with_short_name(mut self, short_name: String) -> TimezoneSchema { self.short_name = Some(short_name); self } pub fn short_name(&self) -> Option<&String> { self.short_name.as_ref() } pub fn reset_short_name(&mut self) { self.short_name = None; } pub fn set_offset_sec(&mut self, offset_sec: f32) { self.offset_sec = Some(offset_sec); } pub fn with_offset_sec(mut self, offset_sec: f32) -> TimezoneSchema { self.offset_sec = Some(offset_sec); self } pub fn offset_sec(&self) -> Option<&f32> { self.offset_sec.as_ref() } pub fn reset_offset_sec(&mut self) { self.offset_sec = None; } pub fn set_now_in_dst(&mut self, now_in_dst: String) { self.now_in_dst = Some(now_in_dst); } pub fn with_now_in_dst(mut self, now_in_dst: String) -> TimezoneSchema { self.now_in_dst = Some(now_in_dst); self } pub fn now_in_dst(&self) -> Option<&String> { self.now_in_dst.as_ref() } pub fn reset_now_in_dst(&mut self) { 
self.now_in_dst = None; } pub fn set_name(&mut self, name: String) { self.name = Some(name); } pub fn with_name(mut self, name: String) -> TimezoneSchema { self.name = Some(name); self } pub fn name(&self) -> Option<&String> { self.name.as_ref() } pub fn reset_name(&mut self) { self.name = None; } }
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "build/build_config.h" #include "chrome/browser/memory/tab_stats.h" namespace memory { TabStats::TabStats() : is_app(false), is_internal_page(false), is_media(false), is_pinned(false), is_selected(false), is_discarded(false), has_form_entry(false), discard_count(0), renderer_handle(0), child_process_host_id(0), #if defined(OS_CHROMEOS) oom_score(0), #endif tab_contents_id(0) { } TabStats::~TabStats() { } } // namespace memory
def add_field(self, field_data):
    """Append a new field built from ``field_data`` merged over the defaults.

    Every value in ``field_data`` is converted to ``str`` and passed through
    ``decode_special_char`` before merging; keys not supplied fall back to the
    defaults below. The field's ``id`` is always the index it gets in
    ``self.fields``.

    Returns the newly appended field dict.
    """
    def_field = {'id': None, 'ref': None, 'orient': 'H', 'posx': '0',
                 'posy': '0', 'size': '50', 'attributs': '0000',
                 'hjust': 'C', 'props': 'CNN', 'name': ''}
    # Decode into a fresh dict instead of mutating the caller's field_data
    # in place (the original clobbered the argument as a side effect).
    decoded = {key: decode_special_char(str(value))
               for key, value in field_data.items()}
    field = {**def_field, **decoded}
    field['id'] = str(len(self.fields))
    self.fields.append(field)
    return field
# Reads two integers and reports which one is larger (or that they are equal).
print('---Digite dois números---\n')
n1 = int(input('\033[35m1° número: '))
n2 = int(input('\033[34m2° número: '))
if n1 == n2:
    # Equal values get their own (green) message.
    print('\033[32mOs dois são iguais')
else:
    maior = n1 if n1 > n2 else n2
    print('O {} é maior'.format(maior))
/// will always return a flat color regardless or alpha pub fn mix_colors<C: RGB>(color1: C, color2: C, mix_ratio: f32) -> C { let (r1, g1, b1) = color1.rgb(); let (r2, g2, b2) = color2.rgb(); let (r1, g1, b1) = (r1 as i16, g1 as i16, b1 as i16); let (r2, g2, b2) = (r2 as i16, g2 as i16, b2 as i16); let (r, g, b): (i16, i16, i16) = (((r2 - r1) as f32 * mix_ratio) as i16 + r1, ((g2 - g1) as f32 * mix_ratio) as i16 + g1, ((b2 - b1) as f32 * mix_ratio) as i16 + b1); let (r, g, b) = (max(min(r, 0xFF), 0) as u8, max(min(g, 0xFF), 0) as u8, max(min(b, 0xFF), 0) as u8); C::new(r, g, b) }
def random_mbh(type='agn'):
    """Draw a random black-hole mass (in solar masses) from a log-normal prior.

    Parameters
    ----------
    type : str
        'agn' for active galactic nuclei (log10 M ~ N(7.83, 0.63)) or
        'xrb' for X-ray binaries (log10 M ~ N(1.1, 0.15)).

    Raises
    ------
    ValueError
        If ``type`` is neither 'agn' nor 'xrb'.
    """
    from random import gauss
    if type == 'agn':
        return 10 ** gauss(7.83, 0.63)
    if type == 'xrb':
        return 10 ** gauss(1.1, 0.15)
    # ValueError is the idiomatic exception for a bad argument value
    # (and is still caught by callers catching the old bare Exception).
    raise ValueError("type must be 'agn' or 'xrb'")
Harmonic Power Flow Formulation based on the Linear Power Flow in Microgrids
In this paper a harmonic power flow formulation for microgrids based on a power flow linearized through Wirtinger's calculus is presented. Through this formulation it is possible to calculate the harmonic power flow independently of the fundamental power flow, which is an advantage when working with tertiary control problems in microgrids. The algorithm with which the formulation was simulated is presented. The fundamental and the harmonic power flow are implemented in a modified CIGRE test case for microgrids. Finally, an evaluation of the harmonic distortion associated with the modeled PV systems is given, and a performance evaluation is also carried out by comparing the presented approach with the current injection harmonic power flow method starting from an iterative power flow.
import { ISimpleResponse } from '../types';
import {
  Router as ExpressRouter,
  Request,
  Response,
  urlencoded,
  json,
  Express,
  RequestHandler
} from 'express';
import ControllerFactory from '../core/controller-factory';
import { UsersController } from '../controllers/users';
import { Router } from './router';
import { j200 } from '../decorators/responses';
import * as compression from 'compression';
import { error as logError } from '../utils/logger';
import * as mongodb from 'mongodb';

/**
 * Main class to use for managing user authentication.
 * Mounts an express sub-router at `${rootPath}/auth` exposing
 * GET /activate-account and PUT /password-reset.
 */
export class AuthRouter extends Router {
  private _rootPath: string;
  private _userController: UsersController;

  /**
   * Creates an instance of the user manager
   * @param rootPath URL prefix under which the /auth routes are mounted
   */
  constructor(rootPath: string) {
    super();
    this._rootPath = rootPath;
  }

  /**
   * Called to initialize this controller and its related database objects
   */
  async initialize(e: Express, db: mongodb.Db) {
    this._userController = ControllerFactory.get('users');

    // Setup the rest calls
    const router = ExpressRouter();
    router.use(compression());
    // Accept urlencoded, plain JSON and JSON:API payloads.
    router.use(urlencoded({ extended: true }) as RequestHandler);
    router.use(json() as RequestHandler);
    router.use(json({ type: 'application/vnd.api+json' }) as RequestHandler);

    router.get('/activate-account', this.activateAccount.bind(this));
    router.put('/password-reset', this.passwordReset.bind(this));

    // Register the path
    e.use((this._rootPath || '') + '/auth', router);

    await super.initialize(e, db);
    return this;
  }

  /**
   * Activates the user's account, then redirects the browser to the
   * caller-supplied `url` query parameter with a message/status appended.
   * NOTE(review): `redirectURL` comes straight from the query string —
   * looks like an open-redirect risk; confirm it is validated upstream.
   */
  private async activateAccount(req: Request, res: Response) {
    const redirectURL = req.query.url;

    try {
      // Check the user's activation and forward them onto the admin message page
      await this._userController.checkActivation(req.query.user as string, req.query.key as string);
      res.setHeader('Content-Type', 'application/json');
      res.redirect(`${redirectURL}?message=${encodeURIComponent('Your account has been activated!')}&status=success`);
    } catch (error) {
      logError(error.toString());
      res.setHeader('Content-Type', 'application/json');
      // res.redirect defaults to 302 anyway; status() here is redundant but harmless.
      res.status(302);
      res.redirect(`${redirectURL}?message=${encodeURIComponent(error.message)}&status=error`);
    }
  }

  /**
   * Resets the password if the user has a valid password token.
   * Body must contain `user`, `key` and `password`; @j200 converts the
   * return value / thrown errors into the HTTP response.
   */
  @j200()
  private async passwordReset(req: Request, res: Response) {
    // Validate the request payload before touching the controller.
    if (!req.body) throw new Error('Expecting body content and found none');
    if (!req.body.user) throw new Error('Please specify a user');
    if (!req.body.key) throw new Error('Please specify a key');
    if (!req.body.password) throw new Error('Please specify a password');

    // Check the user's activation and forward them onto the admin message page
    await this._userController.resetPassword(req.body.user, req.body.key, req.body.password);

    const response: ISimpleResponse = { message: 'Your password has been reset' };
    return response;
  }
}
def _assert_all(self, query, rows):
    """Decorate `query` via self._decorate_query and assert that executing it
    on self.session yields exactly `rows`."""
    assert_all(self.session, self._decorate_query(query), rows)
#ifndef HcalSimAlgos_HFShape_h
#define HcalSimAlgos_HFShape_h
#include <vector>

#include "SimCalorimetry/CaloSimAlgos/interface/CaloVShape.h"
#include "CalibCalorimetry/HcalAlgos/interface/HcalPulseShapes.h"

/**

   \class HFShape

   \brief shaper for HF

   Implements the CaloVShape interface: operator() evaluates the pulse
   shape at a given time, timeToRise() reports the rise time.

*/

class HFShape : public CaloVShape {
public:
  HFShape();
  ~HFShape() override {}

  double operator()(double time) const override;
  double timeToRise() const override;

private:
  // Underlying tabulated HF pulse shape.
  HcalPulseShapes::Shape shape_;
};

#endif
// hexdig_fun() is borrowed from newlib gdtoa-gethex.c.
// Possible author is David M. Gay, Copyright (C) 1998 by Lucent Technologies,
// licensed with Historical Permission Notice and Disclaimer (HPND).
//
// Maps a hex digit to its value plus a 0x10 "is a hex digit" flag;
// returns 0 for any non-hex-digit character.
static int hexdig_fun(unsigned char c)
{
  int value;
  if (c >= '0' && c <= '9')
    value = c - '0';
  else if (c >= 'a' && c <= 'f')
    value = c - 'a' + 10;
  else if (c >= 'A' && c <= 'F')
    value = c - 'A' + 10;
  else
    return 0;
  return value + 0x10;
}
import {
  readFile,
} from "xlsx";
import {
  IDirectory,
  Directory,
} from "../source/directory";
import {
  IWorkbook,
  Workbook,
} from "../source/workbook";
import {
  ISheet,
  Sheet,
} from "../source/sheet";
import {
  IList,
} from "../source/list";
import {
  Column,
} from "../source/column";
import {
  Row,
} from "../source/row";
import {
  Cell,
  CellBooleanParser,
  CellNumberParser,
  CellStringParser,
} from "../source/cell";

// Demo parser wiring used by the tests: binds sub-directories and workbooks
// by exact name or regex pattern.
async function demoDirectoryParserA(directory: IDirectory): Promise<void> {
  directory
    .bindToSubDirectory("sub", demoDirectoryParserB)
    .bindToWorkbook("fake_client_a.xlsx", demoWorkbookParser);
}

// Recursive variant: matches directory/workbook names by regex.
async function demoDirectoryParserB(directory: IDirectory): Promise<void> {
  directory
    .bindToSubDirectory(/fake_client/, demoDirectoryParserB)
    .bindToWorkbook(/fake_client_.\.xlsx/, demoWorkbookParser);
}

// Builds a Directory rooted at the test data folder with parser A attached.
export function demoDirectory(): Directory {
  const directory = new Directory("./test/data/");
  demoDirectoryParserA(directory);
  return directory;
}

// Binds the two sheets of interest, by name and by regex.
async function demoWorkbookParser(workbook: IWorkbook): Promise<void> {
  workbook
    .bindToSheet("Other Questions", demoSheetParser)
    .bindToSheet(/Harm Details/, demoSheetParser);
}

export function demoWorkbook(): Workbook {
  const workbook = new Workbook(readFile("./test/data/fake_client_a.xlsx"));
  demoWorkbookParser(workbook);
  return workbook;
}

// Binds columns, rows, ranges and a single cell on a sheet.
async function demoSheetParser(sheet: ISheet): Promise<void> {
  sheet
    .bindToColumn("M", 25, demoListParser)
    .bindToColumnRange("N", 26, 2, demoListParser)
    .bindToRow("0", 25, demoListParser)
    .bindToRowRange("P", 26, 2, demoListParser)
    .bindToCell("Q", 35, 35, CellBooleanParser);
}

export function demoSheet(): Sheet {
  const sheet = new Sheet(readFile("./test/data/fake_client_a.xlsx").Sheets["Harm Details"]);
  demoSheetParser(sheet);
  return sheet;
}

// Binds cells within a list (a column or a row) using the stock parsers
// plus one inline custom parser.
async function demoListParser(list: IList): Promise<void> {
  list
    .bindToCell("A", 25, CellBooleanParser)
    .bindToCellRange("B", 26, 2, CellNumberParser)
    .bindToCell("C", 28, CellStringParser)
    .bindToCellRange("D", 29, 2, async (raw: string) => "test");
}

export function demoColumn(): Column {
  const column = new Column(readFile("./test/data/fake_client_a.xlsx").Sheets["Harm Details"], 0);
  demoListParser(column);
  return column;
}

export function demoRow(): Row {
  const row = new Row(readFile("./test/data/fake_client_a.xlsx").Sheets["Harm Details"], 0);
  demoListParser(row);
  return row;
}
package de._13ducks.spacebatz.server.data.entities.move;

import de._13ducks.spacebatz.shared.Movement;

/**
 * Manages the position and movement of an entity. At minimum provides methods
 * to obtain X and Y.
 *
 * LIKE ALL CLASSES IN THIS PACKAGE, THIS ONE IS WRITE-LOCKED!
 * NOBODY BUT ME MAY CHANGE THIS CLASS!
 * ALL OTHER CHANGES WILL BE REVERTED!
 *
 * @author <NAME> <<EMAIL>>
 */
public interface Mover {

    /**
     * Returns the current X position of the associated entity.
     * May (but need not) depend on the current game tick due to interpolation.
     *
     * @return X coordinate
     */
    public double getX();

    /**
     * Returns the current Y position of the associated entity.
     * May (but need not) depend on the current game tick due to interpolation.
     *
     * @return Y coordinate
     */
    public double getY();

    /**
     * Returns the speed of the associated entity in tiles per tick.
     *
     * @return speed of the entity in tiles per tick
     */
    public double getSpeed();

    /**
     * Sets the speed of the associated entity.
     * Implementations must guarantee that this method may be called at any
     * time, even during movement, and that new values take effect immediately.
     * Values less than or equal to 0 are forbidden.
     *
     * @param speed new speed
     */
    public void setSpeed(double speed);

    /**
     * Computes the movement/position for the given tick.
     *
     * @param gametick the game tick
     */
    public void tick(int gametick);

    /**
     * Determines whether the entity position has changed since the last tick.
     * If so, the FFG must update the position.
     * Need not be 100% precise; may report incorrectly for small/short movements.
     *
     * @return true if the position has changed
     */
    public boolean positionUpdateRequired();

    /**
     * Returns a Movement for synchronization with the client.
     * Movement systems that do not move uniformly/interpolated (which is
     * allowed) must still simulate their movements for the client this way.
     */
    public Movement getMovement();
}
use toy_rsa::{decrypt, encrypt}; fn main() { let msg: u32 = 12345; let p: u32 = 0xed23_e6cd; let q: u32 = 0xf050_a04d; let pub_key: u64 = <KEY>; let enc = encrypt(pub_key, msg); let dec = decrypt((p, q), enc); println!("p: {}; q: {}; pubkey: {}", p, q, pub_key); println!("enc: {}, dec: {}", enc, dec); }
/**
 * Created by jack_zhao on 2018/2/5.
 *
 * Generic RecyclerView adapter that maps data classes to view-holder classes
 * (or generator lambdas) keyed by Class.hashCode(), and can wrap its data
 * list in a dynamic proxy that triggers notifyDataSetChanged on mutation.
 */
public abstract class BaseAdapter<T> extends RecyclerView.Adapter<BaseViewHolder> implements IBaseAdapter {
    private static final String TAG = "BaseAdapter";
    // dataType hashCode -> view-holder class, constructed reflectively.
    protected Map<Integer, Class> viewHolderTypeMap = new HashMap<>();
    // dataType hashCode -> factory lambda; takes precedence over the class map.
    protected Map<Integer, ViewHolderGenerator> viewHolderGeneratorMap = new HashMap<>();
    protected List<T> dataList = new ArrayList<>();

    public BaseAdapter() {
        super();
        // Subclasses register their data->holder mappings here.
        registerType();
    }

    public Class getViewHolderTypeByDataType(int dataType) {
        return viewHolderTypeMap.get(dataType);
    }

    // Callbacks for drag/swipe item interactions.
    public interface ItemChangedListener<T> {
        void itemMoved(int fromPosition, int toPosition);

        void itemSwiped(int pos, T data);
    }

    /** Subclasses register data-type to view-holder mappings here. */
    public abstract void registerType();

    @Override
    public BaseViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        if (viewType == -1) {
            return null;
        }
        // Prefer an explicit generator if one was registered for this type.
        ViewHolderGenerator generator = viewHolderGeneratorMap.get(viewType);
        if (generator != null) {
            return generator.getViewHolder(parent);
        }
        // Fall back to reflective construction: holder must have a (ViewGroup) ctor.
        Class clazz = viewHolderTypeMap.get(viewType);
        BaseViewHolder viewHolder = null;
        try {
            Constructor c = clazz.getConstructor(ViewGroup.class);
            viewHolder = (BaseViewHolder) c.newInstance(parent);
        } catch (Exception e) {
            Log.e(TAG, "onCreateViewHolder: ", e);
            throw new RuntimeException(e);
        }
        return viewHolder;
    }

    @Override
    public void onBindViewHolder(BaseViewHolder holder, int position) {
        holder.bindDataSafe(dataList.get(position));
    }

    // has bug — NOTE(review): Class.hashCode() values can collide across
    // classes, which would map two data types to the same holder; confirm.
    @Override
    public int getItemViewType(int position) {
        if (dataList.size() <= position || position < 0) {
            return -1;
        }
        Log.d(TAG, "getItemViewType: " + dataList.get(position).getClass() + "-->" + dataList.get(position).getClass().hashCode());
        return dataList.get(position).getClass().hashCode();
    }

    // Remove by object identity; delegates to the positional overload.
    public Object removeItemWithAnimation(Object target) {
        return removeItemWithAnimation(CollectionUtils.position(getDataList(), target));
    }

    // Remove by position, notifying the RecyclerView so removal is animated.
    public Object removeItemWithAnimation(int position) {
        if (position < 0) return null;
        Object o = getDataList().remove(position);
        notifyItemRemoved(position);
        notifyItemRangeChanged(position, getDataList().size() - position);
        return o;
    }

    @Override
    public int getItemCount() {
        return dataList.size();
    }

    // Wraps dataList in a proxy; mutations through the returned list trigger
    // notifyDataSetChanged on the main thread. Callers must use the returned
    // proxy, not the original list, for that to work.
    public List bindData(Context context, List dataList) {
        this.dataList = dataList;
        ListProxy proxy = new ListProxy();
        proxy.newProxyInstance(dataList, () -> new Handler(context.getMainLooper()).post(() -> this.notifyDataSetChanged()));
        List resultList = (List<T>) proxy.getProxyInstance();
        return resultList;
    }

    @Override
    public List<T> getDataList() {
        return dataList;
    }

    public <T> void addHolderGenerator(Class<T> dataType, ViewHolderGenerator<T> viewHolderGenerator) {
        viewHolderGeneratorMap.put(dataType.hashCode(), viewHolderGenerator);
    }

    public <T> void addHolderGenerator(int key, ViewHolderGenerator<T> viewHolderGenerator) {
        viewHolderGeneratorMap.put(key, viewHolderGenerator);
    }

    // Factory for view holders, registered per data type.
    @FunctionalInterface
    public interface ViewHolderGenerator<T> {
        BaseViewHolder<T> getViewHolder(ViewGroup parent);
    }

    @Override
    public boolean isPrivatePosition(int position) {
        return true;
    }

    // Inner/outer position mapping is the identity here; subclasses with
    // headers/footers presumably override — TODO confirm.
    @Override
    public int getInnerPosition(int position) {
        return position;
    }

    @Override
    public int getOuterPosition(int position) {
        return position;
    }

    @Override
    public void registerViewHolderType(Class data, Class<BaseViewHolder> viewHolder) {
        viewHolderTypeMap.put(data.hashCode(), viewHolder);
    }

    interface DataListener {
        void onDataChanged();
    }

    // Dynamic proxy around a List that debounces change notifications:
    // a "set" call schedules the listener 100 ms later, resetting the timer
    // on each new call.
    class ListProxy implements InvocationHandler {
        private List realObject;
        private DataListener listener;
        private Handler handler = new Handler();
        private Runnable runnable = () -> listener.onDataChanged();

        public Object newProxyInstance(List realObject, DataListener listener) {
            this.realObject = realObject;
            this.listener = listener;
            return Proxy.newProxyInstance(realObject.getClass().getClassLoader(), realObject.getClass().getInterfaces(), this);
        }

        public Object getProxyInstance() {
            return Proxy.newProxyInstance(realObject.getClass().getClassLoader(), realObject.getClass().getInterfaces(), this);
        }

        @Override
        public Object invoke(Object proxy, Method method, Object[] objects) throws Throwable {
            Log.e(TAG, "invoke: " + method);
            Object result = method.invoke(realObject, objects);
            // Only "set" triggers the (debounced) change notification;
            // add/remove through the proxy do NOT notify — TODO confirm intended.
            if (method.getName().equals("set")) {
                handler.removeCallbacks(runnable);
                handler.postDelayed(runnable, 100);
            }
            return result;
        }
    }
}
/** * Tests http pages of ocsp * * @version $Id: ProtocolOcspHttpTest.java 22642 2016-01-25 14:05:47Z mikekushner $ * */ public class ProtocolOcspHttpTest extends ProtocolOcspTestBase { public static final String DEFAULT_SUPERADMIN_CN = "SuperAdmin"; private static final String DSA_DN = "CN=OCSPDSATEST,O=Foo,C=SE"; private static final Logger log = Logger.getLogger(ProtocolOcspHttpTest.class); private static final InternalEjbcaResources intres = InternalEjbcaResources.getInstance(); private static final AuthenticationToken admin = new TestAlwaysAllowLocalAuthenticationToken(new UsernamePrincipal("ProtocolOcspHttpTest")); private static byte[] ks3 = Base64.decode(("MIACAQMwgAYJKoZIhvcNAQcBoIAkgASCAyYwgDCABgkqhkiG9w0BBwGggCSABIID" + "DjCCAwowggMGBgsqhkiG9w0BDAoBAqCCAqkwggKlMCcGCiqGSIb3DQEMAQMwGQQU" + "/h0pQXq7ZVjYWlDvzEwwmiJ8O8oCAWQEggJ4MZ12+kTVGd1w7SP4ZWlq0bCc4MsJ" + "O0FFSX3xeVp8Bx16io1WkEFOW3xfqjuxKOL6YN9atoOZdfhlOMhmbhglm2PJSzIg" + "JSDHvWk2xKels5vh4hY1iXWOh48077Us4wP4Qt94iKglCq4xwxYcSCW8BJwbu93F" + "uxE1twnWXbH192nMhaeIAy0v4COdduQamJEtHRmIJ4GZwIhH+lNHj/ARdIfNw0Dm" + "uPspuSu7rh6rQ8SrRsjg63EoxfSH4Lz6zIJKF0OjNX07T8TetFgznCdGCrqOZ1fK" + "5oRzXIA9hi6UICiuLSm4EoHzEpifCObpiApwNj3Kmp2uyz2uipU0UKhf/WqvmU96" + "yJj6j1JjZB6p+9sgecPFj1UMWhEFTwxMEwR7iZDvjkKDNWMit+0cQyeS7U0Lxn3u" + "m2g5e6C/1akwHZsioLC5OpFq/BkPtnbtuy4Kr5Kwb2y7vSiKpjFr7sKInjdAsgCi" + "8kyUV8MyaIfZdtREjwqBe0imfP+IPVqAsl1wGW95YXsLlK+4P1bspAgeHdDq7Q91" + "bJJQAS5OTD38i1NY6MRtt/fWsShVBLjf2FzNpw6siHHl2N7BDNyO3ALtgfp50e0Z" + "Dsw5WArgKLiXfwZIrIKbYA73RFc10ReDqnJSF+NXgBo1/i4WhZLHC1Osl5UoKt9q" + "UoXIUmYhAwdAT5ZKVw6A8yp4e270yZTXNsDz8u/onEwNc1iM0v0RnPQhNE5sKEZH" + "QrMxttiwbKe3YshCjbruz/27XnNA51t2p1M6eC1HRab4xSHAyH5NTxGJ8yKhOfiT" + "aBKqdTH3P7QzlcoCUDVDDe7aLMaZEf+a2Te63cZTuUVpkysxSjAjBgkqhkiG9w0B" + "CRQxFh4UAHAAcgBpAHYAYQB0AGUASwBlAHkwIwYJKoZIhvcNAQkVMRYEFCfeHSg6" + "EdeP5A1IC8ydjyrjyFSdAAQBAAQBAAQBAAQBAASCCBoAMIAGCSqGSIb3DQEHBqCA" + "MIACAQAwgAYJKoZIhvcNAQcBMCcGCiqGSIb3DQEMAQYwGQQURNy47tUcttscSleo" + 
"8gY6ZAPFOl0CAWSggASCB8jdZ+wffUP1B25Ys48OFBMg/itT0EBS6J+dYVofZ84c" + "x41q9U+CRMZJwVNZbkqfRZ+F3tLORSwuIcwyioa2/JUpv8uJCjQ2tru5+HtqCrzR" + "Huh7TfdiMqvjkKpnXi69DPPjQdCSPwYMy1ahZrP5KgEZg4S92xpU2unF1kKQ30Pq" + "PTEBueDlFC39rojp51Wsnqb1QzjPo53YvJQ8ztCoG0yk+0omELyPbc/qMKe5/g5h" + "Lx7Q+2D0PC/ZHtoDkCRfMDKwgwALFsSj2uWNJsCplspmc7YgIzSr/GqqeSXHp4Ue" + "dwVJAswrhpkXZTlp1rtl/lCSFl9akwjY1fI144zfpYKpLqfoHL1uI1c3OumrFzHd" + "ZldZYgsM/h3qjgu8qcXqI0sKVXsffcftCaVs+Bxmdu9vpY15rlx1e0an/O05nMKU" + "MBU2XpGkmWxuy0tOKs3QtGzHUJR5+RdEPURctRyZocEjJgTvaIMq1dy/FIaBhi+d" + "IeAbFmjBu7cv9C9v/jMuUjLroycmo7QW9jGgyTOQ68J+6w2/PtqiqIo3Ry9WC0SQ" + "8+fVNOGLr5O2YPpw17sDQa/+2gjozngvL0OHiABwQ3EbXAQLF046VYkTi5R+8iGV" + "3jlTvvStIKY06E/s/ih86bzwJWAQENCazXErN69JO+K3IUiwxac+1AOO5WyR9qyv" + "6m/yHdIdbOVE21M2RARbI8UiDpRihCzk4duPfj/x2bZyFqLclIMhbTd2UOQQvr+W" + "4etpMJRtyFGhdLmNgYAhYrbUgmdL1kRkzPzOs77PqleMpfkii7HPk3HlVkM7NIqd" + "dN0WQaQwGJuh5f1ynhyqtsaw6Gu/X56H7hpziAh0eSDQ5roRE7yy98h2Mcwb2wtY" + "PqVFTmoKuRWR2H5tT6gCaAM3xiSC7RLa5SF1hYQGaqunqBaNPYyUIg/r03dfwF9r" + "AkOhh6Mq7Z2ktzadWTxPl8OtIZFVeyqIOtSKBHhJyGDGiz3+SSnTnSX81NaTSJYZ" + "7YTiXkXvSYNpjpPckIKfjpBw0T4pOva3a6s1z5p94Dkl4kz/zOmgveGd3dal6wUV" + "n3TR+2cyv51WcnvB9RIp58SJOc+CvCvYTvkEdvE2QtRw3wt4ngGJ5pxmC+7+8fCf" + "hRDzw9LBNz/ry88y/0Bidpbhwr8gEkmHuaLp43WGQQsQ+cWYJ8AeLZMvKplbCWqy" + "iuks0MnKeaC5dcB+3BL55OvcTfGkMtz0oYBkcGBTbbR8BKJZgkIAx7Q+/rCaqv6H" + "HN/cH5p8iz5k+R3MkmR3gi6ktelQ2zx1pbPz3IqR67cTX3IyTX56F2aY54ueY17m" + "7hFwSy4aMen27EO06DXn/b6vPKj73ClE2B/IPHO/H2e8r04JWMltFWuStV0If5x0" + "5ZImXx068Xw34eqSWvoMzr97xDxUwdlFgrKrkMKNoTDhA4afrZ/lwHdUbNzh6cht" + "jHW/IfIaMo3NldN/ihO851D399FMsWZW7YA7//RrWzBDiLvh+RfwkMOfEpbujy0G" + "73rO/Feed2MoVXvmuKBRpTNyFuBVvFDwIzBT4m/RaVf5m1pvprSk3lo43aumdN9f" + "NDETktVZ/CYaKlYK8rLcNBKJicM5+maiQSTa06XZXDMY84Q0xtCqJ/aUH4sa/z8j" + "KukVUSyUZDJk/O82B3NA4+CoP3Xyc9LAUKucUvoOmGt2JCw6goB/vqeZEg9Tli0Q" + "+aRer720QdVRkPVXKSshL2FoXHWUMaBF8r//zT6HbjTNQEdxbRcBNvkUXUHzITfl" + "YjQcEn+FGrF8+HVdXCKzSXSgu7mSouYyJmZh42spUFCa4j60Ks1fhQb2H1p72nJD" 
+ "n1mC5sZkU68ITVu1juVl/L2WJPmWfasb1Ihnm9caJ/mEE/i1iKp7qaY9DPTw5hw4" + "3QplYWFv47UA/sOmnWwupRuPk7ISdimuUnih8OYR75rJ0z6OYexvj/2svx9/O5Mw" + "654jFF2hAq69jt7GJo6VZaeCRCAxEU7N97l3EjqaKJVrpIPQ+3yLmqHit/CWxImB" + "iIl3sW7MDEHgPdQy3QiZmAYNLQ0Te0ygcIHwtPyzhFoFmjbQwib2vxDqWaMQpUM1" + "/W96R/vbCjA7tfKYchImwAPCyRM5Je2FHewErG413kZct5tJ1JqkcjPsP7Q8kmgw" + "Ec5QNq1/PZOzL1ZLr6ryfA4gLBXa6bJmf43TUkdFYTvIYbvH2jp4wpAtA152YgPI" + "FL19/Tv0B3Bmb1qaK+FKiiQmYfVOm/J86i/L3b8Z3jj8dRWEBztaI/KazZ/ZVcs/" + "50bF9jH7y5+2uZxByjkM/kM/Ov9zIHbYdxLw2KHnHsGKTCooSSWvPupQLBGgkd6P" + "M9mgE6MntS+lk9ucpP5j1LXo5zlZaLSwrvSzE3/bbWJKsJuomhRbKeZ+qSYOWvPl" + "/1RqREyZHbSDKzVk39oxH9EI9EWKlCbrz5EHWiSv0+9HPczxbO3q+YfqcY8plPYX" + "BvgxHUeDR+LxaAEcVEX6wd2Pky8pVwxQydU4cEgohrgZnKhxxLAvCp5sb9kgqCrh" + "luvBsHpmiUSCi/r0PNXDgApvTrVS/Yv0jTpX9u9IWMmNMrnskdcP7tpEdkw8/dpf" + "RFLLgqwmNEhCggfbyT0JIUxf2rldKwd6N1wZozaBg1uKjNmAhJc1RxsABAEABAEA" + "BAEABAEABAEABAEABAEABAEABAEABAEABAEAAAAAAAAAMDwwITAJBgUrDgMCGgUA" + "BBSS2GOUxqv3IT+aesPrMPNn9RQ//gQUYhjCLPh/h2ULjh+1L2s3f5JIZf0CAWQA" + "AA==") .getBytes()); private static byte[] ksexpired = Base64.decode(("MIACAQMwgAYJKoZIhvcNAQcBoIAkgASCA+gwgDCABgkqhkiG9w0BBwGggCSABIID" + "FzCCAxMwggMPBgsqhkiG9w0BDAoBAqCCArIwggKuMCgGCiqGSIb3DQEMAQMwGgQU" + "+FPoYyKdBmCiikns2YwMZh4pPSkCAgQABIICgC5leUCbJ8w3O8KEUMRvHOA+Xhzm" + "R5y7aHJHL1z3ZnoskDL4YW/r1TQ5AFliaH7e7kuA7NYOjv9HdFsZ9BekLkWPybit" + "rcryLkPbRF+YdAXNkbGluukY0F8O4FP9n7FtfBd5uKitvOHZgHp3JAC9A+jYfayk" + "ULfZRRGmzUys+D4czobY1tkCbQIb3kzR1kaqBownMkie+y5P56dRB2lJXpkpeilM" + "H0PZvckG5jQw7ua4sVUkIzyDAZpiCtNmOF5nvyRwQRLWAHwn7Yid5e8w2A6xTq6P" + "wko+2OdqHK/r/fmABREWf9GJa5Lb1QkUzITsWmPVskCUdl+VZzcYL8EV8cREH7DG" + "sWuKyp8UJ0m3fiJEZHR2538Ydp6yp6R6/9DcGwxj20fO9FQnUanYcs6bDgwZ46UK" + "blnbJAWGaChG3C9T6moXroLT7Mt2gxefW8RCds09EslhVTES01fmkovpcNuF/3U9" + "ukGTCN49/mnuUpeMDrm8/BotuL+jkWBOnFy3RfEfsHyPzYflBb/M9T7Q8wsGuh0O" + "oPecIsVvo4hgXX6R0fpYdPArMfuI5JaGopt07XRhbUuCqlEc4Q6DD46F/SVLk34Q" + 
"Yaq76xwVplsa4QZZKNE6QTpApM61KpIKFxP3FzkqQIL4AKNb/mbSclr7L25aQmMw" + "YiIgWOOaXlVh1U+4eZjqqVyYH5a6Y5e0EpMdMagvfuIA09b/Bp9LVnxQD6GmQgRC" + "MRCaTr3wMQqEv92iTrj718rWmyYWTRArH/7mb4Ef250x2WgqjytuShBcL4McagQG" + "NMpMBZLFAlseQYQDlgkGDMfcSZJQ34CeH7Uvy+lBYvFIGnb2o3hnHuZicOgxSjAj" + "BgkqhkiG9w0BCRQxFh4UAG8AYwBzAHAAYwBsAGkAZQBuAHQwIwYJKoZIhvcNAQkV" + "MRYEFO0W5oXdg6jY3vp316fMaEFzMEYpAAAAAAAAMIAGCSqGSIb3DQEHBqCAMIAC" + "AQAwgAYJKoZIhvcNAQcBMCgGCiqGSIb3DQEMAQYwGgQU30rkEXMscb9M1uCfhs6v" + "wV3eWCICAgQAoIAEggcYMs4iLKX/OQHK9oFu7l79H2zf0IlV58kAyjQG4yvadJnK" + "Y6FOVLkwidcX33qRnMkGI1vidvRBbxnyH5+HVd3hVws/v3XBbZvhhX7A8loZZmye" + "wFlHwT6TzIy/MJsz3Ev6EwoYBIID6HUrQhJiT/YPmiVhoWuaMw50YSbRGOUKwxEJ" + "ggqnC4WOPxdP8xZbD+h3V1/W0KdbKyqFyXYVnfTgDisyEBnEn2BN3frl7vlucRsS" + "ci0ZpJpkdlCyuF77KzPaq6/yAgPHAhABvjgiEPE11hsdDA635mDb1dRPoM6IFfzR" + "n6JGZ7PEkKHdHudimx55eoUTJskXYaNcrPR2jlrxxX6tWV07m1G61kbgNIeuBdK6" + "trJslSVPlli2YsTDQ2g+EmtDZc186nAYuQN03/TdSdhByPZxcT5nVs+xv1A3BdDX" + "ow1HCyuGyBrAIEVoITE171csT78iPxNY9bukYy678XDxWkDQu7QMV8FeGEXec5sh" + "NL/IUSYtzuPxaP5V/QALC0ybGxjIoxmdKS0zPxyekA+Cj8XjQBKVW2DPjWXWtAHR" + "6lfWpwIgTwD0B7o59RVjKo/jrWRsH+RKfN17FXSKInTrm1gNHQPDCyIAv2luTSUa" + "2qMRqH7/qivEWXbAWBz9dtEkqeuf/j698Rfie3QNtZ5qXmaVq1LBI0sduSJM+jHr" + "uRtICzEzWMvSqVnW+3ejyHmpLc6zBYx8VwNuFy8IH+qtV0pDYyoNL96KBOJhX2hf" + "DsH82SNf1CbIf8245YNmtzDby8h+3NXNIo8qAleLvgTgSN1tmS5kEJKw3M9/MYgE" + "8XHGATAJB0E7uVRS1Ktr8R1w0hunautq7ylsw62zXdPp+6EsO0tMluCyWB0lMNAh" + "uPiIMudNMA+O7NlCFQVTPxPxaRXg37dLm2XFy4ZnquKDuLvKkujdIwc9VBMER+MC" + "6FiNtJw5Kq4PcARt1ulKGMknn38+3jSh3Dzg93XNMUx7lmqZCosYc4kf5X6dAWKd" + "xBVNi3/hLejvWCCb55BncXiGMvs75L6b07IXcm3HTXZxCzzl5QtWM7XqpPVqbqhW" + "wz03K4qko97YdD61oa8719SRjqBpbaW6RKIx5qGvAWYKg5usNorm/SsGg37zAfPa" + "0LRoD22M5psU8MmH2E0iDDsf4sZDjeAY7LUGhgUGyyQ9t6hlEjD1Nhsxb9TSKNc+" + "UBzCVRqjUWqImo8q7ZHhcDn64eXY4sSyQWWRP+TUfbpfgo+tb6NQvEhceU8sQlAh" + "HGqi1/4kvc54O+dUFsRMJkXoobSRc053JgdUgaLQ22iI0nZSVVLgcR8/jTTvQhbv" + "LRNES5vdoSUd+QiC83Hlx38uZtCgJ7HZfdnhYdaRIFIc7K1nqV+8ht6s7DdXK/JP" 
+ "8/QhtsLLfn1kies1/Xi+FeATef57jtBKh75yeBR5WFigEtSgFbRUNTLIrQQiDK07" + "71bi+VA8QGH/dpUVNg0EggLZI0qqSXqD+2f2XnhK90fHl3RLZWX8xvU6sP6wGMLj" + "R+OlW0Gsv0gWeVLbSKRmNyesl0lznC2yVAeoyLMSkU6YLYCuzQTzZ2dpjdPwkBOP" + "7YhIIL7c1PWPGDLb35E/57Zd+I+dUdSX8SQyKzDgWyxyLGTaozkyaR3PK3XPKJNf" + "t+RjfAJOtN3uSIjhpj90YL28p+kSlWxGRLM7FFDsS8nkcWQ113ZSfUnC5k5HmGmK" + "FA5b6oVkxk98uxgK7jJ6h9wONZR9t8WbyfMYnjMgo5ZgGmKzoBRJ9rD0WiIJfHiR" + "zrv9yejClIHdseps4rB96hqQjXDSk1f3e/5IQ6Zp++x7nIZy50C9HfnuDugigpNr" + "IJS46o/86AgrBikc+CUoGLnu9OKvVCznFkwyz6ZzBdE3ITwHW4TXnlbkP888wax9" + "lCKde+7/dBdUVwasgrU/F05MKCGqjWHIZ0po0owOTjMzkllqDtEmUdyUrGmLEmsA" + "0tE8txLSi6TPmqL/th/7Os0B+7nyC3Ju8kBhmXVmoudcmWh2QH6VM6pegqETkCtA" + "hGErIKKrdUSVNXy4izJFh9dgyYJKwm+X6XAaLWN1nlQlS08U0jR3vikDfJqUknxP" + "Dg14TeC5Sgl2UjIpGX+XVxM8PV+2+WwvcwR0Nn1HFu99toZUD7FjkP6DR+XcHOhQ" + "1tZZsutVPuyVJW9sTiYw48fIlYWDJXVESbLHDNN5TJD4NY9fhzfG3BYlex+YbbOx" + "sCvmUNrrFwi1ZOGa/Z2ow5V7Kdf4rbWbyuV+0CCVJBcPTKageONp4AOaARpBMFg3" + "QuTvzwEXmrTMbbrPY2o1GOS8ulwOp1VI8PcOyGwRpHXzpRZPv2u9gTmYgnfu2PcU" + "F8NfHRFnPzFkO95KYFTYxZrg3vrU49IRJXqbjaeruQaKxPibxTDOsatJpWYAnw/s" + "KuCHXrnUlw5RLeublCbUAAAAAAAAAAAAAAAAAAAAAAAAMD0wITAJBgUrDgMCGgUA" + "BBRo3arw4fuHPsqvDnvA8Q/TLyjoRQQU3Xm6ZsAJT0/iLV7S3mKeme0FVGACAgQA" + "AAA=") .getBytes()); private final CAAdminSessionRemote caAdminSession = EjbRemoteHelper.INSTANCE.getRemoteSession(CAAdminSessionRemote.class); private final CaSessionRemote caSession = EjbRemoteHelper.INSTANCE.getRemoteSession(CaSessionRemote.class); private final CesecoreConfigurationProxySessionRemote cesecoreConfigurationProxySession = EjbRemoteHelper.INSTANCE .getRemoteSession(CesecoreConfigurationProxySessionRemote.class, EjbRemoteHelper.MODULE_TEST); private final GlobalConfigurationSessionRemote globalConfigurationSession = EjbRemoteHelper.INSTANCE.getRemoteSession(GlobalConfigurationSessionRemote.class); private final RevocationSessionRemote revocationSession = EjbRemoteHelper.INSTANCE.getRemoteSession(RevocationSessionRemote.class); 
// Remote EJB session proxies used by the tests below (resolved once per test-class instance).
private final SignSessionRemote signSession = EjbRemoteHelper.INSTANCE.getRemoteSession(SignSessionRemote.class);
private final EndEntityManagementSessionRemote endEntityManagementSession = EjbRemoteHelper.INSTANCE.getRemoteSession(EndEntityManagementSessionRemote.class);
private final OcspResponseGeneratorSessionRemote ocspResponseGeneratorSession = EjbRemoteHelper.INSTANCE.getRemoteSession(OcspResponseGeneratorSessionRemote.class);

// JUnit rule that logs entry (">") and exit ("<") of every test method at TRACE level.
@Rule
public final TestWatcher traceLogMethodsRule = new TestWatcher() {
    @Override
    protected void starting(final Description description) {
        log.trace(">" + description.getMethodName());
        super.starting(description);
    };
    @Override
    protected void finished(final Description description) {
        log.trace("<" + description.getMethodName());
        super.finished(description);
    }
};

@BeforeClass
public static void beforeClass() throws CertificateException {
    // Install BouncyCastle provider
    CryptoProviderTools.installBCProviderIfNotAvailable();
}

public ProtocolOcspHttpTest() throws MalformedURLException, URISyntaxException {
    super("http", "ejbca", "publicweb/status/ocsp");
}

/**
 * Re-creates the test CA and resets OCSP configuration before every test.
 * NOTE(review): requires a running EJBCA instance answering on httpReqPath
 * (verified by the assertTrue on the HTTP 200 below).
 */
@Before
public void setUp() throws Exception {
    CaTestCase.removeTestCA();
    CaTestCase.createTestCA();
    unknowncacert = CertTools.getCertfromByteArray(unknowncacertBytes, X509Certificate.class);
    helper.reloadKeys();
    log.debug("httpReqPath=" + httpReqPath);
    assertTrue("This test can only be run on a full EJBCA installation.",
            ((HttpURLConnection) new URL(httpReqPath + '/').openConnection()).getResponseCode() == 200);
    cacert = (X509Certificate) CaTestCase.getTestCACert();
    caid = CaTestCase.getTestCAId();
    // Start each test with the "non-existing is good/revoked" features disabled.
    Map<String, String> config = new HashMap<String, String>();
    config.put("ocsp.nonexistingisgood", "false");
    config.put("ocsp.nonexistingisrevoked", "false");
    helper.alterConfig(config);
    helper.reloadKeys();
    // Point the default responder at the test CA's subject DN.
    GlobalOcspConfiguration ocspConfiguration = (GlobalOcspConfiguration) globalConfigurationSession.getCachedConfiguration(GlobalOcspConfiguration.OCSP_CONFIGURATION_ID);
    ocspConfiguration.setOcspDefaultResponderReference(CertTools.getSubjectDN(CaTestCase.getTestCACert()));
    globalConfigurationSession.saveConfiguration(admin, ocspConfiguration);
}

@After
public void tearDown() throws Exception {
    CaTestCase.removeTestCA();
    removeDSACA();
    removeECDSACA();
}

public String getRoleName() {
    return this.getClass().getSimpleName();
}

@Test
public void test01Access() throws Exception {
    super.test01Access();
}

/**
 * Tests ocsp message
 *
 * @throws Exception
 *             error
 */
@Test
public void test02OcspGood() throws Exception {
    log.trace(">test02OcspGood()");

    // find a CA (TestCA?) create a user and generate his cert
    // send OCSP req to server and get good response
    // change status of cert to bad status
    // send OCSP req and get bad status
    // (send crap message and get good error)

    // Get user and ocspTestCert that we know...
    loadUserCert(this.caid);
    this.helper.reloadKeys();
    this.helper.verifyStatusGood(this.caid, this.cacert, this.ocspTestCert.getSerialNumber());
    log.trace("<test02OcspGood()");
}

/**
 * Tests ocsp message
 *
 * @throws Exception
 *             error
 */
@Test
public void test03OcspRevoked() throws Exception {
    log.trace(">test03OcspRevoked()");
    loadUserCert(this.caid);
    // Now revoke the certificate and try again
    this.revocationSession.revokeCertificate(admin, this.ocspTestCert, null, RevokedCertInfo.REVOCATION_REASON_KEYCOMPROMISE, null);
    this.helper.reloadKeys();
    this.helper.verifyStatusRevoked(this.caid, this.cacert, this.ocspTestCert.getSerialNumber(),
            RevokedCertInfo.REVOCATION_REASON_KEYCOMPROMISE, null);
    log.trace("<test03OcspRevoked()");
}

@Test
public void test04OcspUnknown() throws Exception {
    super.test04OcspUnknown();
}

@Test
public void test05OcspUnknownCA() throws Exception {
    super.test05OcspUnknownCA();
}

@Test
public void test06OcspSendWrongContentType() throws Exception {
    super.test06OcspSendWrongContentType();
}

/**
 * Exercises signed OCSP requests: a verifiable signed request, an unsigned
 * request (expects SignRequestException), a request signed with a keystore
 * whose CA is not in the cache, and one signed by an expired certificate
 * (both expect SignRequestSignatureException).
 */
@Test
public void test07SignedOcsp() throws Exception {
    assertTrue("This test can only be run on a full EJBCA installation.",
            ((HttpURLConnection) new URL(httpReqPath + '/').openConnection()).getResponseCode() == 200);

    // find a CA (TestCA?) create a user and generate his cert
    // send OCSP req to server and get good response
    // change status of cert to bad status
    // send OCSP req and get bad status
    // (send crap message and get good error)
    try {
        KeyPair keys = createUserCert(caid);

        // And an OCSP request
        OCSPReqBuilder gen = new OCSPReqBuilder();
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber()));
        Extension[] extensions = new Extension[1];
        extensions[0] = new Extension(OCSPObjectIdentifiers.id_pkix_ocsp_nonce, false, new DEROctetString("123456789".getBytes()));
        gen.setRequestExtensions(new Extensions(extensions));
        X509CertificateHolder chain[] = new X509CertificateHolder[2];
        chain[0] = new JcaX509CertificateHolder(ocspTestCert);
        chain[1] = new JcaX509CertificateHolder(cacert);
        gen.setRequestorName(chain[0].getSubject());
        OCSPReq req = gen.build(new JcaContentSignerBuilder("SHA1WithRSA").setProvider(BouncyCastleProvider.PROVIDER_NAME).build(keys.getPrivate()), chain);

        // First test with a signed OCSP request that can be verified
        Collection<Certificate> cacerts = new ArrayList<Certificate>();
        cacerts.add(cacert);
        CaCertificateCache certcache = CaCertificateCache.INSTANCE;
        certcache.loadCertificates(cacerts);
        X509Certificate signer = checkRequestSignature("127.0.0.1", req, certcache);
        assertNotNull(signer);
        assertEquals(ocspTestCert.getSerialNumber().toString(16), signer.getSerialNumber().toString(16));

        // Try with an unsigned request, we should get a SignRequestException
        req = gen.build();
        boolean caught = false;
        try {
            signer = checkRequestSignature("127.0.0.1", req, certcache);
        } catch (SignRequestException e) {
            caught = true;
        }
        assertTrue(caught);

        // sign with a keystore where the CA-certificate is not known
        KeyStore store = KeyStore.getInstance("PKCS12", "BC");
        ByteArrayInputStream fis = new
ByteArrayInputStream(ks3);
        store.load(fis, "foo123".toCharArray());
        Certificate[] certs = KeyTools.getCertChain(store, "privateKey");
        chain[0] = new JcaX509CertificateHolder((X509Certificate) certs[0]);
        chain[1] = new JcaX509CertificateHolder((X509Certificate) certs[1]);
        PrivateKey pk = (PrivateKey) store.getKey("privateKey", "foo123".toCharArray());
        req = gen.build(new BufferingContentSigner(new JcaContentSignerBuilder("SHA1WithRSA").build(pk), 20480), chain);
        // Send the request and receive a singleResponse, this response should
        // throw an SignRequestSignatureException
        caught = false;
        try {
            signer = checkRequestSignature("127.0.0.1", req, certcache);
        } catch (SignRequestSignatureException e) {
            caught = true;
        }
        assertTrue(caught);

        // sign with a keystore where the signing certificate has expired
        store = KeyStore.getInstance("PKCS12", "BC");
        fis = new ByteArrayInputStream(ksexpired);
        store.load(fis, "foo123".toCharArray());
        certs = KeyTools.getCertChain(store, "ocspclient");
        chain[0] = new JcaX509CertificateHolder((X509Certificate) certs[0]);
        chain[1] = new JcaX509CertificateHolder((X509Certificate) certs[1]);
        pk = (PrivateKey) store.getKey("ocspclient", "foo123".toCharArray());
        req = gen.build(new BufferingContentSigner(new JcaContentSignerBuilder("SHA1WithRSA").build(pk), 20480), chain);
        // Send the request and receive a singleResponse, this response should
        // throw an SignRequestSignatureException
        caught = false;
        try {
            signer = checkRequestSignature("127.0.0.1", req, certcache);
        } catch (SignRequestSignatureException e) {
            caught = true;
        }
        assertTrue(caught);
    } finally {
        // Clean up the end entity created by createUserCert(caid) above.
        endEntityManagementSession.deleteUser(admin, "ocsptest");
    }
} // test07SignedOcsp

/**
 * Tests ocsp message
 *
 * @throws Exception error
 */
@Test
public void test08OcspEcdsaGood() throws Exception {
    assertTrue("This test can only be run on a full EJBCA installation.",
            ((HttpURLConnection) new URL(httpReqPath + '/').openConnection()).getResponseCode() == 200);
    final int ecdsacaid = "CN=OCSPECDSATEST".hashCode();
    final CAInfo caInfo = addECDSACA("CN=OCSPECDSATEST", "secp256r1");
    final X509Certificate ecdsacacert = (X509Certificate) caInfo.getCertificateChain().iterator().next();
    helper.reloadKeys();
    try {
        // Make user and ocspTestCert that we know...
        createUserCert(ecdsacaid);
        this.helper.verifyStatusGood(ecdsacaid, ecdsacacert, this.ocspTestCert.getSerialNumber());
    } finally {
        endEntityManagementSession.deleteUser(admin, "ocsptest");
        CryptoTokenTestUtils.removeCryptoToken(admin, caInfo.getCAToken().getCryptoTokenId());
    }
} // test08OcspEcdsaGood

/**
 * Tests ocsp message
 *
 * @throws Exception
 *             error
 */
@Test
public void test09OcspEcdsaImplicitlyCAGood() throws Exception {
    assertTrue("This test can only be run on a full EJBCA installation.",
            ((HttpURLConnection) new URL(httpReqPath + '/').openConnection()).getResponseCode() == 200);
    int ecdsacaid = "CN=OCSPECDSAIMPCATEST".hashCode();
    final CAInfo caInfo = addECDSACA("CN=OCSPECDSAIMPCATEST", "implicitlyCA");
    final X509Certificate ecdsacacert = (X509Certificate) caInfo.getCertificateChain().iterator().next();
    helper.reloadKeys();
    try {
        // Make user and ocspTestCert that we know...
        createUserCert(ecdsacaid);
        this.helper.verifyStatusGood(ecdsacaid, ecdsacacert, this.ocspTestCert.getSerialNumber());
    } finally {
        endEntityManagementSession.deleteUser(admin, "ocsptest");
        CryptoTokenTestUtils.removeCryptoToken(admin, caInfo.getCAToken().getCryptoTokenId());
    }
} // test09OcspEcdsaImplicitlyCAGood

@Test
public void test10MultipleRequests() throws Exception {
    this.helper.reloadKeys();
    super.test10MultipleRequests();
}

@Test
public void test11MalformedRequest() throws Exception {
    super.test11MalformedRequest();
}

@Test
public void test12CorruptRequests() throws Exception {
    super.test12CorruptRequests();
}

/**
 * Just verify that a simple GET works.
 */
@Test
public void test13GetRequests() throws Exception {
    // See if the OCSP Servlet can read non-encoded requests
    final String plainReq = httpReqPath
            + '/'
            + resourceOcsp
            + '/'
            + "MGwwajBFMEMwQTAJBgUrDgMCGgUABBRBRfilzPB+Aevx0i1AoeKTkrHgLgQUFJw5gwk9BaEgsX3pzsRF9iso29ICCCzdx5N0v9XwoiEwHzAdBgkrBgEFBQcwAQIEECrZswo/a7YW+hyi5Sn85fs=";
    URL url = new URL(plainReq);
    log.info(url.toString()); // Dump the exact string we use for access
    HttpURLConnection con = (HttpURLConnection) url.openConnection();
    assertEquals("Response code did not match. ", 200, con.getResponseCode());
    assertNotNull(con.getContentType());
    assertTrue(con.getContentType().startsWith("application/ocsp-response"));
    OCSPResp response = new OCSPResp(IOUtils.toByteArray(con.getInputStream()));
    assertNotNull("Response should not be null.", response);
    assertTrue("Should not be considered malformed.", OCSPRespBuilder.MALFORMED_REQUEST != response.getStatus());
    // Same again, but with URL-encoded characters ("%2B" etc.) in the request path.
    final String dubbleSlashNonEncReq = httpReqPath
            + '/'
            + resourceOcsp
            + '/'
            + "MGwwajBFMEMwQTAJBgUrDgMCGgUABBRBRfilzPB%2BAevx0i1AoeKTkrHgLgQUFJw5gwk9BaEgsX3pzsRF9iso29ICCAvB//HJyKqpoiEwHzAdBgkrBgEFBQcwAQIEEOTzT2gv3JpVva22Vj8cuKo%3D";
    url = new URL(dubbleSlashNonEncReq);
    log.info(url.toString()); // Dump the exact string we use for access
    con = (HttpURLConnection) url.openConnection();
    assertEquals("Response code did not match. ", 200, con.getResponseCode());
    assertNotNull(con.getContentType());
    assertTrue(con.getContentType().startsWith("application/ocsp-response"));
    response = new OCSPResp(IOUtils.toByteArray(con.getInputStream()));
    assertNotNull("Response should not be null.", response);
    // NOTE(review): "concidered" is a typo, but it is part of the runtime assertion message, so left as-is.
    assertTrue("Should not be concidered malformed.", OCSPRespBuilder.MALFORMED_REQUEST != response.getStatus());
    // An OCSP request, ocspTestCert is already created in earlier tests
    loadUserCert(this.caid);
    this.helper.reloadKeys();
    this.helper.verifyStatusGood(this.caid, this.cacert, this.ocspTestCert.getSerialNumber());
}

@Test
public void test14CorruptGetRequests() throws Exception {
    super.test14CorruptGetRequests();
}

@Test
public void test15MultipleGetRequests() throws Exception {
    super.test15MultipleGetRequests();
}

/**
 * Tests ocsp message
 *
 * @throws Exception
 *             error
 */
@Test
public void test16OcspDsaGood() throws Exception {
    assertTrue("This test can only be run on a full EJBCA installation.",
            ((HttpURLConnection) new URL(httpReqPath + '/').openConnection()).getResponseCode() == 200);

    int dsacaid = DSA_DN.hashCode();
    X509Certificate ecdsacacert = addDSACA(DSA_DN, "DSA1024");
    helper.reloadKeys();
    // Make user and ocspTestCert that we know...
    createUserCert(dsacaid);
    this.helper.verifyStatusGood(dsacaid, ecdsacacert, this.ocspTestCert.getSerialNumber());
} // test16OcspDsaGood

/**
 * Verify that Internal OCSP responses are signed by CA signing key.
 */
@Test
public void test17OCSPResponseSignature() throws Exception {
    // Get user and ocspTestCert that we know...
    loadUserCert(caid);
    this.helper.reloadKeys();

    // And an OCSP request
    OCSPReqBuilder gen = new OCSPReqBuilder();
    gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber()));
    Extension[] extensions = new Extension[1];
    extensions[0] = new Extension(OCSPObjectIdentifiers.id_pkix_ocsp_nonce, false, new DEROctetString("123456789".getBytes()));
    gen.setRequestExtensions(new Extensions(extensions));
    OCSPReq req = gen.build();

    // POST the OCSP request
    URL url = new URL(httpReqPath + '/' + resourceOcsp);
    HttpURLConnection con = (HttpURLConnection) url.openConnection();
    // we are going to do a POST
    con.setDoOutput(true);
    con.setRequestMethod("POST");

    // POST it
    con.setRequestProperty("Content-Type", "application/ocsp-request");
    OutputStream os = con.getOutputStream();
    os.write(req.getEncoded());
    os.close();
    assertTrue("HTTP error", con.getResponseCode() == 200);

    // Some appserver (Weblogic) responds with
    // "application/ocsp-response; charset=UTF-8"
    assertNotNull("No Content-Type in reply.", con.getContentType());
    assertTrue(con.getContentType().startsWith("application/ocsp-response"));
    OCSPResp response = new OCSPResp(IOUtils.toByteArray(con.getInputStream()));
    // NOTE(review): this compares the OCSP response status (range 0..6 per RFC 6960)
    // against the HTTP code 200 -- presumably the intent is "status == SUCCESSFUL (0)";
    // as written the assertion can never fail. Confirm and tighten.
    assertTrue("Response status not the expected.", response.getStatus() != 200);
    BasicOCSPResp brep = (BasicOCSPResp) response.getResponseObject();
    boolean verify = brep.isSignatureValid(new JcaContentVerifierProviderBuilder().build(cacert.getPublicKey()));
    assertTrue("Signature verification", verify);
}

/**
 * Verify OCSP response for a malicious request. Uses nonsense payload.
 *
 * HTTP Content-length: 1000 byte ASN1 sequence length: 199995 byte Payload
 * size: 200000 byte (not including HTTP header)
 */
@Test
public void test18MaliciousOcspRequest() throws Exception {
    log.trace(">test18MaliciousOcspRequest");
    int i = 0;
    // Construct the fake data.
    byte data[] = new byte[LimitLengthASN1Reader.MAX_REQUEST_SIZE * 2];
    // The first byte indicate that this is a sequence. Necessary to past
    // the first test as an accepted OCSP object.
    data[0] = (byte) BERTags.SEQUENCE;
    // The second byte indicates the number if the following bytes are more
    // than can be represented by one byte and will be represented by 3
    // bytes instead.
    data[1] = (byte) 0x83;
    // The third through the forth bytes are the number of the following
    // bytes. (0x030D3B = 199995)
    data[2] = (byte) 0x03; // MSB
    data[3] = (byte) 0x0D;
    data[4] = (byte) 0x3B; // LSB
    // Fill the rest of the array with some fake data.
    for (i = 5; i < data.length; i++) {
        data[i] = (byte) i;
    }
    // Create the HTTP header
    String path = "/ejbca/" + resourceOcsp;
    String headers = "POST " + path + " HTTP/1.1\r\n" + "Host: "+httpHost+"\r\n" + "Content-Type: application/ocsp-request\r\n"
            + "Content-Length: 1000\r\n" + "\r\n";
    // Merge the HTTP headers and the raw data into one package.
    byte input[] = concatByteArrays(headers.getBytes(), data);
    // Create the socket.
    Socket socket = new Socket(InetAddress.getByName(httpHost), Integer.parseInt(httpPort));
    OutputStream os = socket.getOutputStream();
    try {
        // Send data byte for byte.
        try {
            os.write(input);
        } catch (IOException e) {
            log.info("Socket threw an IOException.", e);
            // Windows throws an IOException when trying to write more bytes to
            // the server than it should. JBoss on Linux does not.
            // assertTrue("Tried to write more than it should to the server (>1000), "+i, i > 1000);
            return;
        }
        /* Note that an Apache proxy interprets this as two requests in the same session
           (where the second one is bad): first a "200 OK" carrying the 5-byte OCSP
           response, then a "400 Bad Request" with an HTML error body. But since the
           response is ASN.1 encoded, the response is still correctly parsed even though
           we are provided 420 bytes extra. */
        // Reading the response.
        InputStream ins = socket.getInputStream();
        byte ret[] = new byte[1024];
        int len = ins.read(ret);
        assertTrue("Could not read response.", len!=-1);
        // Removing the HTTP headers. The HTTP headers end at the first occurrence of "\r\n\r\n".
        for (i = 3; i < len; i++) {
            if ((ret[i] == 0x0A) && (ret[i - 1] == 0x0D) && (ret[i-2] == 0x0A) && (ret[i - 3] == 0x0D)) {
                break;
            }
        }
        log.info("response headers: " + new String(ret, 0, i));
        int start = i + 1;
        byte respa[] = new byte[len - start];
        for (i = start; i < len; i++) {
            respa[i - start] = ret[i];
        }
        log.info("response contains: " + respa.length + " bytes.");
        log.info("response bytes: " + Hex.toHexString(respa));
        log.info("response as string:" + new String(respa));
        // Reading the response as a OCSPResp. When the input data array is
        // longer than allowed the OCSP response will return as an internal
        // error.
        OCSPResp response = new OCSPResp(respa);
        assertEquals("Incorrect response status.", OCSPRespBuilder.INTERNAL_ERROR, response.getStatus());
    } finally {
        os.close();
        socket.close();
    }
    log.trace("<test18MaliciousOcspRequest");
}

/**
 * Verify OCSP response for a malicious request. Uses nonsense payload.
 *
 * HTTP Content-length: 200000 byte ASN1 sequence length: 9996 byte Payload
 * size: 200000 byte (not including HTTP header)
 */
@Test
public void test19MaliciousOcspRequest() throws Exception {
    log.trace(">test19MaliciousOcspRequest");
    int i = 0;
    // Construct the fake data.
    byte data[] = new byte[LimitLengthASN1Reader.MAX_REQUEST_SIZE * 2];
    // The first byte indicate that this is a sequence. Necessary to past
    // the first test as an accepted OCSP object.
    data[0] = (byte) BERTags.SEQUENCE;
    // The second byte indicates the number of the following bytes are more
    // than can be represented by one byte and will be represented by 2
    // bytes instead.
    data[1] = (byte) 0x82;
    // The third through the forth bytes are the number of the following
    // bytes. (0x270C = 9996)
    data[2] = (byte) 0x27; // MSB
    data[3] = (byte) 0x0C; // LSB
    // Fill the rest of the array with some fake data.
    for (i = 4; i < data.length; i++) {
        data[i] = (byte) i;
    }
    // Create the HTTP header
    String path = "/ejbca/" + resourceOcsp;
    String headers = "POST " + path + " HTTP/1.1\r\n" + "Host: "+httpHost+"\r\n" + "Content-Type: application/ocsp-request\r\n"
            + "Content-Length: 200000\r\n" + "\r\n";
    // Merge the HTTP headers and the raw data into one package.
    byte input[] = concatByteArrays(headers.getBytes(), data);
    // Create the socket.
    Socket socket = new Socket(InetAddress.getByName(httpHost), Integer.parseInt(httpPort));
    // Send data byte for byte.
    OutputStream os = socket.getOutputStream();
    try {
        os.write(input);
    } catch (IOException e) {
        log.info("Socket threw an IOException.", e);
    }
    // Reading the response.
InputStream ins = socket.getInputStream();
    byte ret[] = new byte[1024];
    ins.read(ret);
    socket.close();
    // Removing the HTTP headers. The HTTP headers end at the last
    // occurrence of "\r\n".
    for (i = ret.length - 1; i > 0; i--) {
        if ((ret[i] == 0x0A) && (ret[i - 1] == 0x0D)) {
            break;
        }
    }
    int start = i + 1;
    byte respa[] = new byte[ret.length - start];
    for (i = start; i < ret.length; i++) {
        respa[i - start] = ret[i];
    }
    log.info("response contains: " + respa.length + " bytes.");
    // Reading the response as a OCSPResp.
    OCSPResp response = new OCSPResp(respa);
    assertEquals("Incorrect response status.", OCSPRespBuilder.MALFORMED_REQUEST, response.getStatus());
    log.trace("<test19MaliciousOcspRequest");
}

/**
 * Verify OCSP response for a malicious request where the POST data starts
 * with a proper OCSP request.
 */
@Test
public void test20MaliciousOcspRequest() throws Exception {
    log.trace(">test20MaliciousOcspRequest");
    // Start by sending a valid OCSP requests so we know the helpers work
    byte validOcspReq[] = getValidOcspRequest();
    OCSPResp response = sendRawRequestToOcsp(validOcspReq.length, validOcspReq, false);
    if (OCSPRespBuilder.SUCCESSFUL != response.getStatus()) {
        throw new IllegalStateException("Could not send standard raw request, test cannot continue. Instead of Successful (0), status was "
                + response.getStatus());
    }
    // Try sending a valid request and then keep sending some more data.
    byte[] buf = new byte[LimitLengthASN1Reader.MAX_REQUEST_SIZE * 2];
    Arrays.fill(buf, (byte) 123);
    buf = concatByteArrays(validOcspReq, buf);
    // This should return an error because we only allow content length of 100000 bytes
    response = sendRawRequestToOcsp(buf.length, buf, false);
    assertEquals("Incorrect response status.", OCSPRespBuilder.MALFORMED_REQUEST, response.getStatus());
    // Now try with a fake HTTP content-length header
    try {
        response = sendRawRequestToOcsp(validOcspReq.length, buf, false);
        // When sending a large request body with a too short content-length the serves sees this as two streaming
        // requests. The first request will be read and processed by EJBCA normally and sent back, but the
        // second one will not be a valid request so the server will send back an error.
        // Glassfish actually sends back a "400 Bad request". Our reading code in sendRawRequestToOcsp
        // does not handle multiple streaming responses so it will barf on the second one.
        // This is different for JBoss and Glassfish though, with JBoss we will get a IOException trying
        // to read the response, while for Glassfish we will get the response with 0 bytes from the 400 response
        // Only glassfish will come here, with a non-null response, but of length 2(/r/n?). JBoss (4, 5, 6) will go to the
        // IOException below
        try {
            byte[] encoded = response.getEncoded();
            if ((encoded != null) && (encoded.length > 2)) {
                // Actually this error message is wrong, since it is our client that does not handle streaming responses
                // where the first response should be good.
                fail("Was able to send a lot of data with a fake HTTP Content-length without any error.");
            }
        } catch (NullPointerException npe) { // NOPMD
            // the response.getEncoded() can give NPE, in some versions of BC, if it was not created with correct input
        }
    } catch (IOException e) {
        // Expected on JBoss, see comment above; deliberately ignored.
    }
    // Try sneaking through a payload that is just under the limit. The
    // responder will answer politely, but log a warning.
    buf = new byte[LimitLengthASN1Reader.MAX_REQUEST_SIZE - validOcspReq.length];
    Arrays.fill(buf, (byte) 123);
    buf = concatByteArrays(validOcspReq, buf);
    response = sendRawRequestToOcsp(buf.length, buf, false);
    assertEquals("Server accepted malicious request. (This might be a good thing!)", OCSPRespBuilder.SUCCESSFUL, response.getStatus());
    log.trace("<test20MaliciousOcspRequest");
}

/**
 * Tests ocsp message
 *
 * @throws Exception
 *             error
 */
@Test
public void test50OcspUnknownMayBeGood() throws Exception {
    log.trace(">test50OcspUnknownMayBeGood()");
    loadUserCert(this.caid);
    // An OCSP request for an unknown certificate (not exist in db)
    this.helper.verifyStatusUnknown(this.caid, this.cacert, new BigInteger("1"));
    // URL endings used to trigger the per-URI "non-existing is good/bad" regex overrides.
    final String bad1 = "Bad";
    final String bad2 = "Ugly";
    final String good1 = "Good";
    final String good2 = "Beautiful";
    {
        final Map<String,String> map = new HashMap<String, String>();
        map.put(OcspConfiguration.NONE_EXISTING_IS_GOOD, "true");
        map.put(OcspConfiguration.NONE_EXISTING_IS_BAD_URI+'1', ".*"+bad1+"$");
        map.put(OcspConfiguration.NONE_EXISTING_IS_BAD_URI+'2', ".*"+bad2+"$");
        map.put(OcspConfiguration.NONE_EXISTING_IS_GOOD_URI+'1', ".*"+good1+"$");
        map.put(OcspConfiguration.NONE_EXISTING_IS_GOOD_URI+'2', ".*"+good2+"$");
        this.helper.alterConfig(map);
    }
    this.helper.reloadKeys();
    this.helper.verifyStatusGood(this.caid, this.cacert, new BigInteger("1"));
    this.helper.setURLEnding(bad1);
    this.helper.verifyStatusUnknown(this.caid, this.cacert, new BigInteger("1"));
    this.helper.setURLEnding(bad2);
    this.helper.verifyStatusUnknown(this.caid, this.cacert, new BigInteger("1"));
    {
        final Map<String,String> map = new HashMap<String, String>();
        map.put(OcspConfiguration.NONE_EXISTING_IS_GOOD, "false");
        this.helper.alterConfig(map);
    }
    this.helper.setURLEnding("");
    this.helper.verifyStatusUnknown(this.caid, this.cacert, new BigInteger("1"));
    this.helper.setURLEnding(good1);
    this.helper.verifyStatusGood(this.caid, this.cacert, new BigInteger("1"));
this.helper.setURLEnding(good2); this.helper.verifyStatusGood( this.caid, this.cacert, new BigInteger("1") ); log.trace("<test50OcspUnknownMayBeGood()"); } /** * This test tests the feature of extensions of setting a '*' in front of the value in ocsp.extensionoid * forces that extension to be used for all requests. * * @throws Exception */ @Test public void testUseAlwaysExtensions() throws Exception { log.trace(">testUseAlwaysExtensions"); final String EXTENSION_OID = "ocsp.extensionoid"; final String EXTENSION_CLASS = "ocsp.extensionclass"; final String oldOidValue = cesecoreConfigurationProxySession.getConfigurationValue(EXTENSION_OID); final String oldClass = cesecoreConfigurationProxySession.getConfigurationValue(EXTENSION_CLASS); try { cesecoreConfigurationProxySession.setConfigurationValue(EXTENSION_OID, "*" + OcspCertHashExtension.CERT_HASH_OID); cesecoreConfigurationProxySession.setConfigurationValue(EXTENSION_CLASS, "org.ejbca.core.protocol.ocsp.extension.certhash.OcspCertHashExtension"); ocspResponseGeneratorSession.reloadOcspExtensionsCache(); // An OCSP request, ocspTestCert is already created in earlier tests OCSPReqBuilder gen = new OCSPReqBuilder(); loadUserCert(this.caid); this.helper.reloadKeys(); gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber())); OCSPReq req = gen.build(); BasicOCSPResp response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200); if (response == null) { throw new Exception("Could not retrieve response, test could not continue."); } Extension responseExtension = response.getExtension(new ASN1ObjectIdentifier(OcspCertHashExtension.CERT_HASH_OID)); assertNotNull("No extension sent with reply", responseExtension); } finally { cesecoreConfigurationProxySession.setConfigurationValue(EXTENSION_OID, oldOidValue); cesecoreConfigurationProxySession.setConfigurationValue(EXTENSION_CLASS, oldClass); 
ocspResponseGeneratorSession.reloadOcspExtensionsCache(); log.trace("<testUseAlwaysExtensions"); } } /** * Tests ocsp message * * @throws Exception * error */ @Test public void test60OcspUnknownIsRevoked() throws Exception { log.trace(">test60OcspUnknownIsRevoked()"); loadUserCert(this.caid); // An OCSP request for an unknown certificate (not exist in db) this.helper.verifyStatusUnknown( this.caid, this.cacert, new BigInteger("1") ); final String bad1 = "Bad"; final String bad2 = "Ugly"; final String good1 = "Good"; final String good2 = "Beautiful"; final String revoked1 = "Revoked"; final String revoked2 = "Denied"; { final Map<String,String> map = new HashMap<String, String>(); map.put(OcspConfiguration.NONE_EXISTING_IS_REVOKED, "true"); map.put(OcspConfiguration.NONE_EXISTING_IS_BAD_URI+'1', ".*"+bad1+"$"); map.put(OcspConfiguration.NONE_EXISTING_IS_BAD_URI+'2', ".*"+bad2+"$"); map.put(OcspConfiguration.NONE_EXISTING_IS_GOOD_URI+'1', ".*"+good1+"$"); map.put(OcspConfiguration.NONE_EXISTING_IS_GOOD_URI+'2', ".*"+good2+"$"); map.put(OcspConfiguration.NONE_EXISTING_IS_REVOKED_URI+'1', ".*"+revoked1+"$"); map.put(OcspConfiguration.NONE_EXISTING_IS_REVOKED_URI+'2', ".*"+revoked2+"$"); this.helper.alterConfig(map); } this.helper.reloadKeys(); this.helper.verifyStatusRevoked( this.caid, this.cacert, new BigInteger("1"), CRLReason.certificateHold, new Date(0) ); this.helper.setURLEnding(bad1); this.helper.verifyStatusUnknown( this.caid, this.cacert, new BigInteger("1") ); this.helper.setURLEnding(bad2); this.helper.verifyStatusUnknown( this.caid, this.cacert, new BigInteger("1") ); this.helper.setURLEnding(good1); this.helper.verifyStatusGood( this.caid, this.cacert, new BigInteger("1") ); this.helper.setURLEnding(good2); this.helper.verifyStatusGood( this.caid, this.cacert, new BigInteger("1") ); { final Map<String,String> map = new HashMap<String, String>(); map.put(OcspConfiguration.NONE_EXISTING_IS_REVOKED, "false"); this.helper.alterConfig(map); } 
this.helper.setURLEnding("");
        // With the global flag off, the default URL reports unknown again.
        this.helper.verifyStatusUnknown( this.caid, this.cacert, new BigInteger("1") );
        // The "revoked" URL patterns still force revoked status.
        this.helper.setURLEnding(revoked1);
        this.helper.verifyStatusRevoked( this.caid, this.cacert, new BigInteger("1"), CRLReason.certificateHold, new Date(0) );
        this.helper.setURLEnding(revoked2);
        this.helper.verifyStatusRevoked( this.caid, this.cacert, new BigInteger("1"), CRLReason.certificateHold, new Date(0) );
        log.trace("<test60OcspUnknownIsRevoked()");
    }

    /**
     * This test tests that the OCSP response contains the extension "id-pkix-ocsp-extended-revoke" in case the
     * status of an unknown cert is returned as revoked.
     *
     * @throws Exception
     */
    @Test
    public void testExtendedRevokedExtension() throws Exception {
        OCSPReqBuilder gen = new OCSPReqBuilder();
        // Serial number 1 is assumed not to exist in the database.
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, new BigInteger("1") ));
        OCSPReq req = gen.build();
        BasicOCSPResp response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
        assertNotNull("Could not retrieve response, test could not continue.", response);
        assertTrue(response.getResponses()[0].getCertStatus() instanceof UnknownStatus);
        // RFC 6960: id-pkix-ocsp-extended-revoke OBJECT IDENTIFIER ::= {id-pkix-ocsp 9}
        Extension responseExtension = response.getExtension(new ASN1ObjectIdentifier(OCSPObjectIdentifiers.id_pkix_ocsp + ".9"));
        // While status is "unknown", the extended-revoke extension must NOT be present.
        assertNull("Wrong extension sent with reply", responseExtension);
        // Reconfigure so unknown certificates are answered as revoked.
        final Map<String,String> map = new HashMap<String, String>();
        map.put(OcspConfiguration.NONE_EXISTING_IS_REVOKED, "true");
        this.helper.alterConfig(map);
        gen = new OCSPReqBuilder();
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, new BigInteger("1") ));
        req = gen.build();
        response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
        assertNotNull("Could not retrieve response, test could not continue.", response);
        assertTrue(response.getResponses()[0].getCertStatus() instanceof RevokedStatus);
responseExtension = response.getExtension(new ASN1ObjectIdentifier(OCSPObjectIdentifiers.id_pkix_ocsp + ".9"));
        // Now that the unknown cert is reported revoked, the extended-revoke extension must be present with a NULL value.
        assertNotNull("No extension sent with reply", responseExtension);
        assertEquals(DERNull.INSTANCE, responseExtension.getParsedValue());
    }

    /**
     * This test tests that the OCSP response contains the extension "id_pkix_ocsp_archive_cutoff" if "ocsp.expiredcert.retentionperiod"
     * is set in the configuration file
     *
     * @throws Exception
     */
    @Test
    public void testExpiredCertArchiveCutoffExtension() throws Exception {
        final String username = "expiredCertUsername";
        String cpname = "ValidityCertProfile";
        String eepname = "ValidityEEProfile";
        X509Certificate xcert = null;

        CertificateProfileSessionRemote certProfSession = EjbRemoteHelper.INSTANCE.getRemoteSession(CertificateProfileSessionRemote.class);
        EndEntityProfileSessionRemote eeProfSession = EjbRemoteHelper.INSTANCE.getRemoteSession(EndEntityProfileSessionRemote.class);

        try {
            // Create a certificate profile that allows validity override, so a short-lived cert can be issued below.
            if (certProfSession.getCertificateProfile(cpname) == null) {
                final CertificateProfile cp = new CertificateProfile(CertificateProfileConstants.CERTPROFILE_FIXED_ENDUSER);
                cp.setAllowValidityOverride(true);
                try {
                    certProfSession.addCertificateProfile(admin, cpname, cp);
                } catch (CertificateProfileExistsException e) {
                    log.error("Certificate profile exists: ", e);
                }
            }
            final int cpId = certProfSession.getCertificateProfileId(cpname);

            // Create an end entity profile restricted to the profile above.
            if (eeProfSession.getEndEntityProfile(eepname) == null) {
                final EndEntityProfile eep = new EndEntityProfile(true);
                eep.setValue(EndEntityProfile.AVAILCERTPROFILES, 0, "" + cpId);
                try {
                    eeProfSession.addEndEntityProfile(admin, eepname, eep);
                } catch (EndEntityProfileExistsException e) {
                    log.error("Could not create end entity profile.", e);
                }
            }
            final int eepId = eeProfSession.getEndEntityProfileId(eepname);

            if (!endEntityManagementSession.existsUser(username)) {
                endEntityManagementSession.addUser(admin, username, "foo123", "CN=expiredCertUsername", null, "[email protected]", false, eepId, cpId,
EndEntityTypes.ENDUSER.toEndEntityType(), SecConst.TOKEN_SOFT_PEM, 0, caid);
                log.debug("created user: expiredCertUsername, foo123, CN=expiredCertUsername");
            } else {
                log.debug("User expiredCertUsername already exists.");
                // Reset the existing user to NEW so a fresh certificate can be issued.
                EndEntityInformation userData = new EndEntityInformation(username, "CN=expiredCertUsername", caid, null, "[email protected]",
                        EndEntityConstants.STATUS_NEW, EndEntityTypes.ENDUSER.toEndEntityType(), eepId, cpId, null, null, SecConst.TOKEN_SOFT_PEM, 0, null);
                userData.setPassword("foo123");
                endEntityManagementSession.changeUser(admin, userData, false);
                log.debug("Reset status to NEW");
            }

            // Generate certificate for the new user
            KeyPair keys = KeyTools.genKeys("512", "RSA");
            long now = (new Date()).getTime();
            // Issue a certificate valid for only one second so it expires almost immediately.
            long notAfter = now + 1000;
            xcert = (X509Certificate) signSession.createCertificate(admin, username, "foo123", new PublicKeyWrapper(keys.getPublic()), -1,
                    new Date(), new Date(notAfter));
            assertNotNull("Failed to create new certificate", xcert);

            Thread.sleep(2000L); // wait for the certificate to expire

            // -------- Testing with default config value
            OCSPReqBuilder gen = new OCSPReqBuilder();
            gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, xcert.getSerialNumber() ));
            OCSPReq req = gen.build();
            BasicOCSPResp response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
            assertNotNull("Could not retrieve response, test could not continue.", response);
            SingleResp resp = response.getResponses()[0];
            Extension singleExtension = resp.getExtension(OCSPObjectIdentifiers.id_pkix_ocsp_archive_cutoff);
            assertNotNull("No extension sent with reply", singleExtension);
            ASN1GeneralizedTime extvalue = ASN1GeneralizedTime.getInstance(singleExtension.getParsedValue());
            // Default retention period is one year (31536000 seconds); cutoff = now - retention period.
            long expectedValue = (new Date()).getTime() - (31536000L * 1000);
            long actualValue = extvalue.getDate().getTime();
            long diff = expectedValue - actualValue;
            assertTrue("Wrong archive cutoff value.", diff < 60000);

            // -------- Send a request where id_pkix_ocsp_archive_cutoff SHOULD NOT be used

            // set ocsp configuration
            Map<String,String> map = new HashMap<String, String>();
            // A negative retention period disables the archive cutoff extension entirely.
            map.put(OcspConfiguration.EXPIREDCERT_RETENTIONPERIOD, "-1");
            this.helper.alterConfig(map);

            gen = new OCSPReqBuilder();
            gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, xcert.getSerialNumber() ));
            req = gen.build();
            response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
            assertNotNull("Could not retrieve response, test could not continue.", response);
            resp = response.getResponses()[0];
            singleExtension = resp.getExtension(OCSPObjectIdentifiers.id_pkix_ocsp_archive_cutoff);
            assertNull("The wrong extension was sent with reply", singleExtension);

            // ------------ Send a request where id_pkix_ocsp_archive_cutoff SHOULD be used

            // set ocsp configuration
            map = new HashMap<String, String>();
            map.put(OcspConfiguration.EXPIREDCERT_RETENTIONPERIOD, "63072000"); // 2 years
            this.helper.alterConfig(map);

            gen = new OCSPReqBuilder();
            gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, xcert.getSerialNumber() ));
            req = gen.build();
            response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
            assertNotNull("Could not retrieve response, test could not continue.", response);
            resp = response.getResponses()[0];
            singleExtension = resp.getExtension(OCSPObjectIdentifiers.id_pkix_ocsp_archive_cutoff);
            assertNotNull("No extension sent with reply", singleExtension);
            extvalue = ASN1GeneralizedTime.getInstance(singleExtension.getParsedValue());
            // Cutoff should now be two years (63072000 seconds) in the past.
            expectedValue = (new Date()).getTime() - (63072000L * 1000);
            actualValue = extvalue.getDate().getTime();
            diff = expectedValue - actualValue;
            assertTrue("Wrong archive cutoff value.", diff < 60000);

        } finally {
            // Clean up the user and profiles created for this test.
            endEntityManagementSession.revokeAndDeleteUser(admin, username, CRLReason.unspecified);
            eeProfSession.removeEndEntityProfile(admin, eepname);
certProfSession.removeCertificateProfile(admin, cpname);
        }
    }

    /**
     * This test tests that the OCSP response for a status unknown contains the header "cache-control" with the value "no-cache, must-revalidate"
     *
     * @throws Exception
     */
    @Test
    public void testUnknownStatusCacheControlHeader() throws Exception {

        // set ocsp configuration
        Map<String,String> map = new HashMap<String, String>();
        map.put(OcspConfiguration.UNTIL_NEXT_UPDATE, "1");
        this.helper.alterConfig(map);

        OCSPReqBuilder gen = new OCSPReqBuilder();
        // Serial number 1 is assumed not to exist in the database, giving status "unknown".
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, new BigInteger("1") ));
        OCSPReq req = gen.build();

        // Send the request as an HTTP GET with the base64-encoded request in the URL path.
        String sBaseURL = httpReqPath + '/' + resourceOcsp;
        String urlEnding = "";
        String b64 = new String(Base64.encode(req.getEncoded(), false));
        //String urls = URLEncoder.encode(b64, "UTF-8"); // JBoss/Tomcat will not accept escaped '/'-characters by default
        URL url = new URL(sBaseURL + '/' + b64 + urlEnding);
        HttpURLConnection con = (HttpURLConnection)url.openConnection();
        if (con.getResponseCode() != 200) {
            log.info("URL when request gave unexpected result: " + url.toString() + " Message was: " + con.getResponseMessage());
        }
        assertEquals("Response code did not match. ", 200, con.getResponseCode());
        assertNotNull(con.getContentType());
        assertTrue(con.getContentType().startsWith("application/ocsp-response"));
        // An "unknown" response must not be cached by intermediaries.
        assertNotNull("No Cache-Control in reply.", con.getHeaderField("Cache-Control"));
        assertEquals("no-cache, must-revalidate", con.getHeaderField("Cache-Control"));

        // Create a GET request using Nonce extension, in this case we should have no cache-control header
        gen = new OCSPReqBuilder();
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, new BigInteger("1") ));
        Extension[] extensions = new Extension[1];
        extensions[0] = new Extension(OCSPObjectIdentifiers.id_pkix_ocsp_nonce, false, new DEROctetString("123456789".getBytes()));
        gen.setRequestExtensions(new Extensions(extensions));
        req = gen.build();
        b64 = new String(Base64.encode(req.getEncoded(), false));
        url = new URL(sBaseURL + '/' + b64 + urlEnding);
        con = (HttpURLConnection)url.openConnection();
        if (con.getResponseCode() != 200) {
            log.info("URL when request gave unexpected result: " + url.toString() + " Message was: " + con.getResponseMessage());
        }
        assertEquals("Response code did not match. ", 200, con.getResponseCode());
        assertNotNull(con.getContentType());
        assertTrue(con.getContentType().startsWith("application/ocsp-response"));
        OCSPResp response = new OCSPResp(IOUtils.toByteArray(con.getInputStream()));
        BasicOCSPResp brep = (BasicOCSPResp) response.getResponseObject();
        byte[] noncerep = brep.getExtension(OCSPObjectIdentifiers.id_pkix_ocsp_nonce).getExtnValue().getEncoded();
        // Make sure we have a nonce in the response, we should have since we sent one in the request
        assertNotNull("Response should have nonce since we sent a nonce in the request", noncerep);
        ASN1InputStream ain = new ASN1InputStream(noncerep);
        ASN1OctetString oct = ASN1OctetString.getInstance(ain.readObject());
        ain.close();
        assertEquals("Response Nonce was not the same as the request Nonce, it must be", "123456789", new String(oct.getOctets()));
        // Nonce makes each response unique, so it must not carry a Cache-Control header.
        assertNull("Cache-Control in reply although we used Nonce in the request. Responses with Nonce should not have a Cache-control header.", con.getHeaderField("Cache-Control"));
    }

    /**
     * This test tests that the OCSP response is signed by the preferred signature algorithm specified in the request.
     *
     * @throws Exception
     */
    @Test
    @Deprecated // This test verifies legacy behavior from EJBCA 6.1.0 and should be removed when we no longer need to support it
    public void testSigAlgExtensionLegacy() throws Exception {
        loadUserCert(this.caid);

        // Try sending a request where the preferred signature algorithm in the extension is expected to be used to sign the response.
// set ocsp configuration
        Map<String,String> map = new HashMap<String, String>();
        map.put("ocsp.signaturealgorithm", AlgorithmConstants.SIGALG_SHA256_WITH_RSA + ";" + AlgorithmConstants.SIGALG_SHA1_WITH_RSA);
        this.helper.alterConfig(map);

        // Preferred algorithms: ECDSA-SHA256 first (not usable with the RSA signing key), then SHA1-RSA.
        ASN1EncodableVector algVec = new ASN1EncodableVector();
        algVec.add(X9ObjectIdentifiers.ecdsa_with_SHA256);
        algVec.add(PKCSObjectIdentifiers.sha1WithRSAEncryption);
        ASN1Sequence algSeq = new DERSequence(algVec);

        ExtensionsGenerator extgen = new ExtensionsGenerator();
        // RFC 6960: id-pkix-ocsp-pref-sig-algs OBJECT IDENTIFIER ::= { id-pkix-ocsp 8 }
        extgen.addExtension(new ASN1ObjectIdentifier(OCSPObjectIdentifiers.id_pkix_ocsp + ".8"), false, algSeq);
        Extensions exts = extgen.generate();
        assertNotNull(exts);

        OCSPReqBuilder gen = new OCSPReqBuilder();
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber() ), exts);
        gen.setRequestExtensions(exts);
        OCSPReq req = gen.build();
        assertTrue(req.hasExtensions());

        BasicOCSPResp response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
        assertNotNull("Could not retrieve response, test could not continue.", response);
        // The first preferred algorithm compatible with the signing key wins: SHA1-RSA.
        assertEquals(PKCSObjectIdentifiers.sha1WithRSAEncryption, response.getSignatureAlgOID());

        // Try sending a request where the preferred signature algorithm is not compatible with the signing key, but
        // the configured algorithm is. Expected a response signed using the first configured algorithm
        algVec = new ASN1EncodableVector();
        algVec.add(X9ObjectIdentifiers.ecdsa_with_SHA256);
        algSeq = new DERSequence(algVec);

        extgen = new ExtensionsGenerator();
        extgen.addExtension(new ASN1ObjectIdentifier(OCSPObjectIdentifiers.id_pkix_ocsp + ".8"), false, algSeq);
        exts = extgen.generate();
        assertNotNull(exts);

        gen = new OCSPReqBuilder();
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber() ), exts);
        gen.setRequestExtensions(exts);
        req = gen.build();
        assertTrue(req.hasExtensions());

        response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
        assertNotNull("Could not retrieve response, test could not continue.", response);
        // No preferred algorithm usable: fall back to the first configured algorithm (SHA256-RSA).
        assertEquals(PKCSObjectIdentifiers.sha256WithRSAEncryption, response.getSignatureAlgOID());
    }

    /** This test tests that the OCSP response is signed by the preferred signature algorithm specified in the request. */
    /* Example of the ASN.1 dump (with friendly names from the RFC added ) of what the extensions should look like.
     *
     * Note that we have left out the optional
     * PreferredSignatureAlgorithm.pubKeyAlgIdentifier
     * and
     * AlgorithmIdentifier.parameters
     *
     * ...
     *   75   48:       requestExtensions [2] {
     *   77   46:         Extensions ::= SEQUENCE {
     *   79   44:           Extension ::= SEQUENCE {
     *   81    9:             extnID OBJECT IDENTIFIER '1 3 6 1 5 5 7 48 1 8'
     *   92   31:             extnValue OCTET STRING, encapsulates {
     *   94   29:               PreferredSignatureAlgorithms ::= SEQUENCE {
     *   96   12:                 PreferredSignatureAlgorithm ::= SEQUENCE {
     *   98   10:                   sigIdentifier AlgorithmIdentifier ::= SEQUENCE {
     *  100    8:                     algorithm OBJECT IDENTIFIER
     *          :                       ecdsaWithSHA256 (1 2 840 10045 4 3 2)
     *          :                     }
     *          :                   }
     *  110   13:                 PreferredSignatureAlgorithm ::= SEQUENCE {
     *  112   11:                   sigIdentifier AlgorithmIdentifier ::= SEQUENCE {
     *  114    9:                     algorithm OBJECT IDENTIFIER
     *          :                       sha1WithRSAEncryption (1 2 840 113549 1 1 5)
     *          :                     }
     *          :                   ...
     */
    @Test
    public void testSigAlgExtension() throws Exception {
        log.trace(">testSigAlgExtensionNew");
        loadUserCert(caid);
        // Try sending a request where the preferred signature algorithm in the extension is expected to be used to sign the response.
        // set ocsp configuration
        final Map<String,String> map = new HashMap<String, String>();
        map.put("ocsp.signaturealgorithm", AlgorithmConstants.SIGALG_SHA256_WITH_RSA + ";" + AlgorithmConstants.SIGALG_SHA1_WITH_RSA);
        helper.alterConfig(map);
        final ASN1Sequence preferredSignatureAlgorithms = getPreferredSignatureAlgorithms(X9ObjectIdentifiers.ecdsa_with_SHA256, PKCSObjectIdentifiers.sha1WithRSAEncryption);
        final ExtensionsGenerator extensionsGenerator = new ExtensionsGenerator();
        // RFC 6960: id-pkix-ocsp-pref-sig-algs OBJECT IDENTIFIER ::= { id-pkix-ocsp 8 }
        extensionsGenerator.addExtension(new ASN1ObjectIdentifier(OCSPObjectIdentifiers.id_pkix_ocsp + ".8"), false, preferredSignatureAlgorithms);
        final Extensions extensions = extensionsGenerator.generate();
        assertNotNull(extensions);
        final OCSPReqBuilder ocspReqBuilder = new OCSPReqBuilder();
        ocspReqBuilder.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber()));
        ocspReqBuilder.setRequestExtensions(extensions);
        final OCSPReq ocspRequest = ocspReqBuilder.build();
        assertTrue(ocspRequest.hasExtensions());
        log.debug("base64 encoded request: " + new String(Base64.encode(ocspRequest.getEncoded(), false)));
        final BasicOCSPResp response1 = helper.sendOCSPGet(ocspRequest.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
        assertNotNull("Could not retrieve response, test could not continue.", response1);
        // SHA1-RSA is the first preferred algorithm compatible with the RSA signing key.
        assertEquals(PKCSObjectIdentifiers.sha1WithRSAEncryption, response1.getSignatureAlgOID());
        log.trace("<testSigAlgExtensionNew");
    }

    /** Test with a preferred signature algorithm specified in the request that is incompatible with the signing key.
     */
    @Test
    public void testSigAlgExtensionMismatch() throws Exception {
        log.trace(">testSigAlgExtensionNewMismatch");
        loadUserCert(caid);
        final Map<String,String> map = new HashMap<String, String>();
        map.put("ocsp.signaturealgorithm", AlgorithmConstants.SIGALG_SHA256_WITH_RSA + ";" + AlgorithmConstants.SIGALG_SHA1_WITH_RSA);
        helper.alterConfig(map);
        // Try sending a request where the preferred signature algorithm is not compatible with the signing key, but
        // the configured algorithm is. Expected a response signed using the first configured algorithm
        final ASN1Sequence preferredSignatureAlgorithms = getPreferredSignatureAlgorithms(X9ObjectIdentifiers.ecdsa_with_SHA256);
        final ExtensionsGenerator extensionsGenerator = new ExtensionsGenerator();
        extensionsGenerator.addExtension(new ASN1ObjectIdentifier(OCSPObjectIdentifiers.id_pkix_ocsp + ".8"), false, preferredSignatureAlgorithms);
        final Extensions extensions = extensionsGenerator.generate();
        assertNotNull(extensions);
        final OCSPReqBuilder ocspReqBuilder = new OCSPReqBuilder();
        ocspReqBuilder.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber()));
        ocspReqBuilder.setRequestExtensions(extensions);
        final OCSPReq ocspRequest = ocspReqBuilder.build();
        assertTrue(ocspRequest.hasExtensions());
        log.debug("base64 encoded request: " + new String(Base64.encode(ocspRequest.getEncoded(), false)));
        final BasicOCSPResp response2 = helper.sendOCSPGet(ocspRequest.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
        assertNotNull("Could not retrieve response, test could not continue.", response2);
        // Fall back to the first configured algorithm (SHA256-RSA) when no preferred one is usable.
        assertEquals(PKCSObjectIdentifiers.sha256WithRSAEncryption, response2.getSignatureAlgOID());
    }

    /** @return a RFC 6960 PreferredSignatureAlgorithms object.
     */
    private ASN1Sequence getPreferredSignatureAlgorithms(final ASN1ObjectIdentifier...algorithmOids) {
        final ASN1Encodable[] asn1Encodables = new ASN1Encodable[algorithmOids.length];
        for (int i=0; i<algorithmOids.length; i++) {
            // PreferredSignatureAlgorithm ::= SEQUENCE { sigIdentifier AlgorithmIdentifier, pubKeyAlgIdentifier SMIMECapability OPTIONAL }
            final ASN1Sequence preferredSignatureAlgorithm = new DERSequence(new ASN1Encodable[] { new AlgorithmIdentifier(algorithmOids[i]) });
            asn1Encodables[i] = preferredSignatureAlgorithm;
        }
        // PreferredSignatureAlgorithms ::= SEQUENCE OF PreferredSignatureAlgorithm
        final ASN1Sequence preferredSignatureAlgorithms = new DERSequence(asn1Encodables);
        return preferredSignatureAlgorithms;
    }

    /**
     * This test tests that the OCSP response does not contain the signing cert if Ejbca is configured that way.
     *
     * @throws Exception
     */
    @Test
    public void testSignCertNotIncludedInResponse() throws Exception {
        loadUserCert(this.caid);
        // set OCSP configuration
        Map<String,String> map = new HashMap<String, String>();
        map.put(OcspConfiguration.INCLUDE_SIGNING_CERT, "false");
        helper.alterConfig(map);
        // This setting is part of the OCSP signing cache so a reload of the cache is required
        helper.reloadKeys();
        // Build the OCSP request
        OCSPReqBuilder gen = new OCSPReqBuilder();
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber() ), null);
        OCSPReq req = gen.build();
        // Send and verify the OCSP request
        BasicOCSPResp response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200, false, cacert);
        assertNotNull("Could not retrieve response, test could not continue.", response);
        assertTrue("Response does contain certificates", response.getCerts().length == 0);
    }

    /**
     * This test tests that the OCSP response does not contain the root CA cert in the included certificate chain.
     *
     * @throws Exception
     */
    @Test
    public void testRootCACertNotIncludedInResponse() throws Exception {
        log.trace(">testRootCACertNotIncludedInResponse()");
        // Create a subCA and a subsubCA
        String subcaDN = "CN=SubTestCA";
        createSubCA(subcaDN, caid);
        String subSubCaDN = "CN=SubSubTestCA";
        X509Certificate subSubCaCert = createSubCA(subSubCaDN, subcaDN.hashCode());

        // set OCSP configuration
        Map<String,String> map = new HashMap<String, String>();
        map.put(OcspConfiguration.INCLUDE_CERT_CHAIN, "true");
        GlobalOcspConfiguration ocspConfiguration = (GlobalOcspConfiguration) globalConfigurationSession.getCachedConfiguration(GlobalOcspConfiguration.OCSP_CONFIGURATION_ID);
        ocspConfiguration.setOcspDefaultResponderReference(subSubCaDN);
        globalConfigurationSession.saveConfiguration(admin, ocspConfiguration);
        this.helper.alterConfig(map);
        helper.reloadKeys();

        // Expects an OCSP response including a certchain that contains only the 2 subCAs and not their rootCA.
        try {
            loadUserCert(subSubCaDN.hashCode());
            OCSPReqBuilder gen = new OCSPReqBuilder();
            gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), subSubCaCert, ocspTestCert.getSerialNumber() ), null);
            OCSPReq req = gen.build();
            BasicOCSPResp response = helper.sendOCSPGet(req.getEncoded(), null, OCSPRespBuilder.SUCCESSFUL, 200);
            assertNotNull("Could not retrieve response, test could not continue.", response);
            assertTrue("Response contains more that 2 certificate", response.getCerts().length == 2);
            // The chain must be ordered leaf-issuer first: subsubCA then subCA, never the root.
            X509CertificateHolder[] includedCerts = response.getCerts();
            assertEquals(subSubCaDN, includedCerts[0].getSubject().toString());
            assertEquals(subcaDN, includedCerts[1].getSubject().toString());
        } finally {
            // Clean up: remove the test user, the crypto tokens and both sub CAs.
            try {
                endEntityManagementSession.deleteUser(admin, "ocsptest");
            } catch (Exception e) {
                log.error("",e);
            }
            try {
                int cryptoTokenId = caSession.getCAInfo(admin, subSubCaDN.hashCode()).getCAToken().getCryptoTokenId();
                CryptoTokenTestUtils.removeCryptoToken(admin, cryptoTokenId);
                cryptoTokenId =
caSession.getCAInfo(admin, subcaDN.hashCode()).getCAToken().getCryptoTokenId();
                CryptoTokenTestUtils.removeCryptoToken(admin, cryptoTokenId);
            } catch (Exception e) {
                log.error("",e);
            }
            try {
                caSession.removeCA(admin, subSubCaDN.hashCode());
                caSession.removeCA(admin, subcaDN.hashCode());
            } catch (Exception e) {
                log.info("Could not remove CA with SubjectDN " + subSubCaDN);
            }
        }
        log.trace("<testRootCACertNotIncludedInResponse()");
    }

    /**
     * removes DSA CA
     *
     * @throws Exception
     *             error
     */
    public void removeDSACA() throws Exception {
        // Sanity check that a full EJBCA installation is reachable before cleaning up.
        assertTrue("This test can only be run on a full EJBCA installation.",
                ((HttpURLConnection) new URL(httpReqPath + '/').openConnection())
                        .getResponseCode() == 200);
        // Remove the crypto token first, then the CA itself; each step is best-effort.
        try {
            if (caSession.existsCa(DSA_DN.hashCode())) {
                final int cryptoTokenId = caSession.getCAInfo(admin, DSA_DN.hashCode()).getCAToken().getCryptoTokenId();
                CryptoTokenTestUtils.removeCryptoToken(admin, cryptoTokenId);
            }
        } catch (Exception e) {
            log.error("", e);
        }
        try {
            if (caSession.existsCa(DSA_DN.hashCode())) {
                caSession.removeCA(admin, DSA_DN.hashCode());
            }
        } catch (Exception e) {
            log.info("Could not remove CA with SubjectDN " + DSA_DN);
        }
        try {
            if (caSession.existsCa("CN=OCSPDSAIMPCATEST".hashCode())) {
                caSession.removeCA(admin, "CN=OCSPDSAIMPCATEST".hashCode());
            }
        } catch (Exception e) {
            log.info("Could not remove CA with SubjectDN CN=OCSPDSAIMPCATEST");
        }
    }

    /**
     * removes ECDSA CA
     *
     * @throws Exception
     *             error
     */
    public void removeECDSACA() throws Exception {
        // Sanity check that a full EJBCA installation is reachable before cleaning up.
        assertTrue("This test can only be run on a full EJBCA installation.",
                ((HttpURLConnection) new URL(httpReqPath + '/').openConnection())
                        .getResponseCode() == 200);
        try {
            caSession.removeCA(admin, "CN=OCSPECDSATEST".hashCode());
        } catch (Exception e) {
            log.info("Could not remove CA with SubjectDN CN=OCSPECDSATEST");
        }
        try {
            caSession.removeCA(admin, "CN=OCSPECDSAIMPCATEST".hashCode());
        } catch (Exception e) {
            log.info("Could not remove CA with SubjectDN CN=OCSPECDSAIMPCATEST");
        }
    }

    //
    // Private helper methods
    //

    /**
     * Generate a simple OCSP Request object
     */
    private byte[] getValidOcspRequest() throws Exception {
        // Get user and ocspTestCert that we know...
        loadUserCert(caid);
        // And an OCSP request
        OCSPReqBuilder gen = new OCSPReqBuilder();
        gen.addRequest(new JcaCertificateID(SHA1DigestCalculator.buildSha1Instance(), cacert, ocspTestCert.getSerialNumber()));
        Extension[] extensions = new Extension[1];
        extensions[0] = new Extension(OCSPObjectIdentifiers.id_pkix_ocsp_nonce, false, new DEROctetString("123456789".getBytes()));
        gen.setRequestExtensions(new Extensions(extensions));
        OCSPReq req = gen.build();
        return req.getEncoded();
    }

    /**
     * Sends the payload to the OCSP Servlet using TCP. Can be used for testing
     * malformed or malicious requests.
     *
     * @param contentLength
     *            The HTTP 'Content-Length' header to send to the server.
     * @return the OCSP Response from the server
     * @throws IOException
     *             if the is a IO problem
     */
    private OCSPResp sendRawRequestToOcsp(int contentLength, byte[] payload, final boolean writeByteByByte) throws IOException {
        // Create the HTTP header
        // NOTE: contentLength is sent as-is and may deliberately disagree with the real payload size.
        String headers = "POST " + "/ejbca/" + resourceOcsp + " HTTP/1.1\r\n" + "Host: "+httpHost+"\r\n" + "Content-Type: application/ocsp-request\r\n"
                + "Content-Length: " + contentLength + "\r\n" + "\r\n";
        // Merge the HTTP headers, the OCSP request and the raw data into one
        // package.
        byte[] input = concatByteArrays(headers.getBytes(), payload);
        log.debug("HTTP request headers: " + headers);
        log.debug("HTTP headers size: " + headers.getBytes().length);
        log.debug("Size of data to send: " + input.length);
        // Create the socket.
        Socket socket = new Socket(InetAddress.getByName(httpHost), Integer.parseInt(httpPort));
        // Send data byte for byte.
OutputStream os = socket.getOutputStream();
        if (writeByteByByte) {
            // Write one byte at a time; the server may close the connection mid-stream.
            int i = 0;
            try {
                for (i = 0; i < input.length; i++) {
                    os.write(input[i]);
                }
            } catch (IOException e) {
                log.info("Socket wrote " + i + " bytes before throwing an IOException.");
            }
        } else {
            try {
                os.write(input);
            } catch (IOException e) {
                log.info("Could not write to TCP Socket " + e.getMessage());
            }
        }
        // Reading the response.
        byte rawResponse[] = getHttpResponse(socket.getInputStream());
        log.info("Response contains: " + rawResponse.length + " bytes.");
        socket.close();
        return new OCSPResp(rawResponse);
    }

    /**
     * Read the payload of a HTTP response as a byte array.
     */
    private byte[] getHttpResponse(InputStream ins) throws IOException {
        byte buf[] = IOUtils.toByteArray(ins);
        ins.close();
        int i = 0;
        // Removing the HTTP headers. The HTTP headers end at the last
        // occurrence of "\r\n".
        for (i = buf.length - 1; i > 0; i--) {
            if ((buf[i] == 0x0A) && (buf[i - 1] == 0x0D)) {
                break;
            }
        }
        byte[] header = ArrayUtils.subarray(buf, 0, i + 1);
        log.debug("HTTP reponse header: " + new String(header));
        log.debug("HTTP reponse header size: " + header.length);
        log.debug("Stream length: " + buf.length);
        log.debug("HTTP payload length: " + (buf.length - header.length));
        return ArrayUtils.subarray(buf, header.length, buf.length);
    }

    /**
     * @return a new byte array with the two arguments concatenated.
     */
    private byte[] concatByteArrays(byte[] array1, byte[] array2) {
        byte[] ret = new byte[array1.length + array2.length];
        System.arraycopy(array1, 0, ret, 0, array1.length);
        System.arraycopy(array2, 0, ret, array1.length, array2.length);
        return ret;
    }

    /**
     * adds a CA Using ECDSA keys to the database.
     *
     * It also checks that the CA is stored correctly.
     *
     * @throws Exception
     *             error
     */
    private CAInfo addECDSACA(String dn, String keySpec) throws Exception {
        log.trace(">addECDSACA()");
        boolean ret = false;
        int cryptoTokenId = 0;
        CAInfo info = null;
        try {
            cryptoTokenId = CryptoTokenTestUtils.createCryptoTokenForCA(admin, dn, keySpec);
            final CAToken catoken = CaTestUtils.createCaToken(cryptoTokenId, AlgorithmConstants.SIGALG_SHA256_WITH_ECDSA, AlgorithmConstants.SIGALG_SHA1_WITH_RSA);
            // Create and activate OCSP CA Service.
            List<ExtendedCAServiceInfo> extendedcaservices = new ArrayList<ExtendedCAServiceInfo>();
            extendedcaservices.add(new HardTokenEncryptCAServiceInfo(ExtendedCAServiceInfo.STATUS_ACTIVE));
            extendedcaservices.add(new KeyRecoveryCAServiceInfo(ExtendedCAServiceInfo.STATUS_ACTIVE));
            List<CertificatePolicy> policies = new ArrayList<CertificatePolicy>(1);
            policies.add(new CertificatePolicy("2.5.29.32.0", "", ""));
            X509CAInfo cainfo = new X509CAInfo(dn, dn, CAConstants.CA_ACTIVE, CertificateProfileConstants.CERTPROFILE_FIXED_ROOTCA, 365, CAInfo.SELFSIGNED, null, catoken);
            cainfo.setDescription("JUnit ECDSA CA");
            cainfo.setPolicies(policies);
            cainfo.setExtendedCAServiceInfos(extendedcaservices);
            caAdminSession.createCA(admin, cainfo);
            info = caSession.getCAInfo(admin, dn);
            X509Certificate cert = (X509Certificate) info.getCertificateChain().iterator().next();
            assertTrue("Error in created ca certificate", cert.getSubjectDN().toString().equals(dn));
            assertTrue("Creating CA failed", info.getSubjectDN().equals(dn));
            // Make BC cert instead to make sure the public key is BC provider type (to make our test below easier)
            X509Certificate bccert = CertTools.getCertfromByteArray(cert.getEncoded(), X509Certificate.class);
            PublicKey pk = bccert.getPublicKey();
            // The EC public key may come back as either of the two BC EC key classes depending on the BC version.
            if (pk instanceof JCEECPublicKey) {
                JCEECPublicKey ecpk = (JCEECPublicKey) pk;
                assertEquals(ecpk.getAlgorithm(), "EC");
                org.bouncycastle.jce.spec.ECParameterSpec spec = ecpk.getParameters();
                if (StringUtils.equals(keySpec, "implicitlyCA")) {
                    assertNull("ImplicitlyCA must have null spec", spec);
                } else {
                    assertNotNull("secp256r1 must not have null spec", spec);
                }
            } else if (pk instanceof BCECPublicKey) {
                BCECPublicKey ecpk = (BCECPublicKey) pk;
                assertEquals(ecpk.getAlgorithm(), "EC");
                org.bouncycastle.jce.spec.ECParameterSpec spec = ecpk.getParameters();
                if (StringUtils.equals(keySpec, "implicitlyCA")) {
                    assertNull("ImplicitlyCA must have null spec", spec);
                } else {
                    assertNotNull("secp256r1 must not have null spec", spec);
                }
            } else {
                assertTrue("Public key is not EC: "+pk.getClass().getName(), false);
            }
            ret = true;
        } catch (CAExistsException pee) {
            log.info("CA exists.");
        }
        assertTrue("Creating ECDSA CA failed", ret);
        log.trace("<addECDSACA()");
        return info;
    }

    /**
     * adds a CA Using DSA keys to the database.
     *
     * It also checks that the CA is stored correctly.
     *
     * @throws Exception
     *             error
     */
    private X509Certificate addDSACA(String dn, String keySpec) throws Exception {
        log.trace(">addDSACA()");
        boolean ret = false;
        X509Certificate cacert = null;
        int cryptoTokenId = 0;
        try {
            cryptoTokenId = CryptoTokenTestUtils.createCryptoTokenForCA(admin, dn, keySpec);
            final CAToken catoken = CaTestUtils.createCaToken(cryptoTokenId, AlgorithmConstants.SIGALG_SHA1_WITH_DSA, AlgorithmConstants.SIGALG_SHA1_WITH_RSA);
            // Create and activate OCSP CA Service.
final List<ExtendedCAServiceInfo> extendedcaservices = new ArrayList<ExtendedCAServiceInfo>(); extendedcaservices.add(new HardTokenEncryptCAServiceInfo(ExtendedCAServiceInfo.STATUS_ACTIVE)); extendedcaservices.add(new KeyRecoveryCAServiceInfo(ExtendedCAServiceInfo.STATUS_ACTIVE)); final List<CertificatePolicy> policies = new ArrayList<CertificatePolicy>(1); policies.add(new CertificatePolicy("2.5.29.32.0", "", "")); X509CAInfo cainfo = new X509CAInfo(dn, dn, CAConstants.CA_ACTIVE, CertificateProfileConstants.CERTPROFILE_FIXED_ROOTCA, 365, CAInfo.SELFSIGNED, null, catoken); cainfo.setDescription("JUnit DSA CA"); cainfo.setPolicies(policies); caAdminSession.createCA(admin, cainfo); CAInfo info = caSession.getCAInfo(admin, dn); X509Certificate cert = (X509Certificate) info.getCertificateChain().iterator().next(); assertEquals("Error in created ca certificate", dn, CertTools.getSubjectDN(cert)); assertEquals("Creating CA failed, DN was incorrect.", dn, info.getSubjectDN()); assertTrue("Public key was not an instance of DSAPublicKey", cert.getPublicKey() instanceof DSAPublicKey); ret = true; Collection<Certificate> coll = info.getCertificateChain(); Object[] certs = coll.toArray(); cacert = (X509Certificate) certs[0]; } catch (CAExistsException e) { log.info("CA exists."); throw e; } assertTrue("Creating DSA CA failed", ret); log.trace("<addDSACA()"); return cacert; } private X509Certificate createSubCA(String subcaDN, int signbyID) throws CryptoTokenOfflineException, CryptoTokenAuthenticationFailedException, InvalidAlgorithmException, AuthorizationDeniedException, CADoesntExistsException, CAExistsException { try { int cryptoTokenId = CryptoTokenTestUtils.createCryptoTokenForCA(admin, subcaDN, "1024"); final CAToken catoken = CaTestUtils.createCaToken(cryptoTokenId, AlgorithmConstants.SIGALG_SHA1_WITH_RSA, AlgorithmConstants.SIGALG_SHA1_WITH_RSA); // Create and active OSCP CA Service. 
final List<ExtendedCAServiceInfo> extendedcaservices = new ArrayList<ExtendedCAServiceInfo>(); extendedcaservices.add(new HardTokenEncryptCAServiceInfo(ExtendedCAServiceInfo.STATUS_ACTIVE)); extendedcaservices.add(new KeyRecoveryCAServiceInfo(ExtendedCAServiceInfo.STATUS_ACTIVE)); final List<CertificatePolicy> policies = new ArrayList<CertificatePolicy>(1); policies.add(new CertificatePolicy("2.5.29.32.0", "", "")); X509CAInfo cainfo = new X509CAInfo(subcaDN, subcaDN, CAConstants.CA_ACTIVE, CertificateProfileConstants.CERTPROFILE_FIXED_SUBCA, 365, signbyID, null, catoken); cainfo.setDescription("JUnit DSA CA"); cainfo.setPolicies(policies); cainfo.setExtendedCAServiceInfos(extendedcaservices); caAdminSession.createCA(admin, cainfo); CAInfo info = caSession.getCAInfo(admin, subcaDN); return (X509Certificate) info.getCertificateChain().iterator().next(); } catch (CAExistsException e) { log.info("CA exists."); throw e; } } /** * This method creates the user "ocsptest" and generated a certificate for it */ protected void loadUserCert(int caid) throws Exception { createUserCert(caid); } private KeyPair createUserCert(int caid) throws AuthorizationDeniedException, UserDoesntFullfillEndEntityProfile, ApprovalException, WaitingForApprovalException, Exception, ObjectNotFoundException, AuthStatusException, AuthLoginException, IllegalKeyException, CADoesntExistsException { final String USERNAME = "ocsptest"; if (!endEntityManagementSession.existsUser(USERNAME)) { endEntityManagementSession.addUser(admin, USERNAME, "foo123", "C=SE,O=AnaTom,CN=OCSPTest", null, "[email protected]", false, SecConst.EMPTY_ENDENTITYPROFILE, CertificateProfileConstants.CERTPROFILE_FIXED_ENDUSER, EndEntityTypes.ENDUSER.toEndEntityType(), SecConst.TOKEN_SOFT_PEM, 0, caid); log.debug("created user: ocsptest, foo123, C=SE, O=AnaTom, CN=OCSPTest"); } else { log.debug("User ocsptest already exists."); EndEntityInformation userData = new EndEntityInformation(USERNAME, "C=SE,O=AnaTom,CN=OCSPTest", caid, 
                null, "[email protected]", EndEntityConstants.STATUS_NEW, EndEntityTypes.ENDUSER.toEndEntityType(),
                SecConst.EMPTY_ENDENTITYPROFILE, CertificateProfileConstants.CERTPROFILE_FIXED_ENDUSER, null, null,
                SecConst.TOKEN_SOFT_PEM, 0, null);
        userData.setPassword("foo123");
        endEntityManagementSession.changeUser(admin, userData, false);
        log.debug("Reset status to NEW");
    }
    // Generate certificate for the new user
    // NOTE(review): 512-bit RSA is far below modern minimums; acceptable only because this is test code
    KeyPair keys = KeyTools.genKeys("512", "RSA");
    // user that we know exists...
    ocspTestCert = (X509Certificate) signSession.createCertificate(admin, USERNAME, "foo123", new PublicKeyWrapper(keys.getPublic()));
    assertNotNull("Failed to create new certificate", ocspTestCert);
    return keys;
}

/** Checks the signature on an OCSP request and checks that it is signed by an allowed CA.
 * Does not check for revocation of the signer certificate
 *
 * @param clientRemoteAddr The ip address or hostname of the remote client that sent the request, can be null.
 * @param req The signed OCSPReq
 * @param cacerts a CertificateCache of Certificates, the authorized CA-certificates. The signer certificate must be issued by one of these.
 * @return X509Certificate which is the certificate that signed the OCSP request
 * @throws SignRequestSignatureException if signature verification fail, or if the signing certificate is not authorized
 * @throws SignRequestException if there is no signature on the OCSPReq
 * @throws OCSPException if the request can not be parsed to retrieve certificates
 * @throws NoSuchProviderException if the BC provider is not installed
 * @throws CertificateException if the certificate can not be parsed
 * @throws NoSuchAlgorithmException if the certificate contains an unsupported algorithm
 * @throws InvalidKeyException if the certificate, or CA key is invalid
 * @throws OperatorCreationException
 */
public static X509Certificate checkRequestSignature(String clientRemoteAddr, OCSPReq req, CaCertificateCache cacerts)
        throws SignRequestException, OCSPException, NoSuchProviderException, CertificateException, NoSuchAlgorithmException,
        InvalidKeyException, SignRequestSignatureException, OperatorCreationException {
    X509Certificate signercert = null;
    // An unsigned request can never pass; reject it up front
    if (!req.isSigned()) {
        String infoMsg = intres.getLocalizedMessage("ocsp.errorunsignedreq", clientRemoteAddr);
        log.info(infoMsg);
        throw new SignRequestException(infoMsg);
    }
    // Get all certificates embedded in the request (probably a certificate chain)
    X509CertificateHolder[] certs = req.getCerts();
    // Set, as a try, the signer to be the first certificate, so we have a name to log...
    String signer = null;
    JcaX509CertificateConverter converter = new JcaX509CertificateConverter();
    if (certs.length > 0) {
        signer = CertTools.getSubjectDN(converter.getCertificate(certs[0]));
    }
    // We must find a cert to verify the signature with...
    boolean verifyOK = false;
    for (int i = 0; i < certs.length; i++) {
        // First: does this embedded certificate's key actually verify the request signature?
        if (req.isSignatureValid(new JcaContentVerifierProviderBuilder().build(certs[i])) == true) {
            signercert = converter.getCertificate(certs[i]);
            signer = CertTools.getSubjectDN(signercert);
            Date now = new Date();
            String signerissuer = CertTools.getIssuerDN(signercert);
            String infoMsg = intres.getLocalizedMessage("ocsp.infosigner", signer);
            log.info(infoMsg);
            verifyOK = true;
            // Also check that the signer certificate can be verified by one of the CA-certificates
            // that we answer for
            X509Certificate signerca = cacerts.findLatestBySubjectDN(HashID.getFromIssuerDN(certs[i]));
            String subject = signer;
            String issuer = signerissuer;
            if (signerca != null) {
                try {
                    // Chain check: signer cert must verify under the CA key, and both
                    // signer cert and CA cert must be within their validity periods
                    signercert.verify(signerca.getPublicKey());
                    if (log.isDebugEnabled()) {
                        log.debug("Checking validity. Now: "+now+", signerNotAfter: "+signercert.getNotAfter());
                    }
                    CertTools.checkValidity(signercert, now);
                    // Move the error message string to the CA cert
                    subject = CertTools.getSubjectDN(signerca);
                    issuer = CertTools.getIssuerDN(signerca);
                    CertTools.checkValidity(signerca, now);
                } catch (SignatureException e) {
                    infoMsg = intres.getLocalizedMessage("ocsp.infosigner.invalidcertsignature", subject, issuer, e.getMessage());
                    log.info(infoMsg);
                    verifyOK = false;
                } catch (InvalidKeyException e) {
                    infoMsg = intres.getLocalizedMessage("ocsp.infosigner.invalidcertsignature", subject, issuer, e.getMessage());
                    log.info(infoMsg);
                    verifyOK = false;
                } catch (CertificateNotYetValidException e) {
                    infoMsg = intres.getLocalizedMessage("ocsp.infosigner.certnotyetvalid", subject, issuer, e.getMessage());
                    log.info(infoMsg);
                    verifyOK = false;
                } catch (CertificateExpiredException e) {
                    infoMsg = intres.getLocalizedMessage("ocsp.infosigner.certexpired", subject, issuer, e.getMessage());
                    log.info(infoMsg);
                    verifyOK = false;
                }
            } else {
                // Signature verified, but the issuer is not one of the CAs we answer for
                infoMsg = intres.getLocalizedMessage("ocsp.infosigner.nocacert", signer, signerissuer);
                log.info(infoMsg);
                verifyOK = false;
            }
            // Only the first certificate whose key verifies the signature is considered
            break;
        }
    }
    if (!verifyOK) {
        String errMsg = intres.getLocalizedMessage("ocsp.errorinvalidsignature", signer);
        log.info(errMsg);
        throw new SignRequestSignatureException(errMsg);
    }
    return signercert;
}
}
/** * Returns whether the class path contains any output entries. */ public boolean hasOutput() { for (int index = 0; index < classPathEntries.size(); index++) { if (((ClassPathEntry)classPathEntries.get(index)).isOutput()) { return true; } } return false; }
<reponame>yingjunyu/getinfo package com.yingjunyu.GetInfo.stock.view; import com.yingjunyu.GetInfo.beans.StockBean; import java.util.List; /** * Description : * Author : yingjunyu * Email : <EMAIL> * Blog : https://github.com/yingjunyu * Date : 2015/12/22 */ public interface StockView { void showProgress(); void hideProgress(); void showStockLayout(); void setSname(String sname); void setCurdot(String curdot); void setCurprice(String curprice); void setRate(String rate); void setStockData(List<StockBean> lists); void showErrorToast(String msg); }
/** * Create the input graph for the parameter study. * Reads files from the data directory. * @param start_date start date of the simulation. * @param end_date end date of the simulation. * @param data_dir data directory. * @returns created graph or any io errors that happen during reading of the files. */ mio::IOResult<mio::Graph<mio::SecirModel, mio::MigrationParameters>> create_graph(mio::Date start_date, mio::Date end_date, const fs::path& data_dir) { const auto start_day = mio::get_day_in_year(start_date); const int num_age_groups = 6; mio::SecirParams params(num_age_groups); params.get<mio::StartDay>() = start_day; BOOST_OUTCOME_TRY(set_covid_parameters(params)); BOOST_OUTCOME_TRY(set_contact_matrices(data_dir, params)); BOOST_OUTCOME_TRY(set_npis(start_date, end_date, params)); mio::Graph<mio::SecirModel, mio::MigrationParameters> params_graph; BOOST_OUTCOME_TRY(set_nodes(params, start_date, end_date, data_dir, params_graph)); BOOST_OUTCOME_TRY(set_edges(data_dir, params_graph)); return mio::success(params_graph); }
def capeesh(message): users[str(message.from_user.id)]['action'] = 'capeesh' pilot_selection(message)
<reponame>talnordan/gloo package consul import ( envoyapi "github.com/envoyproxy/go-control-plane/envoy/api/v2" envoyauth "github.com/envoyproxy/go-control-plane/envoy/api/v2/auth" envoycore "github.com/envoyproxy/go-control-plane/envoy/api/v2/core" "github.com/pkg/errors" defaultv1 "github.com/solo-io/gloo/pkg/api/defaults/v1" "github.com/solo-io/gloo/pkg/secretwatcher" "github.com/solo-io/gloo/pkg/api/types/v1" "github.com/solo-io/gloo/pkg/bootstrap" "github.com/solo-io/gloo/pkg/endpointdiscovery" "github.com/solo-io/gloo/pkg/plugins" ) func init() { plugins.Register(&Plugin{}) } //go:generate protoc -I=./ -I=${GOPATH}/src/github.com/gogo/protobuf/ -I=${GOPATH}/src/github.com/gogo/protobuf/protobuf/ --gogo_out=Mgoogle/protobuf/wrappers.proto=github.com/gogo/protobuf/types:${GOPATH}/src spec.proto func (p *Plugin) SetupEndpointDiscovery() (endpointdiscovery.Interface, error) { cfg := p.opts.ConsulOptions.ToConsulConfig() disc, err := NewEndpointController(cfg) if err != nil { return nil, errors.Wrap(err, "failed to start consul endpoint discovery") } return disc, err } type Plugin struct{ opts bootstrap.Options } const ( // define Upstream type name UpstreamTypeConsul = "consul" ) func (p *Plugin) Init(options bootstrap.Options) error{ p.opts = options return nil } func (p *Plugin) GetDependencies(cfg *v1.Config) *plugins.Dependencies { deps := new(plugins.Dependencies) for _, us := range cfg.Upstreams { if us.Type != UpstreamTypeConsul { continue } spec, err := DecodeUpstreamSpec(us.Spec) if err != nil { continue } if spec.Connect == nil || spec.Connect.TlsSecretRef == "" { continue } deps.SecretRefs = append(deps.SecretRefs, spec.Connect.TlsSecretRef) } return deps } func (p *Plugin) ProcessUpstream(params *plugins.UpstreamPluginParams, in *v1.Upstream, out *envoyapi.Cluster) error { if in.Type != UpstreamTypeConsul { return nil } // decode does validation for us spec, err := DecodeUpstreamSpec(in.Spec) if err != nil { return errors.Wrap(err, "invalid consul 
upstream spec") } // consul upstreams use EDS out.Type = envoyapi.Cluster_EDS out.EdsClusterConfig = &envoyapi.Cluster_EdsClusterConfig{ EdsConfig: &envoycore.ConfigSource{ ConfigSourceSpecifier: &envoycore.ConfigSource_Ads{ Ads: &envoycore.AggregatedConfigSource{}, }, }, } if spec.Connect == nil || spec.Connect.TlsSecretRef == "" { return nil } secretRef := spec.Connect.TlsSecretRef _, ok := params.Secrets[secretRef] if !ok { return errors.Errorf("missing secret %v", secretRef) } certChain, privateKey, rootCa, err := getSslSecrets(secretRef, params.Secrets) if err != nil { return err } certChainData := &envoycore.DataSource{ Specifier: &envoycore.DataSource_InlineString{ InlineString: certChain, }, } privateKeyData := &envoycore.DataSource{ Specifier: &envoycore.DataSource_InlineString{ InlineString: privateKey, }, } rootCaData := &envoycore.DataSource{ Specifier: &envoycore.DataSource_InlineString{ InlineString: rootCa, }, } var validationContext *envoyauth.CertificateValidationContext if rootCa != "" { validationContext = &envoyauth.CertificateValidationContext{ TrustedCa: rootCaData, } } out.TlsContext = &envoyauth.UpstreamTlsContext{ CommonTlsContext: &envoyauth.CommonTlsContext{ TlsParams: &envoyauth.TlsParameters{}, TlsCertificates: []*envoyauth.TlsCertificate{ { CertificateChain: certChainData, PrivateKey: privateKeyData, }, }, ValidationContextType: &envoyauth.CommonTlsContext_ValidationContext{ ValidationContext: validationContext, }, }, } return nil } // TODO(yuval-k): un-copy-paste this from route_config.go func getSslSecrets(ref string, secrets secretwatcher.SecretMap) (string, string, string, error) { sslSecrets, ok := secrets[ref] if !ok { return "", "", "", errors.Errorf("ssl secret not found for ref %v", ref) } certChain, ok := sslSecrets.Data[defaultv1.SslCertificateChainKey] privateKey, ok := sslSecrets.Data[defaultv1.SslPrivateKeyKey] rootCa := sslSecrets.Data[defaultv1.SslRootCaKey] return certChain, privateKey, rootCa, nil }
/* NSC_DigestInit initializes a message-digesting operation. */ CK_RV NSC_DigestInit(CK_SESSION_HANDLE hSession, CK_MECHANISM_PTR pMechanism) { SFTKSession *session; SFTKSessionContext *context; CK_RV crv = CKR_OK; CHECK_FORK(); session = sftk_SessionFromHandle(hSession); if (session == NULL) return CKR_SESSION_HANDLE_INVALID; crv = sftk_InitGeneric(session,&context,SFTK_HASH,NULL,0,NULL, 0, 0); if (crv != CKR_OK) { sftk_FreeSession(session); return crv; } #define INIT_MECH(mech,mmm) \ case mech: { \ mmm ## Context * mmm ## _ctx = mmm ## _NewContext(); \ context->cipherInfo = (void *)mmm ## _ctx; \ context->cipherInfoLen = mmm ## _FlattenSize(mmm ## _ctx); \ context->currentMech = mech; \ context->hashUpdate = (SFTKHash) mmm ## _Update; \ context->end = (SFTKEnd) mmm ## _End; \ context->destroy = (SFTKDestroy) mmm ## _DestroyContext; \ context->maxLen = mmm ## _LENGTH; \ if (mmm ## _ctx) \ mmm ## _Begin(mmm ## _ctx); \ else \ crv = CKR_HOST_MEMORY; \ break; \ } switch(pMechanism->mechanism) { INIT_MECH(CKM_MD2, MD2) INIT_MECH(CKM_MD5, MD5) INIT_MECH(CKM_SHA_1, SHA1) INIT_MECH(CKM_SHA224, SHA224) INIT_MECH(CKM_SHA256, SHA256) INIT_MECH(CKM_SHA384, SHA384) INIT_MECH(CKM_SHA512, SHA512) default: crv = CKR_MECHANISM_INVALID; break; } if (crv != CKR_OK) { sftk_FreeContext(context); sftk_FreeSession(session); return crv; } sftk_SetContextByType(session, SFTK_HASH, context); sftk_FreeSession(session); return CKR_OK; }
/**
 * Authorization tests for the historic process instance query operations of
 * the HistoryService: without permissions the query returns nothing; with
 * READ_HISTORY on a specific process definition (or on ANY) the matching
 * historic instances become visible.
 *
 * @author Roman Smirnov
 *
 */
public class HistoricProcessInstanceAuthorizationTest extends AuthorizationTest {

  protected static final String PROCESS_KEY = "oneTaskProcess";
  protected static final String MESSAGE_START_PROCESS_KEY = "messageStartProcess";

  protected String deploymentId;

  public void setUp() throws Exception {
    // deploy the two test process definitions used throughout this class
    deploymentId = createDeployment(null,
        "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml",
        "org/camunda/bpm/engine/test/api/authorization/messageStartEventProcess.bpmn20.xml").getId();
    super.setUp();
  }

  public void tearDown() {
    super.tearDown();
    deleteDeployment(deploymentId);
  }

  // historic process instance query //////////////////////////////////////////////////////////

  public void testSimpleQueryWithoutAuthorization() {
    // given
    startProcessInstanceByKey(PROCESS_KEY);

    // when
    HistoricProcessInstanceQuery query = historyService.createHistoricProcessInstanceQuery();

    // then
    verifyQueryResults(query, 0);
  }

  public void testSimpleQueryWithReadHistoryPermissionOnProcessDefinition() {
    // given
    String processInstanceId = startProcessInstanceByKey(PROCESS_KEY).getId();
    createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricProcessInstanceQuery query = historyService.createHistoricProcessInstanceQuery();

    // then
    verifyQueryResults(query, 1);

    HistoricProcessInstance instance = query.singleResult();
    assertNotNull(instance);
    assertEquals(processInstanceId, instance.getId());
  }

  public void testSimpleQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
    // given
    String processInstanceId = startProcessInstanceByKey(PROCESS_KEY).getId();
    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricProcessInstanceQuery query = historyService.createHistoricProcessInstanceQuery();

    // then
    verifyQueryResults(query, 1);

    HistoricProcessInstance instance = query.singleResult();
    assertNotNull(instance);
    assertEquals(processInstanceId, instance.getId());
  }

  // historic process instance query (multiple process instances) ////////////////////////

  public void testQueryWithoutAuthorization() {
    // given
    startProcessInstanceByKey(PROCESS_KEY);
    startProcessInstanceByKey(PROCESS_KEY);
    startProcessInstanceByKey(PROCESS_KEY);

    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);

    // when
    HistoricProcessInstanceQuery query = historyService.createHistoricProcessInstanceQuery();

    // then
    verifyQueryResults(query, 0);
  }

  public void testQueryWithReadHistoryPermissionOnProcessDefinition() {
    // given: 3 instances of PROCESS_KEY, 4 of MESSAGE_START_PROCESS_KEY
    startProcessInstanceByKey(PROCESS_KEY);
    startProcessInstanceByKey(PROCESS_KEY);
    startProcessInstanceByKey(PROCESS_KEY);

    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);

    createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricProcessInstanceQuery query = historyService.createHistoricProcessInstanceQuery();

    // then: only the 3 instances of the granted definition are visible
    verifyQueryResults(query, 3);
  }

  public void testQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
    // given
    startProcessInstanceByKey(PROCESS_KEY);
    startProcessInstanceByKey(PROCESS_KEY);
    startProcessInstanceByKey(PROCESS_KEY);

    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
    startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);

    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricProcessInstanceQuery query = historyService.createHistoricProcessInstanceQuery();

    // then: ANY grant exposes all 7 instances
    verifyQueryResults(query, 7);
  }

  // delete deployment (cascade = false)

  public void testQueryAfterDeletingDeployment() {
    // given
    startProcessInstanceByKey(PROCESS_KEY);
    startProcessInstanceByKey(PROCESS_KEY);
    startProcessInstanceByKey(PROCESS_KEY);

    createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);

    // finish all instances so only history remains
    disableAuthorization();
    List<Task> tasks = taskService.createTaskQuery().list();
    for (Task task : tasks) {
      taskService.complete(task.getId());
    }
    enableAuthorization();

    // remove the deployment without cascading into history
    disableAuthorization();
    repositoryService.deleteDeployment(deploymentId);
    enableAuthorization();

    // when
    HistoricProcessInstanceQuery query = historyService.createHistoricProcessInstanceQuery();

    // then: history survives the (non-cascading) deployment deletion
    verifyQueryResults(query, 3);

    // cleanup: remove the leftover historic instances
    disableAuthorization();
    List<HistoricProcessInstance> instances = historyService.createHistoricProcessInstanceQuery().list();
    for (HistoricProcessInstance instance : instances) {
      historyService.deleteHistoricProcessInstance(instance.getId());
    }
    enableAuthorization();
  }

  // delete historic process instance //////////////////////////////

  public void testDeleteHistoricProcessInstanceWithoutAuthorization() {
    // given
    String processInstanceId = startProcessInstanceByKey(PROCESS_KEY).getId();
    String taskId = selectSingleTask().getId();
    disableAuthorization();
    taskService.complete(taskId);
    enableAuthorization();

    try {
      // when
      historyService.deleteHistoricProcessInstance(processInstanceId);
      fail("Exception expected: It should not be possible to delete the historic process instance");
    } catch (AuthorizationException e) {
      // then: message names the user, the missing permission and the resource
      String message = e.getMessage();
      assertTextPresent(userId, message);
      assertTextPresent(DELETE_HISTORY.getName(), message);
      assertTextPresent(PROCESS_KEY, message);
      assertTextPresent(PROCESS_DEFINITION.resourceName(), message);
    }
  }

  public void testDeleteHistoricProcessInstanceWithDeleteHistoryPermissionOnProcessDefinition() {
    // given
    String processInstanceId = startProcessInstanceByKey(PROCESS_KEY).getId();
    String taskId = selectSingleTask().getId();
    disableAuthorization();
    taskService.complete(taskId);
    enableAuthorization();

    createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, DELETE_HISTORY);

    // when
    historyService.deleteHistoricProcessInstance(processInstanceId);

    // then
    disableAuthorization();
    long count = historyService
        .createHistoricProcessInstanceQuery()
        .processInstanceId(processInstanceId)
        .count();
    assertEquals(0, count);
    enableAuthorization();
  }

  public void testDeleteHistoricProcessInstanceWithDeleteHistoryPermissionOnAnyProcessDefinition() {
    // given
    String processInstanceId = startProcessInstanceByKey(PROCESS_KEY).getId();
    String taskId = selectSingleTask().getId();
    disableAuthorization();
    taskService.complete(taskId);
    enableAuthorization();

    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, DELETE_HISTORY);

    // when
    historyService.deleteHistoricProcessInstance(processInstanceId);

    // then
    disableAuthorization();
    long count = historyService
        .createHistoricProcessInstanceQuery()
        .processInstanceId(processInstanceId)
        .count();
    assertEquals(0, count);
    enableAuthorization();
  }

  public void testDeleteHistoricProcessInstanceAfterDeletingDeployment() {
    // given
    String processInstanceId = startProcessInstanceByKey(PROCESS_KEY).getId();
    String taskId = selectSingleTask().getId();
    disableAuthorization();
    taskService.complete(taskId);
    enableAuthorization();

    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, DELETE_HISTORY);

    disableAuthorization();
    repositoryService.deleteDeployment(deploymentId);
    enableAuthorization();

    // when: deletion must work even though the deployment is gone
    historyService.deleteHistoricProcessInstance(processInstanceId);

    // then
    disableAuthorization();
    long count = historyService
        .createHistoricProcessInstanceQuery()
        .processInstanceId(processInstanceId)
        .count();
    assertEquals(0, count);
    enableAuthorization();
  }

  // helper ////////////////////////////////////////////////////////

  protected void verifyQueryResults(HistoricProcessInstanceQuery query, int countExpected) {
    verifyQueryResults((AbstractQuery<?, ?>) query, countExpected);
  }

}
A roughness penalty approach and its application to noisy hyphenated chromatographic two‐way data In order to improve the signal detection and resolution of chemical components present at very low concentrations in hyphenated chromatographic two‐way data, this paper first investigates how measurement noise from the instruments affects these two tasks. A new smoothing technique, called the roughness penalty method, is then introduced to reduce the influence of this measurement noise. Our results show that the proposed method significantly enhances detection ability. In addition, the spectra resolved after roughness penalty smoothing are found to be significantly improved. The performance of the method was assessed using both simulated and real hyphenated two‐way data. Copyright © 1999 John Wiley & Sons, Ltd.
/** * * * @author Almas Baimagambetov (AlmasB) ([email protected]) */ public class AttractorSample extends GameApplication { @Override protected void initSettings(GameSettings settings) { settings.setWidth(800); settings.setHeight(600); settings.setTitle("AttractorSample"); settings.setVersion("0.1"); } @Override protected void initInput() { getInput().addAction(new UserAction("spawn") { @Override protected void onActionBegin() { Entities.builder() .at(getInput().getMousePositionWorld()) .viewFromNode(new Rectangle(40, 40, Color.BLUE)) .with(new AttractorComponent(FXGLMath.random(30, 60), 350)) .buildAndAttach(); } }, MouseButton.PRIMARY); } @Override protected void initGame() { Entities.builder() .at(400, 100) .viewFromNode(new Rectangle(40, 40, Color.RED)) .with(new AttractableComponent(25)) .buildAndAttach(); } public static void main(String[] args) { launch(args); } }
def ndarray_to_qimage(arr, fmt): if QT_LIB.startswith('PyQt'): if QtCore.PYQT_VERSION == 0x60000: img_ptr = Qt.sip.voidptr(arr) else: img_ptr = int(Qt.sip.voidptr(arr)) else: img_ptr = arr h, w = arr.shape[:2] bytesPerLine = arr.strides[0] qimg = QtGui.QImage(img_ptr, w, h, bytesPerLine, fmt) qimg.data = arr return qimg
package com.spark.live.sdk.network.rtmp.message.commands; import com.spark.live.sdk.network.rtmp.amf.AMFNull; import com.spark.live.sdk.network.rtmp.amf.AMFNumber; import com.spark.live.sdk.network.rtmp.amf.AMFString; /** * * Created by devzhaoyou on 9/12/16. */ public class FCPublish extends Command { public FCPublish(double transactionId, String stream) { setCommandField(new AMFString(NAME_FC_PUBLISH)); setCommandField(new AMFNumber(transactionId)); setCommandField(new AMFNull()); setCommandField(new AMFString(stream)); } }
package util

import (
	"archive/tar"
	"compress/gzip"
	"fmt"
	"io"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"

	"github.com/mholt/archiver"
)

// NOTE(review): a single package-global temp archive name means concurrent
// TarFiles calls would clobber each other's output -- confirm callers are
// serialized, or switch to a per-call temp file.
var tarfile = "studios.tar.gz"

// TarBallFiles archives the given files (resolved relative to src, missing
// ones silently skipped) into tarFile, replacing any existing archive.
// NOTE(review): the 'err' declared in the first if-statement shadows the
// named return value; behavior is correct but easy to misread.
func TarBallFiles(files []string, src, tarFile string) (err error) {
	if _, err := os.Stat(tarFile); !os.IsNotExist(err) {
		// path/to/whatever does not exist
		err = os.RemoveAll(tarFile)
		if err != nil {
			return err
		}
	}

	var paths []string
	// ensure the src actually exists before trying to tar it
	for _, path := range files {
		lpath := filepath.Join(src, path)
		if _, lerr := os.Stat(lpath); lerr == nil {
			paths = append(paths, lpath)
		}
	}

	err = archiver.Archive(paths, tarFile)
	if err != nil {
		return err
	}
	return nil
}

// TarFiles archives the files via TarBallFiles and returns the raw bytes of
// the resulting .tar.gz.
// NOTE(review): the temp archive is left on disk -- the cleanup below is
// commented out; confirm whether that is intentional.
func TarFiles(files []string, src string) (data []byte, err error) {
	err = TarBallFiles(files, src, tarfile)
	if err != nil {
		return nil, err
	}
	data, err = ioutil.ReadFile(tarfile)
	if err != nil {
		return nil, err
	}
	/*
		err = os.RemoveAll(tarfile)
		if err != nil {
			return data, err
		}
	*/
	return data, err
}

// OldTarFiles streams a gzipped tar of the given files (resolved under src,
// missing ones skipped) to all provided writers. Walk errors are only
// printed, not returned.
func OldTarFiles(files []string, src string, writers ...io.Writer) error {
	var paths []string
	// ensure the src actually exists before trying to tar it
	for _, path := range files {
		if _, err := os.Stat(filepath.Join(src, path)); err == nil {
			paths = append(paths, path)
		}
	}

	mw := io.MultiWriter(writers...)

	gzw := gzip.NewWriter(mw)
	defer gzw.Close()

	tw := tar.NewWriter(gzw)
	defer tw.Close()

	// walk path
	walker := func(file string, fi os.FileInfo, err error) error {
		// fmt.Println("+", file)
		// return on any error
		if err != nil {
			return err
		}

		// return on non-regular files (thanks to [kumo](https://medium.com/@komuw/just-like-you-did-fbdd7df829d3) for this suggested update)
		if !fi.Mode().IsRegular() {
			return nil
		}

		// create a new dir/file header
		header, err := tar.FileInfoHeader(fi, fi.Name())
		if err != nil {
			return err
		}

		// update the name to correctly reflect the desired destination when untaring
		header.Name = strings.TrimPrefix(strings.Replace(file, src, "", -1), string(filepath.Separator))

		// write the header
		if err := tw.WriteHeader(header); err != nil {
			return err
		}

		// open files for taring
		f, err := os.Open(file)
		if err != nil {
			return err
		}

		// copy file data into tar writer
		// NOTE(review): f is not closed on the io.Copy error path -- leaks a
		// file descriptor per failed copy.
		if _, err := io.Copy(tw, f); err != nil {
			return err
		}

		// manually close here after each file operation; defering would cause each file close
		// to wait until all operations have completed.
		f.Close()

		return nil
	}

	for _, path := range paths {
		err := filepath.Walk(filepath.Join(src, path), walker)
		if err != nil {
			fmt.Println("Error", path, err)
		}
	}

	return nil
}
Around 75 trash and recycling dumpsters have been tossed by Capitol Hill businesses for high-frequency bag pick ups starting last week. Another 36 dumpsters have been pulled off streets and sidewalks and on to private property. It’s part of a city-mandated program to improve safety in Capitol Hill’s core restaurant and nightlife area by moving the large metal containers out of the public right-of-way. Half of the businesses in the corridor — roughly bound by Melrose, E John, E Union, and 15th — were able to keep their dumpsters by storing them on private property. Some of those businesses may still be dragging dumpsters into the street for pickup, but Seattle Public Utilities officials say they should not be out for long and certainly not over night. “It’s definitely going to make the neighborhood look and feel a lot cleaner,” said SPU spokesperson Becca Fong. The program is “pay as you throw” — pickup fees are paid by how many bags businesses purchase ahead of time. Trash bags cost more than recycling, which officials hope will encourage more recycling. Compost will stay in bins with frequent pickup and broken down cardboard will be picked up free of charge. Bars and other businesses that need to recycle large amounts of glass will be given bins for regular pickup. The origins of the program stem from an unlikely source — a recommendation made by Mayor Ed Murray’s LGBTQ safety task force last year, which built off an earlier study by the Capitol Hill EcoDistrict. A similar program in Belltown called Clean Alleys has largely been seen as a success, though not everyone on Capitol Hill is on board. The bag program operated by Recology CleanScapes costs about 15% more than dumpsters and will bring more trucks into the neighborhood. Restaurant owner Dave Meinert said in a recent Facebook post the city was not being consistent in its priorities. 
Plastic to go bags are banned from restaurants for environmental reasons (a good thing) but now the City wants restaurants to use thick plastic garbage bags instead of dumpsters, which will get picked up 3 times as often, meaning MORE plastic bags and 3x as many garbage truck trips adding to traffic and exhaust problems. Irony? While the mostly alley-less neighborhood has long dealt with dumpsters in plain view, the issue has been exacerbated in recent years by Capitol Hill’s explosion of construction and new residents. For instance, the dumpsters that accumulated at 11th and Pike had been dispersed across a larger area before three construction sites ate up the space. After a month of the bag pickup program, SPU officials will meet with business owners to take feedback. Ideally it will go over better than the official name of the new effort, the Pike/Pine Retail Corridor Solid Waste Site Review Program.
def inv_mel_spectrogram_tensorflow(mel_spectrogram, hparams): if hparams.signal_normalization: D = _denormalize_tensorflow(mel_spectrogram, hparams) else: D = mel_spectrogram S = _db_to_amp_tensorflow(D + hparams.ref_level_db) S = _mel_to_linear(S, hparams) return _griffin_lim_tensorflow(S ** hparams.power, hparams)
#include <stdio.h>
#include <math.h>
#include <getopt.h>

#define BUFFER_SIZE 2048

/*
 * Small "tee"-like filter: copies stdin to both stdout and stderr.
 *   -p / --print : print begin/end markers around the copied stream (stderr)
 *   -c / --color : dim the stderr copy with ANSI escape codes
 */

size_t get_input(int*);
size_t print_output(int*, size_t);
void run();
void print_start();
void print_end();

static int prints;
static int colors;

int main(int argc, char** argv)
{
    int c;
    while (1) {
        /* getopt_long requires the option array to be terminated by an
         * all-zero element; the original omitted it, which is undefined
         * behavior.  The flag value for --print was also 0, so --print
         * could never actually enable printing; it must be 1 like -p. */
        static struct option long_options[] = {
            {"print", no_argument, &prints, 1},
            {"color", no_argument, &colors, 1},
            {0, 0, 0, 0}
        };
        int option_index = 0;
        c = getopt_long(argc, argv, "cp", long_options, &option_index);
        if (c == -1)
            break;
        else if (c == 'p')
            prints = 1;
        else if (c == 'c')
            colors = 1;
    }
    run();
    return 0;
}

/* Main pump: read chunks from stdin and echo each one until EOF. */
void run()
{
    int buffer[BUFFER_SIZE];
    size_t n;

    if (colors)
        fputs("\e[90;2m", stderr); /* dim gray for the stderr copy */
    if (prints)
        print_start();

    while ((n = get_input(buffer)) != 0)
        print_output(buffer, n);

    putc('\n', stdout);
    putc('\n', stderr);

    if (prints)
        print_end();
    if (colors)
        fputs("\e[0m", stderr); /* reset terminal attributes */
}

/* Fill 'buffer' with up to BUFFER_SIZE characters from stdin.
 * Returns the number of characters stored; 0 signals EOF.
 *
 * Fixes two bugs in the original:
 *   - it returned i-1, silently dropping the last character of every chunk
 *     and terminating the copy loop early on 1-character reads;
 *   - it tested 'i < BUFFER_SIZE' AFTER reading, so the character read when
 *     the buffer was already full was discarded. */
size_t get_input(int* buffer)
{
    int c;
    size_t i = 0;
    while (i < BUFFER_SIZE && (c = getc(stdin)) != EOF) {
        buffer[i++] = c;
    }
    return i;
}

/* Write the first n buffered characters to both stderr and stdout. */
size_t print_output(int* buffer, size_t n)
{
    for (size_t i = 0; i < n; i++) {
        putc(buffer[i], stderr);
        putc(buffer[i], stdout);
    }
    return n;
}

void print_start()
{
    fprintf(stderr, "----- p begin -----\n");
}

void print_end()
{
    fprintf(stderr, "----- p end -----\n");
}
import json

path_to_file = "C:/Users/dilGoe/Desktop/CopyrightDetection/data/article7.txt"
path_to_targetfile = "C:/Users/dilGoe/Desktop/CopyrightDetection/dataClean/article7.json"
pathToF1 = "C:/Users/dilGoe/Desktop/book4.txt"
pathToF2 = "C:/Users/dilGoe/Desktop/CopyrightDetection/dataClean/book4.json"


def getSentencesWithCopyright(filepath, targetFilepath):
    """Collect every line of ``filepath`` containing 'copyright'
    (case-insensitive) and write them to ``targetFilepath`` as a JSON object
    mapping each matching line (trailing newline stripped) to 0.
    """
    resultDict = {}
    # 'with' guarantees the handles are closed even on error; the original
    # leaked both file objects.
    with open(filepath, "r", encoding="utf-8") as infile:
        for line in infile:
            if "copyright" in line.lower():
                # rstrip('\n') only removes a trailing newline; the original
                # line[:-1] chopped a real character off the file's last line
                # when the file did not end with a newline.
                resultDict[line.rstrip("\n")] = 0
    with open(targetFilepath, "w", encoding="utf-8") as outfile:
        json.dump(resultDict, outfile)


def main():
    getSentencesWithCopyright(pathToF1, pathToF2)


if __name__ == "__main__":
    main()
/**
 * Encodes the given byte array and returns a base 64 generated string.
 *
 * @param bytes The base 10 byte array to be encoded into base 64.
 */
public static String encode(byte[] bytes) {
    ByteArrayStream source = new ByteArrayStream(bytes);
    try {
        // Mark the whole input as available for reading.
        source.setPos(bytes.length);
    } catch (IOException e) {
        e.printStackTrace();
    }
    ByteArrayStream sink = new ByteArrayStream(bytes.length);
    encode(source, sink);
    return new String(sink.getBuffer(), 0, sink.getPos());
}
/** Optional callback to inform the plugin about a sample rate change.
 *  Re-initializes the DSP core, the resamplers, and all intermediate
 *  buffers for the new host rate, then clears any stale audio. */
void PluginFverb::sampleRateChanged(double newSampleRate)
{
    // In vintage mode the DSP core always runs at the fixed vintage rate
    // (the host signal is resampled around it); otherwise it runs at the
    // host rate directly.
    fDsp->init(fVintage ? kVintageSampleRate : newSampleRate);

    // Smoothed dry/wet gains follow the host rate.
    fDry.setSampleRate(newSampleRate);
    fWet.setSampleRate(newSampleRate);

    for (uint32_t ch = 0; ch < kNumChannels; ++ch)
        fInputKeep[ch].resize(kMaxResampledBlock);

    // Resamplers convert host rate <-> vintage rate.
    fDownsampler.SetRates((int)newSampleRate, kVintageSampleRate);
    fUpsampler.SetRates(kVintageSampleRate, (int)newSampleRate);

    // Worst-case interleaved buffer sizes for the resampled paths; the "+4"
    // extra frames presumably absorb resampler rounding -- TODO confirm.
    fDownsamplerOut.resize(kNumChannels * (4 + kMaxResampledBlock * (kVintageSampleRate / newSampleRate)));
    fUpsamplerOut.resize(kNumChannels * (4 + kMaxResampledBlock * (newSampleRate / kVintageSampleRate)));

    // Per-channel scratch buffers at the DSP (vintage-side) rate.
    for (uint32_t ch = 0; ch < kNumChannels; ++ch) {
        fDspIn[ch].resize((4 + kMaxResampledBlock * (kVintageSampleRate / newSampleRate)));
        fDspOut[ch].resize((4 + kMaxResampledBlock * (kVintageSampleRate / newSampleRate)));
    }

    // Discard any buffered audio that was produced at the previous rate.
    clear();
}
Leading sustainable schools in the era of Education 4.0: identifying school leadership competencies in Malaysian secondary schools Abstract The purpose of the study was to develop an empirical School Leadership Competency Model for the era of Education 4.0 (SLCMEduc4.0) to identify school leadership competencies that facilitate and maximize effectiveness in leading sustainable schools in Malaysia. Exploratory Factor Analysis (EFA) was employed to identify the underlying factors, whereas Confirmatory Factor Analysis (CFA) was applied to test the measurement models using Structural Equation Modelling. A total of 444 and 931 respondents completed the survey with usable data for EFA and CFA respectively. The results suggested that the SLCMEduc4.0 can be explained by eight factors, namely: Leading for Learning, Emotional Intelligence, Critical Thinking, Communication and Ethics, Collaboration, Decision Making and Problem Solving, Digital Dexterity and Entrepreneurial, with good fit statistics: normed χ² = 2.628, TLI = .950, CFI = .954 and RMSEA = .042. With a total of 40 items, the model also features good convergent and discriminant validity and construct reliability. The SLCMEduc4.0 is a coherent premier model that provides useful feedback for practitioners in planning, designing and evaluating future professional development programmes for school leaders. The study encourages a fresh look at educational leadership development locally and globally, specifically in enhancing the leadership development of Malaysian school leaders towards productive change in the era of Education 4.0. Her research interests are in the fields of school change management, school leadership, teacher professional development and professional learning communities. She has presented papers at both national and international conferences. Her written contributions have appeared in a number of international journals.
He has also served as the Dean of the Post Institute. He has conducted research, presented papers and published books and journal articles on educational leadership and human resource development. He is also involved in training consultancy projects with various government and private agencies, including educational leadership training for local and international participants.
If there were any doubt the government shutdown and debt ceiling standoff in Washington have been a political shot to the jugular for lawmakers, look no further than the recent Houston Chronicle piece, entitled: “Why we miss Kay Bailey Hutchison.” In it, the paper’s editorial staff laments its reluctant endorsement given last year to Hutchison’s successor, Tea Party rabble rouser Sen. Ted Cruz. They write: “When we endorsed Ted Cruz in last November’s general election, we did so with many reservations and at least one specific recommendation–that he follow Hutchison’s example in his conduct as a senator. Obviously, he has not done so. Cruz has been part of the problem in specific situations where Hutchison would have been part of the solution. We feel certain she would have worked shoulder to shoulder with Sen. Susan Collins, R-Maine, in crafting a workable solution that likely would have avoided the government shutdown altogether. But we’ll never know.” Since the shutdown began at the beginning of the month over a budget impasse, congressional Republicans have braved cratering poll numbers and mounting outrage all in the name of their commitment to the obstruction of Obamacare. Among the senators tethered to this strategy, none have carried the banner more proudly than Cruz, who has marched against closed war memorials and staged day-long talk-a-thons against the president’s signature health care law. Now, with hours to go until Congress hits the deadline to raise the debt limit, Cruz told reporters he does not intend to delay a Senate vote to avert a U.S. default. But it may already be too late for his reputation. “Right wingers like Cruz have already done the U.S. irreparable damage all over the world,” said Independent Sen. Bernie Sanders on MSNBC Wednesday. “I think it will take years for us to recover from this fiasco.”
// Copyright (c) Microsoft. All rights reserved.

#![deny(rust_2018_idioms)]
#![warn(clippy::all, clippy::pedantic)]
#![allow(
    clippy::default_trait_access,
    clippy::let_unit_value,
    clippy::too_many_lines,
    clippy::use_self
)]

// Test tool that exercises the `aziot_keys` openssl engine: it can generate
// CA/client/server certs whose private keys live behind engine key handles,
// and run a TLS client/server pair against each other using those certs.

#[tokio::main]
async fn main() -> Result<(), Error> {
    openssl::init();

    let command = structopt::StructOpt::from_args();

    match command {
        Command::GenerateCaCert {
            key_handle,
            out_file,
            subject,
        } => generate_cert(key_handle, &out_file, &subject, &GenerateCertKind::Ca)?,

        Command::GenerateClientCert {
            ca_cert,
            ca_key_handle,
            key_handle,
            out_file,
            subject,
        } => generate_cert(
            key_handle,
            &out_file,
            &subject,
            &GenerateCertKind::Client {
                ca_cert,
                ca_key_handle,
            },
        )?,

        Command::GenerateServerCert {
            ca_cert,
            ca_key_handle,
            key_handle,
            out_file,
            subject,
        } => generate_cert(
            key_handle,
            &out_file,
            &subject,
            &GenerateCertKind::Server {
                ca_cert,
                ca_key_handle,
            },
        )?,

        Command::WebClient {
            cert,
            key_handle,
            port,
        } => {
            let mut http_connector = hyper::client::HttpConnector::new();
            // Allow non-"http" (i.e. "https") URIs to reach the TLS layer below.
            http_connector.enforce_http(false);

            let mut engine = load_engine()?;
            let key = load_private_key(&mut engine, key_handle)?;

            let mut tls_connector =
                openssl::ssl::SslConnector::builder(openssl::ssl::SslMethod::tls_client())?;
            tls_connector.set_private_key(&key)?;
            tls_connector.set_certificate_chain_file(&cert)?;

            // The root of the client cert is the CA, and we expect the server cert to be signed by this same CA.
            // So add it to the cert store.
            let ca_cert = {
                let cert_chain_file = std::fs::read(cert)?;
                let mut cert_chain = openssl::x509::X509::stack_from_pem(&cert_chain_file)?;
                // The CA cert is the last cert in the PEM chain file.
                cert_chain.pop().unwrap()
            };
            tls_connector.cert_store_mut().add_cert(ca_cert)?;

            // Log the server cert chain. Does not change the verification result from what openssl already concluded.
            tls_connector.set_verify_callback(
                openssl::ssl::SslVerifyMode::PEER,
                |openssl_verification_result, context| {
                    println!("Server cert:");
                    let chain = context.chain().unwrap();
                    for (i, cert) in chain.into_iter().enumerate() {
                        println!(
                            " #{}: {}",
                            i + 1,
                            cert.subject_name()
                                .entries()
                                .next()
                                .unwrap()
                                .data()
                                .as_utf8()
                                .unwrap()
                        );
                    }
                    println!(
                        "openssl verification result: {}",
                        openssl_verification_result
                    );
                    openssl_verification_result
                },
            );

            let tls_connector =
                hyper_openssl::HttpsConnector::with_connector(http_connector, tls_connector)?;

            let client: hyper::Client<_, hyper::Body> =
                hyper::Client::builder().build(tls_connector);
            let response = client
                .get(format!("https://127.0.0.1:{}/", port).parse()?)
                .await?;
            let (http::response::Parts { status, .. }, response_body) = response.into_parts();
            let response_body = hyper::body::to_bytes(response_body).await?;
            println!("server returned {} {:?}", status, response_body);
            // The test server always answers 200 "Hello, world!\n"; anything else is a failure.
            if status != http::StatusCode::OK || &*response_body != b"Hello, world!\n" {
                return Err("server did not return expected response".into());
            }
        }

        Command::WebServer {
            cert,
            key_handle,
            port,
        } => {
            let mut engine = load_engine()?;
            let key = load_private_key(&mut engine, key_handle)?;

            let incoming =
                test_common::tokio_openssl2::Incoming::new("0.0.0.0", port, &cert, &key, true)?;

            // Minimal hyper service: every request gets "Hello, world!\n".
            let server =
                hyper::Server::builder(incoming).serve(hyper::service::make_service_fn(|_| {
                    futures_util::future::ok::<_, std::convert::Infallible>(
                        hyper::service::service_fn(|_| {
                            futures_util::future::ok::<_, std::convert::Infallible>(
                                hyper::Response::new(hyper::Body::from("Hello, world!\n")),
                            )
                        }),
                    )
                }));

            println!("Starting web server...");
            let () = server.await?;
        }
    }

    Ok(())
}

/// Loads the built-in `aziot_keys` engine and promotes it to a functional
/// (usable) engine reference.
fn load_engine() -> Result<openssl2::FunctionalEngine, Error> {
    const ENGINE_ID: &[u8] = b"aziot_keys\0";

    unsafe {
        openssl_sys2::ENGINE_load_builtin_engines();
    }

    let engine_id = std::ffi::CStr::from_bytes_with_nul(ENGINE_ID)
        .expect("hard-coded engine ID is valid CStr");

    let engine = openssl2::StructuralEngine::by_id(engine_id)?;
    let engine: openssl2::FunctionalEngine = engine.try_into()?;

    println!("Loaded engine: [{}]", engine.name()?.to_string_lossy());

    Ok(engine)
}

/// Resolves `key_handle` to a public key through the engine.
fn load_public_key(
    engine: &mut openssl2::FunctionalEngine,
    key_handle: String,
) -> Result<openssl::pkey::PKey<openssl::pkey::Public>, Error> {
    let key_handle = std::ffi::CString::new(key_handle)?;
    let key = engine.load_public_key(&key_handle)?;
    Ok(key)
}

/// Resolves `key_handle` to a private key through the engine.
fn load_private_key(
    engine: &mut openssl2::FunctionalEngine,
    key_handle: String,
) -> Result<openssl::pkey::PKey<openssl::pkey::Private>, Error> {
    let key_handle = std::ffi::CString::new(key_handle)?;
    let key = engine.load_private_key(&key_handle)?;
    Ok(key)
}

/// Generates a cert (CA / client / server depending on `kind`) whose key pair
/// is `key_handle`, signs it with the appropriate key (its own for CA,
/// otherwise the CA's), and writes it to `out_file` as PEM. For non-CA kinds
/// the CA cert is appended so the output file is a full chain.
fn generate_cert(
    key_handle: String,
    out_file: &std::path::Path,
    subject: &str,
    kind: &GenerateCertKind,
) -> Result<(), Error> {
    let mut engine = load_engine()?;

    let mut builder = openssl::x509::X509::builder()?;
    // Version value 2 == X.509 v3.
    builder.set_version(2)?;

    let public_key = load_public_key(&mut engine, key_handle.clone())?;
    builder.set_pubkey(&public_key)?;

    // CA certs are valid for a year; leaf certs only 30 days.
    let not_after = openssl::asn1::Asn1Time::days_from_now(match &kind {
        GenerateCertKind::Ca => 365,
        GenerateCertKind::Client { .. } | GenerateCertKind::Server { .. } => 30,
    })?;
    builder.set_not_after(std::borrow::Borrow::borrow(&not_after))?;

    let not_before = openssl::asn1::Asn1Time::days_from_now(0)?;
    builder.set_not_before(std::borrow::Borrow::borrow(&not_before))?;

    let mut subject_name = openssl::x509::X509Name::builder()?;
    subject_name.append_entry_by_text("CN", subject)?;
    let subject_name = subject_name.build();
    builder.set_subject_name(&subject_name)?;

    match &kind {
        GenerateCertKind::Ca => {
            // Self-signed root: issuer == subject.
            builder.set_issuer_name(&subject_name)?;

            let ca_extension = openssl::x509::extension::BasicConstraints::new()
                .ca()
                .build()?;
            builder.append_extension(ca_extension)?;
        }

        GenerateCertKind::Client { ca_cert, .. } | GenerateCertKind::Server { ca_cert, .. } => {
            let ca_cert = std::fs::read(ca_cert)?;
            let ca_cert = openssl::x509::X509::from_pem(&ca_cert)?;
            builder.set_issuer_name(ca_cert.subject_name())?;

            match kind {
                GenerateCertKind::Ca => unreachable!(),

                GenerateCertKind::Client { .. } => {
                    let client_extension = openssl::x509::extension::ExtendedKeyUsage::new()
                        .client_auth()
                        .build()?;
                    builder.append_extension(client_extension)?;
                }

                GenerateCertKind::Server { .. } => {
                    let server_extension = openssl::x509::extension::ExtendedKeyUsage::new()
                        .server_auth()
                        .build()?;
                    builder.append_extension(server_extension)?;

                    // The test client connects to https://127.0.0.1, so the
                    // server cert needs a matching SAN.
                    let context = builder.x509v3_context(Some(&ca_cert), None);
                    let san_extension = openssl::x509::extension::SubjectAlternativeName::new()
                        .ip("127.0.0.1")
                        .build(&context)?;
                    builder.append_extension(san_extension)?;
                }
            }
        }
    }

    // CA certs are signed with their own key; leaf certs with the CA's key.
    let ca_key_handle = match &kind {
        GenerateCertKind::Ca => key_handle,
        GenerateCertKind::Client { ca_key_handle, .. }
        | GenerateCertKind::Server { ca_key_handle, .. } => ca_key_handle.clone(),
    };
    let ca_key = load_private_key(&mut engine, ca_key_handle)?;
    builder.sign(&ca_key, openssl::hash::MessageDigest::sha256())?;

    let cert = builder.build();

    let cert = cert.to_pem()?;
    let mut out_file = std::fs::File::create(out_file)?;
    std::io::Write::write_all(&mut out_file, &cert)?;
    match &kind {
        GenerateCertKind::Ca => (),
        GenerateCertKind::Client { ca_cert, .. } | GenerateCertKind::Server { ca_cert, .. } => {
            // Append the CA cert so the output file contains the full chain.
            let ca_cert = std::fs::read(ca_cert)?;
            std::io::Write::write_all(&mut out_file, &ca_cert)?;
        }
    }
    std::io::Write::flush(&mut out_file)?;

    Ok(())
}

/// What kind of cert `generate_cert` should produce.
#[derive(Debug)]
enum GenerateCertKind {
    Ca,
    Client {
        ca_cert: std::path::PathBuf,
        ca_key_handle: String,
    },
    Server {
        ca_cert: std::path::PathBuf,
        ca_key_handle: String,
    },
}

/// Error wrapper that captures a backtrace at construction and prints the
/// full `source()` chain in its Debug output.
struct Error(Box<dyn std::error::Error>, backtrace::Backtrace);

impl std::fmt::Debug for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(f, "{}", self.0)?;

        let mut source = self.0.source();
        while let Some(err) = source {
            writeln!(f, "caused by: {}", err)?;
            source = err.source();
        }

        writeln!(f)?;

        writeln!(f, "{:?}", self.1)?;

        Ok(())
    }
}

impl<E> From<E> for Error
where
    E: Into<Box<dyn std::error::Error>>,
{
    fn from(err: E) -> Self {
        Error(err.into(), Default::default())
    }
}

/// Command-line interface, parsed by structopt.
#[derive(structopt::StructOpt)]
enum Command {
    /// Generate a CA cert.
    GenerateCaCert {
        /// A key handle to the key pair that will be used for the CA cert.
        #[structopt(long)]
        key_handle: String,

        /// The path where the CA cert PEM file will be stored.
        #[structopt(long)]
        out_file: std::path::PathBuf,

        /// The subject CN of the new cert.
        #[structopt(long)]
        subject: String,
    },

    /// Generate a client auth cert.
    GenerateClientCert {
        /// The path of the CA cert PEM file.
        #[structopt(long)]
        ca_cert: std::path::PathBuf,

        /// A key handle to the key pair of the CA.
        #[structopt(long)]
        ca_key_handle: String,

        /// A key handle to the key pair that will be used for the client cert.
        #[structopt(long)]
        key_handle: String,

        /// The path where the client cert PEM file will be stored.
        #[structopt(long)]
        out_file: std::path::PathBuf,

        /// The subject CN of the new cert.
        #[structopt(long)]
        subject: String,
    },

    /// Generate a server auth cert.
    GenerateServerCert {
        /// The path of the CA cert PEM file.
        #[structopt(long)]
        ca_cert: std::path::PathBuf,

        /// A key handle to the key pair of the CA.
        #[structopt(long)]
        ca_key_handle: String,

        /// A key handle to the key pair that will be used for the server cert.
        #[structopt(long)]
        key_handle: String,

        /// The path where the server cert PEM file will be stored.
        #[structopt(long)]
        out_file: std::path::PathBuf,

        /// The subject CN of the new cert.
        #[structopt(long)]
        subject: String,
    },

    /// Start a web client that uses the specified private key and cert file for TLS.
    WebClient {
        /// Path of the client cert file.
        #[structopt(long)]
        cert: std::path::PathBuf,

        /// A key handle to the client cert's key pair.
        #[structopt(long)]
        key_handle: String,

        /// The port to connect to.
        #[structopt(long, default_value = "8443")]
        port: u16,
    },

    /// Start a web server that uses the specified private key and cert file for TLS.
    WebServer {
        /// Path of the server cert file.
        #[structopt(long)]
        cert: std::path::PathBuf,

        /// A key handle to the server cert's key pair.
        #[structopt(long)]
        key_handle: String,

        /// The port to listen on.
        #[structopt(long, default_value = "8443")]
        port: u16,
    },
}
// Sweeps spans in list until reclaims at least npages into heap.
// Returns the actual number of pages reclaimed.
// Called with h.lock held; the lock is temporarily released around each sweep.
func mHeap_ReclaimList(h *mheap, list *mspan, npages uintptr) uintptr {
	n := uintptr(0)
	sg := mheap_.sweepgen
retry:
	for s := list.next; s != list; s = s.next {
		// sweepgen == sg-2 marks a span not yet swept this cycle; the CAS
		// claims it (sg-1 = "being swept") so only one sweeper processes it.
		if s.sweepgen == sg-2 && cas(&s.sweepgen, sg-2, sg-1) {
			// Move the span to the back of the list so the restart below
			// does not revisit it.
			mSpanList_Remove(s)
			mSpanList_InsertBack(list, s)
			// The sweep must run without the heap lock held.
			unlock(&h.lock)
			// Capture npages before sweeping: a successful sweep may return
			// the span to the heap, invalidating s.
			snpages := s.npages
			if mSpan_Sweep(s, false) {
				// Sweep freed the whole span back to the heap.
				n += snpages
			}
			lock(&h.lock)
			if n >= npages {
				return n
			}
			// The list may have changed while the lock was released;
			// restart the traversal from the head.
			goto retry
		}
		if s.sweepgen == sg-1 {
			// Span is being swept by someone else; skip it.
			continue
		}
		// Span already swept this cycle; stop scanning. NOTE(review): this
		// assumes the remainder of the list is also already swept -- confirm
		// the list-ordering invariant before relying on it.
		break
	}
	return n
}
#pragma once
#include <NovusTypes.h>
#include <Utils/ByteBuffer.h>

// Interface for components whose state is serialized into a ByteBuffer for
// network transfer. Implementers provide both directions of the conversion.
struct NetworkComponent
{
    // Writes this component's state into 'buffer'; returns false on failure
    // (presumably when the buffer runs out of space -- implementers define this).
    virtual bool Serialize(Bytebuffer* buffer) const = 0;

    // Reads this component's state from 'buffer'; returns false on failure.
    virtual bool Deserialize(Bytebuffer* buffer) = 0;

    // Compile-time packet size; 0 here as the base default. NOTE(review):
    // static constexpr members are shadowed (not virtually overridden) by
    // derived types -- confirm callers resolve this through the derived type.
    static constexpr size_t GetPacketSize() { return 0; };
};
################################################################################
# Copyright: <NAME> 2019
#
# Apache 2.0 License
#
# This file contains all code related to pid check objects
#
################################################################################
import re

import requests

from breadp.checks import Check
from breadp.checks.result import BooleanResult


class IsValidDoiCheck(Check):
    """Checks whether an RDP has a valid DOI as PID.

    Methods
    -------
    _do_check(self, rdp)
        returns a BooleanResult
    """

    def __init__(self):
        Check.__init__(self)
        self.id = 0
        self.version = "0.0.1"

    def _do_check(self, rdp):
        pid = rdp.pid
        # No PID at all: the check itself could not be carried out.
        if not pid:
            return BooleanResult(False, "RDP has no PID", False)
        # A DOI starts with the "10." directory indicator, a registrant code
        # of at least four digits, then "/" and a suffix.
        if re.match(r"^10\.\d{4}\d*/.*", pid) is None:
            return BooleanResult(False, "{} is not a valid DOI".format(pid), True)
        return BooleanResult(True, "", True)


class DoiResolvesCheck(Check):
    """Checks whether the DOI of an RDP resolves.

    Methods
    -------
    _do_check(self, rdp)
        returns a BooleanResult
    """

    def __init__(self):
        Check.__init__(self)
        self.id = 1
        self.version = "0.0.1"

    def _do_check(self, rdp):
        if not rdp.pid:
            return BooleanResult(False, "RDP has no PID", False)

        try:
            response = requests.head('https://doi.org/' + rdp.pid)
        except Exception as e:
            return BooleanResult(False, "{}: {}".format(type(e).__name__, e), False)

        # doi.org answers a resolvable DOI with a 302 redirect to the
        # registered landing page.
        if response.status_code == 302:
            msg = "Location of resolved doi: {}".format(
                response.headers.get('Location'))
            return BooleanResult(True, msg, True)

        msg = "Could not resolve {}, status code: {}".format(
            rdp.pid, response.status_code)
        return BooleanResult(False, msg, True)
// Type definitions for @xmpp/client 0.13
// Project: https://github.com/xmppjs/xmpp.js/tree/main/packages/client
// Definitions by: BendingBender <https://github.com/BendingBender>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

import { Client as ClientCore, jid as xmppJid, xml as xmppXml } from '@xmpp/client-core';
import { Options as ConnectionOptions } from '@xmpp/connection';
import { IQCallee } from '@xmpp/iq/callee';
import { IQCaller } from '@xmpp/iq/caller';
import { IncomingContext, Middleware } from '@xmpp/middleware';
import { StreamFeatures } from '@xmpp/stream-features';
import { Reconnect } from '@xmpp/reconnect';
import { Resource } from '@xmpp/resource-binding';
import { Credentials, SASL } from '@xmpp/sasl';
import { StreamManagement } from '@xmpp/stream-management';
import * as koaCompose from 'koa-compose';

/**
 * An XMPP client is an entity that connects to an XMPP server.
 *
 * `@xmpp/client` package includes a minimal set of features to connect and authenticate securely and reliably.
 *
 * @example
 * import { client, xml } from "@xmpp/client";
 * import debug = require("@xmpp/debug");
 *
 * const xmpp = client({
 *   service: "ws://localhost:5280/xmpp-websocket",
 *   domain: "localhost",
 *   resource: "example",
 *   username: "username",
 *   password: "password",
 * });
 *
 * debug(xmpp, true);
 *
 * xmpp.on("error", (err) => {
 *   console.error(err);
 * });
 *
 * xmpp.on("offline", () => {
 *   console.log("offline");
 * });
 *
 * xmpp.on("stanza", async (stanza) => {
 *   if (stanza.is("message")) {
 *     await xmpp.send(xml("presence", { type: "unavailable" }));
 *     await xmpp.stop();
 *   }
 * });
 *
 * xmpp.on("online", async (address) => {
 *   // Makes itself available
 *   await xmpp.send(xml("presence"));
 *
 *   // Sends a chat message to itself
 *   const message = xml(
 *     "message",
 *     { type: "chat", to: address },
 *     xml("body", {}, "hello world"),
 *   );
 *   await xmpp.send(message);
 * });
 *
 * xmpp.start().catch(console.error);
 */
export function client(options?: Options): Client;

export interface Options extends ConnectionOptions {
    /**
     * Resource for `@xmpp/resource-binding`.
     */
    resource?: Resource | undefined;
    credentials?: Credentials | undefined;
    /**
     * Username for `@xmpp/sasl`.
     */
    username?: string | undefined;
    /**
     * Password for `@xmpp/sasl`.
     */
    password?: string | undefined;
}

/**
 * The fully-assembled client: the core entity extended with the plugin
 * instances (`reconnect`, `iqCaller`, `streamManagement`, ...) that
 * `client()` wires up.
 */
export interface Client extends ClientCore {
    entity: Client;
    reconnect: Reconnect<Client>;
    middleware: Middleware<Client>;
    streamFeatures: StreamFeatures<Client>;
    iqCaller: IQCaller<Client>;
    iqCallee: IQCallee<Client>;
    starttls: koaCompose.Middleware<IncomingContext<Client>>;
    sasl: SASL;
    streamManagement: StreamManagement;
    mechanisms: Array<{ scramsha1: undefined } | { plain: undefined } | { anonymous: undefined }>;
}

export const jid: typeof xmppJid;
export const xml: typeof xmppXml;
/**
 * Create a ContentObject, encrypt it if requested, and add it to the list of ContentObjects
 * awaiting signing and output to the flow controller. Also creates the segmented name for the CO.
 *
 * @param rootName base (unsegmented) name for the stream
 * @param segmentNumber segment number encoded into the new object's name
 * @param signedInfo metadata for the new ContentObject; its type is switched to ENCR when encrypting
 * @param contentBlock payload bytes
 * @param offset start of the payload within contentBlock
 * @param blockLength number of payload bytes
 * @param keys per-segment encryption keys, or null for cleartext
 * @return next segment number to use
 * @throws InvalidKeyException
 * @throws InvalidAlgorithmParameterException
 * @throws ContentEncodingException
 */
protected long newBlock(ContentName rootName, long segmentNumber, SignedInfo signedInfo,
		byte contentBlock[], int offset, int blockLength, ContentKeys keys)
		throws InvalidKeyException, InvalidAlgorithmParameterException, ContentEncodingException {
	int length = blockLength;
	if (null != keys) {
		try {
			// Encrypt the segment with a per-segment cipher. The ciphertext replaces
			// the plaintext reference and may differ in length (padding), so offset
			// resets to 0 and length is recomputed from the new array.
			Cipher thisCipher = keys.getSegmentEncryptionCipher(rootName, signedInfo.getPublisherKeyID(), segmentNumber);
			contentBlock = thisCipher.doFinal(contentBlock, offset, blockLength);
			length = contentBlock.length;
			offset = 0;
			// Mark the object as encrypted content.
			signedInfo.setType(ContentType.ENCR);
		} catch (IllegalBlockSizeException e) {
			Log.warning("Unexpected IllegalBlockSizeException for an algorithm we have already used!");
			throw new InvalidKeyException("Unexpected IllegalBlockSizeException for an algorithm we have already used!", e);
		} catch (BadPaddingException e) {
			Log.warning("Unexpected BadPaddingException for an algorithm we have already used!");
			throw new InvalidAlgorithmParameterException("Unexpected BadPaddingException for an algorithm we have already used!", e);
		}
	}
	// Build the object under the segmented name; the signature is filled in
	// later by the signing pipeline (hence null here).
	ContentObject co = new ContentObject(
			SegmentationProfile.segmentName(rootName, segmentNumber),
			signedInfo,contentBlock, offset, length,(Signature)null);
	// NOTE(review): when testhash is set, pre-loaded segments are kept out of
	// the signing/output queue; testhash semantics are defined elsewhere --
	// confirm before relying on this behavior.
	if (this.testhash!=null) {
		if(!this.testhash.isPreLoad()){
			_blocks.add(co);
		}
	}
	else{
		_blocks.add(co);
	}
	if(testhash!=null){
		// Split the parsed path into a hash name and an optional trie-tree
		// parameter introduced by a "/para/" segment.
		String path =testhash.parsePath(rootName.toString(),3);
		String trieTreeName,hashName;
		String paraMeter="para";
		if(path.indexOf("/"+paraMeter+"/")!=-1)
		{
			hashName = path.substring(0,path.indexOf("/para/"));
			trieTreeName = CCNConnector.parsePara(path.substring(path.indexOf("/"+paraMeter+"/")+paraMeter.length()+1));
		}
		else
		{
			hashName = path;
			trieTreeName = "";
		}
		System.out.println("Segment 932 hashName:"+hashName+" trieTreeName:"+trieTreeName);
		Vertex vertex = testhash.FindPath("/localhost/ROOT"+hashName, trieTreeName);
		if(vertex!=null)
		{
			System.out.println("find success");
			// The vertex's jump list is apparently shared across threads,
			// hence the synchronization.
			synchronized(vertex){
				if(vertex.isExist()==true){
					vertex.getVertexJumpList().add(co);
					System.out.println("ContentObject name is "+co.name().toString());
				}
			}
		}
	}
	// Remember the first segment produced by this segmenter.
	if (null == _firstSegment) {
		_firstSegment = co;
	}
	int contentLength = co.contentLength();
	long nextSegment = nextSegmentIndex(segmentNumber, contentLength);
	return nextSegment;
}
<gh_stars>10-100 import {Message, MessageBox} from 'element-ui'; import {UserModule} from '@/store/modules/user'; import {AxiosRequestConfig, AxiosResponse} from 'axios'; import {getFullToken} from '@/utils/auth'; import {ResponseResult} from '@/types'; export function authHeader(config: AxiosRequestConfig): AxiosRequestConfig { // if (UserModule.token) { const fullToken = getFullToken(); if (fullToken !== undefined) { config.headers.Authorization = getFullToken(); // 让每个请求携带自定义token 请根据实际情况自行修改 } return config; } export function authRejectFilter(error: any) { const response = error.response; let message = ''; if (typeof response === 'undefined') { message = error.message; Message({ message: '错误:' + message, type: 'error', duration: 5 * 1000, }); return Promise.reject(error); } else { message = response.data.error_description || response.data.message || response.data || '服务器资源访问出错'; if (response !== undefined) { const status = response.status; if (status === 401) { if (response.data.error === 'unauthorized') { Message({ message: '账号密码错误!', type: 'error', duration: 5 * 1000, }); return Promise.reject('error'); } MessageBox.confirm( '你已被登出,可以取消继续留在该页面,或者重新登录', '确定登出', { confirmButtonText: '重新登录', cancelButtonText: '取消', type: 'warning', }, ).then(() => { UserModule.FedLogOut().then(() => { location.reload(); // 为了重新实例化vue-router对象 避免bug }); }); const errorInfo: ResponseResult<any> = {code: response.status, message, success: false, data: response}; return Promise.reject(errorInfo); } else { // Message({ // message: '服务错误:' + message, // type: 'error', // duration: 5 * 1000, // }); const errorInfo: ResponseResult<any> = {code: response.status, message, success: false, data: response}; return Promise.reject(errorInfo); } } } } export function handlerCommonError(error: ResponseResult<any>): void { Message({ message: '错误:' + error.message, type: 'error', duration: 5 * 1000, }); } export function authFilter(response: AxiosResponse<any>): AxiosResponse<any> | 
Promise<AxiosResponse<any>> { const res = response.data; if (typeof res.success !== 'undefined' && !res.success) { return Promise.reject(res); } else { return response; } }
package cmd

// Configuration is the root configuration object, serialized to/from JSON.
// Struct tags: `json` names the serialized key; `env` names the environment
// variable (Terraform TF_VAR_* convention) associated with the field.
type Configuration struct {
	// Header carries metadata about the configuration itself.
	Header struct {
		Version     string `json:"version"`
		Environment string `json:"environment"`
		DataDir     string `json:"data_dir"`
	} `json:"header"`
	// Provider holds cloud-provider-specific settings (currently AWS only).
	Provider struct {
		Aws struct {
			AccessKey    string `json:"access_key" env:"TF_VAR_aws_access_key"`
			SecretKey    string `json:"secret_key" env:"TF_VAR_aws_secret_key"`
			Ami          string `json:"ami" env:"TF_VAR_aws_ami"`
			KeyName      string `json:"ssh_key" env:"TF_VAR_aws_ssh_key"`
			Region       string `json:"region"`
			SwarmNodes   string `json:"swarm_nodes" env:"TF_VAR_swarm_nodes"`
			SwarmManager string `json:"swarm_manager" env:"TF_VAR_swarm_manager"`
			TypeManager  string `json:"type_manager" env:"TF_VAR_aws_type_manager"`
			TypeNode     string `json:"type_node" env:"TF_VAR_aws_type_node"`
		} `json:"aws"`
	} `json:"provider"`
	// Component toggles optional stack components on or off.
	Component struct {
		Log        bool `json:"log"`
		Monitoring bool `json:"monitoring"`
		Sweady     bool `json:"sweady"`
	} `json:"component"`
}

// Init returns a Configuration with defaults: version v0.1.0 and all
// components enabled. Provider fields are left zero-valued for the caller
// (or environment) to fill in.
func Init() *Configuration {
	c := &Configuration{}

	c.Header.Version = "v0.1.0"

	c.Component.Log = true
	c.Component.Monitoring = true
	c.Component.Sweady = true

	return c
}