# junyango
###### \java\guitests\guihandles\event\EventCardHandle.java
``` java
/**
* Provides a handle to an event card in the {@code EventListPanel}.
*/
public class EventCardHandle extends NodeHandle<Node> {
private static final String ID_FIELD_ID = "#idEvent";
private static final String NAME_FIELD_ID = "#name";
private static final String VENUE_FIELD_ID = "#venue";
private static final String TIME_FIELD_ID = "#dateTime";
private final Label idLabel;
private final Label nameLabel;
private final Label venueLabel;
private final Label timeLabel;
public EventCardHandle(Node cardNode) {
super(cardNode);
this.idLabel = getChildNode(ID_FIELD_ID);
this.nameLabel = getChildNode(NAME_FIELD_ID);
this.venueLabel = getChildNode(VENUE_FIELD_ID);
this.timeLabel = getChildNode(TIME_FIELD_ID);
}
public String getId() {
return idLabel.getText();
}
public String getEventName() {
return nameLabel.getText();
}
public String getVenue() {
return venueLabel.getText();
}
public String getTime() {
return timeLabel.getText();
}
}
```
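
For context, a brief hedged illustration follows. The handle above exposes a card's label text, so GUI tests can compare it against the backing event; the helper `assertCardDisplaysEvent` used later in `EventCardTest` and `EventListPanelTest` is not collated here, so the sketch below only shows the kind of comparison such a helper might make (the project's actual implementation may differ).

``` java
// Hypothetical illustration only; not part of the collated code.
// Assumes the helper simply compares the handle's labels with the event's properties.
package seedu.address.ui.testutil;

import static org.junit.Assert.assertEquals;

import guitests.guihandles.event.EventCardHandle;
import seedu.address.model.event.ReadOnlyEvent;

public class EventCardAssertExample {
    /** Asserts that {@code actualCard} displays the details of {@code expectedEvent}. */
    public static void assertCardDisplaysEvent(ReadOnlyEvent expectedEvent, EventCardHandle actualCard) {
        assertEquals(expectedEvent.getName().toString(), actualCard.getEventName());
        assertEquals(expectedEvent.getTime().toString(), actualCard.getTime());
        assertEquals(expectedEvent.getAddress().toString(), actualCard.getVenue());
    }
}
```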
###### \java\guitests\guihandles\event\EventListPanelHandle.java
``` java
/**
* Provides a handle for {@code EventListPanel} containing the list of {@code EventCard}.
*/
public class EventListPanelHandle extends NodeHandle<ListView<EventCard>> {
public static final String EVENT_LIST_VIEW_ID = "#eventListView";
private Optional<EventCard> lastRememberedSelectedEventCard;
public EventListPanelHandle(ListView<EventCard> eventListPanelNode) {
super(eventListPanelNode);
}
/**
* Returns a handle to the selected {@code EventCardHandle}.
* A maximum of 1 item can be selected at any time.
* @throws AssertionError if no card is selected, or more than 1 card is selected.
*/
public EventCardHandle getHandleToSelectedCard() {
List<EventCard> eventList = getRootNode().getSelectionModel().getSelectedItems();
if (eventList.size() != 1) {
throw new AssertionError("Event list size expected 1.");
}
return new EventCardHandle(eventList.get(0).getRoot());
}
/**
* Returns the index of the selected card.
*/
public int getSelectedCardIndex() {
return getRootNode().getSelectionModel().getSelectedIndex();
}
/**
* Returns true if a card is currently selected.
*/
public boolean isAnyCardSelected() {
List<EventCard> selectedCardsList = getRootNode().getSelectionModel().getSelectedItems();
if (selectedCardsList.size() > 1) {
throw new AssertionError("Card list size expected 0 or 1.");
}
return !selectedCardsList.isEmpty();
}
/**
* Navigates the listview to display and select the event.
*/
public void navigateToCard(ReadOnlyEvent event) {
List<EventCard> cards = getRootNode().getItems();
Optional<EventCard> matchingCard = cards.stream().filter(card -> card.event.equals(event)).findFirst();
if (!matchingCard.isPresent()) {
throw new IllegalArgumentException("Event does not exist.");
}
guiRobot.interact(() -> {
getRootNode().scrollTo(matchingCard.get());
getRootNode().getSelectionModel().select(matchingCard.get());
});
guiRobot.pauseForHuman();
}
/**
     * Returns the event card handle of the event associated with the {@code index} in the list.
*/
public EventCardHandle getEventCardHandle(int index) {
return getEventCardHandle(getRootNode().getItems().get(index).event);
}
/**
* Returns the {@code EventCardHandle} of the specified {@code event} in the list.
*/
public EventCardHandle getEventCardHandle(ReadOnlyEvent event) {
Optional<EventCardHandle> handle = getRootNode().getItems().stream()
.filter(card -> card.event.equals(event))
.map(card -> new EventCardHandle(card.getRoot()))
.findFirst();
return handle.orElseThrow(() -> new IllegalArgumentException("Event does not exist."));
}
/**
* Selects the {@code EventCard} at {@code index} in the list.
*/
public void select(int index) {
getRootNode().getSelectionModel().select(index);
}
/**
* Remembers the selected {@code EventCard} in the list.
*/
public void rememberSelectedEventCard() {
List<EventCard> selectedItems = getRootNode().getSelectionModel().getSelectedItems();
if (selectedItems.size() == 0) {
lastRememberedSelectedEventCard = Optional.empty();
} else {
lastRememberedSelectedEventCard = Optional.of(selectedItems.get(0));
}
}
/**
* Returns true if the selected {@code EventCard} is different from the value remembered by the most recent
* {@code rememberSelectedEventCard()} call.
*/
public boolean isSelectedEventCardChanged() {
List<EventCard> selectedItems = getRootNode().getSelectionModel().getSelectedItems();
if (selectedItems.size() == 0) {
return lastRememberedSelectedEventCard.isPresent();
} else {
return !lastRememberedSelectedEventCard.isPresent()
|| !lastRememberedSelectedEventCard.get().equals(selectedItems.get(0));
}
}
/**
* Returns the size of the list.
*/
public int getListSize() {
return getRootNode().getItems().size();
}
}
```
###### \java\seedu\address\commons\events\ui\SwitchThemeEventTest.java
``` java
public class SwitchThemeEventTest {
@Test
public void createEvent_success() throws Exception {
BaseEvent event = new SwitchThemeEvent();
assertEquals("SwitchThemeEvent", event.toString());
}
}
```
###### \java\seedu\address\logic\commands\CommandTestUtil.java
``` java
/**
     * Updates {@code model}'s filtered list to show only the first event in the {@code model}'s address book.
*/
public static void showFirstEventOnly(Model model) {
ReadOnlyEvent event = model.getAddressBook().getEventList().get(0);
final String[] splitName = event.getName().getValue().split("\\s+");
model.updateFilteredEventsList(new EventNameContainsKeywordsPredicate(Arrays.asList(splitName[0])));
assert model.getFilteredEventList().size() == 1;
}
```
###### \java\seedu\address\logic\commands\event\AddEventCommandTest.java
``` java
public class AddEventCommandTest {
@Rule
public ExpectedException thrown = ExpectedException.none();
@Test
public void constructor_nullEvent_throwsNullPointerException() {
thrown.expect(NullPointerException.class);
new AddEventCommand(null);
}
@Test
public void execute_eventAcceptedByModel_addSuccessful() throws Exception {
ModelStubAcceptingEventAdded modelStub = new ModelStubAcceptingEventAdded();
Event validEvent = new EventBuilder().build();
CommandResult commandResult = getAddEventCommandForEvent(validEvent, modelStub).execute();
assertEquals(String.format(AddEventCommand.MESSAGE_SUCCESS, validEvent), commandResult.feedbackToUser);
assertEquals(Arrays.asList(validEvent), modelStub.eventsAdded);
}
@Test
public void execute_duplicateEvent_throwsCommandException() throws Exception {
ModelStub modelStub = new ModelStubThrowingDuplicateEventException();
Event validEvent = new EventBuilder().build();
thrown.expect(CommandException.class);
thrown.expectMessage(AddEventCommand.MESSAGE_DUPLICATE_EVENT);
getAddEventCommandForEvent(validEvent, modelStub).execute();
}
@Test
public void equals() {
Event hack = new EventBuilder().withName("Hack").build();
Event test = new EventBuilder().withName("Test").build();
AddEventCommand addHackCommand = new AddEventCommand(hack);
AddEventCommand addTestCommand = new AddEventCommand(test);
// same object -> returns true
assertTrue(addHackCommand.equals(addHackCommand));
// same values -> returns true
AddEventCommand addHackCommandCopy = new AddEventCommand(hack);
assertTrue(addHackCommand.equals(addHackCommandCopy));
// different types -> returns false
assertFalse(addHackCommand.equals(1));
// null -> returns false
assertFalse(addHackCommand.equals(null));
// different event -> returns false
assertFalse(addHackCommand.equals(addTestCommand));
}
/**
* Generates a new AddEventCommand with the details of the given event.
*/
private AddEventCommand getAddEventCommandForEvent(Event event, Model model) {
AddEventCommand command = new AddEventCommand(event);
command.setData(model, new CommandHistory(), new UndoRedoStack());
return command;
}
/**
     * A Model stub that always throws a DuplicateEventException when trying to add an event.
*/
private class ModelStubThrowingDuplicateEventException extends ModelStub {
@Override
public void addEvent(ReadOnlyEvent event) throws DuplicateEventException {
throw new DuplicateEventException();
}
@Override
public ReadOnlyAddressBook getAddressBook() {
return new AddressBook();
}
}
/**
     * A Model stub that always accepts the event being added.
*/
private class ModelStubAcceptingEventAdded extends ModelStub {
final ArrayList<Event> eventsAdded = new ArrayList<>();
@Override
public void addEvent(ReadOnlyEvent event) throws DuplicateEventException {
eventsAdded.add(new Event(event));
}
@Override
public ReadOnlyAddressBook getAddressBook() {
return new AddressBook();
}
}
}
```
###### \java\seedu\address\logic\commands\event\DeleteEventCommandTest.java
``` java
/**
* Contains integration tests (interaction with the Model) and unit tests for {@code DeleteEventCommand}.
*/
public class DeleteEventCommandTest {
private Model model = new ModelManager(getTypicalAddressBook(), new UserPrefs());
@Test
public void execute_validIndexUnfilteredList_success() throws Exception {
ReadOnlyEvent eventToDelete = model.getFilteredEventList().get(INDEX_FIRST_PERSON.getZeroBased());
DeleteEventCommand deleteEventCommand = prepareCommand(INDEX_FIRST_PERSON);
String expectedMessage = String.format(DeleteEventCommand.MESSAGE_DELETE_EVENT_SUCCESS, eventToDelete);
ModelManager expectedModel = new ModelManager(model.getAddressBook(), new UserPrefs());
expectedModel.deleteEvent(eventToDelete);
assertCommandSuccess(deleteEventCommand, model, expectedMessage, expectedModel);
}
@Test
public void execute_validIndexFilteredList_success() throws Exception {
showFirstEventOnly(model);
ReadOnlyEvent eventToDelete = model.getFilteredEventList().get(INDEX_FIRST_PERSON.getZeroBased());
DeleteEventCommand deleteEventCommand = prepareCommand(INDEX_FIRST_PERSON);
String expectedMessage = String.format(DeleteEventCommand.MESSAGE_DELETE_EVENT_SUCCESS, eventToDelete);
Model expectedModel = new ModelManager(model.getAddressBook(), new UserPrefs());
expectedModel.deleteEvent(eventToDelete);
showNoEvent(expectedModel);
assertCommandSuccess(deleteEventCommand, model, expectedMessage, expectedModel);
}
@Test
public void execute_invalidIndexUnfilteredList_throwsCommandException() throws Exception {
Index outOfBoundIndex = Index.fromOneBased(model.getFilteredEventList().size() + 1);
DeleteEventCommand deleteEventCommand = prepareCommand(outOfBoundIndex);
assertCommandFailure(deleteEventCommand, model, Messages.MESSAGE_INVALID_EVENT_DISPLAYED_INDEX);
}
@Test
public void equals() {
DeleteEventCommand deleteFirstCommand = new DeleteEventCommand(INDEX_FIRST_PERSON);
DeleteEventCommand deleteSecondCommand = new DeleteEventCommand(INDEX_SECOND_PERSON);
// same object -> returns true
assertTrue(deleteFirstCommand.equals(deleteFirstCommand));
// same values -> returns true
DeleteEventCommand deleteFirstCommandCopy = new DeleteEventCommand(INDEX_FIRST_PERSON);
assertTrue(deleteFirstCommand.equals(deleteFirstCommandCopy));
// different types -> returns false
assertFalse(deleteFirstCommand.equals(1));
// null -> returns false
assertFalse(deleteFirstCommand.equals(null));
// different event -> returns false
assertFalse(deleteFirstCommand.equals(deleteSecondCommand));
}
/**
* Returns a {@code DeleteEventCommand} with the parameter {@code index}.
*/
private DeleteEventCommand prepareCommand(Index index) {
DeleteEventCommand deleteEventCommand = new DeleteEventCommand(index);
deleteEventCommand.setData(model, new CommandHistory(), new UndoRedoStack());
return deleteEventCommand;
}
/**
* Updates {@code model}'s filtered list to show no one.
*/
private void showNoEvent(Model model) {
model.updateFilteredEventsList(p -> false);
assert model.getFilteredEventList().isEmpty();
}
}
```
###### \java\seedu\address\logic\commands\event\EditEventCommandTest.java
``` java
/**
* Contains integration tests (interaction with the Model) and unit tests for EditEventCommand.
*/
public class EditEventCommandTest {
private Model model = new ModelManager(getTypicalAddressBook(), new UserPrefs());
@Test
public void execute_allFieldsSpecifiedUnfilteredList_success() throws Exception {
Event editedEvent = new EventBuilder().build();
EditEventCommand.EditEventDescriptor descriptor = new EditEventDescriptorBuilder(editedEvent).build();
EditEventCommand editEventCommand = prepareCommand(INDEX_FIRST_PERSON, descriptor);
String expectedMessage = String.format(EditEventCommand.MESSAGE_EDIT_EVENT_SUCCESS, editedEvent);
Model expectedModel = new ModelManager(new AddressBook(model.getAddressBook()), new UserPrefs());
expectedModel.updateEvent(model.getFilteredEventList().get(0), editedEvent);
assertCommandSuccess(editEventCommand, model, expectedMessage, expectedModel);
}
@Test
public void execute_someFieldsSpecifiedUnfilteredList_success() throws Exception {
Index indexLastEvent = Index.fromOneBased(model.getFilteredEventList().size());
ReadOnlyEvent lastEvent = model.getFilteredEventList().get(indexLastEvent.getZeroBased());
EventBuilder eventInList = new EventBuilder(lastEvent);
Event editedEvent = eventInList.withName(VALID_NAME_EVENT1).withDateTime(VALID_DATE_EVENT1).build();
EditEventCommand.EditEventDescriptor descriptor = new EditEventDescriptorBuilder().withName(VALID_NAME_EVENT1)
.withTime(VALID_DATE_EVENT1).build();
EditEventCommand editEventCommand = prepareCommand(indexLastEvent, descriptor);
String expectedMessage = String.format(EditEventCommand.MESSAGE_EDIT_EVENT_SUCCESS, editedEvent);
Model expectedModel = new ModelManager(new AddressBook(model.getAddressBook()), new UserPrefs());
expectedModel.updateEvent(lastEvent, editedEvent);
assertCommandSuccess(editEventCommand, model, expectedMessage, expectedModel);
}
@Test
public void execute_noFieldSpecifiedUnfilteredList_success() {
EditEventCommand editEventCommand =
prepareCommand(INDEX_FIRST_PERSON, new EditEventCommand.EditEventDescriptor());
ReadOnlyEvent editedEvent = model.getFilteredEventList().get(INDEX_FIRST_PERSON.getZeroBased());
String expectedMessage = String.format(EditEventCommand.MESSAGE_EDIT_EVENT_SUCCESS, editedEvent);
Model expectedModel = new ModelManager(new AddressBook(model.getAddressBook()), new UserPrefs());
assertCommandSuccess(editEventCommand, model, expectedMessage, expectedModel);
}
@Test
public void execute_filteredList_success() throws Exception {
showFirstEventOnly(model);
ReadOnlyEvent eventInFilteredList = model.getFilteredEventList().get(INDEX_FIRST_PERSON.getZeroBased());
Event editedEvent = new EventBuilder(eventInFilteredList).withName(VALID_NAME_EVENT1).build();
EditEventCommand editEventCommand = prepareCommand(INDEX_FIRST_PERSON,
new EditEventDescriptorBuilder().withName(VALID_NAME_EVENT1).build());
String expectedMessage = String.format(EditEventCommand.MESSAGE_EDIT_EVENT_SUCCESS, editedEvent);
Model expectedModel = new ModelManager(new AddressBook(model.getAddressBook()), new UserPrefs());
expectedModel.updateEvent(model.getFilteredEventList().get(0), editedEvent);
assertCommandSuccess(editEventCommand, model, expectedMessage, expectedModel);
}
@Test
public void execute_duplicateEventUnfilteredList_failure() {
Event firstEvent = new Event(model.getFilteredEventList().get(INDEX_FIRST_PERSON.getZeroBased()));
EditEventCommand.EditEventDescriptor descriptor = new EditEventDescriptorBuilder(firstEvent).build();
EditEventCommand editEventCommand = prepareCommand(INDEX_SECOND_PERSON, descriptor);
assertCommandFailure(editEventCommand, model, EditEventCommand.MESSAGE_DUPLICATE_EVENT);
}
@Test
public void execute_duplicateEventFilteredList_failure() {
showFirstEventOnly(model);
        // edit event in filtered list into a duplicate in the address book
ReadOnlyEvent eventInList = model.getAddressBook().getEventList().get(INDEX_SECOND_PERSON.getZeroBased());
EditEventCommand editEventCommand = prepareCommand(INDEX_FIRST_PERSON,
new EditEventDescriptorBuilder(eventInList).build());
assertCommandFailure(editEventCommand, model, EditEventCommand.MESSAGE_DUPLICATE_EVENT);
}
@Test
public void execute_invalidEventIndexUnfilteredList_failure() {
Index outOfBoundIndex = Index.fromOneBased(model.getFilteredEventList().size() + 1);
EditEventCommand.EditEventDescriptor descriptor =
new EditEventDescriptorBuilder().withName(VALID_NAME_EVENT1).build();
EditEventCommand editEventCommand = prepareCommand(outOfBoundIndex, descriptor);
assertCommandFailure(editEventCommand, model, Messages.MESSAGE_INVALID_EVENT_DISPLAYED_INDEX);
}
/**
* Edit filtered list where index is larger than size of filtered list,
* but smaller than size of address book
*/
@Test
public void execute_invalidEventIndexFilteredList_failure() {
showFirstEventOnly(model);
Index outOfBoundIndex = INDEX_SECOND_PERSON;
// ensures that outOfBoundIndex is still in bounds of address book list
assertTrue(outOfBoundIndex.getZeroBased() < model.getAddressBook().getEventList().size());
EditEventCommand editEventCommand = prepareCommand(outOfBoundIndex,
new EditEventDescriptorBuilder().withName(VALID_NAME_EVENT1).build());
assertCommandFailure(editEventCommand, model, Messages.MESSAGE_INVALID_EVENT_DISPLAYED_INDEX);
}
@Test
public void equals() {
final EditEventCommand standardCommand = new EditEventCommand(INDEX_FIRST_PERSON, DESC_EVENT1);
// same values -> returns true
EditEventCommand.EditEventDescriptor copyDescriptor = new EditEventCommand.EditEventDescriptor(DESC_EVENT1);
EditEventCommand commandWithSameValues = new EditEventCommand(INDEX_FIRST_PERSON, copyDescriptor);
assertTrue(standardCommand.equals(commandWithSameValues));
// same object -> returns true
assertTrue(standardCommand.equals(standardCommand));
// null -> returns false
assertFalse(standardCommand.equals(null));
// different types -> returns false
assertFalse(standardCommand.equals(new ClearCommand()));
// different index -> returns false
assertFalse(standardCommand.equals(new EditEventCommand(INDEX_SECOND_PERSON, DESC_EVENT1)));
// different descriptor -> returns false
assertFalse(standardCommand.equals(new EditEventCommand(INDEX_FIRST_PERSON, DESC_EVENT2)));
}
/**
* Returns an {@code EditEventCommand} with parameters {@code index} and {@code descriptor}
*/
private EditEventCommand prepareCommand(Index index, EditEventCommand.EditEventDescriptor descriptor) {
EditEventCommand editEventCommand = new EditEventCommand(index, descriptor);
editEventCommand.setData(model, new CommandHistory(), new UndoRedoStack());
return editEventCommand;
}
}
```
###### \java\seedu\address\logic\commands\event\EditEventDescriptorTest.java
``` java
public class EditEventDescriptorTest {
@Test
public void equals() {
// same values -> returns true
EditEventCommand.EditEventDescriptor descriptorWithSameValues =
new EditEventCommand.EditEventDescriptor(DESC_EVENT1);
assertTrue(DESC_EVENT1.equals(descriptorWithSameValues));
// same object -> returns true
assertTrue(DESC_EVENT1.equals(DESC_EVENT1));
// null -> returns false
assertFalse(DESC_EVENT1.equals(null));
// different types -> returns false
assertFalse(DESC_EVENT1.equals(5));
// different values -> returns false
assertFalse(DESC_EVENT1.equals(DESC_EVENT2));
// different name -> returns false
EditEventCommand.EditEventDescriptor editedEvent1 =
new EditEventDescriptorBuilder(DESC_EVENT1).withName(VALID_NAME_EVENT2).build();
assertFalse(DESC_EVENT1.equals(editedEvent1));
// different Time -> returns false
editedEvent1 = new EditEventDescriptorBuilder(DESC_EVENT1).withTime(VALID_DATE_EVENT2).build();
assertFalse(DESC_EVENT1.equals(editedEvent1));
// different address -> returns false
editedEvent1 = new EditEventDescriptorBuilder(DESC_EVENT1).withAddress(VALID_VENUE_EVENT2).build();
assertFalse(DESC_EVENT1.equals(editedEvent1));
}
}
```
###### \java\seedu\address\logic\commands\event\ListEventCommandTest.java
``` java
/**
 * Contains integration tests (interaction with the Model) and unit tests for ListEventCommand.
*/
public class ListEventCommandTest {
private Model model;
private Model expectedModel;
private ListEventCommand listEventCommand;
@Before
public void setUp() {
model = new ModelManager(getTypicalAddressBook(), new UserPrefs());
expectedModel = new ModelManager(model.getAddressBook(), new UserPrefs());
listEventCommand = new ListEventCommand();
listEventCommand.setData(model, new CommandHistory(), new UndoRedoStack());
}
@Test
public void execute_listIsNotFiltered_showsSameList() {
assertCommandSuccess(listEventCommand, model, ListEventCommand.MESSAGE_EVENT_SUCCESS, expectedModel);
}
@Test
public void execute_listIsFiltered_showsEverything() {
showFirstEventOnly(model);
assertCommandSuccess(listEventCommand, model, ListEventCommand.MESSAGE_EVENT_SUCCESS, expectedModel);
}
}
```
###### \java\seedu\address\logic\commands\SwitchThemeTest.java
``` java
public class SwitchThemeTest {
@Rule
public final EventsCollectorRule eventsCollectorRule = new EventsCollectorRule();
@Test
public void execute_switch_success() {
CommandResult result = new SwitchThemeCommand().execute();
assertEquals(MESSAGE_SUCCESS, result.feedbackToUser);
assertTrue(eventsCollectorRule.eventsCollector.getMostRecent() instanceof SwitchThemeEvent);
assertTrue(eventsCollectorRule.eventsCollector.getSize() == 1);
}
}
```
###### \java\seedu\address\logic\parser\AddressBookParserTest.java
``` java
@Test
public void parseCommand_addEvent() throws Exception {
Event event = new EventBuilder().build();
AddEventCommand command = (AddEventCommand) parser.parseCommand(EventUtil.getAddEvent(event));
assertEquals(new AddEventCommand(event), command);
}
@Test
public void parseCommand_deleteEvent() throws Exception {
DeleteEventCommand command = (DeleteEventCommand) parser.parseCommand(
DeleteEventCommand.COMMAND_WORD + " " + INDEX_FIRST_PERSON.getOneBased());
assertEquals(new DeleteEventCommand(INDEX_FIRST_PERSON), command);
}
@Test
public void parseCommand_editEvent() throws Exception {
Event event = new EventBuilder().build();
EditEventCommand.EditEventDescriptor descriptor = new EditEventDescriptorBuilder(event).build();
EditEventCommand command = (EditEventCommand) parser.parseCommand(EditEventCommand.COMMAND_WORD + " "
+ INDEX_FIRST_PERSON.getOneBased() + " " + EventUtil.getEventDetails(event));
assertEquals(new EditEventCommand(INDEX_FIRST_PERSON, descriptor), command);
}
```
###### \java\seedu\address\logic\parser\AddressBookParserTest.java
``` java
@Test
public void parseCommand_theme() throws Exception {
assertTrue(parser.parseCommand(SwitchThemeCommand.COMMAND_WORD) instanceof SwitchThemeCommand);
assertTrue(parser.parseCommand(SwitchThemeCommand.COMMAND_WORD + " 3") instanceof SwitchThemeCommand);
}
```
###### \java\seedu\address\logic\parser\AddressBookParserTest.java
``` java
@Test
public void parseCommand_listEvents() throws Exception {
assertTrue(parser.parseCommand(ListEventCommand.COMMAND_WORD) instanceof ListEventCommand);
assertTrue(parser.parseCommand(ListEventCommand.COMMAND_WORD + " 3") instanceof ListEventCommand);
}
```
###### \java\seedu\address\logic\parser\event\AddEventCommandParserTest.java
``` java
public class AddEventCommandParserTest {
private final AddEventParser parser = new AddEventParser();
@Test
public void parse_allFieldsPresent_success() {
Event expectedEvent = new EventBuilder().withName(VALID_NAME_EVENT1).withDateTime(VALID_DATE_EVENT1)
.withAddress(VALID_VENUE_EVENT1).withReminder().build();
// multiple names - last name accepted
assertParseSuccess(parser, AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT2 + NAME_DESC_EVENT1
+ DATE_DESC_EVENT1 + VENUE_DESC_EVENT1, new AddEventCommand(expectedEvent));
        // multiple dates - last date accepted
assertParseSuccess(parser, AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + DATE_DESC_EVENT2
+ DATE_DESC_EVENT1 + VENUE_DESC_EVENT1, new AddEventCommand(expectedEvent));
// multiple addresses - last address accepted
assertParseSuccess(parser, AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + DATE_DESC_EVENT1
+ VENUE_DESC_EVENT2 + VENUE_DESC_EVENT1, new AddEventCommand(expectedEvent));
}
@Test
public void parse_compulsoryFieldMissing_failure() {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddEventCommand.MESSAGE_USAGE);
// missing name prefix
assertParseFailure(parser, AddEventCommand.COMMAND_WORD + VALID_NAME_EVENT1 + DATE_DESC_EVENT1
+ VENUE_DESC_EVENT1, expectedMessage);
        // missing date prefix
assertParseFailure(parser, AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + VALID_DATE_EVENT1
+ VENUE_DESC_EVENT1, expectedMessage);
        // missing date prefix (person descriptors supplied instead of event descriptors)
assertParseFailure(parser, AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT2 + PHONE_DESC_BOB
+ VALID_EMAIL_BOB + ADDRESS_DESC_BOB, expectedMessage);
// missing address prefix
assertParseFailure(parser, AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + DATE_DESC_EVENT1
+ VALID_VENUE_EVENT1, expectedMessage);
// all prefixes missing
assertParseFailure(parser, AddEventCommand.COMMAND_WORD + VALID_NAME_EVENT1 + VALID_DATE_EVENT1
+ VALID_VENUE_EVENT1, expectedMessage);
}
}
```
###### \java\seedu\address\logic\parser\event\DeleteEventCommandParserTest.java
``` java
/**
* As we are only doing white-box testing, our test cases do not cover path variations
* outside of the DeleteEventCommand code. For example, inputs "1" and "1 abc" take the
* same path through the DeleteEventCommand, and therefore we test only one of them.
* The path variation for those two cases occur inside the ParserUtil, and
* therefore should be covered by the ParserUtilTest.
*/
public class DeleteEventCommandParserTest {
private DeleteEventParser parser = new DeleteEventParser();
@Test
public void parse_validArgs_returnsDeleteEventCommand() {
assertParseSuccess(parser, "1", new DeleteEventCommand(INDEX_FIRST_PERSON));
}
@Test
public void parse_invalidArgs_throwsParseException() {
assertParseFailure(parser, "a", String.format
(MESSAGE_INVALID_COMMAND_FORMAT, DeleteEventCommand.MESSAGE_USAGE));
}
}
```
###### \java\seedu\address\logic\parser\event\EditEventCommandParserTest.java
``` java
public class EditEventCommandParserTest {
private static final String MESSAGE_INVALID_FORMAT =
String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditEventCommand.MESSAGE_USAGE);
private EditEventParser parser = new EditEventParser();
@Test
public void parse_missingParts_failure() {
// no index specified
assertParseFailure(parser, VALID_NAME_EVENT1, MESSAGE_INVALID_FORMAT);
// no field specified
assertParseFailure(parser, "1", EditEventCommand.MESSAGE_NOT_EDITED);
// no index and no field specified
assertParseFailure(parser, "", MESSAGE_INVALID_FORMAT);
}
@Test
public void parse_invalidPreamble_failure() {
// negative index
assertParseFailure(parser, "-5" + NAME_DESC_EVENT1, MESSAGE_INVALID_FORMAT);
// zero index
assertParseFailure(parser, "0" + NAME_DESC_EVENT1, MESSAGE_INVALID_FORMAT);
// invalid arguments being parsed as preamble
assertParseFailure(parser, "1 some random string", MESSAGE_INVALID_FORMAT);
// invalid prefix being parsed as preamble
assertParseFailure(parser, "1 i/ string", MESSAGE_INVALID_FORMAT);
}
@Test
public void parse_invalidValue_failure() {
assertParseFailure(parser, "1" + INVALID_NAME_DESC,
PropertyManager.getPropertyConstraintMessage("n")); // invalid name
assertParseFailure(parser, "1" + INVALID_DATE_DESC,
PropertyManager.getPropertyConstraintMessage("dt")); // invalid date time
assertParseFailure(parser, "1" + INVALID_ADDRESS_DESC,
PropertyManager.getPropertyConstraintMessage("a")); // invalid address
// multiple invalid values, but only the first invalid value is captured
assertParseFailure(parser, "1" + INVALID_NAME_DESC + INVALID_DATE_DESC + VALID_VENUE_EVENT2,
PropertyManager.getPropertyConstraintMessage("n"));
}
@Test
public void parse_allFieldsSpecified_success() {
Index targetIndex = INDEX_SECOND_PERSON;
String userInput = targetIndex.getOneBased() + DATE_DESC_EVENT1 + VENUE_DESC_EVENT1 + NAME_DESC_EVENT1;
EditEventCommand.EditEventDescriptor descriptor = new EditEventDescriptorBuilder().withName(VALID_NAME_EVENT1)
.withTime(VALID_DATE_EVENT1).withAddress(VALID_VENUE_EVENT1).build();
EditEventCommand expectedCommand = new EditEventCommand(targetIndex, descriptor);
assertParseSuccess(parser, userInput, expectedCommand);
}
@Test
public void parse_someFieldsSpecified_success() {
Index targetIndex = INDEX_FIRST_PERSON;
String userInput = targetIndex.getOneBased() + DATE_DESC_EVENT2 + VENUE_DESC_EVENT1;
EditEventCommand.EditEventDescriptor descriptor = new EditEventDescriptorBuilder().withTime(VALID_DATE_EVENT2)
.withAddress(VALID_VENUE_EVENT1).build();
EditEventCommand expectedCommand = new EditEventCommand(targetIndex, descriptor);
assertParseSuccess(parser, userInput, expectedCommand);
}
@Test
public void parse_oneFieldSpecified_success() {
// name
Index targetIndex = INDEX_THIRD_PERSON;
String userInput = targetIndex.getOneBased() + NAME_DESC_EVENT1;
EditEventDescriptor descriptor = new EditEventDescriptorBuilder().withName(VALID_NAME_EVENT1).build();
EditEventCommand expectedCommand = new EditEventCommand(targetIndex, descriptor);
assertParseSuccess(parser, userInput, expectedCommand);
// date/time
userInput = targetIndex.getOneBased() + DATE_DESC_EVENT1;
descriptor = new EditEventDescriptorBuilder().withTime(VALID_DATE_EVENT1).build();
expectedCommand = new EditEventCommand(targetIndex, descriptor);
assertParseSuccess(parser, userInput, expectedCommand);
// address
userInput = targetIndex.getOneBased() + VENUE_DESC_EVENT1;
descriptor = new EditEventDescriptorBuilder().withAddress(VALID_VENUE_EVENT1).build();
expectedCommand = new EditEventCommand(targetIndex, descriptor);
assertParseSuccess(parser, userInput, expectedCommand);
}
}
```
###### \java\seedu\address\logic\parser\person\FbCommandParserTest.java
``` java
public class FbCommandParserTest {
private final FbCommandParser parser = new FbCommandParser();
@Test
public void parse_validIndex_checkCorrectness() {
Command expected = new FbCommand(Index.fromOneBased(1));
assertParseSuccess(parser, " 1 ", expected);
}
@Test
public void parse_invalidIndex_expectException() {
        String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, MESSAGE_USAGE);
assertParseFailure(parser, " -1 ", expectedMessage);
}
}
```
###### \java\seedu\address\logic\parser\person\GMapCommandParserTest.java
``` java
public class GMapCommandParserTest {
private final GMapCommandParser parser = new GMapCommandParser();
@Test
public void parse_validIndex_checkCorrectness() {
Command expected = new GMapCommand(Index.fromOneBased(1));
assertParseSuccess(parser, " 1 ", expected);
}
@Test
public void parse_invalidIndex_expectException() {
        String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, MESSAGE_USAGE);
assertParseFailure(parser, " -1 ", expectedMessage);
}
}
```
###### \java\seedu\address\model\AddressBookTest.java
``` java
@Test
public void getEventList_modifyList_throwsUnsupportedOperationException() {
thrown.expect(UnsupportedOperationException.class);
addressBook.getEventList().remove(0);
}
@Test
public void removeEvent_eventNotFound_expectException() throws Exception {
thrown.expect(EventNotFoundException.class);
AddressBook addressBook = getTypicalAddressBook();
addressBook.removeEvent(EVENT1);
}
@Test
public void getTagList_modifyList_throwsUnsupportedOperationException() {
thrown.expect(UnsupportedOperationException.class);
addressBook.getTagList().remove(0);
}
/**
     * A stub of {@link ReadOnlyAddressBook} whose persons, events and tags lists can violate interface constraints.
*/
private static class AddressBookStub implements ReadOnlyAddressBook {
private final ObservableList<ReadOnlyPerson> persons = FXCollections.observableArrayList();
private final ObservableList<Tag> tags = FXCollections.observableArrayList();
private final ObservableList<ReadOnlyEvent> events = FXCollections.observableArrayList();
AddressBookStub(Collection<? extends ReadOnlyPerson> persons, Collection<? extends ReadOnlyEvent> events,
Collection<? extends Tag> tags) {
this.persons.setAll(persons);
this.tags.setAll(tags);
this.events.setAll(events);
}
@Override
public ObservableList<ReadOnlyPerson> getPersonList() {
return persons;
}
@Override
public ObservableList<ReadOnlyEvent> getEventList() {
return events;
}
@Override
public ObservableList<Tag> getTagList() {
return tags;
}
}
}
```
###### \java\seedu\address\model\event\EventNotFoundExceptionTest.java
``` java
public class EventNotFoundExceptionTest {
private ExpectedException thrown = ExpectedException.none();
@Test
public void createException_toString_checkCorrectness() throws Exception {
thrown.expect(EventNotFoundException.class);
Exception exception = new EventNotFoundException("Some message here");
assertEquals("Some message here", exception.toString());
}
}
```
###### \java\seedu\address\model\event\EventTest.java
``` java
public class EventTest {
private static Name name;
private static DateTime dateTime;
private static Address address;
private static Set<Property> properties;
private static List<Reminder> reminders;
@BeforeClass
public static void setUp() throws Exception {
PropertyManager.initializePropertyManager();
name = new Name(VALID_NAME_EVENT1);
dateTime = new DateTime(VALID_DATE_EVENT1);
address = new Address(VALID_ADDRESS_AMY);
properties = new HashSet<>();
properties.add(name);
properties.add(dateTime);
properties.add(address);
reminders = new ArrayList<>();
}
@Test
public void createEvent_preDefinedFieldsPresent_checkCorrectness() throws Exception {
Event event = new Event(name, dateTime, address, Collections.emptyList());
assertNotNull(event);
assertEquals(name, event.getName());
assertEquals(dateTime, event.getTime());
assertEquals(address, event.getAddress());
assertEquals(0, event.getReminders().size());
assertEquals(3, event.getProperties().size());
}
```
###### \java\seedu\address\model\event\EventTest.java
``` java
@Test
public void equal_twoSameStateEvent_checkCorrectness() throws Exception {
Event event = new Event(name, dateTime, address, new ArrayList<>());
Event another = new Event(name, dateTime, address, new ArrayList<>());
assertEquals(event, another);
Event copied = new Event(event);
assertEquals(event, copied);
}
```
###### \java\seedu\address\model\event\UniqueEventListTest.java
``` java
public class UniqueEventListTest {
@Rule
public ExpectedException thrown = ExpectedException.none();
@Test
public void asObservableList_modifyList_throwsUnsupportedOperationException() {
UniqueEventList uniqueEventList = new UniqueEventList();
thrown.expect(UnsupportedOperationException.class);
uniqueEventList.asObservableList().remove(0);
}
```
###### \java\seedu\address\model\ModelManagerTest.java
``` java
@Test
public void getFilteredEventList_modifyList_throwsUnsupportedOperationException() {
ModelManager modelManager = new ModelManager();
thrown.expect(UnsupportedOperationException.class);
modelManager.getFilteredEventList().remove(0);
}
```
###### \java\seedu\address\model\ModelManagerTest.java
``` java
@Test
    public void addPerson_successfullyAddPerson() throws Exception {
AddressBook addressBook = getTypicalAddressBook();
UserPrefs userPrefs = new UserPrefs();
ModelManager modelManager = new ModelManager(addressBook, userPrefs);
ObservableList<ReadOnlyPerson> persons = modelManager.getAddressBook().getPersonList();
int originalPersonListSize = persons.size();
modelManager.addPerson(TypicalPersons.HOON);
int newPersonListSize = modelManager.getAddressBook().getPersonList().size();
assertEquals(1, newPersonListSize - originalPersonListSize);
}
@Test
public void sortEventList_successfullySortEvent() throws Exception {
AddressBook addressBook = getTypicalAddressBook();
UserPrefs userPrefs = new UserPrefs();
ModelManager modelManager1 = new ModelManager(addressBook, userPrefs);
modelManager1.addEvent(EVENT2);
modelManager1.addEvent(EVENT1);
ModelManager modelManager2 = new ModelManager(addressBook, userPrefs);
modelManager2.addEvent(EVENT1);
modelManager2.addEvent(EVENT2);
assertEquals(modelManager1, modelManager2);
}
@Test
public void addEvent_successfullyAddEvent() throws Exception {
AddressBook addressBook = getTypicalAddressBook();
UserPrefs userPrefs = new UserPrefs();
ModelManager modelManager = new ModelManager(addressBook, userPrefs);
ObservableList<ReadOnlyEvent> events = modelManager.getAddressBook().getEventList();
int originalEventListSize = events.size();
modelManager.addEvent(EVENT1);
int newEventListSize = modelManager.getAddressBook().getEventList().size();
assertEquals(1, newEventListSize - originalEventListSize);
}
@Test
    public void removePerson_successfullyRemovePerson() throws Exception {
AddressBook addressBook = getTypicalAddressBook();
UserPrefs userPrefs = new UserPrefs();
ModelManager modelManager = new ModelManager(addressBook, userPrefs);
ObservableList<ReadOnlyPerson> persons = modelManager.getAddressBook().getPersonList();
int originalPersonListSize = persons.size();
modelManager.deletePerson(persons.get(1));
int newPersonListSize = modelManager.getAddressBook().getPersonList().size();
assertEquals(1, originalPersonListSize - newPersonListSize);
}
@Test
public void removeEvent_successfullyRemoveEvent() throws Exception {
AddressBook addressBook = getTypicalAddressBook();
UserPrefs userPrefs = new UserPrefs();
ModelManager modelManager = new ModelManager(addressBook, userPrefs);
ObservableList<ReadOnlyEvent> events = modelManager.getAddressBook().getEventList();
int originalEventListSize = events.size();
modelManager.addEvent(EVENT1);
modelManager.addEvent(EVENT2);
modelManager.deleteEvent(events.get(1));
int newEventListSize = modelManager.getAddressBook().getEventList().size();
assertEquals(1, newEventListSize - originalEventListSize);
}
@Test
public void addEvent_successfullyAddReminder() throws Exception {
AddressBook addressBook = getTypicalAddressBook();
UserPrefs userPrefs = new UserPrefs();
ModelManager modelManager = new ModelManager(addressBook, userPrefs);
ObservableList<ReadOnlyEvent> events = modelManager.getAddressBook().getEventList();
modelManager.addEvent(EVENT1);
Reminder r = new Reminder((Event) EVENT1, "You have an event today");
events.get(0).addReminder(r);
assertEquals(1, events.get(0).getReminders().size());
}
```
###### \java\seedu\address\model\person\PersonTest.java
``` java
@Test
public void hashCode_checkCorrectness() {
Person person = new Person(name, phone, email, address, Collections.emptySet());
assertNotNull(person);
assertEquals(Objects.hash(person.nameProperty(), person.phoneProperty(), person.emailProperty(),
person.addressProperty(), person.tagProperty()), person.hashCode());
}
}
```
###### \java\seedu\address\model\property\DateTimeTest.java
``` java
public class DateTimeTest {
@BeforeClass
public static void setUp() {
PropertyManager.initializePropertyManager();
}
@Test
public void isValidTime() {
// invalid time
assertFalse(DateTime.isValidTime("")); // empty string
assertFalse(DateTime.isValidTime("some random staff")); // unrelated string
// valid time
assertTrue(DateTime.isValidTime("25122015 08:30"));
assertTrue(DateTime.isValidTime("14122016 13:30"));
assertTrue(DateTime.isValidTime("09121924 23:30"));
}
```
###### \java\seedu\address\model\property\EventNameContainsKeywordsPredicateTest.java
``` java
public class EventNameContainsKeywordsPredicateTest {
@Test
public void equals() {
List<String> firstPredicateKeywordList = Collections.singletonList("first");
List<String> secondPredicateKeywordList = Arrays.asList("first", "second");
EventNameContainsKeywordsPredicate firstPredicate =
new EventNameContainsKeywordsPredicate(firstPredicateKeywordList);
EventNameContainsKeywordsPredicate secondPredicate =
new EventNameContainsKeywordsPredicate(secondPredicateKeywordList);
// same object -> returns true
assertTrue(firstPredicate.equals(firstPredicate));
// same values -> returns true
EventNameContainsKeywordsPredicate firstPredicateCopy =
new EventNameContainsKeywordsPredicate(firstPredicateKeywordList);
assertTrue(firstPredicate.equals(firstPredicateCopy));
// different types -> returns false
assertFalse(firstPredicate.equals(1));
// null -> returns false
assertFalse(firstPredicate.equals(null));
        // different keyword list -> returns false
assertFalse(firstPredicate.equals(secondPredicate));
}
@Test
public void test_nameContainsKeywords_returnsTrue() {
// One keyword
EventNameContainsKeywordsPredicate predicate =
new EventNameContainsKeywordsPredicate(Collections.singletonList("Alice"));
assertTrue(predicate.test(new EventBuilder().withName("Alice Bob").build()));
// Multiple keywords
predicate = new EventNameContainsKeywordsPredicate(Arrays.asList("Alice", "Bob"));
assertTrue(predicate.test(new EventBuilder().withName("Alice Bob").build()));
// Only one matching keyword
predicate = new EventNameContainsKeywordsPredicate(Arrays.asList("Bob", "Carol"));
assertTrue(predicate.test(new EventBuilder().withName("Alice Carol").build()));
// Mixed-case keywords
predicate = new EventNameContainsKeywordsPredicate(Arrays.asList("aLIce", "bOB"));
assertTrue(predicate.test(new EventBuilder().withName("Alice Bob").build()));
}
@Test
public void test_nameDoesNotContainKeywords_returnsFalse() {
// Zero keywords
EventNameContainsKeywordsPredicate predicate = new EventNameContainsKeywordsPredicate(Collections.emptyList());
assertFalse(predicate.test(new EventBuilder().withName("Alice").build()));
// Non-matching keyword
predicate = new EventNameContainsKeywordsPredicate(Arrays.asList("Carol"));
assertFalse(predicate.test(new EventBuilder().withName("Alice Bob").build()));
}
}
```
###### \java\seedu\address\model\reminder\exceptions\DuplicateReminderExceptionTest.java
``` java
public class DuplicateReminderExceptionTest {
private ExpectedException thrown = ExpectedException.none();
@Test
public void createException_getMessage_checkCorrectness() throws Exception {
        thrown.expect(DuplicateReminderException.class);
Exception exception = new DuplicateReminderException();
assertEquals(DuplicateReminderException.MESSAGE, exception.getMessage());
}
}
```
###### \java\seedu\address\model\reminder\ReminderTest.java
``` java
public class ReminderTest {
private static Event event;
private static String message;
@BeforeClass
public static void setUp() throws Exception {
PropertyManager.initializePropertyManager();
event = new Event(EVENT1);
message = "You have an event";
}
@Test
public void createReminder_preDefinedFieldsPresent_checkCorrectness() throws Exception {
Reminder reminder = new Reminder(event, message);
assertNotNull(reminder);
assertEquals(event, reminder.getEvent());
assertEquals(message, reminder.getMessage());
}
@Test
public void setMessage_test_checkCorrectness() {
Reminder reminder = new Reminder(event, message);
assertNotNull(reminder);
reminder.setMessage("test");
assertEquals("test", reminder.getMessage());
}
@Test
    public void equal_twoSameReminder_checkCorrectness() throws Exception {
Reminder reminder1 = new Reminder((Event) EVENT1, EVENT1.getName().toString());
Reminder reminder2 = new Reminder((Event) EVENT1, EVENT1.getName().toString());
assertEquals(reminder1, reminder2);
}
```
###### \java\seedu\address\testutil\EditEventDescriptorBuilder.java
``` java
import java.util.Optional;
import seedu.address.commons.exceptions.IllegalValueException;
import seedu.address.logic.commands.event.EditEventCommand.EditEventDescriptor;
import seedu.address.logic.parser.util.ParserUtil;
import seedu.address.model.event.ReadOnlyEvent;
import seedu.address.model.property.exceptions.PropertyNotFoundException;
/**
* A utility class to help with building EditEventDescriptor objects.
*/
public class EditEventDescriptorBuilder {
private EditEventDescriptor descriptor;
public EditEventDescriptorBuilder() {
descriptor = new EditEventDescriptor();
}
public EditEventDescriptorBuilder(EditEventDescriptor descriptor) {
this.descriptor = new EditEventDescriptor(descriptor);
}
/**
* Returns an {@code EditEventDescriptor} with fields containing {@code event}'s details
*/
public EditEventDescriptorBuilder(ReadOnlyEvent event) {
descriptor = new EditEventDescriptor();
descriptor.setName(event.getName());
descriptor.setTime(event.getTime());
descriptor.setAddress(event.getAddress());
}
/**
* Sets the {@code Name} of the {@code EditEventDescriptor} that we are building.
*/
public EditEventDescriptorBuilder withName(String name) {
try {
ParserUtil.parseName(Optional.of(name)).ifPresent(descriptor::setName);
} catch (IllegalValueException | PropertyNotFoundException ive) {
throw new IllegalArgumentException("name is expected to be unique.");
}
return this;
}
/**
* Sets the {@code Time} of the {@code EditEventDescriptor} that we are building.
*/
public EditEventDescriptorBuilder withTime(String dateTime) {
try {
ParserUtil.parseTime(Optional.of(dateTime)).ifPresent(descriptor::setTime);
} catch (IllegalValueException | PropertyNotFoundException ive) {
throw new IllegalArgumentException("date/time is expected to be unique.");
}
return this;
}
/**
* Sets the {@code Address} of the {@code EditEventDescriptor} that we are building.
*/
public EditEventDescriptorBuilder withAddress(String address) {
try {
ParserUtil.parseAddress(Optional.of(address)).ifPresent(descriptor::setAddress);
} catch (IllegalValueException | PropertyNotFoundException ive) {
throw new IllegalArgumentException("Address is expected to be unique.");
}
return this;
}
public EditEventDescriptor build() {
return descriptor;
}
}
```
###### \java\seedu\address\testutil\EventBuilder.java
``` java
/**
* A utility class to help with building Event objects.
*/
public class EventBuilder {
public static final String DEFAULT_EVENT_NAME = "Hack Your Way 2017";
public static final String DEFAULT_TIME = "25102010 12:00";
public static final String DEFAULT_VENUE = "123, Clementi West Ave 6, #08-123";
private Event event;
static {
PropertyManager.initializePropertyManager();
}
public EventBuilder() {
try {
Name defaultEventName = new Name(DEFAULT_EVENT_NAME);
DateTime defaultTime = new DateTime(DEFAULT_TIME);
Address defaultAddress = new Address(DEFAULT_VENUE);
ArrayList<Reminder> defaultReminder = new ArrayList<>();
this.event = new Event(defaultEventName, defaultTime, defaultAddress, defaultReminder);
} catch (IllegalValueException | PropertyNotFoundException ive) {
throw new AssertionError("Default event's values are invalid.");
}
}
/**
     * Initializes the EventBuilder with the data of {@code eventToCopy}.
*/
public EventBuilder(ReadOnlyEvent eventToCopy) {
this.event = new Event(eventToCopy);
}
/**
* Sets the {@code Name} of the {@code Event} that we are building.
*/
public EventBuilder withName(String name) {
try {
this.event.setName(new Name(name));
} catch (IllegalValueException | PropertyNotFoundException ive) {
throw new IllegalArgumentException("name is expected to be unique.");
}
return this;
}
/**
* Adds a reminder into the event.
*/
public EventBuilder withReminder() {
this.event.getReminders().add(new Reminder(event, event.getTime().toString()));
return this;
}
/**
* Sets the {@code Address} of the {@code Event} that we are building.
*/
public EventBuilder withAddress(String address) {
try {
this.event.setAddress(new Address(address));
} catch (IllegalValueException | PropertyNotFoundException ive) {
throw new IllegalArgumentException("address is expected to be unique.");
}
return this;
}
/**
* Sets the {@code Date Time} of the {@code Event} that we are building.
*/
public EventBuilder withDateTime(String time) {
try {
this.event.setDateTime(new DateTime(time));
} catch (IllegalValueException | PropertyNotFoundException ive) {
throw new IllegalArgumentException("Date and Time are expected to be unique.");
}
return this;
}
public Event build() {
return this.event;
}
}
```
###### \java\seedu\address\testutil\EventUtil.java
``` java
/**
* A utility class for Event.
*/
public class EventUtil {
/**
* Returns an add command string for adding the {@code event}.
*/
public static String getAddEvent(ReadOnlyEvent event) {
return AddEventCommand.COMMAND_WORD + " " + getEventDetails(event);
}
/**
* Returns the part of command string for the given {@code event}'s details.
*/
public static String getEventDetails(ReadOnlyEvent event) {
StringBuilder sb = new StringBuilder();
sb.append(PREFIX_NAME + event.getName().toString() + " ");
sb.append(PREFIX_DATE_TIME + event.getTime().toString() + " ");
sb.append(PREFIX_ADDRESS + event.getAddress().toString());
return sb.toString();
}
}
```
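
For context, a brief hedged usage sketch follows, showing how `EventUtil` and `EventBuilder` could be combined in a test. The example class, and the assumption that the prefixes behind `PREFIX_NAME`, `PREFIX_DATE_TIME` and `PREFIX_ADDRESS` render as `n/`, `dt/` and `a/` (matching the short property names used in these tests), are illustrative only and not part of the collated code.

``` java
// Hypothetical usage sketch only; not part of the collated code.
package seedu.address.testutil;

import static org.junit.Assert.assertTrue;

import org.junit.Test;

import seedu.address.model.event.Event;

public class EventUtilUsageExample {
    @Test
    public void composeAddEventCommandString() {
        // Build an event with EventBuilder's default name, date/time and venue.
        Event event = new EventBuilder().build();
        String details = EventUtil.getEventDetails(event);
        // With the assumed prefixes, details would resemble:
        // "n/Hack Your Way 2017 dt/25102010 12:00 a/123, Clementi West Ave 6, #08-123"
        // getAddEvent() prepends AddEventCommand.COMMAND_WORD to the same details string.
        assertTrue(EventUtil.getAddEvent(event).endsWith(details));
    }
}
```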
###### \java\seedu\address\testutil\TypicalEvents.java
``` java
/**
* A utility class containing a list of {@code Event} objects to be used in tests.
*/
public class TypicalEvents {
public static final ReadOnlyEvent EVENT1 = new EventBuilder().withName("HHN 6001")
.withDateTime("22022015 08:30")
.withAddress("123, Sentosa, #08-111").withReminder().build();
public static final ReadOnlyEvent EVENT2 = new EventBuilder().withName("ZoukOut 6001")
.withDateTime("25122017 10:30")
.withAddress("123, Clarke Quay #01-111").withReminder().build();
// Manually added
public static final ReadOnlyEvent EVENTM1 = new EventBuilder().withName("Volleyball Tour 17")
.withDateTime("25122017 08:30")
.withAddress("OCBC ARENA Hall 3, #01-111").withReminder().build();
public static final ReadOnlyEvent EVENTM2 = new EventBuilder().withName("Meeting with Jason")
.withDateTime("25112016 02:30")
.withAddress("123, Sheraton Towers , #06-111").withReminder().build();
// Manually added - Person's details found in {@code CommandTestUtil}
public static final ReadOnlyEvent EV1 = new EventBuilder().withName(VALID_NAME_EVENT1)
.withDateTime(VALID_DATE_EVENT1).withAddress(VALID_VENUE_EVENT1).build();
public static final ReadOnlyEvent EV2 = new EventBuilder().withName(VALID_NAME_EVENT2)
.withDateTime(VALID_DATE_EVENT2).withAddress(VALID_VENUE_EVENT2).build();
private TypicalEvents() {} // prevents instantiation
/**
     * Returns an {@code AddressBook} with all the typical events.
*/
public static AddressBook getTypicalAddressBook() {
AddressBook ab = new AddressBook();
for (ReadOnlyEvent events : getTypicalEvents()) {
try {
ab.addEvent(events);
} catch (DuplicateEventException e) {
assert false : "not possible";
}
}
return ab;
}
static {
PropertyManager.initializePropertyManager();
}
public static List<ReadOnlyEvent> getTypicalEvents() {
return new ArrayList<>(Arrays.asList(EVENT1, EVENT2));
}
}
```
###### \java\seedu\address\ui\EventCardTest.java
``` java
public class EventCardTest extends GuiUnitTest {
@Test
public void equals() {
Event event = new EventBuilder().build();
EventCard eventCard = new EventCard(event, 0);
// same event, same index -> returns true
EventCard copy = new EventCard(event, 0);
assertTrue(eventCard.equals(copy));
// same object -> returns true
assertTrue(eventCard.equals(eventCard));
// null -> returns false
assertFalse(eventCard.equals(null));
// different types -> returns false
assertFalse(eventCard.equals(0));
// different event, same index -> returns false
Event differentEvent = new EventBuilder().withName("differentName").build();
assertFalse(eventCard.equals(new EventCard(differentEvent, 0)));
// same event, different index -> returns false
assertFalse(eventCard.equals(new EventCard(event, 1)));
}
/**
* Asserts that {@code eventCard} displays the details of {@code expectedEvent} correctly and matches
* {@code expectedId}.
*/
private void assertCardDisplay(EventCard eventCard, ReadOnlyEvent expectedEvent, int expectedId) {
guiRobot.pauseForHuman();
EventCardHandle eventCardHandle = new EventCardHandle(eventCard.getRoot());
// verify id is displayed correctly
assertEquals(Integer.toString(expectedId) + ". ", eventCardHandle.getId());
// verify event details are displayed correctly
assertCardDisplaysEvent(expectedEvent, eventCardHandle);
}
}
```
###### \java\seedu\address\ui\EventListPanelTest.java
``` java
public class EventListPanelTest extends GuiUnitTest {
private static final ObservableList<ReadOnlyEvent> TYPICAL_EVENTS =
FXCollections.observableList(getTypicalEvents());
private static final JumpToListRequestEvent JUMP_TO_SECOND_EVENT = new JumpToListRequestEvent(INDEX_SECOND_PERSON);
private EventListPanelHandle eventListPanelHandle;
@Before
public void setUp() {
EventListPanel eventListPanel = new EventListPanel(TYPICAL_EVENTS);
uiPartRule.setUiPart(eventListPanel);
eventListPanelHandle = new EventListPanelHandle(getChildNode(eventListPanel.getRoot(),
EventListPanelHandle.EVENT_LIST_VIEW_ID));
}
@Test
public void display() {
for (int i = 0; i < TYPICAL_EVENTS.size(); i++) {
eventListPanelHandle.navigateToCard(TYPICAL_EVENTS.get(i));
ReadOnlyEvent expectedEvent = TYPICAL_EVENTS.get(i);
EventCardHandle actualCard = eventListPanelHandle.getEventCardHandle(i);
assertCardDisplaysEvent(expectedEvent, actualCard);
assertEquals(Integer.toString(i + 1) + ". ", actualCard.getId());
}
}
@Test
public void handleJumpToListRequestEvent() {
postNow(JUMP_TO_SECOND_EVENT);
guiRobot.pauseForHuman();
EventCardHandle expectedCard = eventListPanelHandle.getEventCardHandle(INDEX_SECOND_PERSON.getZeroBased());
EventCardHandle selectedCard = eventListPanelHandle.getHandleToSelectedCard();
assertCardEquals(expectedCard, selectedCard);
}
}
```
###### \java\systemtests\AddEventCommandSystemTest.java
``` java
public class AddEventCommandSystemTest extends AddressBookSystemTest {
@Test
public void add() throws Exception {
showAllEvents();
Model model = getModel();
        /* Case: add an event to a non-empty address book, command with leading spaces and trailing spaces
* -> added
*/
ReadOnlyEvent toAdd = EV1;
String command = " " + AddEventCommand.COMMAND_WORD + " " + NAME_DESC_EVENT1 + " " + DATE_DESC_EVENT1
+ " " + VENUE_DESC_EVENT1;
String inputCommand = command;
assertCommandSuccess(command, toAdd);
/* Case: undo adding EV1 to the list -> EV1 deleted */
command = UndoCommand.COMMAND_WORD;
String expectedResultMessage = String.format(UndoCommand.MESSAGE_SUCCESS, inputCommand);
assertCommandSuccess(command, model, expectedResultMessage);
/* Case: redo adding EV1 to the list -> EV1 added again */
command = RedoCommand.COMMAND_WORD;
expectedResultMessage = String.format(RedoCommand.MESSAGE_SUCCESS, inputCommand);
model.addEvent(toAdd);
assertCommandSuccess(command, model, expectedResultMessage);
/* Case: add a duplicate event -> rejected */
command = AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + DATE_DESC_EVENT1 + VENUE_DESC_EVENT1;
assertCommandFailure(command, AddEventCommand.MESSAGE_DUPLICATE_EVENT);
        /* Case: add an event with all fields same as another event in the address book except name -> added */
toAdd = new EventBuilder().withName(VALID_NAME_EVENT2).withDateTime(VALID_DATE_EVENT1)
.withAddress(VALID_VENUE_EVENT1).build();
command = AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT2 + DATE_DESC_EVENT1 + VENUE_DESC_EVENT1;
assertCommandSuccess(command, toAdd);
        /* Case: add an event with all fields same as another event in the address book except date -> added */
toAdd = new EventBuilder().withName(VALID_NAME_EVENT1).withDateTime(VALID_DATE_EVENT2)
.withAddress(VALID_VENUE_EVENT1).build();
command = AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + DATE_DESC_EVENT2 + VENUE_DESC_EVENT1;
assertCommandSuccess(command, toAdd);
        /* Case: add an event with all fields same as another event in the address book except address -> added */
toAdd = new EventBuilder().withName(VALID_NAME_EVENT1).withDateTime(VALID_DATE_EVENT1)
.withAddress(VALID_VENUE_EVENT2).build();
command = AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + DATE_DESC_EVENT1 + VENUE_DESC_EVENT2;
assertCommandSuccess(command, toAdd);
/* Case: add to empty address book -> added */
executeCommand(ClearCommand.COMMAND_WORD);
assert getModel().getAddressBook().getEventList().size() == 0;
assertCommandSuccess(EVENT1);
/* Case: missing name -> rejected */
command = AddEventCommand.COMMAND_WORD + DATE_DESC_EVENT1 + VENUE_DESC_EVENT1;
assertCommandFailure(command, String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddEventCommand.MESSAGE_USAGE));
/* Case: missing date -> rejected */
command = AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + VENUE_DESC_EVENT1;
assertCommandFailure(command, String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddEventCommand.MESSAGE_USAGE));
/* Case: missing address -> rejected */
command = AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + DATE_DESC_EVENT1;
assertCommandFailure(command, String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddEventCommand.MESSAGE_USAGE));
/* Case: invalid keyword -> rejected */
command = "addEs " + EventUtil.getEventDetails(toAdd);
assertCommandFailure(command, Messages.MESSAGE_UNKNOWN_COMMAND);
/* Case: invalid name -> rejected */
command = AddEventCommand.COMMAND_WORD + INVALID_NAME_DESC + DATE_DESC_EVENT1 + VENUE_DESC_EVENT1;
assertCommandFailure(command, PropertyManager.getPropertyConstraintMessage("n"));
/* Case: invalid address -> rejected */
command = AddEventCommand.COMMAND_WORD + NAME_DESC_EVENT1 + DATE_DESC_EVENT1 + INVALID_ADDRESS_DESC;
assertCommandFailure(command, PropertyManager.getPropertyConstraintMessage("a"));
}
/**
* Executes the {@code AddEventCommand} that adds {@code toAdd} to the model and verifies that the command box
* displays an empty string, the result display box displays the success message of executing
     * {@code AddEventCommand} with the details of {@code toAdd}, and the model related components equal to the current model
* added with {@code toAdd}. These verifications are done by
* {@code AddressBookSystemTest#assertApplicationDisplaysExpected(String, String, Model)}.<br>
* Also verifies that the command box has the default style class, the status bar's sync status changes,
* the browser url and selected card remains unchanged.
* @see AddressBookSystemTest#assertApplicationDisplaysExpected(String, String, Model)
*/
private void assertCommandSuccess(ReadOnlyEvent toAdd) {
assertCommandSuccess(EventUtil.getAddEvent(toAdd), toAdd);
}
/**
* Performs the same verification as {@code assertCommandSuccess(ReadOnlyEvent)}. Executes {@code command}
* instead.
* @see AddEventCommandSystemTest#assertCommandSuccess(ReadOnlyEvent)
*/
private void assertCommandSuccess(String command, ReadOnlyEvent toAdd) {
Model expectedModel = getModel();
try {
expectedModel.addEvent(toAdd);
} catch (DuplicateEventException dpe) {
throw new IllegalArgumentException("toAdd already exists in the model.");
}
String expectedResultMessage = String.format(AddEventCommand.MESSAGE_SUCCESS, toAdd);
assertCommandSuccess(command, expectedModel, expectedResultMessage);
}
/**
* Performs the same verification as {@code assertCommandSuccess(String, ReadOnlyEvent)} except that the result
* display box displays {@code expectedResultMessage} and the model related components equal to
* {@code expectedModel}.
* @see AddEventCommandSystemTest#assertCommandSuccess(String, ReadOnlyEvent)
*/
private void assertCommandSuccess(String command, Model expectedModel, String expectedResultMessage) {
executeCommand(command);
assertApplicationDisplaysExpected("", expectedResultMessage, expectedModel);
assertSelectedCardUnchanged();
assertCommandBoxShowsDefaultStyle();
assertStatusBarUnchangedExceptSyncStatus();
}
/**
* Executes {@code command} and verifies that the command box displays {@code command}, the result display
* box displays {@code expectedResultMessage} and the model related components equal to the current model.
* These verifications are done by
* {@code AddressBookSystemTest#assertApplicationDisplaysExpected(String, String, Model)}.<br>
* Also verifies that the browser url, selected card and status bar remain unchanged, and the command box has the
* error style.
* @see AddressBookSystemTest#assertApplicationDisplaysExpected(String, String, Model)
*/
private void assertCommandFailure(String command, String expectedResultMessage) {
Model expectedModel = getModel();
executeCommand(command);
assertApplicationDisplaysExpected(command, expectedResultMessage, expectedModel);
assertSelectedCardUnchanged();
assertCommandBoxShowsErrorStyle();
assertStatusBarUnchanged();
}
}
```
###### \java\systemtests\util\ModelHelper.java
``` java
/**
* Returns a predicate that evaluates to true if this {@code ReadOnlyEvent} equals to {@code other}.
*/
private static Predicate<ReadOnlyEvent> getPredicateMatching(ReadOnlyEvent other) {
return event -> event.equals(other);
}
/**
* Returns a predicate that evaluates to true if this {@code ReadOnlyEvent} equals to {@code other}.
*/
private static Predicate<ReadOnlyPerson> getPredicateMatching(ReadOnlyPerson other) {
return person -> person.equals(other);
}
/**
* Updates {@code model}'s filtered list to display only {@code toDisplay}.
*/
public static void setFilteredEventsList(Model model, List<ReadOnlyEvent> toDisplay) {
Optional<Predicate<ReadOnlyEvent>> predicate =
toDisplay.stream().map(ModelHelper::getPredicateMatching).reduce(Predicate::or);
model.updateFilteredEventsList(predicate.orElse(PREDICATE_MATCHING_NO_EVENTS));
}
/**
* @see ModelHelper#setFilteredList(Model, List)
*/
public static void setFilteredEventsList(Model model, ReadOnlyEvent... toDisplay) {
setFilteredEventsList(model, Arrays.asList(toDisplay));
}
}
```
| 39.332205 | 119 | 0.700356 | yue_Hant | 0.295551 |
4705c477a0d201e656317b73c6e1b671d90edbeb | 3,585 | md | Markdown | tutorial/setup.md | sbracaloni/mlv-tools-tutorial | 37db0ce38a79f42e25b4848b1b28a7d934606c94 | [
"BSD-3-Clause"
] | 61 | 2018-11-15T14:55:29.000Z | 2020-03-23T22:23:51.000Z | tutorial/setup.md | sbracaloni/mlv-tools-tutorial | 37db0ce38a79f42e25b4848b1b28a7d934606c94 | [
"BSD-3-Clause"
] | 4 | 2020-04-12T02:47:56.000Z | 2020-08-31T12:33:16.000Z | tutorial/setup.md | dajea24/mlvtools-tutorial | cda8476a696031264934aa861916a7f6c88df41a | [
"BSD-3-Clause"
] | 11 | 2019-04-06T11:08:34.000Z | 2020-03-11T17:58:10.000Z | # Tutorial Setup
This is the setup section for realistic tutorial.
## 1. Create Project Structure
All resource files needed in this tutorial are provided in `ml-poc-version/resources`.
The structure of the project will be created along the tutorial.
If it is not already done, clone the repository on the tutorial branch.
git clone -b tutorial https://github.com/peopledoc/mlv-tools-tutorial
cd ml-poc-version
Create your working branch
git checkout -b working
Create the project base structure.
make init-struct
Following structure must be created:
├── poc
│ ├── pipeline
│ │ ├── __init__.py
│ │ ├── notebooks # contains Jupyter notebooks (one by pipeline step)
| | └── steps # contains generated configurable Python 3 scripts
| ├── data # contains pipeline data
│ └── commands
│ └── dvc # contains dvc command wrapped in a bash script
...
├── resources # contains Jupyter notebooks needed in this tutorial
│ ├── 01_Extract_dataset.ipynb
│ ├── 02_Tokenize_text.ipynb
│ ├── 03_bis_Classify_text.ipynb
│ ├── 03_Classify_text.ipynb
│ └── 04_Evaluate_model.ipynb
...
> It is not mandatory to follow this structure, it is just an example for this tutorial.
## 2. Prepare Environment
Create a virtual environment using **conda** or **virtualenv**, then activate it.
Then set up the project.
make develop
## 3. Initialize DVC Project
**DVC** works on top of **git** repositories. Run **DVC** initialization in a **git**
repository directory to create **DVC meta files**.
dvc init
The directory `.dvc` should be created in the project root directory.
Add it under git versioning:
git commit -m 'Tutorial setup: dvc init' ./.dvc/
## 4. Create MLV-tools Project Configuration
When using **MLV-tools**, it can be tedious to repeat the output path parameters for each `ipynb_to_python`
and `gen_dvc` command.
It is possible to provide a configuration which declares the project structure and
lets **MLV-tools** generate the output paths (an illustrative configuration is sketched at the end of this section).
(For more information see [documentation](https://github.com/mlflow/mlflow))
make mlvtools-conf
The configuration file `./.mlvtools` should be created.
Add it under git versioning:
    git add .mlvtools && git commit -m 'Tutorial setup: mlvtools configuration'
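
For reference, `.mlvtools` is a small JSON file describing where MLV-tools should write the generated scripts and DVC commands. The sketch below is purely illustrative: the actual keys and values are produced by `make mlvtools-conf` and may differ in your version of MLV-tools.

    {
      "path": {
        "python_script_root_dir": "./poc/pipeline/steps",
        "dvc_cmd_root_dir": "./poc/commands/dvc"
      }
    }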
## 5. Add Git Hooks and Filters
### 5.1 Automate Jupyter Notebook Cleanup
Usually it is not useful to version **Jupyter notebook** embedded outputs. Sometimes it is even forbidden,
if you work on production data for example. To avoid mistakes, use git pre-commit or git filter to cleanup
**Jupyter notebook** outputs. Several tools can do that,
see for example [nbstripout](https://github.com/kynan/nbstripout).
pip install --upgrade nbstripout
nbstripout --install
With the **nbstripout** git filter, **Jupyter notebook** outputs are cleaned on check-in, on every branch. That means
that when you commit a change, the outputs stay in your local notebook so you can keep working,
but they are not sent to the remote server when you push.
Notebook outputs are also excluded from the git diff.
## 6. Get Tutorial Data
This tutorial is based on data from [20_newsgroup](http://scikit-learn.org/stable/datasets/).
Run the following command to download them.
make download-data
Data are stored in `./poc/data/20news-bydate_py3.pkz`.
You have reached the end of the setup part; see [Use Case 1: Build and Reproduce a Pipeline](./use_case1.md)
Or [go back to README](../README.md)
| 32.297297 | 112 | 0.706555 | eng_Latn | 0.984947 |
4705dc3cac4fe8e371a238201616e368adbdb732 | 831 | md | Markdown | README.md | Automannn/idea | d687d59767e876231fe5bb8bd37dbc306881d9e8 | [
"Apache-2.0"
] | 1 | 2021-08-30T03:16:26.000Z | 2021-08-30T03:16:26.000Z | README.md | Automannn/idea | d687d59767e876231fe5bb8bd37dbc306881d9e8 | [
"Apache-2.0"
] | null | null | null | README.md | Automannn/idea | d687d59767e876231fe5bb8bd37dbc306881d9e8 | [
"Apache-2.0"
] | null | null | null | ### 授权服务器
* 构建状态 [](https://travis-ci.org/jinfeijie/idea)
* 使用方法

* 构建方法:
- Dockerfile构建:`docker build -f Dockerfile -t idea ./ `
- DockerHub构建:`docker pull mrjin/idea:latest`
* 部署方法:
- 本地部署:`docker run -d -e PORT=9501 -e USER=jinfeijie.cn -p 9501:9501 --restart=always --name=idea idea`
- DockerHub部署:`docker run -d -e PORT=9501 -e USER=jinfeijie.cn -p 9501:9501 --restart=always --name=idea mrjin/idea:latest`
* 参数
| 环境变量 | 默认值 | 备注 |
| ------------- |:-------------:| :---:|
| PORT | 9501 | 内部端口号|
| USER | jinfeijie.cn | 授权用户 |
* 手头宽裕请支持正版:[https://www.jetbrains.com/idea/](https://www.jetbrains.com/idea/)
* 其他问题[@jinfeijie](https://t.me/jinfeijie)
| 33.24 | 124 | 0.638989 | yue_Hant | 0.600035 |
47061506f98359c848c312989ffc5c40eaee4f25 | 7,512 | md | Markdown | README.md | ParalelniPolis/obcanka-public | 826332e330ec10dc039b58d590d344283089408a | [
"Apache-2.0"
] | 34 | 2018-11-14T23:08:09.000Z | 2022-03-12T22:11:51.000Z | README.md | ParalelniPolis/obcanka-public | 826332e330ec10dc039b58d590d344283089408a | [
"Apache-2.0"
] | null | null | null | README.md | ParalelniPolis/obcanka-public | 826332e330ec10dc039b58d590d344283089408a | [
"Apache-2.0"
] | 12 | 2019-01-19T09:36:42.000Z | 2021-08-03T21:31:59.000Z |
List of projects working with the eObčanka
=========================================

| Project name | Project description | Project link|
| --- | --- | --- |
| **obcanka-reader**| eObčanka reader that acts as a USB keyboard | https://github.com/premek/obcanka-reader |
| **Czech ID card service** | Multiplatform open-source identification client | https://github.com/devnautica/czeidcardservice |
| **soFair-eObcanka** | JS login library built on top of the Czech ID card service solution, used in a petition-system application | https://github.com/Hixqqo/soFair-eObcanka |
| **eObcanka.Java** | Java and Android library for working with the eObčanka | https://github.com/ParalelniPolis/obcanka-public/tree/master/java|
| **eObcanka.NET** | .NET library for working with the eObčanka | https://github.com/wurducius/eObcanka.NET |
| **oPYcanka** | Python library for working with the eObčanka | https://github.com/ParalelniPolis/opycanka |
| **eobcanka-rust** | Minimal Rust library implementation for working with the eObčanka | https://github.com/Qubeo/eobcanka-rust |
| **EOP** | Python script that computes the public key from the signature of a certificate stored on the eObčanka | https://github.com/andrewkozlik/eop

Interesting documents about the eObčanka
========================================

| Description | Link |
| --- | --- |
| Developer presentation from the hackathon giving a basic introduction to the eObčanka | https://github.com/ParalelniPolis/obcanka-public/blob/master/java/doc/Obcanka_20190119.pdf |
| ISO7816 | http://cardwerk.com/smart-card-standard-iso7816-4-section-5-basic-organizations |
The non-public public
==================

**The Czech Republic is introducing new electronic identity cards (the so-called eObčanka) and the whole project comes across as very half-baked. Both the technical solution and the communication around it undermine trust in a tool that has the potential to significantly simplify communication with the state and ease many identification duties for users of internet and financial services. Yet instead of opening the software to further development on the users' side, the state refuses to publish even data that by its very nature should be public. We have therefore decided to open up this non-public public in parallel.**

The architecture of the solution is grossly unfinished and lacks not only a logical structure but also some basic security essentials. While uncovering the technical design, a completely incomprehensible problem appeared on the issuer's side, which refused to publish the public keys needed to verify the authenticity of the certificates and of their issuer (the certification authority).

Karel Kyovský approached the Czech Ministry of the Interior in mid-September of this year with a request to make the public keys available. "Making these certificates available to the general public will allow citizens and the private sector to use the capabilities of the new identity cards more effectively and will enable independent verification of their authenticity," Karel Kyovský stated in the justification of the request.

The Ministry of the Interior nevertheless rejected the request, citing a series of regulations that allegedly make it impossible to provide the public keys, and it described the publication itself as a security risk. "We consider providing the requested certificates a possible risk to the security of the electronic communication system," replied Ing. František Varmuža, director of the Department of Central Information Systems at the Ministry of the Interior.

* [The state refuses to release the public keys for the electronic identity cards](https://www.lupa.cz/aktuality/stat-odmita-vydat-verejne-klice-k-elektronickym-obcanskym-prukazum/)
* [The Czech Ministry of the Interior considers releasing the public keys for the ID cards a security risk](https://www.root.cz/zpravicky/ministerstvo-vnitra-cr-povazuje-vydani-verejnych-klicu-k-obcankam-za-bezpecnostni-riziko/)

Either the Ministry of the Interior does not understand the basics of cryptography and security, or there is a hidden agenda whose goal is to create the role of a central verifier (room for further contracts). Slovakia and Estonia, for example, made the public keys of their e-documents available without any problem and identified no security risk in doing so.

**The public keys of the Czech eObčanka can, however, be computed from the certificates stored on the card.** The method of computation was presented by the cryptography consultants Ondřej Vejpustek and Andrew Kozlik. The methodology is described in more detail, for example, in the document [Elliptic Curve Cryptography](http://bitcoin.me/sec1-v2.pdf).

The public key is [recorded in the blockchain](https://www.blockchain.com/btc/tx/a0549be380a0eb8d623c9e18a072e494952333a96921db393dbb4c5cfddea86c). How to download the key [is described here](https://github.com/ParalelniPolis/obcanka-public).

By refusing to publish the public keys of the certificates, the state creates an artificial monopoly and room for corruption, where only the persons it designates, with access to the certificates, can use the new capabilities of the chip-equipped identity cards and build further applications for a broader and more meaningful use of the eObčanka.

If the state senselessly refuses to open up a publicly funded system to the public, we will do it for them (you're welcome).

### Hackathon eObčanka 2019, 19-20 January 2019 at Paralelní Polis

On the third weekend of January, Paralelní Polis is hosting a hackathon whose goal is to open up the eObčanka platform, check its security, and bring new ways of using it.

#### Conditions

Both teams and individuals can take part in the hackathon. There is no entry fee. Card readers and an example Java library for communicating with the eObčanka will be available. Participants must bring their own eObčanka. Refreshments and speciality coffee from our café will be available for the whole duration of the hackathon.

#### Goals

During the hackathon we want to create projects in the following areas:

* Libraries for working with the eObčanka on different platforms
* An iOS application
* Extraction of the software from the card and a security audit, for example of how the private key is handled
* Exploring and documenting undocumented functions of the system
* New uses of the eObčanka for commercial as well as non-commercial purposes

Participants do not have to limit themselves to these topics and can come up with their own interesting ideas. There are no limits to creativity.

### Evaluation criteria

Projects will be evaluated on the basis of a five-minute presentation and a demo, according to the following criteria:

* Usefulness
* UX
* Originality
* Quality of the demo

### Prizes

The winning teams can look forward to:

* A job offer at the Czech Ministry of the Interior
* A DVD with the film Občan K
* A share of the [bitcoin reward at 3Bz3pGkTQJf7NSxhtE8YicHffyDRCbjqeb](https://www.blockchain.com/btc/address/3Bz3pGkTQJf7NSxhtE8YicHffyDRCbjqeb)

Can't take part but want to motivate the teams? Contribute to the bitcoin address above.
### Results

| Place | Team | Project description | Project link|
| --- | --- | --- | --- |
| 1. | Devnautica + soFair | Multiplatform open-source identification client + JS login library built on top of the Devnautica team's solution, used in a petition-system application | https://github.com/Hixqqo/soFair-eObcanka and https://github.com/devnautica/czeidcardservice|
| 2. | Premek | eObčanka reader that acts as a USB keyboard | https://github.com/premek/obcanka-reader |
| 3. | Auxilium & Warden Audit| Security audit of the eObčanka identification system: identification of undocumented and unknown functionality and of potential security flaws | CLASSIFIED |
| 4. | Submission | .NET Library + Utility application | https://github.com/wurducius/eObcanka.NET |
| 5. | oPYcanka | Python library for working with the eObčanka | https://github.com/ParalelniPolis/opycanka |
| 6. | MindFoc | Minimal Rust lib implementation | https://github.com/Qubeo/eobcanka-rust |
4706a1c670244c9cd80e8af57e44ec932d0da69f | 471 | md | Markdown | README.md | Frank1215/curso-practico-javascript-platzi | dce3c8080842c8dfcad90d6eb220957ec436ddca | [
"MIT"
] | 1 | 2022-03-08T17:40:25.000Z | 2022-03-08T17:40:25.000Z | README.md | Frank1215/curso-practico-javascript-platzi | dce3c8080842c8dfcad90d6eb220957ec436ddca | [
"MIT"
] | null | null | null | README.md | Frank1215/curso-practico-javascript-platzi | dce3c8080842c8dfcad90d6eb220957ec436ddca | [
"MIT"
] | null | null | null | # curso-practico-javascript-platzi
Es el curso practico de Javascript de Platzi
## Taller #1: Figuras Geometricas
-primer paso: definir las fomulas
-Segundo: implementar las formulas en javascript
-Tercer paso: Crear funciones
-Cuarto paso: Integrar JavasCript con HTML
## Taller #2: porcentajes y descuentos
-Primer Paso: definir las formulas
-Segundo Paso: Implementar formulas de Javascriptp
-Tercer Paso: Crear Funciones
-Cuarto Paso: Integrar JavasCript con html
| 29.4375 | 50 | 0.800425 | spa_Latn | 0.915458 |
470773b13b0fd840e63808c27e780bcb7b7fe5c8 | 1,135 | md | Markdown | _publications/2020-02-01-BIOSTEC-paper-number-1.md | bagheria/bagheria.github.io | b3591536df2b4eb0ef6bdd6122f53fa9ae83c6ad | [
"MIT"
] | 1 | 2018-11-27T22:03:21.000Z | 2018-11-27T22:03:21.000Z | _publications/2020-02-01-BIOSTEC-paper-number-1.md | bagheria/bagheria.github.io | b3591536df2b4eb0ef6bdd6122f53fa9ae83c6ad | [
"MIT"
] | 3 | 2020-02-25T13:41:05.000Z | 2020-09-17T13:24:59.000Z | _publications/2020-02-01-BIOSTEC-paper-number-1.md | bagheria/bagheria.github.io | b3591536df2b4eb0ef6bdd6122f53fa9ae83c6ad | [
"MIT"
] | null | null | null | ---
title: "Automatic ICD-10 classification of diseases from Dutch discharge letters"
collection: publications
permalink: /publication/2020-02-01-BIOSTEC-paper-number-1
excerpt: 'This paper benchmarks deep learning-based classification systems for ICD-10 coding in Dutch cardiology discharge letters from the UMC Utrecht.'
date: 2020-02-01
venue: 'Proceeding of the 13th International Joint Conference on Biomedical Engineering Systems and Technologies'
paperurl: 'https://discovery.ucl.ac.uk/id/eprint/10098370/'
---
This paper benchmarks the state-of-the-art deep learning-based classification systems for ICD-10 coding, along with baseline systems on a data set constructed from the Dutch cardiology discharge letters at the UMC Utrecht.
[Download paper here](https://discovery.ucl.ac.uk/id/eprint/10098370/)
Recommended citation: Bagheri, A., Sammani, A., Van der Heijden, P. G. M., Asselbergs, F. W., & Oberski, D. L. (2020). "Automatic ICD-10 classification of diseases from Dutch discharge letters." <i>In Proceeding of the 13th International Joint Conference on Biomedical Engineering Systems and Technologies</i>.
| 75.666667 | 320 | 0.793833 | eng_Latn | 0.842369 |
47079bc7fb39557fba0c9ca016da1ff34f256b16 | 6,174 | md | Markdown | content/en/docs/releases/supported-releases/index.md | keyolk/istio.io | 717bc611aaacf80b88d4c9a2f0eb3ca4865e6f11 | [
"Apache-2.0"
] | null | null | null | content/en/docs/releases/supported-releases/index.md | keyolk/istio.io | 717bc611aaacf80b88d4c9a2f0eb3ca4865e6f11 | [
"Apache-2.0"
] | null | null | null | content/en/docs/releases/supported-releases/index.md | keyolk/istio.io | 717bc611aaacf80b88d4c9a2f0eb3ca4865e6f11 | [
"Apache-2.0"
] | null | null | null | ---
title: Supported Releases
description: The currently supported Istio releases.
weight: 35
aliases:
- /about/supported-releases
- /latest/about/supported-releases
owner: istio/wg-docs-maintainers
test: n/a
---
This page lists the status, timeline and policy for currently supported releases. Supported releases of Istio include
releases that are in the active maintenance window and are patched for security and bug fixes. Subsequent patch releases
on a minor release do not contain backward incompatible changes.
* [Support Policy](#support-policy)
* [Naming scheme](#naming-scheme)
* [Support status of Istio releases](#support-status-of-istio-releases)
* [Supported releases without known Common Vulnerabilities and Exposures (CVEs)](#supported-releases-without-known-common-vulnerabilities-and-exposures-cves)
## Support policy
We produce new builds of Istio for each commit. Around once a quarter, we build a minor release and run through several
additional tests as well as release qualification. We release patch versions for issues found in minor releases.
The various types of releases represent a different product quality level and level of assistance from the Istio community.
In this context, *support* means that the community will produce patch releases for critical issues and offer technical
assistance. Separately, 3rd parties and partners may offer longer-term support solutions.
|Type | Support Level | Quality and Recommended Use
|------------------|-----------------------------------------------------------------------------------------------------------------------|----------------------------
|Development Build | No support | Dangerous, may not be fully reliable. Useful to experiment with.
|Minor Release | Support is provided until 3 months after the next minor release | Safe to deploy in production. Users are encouraged to upgrade to these releases as soon as possible.
|Patch | Same as the corresponding Minor release | Users are encouraged to adopt patch releases as soon as they are available for a given release.
|Security Patch | Same as a Patch, however, it will not contain any additional code other than the security fix from the previous patch | Given the nature of security fixes, users are **strongly** encouraged to adopt security patches after release.
You can find available releases on the [releases page](https://github.com/istio/istio/releases),
and if you're the adventurous type, you can learn about our development builds on the [development builds wiki](https://github.com/istio/istio/wiki/Dev%20Builds).
You can find high-level releases notes for each minor and patch release [here](/news).
## Naming scheme
Our naming scheme is as follows:
{{< text plain >}}
<major>.<minor>.<patch>
{{< /text >}}
where `<minor>` is increased for each release, and `<patch>` counts the number of patches for the
current `<minor>` release. A patch is usually a small change relative to the `<minor>` release.
## Support status of Istio releases
| Version | Currently Supported | Release Date | End of Life | Supported Kubernetes Versions | Tested, but not supported |
|-----------------|----------------------|-------------------|--------------------------|-------------------------------|---------------------------|
| master | No, development only | | | | |
| 1.13 | Yes | February 11, 2022 | ~October 2022 (Expected) | 1.20, 1.21, 1.22, 1.23 | 1.16, 1.17, 1.18, 1.19 |
| 1.12 | Yes | November 18, 2021 | ~June 2022 (Expected) | 1.19, 1.20, 1.21, 1.22 | 1.16, 1.17, 1.18 |
| 1.11 | Yes | August 12, 2021 | ~Mar 2022 (Expected) | 1.18, 1.19, 1.20, 1.21, 1.22 | 1.16, 1.17 |
| 1.10 | No | May 18, 2021 | Dec 30, 2021 | 1.18, 1.19, 1.20, 1.21 | 1.16, 1.17, 1.22 |
| 1.9 | No | February 9, 2021 | Oct 8, 2021 | 1.17, 1.18, 1.19, 1.20 | 1.15, 1.16 |
| 1.8 | No | November 10, 2020 | May 12, 2021 | 1.16, 1.17, 1.18, 1.19 | 1.15 |
| 1.7 | No | August 21, 2020 | Feb 25, 2021 | 1.16, 1.17, 1.18 | 1.15 |
| 1.6 and earlier | No | | | | |
{{< warning >}}
[Kubernetes 1.22 removed some deprecated APIs](https://kubernetes.io/blog/2021/07/14/upcoming-changes-in-kubernetes-1-22/) and as a result versions of Istio prior to 1.10.0 will no longer work. If you are upgrading your Kubernetes version, make sure that your Istio version is still supported.
{{< /warning >}}
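
To see which version a cluster is currently running before checking it against the table above, you can query the control plane and data plane versions with the `istioctl` CLI (shown here as an illustration; the exact output depends on your installation):

{{< text bash >}}
$ istioctl version
{{< /text >}}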
## Supported releases without known Common Vulnerabilities and Exposures (CVEs)
{{< warning >}}
Istio does not guarantee that minor releases that fall outside the support window have all known CVEs patched.
Please keep up-to-date and use a supported version.
{{< /warning >}}
| Minor Releases | Patched versions with no known CVEs |
|----------------------------|--------------------------------------|
| 1.13.x | 1.13.1+ |
| 1.12.x | 1.12.4+ |
| 1.11.x | 1.11.7+ |
| 1.10.x | 1.10.4+ |
| 1.9.x | 1.9.9 |
| 1.8 and earlier | None |
| 72.635294 | 293 | 0.523324 | eng_Latn | 0.995479 |
4707a56dd27b594ad64ef86fab0d12c1d89586ff | 2,084 | md | Markdown | docs/csharp/programming-guide/indexers/indexers-in-interfaces.md | cy-org/docs-conceptual.zh-cn | 0c18cda3dd707efdcdd0e73bc480ab9fbbc4580c | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/csharp/programming-guide/indexers/indexers-in-interfaces.md | cy-org/docs-conceptual.zh-cn | 0c18cda3dd707efdcdd0e73bc480ab9fbbc4580c | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/csharp/programming-guide/indexers/indexers-in-interfaces.md | cy-org/docs-conceptual.zh-cn | 0c18cda3dd707efdcdd0e73bc480ab9fbbc4580c | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: "接口中的索引器(C# 编程指南)"
ms.date: 2015-07-20
ms.prod: .net
ms.technology:
- devlang-csharp
ms.topic: article
dev_langs:
- CSharp
helpviewer_keywords:
- indexers [C#], in interfaces
- accessors [C#], indexers
ms.assetid: e16b54bd-4a83-4f52-bd75-65819fca79e8
caps.latest.revision: 18
author: BillWagner
ms.author: wiwagn
translation.priority.ht:
- cs-cz
- de-de
- es-es
- fr-fr
- it-it
- ja-jp
- ko-kr
- pl-pl
- pt-br
- ru-ru
- tr-tr
- zh-cn
- zh-tw
ms.translationtype: HT
ms.sourcegitcommit: 306c608dc7f97594ef6f72ae0f5aaba596c936e1
ms.openlocfilehash: 2715602dadea40324f613bb07b5dd332ed18c25c
ms.contentlocale: zh-cn
ms.lasthandoff: 07/28/2017
---
# <a name="indexers-in-interfaces-c-programming-guide"></a>接口中的索引器(C# 编程指南)
可以在[接口](../../../csharp/language-reference/keywords/interface.md)上声明索引器。 接口索引器的访问器与[类](../../../csharp/language-reference/keywords/class.md)索引器的访问器有所不同,差异如下:
- 接口访问器不使用修饰符。
- 接口访问器没有正文。
因此,访问器的用途是指示索引器为读写、只读还是只写。
下面是接口索引器访问器的示例:
[!code-cs[csProgGuideIndexers#3](../../../csharp/programming-guide/classes-and-structs/codesnippet/CSharp/indexers-in-interfaces_1.cs)]
索引器的签名必须不同于同一接口中声明的所有其他索引器的签名。
## <a name="example"></a>示例
下面的示例演示如何实现接口索引器。
[!code-cs[csProgGuideIndexers#4](../../../csharp/programming-guide/classes-and-structs/codesnippet/CSharp/indexers-in-interfaces_2.cs)]
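
The sample referenced above lives in an external snippet file that is not included here, so the following is a minimal, self-contained reconstruction of the same idea; the type names are illustrative:

```
public interface ISomeInterface
{
    // An interface indexer: the accessors have no body and no modifiers.
    int this[int index] { get; set; }
}

public class IndexerClass : ISomeInterface
{
    private readonly int[] values = new int[100];

    // Implicit implementation of the interface indexer.
    public int this[int index]
    {
        get { return values[index]; }
        set { values[index] = value; }
    }
}

class Program
{
    static void Main()
    {
        ISomeInterface test = new IndexerClass();

        // Use the indexer through the interface reference.
        for (int i = 0; i < 10; i++)
        {
            test[i] = i * 10;
        }

        System.Console.WriteLine(test[5]); // Output: 50
    }
}
```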
In the preceding example, you could use an explicit interface member implementation by using the fully qualified name of the interface member. For example:

```
string ISomeInterface.this[int index]
{
    // get and set accessors go here
}
```

However, the fully qualified name is only needed to avoid ambiguity when the class implements more than one interface with the same indexer signature. For example, if an `Employee` class is implementing the interface `ICitizen` and the interface `IEmployee`, and both interfaces have the same indexer signature, the explicit interface member implementation is necessary. That is, the following indexer declaration:

```
string IEmployee.this[int index]
{
    // get and set accessors go here
}
```

implements the indexer on the `IEmployee` interface, while the following declaration:

```
string ICitizen.this[int index]
{
    // get and set accessors go here
}
```

implements the indexer on the `ICitizen` interface.
## <a name="see-also"></a>另请参阅
[C# 编程指南](../../../csharp/programming-guide/index.md)
[索引器](../../../csharp/programming-guide/indexers/index.md)
[属性](../../../csharp/programming-guide/classes-and-structs/properties.md)
[接口](../../../csharp/programming-guide/interfaces/index.md)
| 23.155556 | 159 | 0.691459 | yue_Hant | 0.345799 |
4707c57036d7b33b1a28cfcc275efb89a6eef59d | 4,498 | md | Markdown | content/doctor-who/hartnell.md | alexander-heimbuch/utterson-example | 7654b49dfbc477495af5a161018a7fd150e8569d | [
"MIT"
] | null | null | null | content/doctor-who/hartnell.md | alexander-heimbuch/utterson-example | 7654b49dfbc477495af5a161018a7fd150e8569d | [
"MIT"
] | 1 | 2018-02-09T15:03:08.000Z | 2018-02-09T15:03:08.000Z | content/doctor-who/hartnell.md | alexander-heimbuch/utterson-example | 7654b49dfbc477495af5a161018a7fd150e8569d | [
"MIT"
] | 2 | 2015-11-02T07:17:19.000Z | 2015-11-02T07:33:24.000Z | <!--
author: William Hartnell
cover: static/doctor-who.png
publish: 1963-05-10
-->
They're not aliens, they're Earth…liens!
========================================
It's a fez. I wear a fez now. Fezes are cool. Sorry, checking all the water in this area; there's an escaped fish. Stop talking, brain thinking. Hush. It's art! A statement on modern society, 'Oh Ain't Modern Society Awful?'!
You know how I sometimes have really brilliant ideas? __The way I see it, every life is a pile of good things and bad things.__ *…hey.* …the good things don't always soften the bad things; but vice-versa the bad things don't necessarily spoil the good things and make them unimportant.
## No… It's a thing; it's like a plan, but with more greatness.
Did I mention we have comfy chairs? I hate yogurt. It's just stuff with bits in. Annihilate? No. No violence. I won't stand for it. Not now, not ever, do you understand me?! I'm the Doctor, the Oncoming Storm - and you basically meant beat them in a football match, didn't you?
1. It's a fez. I wear a fez now. Fezes are cool.
2. You know how I sometimes have really brilliant ideas?
3. You know how I sometimes have really brilliant ideas?
### Sorry, checking all the water in this area; there's an escaped fish.
No… It's a thing; it's like a plan, but with more greatness. I'm the Doctor. Well, they call me the Doctor. I don't know why. I call me the Doctor too. I still don't know why. I'm the Doctor, I'm worse than everyone's aunt. *catches himself* And that is not how I'm introducing myself.
* You know how I sometimes have really brilliant ideas?
* The way I see it, every life is a pile of good things and bad things.…hey.…the good things don't always soften the bad things; but vice-versa the bad things don't necessarily spoil the good things and make them unimportant.
* No… It's a thing; it's like a plan, but with more greatness.
It's a fez. I wear a fez now. Fezes are cool. They're not aliens, they're Earth…liens! It's a fez. I wear a fez now. Fezes are cool. It's art! A statement on modern society, 'Oh Ain't Modern Society Awful?'!
You hate me; you want to kill me! Well, go on! Kill me! KILL ME! You know how I sometimes have really brilliant ideas? You know how I sometimes have really brilliant ideas? Heh-haa! Super squeaky bum time!
All I've got to do is pass as an ordinary human being. Simple. What could possibly go wrong? You know how I sometimes have really brilliant ideas? Sorry, checking all the water in this area; there's an escaped fish.
You know how I sometimes have really brilliant ideas? You've swallowed a planet! Sorry, checking all the water in this area; there's an escaped fish. Saving the world with meals on wheels. Saving the world with meals on wheels.
All I've got to do is pass as an ordinary human being. Simple. What could possibly go wrong? Sorry, checking all the water in this area; there's an escaped fish. I hate yogurt. It's just stuff with bits in.
Did I mention we have comfy chairs? Stop talking, brain thinking. Hush. It's art! A statement on modern society, 'Oh Ain't Modern Society Awful?'! No, I'll fix it. I'm good at fixing rot. Call me the Rotmeister. No, I'm the Doctor. Don't call me the Rotmeister.
I'm the Doctor, I'm worse than everyone's aunt. *catches himself* And that is not how I'm introducing myself. Did I mention we have comfy chairs? I am the last of my species, and I know how that weighs on the heart so don't lie to me!
I am the last of my species, and I know how that weighs on the heart so don't lie to me! Father Christmas. Santa Claus. Or as I've always known him: Jeff. You know how I sometimes have really brilliant ideas?
It's a fez. I wear a fez now. Fezes are cool. Annihilate? No. No violence. I won't stand for it. Not now, not ever, do you understand me?! I'm the Doctor, the Oncoming Storm - and you basically meant beat them in a football match, didn't you?
You know when grown-ups tell you 'everything's going to be fine' and you think they're probably lying to make you feel better? I'm nobody's taxi service; I'm not gonna be there to catch you every time you feel like jumping out of a spaceship.
You know how I sometimes have really brilliant ideas? Did I mention we have comfy chairs? I am the Doctor, and you are the Daleks! Annihilate? No. No violence. I won't stand for it. Not now, not ever, do you understand me?! I'm the Doctor, the Oncoming Storm - and you basically meant beat them in a football match, didn't you?
| 86.5 | 327 | 0.740774 | eng_Latn | 0.999113 |
4707f1b17725e7a9d41649478c07aae6988981c8 | 5,300 | md | Markdown | README.md | achud/awsmobile-cli | c754dd3c3927f1eeb879c8779a222695e2d72335 | [
"Apache-2.0"
] | null | null | null | README.md | achud/awsmobile-cli | c754dd3c3927f1eeb879c8779a222695e2d72335 | [
"Apache-2.0"
] | 1 | 2017-12-24T16:29:29.000Z | 2017-12-24T16:29:29.000Z | README.md | mlabieniec/awsmobile-cli | 64326a9bca1fef33d994599a9d602439ae0fbb82 | [
"Apache-2.0"
] | null | null | null | # awsmobile-CLI
<a href="https://nodei.co/npm/awsmobile-cli/">
<img src="https://nodei.co/npm/awsmobile-cli.svg?downloads=true&downloadRank=true&stars=true">
</a>
<p>
<a href="https://travis-ci.org/aws/awsmobile-cli">
<img src="https://travis-ci.org/aws/awsmobile-cli.svg?branch=master" alt="build:started">
</a>
<a href="https://codecov.io/gh/aws/awsmobile-cli">
<img src="https://codecov.io/gh/aws/awsmobile-cli/branch/master/graph/badge.svg" />
</a>
</p>
The awsmobile-CLI, built on top of AWS Mobile Hub, provides a command line interface for frontend JavaScript developers to seamlessly enable and configure AWS services into their apps. With minimal configuration, you can start using all of the functionality provided by the AWS Mobile Hub from your favorite terminal program.
* [Installation](#installation)
* [Configuration](#configuration)
* [Getting Started](#getting-started)
* [Commands Summary](#commands-summary)
* [Usage](#usage)
## Installation
The easiest way to install is using npm or yarn
```
npm install -g awsmobile-cli
or
yarn global add awsmobile-cli
```
## Configuration
* [Sign up for the AWS Free Tier](https://aws.amazon.com/free/) to learn and prototype at little or no cost.
* [Enable AWS Mobile Hub Service Role](https://console.aws.amazon.com/mobilehub/home?region=us-east-1#/activaterole/) to let AWS Mobile use your AWS credentials to create and manage your AWS services.
* Configure the CLI with your AWS credentials
```
awsmobile configure
```
Check [here](#awsmobile-configure) for more details about configuration
## Getting Started
To create a new Mobile Hub project and pull the project configuration into your app, run the following command on your terminal.
```
cd <your-app>
awsmobile init
```
Running this command enables your Mobile Hub project with two default features: (1) Analytics, for receiving the analytics sent from your app and (2) Hosting and Streaming for easily hosting your app in the cloud. The command also adds the [AWS Amplify](https://github.com/aws/aws-amplify#aws-amplify) JavaScript library to your app so you can easily integrate Analytics, as well as other features provided by AWS. These features can easily be enabled using the awsmobile cli commands below.
## Commands Summary
The current set of commands supported by the awsmobile CLI are
| Command | Description |
| --- | --- |
| awsmobile start | Starts an awsmobilejs project using one of our starter templates, sets up the backend mobile hub project in the cloud and pulls the aws-exports.js file |
| awsmobile init | Initializes the current project with awsmobilejs features, sets up the backend mobile hub project in the cloud and pulls the aws-exports.js file |
| awsmobile configure [aws] | Configures the aws access credentials and aws region for awsmobile-cli |
| awsmobile configure project | Configures the attributes of your project for awsmobile-cli |
| awsmobile pull | Retrieves the latest details of the backend Mobile Hub project, such as the access information file aws-exports.js |
| awsmobile push | Updates the backend Mobile Hub project with the latest local developments |
| awsmobile publish | Executes awsmobile push, then builds and publishes client-side application to S3 and Cloud Front |
| awsmobile run | Executes awsmobile push, then executes the project's start command to test run the client-side application |
| awsmobile console | Opens the web console of the backend Mobile Hub project |
| awsmobile features | Shows available Mobile Hub project features, and allows them to be enabled/disabled |
| awsmobile \<feature\> enable [--prompt] | Enables the feature with the defaults configuration, and --prompt for initial configuration settings |
| awsmobile \<feature\> disable | Disables the feature |
| awsmobile \<feature\> configure | Configures the definition of the objects in the feature |
| awsmobile cloud-api invoke \<apiname\> \<method\> \<path\> [init] | Invokes the API for testing locally. This helps quickly test unsigned APIs in your local environment |
| awsmobile delete | Deletes the Mobile hub project |
| awsmobile help [cmd] | Displays help for [cmd] |
Supported Features:
1. user-signin (Cognito)
2. user-files (AWS S3)
3. cloud-api (CloudLogic)
4. database (Dynamo DB)
5. analytics (Pinpoint)
6. hosting (S3 and CloudFront Distribution)
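
For illustration, a typical flow that strings these commands together might look like the following; the feature names come from the list above, while `sampleCloudApi` and `/items` are placeholders rather than values the CLI generates for you:

```
cd <your-app>
awsmobile init
awsmobile user-files enable
awsmobile cloud-api enable --prompt
awsmobile push
awsmobile cloud-api invoke sampleCloudApi get /items
```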
## awsmobile configure
```
awsmobile configure
```
#### There are two levels in the aws credential and region configurations for the awsmobile-cli
- general
- per project
When you run `awsmobile configure` outside of a valid awsmobilejs project, it sets the general configuration. The general configuration is applied when you run `awsmobile init` or `awsmobile start` command. And its values are copied as the initial per project configuration for the newly initialized project
When you run `awsmobile configure` inside a valid awsmobilejs project, it sets the configuration for this project only. It does NOT change the general configuration or the configuration of other projects.
## Usage
Go to the [AWS Mobile Docs](http://docs.aws.amazon.com/aws-mobile/latest/developerguide/web-getting-started.html) to learn more about the usage of the awsmobile-cli and some sample codes to get your app up and running.
| 47.321429 | 491 | 0.764906 | eng_Latn | 0.979192 |
47081f9ab092b6f8cd985a51ec790c401b732c57 | 42 | md | Markdown | README.md | A1129434577/LBPickerViewController | 6f354fd3584ca896344dd9c9de007079ada49dbf | [
"MIT"
] | null | null | null | README.md | A1129434577/LBPickerViewController | 6f354fd3584ca896344dd9c9de007079ada49dbf | [
"MIT"
] | null | null | null | README.md | A1129434577/LBPickerViewController | 6f354fd3584ca896344dd9c9de007079ada49dbf | [
"MIT"
] | null | null | null | # LBPickerViewController
一个简单的快速可集成单项选择器。
| 14 | 24 | 0.880952 | zho_Hans | 0.26435 |
4709889971bbdc9b7168b5ba0ef2f4d1b8001791 | 14,024 | md | Markdown | articles/storsimple/storsimple-update1-release-notes.md | zhenjiao-ms/azure-docs | c0a229227c1651301b5cd978c3d248c2e22fbb66 | [
"CC-BY-4.0",
"MIT"
] | 1 | 2022-03-22T15:03:27.000Z | 2022-03-22T15:03:27.000Z | articles/storsimple/storsimple-update1-release-notes.md | zhenjiao-ms/azure-docs | c0a229227c1651301b5cd978c3d248c2e22fbb66 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/storsimple/storsimple-update1-release-notes.md | zhenjiao-ms/azure-docs | c0a229227c1651301b5cd978c3d248c2e22fbb66 | [
"CC-BY-4.0",
"MIT"
] | 2 | 2021-12-07T06:43:08.000Z | 2021-12-19T02:20:20.000Z | ---
title: StorSimple 8000 Series Update 1.2 release notes | Microsoft Docs
description: Describes the new features, issues, and workarounds for StorSimple 8000 Series Update 1.2.
services: storsimple
documentationcenter: NA
author: alkohli
manager: timlt
editor: ''
ms.assetid: 6c9aae87-6f77-44b8-b7fa-ebbdc9d8517c
ms.service: storsimple
ms.devlang: NA
ms.topic: article
ms.tgt_pltfrm: NA
ms.workload: TBD
ms.date: 02/27/2017
ms.author: alkohli
ms.custom: H1Hack27Feb2017
---
# Update 1.2 release notes for your StorSimple 8000 series device
## Overview
The following release notes describe the new features and identify the critical open issues for StorSimple 8000 Series Update 1.2. They also contain a list of the StorSimple software, driver and disk firmware updates included in this release.
Update 1.2 can be applied to any StorSimple device running Release (GA), Update 0.1, Update 0.2, or Update 0.3 software. Update 1.2 is not available if your device is running Update 1 or Update 1.1. If your device is running Release (GA), please [contact Microsoft Support](storsimple-contact-microsoft-support.md) to assist you with installing this update.
The following table lists the device software versions corresponding to Updates 1, 1.1, and 1.2.
| If running update … | this is your device software version. |
| --- | --- |
| Update 1.2 |6.3.9600.17584 |
| Update 1.1 |6.3.9600.17521 |
| Update 1.0 |6.3.9600.17491 |
Please review the information contained in the release notes before you deploy the update in your StorSimple solution. For more information, see how to [install Update 1.2 on your StorSimple device](storsimple-install-update-1.md).
> [!IMPORTANT]
> * It takes approximately 5-10 hours to install this update (including the Windows Updates).
> * Update 1.2 has software, LSI driver and disk firmware updates. To install, follow the instructions in [install Update 1.2 on your StorSimple device](storsimple-install-update-1.md).
> * For new releases, you may not see updates immediately because we do a phased rollout of the updates. Scan for updates in a few days again as these will become available soon.
>
>
## What's new in Update 1.2
These features were first released with Update 1 that was made available to a limited set of users. With the Update 1.2 release, most of the StorSimple users would see the following new features and improvements:
* **Migration from 5000-7000 series to 8000 series devices** – This release introduces a new migration feature that allows the StorSimple 5000-7000 series appliance users to migrate their data to a StorSimple 8000 series physical appliance or a virtual appliance. The migration feature has two key value propositions:
* **Business continuity**, by enabling migration of existing data on 5000-7000 series appliances to 8000 series appliances.
* **Improved feature offerings of the 8000 series appliances**, such as efficient centralized management of multiple appliances through StorSimple Manager service, better class of hardware and updated firmware, virtual appliances, data mobility, and features in the future roadmap.
Refer to the [migration guide](http://www.microsoft.com/download/details.aspx?id=47322) for details on how to migrate a StorSimple 5000-7000 series to an 8000 series device.
* **Availability in the Azure Government Portal** – StorSimple is now available in the Azure Government portal. See how to [deploy a StorSimple device in the Azure Government Portal](storsimple-deployment-walkthrough-gov.md).
* **Support for other cloud service providers** – The other cloud service providers supported are Amazon S3, Amazon S3 with RRS, HP, and OpenStack (beta).
* **Update to latest Storage APIs** – With this release, StorSimple has been updated to the latest Azure Storage service APIs. StorSimple 8000 series devices that are running pre-Update 1 software versions (Release, 0.1, 0.2, and 0.3) are using versions of the Azure Storage Service APIs older than July 17, 2009. As stated in the updated [announcement about removal of Storage service versions](http://blogs.msdn.com/b/windowsazurestorage/archive/2015/10/19/microsoft-azure-storage-service-version-removal-update-extension-to-2016.aspx), by August 1, 2016, these APIs will be deprecated. It is imperative that you apply the StorSimple 8000 Series Update 1 prior to August 1, 2016. If you fail to do so, StorSimple devices will stop functioning correctly.
* **Support for Zone Redundant Storage (ZRS)** – With the upgrade to the latest version of the Storage APIs, the StorSimple 8000 series will support Zone Redundant Storage (ZRS) in addition to Locally Redundant Storage (LRS) and Geo-redundant Storage (GRS). Refer to this [article on Azure Storage redundancy options](../storage/storage-redundancy.md) for ZRS details.
* **Enhanced initial deployment and update experience** – In this release, the installation and update processes have been enhanced. The installation through the setup wizard is improved to provide feedback to the user if the network configuration and firewall settings are incorrect. Additional diagnostic cmdlets have been provided to help you with troubleshooting networking of the device. See the [troubleshooting deployment article](storsimple-troubleshoot-deployment.md) for more information about the new diagnostic cmdlets used for troubleshooting.
## Issues fixed in Update 1.2
The following table provides a summary of issues that were fixed in Updates 1.2, 1.1, and 1.
| No. | Feature | Issue | Fixed in Update | Applies to physical device | Applies to virtual device |
| --- | --- | --- | --- | --- | --- |
| 1 |Windows PowerShell for StorSimple |When a user remotely accessed the StorSimple device by using Windows PowerShell for StorSimple and then started the setup wizard, a crash occurred as soon as Data 0 IP was input. This bug is now fixed in Update 1. |Update 1 |Yes |Yes |
| 2 |Factory reset |In some instances, when you performed a factory reset, the StorSimple device became stuck and displayed this message: **Reset to factory is in progress (phase 8)**. This happened if you pressed CTRL+C while the cmdlet was in progress. This bug is now fixed. |Update 1 |Yes |No |
| 3 |Factory reset |After a failed dual controller factory reset, you were allowed to proceed with device registration. This resulted in an unsupported system configuration. In Update 1, an error message is shown and registration is blocked on a device that has a failed factory reset. |Update 1 |Yes |No |
| 4 |Factory reset |In some instances, false positive mismatch alerts were raised. Incorrect mismatch alerts will no longer be generated on devices running Update 1. |Update 1 |Yes |No |
| 5 |Factory reset |If a factory reset was interrupted prior to completion, the device entered recovery mode and did not allow you to access Windows PowerShell for StorSimple. This bug is now fixed. |Update 1 |Yes |No |
| 6 |Disaster recovery |A disaster recovery (DR) bug was fixed wherein DR would fail during the discovery of backups on the target device. |Update 1 |Yes |Yes |
| 7 |Monitoring LEDs |In certain instances, monitoring LEDs at the back of appliance did not indicate correct status. The blue LED was turned off. DATA 0 and DATA 1 LEDs were flashing even when these interfaces were not configured. The issue has been fixed and monitoring LEDs now indicate the correct status. |Update 1 |Yes |No |
| 8 |Monitoring LEDs |In certain instances, after applying Update 1, the blue light on the active controller turned off thereby making it hard to identify the active controller. This issue has been fixed in this patch release. |Update 1.2 |Yes |No |
| 9 |Network interfaces |In previous versions, a StorSimple device configured with a non-routable gateway could go offline. In this release, the routing metric for Data 0 has been made the lowest; therefore, even if other network interfaces are cloud-enabled, all the cloud traffic from the device will be routed via Data 0. |Update 1 |Yes |Yes |
| 10 |Backups |A bug in Update 1 which caused backups to fail after 24 days has been fixed in the patch release Update 1.1. |Update 1.1 |Yes |Yes |
| 11 |Backups |A bug in previous versions resulted in poor performance for cloud snapshots with low change rates. This bug has been fixed in this patch release. |Update 1.2 |Yes |Yes |
| 12 |Updates |A bug in Update 1 that reported a failed upgrade and caused the controllers to go into Recovery mode, has been fixed in this patch release. |Update 1.2 |Yes |Yes |
## Known issues in Update 1.2
The following table provides a summary of known issues in this release.
| No. | Feature | Issue | Comments/workaround | Applies to physical device | Applies to virtual device |
| --- | --- | --- | --- | --- | --- |
| 1 |Disk quorum |In rare instances, if the majority of disks in the EBOD enclosure of an 8600 device are disconnected resulting in no disk quorum, then the storage pool will be offline. It will stay offline even if the disks are reconnected. |You will need to reboot the device. If the issue persists, please contact Microsoft Support for next steps. |Yes |No |
| 2 |Incorrect controller ID |When a controller replacement is performed, controller 0 may show up as controller 1. During controller replacement, when the image is loaded from the peer node, the controller ID can show up initially as the peer controller’s ID. In rare instances, this behavior may also be seen after a system reboot. |No user action is required. This situation will resolve itself after the controller replacement is complete. |Yes |No |
| 3 |Storage accounts |Using the Storage service to delete the storage account is an unsupported scenario. This will lead to a situation in which user data cannot be retrieved. | |Yes |Yes |
| 4 |Device failover |Multiple failovers of a volume container from the same source device to different target devices is not supported. Device failover from a single dead device to multiple devices will make the volume containers on the first failed over device lose data ownership. After such a failover, these volume containers will appear or behave differently when you view them in the Azure classic portal. | |Yes |No |
| 5 |Installation |During StorSimple Adapter for SharePoint installation, you need to provide a device IP in order for the install to finish successfully. | |Yes |No |
| 6 |Web proxy |If your web proxy configuration has HTTPS as the specified protocol, then your device-to-service communication will be affected and the device will go offline. Support packages will also be generated in the process, consuming significant resources on your device. |Make sure that the web proxy URL has HTTP as the specified protocol. For more information, go to [Configure web proxy for your device](storsimple-configure-web-proxy.md). |Yes |No |
| 7 |Web proxy |If you configure and enable web proxy on a registered device, then you will need to restart the active controller on your device. | |Yes |No |
| 8 |High cloud latency and high I/O workload |When your StorSimple device encounters a combination of very high cloud latencies (order of seconds) and high I/O workload, the device volumes go into a degraded state and the I/Os may fail with a "device not ready" error. |You will need to manually reboot the device controllers or perform a device failover to recover from this situation. |Yes |No |
| 9 |Azure PowerShell |When you use the StorSimple cmdlet **Get-AzureStorSimpleStorageAccountCredential | Select-Object -First 1 -Wait** to select the first object so that you can create a new **VolumeContainer** object, the cmdlet returns all the objects. |Wrap the cmdlet in parentheses as follows: **(Get-Azure-StorSimpleStorageAccountCredential) | Select-Object -First 1 -Wait** |Yes |Yes |
| 10 |Migration |When multiple volume containers are passed for migration, the ETA for latest backup is accurate only for the first volume container. Additionally, parallel migration will start after the first 4 backups in the first volume container are migrated. |We recommend that you migrate one volume container at a time. |Yes |No |
| 11 |Migration |After the restore, volumes are not added to the backup policy or the virtual disk group. |You will need to add these volumes to a backup policy in order to create backups. |Yes |Yes |
| 12 |Migration |After the migration is complete, the 5000/7000 series device must not access the migrated data containers. |We recommend that you delete the migrated data containers after the migration is complete and committed. |Yes |No |
| 13 |Clone and DR |A StorSimple device running Update 1 cannot clone or perform Disaster Recovery to a device running pre-update 1 software. |You will need to update the target device to Update 1 to allow these operations |Yes |Yes |
| 14 |Migration |Configuration backup for migration may fail on a 5000-7000 series device when there are volume groups with no associated volumes. |Delete all the empty volume groups with no associated volumes and then retry the configuration backup. |Yes |No |
## Physical device updates in Update 1.2
If patch update 1.2 is applied to a physical device (running versions prior to Update 1), the software version will change to 6.3.9600.17584.
## Controller and firmware updates in Update 1.2
This release updates the driver and the disk firmware on your device.
* For more information about the SAS controller update, see [Update 1 for LSI SAS controllers in Microsoft Azure StorSimple Appliance](https://support.microsoft.com/kb/3043005).
* For more information about the disk firmware update, see [Disk firmware Update 1 for Microsoft Azure StorSimple Appliance](https://support.microsoft.com/kb/3063416).
## Virtual device updates in Update 1.2
This update cannot be applied to the virtual device. New virtual devices will need to be created.
## Next steps
* [Install Update 1.2 on your device](storsimple-install-update-1.md).
| 124.106195 | 755 | 0.775314 | eng_Latn | 0.995987 |
4709ac7d359500a8934aaa4999a589ad07778c05 | 2,463 | md | Markdown | content/blog/codeplus-2018-4.md | PerfectPan/blog | ec85341c936f2447cd6bfac88418be0380861556 | [
"MIT"
] | null | null | null | content/blog/codeplus-2018-4.md | PerfectPan/blog | ec85341c936f2447cd6bfac88418be0380861556 | [
"MIT"
] | 6 | 2020-05-08T14:38:29.000Z | 2022-02-26T02:36:32.000Z | content/blog/codeplus-2018-4.md | PerfectPan/blog | ec85341c936f2447cd6bfac88418be0380861556 | [
"MIT"
] | null | null | null | ---
date: 2018-05-25
title: 「CodePlus 2018 4 月赛」最短路
description: 「CodePlus 2018 4 月赛」最短路
tag:
- Shortest Path
---
**Problem link**: [https://loj.ac/problem/6354](https://loj.ac/problem/6354)

**Problem statement**: omitted.

**Approach**: Building all $n^2$ edges by brute force blows up immediately, so the edge construction has to be optimized. The XOR operation suggests thinking in binary: note that $10101->10000$ can be obtained entirely through $10101->10001->10000$, which means the direct edge $10101->10000$ is unnecessary. So for every value $x$ we only enumerate its binary bits $i$; whenever bit $i$ is $1$, we add an undirected edge of weight $2^i\times c$ between $x$ and $x\oplus 2^i$. The cost from $x$ to any other value can then be assembled from such decompositions, and we simply run a shortest-path algorithm (Dijkstra). The time complexity is $O((m+n\log n)\log n)$.
```cpp
#include <bits/stdc++.h>
#define PB push_back
#define MP make_pair
using namespace std;
typedef long long ll;
template<typename T>
inline T read(T&x){
x=0;int f=0;char ch=getchar();
while (ch<'0'||ch>'9') f|=(ch=='-'),ch=getchar();
while (ch>='0'&&ch<='9') x=x*10+ch-'0',ch=getchar();
return x=f?-x:x;
}
const int maxn=1e5+10;
const int INF=2000000000;
int n,m,i,j,c,u,v,w,s,t;
struct Edge{
int from,to,dist;
};
struct HeapNode{
int d,u;
bool operator <(const HeapNode& rhs)const{
return d>rhs.d;
}
};
struct Dijkstra{
int n,m;
vector<Edge>edges;
vector<int>G[maxn];
bool done[maxn];
int d[maxn];
int p[maxn];
void init(int n){
this->n=n;
for (int i=0;i<=n;i++) G[i].clear();
edges.clear();
}
void AddEdge(int from,int to,int dist){
edges.push_back((Edge){from,to,dist});
m=edges.size();
G[from].push_back(m-1);
}
void dijkstra(int s){
priority_queue<HeapNode>Q;
for (int i=0;i<=n;i++) d[i]=INF;
d[s]=0;
memset(done,false,sizeof(done));
Q.push((HeapNode){0,s});
while (!Q.empty()){
HeapNode x=Q.top();Q.pop();
int u=x.u;
if (done[u]) continue;
done[u]=true;
for (int i=0;i<(int)G[u].size();i++){
Edge &e=edges[G[u][i]];
if (d[e.to]>d[u]+e.dist){
d[e.to]=d[u]+e.dist;
p[e.to]=G[u][i];
Q.push((HeapNode){d[e.to],e.to});
}
}
}
}
}solver;
int main(){
read(n),read(m),read(c);
solver.init(n);
for (i=1;i<=m;i++){
read(u),read(v),read(w);
solver.AddEdge(u,v,w);
}
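    // Auxiliary XOR edges: for every set bit j of node i, connect i and i^(1<<j)
    // in both directions with weight (1<<j)*c, so a jump between any two values
    // decomposes into single-bit steps (see the explanation above).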
for (i=1;i<=n;i++){
for (j=17;j>=0;j--){
if (i&(1<<j)){
solver.AddEdge(i,i^(1<<j),(1<<j)*c);
solver.AddEdge(i^(1<<j),i,(1<<j)*c);
}
}
}
read(s),read(t);
solver.dijkstra(s);
printf("%d\n",solver.d[t]);
return 0;
}
```
| 23.912621 | 287 | 0.52497 | yue_Hant | 0.23465 |
470a2d5284caf55eb53e3faf15aabc04f8ca25df | 7,180 | md | Markdown | _posts/2019-02-03-Download-pressure-cooker-manuals.md | Ozie-Ottman/11 | 1005fa6184c08c4e1a3030e5423d26beae92c3c6 | [
"MIT"
] | null | null | null | _posts/2019-02-03-Download-pressure-cooker-manuals.md | Ozie-Ottman/11 | 1005fa6184c08c4e1a3030e5423d26beae92c3c6 | [
"MIT"
] | null | null | null | _posts/2019-02-03-Download-pressure-cooker-manuals.md | Ozie-Ottman/11 | 1005fa6184c08c4e1a3030e5423d26beae92c3c6 | [
"MIT"
] | null | null | null | ---
layout: post
comments: true
categories: Other
---
## Download Pressure cooker manuals book
In the morning, but they withdrew from their encroachments on peopled islands and peaceful the grey man doesn't have his hands on it, to tie the drawstrings of the fill his thoughts. Making those spells had been a different matter, SIROVATSKOJ. Whatever the reason, only inches from her feet: Leilani's leg brace. " A few days later she came down to "It's chilly and foggy and late, at last induced an pressure cooker manuals Sieversia glacialis Pressure cooker manuals You must hate this. resort to violence so immediately if they weren't certain that these Bureau "Not really," he said, this is my friend and I deposited with him a deposit, In the kitchen. Her demeanor intrigued Tom, and was again Grace headed toward the edge. "So if the Director had already suspended Congress at that time, "What did it say, then, the same primitive stem as the Greenlanders, but instead of feeling offended I smiled, and let go of it. Fm sorry if that sounds like a callous way of putting it, which inhabits the Her expression mercurially alters to sadness. " The grey man looked back over his shoulder, a large bear came and closely examined the contents of a array of equipment including outdated equipment. "The dream has crumbled away, would you "Often, pressure cooker manuals of wary observance. For what purpose these last Although Zedd counsels living in the future, but also ever receding, and perhaps back walked away. The herald of good news my hearing shall delight, which pressure cooker manuals not conciliation with high suspicion. A nice face, but right for the weather. They still felt like Iвd washed them in rubber cement. But what you said is right I have no hankering to take charge of anything. The young walrus pressure cooker manuals follows its mother, America, Tom Vanadium together in large herds, by all that's holy and some that's not. "That's part of it," Pernak replied, he rang the bell and waited. 334, but it's what I think! "Delicious. Count me as a master again, to us communicate. This was a haunted night, "Please?" important as galaxies. Between the lapels of the coat: a white shirt with a wing collar, see what you can pressure cooker manuals, gasping. The unrevealed half of her face, and [when he had made an end of his preparations]. sinister presence, and slammed it quickly: "Where. answered, I guess, cold light. Korea, following close after the first, he made with the heel a "You sure, and she climbed it, and she'd found relief "Cromwell knows everything," Amy declared from her perch, half idiotic, in which I penetrated with the steamer _Ymer_. He looked upstream at her, the captives looked around in surprise as muffled thuds sounded just outside the door. "Anyhow, none of these women satisfied him as profoundly as Naomi had satisfied him. You won't get any takers, she pressure cooker manuals desperate to know. How could he have coupled with this march gave pressure cooker manuals a feeling, and who was seldom called by the crew captain "Your last chance to reconsider," Sterm said, an attitude that did not bode well. 
Finally, I required my brother's wife of herself and she refused; whereupon despite and folly (7) prompted me and I lied against her and accused her to the townsfolk of adultery; so they stoned her and pressure cooker manuals her unjustly pressure cooker manuals unrighteously; and this is the issue of unright and falsehood and of the slaying of the [innocent] soul, had subsequently retired further, Sterm asked, never sex but prevents conversation and understanding, and PHILIPPOV the conservator. I'd be beyond amazed if our births were ever registered anywhere. Address all stretched in all directions, "Yes, Dr, a rascally fun-loving creature that lives by the simple rules of wild things. (122) Quoth the king's daughter to him, pressure cooker manuals sun) _tiskis_, Sound, thou hast done justice (85) and wrought equitably? The pain came and went, Amst, to rub their faces in the fact of your success, which brought her to a high cliff of emotion so steep that seldom spoke, or both, so that I drifted off with the knowledge mysteries are present to be seen and understood in every incident in our "Everybody does. " Each of the twins slings a purse over her right shoulder. Finally, Trevor Kingsley had a degenerate sense of humor, free therefore, its mouth parts busy, especially for an Afro-American. Polar regions? I was puzzled by this blackout, ii, instead of asking me?" Amos wanted to know. The dog continues to paw at the vehicle. This Thomas Vanadium, everything, rather than to suffer that unrelieved hollowness, expeditions to Iceland and Newfoundland. Long after the invention of the True Runes, and I thought you might want to get one for Wally. It's on the lakes. The only thing unusual distribute copies of a Project Gutenberg-tm work in a format other than Irgunnuk. The only art in these rooms was a single sculpture. Sometimes, i, whilst anon the channel grew straiter and anon widened out; and whenas my breast was pressure cooker manuals and I was confounded at my case, ready to hit the road again. I betrayed everything. "So much trouble. Eight days would entail wetted with brandy, and he regrets being so insensitive as these cases the under part of the egg during hatching could never be "It will be the greatest happiness of myself and of my nearest and dearest friend. "Eyelash" in the True Speech is siasa, however, brought back to him one thing in addition to all the sweet lubricious images of Seraphim naked, and moments later footsteps and voices sounded from along the wider corridor leading off to the right. " the time of our visit the fishing was over for the season and the again. Schurr sent them to pressure cooker manuals hospital for further tests. sorrow. But here pressure cooker manuals were everywhere overgrown with more or pressure cooker manuals inconsiderable remains of bones are commonly to be found, pressure cooker manuals the thumb against one of the dried drips on the floor! [Footnote 393: Further particulars on this point pressure cooker manuals given in the The Man Who Had No Idea Saxifraga stellaris L. Her wallet had been there, propped up with bowls. [195] lights in the sky, then put down his fork and leaned across the table, and found he could endure the music if he was dancing to it and talking and laughing while he danced, they'll heal her mind and her body both," he predicted, flung it at Angel. " So he went and buried it and returned and told me. and encircled his thighs, however, the Company would have to come across pronto, sleeps. 
" Angel looked up, "-called himself King Obadiah. Such a terrain continued direction to 33 deg. For magic. " adopt, you should be reading something enlightening. In 1637 he came to the western mouth-arm of the Lena, pressure cooker manuals she said, which was proof that this lightning-fast Nolan gestured quickly, Chironians pay it direct instead of indirectly through symbols. She read aloud the name of the exhibition, into the motor home. And if Cain didn't do the hunting first! | 797.777778 | 7,083 | 0.792479 | eng_Latn | 0.999946 |
470a3a56498fbace3d0cfae9fcda865ef72c3b05 | 1,621 | md | Markdown | articles/Debugging-CNTK-source-code-in-Visual-Studio.md | t0b4cc0/cognitive-toolkit-docs | 99840c7a8e87b4395b4fca97e36fbd65b8014f5f | [
"CC-BY-4.0",
"MIT"
] | 18 | 2017-08-28T08:28:00.000Z | 2022-03-24T10:48:26.000Z | articles/Debugging-CNTK-source-code-in-Visual-Studio.md | t0b4cc0/cognitive-toolkit-docs | 99840c7a8e87b4395b4fca97e36fbd65b8014f5f | [
"CC-BY-4.0",
"MIT"
] | 93 | 2017-08-07T09:07:12.000Z | 2022-02-16T01:19:54.000Z | articles/Debugging-CNTK-source-code-in-Visual-Studio.md | t0b4cc0/cognitive-toolkit-docs | 99840c7a8e87b4395b4fca97e36fbd65b8014f5f | [
"CC-BY-4.0",
"MIT"
] | 77 | 2017-08-11T08:58:11.000Z | 2021-11-30T14:05:21.000Z | ---
title: Debugging CNTK source code in Visual Studio
author: chrisbasoglu
ms.author: cbasoglu
ms.date: 08/18/2016
ms.prod: cntk
ms.custom: cognitive-toolkit
ms.topic: conceptual
ms.devlang: NA
---
# Debugging CNTK source code in Visual Studio
To debug CNTK's mainline CPU source code, follow the steps below. To additionally debug the **CUDA code for GPUs** in CNTK, follow the steps below first, and then click [here](./Debugging-CNTKs-GPU-source-code-in-Visual-Studio.md) for further steps.
Launch Visual Studio and load the cntk.sln solution.
In the **Solution Explorer**, find the CNTK project and make sure it is the startup project (it should be bolded). If it is not, right-click the project in the **Solution Explorer** and choose **Set as StartUp Project**.
In the **Solution Explorer**, right-click the CNTK project and choose **Properties**. From the **Properties** dialog, click on **Configuration Properties** and then on **Debugging**.
Assuming you have your CNTK source at `C:\src` and you want to debug with config file `lr_bs.cntk` from the tutorial
[HelloWorld-LogisticRegression](https://github.com/Microsoft/CNTK/tree/release/latest/Tutorials/HelloWorld-LogisticRegression), set the
Command Arguments as follows:
`configFile=lr_bs.cntk deviceId=auto makeMode=false`
In addition, set the Working Directory field as follows:
`C:/src/cntk/Tutorials/HelloWorld-LogisticRegression`
If you have your CNTK source somewhere else or you want to debug a different config file, make the appropriate changes.
Set your build target to "Debug".
Build and run.
| 43.810811 | 250 | 0.763726 | eng_Latn | 0.946713 |
470c1572a76453849e47eff08fda5e734192150d | 6,499 | md | Markdown | _posts/2017-02-04-buckle-typings-2.md | pcarleton/pcarleton.github.io | 1792e304362cec4ddb1f407d98cf1c67a36ecacb | [
"MIT"
] | null | null | null | _posts/2017-02-04-buckle-typings-2.md | pcarleton/pcarleton.github.io | 1792e304362cec4ddb1f407d98cf1c67a36ecacb | [
"MIT"
] | null | null | null | _posts/2017-02-04-buckle-typings-2.md | pcarleton/pcarleton.github.io | 1792e304362cec4ddb1f407d98cf1c67a36ecacb | [
"MIT"
] | null | null | null | ---
layout: post
title: "Buckle Typings - Part 2: Walking the AST"
---
In my [last post][1], I laid out the plan to make a converter from TypeScript `.d.ts` type declarations to OCaml external modules usable in BuckleScript. When I left off, I had made a call to the TypeScript compiler and received a `SourceFileObject`. In this post, I'll dig into what is in the `SourceFileObject` and go through typing an Enum.
# What is in the `SourceFileObject`?
In order to get a feel for what is in this object, I fired up the node repl and created one in the same way I have been doing so far:
```
$ node
> var typescript = require("typescript");
> var sfo = typescript.createSourceFile("dummy_path", "declare var foo: number;", 1, false)
```
I noticed one thing right away. There's a flag `isDeclarationFile` which is set to `false`. Setting my "path" to be "dummy.d.ts" instead of "dummy_path" changed this to `true`.
The other things I noticed were that the `nodeCount` was 8. Understanding what 8 nodes come out of this declaration will be a good next step.
# Why not `utop`?
I asked myself at this point, could I use the OCaml repl instead of the node repl? Without thinking too hard about it, I fired up `utop` and loaded my `typescript.ml` module:
```
$ utop
utop # #use "src/typescript.ml";;
...
utop # let sfo = createSourceFile "dummy_path" "declare var foo: number;" 1 false;;
Error: The external function `createSourceFile' is not available
```
This makes sense because we've made external declarations to JavaScript functions. utop lives in a purely OCaml world: it has no knowledge of where to find JavaScript modules, let alone how to call their functions.
Instead of trying to use JavaScript in the OCaml repl, the more straightforward path is to compile OCaml to JS and use that in the node repl.
# Meet the Nodes
I saw a nodeCount of 8, so I wanted to know what those nodes were. When I tried the same call to `createSourceFile` but with the empty string, I got a node count of 2. There is an `endOfFileToken` field which is populated which I assume is one node, and the other node I assume is the "root" aka the `SourceFileObject` itself. In my original declaration, there are 6 nodes, the "kinds" of which are (gotten through `typescript.SyntaxKind[<kind>]`):
* VariableStatement
* DeclareKeyword
* VariableDeclarationList
* VariableDeclaration
* IdentifierObject
* NumberKeyword
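One quick way to dump these kind names yourself is to walk the tree with the compiler's `forEachChild` helper. The snippet below is a throwaway sketch rather than code from the converter (note that the kind lookup reports `Identifier`, whereas the constructor name listed above is `IdentifierObject`):
```
// Sketch: recursively print the SyntaxKind name of every node in the tree.
var typescript = require("typescript");

var sfo = typescript.createSourceFile(
    "dummy.d.ts",
    "declare var foo: number;",
    1,      // language version
    false
);

function printKinds(node, depth) {
    console.log("  ".repeat(depth) + typescript.SyntaxKind[node.kind]);
    typescript.forEachChild(node, function(child) {
        printKinds(child, depth + 1);
    });
}

printKinds(sfo, 0);
```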
When looking at the [TypeScript spec][2] under Ambient Declarations, the names are a little different. The names expected from that list would be:
* AmbientVariableDeclaration
* AmbientBindingList
* AmbientBinding
* BindingIdentifier
* TypeAnnotation
# Encoding an enum
After seeing the tags for a very simple declaration, I realized that an OCaml module for SyntaxKind would be useful in order to branch on what type of node I see. I started doing this manually before catching myself: This was the reason I started this project in the first place!
Things got a little strange as I started thinking about how this project would start consuming its own output before it was complete. I am currently reading Godel Escher Bach and it reminded me of a "strange loop".
I decided to encode the TypeScript enum type as a module with a variant type and some functions for getting names from that variant type. Now technically, this enum type is not the same as creating an interface to the actual "Enum" object in TypeScript, but a variant seemed more user friendly than creating an object with a ton of methods that you would access with `##`. Basically what I was looking to create was this:
```
module EnumType = struct
  type t =
    | EnumVal1
    | EnumVal2
  let getName : t -> string = fun _ -> "" (* stub; the real mapping is built below *)
end
```
Fortunately, enums are encoded as ints in the order they are declared in BuckleScript. This is relying on an implementation detail of BuckleScript, which is generally a no-no, but I decided it was worth it at this point. One issue that arises is that TypeScript allows declaring what number an enum value corresponds to. I briefly tried using the `[@@bs.as]` language extension as explained in the [polymorphic variants section][3], but it did not result in the variant being encoded as different values. I decided to ignore this for now.
The `getName` function is a little tricky because I needed the resulting JavaScript to be `typescript.SyntaxKind[<kind>]`. This looks like a time to use the `[@@bs.get_index]` language extension. However, that extension requires an object as its first argument, so it didn't look like I could do it in a single `external` declaration. I managed to get it to work with 4 declarations:
```
type rawEnum
external enum : rawEnum = "SyntaxKind" [@@bs.module "typescript"] [@@bs.val]
external _getName : rawEnum -> t -> string = "" [@@bs.get_index]
let getName tv = _getName enum tv
```
I declare the type for the raw enum. Next, I bind the actual Enum object to a variable with that type. Then, I declare the external function that takes a raw enum and my type `t` and returns the string value of that enum. Finally, I make the `getName` function call my externally bound function with the externally bound value as its first argument.
# Programmatically Generating the Encoding
Now that I had settled on the encoding, my next step was to generate it with code for the actual SyntaxKind enum. This essentially amounted to building strings that match the encoding I came up with while swapping in values from the actual Nodes. One piece I had to add was proper nesting for indentation. To achieve this, I used the following submodule:
```
module Line = struct
type t = { text: string; level : int}
let nest n v =
{text=v; level=n}
let indent = " "
let toVal : t -> string = fun l ->
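    (* note: string_repeat is not part of the OCaml standard library; it is
       presumably a small helper defined elsewhere in the project source *)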
let spaces = string_repeat indent l.level in
spaces ^ l.text
end
```
This was a pretty crude way of keeping track of the nesting level of individual lines of text. I think handling nesting on a more meaningful layer than line of text would be useful, but this was good enough to get things going for now.
The full code is available on [GitHub][4].
[1]:{% post_url 2017-01-20-buckle-typings %}
[2]:https://github.com/Microsoft/TypeScript/blob/master/doc/spec.md#12.1
[3]:https://bloomberg.github.io/bucklescript/Manual.html#_using_polymorphic_variant_to_model_enums_and_string_types
[4]:https://github.com/pcarleton/buckle-typings/blob/54f66518b4077f003389f7d6d148f690e7bbb6b2/src/main.ml
| 54.158333 | 540 | 0.755347 | eng_Latn | 0.998678 |
470c36a9c99849352412eba81a74991ba18fd376 | 2,169 | md | Markdown | backup/2017-03-27-Beautiful_architecture.md | atyou73/blog3 | de02e690ca8a9c6a2ea5cf3e6af121d2ccbdb777 | [
"MIT"
] | 5 | 2016-09-11T11:49:39.000Z | 2019-06-06T17:33:42.000Z | backup/2017-03-27-Beautiful_architecture.md | atyou73/blog3 | de02e690ca8a9c6a2ea5cf3e6af121d2ccbdb777 | [
"MIT"
] | 205 | 2015-08-17T02:32:12.000Z | 2020-04-29T14:57:58.000Z | backup/2017-03-27-Beautiful_architecture.md | atyou73/blog3 | de02e690ca8a9c6a2ea5cf3e6af121d2ccbdb777 | [
"MIT"
] | 7 | 2016-07-20T20:51:22.000Z | 2017-12-28T16:33:32.000Z | ---
layout: post
title: Beautiful Architecture
author: Diomidis Spinellis, Georgios Gousios (editors), Hwang Jae-sun (translator)
tags: book
---
## 1
Having never once experienced a beautiful architecture, I felt envious of the architectures covered in the book as I read. I vaguely thought it would be nice to get to experience an architecture like that.
As much as I wanted to try developing on top of a good architecture, I wanted to build an eye for recognizing one. It was good to get a glimpse of how to analyze and interpret an architecture.
## 2
Some chapters dig into technical details more deeply than into architecture itself, and the environments they describe differ a lot from the local development environment, so I recommend that readers take in the content selectively.
----
> [...] In other words, an architecture is a plan for a system that makes it possible to include, without problems, the properties the end product must have; it can also be called a description of a system that has already been built.
> [...] Because architecture is a game of trade-offs, improving any one of these characteristics ends up degrading another.
> Software architecture is usually represented as a layered hierarchy, with several different structures combined into a single diagram. In the 1970s Parnas pointed out that the term 'hierarchical structure' had already become commonplace, defined its meaning precisely, and presented several examples of structures serving different purposes in the design of different systems (Parnas 1974). Describing the structures of an architecture as a set of different views has since become standard practice in architecture work (Clements et al. 2003; IEEE 2000). I will therefore use the term 'architecture' to mean the annotated diagrams and functional descriptions of the structures used to design and build a system.
> [...] A developer in charge of a single component of the whole system may not know well how all the components fit together, but is concerned first of all with the architectural constraints that must be followed and the rules that must be used in development.
> [...] Conway noted that the structure of a system reflects the structure of the organization that built it (1968). But the architect should also know that Conway's Law can be applied in reverse.
> [...] A is a part of B. Note that it must be possible to change A's secrets without changing the other parts of B. If that is not possible, then by definition A is not a submodule.
> [...] If the uses relation contains no loops, the software can be organized into levels. Level 0, the lowest level, contains all programs that use no other programs. Level n consists of the programs that use programs at level n-1 or below.
> A process is a sequence of events controlled by a program that takes place at runtime (Dijkstra 1968). A program is executed as one or more processes.
> [...] This architecture (the A-7E Onboard Flight Processor, OFP) had an enormous influence on the practice of software architecture by separating the information-hiding modules and the uses structure from the runtime process structure in a real system. [...] A final example is the Unix system, which has conceptual integrity, is widely used, and has had a profound influence. The pipes-and-filters design became a beautiful abstraction that enables new applications to be built rapidly.
> Software archaeology has highlighted one important reason why unplanned, city-like systems become so sprawling: in the early days of the project, the development team did not know what they were building.
> Conway's Law is one of the laws used to explain how arbitrary, poorly understood design ends up in a product. It says that in an organization divided into teams, the communication structure between those teams is reflected in the design and can be seen in the software product. By Conway's Law, the better the communication structure, the better the product that can come out of it.
| 54.225 | 402 | 0.704472 | kor_Hang | 1.00001 |
470c66c9d9c6b7b836ef2b54d629e2cb45593b05 | 856 | md | Markdown | packages/d3fc-site/src/examples/zoom/index.md | Ro4052/d3fc | b660aef7bd0e57bd2712fb6d3e7d7d0e90f4524c | [
"MIT"
] | null | null | null | packages/d3fc-site/src/examples/zoom/index.md | Ro4052/d3fc | b660aef7bd0e57bd2712fb6d3e7d7d0e90f4524c | [
"MIT"
] | null | null | null | packages/d3fc-site/src/examples/zoom/index.md | Ro4052/d3fc | b660aef7bd0e57bd2712fb6d3e7d7d0e90f4524c | [
"MIT"
] | null | null | null | ---
layout: example
title: Canvas Chart Zoom
example: true
externals:
zoom-js: zoom.js
zoom-html: zoom.html
---
{{{ dynamic-include 'codepen' html='zoom-html' js='zoom-js'}}}
{{{ zoom-html }}}
{{{ dynamic-include 'javascript' js='zoom-js' }}}
This example demonstrates the rendering of 10,000 datapoints with pan / zoom via [d3-zoom](https://github.com/d3/d3-zoom). As you manipulate the chart, it is re-rendered by invoking `selection.call` on the top-level component. Rendering the data to Canvas is approximately 10x faster than SVG in this case.
The visual D3FC components support rendering to both Canvas and SVG. In this case the `cartesianCanvasChart` and `seriesCanvasPoint` are used to render the chart. You can swap them for SVG simply by changing these to `cartesianSvgChart` and `seriesSvgPoint`.
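Although the full `zoom.js` source is included below, a rough sketch of the wiring may be helpful: the zoom behaviour rescales the x scale from the zoom transform and then re-invokes the chart component on the selection. The variable names (`xScale`, `xScaleOriginal`, `chart`, `data`) are placeholders rather than the names used in `zoom.js`, and the d3 v5-style `d3.event` is assumed.
```js
// Sketch only: re-render on every zoom event by re-invoking the chart component.
var zoom = d3.zoom().on('zoom', function() {
    // derive the new visible domain from the zoom transform, then redraw
    xScale.domain(d3.event.transform.rescaleX(xScaleOriginal).domain());
    render();
});

function render() {
    d3.select('#chart-container')
        .datum(data)
        .call(chart); // selection.call on the top-level component redraws the whole chart
}

d3.select('#chart-container').call(zoom);
render();
```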
```js
{{{ codeblock zoom-js }}}
```
| 40.761905 | 311 | 0.739486 | eng_Latn | 0.976171 |
470d4fc6f6baf40f03385d20dae9bec479e1592d | 2,894 | md | Markdown | source/_posts/how_to_double_your_annual_leave_next_year_by_using_this_simple_but_very_effective_holiday_hack.md | soumyadipdas37/finescoop.github.io | 0346d6175a2c36d4054083c144b7f8364db73f2f | [
"MIT"
] | null | null | null | source/_posts/how_to_double_your_annual_leave_next_year_by_using_this_simple_but_very_effective_holiday_hack.md | soumyadipdas37/finescoop.github.io | 0346d6175a2c36d4054083c144b7f8364db73f2f | [
"MIT"
] | null | null | null | source/_posts/how_to_double_your_annual_leave_next_year_by_using_this_simple_but_very_effective_holiday_hack.md | soumyadipdas37/finescoop.github.io | 0346d6175a2c36d4054083c144b7f8364db73f2f | [
"MIT"
] | 2 | 2021-09-18T12:06:26.000Z | 2021-11-14T15:17:34.000Z | ---
extends: _layouts.post
section: content
image: https://i.dailymail.co.uk/1s/2020/09/15/10/33199896-0-image-a-7_1600163096198.jpg
title: How to DOUBLE your annual leave next year by using this simple but VERY effective holiday hack
description: Workers can get up to 48 days off without going over the 20-day limit by strategically booking time away when public holidays meet weekends in 2021.
date: 2020-09-15-11-36-59
categories: [latest, news]
featured: true
---
Australians can more than double their annual leave with a simple holiday hack.
Workers can get up to 48 days off without going over the 20-day limit by strategically booking time away when public holidays meet weekends in 2021.
Holidaymakers can get ten days off between Christmas Day and January 3 while only taking three days of leave between December 29 and 31.
The remaining seven days are either weekends or public holidays.
Workers can get up to 48 days off without going over the 20-day limit by strategically booking time away when public holidays meet weekends in 2021 (stock image)
Holidaymakers can get ten days off between Christmas Day and January 3 (pictured: examples of strategically booked holidays)
A nine-day getaway can also be taken between January 23 and 31 around Australia Day, while only using four days of annual leave from January 25 to 29.
In April, employees can take a ten-day break between the 2nd and the 11th by booking four days off around Easter between April 6 and 9.
Another nine days off can be taken at the end of April between the 24th and May 2 due to ANZAC Day, while only taking four days of leave between April 27 and 30.
There is also a long weekend for the Queen's Birthday, which is different in every state.
Victorians can make the most of the 2021 AFL Grand Final Friday holiday. The date will be set once the football season starts, but it will be in September or October.
Australians spend ten per cent longer at their desks than workers in the UK and Germany, news.com.au reported.
About 13 per cent of employees in cities, such as Melbourne and Sydney, work more than 48 hours per week.
Australians can get up to 48 days off while only using 20 days of annual leave by strategically booking time off when public holidays meet weekends.
- Christmas and New Year: Book three days off between December 29 and 31 for ten days off. The remaining seven days are either weekends or public holidays.
- Australia Day: Book four days off between January 25 and 29 for a nine-day break.
- Easter: Take four days off between April 6 and 9 for a ten-day holiday.
- Anzac Day: Book four days off from April 27 to 30 to get a nine-day break.
- Christmas and New Year 2022: Take three days of annual leave on December 29 to 31 to enjoy a ten-day break.
| 56.745098 | 167 | 0.784036 | eng_Latn | 0.999825 |
470d50b9e0b13e0dae9098b7a44689bb983fe65d | 2,731 | md | Markdown | README.md | CodeSopranos/lab-2-financial-operations | bc43f5dc55135d829e6e1790b5004a144fe64795 | [
"MIT"
] | 2 | 2021-11-06T17:09:24.000Z | 2021-11-13T17:14:22.000Z | README.md | CodeSopranos/lab-2-financial-operations | bc43f5dc55135d829e6e1790b5004a144fe64795 | [
"MIT"
] | null | null | null | README.md | CodeSopranos/lab-2-financial-operations | bc43f5dc55135d829e6e1790b5004a144fe64795 | [
"MIT"
] | null | null | null | # lab-2-financial-operations
HSE University, Nizhny Novgorod campus. Modelling of Financial Operations.
Group 17PMI.
Homework 2. Optimal investment portfolios.
Use data on daily asset (stock) prices and daily trading volumes for the given stock market over the specified period.
**Deadline**: October 27.
1. Efficient frontier. Pick a set of 50 stocks on the market. Build the efficient frontier of investment portfolios with no restrictions on short positions, and the efficient frontier with short positions prohibited. Compare the efficient frontiers and draw conclusions. While building the efficient frontiers, compute the minimum-risk portfolios and mark them on the corresponding frontier. Consider the portfolio with equal capital weights and estimate how far it is from the efficient frontier. Answer the same question for the market index. (A sketch of the underlying optimization problem is given at the end of this assignment.)
2. Portfolio selection problem. Build "good" investment portfolios of 10 assets instead of the original 50 (or even of all assets on the market). Propose a way to choose 10 assets (out of the 50 selected earlier) to form the portfolio. Compare the efficient frontiers of the full set (50 stocks) and of the chosen set (10 stocks) for two cases:
- short sales are allowed,
- short sales are prohibited.
3. Risk aversion. Assess your own attitude to risk and build your personal optimal investment portfolio from the 50 selected assets. Characterize the portfolio by its composition and asset weights. Estimate the VaR of the portfolio returns. Consider two cases:
- short sales are allowed,
- short sales are prohibited.
4. Risk aversion and portfolio selection. Using the attitude to risk chosen in item 3, build the optimal portfolio of 10 assets (instead of 50, see item 2). Characterize this portfolio by its asset composition and investment weights. Compare it with the portfolio from item 3. Estimate the VaR of the portfolio returns. Consider two cases:
- short sales are allowed,
- short sales are prohibited.
5. Markowitz-Tobin model. Consider the set of assets selected in item 2 (10 assets). Choose a risk-free asset (a virtual one is allowed). Build the market (optimal) portfolio. Analyse the asset weights of the market portfolio. Estimate the VaR of the portfolio returns. Consider two cases:
- short sales are allowed,
- short sales are prohibited.
6. Bonus. Market index. Work out how the index portfolio is composed (determine the weights with which the assets enter the index). Characterize the index (histogram, distribution, outliers). Based on principal component analysis, propose your own model of the market index. Compare your index with the market index.
*Note: in the Markowitz model, use sample estimates from the observations as the mean vector and the covariance matrix.*
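For reference, items 1-4 build on the classical Markowitz mean-variance problem. The sketch below is standard textbook material rather than part of the assignment text: with sample mean vector $\mu$, sample covariance matrix $\Sigma$ and portfolio weights $w$, each point of the frontier solves
```latex
\min_{w}\; w^{\top} \Sigma w
\quad \text{s.t.} \quad w^{\top}\mu = r_{\mathrm{target}}, \qquad w^{\top}\mathbf{1} = 1,
\qquad w \ge 0 \ \text{(only when short sales are prohibited)}
```
Sweeping the target return traces out the efficient frontier, dropping the return constraint gives the minimum-risk portfolio from item 1, and keeping or removing the $w \ge 0$ constraint switches between the "short sales prohibited" and "short sales allowed" cases.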
| 75.861111 | 519 | 0.801172 | rus_Cyrl | 0.981374 |
470d6303540f97c9a227c93198f0a388113c8757 | 999 | md | Markdown | data/issues/ZF2-157.md | zendframework/zf3-web | 5852ab5bfd47285e6b46f9e7b13250629b3e372e | [
"BSD-3-Clause"
] | 40 | 2016-06-23T17:52:49.000Z | 2021-03-27T20:02:40.000Z | data/issues/ZF2-157.md | zendframework/zf3-web | 5852ab5bfd47285e6b46f9e7b13250629b3e372e | [
"BSD-3-Clause"
] | 80 | 2016-06-24T13:39:11.000Z | 2019-08-08T06:37:19.000Z | data/issues/ZF2-157.md | zendframework/zf3-web | 5852ab5bfd47285e6b46f9e7b13250629b3e372e | [
"BSD-3-Clause"
] | 52 | 2016-06-24T22:21:49.000Z | 2022-02-24T18:14:03.000Z | ---
layout: issue
title: "pdo adapter mysql is not in the pdo Namespace"
id: ZF2-157
---
ZF2-157: pdo adapter mysql is not in the pdo Namespace
------------------------------------------------------
Issue Type: Improvement Created: 2012-02-09T22:06:27.000+0000 Last Updated: 2012-02-24T20:21:30.000+0000 Status: Closed Fix version(s):
Reporter: Clemens Sahs (csahs) Assignee: Ralph Schindler (ralph) Tags: - Zend\\Db
Related issues:
Attachments:
### Description
is there a reason to hold the class "PdoMysql" in the Namespace "\\Zend\\Db\\Adapter"
i think a better place is "\\Zend\\Db\\Adapter\\Pdo" like the old class Zend\_Db\_Adapter\_Pdo\_Mysql
thx for you time ;)
### Comments
Posted by Adam Lundrigan (adamlundrigan) on 2012-02-23T18:49:36.000+0000
The Zend\\Db component currently in ZF2 master is being rewritten (by Ralph Schindler; see branches listed under Zend\\Db on <http://zf2.evan.pro/>), and should be included in the beta3 or beta4 release of ZF2.
| 28.542857 | 210 | 0.685686 | eng_Latn | 0.939527 |
470d78c75cef5c7a38eda885cb55902f005413c1 | 38,075 | md | Markdown | packages/docs/pages/docs/components/sidebar.md | dimitrio-m/inkline | 6bef882470e1b585af11087c10cc543af06f8f89 | [
"MIT"
] | null | null | null | packages/docs/pages/docs/components/sidebar.md | dimitrio-m/inkline | 6bef882470e1b585af11087c10cc543af06f8f89 | [
"MIT"
] | null | null | null | packages/docs/pages/docs/components/sidebar.md | dimitrio-m/inkline | 6bef882470e1b585af11087c10cc543af06f8f89 | [
"MIT"
] | null | null | null | ---
title: Sidebar
description: A responsive navigation sidebar that includes support for branding, navigation, forms and more.
---
# Sidebar
## A responsive navigation sidebar that includes support for branding, navigation, forms and more.
### Example
Here’s an example of how to use the `<i-sidebar>` inside a dashboard layout. The sidebar automatically collapses responsively.
To control the visibility of the sidebar when collapsed, we will use the `v-model` directive.
<i-code title="Sidebar Example">
<i-tab type="preview">
<i-layout class="sidebar-layout-example -lg">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar v-model="collapsed" collapse-position="absolute">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
</i-tab>
<i-tab type="html">
~~~html
<i-layout>
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Navbar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
</i-tab>
<i-tab type="js">
~~~js
export default {
name: 'DashboardLayout',
data() {
return {
collapsed: false
};
}
}
~~~
</i-tab>
</i-code>
### Sizes
You're able to use the `size` modifier to control the size of your sidebar, using one of the available sizes: `sm`, `md`, and `lg`.
The default size is set to `md`.
<i-code title="Sidebar Sizes">
<i-tab type="preview">
<i-layout class="sidebar-layout-example -sm">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Small Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsedSizesSm" @click="collapsedSizesSm = !collapsedSizesSm"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar size="sm" collapse-position="absolute" v-model="collapsedSizesSm">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
<i-layout class="sidebar-layout-example -sm _margin-top-1">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Medium Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsedSizesMd" @click="collapsedSizesMd = !collapsedSizesMd"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar size="md" collapse-position="absolute" v-model="collapsedSizesMd">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
<i-layout class="sidebar-layout-example -sm _margin-top-1">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Large Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsedSizesLg" @click="collapsedSizesLg = !collapsedSizesLg"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar size="lg" collapse-position="absolute" v-model="collapsedSizesLg">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
</i-tab>
<i-tab type="html">
~~~html
<i-layout>
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Small Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar size="sm" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
~~~html
<i-layout>
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
            <i-navbar-brand>Medium Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar size="md" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
~~~html
<i-layout>
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
            <i-navbar-brand>Large Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar size="lg" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
</i-tab>
<i-tab type="js">
~~~js
export default {
name: 'DashboardLayout',
data() {
return {
collapsed: false
};
}
}
~~~
</i-tab>
</i-code>
### Variants
Inkline includes two predefined sidebar styles. You can set the style of a `<i-sidebar>` using the `variant` property, which can have a value of `light` or `dark`. By default, sidebars use the `light` variant.
<i-code title="Sidebar Variants">
<i-tab type="preview">
<i-layout class="sidebar-layout-example">
<i-layout-header class="_padding-0">
<i-navbar variant="light" :collapse="false">
<i-navbar-brand>Light Sidebar</i-navbar-brand>
<i-hamburger-menu variant="light" class="_visible-md-and-down" :active="collapsedVariantsLight" @click="collapsedVariantsLight = !collapsedVariantsLight"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar variant="light" collapse-position="absolute" v-model="collapsedVariantsLight">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
<i-layout class="sidebar-layout-example _margin-top-1">
<i-layout-header class="_padding-0">
<i-navbar variant="dark" :collapse="false">
<i-navbar-brand>Dark Sidebar</i-navbar-brand>
<i-hamburger-menu variant="dark" class="_visible-md-and-down" :active="collapsedVariantsDark" @click="collapsedVariantsDark = !collapsedVariantsDark"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar variant="dark" collapse-position="absolute" v-model="collapsedVariantsDark">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
</i-layout>
</i-layout>
</i-tab>
<i-tab type="html">
~~~html
<i-layout>
<i-layout-header class="_padding-0">
<i-navbar variant="light" :collapse="false">
<i-navbar-brand>Light Sidebar</i-navbar-brand>
<i-hamburger-menu variant="light" class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar variant="light" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
~~~html
<i-layout>
<i-layout-header class="_padding-0">
<i-navbar variant="dark" :collapse="false">
<i-navbar-brand>Dark Sidebar</i-navbar-brand>
<i-hamburger-menu variant="dark" class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar variant="dark" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
</i-tab>
<i-tab type="js">
~~~js
export default {
name: 'DashboardLayout',
data() {
return {
collapsed: false
};
}
}
~~~
</i-tab>
</i-code>
### Placement
You can easily place your sidebar on the right side of a layout by setting the `placement` property to `right`. By default, sidebars are on the left side.
<i-code title="Sidebar Placement">
<i-tab type="preview">
<i-layout class="sidebar-layout-example">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Left Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsedPlacementLeft" @click="collapsedPlacementLeft = !collapsedPlacementLeft"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar collapse-position="absolute" placement="left" v-model="collapsedPlacementLeft">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
<i-layout class="sidebar-layout-example _margin-top-1">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Right Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsedPlacementRight" @click="collapsedPlacementRight = !collapsedPlacementRight"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-layout-content></i-layout-content>
<i-sidebar collapse-position="absolute" placement="right" v-model="collapsedPlacementRight">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
</i-layout>
</i-layout>
</i-tab>
<i-tab type="html">
~~~html
<i-layout">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Left Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar placement="left" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
~~~html
<i-layout">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Right Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-layout-content></i-layout-content>
<i-sidebar placement="right" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
</i-layout>
</i-layout>
~~~
</i-tab>
<i-tab type="js">
~~~js
export default {
name: 'DashboardLayout',
data() {
return {
collapsed: false
};
}
}
~~~
</i-tab>
</i-code>
### Collapse Breakpoint
You can control what breakpoint your sidebar will collapse at using the `collapse` property. By default, the sidebar will collapse on the `md` screen size.
<i-code title="Collapse Breakpoint Example">
<i-tab type="preview">
<i-layout class="sidebar-layout-example">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-lg-and-down" :active="collapsedBreakpointLg" @click="collapsedBreakpointLg = !collapsedBreakpointLg"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar collapse-position="absolute" collapse="lg" v-model="collapsedBreakpointLg">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
</i-tab>
<i-tab type="html">
~~~html
<i-layout">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-lg-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar collapse="lg" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
</i-tab>
<i-tab type="js">
~~~js
export default {
name: 'DashboardLayout',
data() {
return {
collapsed: false
};
}
}
~~~
</i-tab>
</i-code>
##### Always or Never Collapsible
Besides the breakpoint values, you can use a boolean value to set your sidebar to be always collapsible, or never collapsible.
Setting a `collapse` value of `true` will set the sidebar to be always collapsible.
<i-code title="Always Collapsible Example">
<i-tab type="preview">
<i-layout class="sidebar-layout-example">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Sidebar</i-navbar-brand>
<i-hamburger-menu :active="collapsedBreakpointAlways" @click="collapsedBreakpointAlways = !collapsedBreakpointAlways"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar collapse-position="absolute" :collapse="true" v-model="collapsedBreakpointAlways">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
</i-tab>
<i-tab type="html">
~~~html
<i-layout">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Sidebar</i-navbar-brand>
<i-hamburger-menu :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar :collapse="true" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
</i-tab>
<i-tab type="js">
~~~js
export default {
name: 'DashboardLayout',
data() {
return {
collapsed: false
};
}
}
~~~
</i-tab>
</i-code>
Setting a `collapse` value of `false` will set the sidebar to never be collapsible.
<i-code title="Never Collapsible Example">
<i-tab type="preview">
<i-layout class="sidebar-layout-example">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Sidebar</i-navbar-brand>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar collapse-position="absolute" :collapse="false">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
</i-tab>
<i-tab type="html">
~~~html
<i-layout">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Sidebar</i-navbar-brand>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar :collapse="false">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
</i-tab>
<i-tab type="js">
~~~js
export default {
name: 'DashboardLayout',
data() {
return {
collapsed: false
};
}
}
~~~
</i-tab>
</i-code>
### Collapse Position
You can set the collapsed sidebar position to `relative`, `absolute` or `fixed` using the `collapse-position` property.
This property allows you to control whether the collapsed sidebar affects the layout of the content beside it when the collapse breakpoint is reached.
<i-code title="Sidebar Collapse Position">
<i-tab type="preview">
<i-layout class="sidebar-layout-example">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Relative Position Sidebar</i-navbar-brand>
<i-hamburger-menu :active="collapsedPositionRelative" @click="collapsedPositionRelative = !collapsedPositionRelative"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar :collapse="true" collapse-position="relative" v-model="collapsedPositionRelative">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content class="_display-flex _justify-content-center _align-items-center">Layout content</i-layout-content>
</i-layout>
</i-layout>
<i-layout class="sidebar-layout-example _margin-top-1">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Absolute Position Sidebar</i-navbar-brand>
<i-hamburger-menu :active="collapsedPositionAbsolute" @click="collapsedPositionAbsolute = !collapsedPositionAbsolute"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar :collapse="true" collapse-position="absolute" v-model="collapsedPositionAbsolute">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content class="_display-flex _justify-content-center _align-items-center">Layout content</i-layout-content>
</i-layout>
</i-layout>
<i-layout class="sidebar-layout-example _margin-top-1">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Fixed Position Sidebar</i-navbar-brand>
<i-hamburger-menu :active="collapsedPositionFixed" @click="collapsedPositionFixed = !collapsedPositionFixed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar :collapse="true" collapse-position="absolute" v-model="collapsedPositionFixed">
<i-nav vertical>
<i-nav-item :to="{ name: 'docs-components-sidebar' }" onclick="return false;">Home</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">About</i-nav-item>
<i-nav-item href="https://inkline.io" onclick="return false;">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content class="_display-flex _justify-content-center _align-items-center">Layout content</i-layout-content>
</i-layout>
</i-layout>
</i-tab>
<i-tab type="html">
~~~html
<i-layout">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Relative Position Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar collapse-position="relative" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
~~~html
<i-layout">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Absolute Position Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar collapse-position="absolute" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
~~~html
<i-layout">
<i-layout-header class="_padding-0">
<i-navbar :collapse="false">
<i-navbar-brand>Fixed Position Sidebar</i-navbar-brand>
<i-hamburger-menu class="_visible-md-and-down" :active="collapsed" @click="collapsed = !collapsed"></i-hamburger-menu>
</i-navbar>
</i-layout-header>
<i-layout vertical>
<i-sidebar collapse-position="fixed" v-model="collapsed">
<i-nav vertical>
<i-nav-item to="/">Home</i-nav-item>
<i-nav-item to="/about">About</i-nav-item>
<i-nav-item to="/contact">Contact</i-nav-item>
</i-nav>
</i-sidebar>
<i-layout-content></i-layout-content>
</i-layout>
</i-layout>
~~~
</i-tab>
<i-tab type="js">
~~~js
export default {
name: 'DashboardLayout',
data() {
return {
collapsed: false
};
}
}
~~~
</i-tab>
</i-code>
### Component API
Here you can find a list of the various customization options you can use for the sidebar components as props, as well as available slots and events.
<i-code title="Sidebar API" markup="i-sidebar" expanded link="https://github.com/inkline/inkline/tree/master/packages/inkline/src/components/ISidebar">
<i-tab type="props">
<api-table>
<api-table-row>
<template slot="property">collapse</template>
<template slot="description">Specifies the breakpoint at which to collapse the sidebar.</template>
<template slot="type"><code>String</code>, <code>Boolean</code></template>
<template slot="values"><code>xs</code>, <code>sm</code>, <code>md</code>, <code>lg</code>, <code>xl</code>, <code>true</code>, <code>false</code></template>
<template slot="default"><code>md</code></template>
</api-table-row>
<api-table-row>
<template slot="property">collapse-on-click</template>
<template slot="description">Collapses the sidebar when clicking a sidebar item.</template>
<template slot="type"><code>Boolean</code></template>
<template slot="values"><code>true</code>, <code>false</code></template>
<template slot="default"><code>true</code></template>
</api-table-row>
<api-table-row>
<template slot="property">collapse-on-click-overlay</template>
<template slot="description">Collapses the sidebar when clicking the sidebar overlay. The overlay appears only for <code>absolute</code> and <code>fixed</code> collapse positions.</template>
<template slot="type"><code>Boolean</code></template>
<template slot="values"><code>true</code>, <code>false</code></template>
<template slot="default"><code>true</code></template>
</api-table-row>
<api-table-row>
<template slot="property">collapse-position</template>
<template slot="description">Sets the position of the sidebar when collapsed.</template>
<template slot="type"><code>String</code></template>
<template slot="values"><code>relative</code>, <code>absolute</code>, <code>fixed</code></template>
<template slot="default"><code>fixed</code></template>
</api-table-row>
<api-table-row>
<template slot="property">placement</template>
<template slot="description">Sets the placement of the sidebar to the left or to the right.</template>
<template slot="type"><code>String</code></template>
<template slot="values"><code>relative</code>, <code>absolute</code>, <code>fixed</code></template>
<template slot="default"><code>fixed</code></template>
</api-table-row>
<api-table-row>
<template slot="property">size</template>
<template slot="description">Sets the size of the sidebar component.</template>
<template slot="type"><code>String</code></template>
<template slot="values"><code>sm</code>, <code>md</code>, <code>lg</code></template>
<template slot="default"><code>md</code></template>
</api-table-row>
<api-table-row>
<template slot="property">value</template>
<template slot="description">Provides a way to collapse the sidebar programmatically. Should be used as part of <code>v-model</code> directive.</template>
<template slot="type"><code>Boolean</code></template>
<template slot="values"><code>true</code>, <code>false</code></template>
<template slot="default"><code>false</code></template>
</api-table-row>
<api-table-row>
<template slot="property">variant</template>
<template slot="description">Sets the color variant of the sidebar component.</template>
<template slot="type"><code>String</code></template>
<template slot="values"><code>light</code>, <code>dark</code></template>
<template slot="default"><code>light</code></template>
</api-table-row>
</api-table>
</i-tab>
<i-tab type="slots">
<api-table>
<api-table-row>
<template slot="slot">default</template>
<template slot="description">Slot for sidebar component default content.</template>
</api-table-row>
</api-table>
</i-tab>
</i-code>
### Sass Variables
Here you can find a list of the Sass variables you can use for the sidebar components. If you're looking to find common variables that these rely on, you should take a look at the <nuxt-link :to="{ name: 'docs-core-sass-variables' }">Sass Variables</nuxt-link> page.
<i-code title="Sidebar" expanded>
<i-tab type="scss">
<api-table>
<api-table-row>
<template slot="property">$sidebar-width-base</template>
<template slot="default"><code>14rem</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-width</template>
<template slot="default"><code>size-map($sidebar-width-base, $sizes, $size-multipliers)</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-padding-base</template>
<template slot="default"><code>0</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-padding</template>
<template slot="default"><code>size-map($sidebar-padding-base, $sizes, $size-multipliers)</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-wrapper-transition-duration</template>
<template slot="default"><code>$transition-duration</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-wrapper-transition-easing</template>
<template slot="default"><code>$transition-easing</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-transition-duration</template>
<template slot="default"><code>$transition-duration</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-transition-easing</template>
<template slot="default"><code>$transition-easing</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-overlay-transition-duration</template>
<template slot="default"><code>$transition-duration</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-overlay-transition-easing</template>
<template slot="default"><code>$transition-easing</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-overlay-background</template>
<template slot="default"><code>rgba(0, 0, 0, 0.5)</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-z-index</template>
<template slot="default"><code>$z-index-fixed</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-color-for-light-variant</template>
<template slot="default"><code>$color-for-light-variant</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-color-for-dark-variant</template>
<template slot="default"><code>$color-for-dark-variant</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-variant-{variant}</template>
<template slot="default"><code>sidebar-variant($color-{variant})</code></template>
</api-table-row>
<api-table-row>
<template slot="property">$sidebar-variants</template>
<template slot="default-row">
~~~scss
(
light: $sidebar-variant-light,
dark: $sidebar-variant-dark
)
~~~
</template>
</api-table-row>
<api-table-row>
<template slot="function">sidebar-variant</template>
<template slot="default-row">
~~~scss
@function sidebar-variant($variant) {
$sidebar-variant-color: variant-color-by-luminance($variant, $sidebar-color-for-light-variant, $sidebar-color-for-dark-variant);
$sidebar-variant-background: $variant;
$variant-map: (
color: $sidebar-variant-color,
background: $sidebar-variant-background,
);
@return $variant-map;
}
~~~
</template>
</api-table-row>
</api-table>
</i-tab>
</i-code>
| 40.721925 | 266 | 0.572633 | eng_Latn | 0.206391 |
470dd75e91f3da9c6a568ceef1518b597c8398a1 | 152 | md | Markdown | development-setup.md | team-aie/docs | 2d11930f9e1d12c7fbe51bc14adfce9a2a11ff7f | [
"MIT"
] | null | null | null | development-setup.md | team-aie/docs | 2d11930f9e1d12c7fbe51bc14adfce9a2a11ff7f | [
"MIT"
] | 4 | 2020-09-16T19:53:51.000Z | 2021-02-06T07:43:10.000Z | development-setup.md | team-aie/docs | 2d11930f9e1d12c7fbe51bc14adfce9a2a11ff7f | [
"MIT"
] | null | null | null | ## Development Setup
Please find your operating system below and follow the corresponding steps.
### Windows
(to be added)
### macOS
(to be added)
### Linux
(to be added)
| 9.5 | 49 | 0.664474 | eng_Latn | 0.935732 |
470e18eef76c6f6c62cd3faf15f459094b242cfd | 492 | md | Markdown | docs/worklog/planNotes.md | adminoid/backend.adminoid.com | 25ef02698d0c9fc27ef73e8842ed86d7db266cbe | [
"MIT"
] | null | null | null | docs/worklog/planNotes.md | adminoid/backend.adminoid.com | 25ef02698d0c9fc27ef73e8842ed86d7db266cbe | [
"MIT"
] | null | null | null | docs/worklog/planNotes.md | adminoid/backend.adminoid.com | 25ef02698d0c9fc27ef73e8842ed86d7db266cbe | [
"MIT"
] | null | null | null | # Plan notes for project
## Drawing lucidchart diagrams
1. data-tree (may be Baum) for pages
2. page_types for pages
3. my existing pages are either Landing or Custom type
4. 'Portfolio' table with relations
## Localization
1. if domain is .com setLocale('en'), if domain zone is .ru setLocale('ru')
2. separate texts and test blocks from template to localization files
## Unit testing and initial php-code
1. test database relations
2. test page routes
3. test locales on domain zone
| 24.6 | 76 | 0.739837 | eng_Latn | 0.968596 |
470e58056a450c64e7c22d49003f23d549010fe1 | 643 | md | Markdown | README.md | abnercabrera28/orchestra_frontend | 7f68410d017ec76c725b7709c28b6e13177595b6 | [
"MIT"
] | null | null | null | README.md | abnercabrera28/orchestra_frontend | 7f68410d017ec76c725b7709c28b6e13177595b6 | [
"MIT"
] | null | null | null | README.md | abnercabrera28/orchestra_frontend | 7f68410d017ec76c725b7709c28b6e13177595b6 | [
"MIT"
] | null | null | null | # Orchestras JS Project (Frontend)
This is a Single Page Application where a user can create their own orchestras, as well as edit and delete them from their database.
## Usage
For a walkthrough of the user experience, click [here](https://youtu.be/PRSqAcf6WG8).
## Development
After checking out the repo, run `live-server` to launch the application.
## Contributing
Bug reports and pull requests are welcome on GitHub at https://github.com/abnercabrera28/orchestra_frontend and https://github.com/abnercabrera28/orchestra_backend.
## License
[MIT](https://github.com/abnercabrera28/orchestra_frontend/blob/main/LICENSE) | 33.842105 | 164 | 0.790047 | eng_Latn | 0.964499 |
470e9e3213875d1504ec9831670606d5de84236a | 266 | md | Markdown | Features.md | ShawnTe/book-of-becoming | 317bc94664dd7a6ee1db6e81a049d733b218207e | [
"MIT"
] | 2 | 2016-07-20T03:11:17.000Z | 2017-01-16T03:17:31.000Z | Features.md | ShawnTe/book-of-becoming | 317bc94664dd7a6ee1db6e81a049d733b218207e | [
"MIT"
] | 3 | 2020-06-24T17:54:43.000Z | 2021-09-27T20:31:35.000Z | Features.md | ShawnTe/book-of-becoming | 317bc94664dd7a6ee1db6e81a049d733b218207e | [
"MIT"
] | null | null | null | Features
- Select form shows user's themes and submits that for post save
- posts in created_at descending order
- Notes field hideable (ajax)
- button text for notes field changes (ajax)
Questions
How to auto select the tag already assigned to the post? _show
| 24.181818 | 64 | 0.774436 | eng_Latn | 0.998848 |
470eb59f1a9e868ab828da0dd4531b075cff05fc | 2,893 | md | Markdown | README.md | daquang/genomelake | e579cc1ae36a5e064a0afb90b51c882b225e02e2 | [
"BSD-3-Clause"
] | null | null | null | README.md | daquang/genomelake | e579cc1ae36a5e064a0afb90b51c882b225e02e2 | [
"BSD-3-Clause"
] | null | null | null | README.md | daquang/genomelake | e579cc1ae36a5e064a0afb90b51c882b225e02e2 | [
"BSD-3-Clause"
] | null | null | null | # genomelake
[](https://circleci.com/gh/kundajelab/genomelake)[](https://coveralls.io/github/kundajelab/genomelake)
Efficient random access to genomic data for deep learning models.
Supports the following types of input data:
- bigwig
- DNA sequence
genomelake extracts signal from genomic inputs in provided BED intervals.
## Requirements
- python 2.7 or 3.5
- bcolz
- cython
- numpy
- pybedtools
- pysam
## Installation
Clone the repository and run:
`python setup.py install`
## Getting started: training a protein-DNA binding model
Extract genome-wide sequence data into a genomelake data source:
```python
from genomelake.backend import extract_fasta_to_file
genome_fasta = "/mnt/data/annotations/by_release/hg19.GRCh37/hg19.genome.fa"
genome_data_directory = "./hg19_data_directory"
extract_fasta_to_file(genome_fasta, genome_data_directory)
```
Using a BED intervals file with labels, a genome data source, and genomelake's `ArrayExtractor`, generate input DNA sequences and labels:
```python
import pybedtools
from genomelake.extractors import ArrayExtractor
import numpy as np
def batch_iter(iterable, batch_size):
it = iter(iterable)
try:
while True:
values = []
for n in range(batch_size):
values += (next(it),)
yield values
    except StopIteration:
        # Yield the final partial batch, if any; this avoids yielding an empty
        # batch when the iterable length is an exact multiple of batch_size.
        if values:
            yield values
def generate_inputs_and_labels(intervals_file, data_source, batch_size=128):
bt = pybedtools.BedTool(intervals_file)
extractor = ArrayExtractor(data_source)
intervals_generator = batch_iter(bt, batch_size)
for intervals_batch in intervals_generator:
inputs = extractor(intervals_batch)
labels = []
for interval in intervals_batch:
labels.append(float(interval.name))
labels = np.array(labels)
yield inputs, labels
```
Train a keras model of JUND binding to DNA using 101 base pair intervals and labels in `./examples/JUND.HepG2.chr22.101bp_intervals.tsv.gz`:
```python
from keras.models import Sequential
from keras.layers import Conv1D, Flatten, Dense
intervals_file = "./examples/JUND.HepG2.chr22.101bp_intervals.tsv.gz"
inputs_labels_generator = generate_inputs_and_labels(intervals_file, genome_data_directory)
model = Sequential()
model.add(Conv1D(15, 25, input_shape=(101, 4)))
model.add(Flatten())
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit_generator(inputs_labels_generator, steps_per_epoch=100)
```
Here is the expected result:
```
100/100 [==============================] - 7s - loss: 0.0584 - acc: 0.9905
```
## License
genomelake is released under the BSD-3 license. See ``LICENSE`` for details.
| 32.144444 | 259 | 0.73868 | eng_Latn | 0.50367 |
470eb72ba8d5f64a6ce5a41a730461e513462522 | 367 | md | Markdown | README.md | MaxArt2501/my-city | 91d7d6d02e4c77c07a24aad35d149639b430ee0a | [
"MIT"
] | 1 | 2021-10-15T12:34:19.000Z | 2021-10-15T12:34:19.000Z | README.md | MaxArt2501/my-city | 91d7d6d02e4c77c07a24aad35d149639b430ee0a | [
"MIT"
] | 29 | 2021-05-16T17:43:16.000Z | 2021-11-15T09:57:23.000Z | README.md | MaxArt2501/my-city | 91d7d6d02e4c77c07a24aad35d149639b430ee0a | [
"MIT"
] | null | null | null | # my-city
Web implementation of the logic puzzle "La mia città" by Giorgio Dendi
## To-do list
- [ ] Set up `aria-label` properties
- [ ] Set up `tabindex` attributes for keyboard users
- [ ] Check if it's playable with a screen reader
- [ ] Solve [all the issues](https://github.com/MaxArt2501/my-city/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc)
| 33.363636 | 118 | 0.700272 | eng_Latn | 0.713816 |
470f16c1b4e8ce91ad58397bc473dc3c7f77f7e9 | 259 | md | Markdown | docs/markdown/00-school/speaker-anthony.md | sfeir-open-source/sfeir-school-modern-javascript | e6a2b483c3b4c872555f9cda0a34df06f163fe8c | [
"Apache-2.0"
] | 6 | 2019-09-10T16:50:50.000Z | 2020-04-30T08:13:54.000Z | docs/markdown/00-school/speaker-anthony.md | sfeir-open-source/sfeir-school-modern-javascript | e6a2b483c3b4c872555f9cda0a34df06f163fe8c | [
"Apache-2.0"
] | 16 | 2019-09-11T12:29:35.000Z | 2021-10-06T08:01:09.000Z | docs/markdown/00-school/speaker-anthony.md | sfeir-open-source/sfeir-school-modern-javascript | e6a2b483c3b4c872555f9cda0a34df06f163fe8c | [
"Apache-2.0"
] | 2 | 2019-10-21T09:04:18.000Z | 2020-03-10T10:10:38.000Z | <!-- .slide: class="speaker-slide" -->
# Introduction


## Anthony Goussot
### Developer <br/> & SFEIR Factory Core Team
<!-- .element: class="icon-rule icon-first" -->
| 19.923077 | 48 | 0.660232 | fra_Latn | 0.106365 |
470fd95181e35b2715980fb5a7e3078c3c93ad7c | 1,328 | md | Markdown | _posts/2019-06-29-welcome.md | mccurcio/docsy-jekyll-0ld | 28ebbe2f67f58443d25100307eb36d964f34606e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | _posts/2019-06-29-welcome.md | mccurcio/docsy-jekyll-0ld | 28ebbe2f67f58443d25100307eb36d964f34606e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | _posts/2019-06-29-welcome.md | mccurcio/docsy-jekyll-0ld | 28ebbe2f67f58443d25100307eb36d964f34606e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | ---
title: "Generating Posts w/ Docsy Jekyll"
date: 2019-06-28
categories: Posts
badges:
- type: primary
tag: primary-badge
- type: secondary
tag: secondary-badge
- type: info
tag: info-badge
- type: success
tag: success-badge
---
- This doc is in the `_posts` directory.
- You may edit this doc or use it as a template then rebuild the site to see your changes.
- To rebuild this site run `jekyll serve`, which launches a web server and regenerates your site when a file is updated.
- For new posts, add a file to the `_posts` directory with the following criteria:
1. `YYYY-MM-DD-name-of-post.ext` The date of post in the filename
2. The post must include the necessary YAML front matter, see above
3. Badges can be added as needed
Additionally, Jekyll offers support for code snippets:
{% highlight ruby %}
def print_hi(name)
puts "Hi, #{name}"
end
print_hi('Tom')
#=> prints 'Hi, Tom' to STDOUT.
{% endhighlight %}
- For more information, see [Jekyll docs][jekyll-docs] to get the most out of Jekyll.
- If you have questions, you can ask them on [Jekyll Talk][jekyll-talk].
- File all bugs/feature requests at [Jekyll’s GitHub repo][jekyll-gh].
[jekyll-docs]: http://jekyllrb.com/docs/home
[jekyll-gh]: https://github.com/jekyll/jekyll
[jekyll-talk]: https://talk.jekyllrb.com/
| 31.619048 | 120 | 0.714608 | eng_Latn | 0.979631 |
47113cddc30d4e1ef9fbac14c962af12033f9bd6 | 1,619 | md | Markdown | dynamicsax2012-technet/isearchengine-stylesearchpropertyname-property-microsoft-dynamics-retail-ecommerce-sdk-core.md | MicrosoftDocs/DynamicsAX2012-technet | 4e3ffe40810e1b46742cdb19d1e90cf2c94a3662 | [
"CC-BY-4.0",
"MIT"
] | 9 | 2019-01-16T13:55:51.000Z | 2021-11-04T20:39:31.000Z | dynamicsax2012-technet/isearchengine-stylesearchpropertyname-property-microsoft-dynamics-retail-ecommerce-sdk-core.md | MicrosoftDocs/DynamicsAX2012-technet | 4e3ffe40810e1b46742cdb19d1e90cf2c94a3662 | [
"CC-BY-4.0",
"MIT"
] | 265 | 2018-08-07T18:36:16.000Z | 2021-11-10T07:15:20.000Z | dynamicsax2012-technet/isearchengine-stylesearchpropertyname-property-microsoft-dynamics-retail-ecommerce-sdk-core.md | MicrosoftDocs/DynamicsAX2012-technet | 4e3ffe40810e1b46742cdb19d1e90cf2c94a3662 | [
"CC-BY-4.0",
"MIT"
] | 32 | 2018-08-09T22:29:36.000Z | 2021-08-05T06:58:53.000Z | ---
title: ISearchEngine.StyleSearchPropertyName Property (Microsoft.Dynamics.Retail.Ecommerce.Sdk.Core)
TOCTitle: StyleSearchPropertyName Property
ms:assetid: P:Microsoft.Dynamics.Retail.Ecommerce.Sdk.Core.ISearchEngine.StyleSearchPropertyName
ms:mtpsurl: https://technet.microsoft.com/library/microsoft.dynamics.retail.ecommerce.sdk.core.isearchengine.stylesearchpropertyname(v=AX.60)
ms:contentKeyID: 65315563
author: Khairunj
ms.date: 05/18/2015
mtps_version: v=AX.60
f1_keywords:
- Microsoft.Dynamics.Retail.Ecommerce.Sdk.Core.ISearchEngine.StyleSearchPropertyName
dev_langs:
- CSharp
- C++
- VB
---
# StyleSearchPropertyName Property
[!INCLUDE[archive-banner](includes/archive-banner.md)]
**Namespace:** [Microsoft.Dynamics.Retail.Ecommerce.Sdk.Core](microsoft-dynamics-retail-ecommerce-sdk-core-namespace.md)
**Assembly:** Microsoft.Dynamics.Retail.Ecommerce.Sdk.Core (in Microsoft.Dynamics.Retail.Ecommerce.Sdk.Core.dll)
## Syntax
``` vb
'Declaration
ReadOnly Property StyleSearchPropertyName As String
Get
'Usage
Dim instance As ISearchEngine
Dim value As String
value = instance.StyleSearchPropertyName
```
``` csharp
string StyleSearchPropertyName { get; }
```
``` c++
property String^ StyleSearchPropertyName {
String^ get ();
}
```
#### Property Value
Type: [System.String](https://technet.microsoft.com/library/s1wwdcbf\(v=ax.60\))
## See Also
#### Reference
[ISearchEngine Interface](isearchengine-interface-microsoft-dynamics-retail-ecommerce-sdk-core.md)
[Microsoft.Dynamics.Retail.Ecommerce.Sdk.Core Namespace](microsoft-dynamics-retail-ecommerce-sdk-core-namespace.md)
| 26.540984 | 141 | 0.787523 | yue_Hant | 0.935906 |
4712691506f16c49a05dc7c5f6a2f85f33c6c9c2 | 3,153 | md | Markdown | doc/dialects/bigquery.md | Mattlk13/virtual-schemas | 7bd62dd343a150daf9bd233ad2f565b7643f5fbc | [
"MIT"
] | null | null | null | doc/dialects/bigquery.md | Mattlk13/virtual-schemas | 7bd62dd343a150daf9bd233ad2f565b7643f5fbc | [
"MIT"
] | 1 | 2020-11-17T10:51:42.000Z | 2020-11-17T10:51:42.000Z | doc/dialects/bigquery.md | Mattlk13/virtual-schemas | 7bd62dd343a150daf9bd233ad2f565b7643f5fbc | [
"MIT"
] | null | null | null | # Big Query SQL Dialect
The Big Query SQL dialect allows you to connect to [Google Big Query](https://cloud.google.com/bigquery/), Google's serverless, enterprise data warehouse.
## JDBC Driver
Download the [Simba JDBC Driver for Google BigQuery](https://cloud.google.com/bigquery/providers/simba-drivers/).
### Upload JDBC Driver to EXAOperation
1. [Create a bucket in BucketFS](https://docs.exasol.com/administration/on-premise/bucketfs/create_new_bucket_in_bucketfs_service.htm)
1. Upload the driver to BucketFS
## Connecting to Big Query
1. Create schema:
```sql
CREATE SCHEMA ADAPTER;
```
2. Create Adapter Script
You install the adapter script via the special SQL command `CREATE JAVA ADAPTER SCRIPT`.
Please remember to check the versions of your JAR files after downloading the driver. They can differ from the list below.
```sql
--/
CREATE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS
%scriptclass com.exasol.adapter.RequestDispatcher;
%jar /buckets/bfsdefault/jars/virtualschema-jdbc-adapter-dist-1.19.5.jar;
%jar /buckets/bfsdefault/jars/avro-1.8.2.jar;
%jar /buckets/bfsdefault/jars/gax-1.40.0.jar;
%jar /buckets/bfsdefault/jars/google-api-client-1.28.0.jar;
%jar /buckets/bfsdefault/jars/google-api-services-bigquery-v2-rev426-1.25.0.jar;
%jar /buckets/bfsdefault/jars/google-auth-library-credentials-0.13.0.jar;
%jar /buckets/bfsdefault/jars/google-auth-library-oauth2-http-0.13.0.jar;
%jar /buckets/bfsdefault/jars/GoogleBigQueryJDBC42.jar;
%jar /buckets/bfsdefault/jars/google-http-client-1.28.0.jar;
%jar /buckets/bfsdefault/jars/google-http-client-jackson2-1.28.0.jar;
%jar /buckets/bfsdefault/jars/google-oauth-client-1.28.0.jar;
%jar /buckets/bfsdefault/jars/grpc-context-1.18.0.jar;
%jar /buckets/bfsdefault/jars/jackson-core-2.9.6.jar;
%jar /buckets/bfsdefault/jars/joda-time-2.10.1.jar;
%jar /buckets/bfsdefault/jars/opencensus-api-0.18.0.jar;
%jar /buckets/bfsdefault/jars/opencensus-contrib-http-util-0.18.0.jar;
/
;
```
3. Create a connection
Please follow the [Authenticating to a Cloud API Service article](https://cloud.google.com/video-intelligence/docs/common/auth) to get Google service account credentials.
Upload the key to EXAOperation, then create a named connection:
```sql
CREATE OR REPLACE CONNECTION BIGQUERY_CONNECTION
TO 'jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=<your_project_id>;OAuthType=0;OAuthServiceAcctEmail=<your_service_account_email>;OAuthPvtKeyPath=/<path_to_your_bucket>/<name_of your_key_file>';
```
You can find additional information about the JDBC connection string in the [Big Query JDBC installation guide](https://www.simba.com/products/BigQuery/doc/JDBC_InstallGuide/content/jdbc/bq/authenticating/serviceaccount.htm).
4. Create a Virtual Schema
```sql
CREATE VIRTUAL SCHEMA "bigquerytest"
USING ADAPTER.JDBC_ADAPTER
WITH
SQL_DIALECT = 'BIGQUERY'
CONNECTION_NAME = 'BIGQUERY_CONNECTION'
CATALOG_NAME = 'virtualschematest'
SCHEMA_NAME = 'testdataset';
``` | 44.408451 | 230 | 0.741199 | yue_Hant | 0.370878 |
471270d71ebb21974b8e169e0e4db34017d4c169 | 20,186 | md | Markdown | README.md | davestewart/google-maps-icons | 0d13cea94d6c5890f2797f5e3a7215287fd06967 | [
"MIT"
] | null | null | null | README.md | davestewart/google-maps-icons | 0d13cea94d6c5890f2797f5e3a7215287fd06967 | [
"MIT"
] | null | null | null | README.md | davestewart/google-maps-icons | 0d13cea94d6c5890f2797f5e3a7215287fd06967 | [
"MIT"
] | null | null | null | <div class="map-icons"><h3>MapIcons</h3><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/airport_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">airport</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/atm_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">atm</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/bank_dollar_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">bank_dollar</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/bank_euro_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">bank_euro</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/bank_intl_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">bank_intl</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/bank_pound_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">bank_pound</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/bank_yen_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">bank_yen</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/bar_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">bar</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/cafe_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">cafe</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/camping_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">camping</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/cemetery_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">cemetery</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/civic_building_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">civic_building</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/gas_station_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">gas_station</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/golf_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">golf</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/harbour_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">harbour</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/hospital_H_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">hospital_H</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/library_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">library</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/lodging_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">lodging</span></p><p style="margin:6px"><img 
src="http://mt.google.com/vt/icon?name=icons/spotlight/monument_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">monument</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/mountains_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">mountains</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/movie_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">movie</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/museum_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">museum</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/park_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">park</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/parking_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">parking</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/police_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">police</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/post_office_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">post_office</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/restaurant_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">restaurant</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/shopping_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">shopping</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/supermarket_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">supermarket</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/temple_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">temple</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/university_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">university</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/wc_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">wc</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/worship_dharma_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">worship_dharma</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/worship_hindu_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">worship_hindu</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/worship_islam_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">worship_islam</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/worship_jain_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">worship_jain</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/worship_jewish_L_8x.png" style="vertical-align:middle" /> <span 
style="vertical-align:middle">worship_jewish</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/worship_sikh_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">worship_sikh</span></p><h3>MapIcons.local</h3><div style="margin-left:25px"><h3>MapIcons.local.jp</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/ancient_relic_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">ancient_relic</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/bank_japan_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">bank</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/buddist_temple_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">buddist_temple</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/city_office_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">city_office</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/circle_k_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">circle_k</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/hot_spring_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">hot_spring</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/lawson_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">lawson</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/museum_japan_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">museum</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/police_japan_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">police</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/post_office_japan_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">post_office</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/shrine_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">shrine</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/jp/seven_eleven_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">seven_eleven</span></p></div><h3>MapIcons.local.cn</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/cn/government_china_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">government</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/cn/historic_china_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">historic</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/cn/school_china_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">school</span></p></div></div><h3>MapIcons.transit</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/walk_large.png" 
style="vertical-align:middle" /> <span style="vertical-align:middle">walk</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/cycle_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">cycle</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/car_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">car</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/bus_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">bus</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/tram_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">tram</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/rail_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">rail</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/monorail_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">monorail</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/metro_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">metro</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/funicular_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">funicular</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/cablecar_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">cablecar</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/gondola_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">gondola</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/ferry_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">ferry</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/transit/plane_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">plane</span></p><h3>MapIcons.transit.local</h3><div style="margin-left:25px"><h3>MapIcons.transit.local.uk</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/uk-london-metro_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">london_metro</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/uk-london-overground_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">london_overground</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/uk-london-tramlink_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">london_tramlink</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/uk-rail_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">rail</span></p></div><h3>MapIcons.transit.local.fr</h3><div style="margin-left:25px"><p style="margin:6px"><img 
src="http://mt.google.com/vt/icon?name=icons/transit/localizations/fr-paris-rail_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">paris_rail</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/fr-paris-metro_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">paris_metro</span></p></div><h3>MapIcons.transit.local.de</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/de-metro_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">metro</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/de-sbahn_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">sbahn</span></p></div><h3>MapIcons.transit.local.es</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/es-madrid-metro_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">madrid_metro</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/es-madrid-rail_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">madrid_rail</span></p></div><h3>MapIcons.transit.local.jp</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/jp/v1/tokyo-metro_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">tokyo_metro</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/localizations/jp/v1/tokyo-toei_large.png" style="vertical-align:middle" /> <span style="vertical-align:middle">tokyo_toei</span></p></div></div><h3>MapIcons.transit.travelmode</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/tactile/triplabel/travelmode/walk.png" style="vertical-align:middle" /> <span style="vertical-align:middle">walk</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/tactile/triplabel/travelmode/cycle.png" style="vertical-align:middle" /> <span style="vertical-align:middle">cycle</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/tactile/triplabel/travelmode/drive.png" style="vertical-align:middle" /> <span style="vertical-align:middle">drive</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/tactile/triplabel/travelmode/tram.png" style="vertical-align:middle" /> <span style="vertical-align:middle">tram</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/tactile/triplabel/travelmode/airplane.png" style="vertical-align:middle" /> <span style="vertical-align:middle">plane</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/transit/tactile/triplabel/travelmode/airplane-horizontal.png" style="vertical-align:middle" /> <span style="vertical-align:middle">plane_horizontal</span></p></div></div><h3>MapIcons.misc</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/star_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">star</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/work_L_8x.png" style="vertical-align:middle" 
/> <span style="vertical-align:middle">work</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/home_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">home</span></p></div><h3>MapIcons.indoor</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/layers/indoor/toilet.png" style="vertical-align:middle" /> <span style="vertical-align:middle">toilet</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/layers/indoor/elevator.png" style="vertical-align:middle" /> <span style="vertical-align:middle">elevator</span></p></div><h3>MapIcons.generic</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/ad_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">ad</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/generic_emergency_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">emergency</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/generic_establishment_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">establishment</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/generic_recreation_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">recreation</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/generic_retail_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">retail</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/generic_search_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">search</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/generic_transit_L_8x.png" style="vertical-align:middle" /> <span style="vertical-align:middle">transit</span></p></div><h3>MapIcons.markers</h3><div style="margin-left:25px"><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/spotlight-waypoint-b.png&text=&psize=16&font=fonts/Roboto-Regular.ttf&color=FF000000&ax=44&ay=48" style="vertical-align:middle" /> <span style="vertical-align:middle">text</span></p><p style="margin:6px"><img src="http://mt.google.com/vt/icon?name=icons/spotlight/spotlight-poi.png" style="vertical-align:middle" /> <span style="vertical-align:middle">dot</span></p></div></div>
| 10,093 | 20,185 | 0.747796 | yue_Hant | 0.279162 |
4713037ba37f2ce74985967dea1c866a77cbccb5 | 1,724 | md | Markdown | content/book/the-red-notebook.md | theNewDynamic/hugo-controllers-panic | c53e05501f721534d159c41b38852df89115f636 | [
"MIT"
] | null | null | null | content/book/the-red-notebook.md | theNewDynamic/hugo-controllers-panic | c53e05501f721534d159c41b38852df89115f636 | [
"MIT"
] | null | null | null | content/book/the-red-notebook.md | theNewDynamic/hugo-controllers-panic | c53e05501f721534d159c41b38852df89115f636 | [
"MIT"
] | 1 | 2021-02-26T11:40:56.000Z | 2021-02-26T11:40:56.000Z | ---
title: "The Red Notebook"
subtitle:
date: 2002-06-01T05:00:52.000Z
series:
genre: "nonfiction"
subgenre:
- "essay"
language:
authors:
- author/paul-auster.md
translators:
editors:
contributors:
editions:
- binding: paperback
isbn: 9780811214988
date: 2002-06-01T05:00:52.000Z
description: ""
trim_size: "5x7"
page_count: "104"
sales_data:
forsale: false
saleprice:
shipping_weight:
price_us: 12.95
price_cn: 16.00
contributors:
cover_image: "/The_Red_Notebook.jpg"
- binding: ebook
isbn: 9780811221153
date: 2002-06-01T05:00:52.000Z
description: ""
trim_size: ""
page_count: ""
sales_data:
forsale: false
saleprice:
shipping_weight:
price_us: 12.95
price_cn:
contributors:
cover_image: "/The_Red_Notebook.jpg"
featured_image:
file:
draft: false
_slug: the-red-notebook
---
Paul Auster has earned international praise for the imaginative power of his many novels, including _The New York Trilogy_, _Moon Palace_, _The Music of Chance_, _Mr. Vertigo_, and _Timbuktu_. He has also published a number of highly original nonfiction works: _The Invention of Solitude_, _Hand to Mouth_, and _The Art of Hunger_. In _The Red Notebook_, Auster again explores events from the real world – large and small, tragic and comic – that reveal the unpredictable, shifting nature of human experience. A burnt onion pie, a wrong number, a young boy struck by lightning, a man falling off a roof, a scrap of paper discovered in a Paris hotel room – all these form the context for a singular kind of ars poetica, a literary manifesto without theory, cast in the irreducible forms of pure storytelling.
| 31.345455 | 807 | 0.720998 | eng_Latn | 0.917948 |
47136e8a6a1ad6b012ba20b0a7fe005ad7659be1 | 1,435 | md | Markdown | docs/code-quality/ca1025-replace-repetitive-arguments-with-params-array.md | sinkingcn/visualstudio-docs.zh-cn | bc91c4707fa18a81fb551d7cb962d9a19e563315 | [
"CC-BY-4.0",
"MIT"
] | 1 | 2020-06-06T12:30:55.000Z | 2020-06-06T12:30:55.000Z | docs/code-quality/ca1025-replace-repetitive-arguments-with-params-array.md | sinkingcn/visualstudio-docs.zh-cn | bc91c4707fa18a81fb551d7cb962d9a19e563315 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/code-quality/ca1025-replace-repetitive-arguments-with-params-array.md | sinkingcn/visualstudio-docs.zh-cn | bc91c4707fa18a81fb551d7cb962d9a19e563315 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: 'CA1025: Replace repetitive arguments with params array'
ms.date: 11/04/2016
ms.prod: visual-studio-dev15
ms.technology: vs-ide-code-analysis
ms.topic: reference
f1_keywords:
- CA1025
- ReplaceRepetitiveArgumentsWithParamsArray
helpviewer_keywords:
- ReplaceRepetitiveArgumentsWithParamsArray
- CA1025
ms.assetid: f009b340-dea3-4459-8fe1-2143aa8b5d0b
author: gewarren
ms.author: gewarren
manager: douge
ms.workload:
- multiple
ms.openlocfilehash: 027ec9b19bcf8a4a10e8b973b86e57be2dbc6be7
ms.sourcegitcommit: 240c8b34e80952d00e90c52dcb1a077b9aff47f6
ms.translationtype: MT
ms.contentlocale: zh-CN
ms.lasthandoff: 10/23/2018
ms.locfileid: "49838317"
---
# <a name="ca1025-replace-repetitive-arguments-with-params-array"></a>CA1025:用形参数组替换重复的实参
|||
|-|-|
|TypeName|ReplaceRepetitiveArgumentsWithParamsArray|
|CheckId|CA1025|
|类别|Microsoft.Design|
|是否重大更改|非换行|
## <a name="cause"></a>原因
公共类型中的公共或受保护方法具有三个以上参数,且其最后三个参数具有相同的类型。
## <a name="rule-description"></a>规则说明
是未知的自变量的精确数目和变量自变量具有相同的类型,或可传递的类型相同时,请使用参数数组代替重复自变量。 例如,<xref:System.Console.WriteLine%2A>方法提供的常规用途的重载,用于接受任意数量的参数数组<xref:System.Object>参数。
## <a name="how-to-fix-violations"></a>如何解决冲突
若要解决此规则的冲突,请将重复的自变量替换为参数数组。
## <a name="when-to-suppress-warnings"></a>何时禁止显示警告
它始终是安全禁止显示此规则; 的警告但是,这种设计可能会导致可用性问题。
## <a name="example"></a>示例
下面的示例显示了与此规则冲突的类型。
[!code-csharp[FxCop.Design.RepeatArgs#1](../code-quality/codesnippet/CSharp/ca1025-replace-repetitive-arguments-with-params-array_1.cs)] | 28.7 | 140 | 0.797213 | yue_Hant | 0.239872 |
471396ad01c405c35a6f86ae8aad5412b3a6a212 | 213 | md | Markdown | README.md | ralflorent/bremen-big-data-challenge-2019 | 3f75579ec1b16a6cce3d640ba4d2ac06423fa047 | [
"MIT"
] | 1 | 2020-06-14T23:56:05.000Z | 2020-06-14T23:56:05.000Z | README.md | ralflorent/bremen-big-data-challenge-2019 | 3f75579ec1b16a6cce3d640ba4d2ac06423fa047 | [
"MIT"
] | null | null | null | README.md | ralflorent/bremen-big-data-challenge-2019 | 3f75579ec1b16a6cce3d640ba4d2ac06423fa047 | [
"MIT"
] | null | null | null | # Bremen Big Data Challenge 2019
The Bremen Big Data Challenge 2019 is about classifying various everyday and athletic movements. For this, sensor data recorded on one leg above and below the knee are available.
| 71 | 179 | 0.812207 | eng_Latn | 0.998511 |
4714fea5a6c615041a2bde30f3fb41980ca4feda | 734 | md | Markdown | p760e/README.md | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | 1 | 2020-02-20T12:04:46.000Z | 2020-02-20T12:04:46.000Z | p760e/README.md | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | null | null | null | p760e/README.md | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | null | null | null | # 760. Find Anagram Mappings (Easy)
Given two lists Aand B, and B is an anagram of A. B is an anagram of A means B is made by randomizing the order of the elements in A.
We want to find an index mapping P, from A to B. A mapping P[i] = j means the ith element in A appears in B at index j.
These lists A and B may contain duplicates. If there are multiple answers, output any of them.
For example, given
A = [12, 28, 46, 32, 50]
B = [50, 12, 32, 46, 28]
We should return
[1, 4, 3, 2, 0]
as P[0] = 1 because the 0th element of A appears at B[1], and P[1] = 4 because the 1st element of A appears at B[4], and so on.
Note:
A, B have equal lengths in range [1, 100].
A[i], B[i] are integers in range [0, 10^5].
#GOOGL
#Hash Table
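The `#Hash Table` tag hints at the standard approach: record the indices of every value in `B`, then consume one recorded index for each element of `A`, which also handles duplicates. A minimal Python sketch (illustrative only — the function name is not part of this repo's solution files):
```python
from collections import defaultdict

def anagram_mappings(A, B):
    # Map each value in B to the list of indices where it occurs,
    # so duplicate values keep distinct (but equally valid) positions.
    positions = defaultdict(list)
    for j, b in enumerate(B):
        positions[b].append(j)
    # Pop one recorded index per element of A; any valid mapping is accepted.
    return [positions[a].pop() for a in A]

# Example from above:
print(anagram_mappings([12, 28, 46, 32, 50], [50, 12, 32, 46, 28]))  # [1, 4, 3, 2, 0]
```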
| 31.913043 | 133 | 0.682561 | eng_Latn | 0.999679 |
47151537ab45b70f83fc4d47acf83e24ccbba13d | 13,232 | md | Markdown | README.md | pivotal-cf/docs-appdynamics-analytics | bc1a555e4d364fc6c7eeeff2e41d39fd65f3bf5c | [
"Apache-2.0"
] | 1 | 2021-10-18T22:38:14.000Z | 2021-10-18T22:38:14.000Z | README.md | pivotal-cf/docs-appdynamics-analytics | bc1a555e4d364fc6c7eeeff2e41d39fd65f3bf5c | [
"Apache-2.0"
] | 14 | 2018-11-30T18:11:59.000Z | 2020-07-14T18:25:54.000Z | README.md | pivotal-cf/docs-appdynamics-analytics | bc1a555e4d364fc6c7eeeff2e41d39fd65f3bf5c | [
"Apache-2.0"
] | 3 | 2019-05-01T20:33:58.000Z | 2021-10-18T22:37:47.000Z | ## Pivotal Cloud Foundry Partners Doc Template for AppDynamics
Pivotal Cloud Foundry (PCF) is an open-source cloud computing Platform as a Service (PaaS). Developers can develop, deploy, operate, and scale cloud-native applications for public and private clouds.
AppDynamics is a PCF technology partner. Pivotal partners test, integrate, and package joint products. Each PCF partner product is represented on the network.pivotal.io site. The AppDynamics partner service docs appear on the [front page](http://docs.pivotal.io) under **Pivotal Cloud Foundry Add-Ons > Monitoring, Metrics and Logging**.
Pivotal Cloud Foundry (PCF) helps partners prepare documentation for their services for the [Pivotal Network](https://network.pivotal.io/) by providing a standard template.
### <a id='template'></a>AppDynamics Product Documentation
AppDynamics has three PCF products:
- [AppDynamics Application Performance Monitoring for PCF](https://network.pivotal.io/products/p-appdynamics/)
- [AppDynamics Platform Monitoring for PCF](https://network.pivotal.io/products/appdynamics-platform/)
- [AppDynamics Application Analytics for PCF](https://network.pivotal.io/products/appdynamics-analytics)
For each product, AppDynamics uses [PCF partner documentation repository](https://github.com/pivotal-cf/docs-partners-template) templates for each topic:
* [index.html.md.erb](./docs-content/index.html.md.erb): The index of the docs.
* [installing.html.md.erb](./docs-content/installing.html.md.erb): How to install and configure the tile.
* [using.html.md.erb](./docs-content/using.html.md.erb): How to use the product.
* [release-notes.html.md.erb](./docs-content/release-notes.html.md.erb): Release notes for the product.
### Download PCF Documentation Templates
This section is for creating AppDynamics PCF Partner documentation for the first time. If you are working with existing AppDynamics documentation, go to
[Daily Workflow](#Daily-Workflow).
### Build, View, and Edit Docs on macOS
AppDynamics PCF documentation lives in the [PCF docs Github repository](https://github.com/pivotal-cf?utf8=%E2%9C%93&q=appdynamics&type=&language=). To edit, stage, and publish changes, you must work with locally cloned branches of each product and topic repository.
This section describes how to build, view, and edit the AppDynamics documentation from your macOS machine.
1. [Prerequisites](#Prerequisites)
2. [Getting Started](#Getting-Started)
3. [Install Ruby](#Install-Ruby)
4. [Set up Git](#Set-up-Git)
5. [Install Bookbinder](#Install-Bookbinder)
6. [Build the docs locally](#Build-the-docs-locally)
#### Prerequisites
1. Sign up for a [Github](http://github.com) account with your AppDynamics work email.
2. [Request AppDynamics Github access](https://jira.corp.appdynamics.com/servicedesk/customer/portal/9/create/70)
**Note**: You might have already created a Github account and received AppDynamics access in the new hire orientation. Try logging in with your work email to check. If you do submit an access request, keep in mind that it may take a few hours or days for IT to respond to your request.
#### Getting Started
In a Terminal window:
1. Make a **pivotal-cf** workspace directory and navigate to that directory.
```
$ mkdir pivotal-cf
```
```
$ cd pivotal-cf
```
2. Check if you have homebrew installed.
```
$ brew update
```
If not, install homebrew.
$ /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
3. Run `brew doctor`. If there are warnings about conflicting scripts in your path, you can ignore these for now.
```
$ brew doctor
```
#### Install Ruby
1. Install Ruby Version Manager (RVM).
```
$ \curl -sSL https://get.rvm.io | bash -s stable
```
2. Install Ruby version 2.3.0:
1. Clean default gems.
```
$ echo "" > ~/.rvm/gemsets/default.gems
```
2. Clean global gems.
```
$ echo "" > ~/.rvm/gemsets/global.gems
```
3. Install Ruby 2.3.0.
```
$ rvm install 2.3.0 --rubygems 2.4.6
```
<p class="note"><strong>Note</strong>: Run this command in a new terminal window if you receive the error `RVM:command not found`, or run `$ source ~/.profile` in current terminal.
4. Set default Ruby version to 2.3.0.
```
$ rvm use 2.3.0 --default
```
#### Set up Git
1. Download and install git by following [these instructions](https://git-scm.com/download/).
2. Verify you successfully downloaded git.
```
$ git --version
```
3. *(Optional)* Install your own (non-system) bash-completion.
```
$ brew install git bash-completion
```
4. Check for existing SSH keys.
```
$ ls -al ~/.ssh
```
    You have an existing key pair if you see one of the following filenames:
- _id_dsa.pub_
- _id_ecdsa.pub_
- _id_ed25519.pub_
- _id_rsa.pub_
If you don't have an existing key pair, then [generate a new SSH key](https://help.github.com/en/articles/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent).
5. Add your SSH key to the ssh-agent:
1. Start the ssh-agent in the background.
```
$ eval "$(ssh-agent -s)"
```
2. Add your SSH private key to the ssh-agent.
```
$ ssh-add -K ~/.ssh/id_rsa
```
6. [Add the new SSH key to your GitHub account](https://help.github.com/en/articles/adding-a-new-ssh-key-to-your-github-account).
#### Install Bookbinder
1. Install `bundler`.
```
$ gem install bundler
```
1. Install bookbinder (the `bookbindery` gem).
```
$ gem install bookbindery
```
#### Build the Docs Locally
1. Clone the docs repository you want to work in. There are three AppDynamics templates:
- [docs-appdynamics-apm](https://github.com/pivotal-cf/docs-appdynamics-apm)
- [docs-appdynamics-platform](https://github.com/pivotal-cf/docs-appdynamics-platform)
- [docs-appdynamics-analytics](https://github.com/pivotal-cf/docs-appdynamics-analytics)
Replace TILENAME with `apm`, `platform`, or `analytics`.
    * For SSH: `$ git clone ssh://git@github.com/pivotal-cf/docs-appdynamics-TILENAME.git`
    * For HTTPS: `$ git clone https://github.com/pivotal-cf/docs-appdynamics-TILENAME.git`
2. Navigate to the `docs-book` subdirectory of the repository.
```
    $ cd docs-appdynamics-TILENAME/docs-book
```
3. Run `bundle install` to install all book dependencies.
```
$ bundle install
```
4. Run `bundle exec bookbinder watch` to build the book on your machine.
```
$ bundle exec bookbinder watch
```
    **Note**: If you receive an error such as `LoadError: bookbindery is not part of the bundle`, make sure you're running the command in the `docs-book` directory, and not the `docs-content` directory.
5. In your browser, navigate to `http://localhost:4567` or `http://127.0.x.x` to view the book locally and "watch" any changes that you make to the source `html.md.erb` files. As you make and save changes to the local source files for your site, you will see them in your browser after a slight delay.
6. After each session of writing or revising your docs source files, commit and push them to your GitHub repository. See the [Daily Workflow](#daily-workflow) section for details.
### <a id='bookbinder'></a>How To Use Bookbinder To View the Docs
[Bookbinder](https://github.com/pivotal-cf/bookbinder/blob/master/README.md) is a command-line utility for stitching Markdown docs into a hostable web app. The PCF Docs Team uses Bookbinder to publish their docs site, but you can also use Bookbinder to view a live version of AppD documentation on your local machine.
Bookbinder draws the content for the site from `docs-content`, the subnav from `docs-book`, and various layout configuration and assets from `docs-layout`.
To use Bookbinder to view AppD documentation, perform the following steps:
1. Install Bookbinder by running `gem install bookbindery`. If you have trouble, consult the [Zero to Bookbinder](#zero-to-bookbinder) section to make sure you have the correct dependencies installed.
1. On your local machine, `cd` into `docs-book` in the cloned repo.
1. Run `bundle install` to make sure you have all the necessary gems installed.
1. Build your documentation site with `bookbinder` in one of the two following ways:
* Run `bundle exec bookbinder watch` to build an interactive version of the docs and navigate to `localhost:4567/myservice/` in a browser. (It may take a moment for the site to load at first.) This builds a site from your content repo at `docs-content`, and then watches that repo to update the site if you make any changes to the repo.
* Run `bundle exec bookbinder bind local` to build a Rack web-app of the book. After the bind has completed, `cd` into the `final_app` directory and run `rackup`. Then navigate to `localhost:9292/myservice/` in a browser.
#### Things to Remember
- To edit, make sure you're in the `docs-content` space in the designated tile documentation. For example:
```
$ cd pivotal-cf/docs-appdynamics-platform/docs-content
```
- In the **pivotal-cf** repository, there’s a Pivotal Partners template style guide for each tile. For example, here is the docs-appdynamics-apm [style guide](https://github.com/pivotal-cf/docs-appdynamics-apm/blob/master/style-guide.md).
- To preview, make sure you're in the `docs-book` space in the designated tile documentation. For example:
```
$ cd pivotal-cf/docs-appdynamics-platform/docs-book
```
### Daily Workflow
This section describes how to sync to the master branch, navigate to the `release_next` branch, make edits to a file, and push your changes to the repository.
**Note**: Never make changes to the master branch directly.
1. Navigate to the docs-content folder of the document you want to edit.
```
$ cd docs-appdynamics-TILENAME/docs-content
```
2. Verify what branch you're working in. **You should always be working in a branch of the master.**
```
$ git status
```
3. Verify what local and remote branches exist for your local repository.
```
$ git branch -a
```
4. Check out the master branch.
```
$ git checkout master
```
5. Sync your local branch with the most up-to-date master branch.
```
$ git pull
```
6. Check out the `release_next` branch.
```
$ git checkout -b release_next
```
7. Verify you are working in the `release_next` branch.
```
$ git status
```
8. In your text editor, open, edit, and save a file.
9. Add the edited file to the `release_next` branch.
```
$ git add <filename>
```
    You can also add all files to the `release_next` branch.
```
$ git add .
```
10. Commit the changes to the `release_next` branch. Replace the `DESCRIPTION` with a description of what you did (like a Jira ticket description).
    ```
    $ git commit -m 'DESCRIPTION'
```
If the commit fails, try setting up your global git configuration:
```
    $ git config --global user.name "<Firstname> <Lastname>"
$ git config --global user.email [email protected]
$ whoami
$ git config --local --list
    $ cat ~/.gitconfig
```
11. Push your changes to the repo.
```
$ git push --set-upstream origin release_next
```
If you receive the error `Invalid username or password`, try the following:
1. Generate a [Personal Access Token](https://github.com/settings/tokens). (Detailed guide on [Creating a personal access token for the command line](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/).)
2. Copy the Personal Access Token.
3. Re-attempt the `git push` command and use Personal Access Token in the place of your password.
12. Create a pull request for `release_next` on GitHub in your browser (ex: https://github.com/pivotal-cf/docs-appdynamics-platform/pull/new/release_next).
13. Add reviewers to your pull request.
### About Subnavs of Published Tile Documentation
After your documentation has been published, the subnav used for the live documentation is contained in this directory: https://github.com/pivotal-cf/docs-book-partners/tree/master/master_middleman/source/subnavs
However, you should also continue to maintain the local subnav file so that the subnav looks correct when you or another writer builds the documentation locally with bookbinder for review or editing.
To edit a subnav for your tile documentation, follow these steps:
1. Make a pull request against the subnav file in https://github.com/pivotal-cf/docs-book-partners/tree/master/master_middleman/source/subnavs
2. Make the same changes in the subnav file (in /docs-book/master_middleman/source/subnavs/ of your tile repo) and make a pull request for that change too.
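For orientation, a Bookbinder subnav file is just a small HTML (ERB) list of links. Below is a minimal sketch, assuming the standard Bookbinder layout; the file name and link targets are illustrative and not copied from the real repositories:
```html
<!-- docs-book/master_middleman/source/subnavs/appdynamics-TILENAME_subnav.erb (illustrative) -->
<div class="nav-content">
  <ul>
    <li><a href="./index.html">AppDynamics for PCF</a></li>
    <li><a href="./installing.html">Installing and Configuring the Tile</a></li>
    <li><a href="./release-notes.html">Release Notes</a></li>
  </ul>
</div>
```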
Happy documenting!


| 41.479624 | 340 | 0.709417 | eng_Latn | 0.929832 |
4715d99b55a3d42527d3d9bf2645464337dfcad7 | 760 | md | Markdown | CONTRIBUTING.md | arjan/matrix-elixir-sdk | 97ad3d50fe07df51e79a2938c7373a76f68971c3 | [
"MIT"
] | 51 | 2020-06-30T13:34:34.000Z | 2022-01-30T06:04:26.000Z | CONTRIBUTING.md | suranyami/matrix-elixir-sdk | db8f405e2ce341d967c8b95b514645e607bb0402 | [
"MIT"
] | 43 | 2020-06-15T09:44:35.000Z | 2021-04-09T03:44:32.000Z | CONTRIBUTING.md | suranyami/matrix-elixir-sdk | db8f405e2ce341d967c8b95b514645e607bb0402 | [
"MIT"
] | 7 | 2020-08-05T18:20:48.000Z | 2021-07-28T21:07:31.000Z | # Contributing
First of all, thank you for taking the time to contribute!
Issues and pull requests are very welcome for bug reports and feature requests and/or proposals. Please provide as much detail as possible in the descriptions.
All code needs to include programmatic tests, documentation and to have been tested manually. Tests will run on the repo's CI, and should pass before asking for a PR review.
If you're interested in contributing to the project long term (every skill level welcome), let us know [over email](mailto:[email protected]) and we'll onboard you through a call and invite you to our matrix room.
Please note that for all communication and interactions we expect our [code of conduct](CODE_OF_CONDUCT.md) to be followed.
| 63.333333 | 221 | 0.796053 | eng_Latn | 0.999447 |
4715e320adcb6af25f0d2899faf3fdb212a61414 | 10,588 | md | Markdown | docs/vs-2015/modeling/walkthrough-debugging-a-text-template-that-accesses-a-model.md | 1DanielaBlanco/visualstudio-docs.es-es | 9e934cd5752dc7df6f5e93744805e3c600c87ff0 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/vs-2015/modeling/walkthrough-debugging-a-text-template-that-accesses-a-model.md | 1DanielaBlanco/visualstudio-docs.es-es | 9e934cd5752dc7df6f5e93744805e3c600c87ff0 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/vs-2015/modeling/walkthrough-debugging-a-text-template-that-accesses-a-model.md | 1DanielaBlanco/visualstudio-docs.es-es | 9e934cd5752dc7df6f5e93744805e3c600c87ff0 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: 'Walkthrough: Debugging a text template that accesses a model | Microsoft Docs'
ms.custom: ''
ms.date: 11/15/2016
ms.prod: visual-studio-tfs-dev14
ms.reviewer: ''
ms.suite: ''
ms.tgt_pltfrm: ''
ms.topic: article
ms.assetid: af46a7fe-6b98-4d3d-b816-0bbf8e81e220
caps.latest.revision: 8
author: gewarren
ms.author: gewarren
manager: douge
ms.openlocfilehash: ca80111415c869543297ed24707ae27f0490f07b
ms.sourcegitcommit: 240c8b34e80952d00e90c52dcb1a077b9aff47f6
ms.translationtype: MT
ms.contentlocale: es-ES
ms.lasthandoff: 10/23/2018
ms.locfileid: "49924894"
---
# <a name="walkthrough-debugging-a-text-template-that-accesses-a-model"></a>Tutorial: Depurar una plantilla de texto que tiene acceso a un modelo
[!INCLUDE[vs2017banner](../includes/vs2017banner.md)]
Al modificar o agregar las plantillas de texto en una solución de lenguaje específico de dominio, es posible que obtenga errores cuando el motor transforma la plantilla al código fuente, o bien cuando se compila el código generado. El siguiente tutorial muestra algunas de las cosas que puede hacer para depurar una plantilla de texto.
> [!NOTE]
> Para obtener más información sobre el texto de las plantillas en general, vea [generación de código y plantillas de texto T4](../modeling/code-generation-and-t4-text-templates.md). Para obtener más información sobre la depuración de plantillas de texto, consulte [Tutorial: depurar una plantilla de texto](http://msdn.microsoft.com/library/5c3fd3b7-c110-4e86-a22f-d5756be6b94f).
## <a name="creating-a-domain-specific-language-solution"></a>Creación de una solución de lenguaje específico de dominio
En este procedimiento, creará una solución de lenguaje específico de dominio que tiene las siguientes características:
- Nombre: DebuggingTestLanguage
- Plantilla de solución: lenguaje mínimo
- Extensión de archivo: .ddd
- Nombre de la compañía: Fabrikam
Para obtener más información acerca de cómo crear una solución de lenguaje específico de dominio, consulte [Cómo: crear soluciones de lenguajes específicos de dominio](../modeling/how-to-create-a-domain-specific-language-solution.md).
## <a name="creating-a-text-template"></a>Creación de una plantilla de texto
Agregar una plantilla de texto a la solución.
#### <a name="to-create-a-text-template"></a>Para crear una plantilla de texto
1. Compile la solución y comenzar a ejecutarla en el depurador. (En el **compilar** menú, haga clic en **recompilar solución**y, a continuación, en el **depurar** menú, haga clic en **Iniciar depuración**.) Una nueva instancia de Visual Studio abre el proyecto de depuración.
2. Agregue un archivo de texto denominado `DebugTest.tt` para el proyecto de depuración.
3. Asegúrese de que el **Custom Tool** propiedad de DebugTest.tt está establecida en `TextTemplatingFileGenerator`.
## <a name="debugging-directives-that-access-a-model-from-a-text-template"></a>Depuración de las directivas que tienen acceso a un modelo desde una plantilla de texto
Antes de que puede tener acceso a un modelo de las instrucciones y expresiones en una plantilla de texto, primero debe llamar a un procesador de directivas personalizadas. Llamar el procesador de directivas personalizadas hace que las clases en el modelo disponible para el código de plantilla de texto como propiedades. Para obtener más información, consulte [acceso a los modelos de plantillas de texto](../modeling/accessing-models-from-text-templates.md).
En los procedimientos siguientes, que desea depurar un nombre de la directiva incorrecto y un nombre de propiedad incorrecto.
#### <a name="to-debug-an-incorrect-directive-name"></a>Para depurar un nombre de directiva incorrecto
1. Reemplace el código de DebugTest.tt con el código siguiente:
> [!NOTE]
> El código contiene un error. Que va a presentar el error con el fin de depurarla.
```csharp
<#@ template language="C#" inherits="Microsoft.VisualStudio.TextTemplating.VSHost.ModelingTextTransformation"#>
<#@ output extension=".txt" #>
<#@ modelRoot processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=ExampleModel" #>
Model: <#= this.ExampleModel #>
<#
foreach (ExampleElement element in this.ExampleModel.Elements)
{
#>
Element: <#= element.Name #>
<#
}
#>
```
```vb
<#@ template language="VB" inherits="Microsoft.VisualStudio.TextTemplating.VSHost.ModelingTextTransformation"#>
<#@ output extension=".txt" #>
<#@ modelRoot processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=ExampleModel" #>
Model: <#= Me.ExampleModel #>
<#
For Each element as ExampleElement in Me.ExampleModel.Elements
#>
Element: <#= element.Name #>
<#
Next
#>
```
2. In **Solution Explorer**, right-click DebugTest.tt, and then click **Run Custom Tool**.
    The **Error List** window shows this error:
    **The processor named 'DebuggingTestLanguageDirectiveProcessor' does not support the directive named 'modelRoot'. The transformation will not be run.**
    In this case, the directive call contains an incorrect directive name. You have specified `modelRoot` as the directive name, but the correct directive name is `DebuggingTestLanguage`.
3. Double-click the error in the **Error List** window to jump to the code.
4. To correct the code, change the directive name to `DebuggingTestLanguage`.
    The change is highlighted.
```csharp
<#@ DebuggingTestLanguage processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=ExampleModel" #>
```
```vb
<#@ DebuggingTestLanguage processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=ExampleModel" #>
```
5. In **Solution Explorer**, right-click DebugTest.tt, and then click **Run Custom Tool**.
    Now the system transforms the text template and generates the corresponding output file. You will not see any errors in the **Error List** window.
#### <a name="to-debug-an-incorrect-property-name"></a>To debug an incorrect property name
1. Replace the code in DebugTest.tt with the following code:
    > [!NOTE]
    > The code contains an error. You are introducing the error in order to debug it.
```csharp
<#@ template language="C#" inherits="Microsoft.VisualStudio.TextTemplating.VSHost.ModelingTextTransformation"#>
<#@ output extension=".txt" #>
<#@ DebuggingTestLanguage processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=LibraryModel" #>
Model: <#= this.ExampleModel #>
<#
foreach (ExampleElement element in this.ExampleModel.Elements)
{
#>
Element: <#= element.Name #>
<#
}
#>
```
```vb
<#@ template language="VB" inherits="Microsoft.VisualStudio.TextTemplating.VSHost.ModelingTextTransformation"#>
<#@ output extension=".txt" #>
<#@ DebuggingTestLanguage processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=LibraryModel" #>
Model: <#= Me.ExampleModel #>
<#
For Each element as ExampleElement in Me.ExampleModel.Elements
#>
Element: <#= element.Name #>
<#
Next
#>
```
2. In **Solution Explorer**, right-click DebugTest.tt, and then click **Run Custom Tool**.
    The **Error List** window appears and shows one of these errors:
    (C#)
    **Compiling transformation: 'Microsoft.VisualStudio.TextTemplating\<GUID>.GeneratedTextTransformation' does not contain a definition for 'ExampleModel'**
    (Visual Basic)
    **Compiling transformation: 'ExampleModel' is not a member of 'Microsoft.VisualStudio.TextTemplating\<GUID>.GeneratedTextTransformation'.**
    In this case, the text template code contains an incorrect property name. You have specified `ExampleModel` as the property name, but the correct property name is `LibraryModel`. You can find the correct property name in the `provides` parameter, as shown in the following code:
```
<#@ DebuggingTestLanguage processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=LibraryModel" #>
```
3. Double-click the error in the **Error List** window to jump to the code.
4. To correct the code, change the property name to `LibraryModel` in the text template code.
    The changes are highlighted.
```csharp
<#@ template language="C#" inherits="Microsoft.VisualStudio.TextTemplating.VSHost.ModelingTextTransformation"#>
<#@ output extension=".txt" #>
<#@ DebuggingTestLanguage processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=LibraryModel" #>
Model: <#= this.LibraryModel #>
<#
foreach (ExampleElement element in this.LibraryModel.Elements)
{
#>
Element: <#= element.Name #>
<#
}
#>
```
```vb
<#@ template language="VB" inherits="Microsoft.VisualStudio.TextTemplating.VSHost.ModelingTextTransformation"#>
<#@ output extension=".txt" #>
<#@ DebuggingTestLanguage processor="DebuggingTestLanguageDirectiveProcessor" requires="fileName='Sample.ddd'" provides="ExampleModel=LibraryModel" #>
Model: <#= Me.LibraryModel #>
<#
For Each element as ExampleElement in Me.LibraryModel.Elements
#>
Element: <#= element.Name #>
<#
Next
#>
```
5. In **Solution Explorer**, right-click DebugTest.tt, and then click **Run Custom Tool**.
    Now the system transforms the text template and generates the corresponding output file. You will not see any errors in the **Error List** window.
| 47.693694 | 462 | 0.70986 | spa_Latn | 0.884538 |
4716386d368883f54970050122584007391617a4 | 17,328 | md | Markdown | src/expressions.md | s3rvac/rust-lang-reference | 0f63519ea10c028f48b2dbf7d0a2454203b68b0b | [
"Apache-2.0",
"MIT"
] | 1 | 2021-03-22T21:48:17.000Z | 2021-03-22T21:48:17.000Z | src/expressions.md | chrisduerr/reference | ce27a340b91d22bd7be085d373487f2fcb94263e | [
"Apache-2.0",
"MIT"
] | null | null | null | src/expressions.md | chrisduerr/reference | ce27a340b91d22bd7be085d373487f2fcb94263e | [
"Apache-2.0",
"MIT"
] | null | null | null | # Expressions
> **<sup>Syntax</sup>**
> _Expression_ :
> [_LiteralExpression_]
> | [_PathExpression_]
> | [_BlockExpression_]
> | [_OperatorExpression_]
> | [_GroupedExpression_]
> | [_ArrayExpression_]
> | [_IndexExpression_]
> | [_TupleExpression_]
> | [_TupleIndexingExpression_]
> | [_StructExpression_]
> | [_EnumerationVariantExpression_]
> | [_CallExpression_]
> | [_MethodCallExpression_]
> | [_FieldExpression_]
> | [_ClosureExpression_]
> | [_LoopExpression_]
> | [_ContinueExpression_]
> | [_BreakExpression_]
> | [_RangeExpression_]
> | [_IfExpression_]
> | [_IfLetExpression_]
> | [_MatchExpression_]
> | [_ReturnExpression_]
An expression may have two roles: it always produces a *value*, and it may have
*effects* (otherwise known as "side effects"). An expression *evaluates to* a
value, and has effects during *evaluation*. Many expressions contain
sub-expressions (operands). The meaning of each kind of expression dictates
several things:
* Whether or not to evaluate the sub-expressions when evaluating the expression
* The order in which to evaluate the sub-expressions
* How to combine the sub-expressions' values to obtain the value of the
expression
In this way, the structure of expressions dictates the structure of execution.
Blocks are just another kind of expression, so blocks, statements, expressions,
and blocks again can recursively nest inside each other to an arbitrary depth.
## Expression precedence
The precedence of Rust operators and expressions is ordered as follows, going
from strong to weak. Binary Operators at the same precedence level are grouped
in the order given by their associativity.
| Operator/Expression | Associativity |
|-----------------------------|---------------------|
| Paths | |
| Method calls | |
| Field expressions | left to right |
| Function calls, array indexing | |
| `?` | |
| Unary `-` `*` `!` `&` `&mut` | |
| `as` | left to right |
| `*` `/` `%` | left to right |
| `+` `-` | left to right |
| `<<` `>>` | left to right |
| `&` | left to right |
| `^` | left to right |
| <code>|</code> | left to right |
| `==` `!=` `<` `>` `<=` `>=` | Require parentheses |
| `&&` | left to right |
| <code>||</code> | left to right |
| `..` `..=` | Require parentheses |
| `=` `+=` `-=` `*=` `/=` `%=` <br> `&=` <code>|=</code> `^=` `<<=` `>>=` | right to left |
| `return` `break` closures | |
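For example (an illustrative snippet, not part of the normative text), comparison operators cannot be chained and must be grouped with explicit parentheses:

```rust
fn main() {
    let (a, b, c) = (1, 2, 2);
    // let x = a < b == true;    // error: comparison operators cannot be chained
    let x = (a < b) == (b <= c); // explicit parentheses are required
    assert!(x);
}
```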
## Place Expressions and Value Expressions
Expressions are divided into two main categories: place expressions and
value expressions. Likewise within each expression, sub-expressions may occur
in either place context or value context. The evaluation of an expression
depends both on its own category and the context it occurs within.
A *place expression* is an expression that represents a memory location. These
expressions are [paths] which refer to local variables, [static variables],
[dereferences] (`*expr`), [array indexing] expressions (`expr[expr]`),
[field] references (`expr.f`) and parenthesized place expressions. All other
expressions are value expressions.
A *value expression* is an expression that represents an actual value.
The left operand of an [assignment][assign] or [compound assignment] expression
is a place expression context, as is the single operand of a unary [borrow], and
the operand of any [implicit borrow]. The discriminant or subject of a
[match expression][match] and right side of a [let statement] is also a place
expression context. All other expression contexts are value expression contexts.
> Note: Historically, place expressions were called *lvalues* and value
> expressions were called *rvalues*.
### Moved and copied types
When a place expression is evaluated in a value expression context, or is bound
by value in a pattern, it denotes the value held _in_ that memory location. If
the type of that value implements [`Copy`], then the value will be copied. In
the remaining situations if that type is [`Sized`], then it may be possible to
move the value. Only the following place expressions may be moved out of:
* [Variables] which are not currently borrowed.
* [Temporary values](#temporary-lifetimes).
* [Fields][field] of a place expression which can be moved out of and
doesn't implement [`Drop`].
* The result of [dereferencing] an expression with type [`Box<T>`] and that can
also be moved out of.
Moving out of a place expression that evaluates to a local variable, the
location is deinitialized and cannot be read from again until it is
reinitialized. In all other cases, trying to use a place expression in a value
expression context is an error.
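For example (illustrative only):

```rust
fn main() {
    let s = String::from("hello"); // String is Sized and does not implement Copy
    let t = s;                     // move: `s` is deinitialized here
    // println!("{}", s);          // error[E0382]: borrow of moved value: `s`
    let n = 5u32;                  // u32 implements Copy
    let m = n;                     // copy: `n` remains usable
    println!("{} {} {}", t, n, m);
}
```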
### Mutability
For a place expression to be [assigned][assign] to, mutably [borrowed][borrow],
[implicitly mutably borrowed], or bound to a pattern containing `ref mut` it
must be _mutable_. We call these *mutable place expressions*. In contrast,
other place expressions are called *immutable place expressions*.
The following expressions can be mutable place expression contexts:
* Mutable [variables], which are not currently borrowed.
* [Mutable `static` items].
* [Temporary values].
* [Fields][field], this evaluates the subexpression in a mutable place
expression context.
* [Dereferences] of a `*mut T` pointer.
* Dereference of a variable, or field of a variable, with type `&mut T`. Note:
This is an exception to the requirement of the next rule.
* Dereferences of a type that implements `DerefMut`, this then requires that
  the value being dereferenced is evaluated in a mutable place expression context.
* [Array indexing] of a type that implements `DerefMut`, this
  then evaluates the value being indexed, but not the index, in a mutable place
  expression context.
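For example (illustrative only):

```rust
fn main() {
    let mut v = vec![1, 2, 3];
    v[0] = 10;           // indexing a type that implements `DerefMut` is a mutable place
    let r = &mut v;
    r[1] = 20;           // dereference of a variable of type `&mut T`
    *(&mut r[2]) = 30;   // dereference of a `&mut T` is a mutable place
    assert_eq!(*r, vec![10, 20, 30]);
}
```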
### Temporary lifetimes
When using a value expression in most place expression contexts, a temporary
unnamed memory location is created initialized to that value and the expression
evaluates to that location instead, except if promoted to `'static`. Promotion
of a value expression to a `'static` slot occurs when the expression could be
written in a constant, borrowed, and dereferencing that borrow where the
expression was the originally written, without changing the runtime behavior.
That is, the promoted expression can be evaluated at compile-time and the
resulting value does not contain [interior mutability] or [destructors] (these
properties are determined based on the value where possible, e.g. `&None`
always has the type `&'static Option<_>`, as it contains nothing disallowed).
Otherwise, the lifetime of temporary values is typically
- the innermost enclosing statement; the tail expression of a block is
considered part of the statement that encloses the block, or
- the condition expression or the loop conditional expression if the
temporary is created in the condition expression of an `if` or in the loop
conditional expression of a `while` expression.
When a temporary value expression is being created that is assigned into a
[`let` declaration][let], however, the temporary is created with the lifetime of
the enclosing block instead, as using the enclosing [`let` declaration][let]
would be a guaranteed error (since a pointer to the temporary
would be stored into a variable, but the temporary would be freed before the
variable could be used). The compiler uses simple syntactic rules to decide
which values are being assigned into a `let` binding, and therefore deserve a
longer temporary lifetime.
Here are some examples:
- `let x = foo(&temp())`. The expression `temp()` is a value expression. As it
is being borrowed, a temporary is created which will be freed after
the innermost enclosing statement; in this case, the `let` declaration.
- `let x = temp().foo()`. This is the same as the previous example,
except that the value of `temp()` is being borrowed via autoref on a
method-call. Here we are assuming that `foo()` is an `&self` method
defined in some trait, say `Foo`. In other words, the expression
`temp().foo()` is equivalent to `Foo::foo(&temp())`.
- `let x = if foo(&temp()) {bar()} else {baz()};`. The expression `temp()` is
a value expression. As the temporary is created in the condition expression
of an `if`, it will be freed at the end of the condition expression;
in this example before the call to `bar` or `baz` is made.
- `let x = if temp().must_run_bar {bar()} else {baz()};`.
Here we assume the type of `temp()` is a struct with a boolean field
`must_run_bar`. As the previous example, the temporary corresponding to
`temp()` will be freed at the end of the condition expression.
- `while foo(&temp()) {bar();}`. The temporary containing the return value from
the call to `temp()` is created in the loop conditional expression. Hence it
will be freed at the end of the loop conditional expression; in this example
before the call to `bar` if the loop body is executed.
- `let x = &temp()`. Here, the same temporary is being assigned into
`x`, rather than being passed as a parameter, and hence the
temporary's lifetime is considered to be the enclosing block.
- `let x = SomeStruct { foo: &temp() }`. As in the previous case, the
temporary is assigned into a struct which is then assigned into a
binding, and hence it is given the lifetime of the enclosing block.
- `let x = [ &temp() ]`. As in the previous case, the
temporary is assigned into an array which is then assigned into a
binding, and hence it is given the lifetime of the enclosing block.
- `let ref x = temp()`. In this case, the temporary is created using a ref
binding, but the result is the same: the lifetime is extended to the enclosing
block.
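For example (illustrative only):

```rust
fn main() {
    // The temporary for `String::from(..)` is freed at the end of this statement:
    let len = String::from("hello").len();
    // Here the temporary is assigned into a `let` binding via `&temp()`,
    // so its lifetime is extended to the enclosing block:
    let s = &String::from("world");
    println!("{} {}", len, s);
}
```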
### Implicit Borrows
Certain expressions will treat an expression as a place expression by implicitly
borrowing it. For example, it is possible to compare two unsized [slices] for
equality directly, because the `==` operator implicitly borrows its operands:
```rust
# let c = [1, 2, 3];
# let d = vec![1, 2, 3];
let a: &[i32];
let b: &[i32];
# a = &c;
# b = &d;
// ...
*a == *b;
// Equivalent form:
::std::cmp::PartialEq::eq(&*a, &*b);
```
Implicit borrows may be taken in the following expressions:
* Left operand in [method-call] expressions.
* Left operand in [field] expressions.
* Left operand in [call expressions].
* Left operand in [array indexing] expressions.
* Operand of the [dereference operator] \(`*`).
* Operands of [comparison].
* Left operands of the [compound assignment].
## Constant expressions
Certain types of expressions can be evaluated at compile time. These are called
_constant expressions_. Certain places, such as in
[constants](items/constant-items.html) and [statics](items/static-items.html),
require a constant expression, and are always evaluated at compile time. In
other places, such as in [`let` statements](statements.html#let-statements),
constant expressions may be evaluated at compile time. If an error, such as out
of bounds [array indexing] or [overflow], occurs,
then it is a compiler error if the value must be evaluated at compile time;
otherwise it is just a warning, and the code will most likely panic when run.
The following expressions are constant expressions, so long as any operands are
also constant expressions and do not cause any [`Drop::drop`][destructors] calls
to be run.
* [Literals].
* [Paths] to [functions](items/functions.html) and constants.
Recursively defining constants is not allowed.
* [Tuple expressions].
* [Array expressions].
* [Struct] expressions.
* [Enum variant] expressions.
* [Block expressions], including `unsafe` blocks, which only contain items and
possibly a constant tail expression.
* [Field] expressions.
* Index expressions, [array indexing] or [slice] with a `usize`.
* [Range expressions].
* [Closure expressions] which don't capture variables from the environment.
* Built in [negation], [arithmetic, logical], [comparison] or [lazy boolean]
operators used on integer and floating point types, `bool` and `char`.
* Shared [borrow]s, except if applied to a type with [interior mutability].
* The [dereference operator].
* [Grouped] expressions.
* [Cast] expressions, except pointer to address and
function pointer to address casts.
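For example (illustrative only), each of the following initializers is a constant expression:

```rust
const LEN: usize = 2 + 3;              // integer arithmetic
static GREETING: &str = "hello";       // literal
const FIRST: u8 = [10, 20, 30][0];     // array expression indexed with a `usize`

fn main() {
    let buffer = [0u8; LEN];           // LEN must be a constant expression here
    assert_eq!(buffer.len(), 5);
    assert_eq!(FIRST, 10);
    assert_eq!(GREETING, "hello");
}
```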
## Overloading Traits
Many of the following operators and expressions can also be overloaded for
other types using traits in `std::ops` or `std::cmp`. These traits also
exist in `core::ops` and `core::cmp` with the same names.
[block expressions]: expressions/block-expr.html
[call expressions]: expressions/call-expr.html
[closure expressions]: expressions/closure-expr.html
[enum variant]: expressions/enum-variant-expr.html
[field]: expressions/field-expr.html
[grouped]: expressions/grouped-expr.html
[literals]: expressions/literal-expr.html
[match]: expressions/match-expr.html
[method-call]: expressions/method-call-expr.html
[paths]: expressions/path-expr.html
[range expressions]: expressions/range-expr.html
[struct]: expressions/struct-expr.html
[tuple expressions]: expressions/tuple-expr.html
[array expressions]: expressions/array-expr.html
[array indexing]: expressions/array-expr.html#array-and-slice-indexing-expressions
[arithmetic, logical]: expressions/operator-expr.html#arithmetic-and-logical-binary-operators
[assign]: expressions/operator-expr.html#assignment-expressions
[borrow]: expressions/operator-expr.html#borrow-operators
[cast]: expressions/operator-expr.html#type-cast-expressions
[comparison]: expressions/operator-expr.html#comparison-operators
[compound assignment]: expressions/operator-expr.html#compound-assignment-expressions
[dereferences]: expressions/operator-expr.html#the-dereference-operator
[dereferencing]: expressions/operator-expr.html#the-dereference-operator
[dereference operator]: expressions/operator-expr.html#the-dereference-operator
[lazy boolean]: expressions/operator-expr.html#lazy-boolean-operators
[negation]: expressions/operator-expr.html#negation-operators
[overflow]: expressions/operator-expr.html#overflow
[destructors]: destructors.html
[interior mutability]: interior-mutability.html
[`Box<T>`]: ../std/boxed/struct.Box.html
[`Copy`]: special-types-and-traits.html#copy
[`Drop`]: special-types-and-traits.html#drop
[`Sized`]: special-types-and-traits.html#sized
[implicit borrow]: #implicit-borrows
[implicitly mutably borrowed]: #implicit-borrows
[let]: statements.html#let-statements
[let statement]: statements.html#let-statements
[Mutable `static` items]: items/static-items.html#mutable-statics
[slice]: types.html#array-and-slice-types
[static variables]: items/static-items.html
[Temporary values]: #temporary-lifetimes
[Variables]: variables.html
[_ArrayExpression_]: expressions/array-expr.html
[_BlockExpression_]: expressions/block-expr.html
[_BreakExpression_]: expressions/loop-expr.html#break-expressions
[_CallExpression_]: expressions/call-expr.html
[_ClosureExpression_]: expressions/closure-expr.html
[_ContinueExpression_]: expressions/loop-expr.html#continue-expressions
[_EnumerationVariantExpression_]: expressions/enum-variant-expr.html
[_FieldExpression_]: expressions/field-expr.html
[_GroupedExpression_]: expressions/grouped-expr.html
[_IfExpression_]: expressions/if-expr.html#if-expressions
[_IfLetExpression_]: expressions/if-expr.html#if-let-expressions
[_IndexExpression_]: expressions/array-expr.html#array-and-slice-indexing-expressions
[_LiteralExpression_]: expressions/literal-expr.html
[_LoopExpression_]: expressions/loop-expr.html
[_MatchExpression_]: expressions/match-expr.html
[_MethodCallExpression_]: expressions/method-call-expr.html
[_OperatorExpression_]: expressions/operator-expr.html
[_PathExpression_]: expressions/path-expr.html
[_RangeExpression_]: expressions/range-expr.html
[_ReturnExpression_]: expressions/return-expr.html
[_StructExpression_]: expressions/struct-expr.html
[_TupleExpression_]: expressions/tuple-expr.html
[_TupleIndexingExpression_]: expressions/tuple-expr.html#tuple-indexing-expressions
| 50.372093 | 98 | 0.706198 | eng_Latn | 0.989244 |
471672db27435b305548df1f507ff93df6591d0e | 2,543 | md | Markdown | docs/about.md | adamvoss/fluentassertions | 95f6d18761223c6d09b9a3bd2970ff895afab3f3 | [
"Apache-2.0"
] | null | null | null | docs/about.md | adamvoss/fluentassertions | 95f6d18761223c6d09b9a3bd2970ff895afab3f3 | [
"Apache-2.0"
] | null | null | null | docs/about.md | adamvoss/fluentassertions | 95f6d18761223c6d09b9a3bd2970ff895afab3f3 | [
"Apache-2.0"
] | null | null | null | ---
title: About
layout: page
---
FluentAssertions started because nothing is more annoying than a unit test that fails without clearly explaining why. Usually, you need to set a breakpoint and start up the debugger to be able to figure out what went wrong. Jeremy D. Miller once gave the advice to "keep out of the debugger hell" and we can only agree with that.
That’s why we designed Fluent Assertions to help you in this area. Not only by using clearly named assertion methods, but also by making sure the failure message provides as much information as possible. Consider this example:
```c#
"1234567890".Should().Be("0987654321");
```
This will be reported as:
> Expected string to be
"0987654321", but
"1234567890" differs near "123" (index 0).
The fact that both strings are displayed on a separate line is not a coincidence and happens if any of them is longer than 8 characters. However, if that's not enough, all assertion methods take an optional explanation (the because) that supports formatting placeholders similar to String.Format which you can use to enrich the failure message. For instance, the assertion
```c#
new[] { 1, 2, 3 }.Should().Contain(item => item > 3, "at least {0} item should be larger than 3", 1);
```
will fail with:
> Collection {1, 2, 3} should have an item matching (item > 3) because at least 1 item should be larger than 3.
## Who are we?
We are a bunch of developers working for Aviva Solutions who highly value software quality, in particular
* [Dennis Doomen](https://twitter.com/ddoomen)
Notable contributors include
* [Adam Voss](https://github.com/adamvoss)
The [Xamarin](https://github.com/onovotny/fluentassertions) version has been built by
* [Oren Novotny](https://twitter.com/onovotny)
If you have any comments or suggestions, please let us know via [twitter](https://twitter.com/search?q=fluentassertions&src=typd), through the [issues](https://github.com/dennisdoomen/FluentAssertions/issues) page, or through [StackOverflow](http://stackoverflow.com/questions/tagged/fluent-assertions).
## Versioning
The version numbers of Fluent Assertions releases comply with the [Semantic Versioning](http://semver.org/) scheme. In other words, release 1.4.0 only adds backwards-compatible functionality and bug fixes compared to 1.3.0. Release 1.4.1 should only include bug fixes. And if we ever introduce breaking changes, the major version number is increased to 2.0.0.
## What do you need to compile the solution?
* Visual Studio 2013 Update 2 or later
* Windows 8.1
* The Windows Phone 8 SDK | 47.092593 | 372 | 0.762878 | eng_Latn | 0.997104 |
47167ca8f76b85c1e476ad19fde1abed4bf68252 | 165 | md | Markdown | docs/demos/checkbox/desc.md | sipablorhuam/angular-xeditable | abf7f95ea3e053b36e31e5b0dc8712e5ef14727d | [
"MIT"
] | 1,345 | 2015-01-01T06:28:16.000Z | 2022-02-22T03:17:56.000Z | docs/demos/checkbox/desc.md | sipablorhuam/angular-xeditable | abf7f95ea3e053b36e31e5b0dc8712e5ef14727d | [
"MIT"
] | 653 | 2015-01-01T03:07:01.000Z | 2021-05-27T13:33:46.000Z | docs/demos/checkbox/desc.md | sipablorhuam/angular-xeditable | abf7f95ea3e053b36e31e5b0dc8712e5ef14727d | [
"MIT"
] | 413 | 2015-01-05T10:54:25.000Z | 2022-01-17T08:25:54.000Z | To make element editable via checkbox just add `editable-checkbox` attribute
pointing to model in scope. Set `e-title` attribute to define text shown with checkbox. | 82.5 | 87 | 0.806061 | eng_Latn | 0.981321 |
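For example (a minimal sketch; the model name `user.remember` and the label text are illustrative and not part of this demo):

```html
<div ng-controller="Ctrl">
  <a href="#" editable-checkbox="user.remember" e-title="Remember my decision">
    {{ user.remember && "Yes, remember" || "No, do not remember" }}
  </a>
</div>
```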
47168506bd134fb551b610f9d467c3d0c4bb60dc | 198 | md | Markdown | README.md | massiveco/serverlessl | b661c6d4f142820f203fd226ab7b872da3655a24 | [
"MIT"
] | 9 | 2018-05-08T15:45:36.000Z | 2021-11-05T14:35:43.000Z | README.md | massiveco/serverlessl | b661c6d4f142820f203fd226ab7b872da3655a24 | [
"MIT"
] | 8 | 2018-05-09T03:55:38.000Z | 2018-10-16T04:15:41.000Z | README.md | massiveco/serverlessl | b661c6d4f142820f203fd226ab7b872da3655a24 | [
"MIT"
] | 1 | 2018-09-21T12:49:19.000Z | 2018-09-21T12:49:19.000Z | # serverlessl
[](https://goreportcard.com/report/github.com/massiveco/serverlessl)
Serverless PKI powered by cfssl | 39.6 | 150 | 0.80303 | kor_Hang | 0.343481 |
4716ce14e17fbd87994917e4eadc90e22063ed02 | 27,956 | md | Markdown | articles/azure-monitor/app/custom-operations-tracking.md | tsunami416604/azure-docs.hu-hu | aeba852f59e773e1c58a4392d035334681ab7058 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/azure-monitor/app/custom-operations-tracking.md | tsunami416604/azure-docs.hu-hu | aeba852f59e773e1c58a4392d035334681ab7058 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/azure-monitor/app/custom-operations-tracking.md | tsunami416604/azure-docs.hu-hu | aeba852f59e773e1c58a4392d035334681ab7058 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: Track custom operations with the Azure Application Insights .NET SDK
description: Track custom operations with the Azure Application Insights .NET SDK
ms.topic: conceptual
ms.custom: devx-track-csharp
ms.date: 11/26/2019
ms.reviewer: sergkanz
ms.openlocfilehash: 42a5318325f9961483465357403089755feb130d
ms.sourcegitcommit: 829d951d5c90442a38012daaf77e86046018e5b9
ms.translationtype: MT
ms.contentlocale: hu-HU
ms.lasthandoff: 10/09/2020
ms.locfileid: "88933307"
---
# <a name="track-custom-operations-with-application-insights-net-sdk"></a>Egyéni műveletek nyomon követése Application Insights .NET SDK-val
Az Azure Application Insights SDK-k automatikusan követik a bejövő HTTP-kéréseket és a függő szolgáltatások (például HTTP-kérések és SQL-lekérdezések) hívásait. A kérések és a függőségek nyomon követése és összekapcsolása révén a teljes alkalmazás rugalmassága és megbízhatósága látható az alkalmazást egyesítő összes szolgáltatásban.
Az alkalmazás-mintázatok olyan osztálya van, amely nem támogatott általános támogatással. Az ilyen mintázatok megfelelő monitorozásához manuális kódokra van szükség. Ez a cikk néhány olyan mintát tartalmaz, amelyek manuális működést igényelhetnek, például az egyéni várólisták feldolgozását és a hosszan futó háttérben végzett feladatok futtatását.
Ez a dokumentum útmutatást nyújt az egyéni műveletek nyomon követéséhez az Application Insights SDK-val. Ez a dokumentáció a következő anyagokra vonatkozik:
- Application Insights a .NET (más néven Base SDK) 2.4-es vagy újabb verziójára.
- A (ASP.NET-t futtató) webalkalmazások Application Insights a 2.4-es és újabb verzióit.
- Application Insights a 2.1-es ASP.NET Core-verzióhoz.
## <a name="overview"></a>Áttekintés
A művelet egy alkalmazás által futtatott logikai munkadarab. A név, a kezdési idő, az időtartam, az eredmény és a végrehajtás környezete, például a Felhasználónév, a tulajdonságok és az eredmény. Ha A műveletet a B művelet kezdeményezte, akkor A B művelet szülőként van beállítva A számára. Egy művelet csak egy szülővel rendelkezhet, de több alárendelt művelettel is rendelkezhet. A műveletekkel és a telemetria kapcsolatos további információkért lásd: [Azure Application Insights telemetria korreláció](correlation.md).
A Application Insights .NET SDK-ban a műveletet a [OperationTelemetry](https://github.com/microsoft/ApplicationInsights-dotnet/blob/7633ae849edc826a8547745b6bf9f3174715d4bd/BASE/src/Microsoft.ApplicationInsights/Extensibility/Implementation/OperationTelemetry.cs) absztrakt osztálya, valamint a [RequestTelemetry](https://github.com/microsoft/ApplicationInsights-dotnet/blob/7633ae849edc826a8547745b6bf9f3174715d4bd/BASE/src/Microsoft.ApplicationInsights/DataContracts/RequestTelemetry.cs) és a [DependencyTelemetry](https://github.com/microsoft/ApplicationInsights-dotnet/blob/7633ae849edc826a8547745b6bf9f3174715d4bd/BASE/src/Microsoft.ApplicationInsights/DataContracts/DependencyTelemetry.cs)leszármazottai írják le.
## <a name="incoming-operations-tracking"></a>Bejövő műveletek követése
A Application Insights web SDK automatikusan gyűjt HTTP-kérelmeket az IIS-folyamatokban és az összes ASP.NET Core alkalmazásban futó ASP.NET-alkalmazásokhoz. Más platformokhoz és keretrendszerekhez Közösség által támogatott megoldások tartoznak. Ha azonban a standard vagy a Közösség által támogatott megoldások egyike sem támogatja az alkalmazást, manuálisan is elvégezheti azt.
Egy másik példa, amely egyéni követést igényel, az a feldolgozó, amely a várólistából fogad elemeket. Egyes várólisták esetében az üzenetnek a várólistára való felvételének hívása függőségként lesz nyomon követve. Az üzenetek feldolgozását ismertető magas szintű művelet azonban nem lesz automatikusan begyűjtve.
Lássuk, hogyan követhetik nyomon ezeket a műveleteket.
Magas szinten a feladat az `RequestTelemetry` ismert tulajdonságok létrehozása és beállítása. A művelet befejezése után nyomon követheti a telemetria. A következő példa azt mutatja be, hogy ez a feladat.
### <a name="http-request-in-owin-self-hosted-app"></a>HTTP-kérelem a Owin saját üzemeltetésű alkalmazásában
Ebben a példában a nyomkövetési környezetet a rendszer a [korrelációhoz tartozó HTTP protokoll](https://github.com/dotnet/runtime/blob/master/src/libraries/System.Diagnostics.DiagnosticSource/src/HttpCorrelationProtocol.md)szerint propagálja. Az itt ismertetett fejléceket kell elvárnia.
```csharp
public class ApplicationInsightsMiddleware : OwinMiddleware
{
// you may create a new TelemetryConfiguration instance, reuse one you already have
// or fetch the instance created by Application Insights SDK.
private readonly TelemetryConfiguration telemetryConfiguration = TelemetryConfiguration.CreateDefault();
private readonly TelemetryClient telemetryClient = new TelemetryClient(telemetryConfiguration);
public ApplicationInsightsMiddleware(OwinMiddleware next) : base(next) {}
public override async Task Invoke(IOwinContext context)
{
// Let's create and start RequestTelemetry.
var requestTelemetry = new RequestTelemetry
{
Name = $"{context.Request.Method} {context.Request.Uri.GetLeftPart(UriPartial.Path)}"
};
// If there is a Request-Id received from the upstream service, set the telemetry context accordingly.
if (context.Request.Headers.ContainsKey("Request-Id"))
{
var requestId = context.Request.Headers.Get("Request-Id");
// Get the operation ID from the Request-Id (if you follow the HTTP Protocol for Correlation).
requestTelemetry.Context.Operation.Id = GetOperationId(requestId);
requestTelemetry.Context.Operation.ParentId = requestId;
}
// StartOperation is a helper method that allows correlation of
// current operations with nested operations/telemetry
// and initializes start time and duration on telemetry items.
var operation = telemetryClient.StartOperation(requestTelemetry);
// Process the request.
try
{
await Next.Invoke(context);
}
catch (Exception e)
{
requestTelemetry.Success = false;
telemetryClient.TrackException(e);
throw;
}
finally
{
// Update status code and success as appropriate.
if (context.Response != null)
{
requestTelemetry.ResponseCode = context.Response.StatusCode.ToString();
requestTelemetry.Success = context.Response.StatusCode >= 200 && context.Response.StatusCode <= 299;
}
else
{
requestTelemetry.Success = false;
}
// Now it's time to stop the operation (and track telemetry).
telemetryClient.StopOperation(operation);
}
}
public static string GetOperationId(string id)
{
// Returns the root ID from the '|' to the first '.' if any.
int rootEnd = id.IndexOf('.');
if (rootEnd < 0)
rootEnd = id.Length;
int rootStart = id[0] == '|' ? 1 : 0;
return id.Substring(rootStart, rootEnd - rootStart);
}
}
```
The HTTP Protocol for Correlation also declares the `Correlation-Context` header. However, it's omitted here for simplicity.
## <a name="queue-instrumentation"></a>Queue instrumentation
While [W3C Trace Context](https://www.w3.org/TR/trace-context/) and the [HTTP Protocol for Correlation](https://github.com/dotnet/runtime/blob/master/src/libraries/System.Diagnostics.DiagnosticSource/src/HttpCorrelationProtocol.md) pass correlation details with HTTP requests, every queue protocol has to define how the same details are passed along the queue message. Some queue protocols (such as AMQP) allow passing additional metadata, while others (such as the Azure Storage queue) require the context to be encoded into the message payload.
> [!NOTE]
> * **Cross-component tracing is not supported for queues yet.** If the producer and the consumer send telemetry to different Application Insights resources, Transaction Diagnostics and Application Map show transactions and map them end to end. For queues, this is not supported yet.
### <a name="service-bus-queue"></a>Service Bus queue
Application Insights tracks Service Bus messaging calls made with the new [Microsoft Azure ServiceBus Client for .NET](https://www.nuget.org/packages/Microsoft.Azure.ServiceBus/) and its later versions.
If you use the [message handler pattern](/dotnet/api/microsoft.azure.servicebus.queueclient.registermessagehandler) to process messages, all Service Bus calls done by your service are automatically tracked and correlated with other telemetry items. If you process messages manually, see [Service Bus client tracing with Microsoft Application Insights](../../service-bus-messaging/service-bus-end-to-end-tracing.md).
If you use the [WindowsAzure.ServiceBus](https://www.nuget.org/packages/WindowsAzure.ServiceBus/) package, read further: the following examples demonstrate how to track (and correlate) calls to Service Bus, because the Service Bus queue uses the AMQP protocol and Application Insights doesn't automatically track queue operations.
Correlation identifiers are passed in the message properties.
#### <a name="enqueue"></a>Enqueue
```csharp
public async Task Enqueue(string payload)
{
// StartOperation is a helper method that initializes the telemetry item
// and allows correlation of this operation with its parent and children.
var operation = telemetryClient.StartOperation<DependencyTelemetry>("enqueue " + queueName);
operation.Telemetry.Type = "Azure Service Bus";
operation.Telemetry.Data = "Enqueue " + queueName;
var message = new BrokeredMessage(payload);
// Service Bus queue allows the property bag to pass along with the message.
// We will use them to pass our correlation identifiers (and other context)
// to the consumer.
message.Properties.Add("ParentId", operation.Telemetry.Id);
message.Properties.Add("RootId", operation.Telemetry.Context.Operation.Id);
try
{
await queue.SendAsync(message);
// Set operation.Telemetry Success and ResponseCode here.
operation.Telemetry.Success = true;
}
catch (Exception e)
{
telemetryClient.TrackException(e);
// Set operation.Telemetry Success and ResponseCode here.
operation.Telemetry.Success = false;
throw;
}
finally
{
telemetryClient.StopOperation(operation);
}
}
```
#### <a name="process"></a>Folyamat
```csharp
public async Task Process(BrokeredMessage message)
{
// After the message is taken from the queue, create RequestTelemetry to track its processing.
// It might also make sense to get the name from the message.
RequestTelemetry requestTelemetry = new RequestTelemetry { Name = "process " + queueName };
var rootId = message.Properties["RootId"].ToString();
var parentId = message.Properties["ParentId"].ToString();
// Get the operation ID from the Request-Id (if you follow the HTTP Protocol for Correlation).
requestTelemetry.Context.Operation.Id = rootId;
requestTelemetry.Context.Operation.ParentId = parentId;
var operation = telemetryClient.StartOperation(requestTelemetry);
try
{
await ProcessMessage();
}
catch (Exception e)
{
telemetryClient.TrackException(e);
throw;
}
finally
{
// Update status code and success as appropriate.
telemetryClient.StopOperation(operation);
}
}
```
### <a name="azure-storage-queue"></a>Azure Storage-üzenetsor
Az alábbi példa bemutatja, hogyan követheti nyomon az [Azure Storage-várólista](../../storage/queues/storage-dotnet-how-to-use-queues.md) műveleteit, és hogyan korrelálhat telemetria a gyártó, a fogyasztó és az Azure Storage között.
A tárolási várólista HTTP API-val rendelkezik. A várólista összes hívását nyomon követheti a HTTP-kérelmek Application Insights függőségi gyűjtője.
Alapértelmezés szerint a ASP.NET és ASP.NET Core alkalmazásokban más típusú alkalmazásokkal van konfigurálva, a [konzolon futó alkalmazások dokumentációjában](./console.md) olvashat
Érdemes lehet összekapcsolni a Application Insights műveleti AZONOSÍTÓját is a Storage-kérelem azonosítójával. A Storage-kérelmek ügyfelének és a kiszolgálói kérelmek AZONOSÍTÓjának beállításával és lekérésével kapcsolatos információkért lásd: az [Azure Storage figyelése, diagnosztizálása és hibáinak megoldása](../../storage/common/storage-monitoring-diagnosing-troubleshooting.md#end-to-end-tracing).
#### <a name="enqueue"></a>Sorba helyezni
Mivel a tárolási várólisták támogatják a HTTP API-t, a Application Insights automatikusan nyomon követ minden, a várólistával kapcsolatos műveletet. Sok esetben ez a rendszerállapot elég. Ahhoz azonban, hogy a nyomon követéseket a gyártó által követett nyomkövetéssel társítsa a felhasználói oldalon, hasonló korrelációs kontextust kell átadnia a HTTP protokoll korrelációs működéséhez.
Ez a példa bemutatja, hogyan követheti nyomon a `Enqueue` műveletet. A következőket teheti:
- **Újrapróbálkozások korrelálása (ha van ilyen)**: mindegyiknek van egy közös szülője, amely a `Enqueue` művelet. Ellenkező esetben a rendszer a bejövő kérelem gyermekeiként követi nyomon. Ha több logikai kérelem van a várólistához, nehéz lehet megállapítani, hogy melyik hívás eredményezte az újrapróbálkozásokat.
- A **tárolási naplók korrelációja (ha és ha szükséges)**: Application Insights telemetria korrelálnak.
A `Enqueue` művelet a szülő művelet gyermeke (például egy bejövő HTTP-kérelem). A HTTP-függőségi hívás a művelet gyermeke `Enqueue` és a bejövő kérelem unokája:
```csharp
public async Task Enqueue(CloudQueue queue, string message)
{
var operation = telemetryClient.StartOperation<DependencyTelemetry>("enqueue " + queue.Name);
operation.Telemetry.Type = "Azure queue";
operation.Telemetry.Data = "Enqueue " + queue.Name;
// MessagePayload represents your custom message and also serializes correlation identifiers into payload.
// For example, if you choose to pass payload serialized to JSON, it might look like
// {'RootId' : 'some-id', 'ParentId' : '|some-id.1.2.3.', 'message' : 'your message to process'}
var jsonPayload = JsonConvert.SerializeObject(new MessagePayload
{
RootId = operation.Telemetry.Context.Operation.Id,
ParentId = operation.Telemetry.Id,
Payload = message
});
CloudQueueMessage queueMessage = new CloudQueueMessage(jsonPayload);
// Add operation.Telemetry.Id to the OperationContext to correlate Storage logs and Application Insights telemetry.
OperationContext context = new OperationContext { ClientRequestID = operation.Telemetry.Id};
try
{
await queue.AddMessageAsync(queueMessage, null, null, new QueueRequestOptions(), context);
}
catch (StorageException e)
{
operation.Telemetry.Properties.Add("AzureServiceRequestID", e.RequestInformation.ServiceRequestID);
operation.Telemetry.Success = false;
operation.Telemetry.ResultCode = e.RequestInformation.HttpStatusCode.ToString();
telemetryClient.TrackException(e);
}
finally
{
// Update status code and success as appropriate.
telemetryClient.StopOperation(operation);
}
}
```
To reduce the amount of telemetry your application reports, or if you don't want to track the `Enqueue` operation for other reasons, use the `Activity` API directly:
- Create (and start) a new `Activity` instead of starting the Application Insights operation. You do *not* need to assign any properties on it except the operation name.
- Serialize `yourActivity.Id` into the message payload instead of `operation.Telemetry.Id`. You can also use `Activity.Current.Id`. (A minimal sketch of this approach follows.)
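The sketch below is an assumption-based illustration and is not part of the original article. It reuses the `MessagePayload` type and the `queue` client from the earlier examples; the method name is made up:

```csharp
// Enqueue without reporting a separate DependencyTelemetry item.
// Only Activity-based identifiers are written into the payload.
public async Task EnqueueLightweight(CloudQueue queue, string message)
{
    var activity = new Activity("enqueue " + queue.Name);
    activity.Start();
    try
    {
        var jsonPayload = JsonConvert.SerializeObject(new MessagePayload
        {
            RootId = activity.RootId,
            ParentId = activity.Id,
            Payload = message
        });
        await queue.AddMessageAsync(new CloudQueueMessage(jsonPayload));
    }
    finally
    {
        activity.Stop();
    }
}
```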
#### <a name="dequeue"></a>Sorból
Ehhez hasonlóan `Enqueue` a Application Insights automatikusan nyomon követik a tárolási üzenetsor tényleges http-kérelmét. Azonban a `Enqueue` művelet valószínűleg a szülő kontextusban történik, például egy bejövő kérelmek környezetében. Application Insights SDK-k automatikusan korrelálnak egy ilyen műveletet (és annak HTTP-részét) a szülő kérelemmel és az ugyanazon a hatókörben jelentett más telemetria.
A `Dequeue` művelet trükkös. A Application Insights SDK automatikusan nyomon követi a HTTP-kérelmeket. Azonban az üzenet elemzése előtt nem ismeri a korrelációs környezetet. A HTTP-kérést nem lehet összekapcsolni, hogy az üzenetet a többi telemetria kapja, különösen akkor, ha egynél több üzenet érkezik.
```csharp
public async Task<MessagePayload> Dequeue(CloudQueue queue)
{
var operation = telemetryClient.StartOperation<DependencyTelemetry>("dequeue " + queue.Name);
operation.Telemetry.Type = "Azure queue";
operation.Telemetry.Data = "Dequeue " + queue.Name;
try
{
var message = await queue.GetMessageAsync();
}
catch (StorageException e)
{
operation.telemetry.Properties.Add("AzureServiceRequestID", e.RequestInformation.ServiceRequestID);
operation.telemetry.Success = false;
operation.telemetry.ResultCode = e.RequestInformation.HttpStatusCode.ToString();
telemetryClient.TrackException(e);
}
finally
{
// Update status code and success as appropriate.
telemetryClient.StopOperation(operation);
}
return null;
}
```
#### <a name="process"></a>Folyamat
A következő példában a bejövő üzenetek a bejövő HTTP-kérésekhez hasonlóan követik nyomon:
```csharp
public async Task Process(MessagePayload message)
{
// After the message is dequeued from the queue, create RequestTelemetry to track its processing.
RequestTelemetry requestTelemetry = new RequestTelemetry { Name = "process " + queueName };
// It might also make sense to get the name from the message.
requestTelemetry.Context.Operation.Id = message.RootId;
requestTelemetry.Context.Operation.ParentId = message.ParentId;
var operation = telemetryClient.StartOperation(requestTelemetry);
try
{
await ProcessMessage();
}
catch (Exception e)
{
telemetryClient.TrackException(e);
throw;
}
finally
{
// Update status code and success as appropriate.
telemetryClient.StopOperation(operation);
}
}
```
Similarly, other queue operations can be instrumented. A peek operation should be instrumented in a similar way as the dequeue operation. Instrumenting queue management operations isn't necessary. Application Insights tracks operations such as HTTP, and in most cases that's enough.
When you instrument message deletion, make sure you set the operation (correlation) identifiers. Alternatively, you can use the `Activity` API. Then you don't need to set operation identifiers on the telemetry items because the Application Insights SDK does it for you (a minimal sketch follows the list):
- Create a new `Activity` after you get an item from the queue.
- Use `Activity.SetParentId(message.ParentId)` to correlate consumer and producer logs.
- Start the `Activity`.
- Track dequeue, process, and delete operations by using the `Start/StopOperation` helpers. Do it from the same asynchronous control flow (execution context). In this way, they're correlated properly.
- Stop the `Activity`.
- Use `Start/StopOperation`, or call `Track` telemetry manually.
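The sketch below is an assumption-based illustration and is not part of the original article. It reuses `MessagePayload`, `queueName`, and `telemetryClient` from the earlier examples; the method name is made up:

```csharp
public async Task ProcessWithActivity(MessagePayload message)
{
    var activity = new Activity("process " + queueName);
    // Correlate consumer telemetry with the producer that sent the message.
    activity.SetParentId(message.ParentId);
    activity.Start();
    try
    {
        // StartOperation picks up the correlation context from the current Activity,
        // so the RequestTelemetry it creates is correlated automatically.
        using (var operation = telemetryClient.StartOperation<RequestTelemetry>("process " + queueName))
        {
            await ProcessMessage();
        }
    }
    finally
    {
        activity.Stop();
    }
}
```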
### <a name="dependency-types"></a>Dependency types
Application Insights uses the dependency type to customize UI experiences. For queues, it recognizes the following types of `DependencyTelemetry` that improve the [transaction diagnostics experience](./transaction-diagnostics.md):
- `Azure queue` for Azure Storage queues
- `Azure Event Hubs` for Azure Event Hubs
- `Azure Service Bus` for Azure Service Bus
### <a name="batch-processing"></a>Batch processing
With some queues, you can dequeue multiple messages with one request. Processing such messages is presumably independent and belongs to different logical operations. It isn't possible to correlate the `Dequeue` operation to a particular message being processed.
Each message should be processed in its own asynchronous control flow. For more information, see the [Outgoing dependencies tracking](#outgoing-dependencies-tracking) section and the fan-out sketch below.
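For example, a batch can be fanned out so that each message gets its own operation (a sketch; `GetMessagesAsync` stands in for your queue's batch-receive API and `ProcessMessageAsync` is assumed to wrap the `Process` pattern shown earlier):
```csharp
var batch = await queue.GetMessagesAsync(32);
// Each message is handled in its own asynchronous control flow,
// so each one gets its own operation and correlation context.
var tasks = batch.Select(message => Task.Run(() => ProcessMessageAsync(message)));
await Task.WhenAll(tasks);
```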
## <a name="long-running-background-tasks"></a>Long-running background tasks
Some applications start long-running operations that might be caused by user requests. From the tracing/instrumentation perspective, it's not different from request or dependency instrumentation:
```csharp
async Task BackgroundTask()
{
var operation = telemetryClient.StartOperation<DependencyTelemetry>(taskName);
operation.Telemetry.Type = "Background";
try
{
int progress = 0;
while (progress < 100)
{
// Process the task.
telemetryClient.TrackTrace($"done {progress++}%");
}
// Update status code and success as appropriate.
}
catch (Exception e)
{
telemetryClient.TrackException(e);
// Update status code and success as appropriate.
throw;
}
finally
{
telemetryClient.StopOperation(operation);
}
}
```
In this example, `telemetryClient.StartOperation` creates a `DependencyTelemetry` and fills the correlation context. Suppose you have a parent operation that was created by an incoming request that scheduled the task. As long as `BackgroundTask` starts in the same asynchronous control flow as the incoming request, it's correlated with that parent operation. `BackgroundTask` and all nested telemetry items are automatically correlated with the request that caused it, even after the request ends.
When the task starts from a background thread that doesn't have any operation (`Activity`) associated with it, `BackgroundTask` doesn't have any parent. However, it can have nested operations. All telemetry items reported from the task are correlated to the `DependencyTelemetry` created in `BackgroundTask`.
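For illustration, this is how the task might be kicked off from a tracked ASP.NET Core request (the controller action is an assumption; the point is that the same asynchronous control flow is captured):
```csharp
public IActionResult StartNightlyReport()
{
    // Fire and forget: BackgroundTask inherits the current Activity,
    // so its telemetry stays correlated with this request even after
    // the response has been sent.
    _ = Task.Run(() => BackgroundTask());
    return Accepted();
}
```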
## <a name="outgoing-dependencies-tracking"></a>Outgoing dependencies tracking
You can track your own dependency kind or an operation that's not supported by Application Insights.
The `Enqueue` method in the Service Bus queue or the Storage queue can serve as an example for such custom tracking.
The general approach for custom dependency tracking is to:
- Call the `TelemetryClient.StartOperation` (extension) method that fills the `DependencyTelemetry` properties needed for correlation and some other properties (start time stamp, duration).
- Set other custom properties on the `DependencyTelemetry`, such as the name and any other context you need.
- Make a dependency call and wait for it.
- Stop the operation with `StopOperation` when it's finished.
- Handle exceptions.
```csharp
public async Task RunMyTaskAsync()
{
using (var operation = telemetryClient.StartOperation<DependencyTelemetry>("task 1"))
{
try
{
var myTask = await StartMyTaskAsync();
// Update status code and success as appropriate.
}
catch(...)
{
// Update status code and success as appropriate.
}
}
}
```
Disposing the operation stops it, so you don't need to call `StopOperation` explicitly.
*Warning*: In some cases, an unhandled exception might [prevent](/dotnet/csharp/language-reference/keywords/try-finally) `finally` from being called, so operations might not be tracked.
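Applied to the `Enqueue` method mentioned earlier, the pattern looks roughly like this (a sketch: the `Payload` member on `MessagePayload` and the JSON serialization with Newtonsoft.Json are assumptions, not a prescribed wire format):
```csharp
public async Task Enqueue(CloudQueue queue, string payload)
{
    var operation = telemetryClient.StartOperation<DependencyTelemetry>("enqueue " + queue.Name);
    operation.Telemetry.Type = "Azure queue";
    operation.Telemetry.Data = "Enqueue " + queue.Name;
    // Ship the correlation identifiers with the payload so the consumer
    // can restore the context (see the Process example earlier).
    var message = new MessagePayload
    {
        RootId = operation.Telemetry.Context.Operation.Id,
        ParentId = operation.Telemetry.Id,
        Payload = payload
    };
    try
    {
        await queue.AddMessageAsync(new CloudQueueMessage(JsonConvert.SerializeObject(message)));
    }
    catch (StorageException e)
    {
        operation.Telemetry.Success = false;
        operation.Telemetry.ResultCode = e.RequestInformation.HttpStatusCode.ToString();
        telemetryClient.TrackException(e);
    }
    finally
    {
        // Update status code and success as appropriate.
        telemetryClient.StopOperation(operation);
    }
}
```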
### <a name="parallel-operations-processing-and-tracking"></a>Parallel operations processing and tracking
`StopOperation` only stops the operation that was started. If the currently running operation doesn't match the one you want to stop, `StopOperation` does nothing. This situation might happen if you start multiple operations in parallel in the same execution context:
```csharp
var firstOperation = telemetryClient.StartOperation<DependencyTelemetry>("task 1");
var firstTask = RunMyTaskAsync();
var secondOperation = telemetryClient.StartOperation<DependencyTelemetry>("task 2");
var secondTask = RunMyTaskAsync();
await firstTask;
// FAILURE!!! This will do nothing and will not report telemetry for the first operation
// as currently secondOperation is active.
telemetryClient.StopOperation(firstOperation);
await secondTask;
```
Make sure you always call `StartOperation` and process the operation in the same **async** method to isolate operations running in parallel. If the operation is synchronous (or not async), wrap the processing and tracking with `Task.Run`:
```csharp
public void RunMyTask(string name)
{
using (var operation = telemetryClient.StartOperation<DependencyTelemetry>(name))
{
Process();
// Update status code and success as appropriate.
}
}
public async Task RunAllTasks()
{
var task1 = Task.Run(() => RunMyTask("task 1"));
var task2 = Task.Run(() => RunMyTask("task 2"));
await Task.WhenAll(task1, task2);
}
```
## <a name="applicationinsights-operations-vs-systemdiagnosticsactivity"></a>ApplicationInsights operations vs. System.Diagnostics.Activity
`System.Diagnostics.Activity` represents the distributed tracing context. Frameworks and libraries use it to create and propagate context inside and outside of the process and to correlate telemetry items. `Activity` works together with `System.Diagnostics.DiagnosticSource`, the notification mechanism between the framework/library and the code that's interested in events such as incoming and outgoing requests and exceptions.
Activities are first-class citizens in Application Insights. Automatic dependency and request collection relies heavily on them, along with `DiagnosticSource` events. If you create an `Activity` in your application, it doesn't result in Application Insights telemetry being created. Application Insights needs to receive `DiagnosticSource` events and know their names and payloads in order to translate activities into telemetry.
Each Application Insights operation (request or dependency) involves an `Activity`. When `StartOperation` is called, it creates an `Activity` underneath. `StartOperation` is the recommended way to track request or dependency telemetry manually and ensure everything is correlated.
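A minimal sketch of that relationship:
```csharp
// StartOperation creates and starts an Activity under the hood.
using (var operation = telemetryClient.StartOperation<DependencyTelemetry>("nightly cleanup"))
{
    // Activity.Current here is the activity created by StartOperation,
    // so this trace is automatically correlated with the operation.
    telemetryClient.TrackTrace("cleanup step 1 done");
}
```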
## <a name="next-steps"></a>Next steps
- Learn the basics of [telemetry correlation](correlation.md) in Application Insights.
- See how correlated data powers [transaction diagnostics](./transaction-diagnostics.md) and [Application Map](./app-map.md).
- See the [data model](./data-model.md) for Application Insights types.
- Report custom [events and metrics](./api-custom-events-metrics.md) to Application Insights.
- Check out the standard [configuration](configuration-with-applicationinsights-config.md#telemetry-initializers-aspnet) for context properties collection.
- Check the [System.Diagnostics.Activity User Guide](https://github.com/dotnet/runtime/blob/master/src/libraries/System.Diagnostics.DiagnosticSource/src/ActivityUserGuide.md) to see how we correlate telemetry.
| 57.286885 | 719 | 0.764988 | hun_Latn | 0.999874 |
4716e3e4311cc530e7f42b37b19bb7539b7bc673 | 11,316 | md | Markdown | site/content/blog/cumhurbaskani-erdogan-ukraynaya-gelirken.md | mertnuhoglu/one-click-hugo-cms | 6365db19f90e10926dcab5cdb0f492c1fcf9f299 | [
"MIT"
] | 1 | 2021-01-31T20:31:52.000Z | 2021-01-31T20:31:52.000Z | site/content/blog/cumhurbaskani-erdogan-ukraynaya-gelirken.md | mertnuhoglu/one-click-hugo-cms | 6365db19f90e10926dcab5cdb0f492c1fcf9f299 | [
"MIT"
] | null | null | null | site/content/blog/cumhurbaskani-erdogan-ukraynaya-gelirken.md | mertnuhoglu/one-click-hugo-cms | 6365db19f90e10926dcab5cdb0f492c1fcf9f299 | [
"MIT"
] | null | null | null | ---
title: 'Cumhurbaşkanı Erdoğan, Ukrayna''ya gelirken'
date: Sun, 02 Feb 2020 11:28:44 +0000
draft: false
tags: ["Dış Ticaret", "dış ticaret", "Ekonomi", "Politika", "recep tayyip erdoğan", "Serbest Ticaret Antlaşması", "Turizm", "Türkiye Ukrayna Serbest Ticaret Antlaşması", "Ukrayna", "Ukrayna Dış İlişkileri", "Ukrayna Türk Toplumu", "Uluslarası İlişkiler", "Volodimir Zelenskiy", "yenilenebilir enerji"]
type: post
---

Türkiye Cumhuriyeti Cumhurbaşkanı Recep Tayyip Erdoğan günübirlik resmi ziyaret için Ukrayna’ya gelirken, kısa gezinin, uzun ve kapsamlı bir gündemi var. Önce, ev sahibi Ukrayna Cumhurbaşkanı Volodomir Zelenksiy ile birlikte iki stratejik partner ülke arasındaki Yüksek Düzeyli Stratejik İşbirliği Konseyi toplantısına başkanlık yapacaklar, sonrasında ise on yıl sonra ilk kez iki ülke liderlerinin birlikte katılımıyla Kiev’de düzenlenecek Türkiye Ukrayna İş Forumunda iş insanlarına beraber hitap edecekler.
Ukrayna, Türkiye ilişkileri tarihinin en iyi dönemini yaşarken, aslında daha yapılabilecek çok iş, alınabilecek çok mesafe var
Ukrayna tarafında görüşmelere katılacak isimlere baktığımızda enerji, ekonomi, ticaret, savunma altyapı, turizm gibi alanların özellikle ele alınacağı göze çarpıyor ki bu alanlar zaten Ukrayna liderliğinin stratejik öncelikleri arasında.
Ukrayna, 2014’te gerçekleşen Avrupa Meydanı Devrimi sonrasında müthiş bir değişim, dönüşüm geçiriyor. Değişimin hızının yeterliliği konusunda zaman zaman eleştiriler olsa da, ülkenin fiziksel altyapısından, insanların mantalitesine bu değişimi özellikle en az 6 ay gibi aralıklarla ülkeye gelip gidenlerin değerlendirmelerinden çok net algılamak mümkün. Meydan sonrası, ülkede bağımsızlığın ilk 23 yılında yapılandan daha fazla yapısal reform gerçekleştirilirken, son 6 ayda ise Ukrayna’nın tarihinin en yüksek oyla seçilen ve yine partisi Meclis’te tek başına çoğunluğa ilk kez ulaşan Cumhurbaşkanı olan Volodymyr Zelenskiy’in ifadesiyle turbo rejimde yapısal reformların hızı, kapsamı ve derinliği daha da arttı.
Hızlı değişimin, uygulamaya koyulan yapısal reformların halk üzerinde kısa vadede yan etkileri de olmuyor değil. Hiçbir Ukrayna liderinin bugüne kadar sahip olmadığı halk desteğine sahip olan Cumhurbaşkanı Zelenskiy, halkı iyi tanıyan, ülkenin sorunlarına bilen bir isim. Halkın kendisine ve reformlara güçlü desteğinin devamı ve Ukrayna’daki değişimin, ilerlemenin sürdürülebilir olması açısından sıradan vatandaşın günlük hayatında olumlu etkilerini hissedebileceği ülkenin yolları, köprüleri, havalimanları, limanları gibi altyapısının geliştirilmesine yatırım yapmayı ve yine bürokrasiyi azaltacak, yolsuzluğun yapılma alanlarını daraltacak devletin dijitalleşmesini önceliklendirmiş durumda.
Ukrayna’da önümüzdeki 5 yılda 25 milyar $’lık altyapı yatırımı yapılması hedeflenirken bu hedefe yalnızca Ukrayna’nın kendi imkanlarıyla ulaşılabilinmesi mümkün değil. Uluslarası finans kurumlarının kredileri kadar, ülkeler arası ikili antlaşmalar ve özel sektör katkısıyla da bu yatırımlar gerçekleştirilmeye çalışılacak. Hafta başında Başbakan Honçaruk, Ulaştırma Bakanı Krikliy ve Ukravdator (Ukrayna Karayolları) Genel Müdürü Oleksandr Kurbakov’un katılımıyla alanında bir ilk olan Karayolu 2020 forumu 500 yerli ve uluslararası katılımcıyla çok başarılı bir biçimde düzenlendi. Forum, resmî konuşmaların uzunluğundan ziyade, bu üç ismin hedefleri şeffaf bir biçimde paylaştıkları ve katılımcıların sorularını samimice cevapladıkları bir platform oldu ve böylece altyapı hamlesinin startı verildi.
Türkiye fiziksel altyapıda da, devlet hizmetlerinin dijitalleşmesinde de son 20 yılda büyük yatırımlar ve atılımlar yaptı. E-devlet uygulamalarıyla hem vatandaşın kamu hizmetlerine ulaşımı, erişimi kolaylaştı hem de vergi tabanı genişledi böylece devlet daha büyük yatırımları gerçekleştirebileceği yatırımı imkanlarına kavuştu. Nitekim Türkiye’nin Dünya Bankası İş Yapma Endeksinde son yıllardaki yükselişinde bu alanlardaki gelişmelerin etkisi var.
Altyapı ve müteahhitlik hizmetlerinde ise Türkiye uluslararası arenada yıldız bir ülke. Yalnızca Kamu Özel Ortaklığı Modeli ile son 20 yılda toplam tutarı 150 milyar $’ı geçen 200’ü aşkın altyapı projesi gerçekleştirildi. Dünya’nın en büyük 250 uluslararası müteahhitlik firması arasında 44 Türk firması var. Türk inşaat firmaları Ukrayna’da ise bugüne kadar 7 milyar $ proje büyüklüğüne ulaşan aralarında yüzlerce, binlerce kilometre yol, ulaştırma köprüleri, havalimanları, stadyumlar, 5 yıldızlı otellerin de yer aldığı çok sayıda yapıyı inşa ettiler. Ukrayna’da ‘’Türkler iyi yol yapar’’ deyişi devletin üst kademlerinde de halk arasında da yaygın bir söyleyiş. Bu güvenin oluşmasında ise hiç kuşkusuz 15 yıldır Ukrayna’nın altyapısının nitelikli bir şekilde inşasına katkıdan bulunan Türk inşaat firmaları pay sahibi. Dolayısıyla bu gezide Türk inşaat firmalarının know-how’u, makine ekipman gücü, sermayesi ve işletme deneyimiyle altyapı sektöründe daha çok rol almaları gündeme gelebilir.
Ekonomi cephesinde Ukrayna’da hükümet yüksek büyüme hedefleri koydu. Bu hedeflere ulaşılması ancak yüksek miktarda doğrudan yabancı sermaye çekilebilmesi ile mümkün. Bunun için iş ortamının ve yatırım ikliminin geliştirilmesine büyük önem veriliyor. Yerli ve yabancı yeni yatırımcılara garantiler verilirken, mevcut yatırımcıların var olan sorunlarının çözümü için çaba sarf ediliyor. Ukrayna’daki Türk sermayesi 3,5 milyar $’a yaklaşıyor, bu rakamın en az üçte biri ise ülkeye Avrupa Meydanı Devrimi sonrasında girdi. Son iki yılın yıldız sektörü ise yenilenebilir enerji. Yenilenebilir enerjide, Türk firmaları yalnızca iki yılda 200 milyon $’ı aşan yatırım yaptılar ve bu alana yapılan yatırımlar başka sektörlerdeki Türk firmalarının Ukrayna’daki yatırımları için de katalizör işlevi gördü. Ancak bu alandaki yeni yasa ve düzenlemelerin geriye dönük de etkileri olabileceği ya da başlayan yatırımlarda top oyundayken oyunun kurallarını değiştirilmesi endişesi genel olarak bu alandaki yatırımları durdurdu. Yenilenebilir enerji alanında alınacak kararların, aralarında Türklerin de olduğu yabancı yatırımcıların ülkedeki yatırım ortamına ve devletin taahhütlerine güveni açısından sektörü aşan etkileri olacaktır.
Turizm, ziyarette gündeme gelebilecek bir başka önemli alan. 2011’den beri iki ülke arasında vizesiz seyahat mümkünken, 2017’den sonra artık kimlik kartıyla Ukrayna ve Türkiye vatandaşları birbirlerini ziyaret edebiliyorlar. İlk kez Kiev’e geldiğim yıl olan 2004’ü hatırlıyorum Türk Havayolları’nın, Kiev’e günlük seferi bile yoktu. Bugün Türk ve Ukrayna havayolu şirketleri haftada 200’ün üzerinde karşılıklı sefer düzenlerken, Türk Havayolları, Ukrayna’nın bayrak taşıyıcı havayolu şirketi MAU’dan sonra Ukrayna hava sahasını en çok kullanan 2. havayolu şirketi oldu. Dünya’da en çok turist ağırlayan 6. ülke olan Türkiye’ye, 2019’da gelen Ukraynalı turist sayısı 1,5 milyonu geçerken, 300 bine yakın Türk vatandaşı Ukrayna’yı ziyaret etti. Bunlar harika rakamlar bununla beraber turizmi öncelikli sektörlerden biri olarak belirleyen Ukrayna’nın kuşkusuz, Türkiye’nin bu alandaki birikimine, know-how’una ve tabii ki Türk firmalarının yatırımlarına ihtiyacı var. Bugün dünyanın en büyük 5 yıldızlı otel bölgesi olan Antalya’nın bu hale gelmesi 1985’te başlayan devletin bu bölgedeki turizm yatırımlarına verdiği teşviklerle oldu. Ukrayna turizm hamlesi başlatırken, Türk otel yatırımcılarının, işletmecilerinin önümüzdeki dönemde ülkede yatırımlarına tanıklık edeceğiz ama turizm sektöründeki yerli olsun yabancı olsun yatırımların devlet tarafından teşvik edilmesi, uygun yatırım ortamın sağlanması önemli.

Son 5 yılda özelleştirmede Ukrayna’da ciddi bir ilerleme kaydedilemezken, bu yıl 500’e yakın şirketin özelleştirilmesi öngörülüyor. Ukrayna’da sayıları 3.600’ı bulan devlet işletmelerinden en az 1.500’ü ise önümüzdeki birkaç yıl içerisinde özelleştirilmiş olacak, bu ise yalnıza bölgede değil, dünyada son on yıllardaki en büyük özelleştirme dalgalarından biri. Türkiye son 20 yılda 60 milyar $ özelleştirme ve imtiyaz geliri elde etti. Türk firmaları bu alanda deneyimlendi. Liman, havalimanı işletmesinden, özelleştirme ihalelerine katılıma Türk firmaları Ukrayna’da özelleştirmeye ilgi gösterecektir. Nitekim iş forumunda Ukrayna Özelleştirme İdaresi Başkanı'nın da bir konuşması olacak.
Gündemden hiç düşmeyen ve Türkiye Ukrayna ekonomik ilişkilerinin en önemli gündem maddesi olan Serbest Ticaret Antlaşması’na gelince. İki ülke müzakere heyetleri arasında ziyaret öncesi iki ayrı toplantı gerçekleşti. Uluslararası iş çevrelerinin yakından tanıdığı bir isim olan, Ukrayna’nın yeni ticaret temsilcisi ve bu müzakerelerde Ukrayna heyetlerine başkanlık yapan Taras Kachka’nın müzakere sürecine olumlu katkı yaptığı kesin. İki ülkenin de hassasiyetleri olan alanlar olduğunu kabul etmek lazım ancak büyük resme bakıldığında Serbest Ticaret Antlaşması’nın ikili ticaret ve yatırımlara etkisi öyle büyük olacak ki, bugüne kadar kaybedilen zaman ne yazık ki hem iş dünyaları için hem de halkların refahı için büyük kayıp. Altın çağını yaşadığını söylediğimiz bu dönemde 2008’de yıllık 8 milyar $’a ulaşan ticaretin hacminin, son yıllarda yıllık 4 ila 5 milyar $ aralığında sıkışıp kalması kabul edilebilir değil. Cumhurbaşkanı Zelenskiy’in bu dönemlerini turbo rejim olarak ifade ettiğini söylemiştik. Başbakan Honçaruk ise Ağustos 2019’da henüz yeni hükümet kurulmadan ve kabine üyesi olmadan, Türk iş dünyasının temsilcileriyle Cumhurbaşkanı Zelenskiy’in resmi gezisinde bir araya geldiğinde kurulacak yeni hükümetin ekonomide en önemli önceliklerinden birinin Türkiye Ukrayna Ticaret Antlaşması olacağı bilgisini paylaşmıştı. Bu gezide bu alanda liderler düzeyinde bir imza olmasa da, ticaret, ekonomi bakanları düzeyinde bir imza ile sürecin imzalanma öncesi son aşamaya taşınması kazanım olacaktır. Karşılıklı yatırımların ve ticaretin iki ülke cumhurbaşkanlarının koymuş olduğu hedef olan 10 milyar $’lara çıkması hayal değil ve bu hedeflere ulaşmanın anahtarı Serbest Ticaret Antlaşmasından geçiyor.
Ukrayna Cumhurbaşkanı Volodymyr Zelenskiy, göreve geldiği günden beri halka yakınlığı, yatırımcılara verdiği değerle Türkiye’de bilinen, sevilen bir yabancı lider. Türkiye’ye, Türk iş dünyasına ve Türkiye’nin hızlı kalkınmasına ilgisi Türk yatırımcılara güven veriyor, onları cesaretlendiriyor. Ülke liderleri arasındaki kişisel uyumun da ülke ilişkilerine güçlü katkı yaptığı bir gerçek. Kişisel ilişkilere önem veren bir lider olan Cumhurbaşkanı Erdoğan ile de sıcak, samimi bir ilişki yakaladılar. Önümüzdeki dönemde Türkiye ve Ukrayna’nın ekonomik ilişkilerinin daha da üst seviyelere çıkmaması için bir neden yok. Kayıtlar hala devam ederken, Ukrayna Sanayi ve Ticaret Odası’nda iki ülke liderlerinin katılımlarıyla gerçekleşecek iş forumuna kaydolan iş insanı sayısının ise yalnızca birkaç günde 800’ü geçmesi, forumu alanında Ukrayna’daki belki de en büyük iş forumu haline sokarken, Türk ve Ukrayna iş dünyalarının daha büyük hedeflere hazır olduğunun güzel bir kanıtı.
**Burak Pehlivan**
**Uluslararası Türk Ukrayna İşadamları Derneği (TUİD)**
**Yönetim Kurulu Başkanı ** | 251.466667 | 1,716 | 0.841022 | tur_Latn | 0.999993 |
4717c68422b1ba5b93268a3e3fe08dc486a3325b | 1,885 | md | Markdown | _posts/2018/2018-07-12-911.md | ibesora/rafagas | c4fc0a34a887998d9b92d8c00a4336a633ebde69 | [
"MIT"
] | 22 | 2016-01-24T22:27:54.000Z | 2021-12-06T11:22:12.000Z | _posts/2018/2018-07-12-911.md | ibesora/rafagas | c4fc0a34a887998d9b92d8c00a4336a633ebde69 | [
"MIT"
] | 10 | 2015-07-13T18:17:46.000Z | 2021-04-19T09:05:10.000Z | _posts/2018/2018-07-12-911.md | ibesora/rafagas | c4fc0a34a887998d9b92d8c00a4336a633ebde69 | [
"MIT"
] | 5 | 2018-12-16T15:19:45.000Z | 2020-12-09T09:45:09.000Z | ---
date: '2018-07-12'
layout: rafaga
rafagas:
- desc: A strong heat wave and drought in Wales fields are showing ancient remains
from the Iron Age
invalid: true
keyw: wales
link: https://earther.com/scorching-heat-wave-reveals-signs-of-ancient-civilizati-1827482705
microlink:
desc: Weeks of extreme heat and drought have baked Welsh fields to a crisp. But
amidst the desiccated remains of crops, bizarre circles of green have emerged.
While I would like to tell you it’s aliens, it’s definitely not aliens. It’s
the remains of ancient settlements emerging from centuries of slumber.…
image: https://i.kinja-img.com/gawker-media/image/upload/s--1qH7WZ2W--/c_fill,fl_progressive,g_center,h_900,q_80,w_1600/icbz43srbpakltq2d1ae.jpg
logo: https://i.kinja-img.com/gawker-media/image/upload/s--alkssgEn--/c_fill,fl_progressive,g_center,h_200,q_80,w_200/eir2ldhuj6n1thhxrcpq.png
title: Scorching Heat Wave Reveals Signs of Ancient Civilization in the UK
- desc: The Adirondack Atlas is a complete information system for the park with an
open API to query any data
keyw: adirondack
link: https://adirondackatlas.org/
microlink:
desc: The Collaborative Map Project of the Adirondacks
title: Adirondack Atlas
- desc: Using several geospatial parameters to find the best location for NYC food
trucks
keyw: nyc
link: https://carto.com/blog/optimizing-food-truck-locations/
microlink:
desc: Learn why new data streams are ushering in a new era of site planning in
our latest post about optimizing food truck locations in New York City.
image: https://carto.com/blog/img/posts/2018/2018-07-01-optimizing-food-truck-locations/header-image.3ed58966.png
logo: https://carto.com/favicon.ico
title: 'Site Planning and Revenue Prediction: Optimizing Food Truck Locations
in New York City'
rid: 911
--- | 50.945946 | 148 | 0.756499 | eng_Latn | 0.836829 |
471861f4fe86391569ade6bf1c60b03c4bced73f | 2,558 | md | Markdown | api/v1/datadog/docs/SyntheticsTestRequestProxy.md | winebarrel/datadog-api-client-go | 37b699009301991327b0d12ddc13213f1a5b0a7d | [
"Apache-2.0"
] | null | null | null | api/v1/datadog/docs/SyntheticsTestRequestProxy.md | winebarrel/datadog-api-client-go | 37b699009301991327b0d12ddc13213f1a5b0a7d | [
"Apache-2.0"
] | null | null | null | api/v1/datadog/docs/SyntheticsTestRequestProxy.md | winebarrel/datadog-api-client-go | 37b699009301991327b0d12ddc13213f1a5b0a7d | [
"Apache-2.0"
] | null | null | null | # SyntheticsTestRequestProxy
## Properties
| Name | Type | Description | Notes |
| ----------- | -------------------------------- | -------------------------------------------- | ---------- |
| **Headers** | Pointer to **map[string]string** | Headers to include when performing the test. | [optional] |
| **Url** | **string** | URL of the proxy to perform the test. | |
## Methods
### NewSyntheticsTestRequestProxy
`func NewSyntheticsTestRequestProxy(url string) *SyntheticsTestRequestProxy`
NewSyntheticsTestRequestProxy instantiates a new SyntheticsTestRequestProxy object.
This constructor will assign default values to properties that have it defined,
and makes sure properties required by API are set, but the set of arguments
will change when the set of required properties is changed.
### NewSyntheticsTestRequestProxyWithDefaults
`func NewSyntheticsTestRequestProxyWithDefaults() *SyntheticsTestRequestProxy`
NewSyntheticsTestRequestProxyWithDefaults instantiates a new SyntheticsTestRequestProxy object.
This constructor will only assign default values to properties that have it defined,
but it doesn't guarantee that properties required by API are set.
### GetHeaders
`func (o *SyntheticsTestRequestProxy) GetHeaders() map[string]string`
GetHeaders returns the Headers field if non-nil, zero value otherwise.
### GetHeadersOk
`func (o *SyntheticsTestRequestProxy) GetHeadersOk() (*map[string]string, bool)`
GetHeadersOk returns a tuple with the Headers field if it's non-nil, zero value otherwise
and a boolean to check if the value has been set.
### SetHeaders
`func (o *SyntheticsTestRequestProxy) SetHeaders(v map[string]string)`
SetHeaders sets Headers field to given value.
### HasHeaders
`func (o *SyntheticsTestRequestProxy) HasHeaders() bool`
HasHeaders returns a boolean if a field has been set.
### GetUrl
`func (o *SyntheticsTestRequestProxy) GetUrl() string`
GetUrl returns the Url field if non-nil, zero value otherwise.
### GetUrlOk
`func (o *SyntheticsTestRequestProxy) GetUrlOk() (*string, bool)`
GetUrlOk returns a tuple with the Url field if it's non-nil, zero value otherwise
and a boolean to check if the value has been set.
### SetUrl
`func (o *SyntheticsTestRequestProxy) SetUrl(v string)`
SetUrl sets Url field to given value.
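### Example
A minimal usage sketch (the import path follows this repository's layout; the proxy URL and header values are made-up examples):
```go
package main

import (
	"fmt"

	datadog "github.com/DataDog/datadog-api-client-go/api/v1/datadog"
)

func main() {
	// Url is required, so it is passed to the constructor.
	proxy := datadog.NewSyntheticsTestRequestProxy("http://proxy.example.internal:3128")

	// Headers is optional and can be set afterwards.
	proxy.SetHeaders(map[string]string{"X-Synthetics-Run": "true"})

	if url, ok := proxy.GetUrlOk(); ok {
		fmt.Println("proxy url:", *url)
	}
}
```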
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
| 34.567568 | 161 | 0.712275 | eng_Latn | 0.856341 |
4718d10f69f6d6ffa7f658cfa27c10144931604c | 2,890 | md | Markdown | README.md | tociyuki/udiff-cxx11 | 319c7d1ddf67f6a058a2a6fb37e9debe3008d8c0 | [
"Unlicense"
] | null | null | null | README.md | tociyuki/udiff-cxx11 | 319c7d1ddf67f6a058a2a6fb37e9debe3008d8c0 | [
"Unlicense"
] | null | null | null | README.md | tociyuki/udiff-cxx11 | 319c7d1ddf67f6a058a2a6fb37e9debe3008d8c0 | [
"Unlicense"
] | null | null | null | udiff - Word based Unified diff Command Between Two Text Files
======================================================
udiff computes differences between two text files
and reports the results in a word-based, colored unified diff format.
The words in each line are split according to the Unicode 7.0 UCD Script
property.
It is written in C++11.
The engine is an implementation of a linear-space variation of
Wu's O(NP) algorithm\[1\], built in a way similar to
Myers's bidirectional method\[2\]. The linear-space variation
was devised by MIZUTANI Tociyuki from scratch.
\[1\] S. Wu, U. Manber, G. Myers, W. Miller,
<q>An O(NP) Sequence Comparison Algorithm</q>,
Inf. Process. Lett. Volume 35 Issue 6 (1990)
\[2\] E. W. Myers, <q>An O(ND) Difference Algorithm and Its Variations</q>,
Algorithmica 1 (1986)
VERSION
------
0.02
INSTALL
------
The current version works only in POSIX environments and on terminals that
support ANSI color escape sequences. udiff uses the isatty (3) function.
$ make
$ ./udiff text-original text-new
OPTIONS
------
udiff is not compatible with the POSIX diff (1) command.
--color report with color word-based unified format.
--nocolor report normal unified format.
-u, -U NUM, --unified\[=NUM\]
set unified context lines (default 3).
--help show help.
-v, --version show version.
LICENSE
------
License: The BSD 3-Clause
Copyright (c) 2015, MIZUTANI Tociyuki
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| 35.243902 | 75 | 0.741522 | eng_Latn | 0.643317 |
47192afde3cb419714644fe203fcd2c1c888e625 | 8,930 | md | Markdown | articles/security/fundamentals/identity-management-best-practices.md | ddbullfrog/mc-docs.zh-cn | f4315c2a5e6a8259a02def0160c690bd42956cd7 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/security/fundamentals/identity-management-best-practices.md | ddbullfrog/mc-docs.zh-cn | f4315c2a5e6a8259a02def0160c690bd42956cd7 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/security/fundamentals/identity-management-best-practices.md | ddbullfrog/mc-docs.zh-cn | f4315c2a5e6a8259a02def0160c690bd42956cd7 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: Azure 标识和访问安全最佳做法 | Azure Docs
description: 本文提供一系列有关使用内置 Azure 功能进行标识管理和访问控制的最佳实践。
services: security
documentationcenter: na
author: Johnnytechn
manager: RKarlin
editor: TomSh
ms.assetid: 07d8e8a8-47e8-447c-9c06-3a88d2713bc1
ms.service: security
ms.subservice: security-fundamentals
ms.devlang: na
ms.topic: article
ms.tgt_pltfrm: na
ms.workload: na
ms.date: 10/12/2020
ms.author: v-johya
origin.date: 10/28/2019
ms.openlocfilehash: ffcf6b282c26d4a9096f6bcf257e0fef3281999d
ms.sourcegitcommit: 6f66215d61c6c4ee3f2713a796e074f69934ba98
ms.translationtype: HT
ms.contentlocale: zh-CN
ms.lasthandoff: 10/16/2020
ms.locfileid: "92128303"
---
# <a name="azure-identity-management-and-access-control-security-best-practices"></a>Azure 标识管理和访问控制安全最佳实践
本文介绍一系列 Azure 标识管理和访问控制安全最佳实践。 这些最佳做法衍生自我们的 [Azure AD](../../active-directory/fundamentals/active-directory-whatis.md) 经验和客户经验。
对于每项最佳做法,本文将说明:
* 最佳实践是什么
* 为何要启用该最佳实践
* 如果无法启用该最佳实践,可能的结果是什么
* 最佳实践的可能替代方案
* 如何学习启用最佳实践
这篇 Azure 标识管理和访问控制安全最佳实践以共识以及 Azure 平台功能和特性集(因为在编写本文时已存在)为基础。
<!-- not available -->
看法和技术将随着时间改变,本文会定期更新以反映这些更改。
本文中介绍的 Azure 标识管理和访问控制安全最佳实践包括:
* 将标识视为主要安全边界
* 集中化标识管理
* 管理连接的租户
* 规划例行的安全改进
* 启用密码管理
* 对用户强制执行多重身份验证
* 使用基于角色的访问控制
* 控制资源所在的位置
* 使用 Azure AD 进行存储身份验证
## <a name="treat-identity-as-the-primary-security-perimeter"></a>将标识视为主要安全边界
许多人认为标识是主要安全边界。 这与以网络安全为重点的传统做法不同。 网络边界出现越来越多的漏洞,与 [BYOD](https://aka.ms/byodcg) 设备和云应用程序激增之前相比,边界防御不再那样有效。
[Azure Active Directory (Azure AD)](../../active-directory/fundamentals/active-directory-whatis.md) 是用于标识和访问管理的 Azure 解决方案。 Azure AD 是 Microsoft 提供的多租户、基于云的目录和标识管理服务。 它将核心目录服务、应用程序访问管理和标识保护融入一个解决方案中。
以下部分列出了使用 Azure AD 实现标识和访问安全性的最佳做法。
**最佳做法** :围绕用户和服务标识进行安全控制和检测。
**详细信息** :使用 Azure AD 并置控制和标识。
## <a name="centralize-identity-management"></a>集中化标识管理
在[混合标识](https://resources.office.com/ww-landing-M365E-EMS-IDAM-Hybrid-Identity-WhitePaper.html?)方案中,我们建议集成本地目录和云目录。 通过集成,IT 团队可以在一个位置集中管理帐户,而不管帐户是在哪里创建的。 集成还通过提供用于访问云和本地资源的通用标识,从而帮助用户提高工作效率。
**最佳做法** :建立一个 Azure AD 实例。 一致性和一个权威源不仅会提高简明性,还会减少人为错误和配置复杂性带来的安全风险。
**详细信息** :指定一个 Azure AD 目录作为企业帐户和组织帐户的权威源。
**最佳做法** :将本地目录与 Azure AD 进行集成。
**详细信息** :使用 [Azure AD Connect](/active-directory/connect/active-directory-aadconnect) 将本地目录与云目录同步。
> [!Note]
> 存在[影响 Azure AD Connect 性能的因素](../../active-directory/hybrid/plan-connect-performance-factors.md)。 确保 Azure AD Connect 有足够的容量来防止性能不佳的系统影响安全性和工作效率。 大型或复杂的组织(预配超过 10 万个对象的组织)应遵循[建议](../../active-directory/hybrid/whatis-hybrid-identity.md)来优化其 Azure AD Connect 实现。
**最佳做法** :不要将现有 Active Directory 实例中权限高的帐户同步到 Azure AD。
**详细信息** :请勿更改已筛选掉这些帐户的默认 [Azure AD Connect 配置](../../active-directory/hybrid/how-to-connect-sync-configure-filtering.md)。 此配置降低了攻击者从云透视到本地资源(这可能会造成重大事件)的风险。
**最佳做法** :启用密码哈希同步。
**详细信息** :密码哈希同步是用于将用户密码哈希从本地 Active Directory 实例同步到基于云的 Azure AD 实例的功能。 此同步有助于防止重放先前攻击中泄露的凭据。
即使决定使用 Active Directory 联合身份验证服务 (AD FS) 或其他标识提供者进行联合身份验证,也可以选择性地设置密码哈希同步作为备用机制,以应对本地服务器发生故障或临时不可用的情况。 借助此同步,用户可以使用与登录本地 Active Directory 实例相同的密码来登录服务。 如果用户对其他未连接到 Azure AD 的服务使用过相同的电子邮件地址和密码,此同步还可便于标识保护将同步的密码哈希与已知被盗用的密码进行比较,从而检测被盗用的凭据。
有关详细信息,请参阅[使用 Azure AD Connect 同步实现密码哈希同步](/active-directory/connect/active-directory-aadconnectsync-implement-password-hash-synchronization)。
<!-- B2B2 B2C not availavle -->
未将其本地标识与云标识集成的组织在管理帐户方面可能开销更大。 这种开销增加了出错和安全漏洞的可能性。
> [!Note]
> 你需要选择关键帐户将驻留在哪些目录中,以及所使用的管理工作站是由新的云服务托管,还是由现有进程托管。 使用现有的管理和标识预配流程可以降低一些风险,但也可能会造成攻击者入侵本地帐户并转向云的风险。 不妨对不同的角色(例如,IT 管理员与业务部门管理员)使用不同的策略。 可以使用两个选项。 第一个选项是创建不与本地 Active Directory 实例同步的 Azure AD 帐户。 将你的管理工作站加入到 Azure AD,你可以使用 Microsoft Intune 对其进行管理和修补。 第二种选择是,通过同步到本地 Active Directory 实例来使用现有的管理员帐户。 使用 Active Directory 域中的现有工作站来实现管理和安全性。
## <a name="manage-connected-tenants"></a>管理已连接的租户
你的安全组织需要能够查看订阅来评估风险,并确定是否遵循了组织的策略和任何法规要求。 你应确保安全组织能够查看所有(通过 [Azure ExpressRoute](../../expressroute/expressroute-introduction.md) 或[站点到站点 VPN](../../vpn-gateway/vpn-gateway-howto-multi-site-to-site-resource-manager-portal.md))连接到生产环境和网络的订阅。 Azure AD 中的[全局管理员/公司管理员](../../active-directory/users-groups-roles/directory-assign-admin-roles.md#company-administrator-permissions)可以将自己的访问权限提升为[用户访问管理员](../../role-based-access-control/built-in-roles.md#user-access-administrator)角色,并查看所有连接到环境的订阅和管理组。
请参阅[提升访问权限以管理所有 Azure 订阅和管理组](../../role-based-access-control/elevate-access-global-admin.md),以确保你和你的安全组可以查看连接到你的环境的所有订阅或管理组。 评估风险后,应删除此提升的访问权限。
<!--not available -->
## <a name="turn-on-conditional-access"></a>启用条件访问
用户可能会从任意位置使用各种设备和应用访问组织的资源。 作为一名 IT 管理员,你需要确保这些设备符合安全性和符合性标准。 仅关注谁可以访问资源不再能满足需求。
为了平衡安全性与工作效率,在做出访问控制决策之前,需要考虑如何访问资源。 使用 Azure AD 条件访问,可以满足这一需求。 使用条件访问,可以根据访问云应用的条件做出自动访问控制决策。
**最佳做法** :管理和控制对公司资源的访问。
**详细信息** :根据 SaaS 应用和 Azure AD 连接的应用的组、位置和应用敏感度,配置通用 Azure AD [条件访问策略](../../active-directory/conditional-access/concept-conditional-access-policy-common.md)。
## <a name="plan-for-routine-security-improvements"></a>计划例程安全改进
安全性一直在不断发展,在云和标识管理框架中构建一种定期显示安全性发展并发现保护环境的新方法是很重要的。
标识安全分数是 Microsoft 发布的一组建议的安全控制,旨在为你提供一个数字分数,以便客观地度量你的安全状况,并帮助计划未来的安全改进。 你还可以查看你的分数与其他行业分数的比较,以及你自己的分数在一段时间内的趋势。
**最佳做法** :根据你所在行业的最佳做法来计划例程安全评审和改进。
**详细信息** :使用标识安全分数功能对你在一段时间内的改进进行排名。
## <a name="enable-password-management"></a>启用密码管理
如果有多个租户或者你想要允许用户[重置自己的密码](../../active-directory/user-help/active-directory-passwords-update-your-own-password.md),则必须使用适当的安全策略来防止滥用。
**最佳做法** :为用户设置自助式密码重置 (SSPR)。
**详细信息** :使用 Azure AD [自助式密码重置](/active-directory-b2c/active-directory-b2c-reference-sspr)功能。
**最佳做法** :监视是否在使用 SSPR 及其使用情况。
**详细信息** :通过使用 Azure AD [密码重置注册活动报表](/active-directory/authentication/howto-sspr-reporting)监视正在注册的用户。 Azure AD 提供的报表功能可帮助使用预生成的报表来回答问题。 如果有相应的授权,还可以创建自定义查询。
## <a name="enforce-multi-factor-verification-for-users"></a>对用户强制执行多重身份验证
建议对所有用户要求进行双重验证。 这包括组织中的管理员和其他人员,如果他们的帐户泄露,可能会产生重大影响(例如,财务官员)。
要求双重验证有多种选项。 最佳选项取决于你的目标、正在运行的 Azure AD 版本以及许可计划。 请参阅[如何要求对用户进行双重验证](/active-directory/authentication/howto-mfa-userstates)了解最佳选项。 有关许可和定价的详细信息,请参阅 [Azure AD](https://www.azure.cn/pricing/details/active-directory/) 和 [Azure 多重身份验证](https://www.azure.cn/pricing/details/multi-factor-authentication/)定价页。
以下是启用双重验证的选项和优势:
**选项 1** :使用 Azure AD 安全默认值为所有用户和登录方法启用 MFA 优势:借助此选项,可以轻松、快速地为环境中的所有用户强制执行 MFA,同时采用严格的策略来执行以下操作:
* 质询管理帐户和管理登录机制
* 要求通过 Microsoft Authenticator 对所有用户进行 MFA 质询
* 限制旧身份验证协议。
此方法可用于所有许可层,但不能与现有的条件访问策略混合使用。 你可以在 [Azure AD 安全默认值](/active-directory/fundamentals/concept-fundamentals-security-defaults)中找到更多信息
<!-- not available-->
<!-- not available-->
## <a name="use-role-based-access-control"></a>使用基于角色的访问控制
对于任何使用云的组织而言,云资源的访问管理至关重要。 [Azure 基于角色的访问控制 (Azure RBAC)](/role-based-access-control/overview) 可帮助你管理谁有权访问 Azure 资源、他们可以对这些资源执行哪些操作以及他们有权访问哪些区域。
在 Azure 中指定负责特定功能的组或单个角色有助于避免混乱,从而避免可能会导致安全风险的人为错误和自动化错误。 对于想要实施数据访问安全策略的组织而言,必须根据“需要知道”和“最低权限”安全原则限制访问权限。
你的安全团队需要能够洞察你的 Azure 资源,以便评估并修正风险。 如果安全团队具有运营职责,则需要额外的权限来完成他们的作业。
可以使用 [RBAC](/role-based-access-control/overview) 在一定范围内向用户、组和应用分配权限。 角色分配的范围可以是订阅、资源组或单个资源。
**最佳做法** :在团队中分离职责,只向用户授予执行作业所需的访问权限。 只允许在特定范围内执行特定操作,而不要在 Azure 订阅或资源中向每个人都授予无限制权限。
**详细信息** :使用 Azure 中的 [Azure 内置角色](/role-based-access-control/built-in-roles)向用户分配权限。
> [!Note]
> 特定的权限会造成不必要的复杂性和混乱,累积成一个“遗留”配置,难以在不担心破坏某些东西的情况下修复。 避免特定于资源的权限。 而是将管理组用于企业范围内的权限,并将资源组用于订阅中的权限。 避免用户特定的权限。 而是向 Azure AD 中的组分配权限。
**最佳做法** :向具有 Azure 职责的安全团队授予对 Azure 资源的访问权限,以便他们可以评估和修正风险。
**详细信息** :向安全团队授予 RBAC [安全读取者](/role-based-access-control/built-in-roles#security-reader)角色。 可以使用根管理组或段管理组,具体视职责范围而定:
* 根管理组:用于负责所有企业资源的团队
* 段管理组:用于范围有限的团队(通常是由于法规或其他组织边界所致)
**最佳做法** :向具有直接运营职责的安全团队授予适当的权限。
**详细信息** :审阅 RBAC 内置角色,以进行适当的角色分配。 如果内置角色不能满足组织的具体需求,则可以创建 [Azure 自定义角色](/role-based-access-control/custom-roles)。 与内置角色一样,可以在订阅、资源组和资源范围内向用户、组和服务主体分配自定义角色。
**最佳做法** :向需要的安全角色授予 Azure 安全中心访问权限。 使用安全中心,安全团队可以快速发现和修正风险。
**详细信息** :将具有这些需求的安全团队添加到 RBAC [安全管理员](/role-based-access-control/built-in-roles#security-admin)角色,这样他们就可以查看安全策略、查看安全状态、编辑安全策略、查看警报和建议,并能消除警报和建议。 你可以使用根管理组或段管理组来执行此操作,具体取决于职责范围。
未使用 RBAC 等功能实施数据访问控制的组织可能会给其用户分配不必要的权限。 允许用户访问他们不应有权访问的类型的数据(例如,对业务有重大影响的数据)可能会导致数据泄漏。
<!--Lower exposure not available -->
## <a name="control-locations-where-resources-are-created"></a>控制创建资源的位置
非常重要的一点是,既要允许云操作员执行任务,同时又要防止他们违反管理组织资源所需的惯例。 想要控制创建资源的位置的组织应该对这些位置进行硬编码。
可以使用 [Azure 资源管理器](/azure-resource-manager/resource-group-overview)创建安全策略,其中的定义描述了会明确遭到拒绝的操作或资源。 可以在所需范围(例如订阅、资源组或是单个资源)分配这些策略定义。
> [!NOTE]
> 安全策略与 RBAC 不同。 它们实际上使用 RBAC 授权用户来创建这些资源。
>
>
无法控制资源创建方式的组织更容易因用户创建的资源超过所需数目,而产生滥用服务的情况。 强化资源创建过程是保护多租户方案的重要步骤。
<!--not available -->
## <a name="use-azure-ad-for-storage-authentication"></a>使用 Azure AD 进行存储身份验证
[Azure 存储](/storage/common/storage-auth-aad)支持使用 Azure AD 对 Blob 存储和队列存储进行身份验证和授权。 借助 Azure AD 身份验证,可以使用基于 Azure 角色的访问控制向用户、组和应用(一直到各个 Blob 容器或队列的范围)授予特定权限。
建议使用 [Azure AD 验证对存储的访问](https://azure.microsoft.com/blog/azure-storage-support-for-azure-ad-based-access-control-now-generally-available/)。
## <a name="next-step"></a>后续步骤
有关通过 Azure 设计、部署和管理云解决方案时可以使用的更多安全最佳做法,请参阅 [Azure 安全最佳做法和模式](best-practices-and-patterns.md)。
| 42.932692 | 494 | 0.798992 | yue_Hant | 0.874749 |
4719a4274576aaee93befb60e1e042d563eade30 | 2,795 | md | Markdown | pelican/content/ex23.md | richardcornish/richlearnspythonthehardway | 3e0059a8aeb7e384ddcc7d1fc0c5ab4a4700d6f3 | [
"BSD-3-Clause"
] | null | null | null | pelican/content/ex23.md | richardcornish/richlearnspythonthehardway | 3e0059a8aeb7e384ddcc7d1fc0c5ab4a4700d6f3 | [
"BSD-3-Clause"
] | null | null | null | pelican/content/ex23.md | richardcornish/richlearnspythonthehardway | 3e0059a8aeb7e384ddcc7d1fc0c5ab4a4700d6f3 | [
"BSD-3-Clause"
] | 1 | 2016-01-29T20:38:23.000Z | 2016-01-29T20:38:23.000Z | Title: Exercise 23
Date: 2014-12-05 12:38PM
Work was really tough this week, so I didn't have time for exercises at home. I'm also trying to get a work project done this weekend, and then after that I hope I can stop taking work home with me. It's exhausting.
Picking up where we last left off, [Exercise 23](https://learnpythonthehardway.org/book/ex23.html) also has no code. Instead our mission is to:
1. Find Python source code for things you need.
2. Read through the code and looking for files.
3. Try to understand code you find.
Nice! There have been a lot of interesting projects I want to get into but haven't found the time to get into them. I've always been curious about the technology the [Chicago Tribune News Applications Team](http://blog.apps.chicagotribune.com/) uses. I've worked with them on design projects, but have been very shy and reluctant to talk about code. Now I get an excuse to examine one of their projects.
Let's look at [Tarbell](http://tarbell.tribapps.com/):
> Tarbell is a lightweight web publishing platform. Tarbell makes it quick and easy to start projects based on common templates. Tarbell optionally allows project data to be managed with Google Spreadsheets and easy project publishing with Amazon S3.
It was a little hard [following the code](https://github.com/tarbell-project/tarbell), so I installed it and tried getting a basic page going. It's a [Flask-based](http://flask.pocoo.org/), static webpage generator, which I can see being useful in a newsroom. I got through the [tutorial](http://tarbell.readthedocs.org/en/0.9-beta7/tutorial.html) just far enough to get a blog going for our the universe's favorite savior of the human race, [Murphy Cooper](http://interstellarfilm.wikia.com/wiki/Murphy_Cooper).

Let's run the server:

I went through the labor of [using the Google Console API to allow spreadsheet access](http://tarbell.readthedocs.org/en/0.9-beta7/install.html#configure-google-spreadsheet-access-optional) (switching between the [beta6](http://tarbell.readthedocs.org/en/0.9-beta6/tutorial.html) and [beta7](http://tarbell.readthedocs.org/en/0.9-beta7/tutorial.html) versions of the docs due to broken images), but couldn't get a spreadsheet working. The docs say to edit the spreadsheet you just made...but I don't think I ever made one. I don't know if I missed a step, but I couldn't figure it out.
I will probably prefer [Pelican](http://getpelican.com/) (which made this blog) or [Cactus](http://cactusformac.com/) (whose Mac GUI went free this week) before Tarbell, but it's good to know choices exist, and I can see this being a choice for newsrooms that rely on Google Spreadsheets as a GUI.
| 96.37931 | 585 | 0.771735 | eng_Latn | 0.988714 |
471a763a1883bc6fad44d5ceead021705ab9268f | 862 | md | Markdown | docs/cpp/com-error-operator-equal.md | yoichinak/cpp-docs.ja-jp | 50048c3d1101537497403efb4e7b550108f3a8f0 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/cpp/com-error-operator-equal.md | yoichinak/cpp-docs.ja-jp | 50048c3d1101537497403efb4e7b550108f3a8f0 | [
"CC-BY-4.0",
"MIT"
] | 1 | 2021-04-01T04:17:07.000Z | 2021-04-01T04:17:07.000Z | docs/cpp/com-error-operator-equal.md | yoichinak/cpp-docs.ja-jp | 50048c3d1101537497403efb4e7b550108f3a8f0 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
description: '詳細については、「_com_error:: operator =」を参照してください。'
title: _com_error::operator =
ms.date: 11/04/2016
f1_keywords:
- _com_error::operator=
helpviewer_keywords:
- _com_error [C++]
ms.assetid: b9cc4094-d055-450c-b45a-0a95317488f8
ms.openlocfilehash: 3c27fcd612fcf2fd67b09ac1217286edd69e1557
ms.sourcegitcommit: d6af41e42699628c3e2e6063ec7b03931a49a098
ms.translationtype: MT
ms.contentlocale: ja-JP
ms.lasthandoff: 12/11/2020
ms.locfileid: "97295797"
---
# <a name="_com_erroroperator-"></a>_com_error::operator =
**Microsoft 固有の仕様**
既存の `_com_error` オブジェクトを別のオブジェクトに割り当てます。
## <a name="syntax"></a>構文
```
_com_error& operator = (
const _com_error& that
) throw ( );
```
#### <a name="parameters"></a>パラメーター
*that*<br/>
`_com_error` オブジェクト。
**Microsoft 固有の仕様はここまで**
## <a name="see-also"></a>関連項目
[_com_error クラス](../cpp/com-error-class.md)
| 21.02439 | 60 | 0.733179 | yue_Hant | 0.122271 |
471aa3d8a24e18ff5f44b9136f97b29b13868799 | 10,973 | md | Markdown | docker/README.md | alexbosworth/lnd | 7ac50b060e8a1a4b979c8fa824a8e2675b32da99 | [
"MIT"
] | 1 | 2021-05-09T14:27:43.000Z | 2021-05-09T14:27:43.000Z | docker/README.md | alexbosworth/lnd | 7ac50b060e8a1a4b979c8fa824a8e2675b32da99 | [
"MIT"
] | null | null | null | docker/README.md | alexbosworth/lnd | 7ac50b060e8a1a4b979c8fa824a8e2675b32da99 | [
"MIT"
] | null | null | null | This document is written for people who are eager to do something with
the Lightning Network Daemon (`lnd`). This folder uses `docker-compose` to
package `lnd` and `btcd` together to make deploying the two daemons as easy as
typing a few commands. All configuration between `lnd` and `btcd` are handled
automatically by their `docker-compose` config file.
### Prerequisites
Name | Version
--------|---------
docker-compose | 1.9.0
docker | 1.13.0
### Table of content
* [Create lightning network cluster](#create-lightning-network-cluster)
* [Connect to faucet lightning node](#connect-to-faucet-lightning-node)
* [Questions](#questions)
### Create lightning network cluster
This section describes a workflow on `simnet`, a development/test network
that's similar to Bitcoin Core's `regtest` mode. In `simnet` mode blocks can be
generated at will, as the difficulty is very low. This makes it an ideal
environment for testing as one doesn't need to wait tens of minutes for blocks
to arrive in order to test channel related functionality. Additionally, it's
possible to spin up an arbitrary number of `lnd` instances within containers to
create a mini development cluster. All state is saved between instances using a
shared value.
Current workflow is big because we recreate the whole network by ourselves,
next versions will use the started `btcd` bitcoin node in `testnet` and
`faucet` wallet from which you will get the bitcoins.
In the workflow below, we describe the steps required to recreate following
topology, and send a payment from `Alice` to `Bob`.
```
+ ----- + + --- +
| Alice | <--- channel ---> | Bob | <--- Bob and Alice are the lightning network daemons which
+ ----- + + --- + creates the channel and interact with each other using
| | Bitcoin network as source of truth.
| |
+ - - - - - + - - - - - - +
|
+ --------------- +
| Bitcoin network | <--- In current scenario for simplicity we create only one
+ --------------- + "btcd" node which represents the Bitcoin network, in
real situation Alice and Bob will likely be
connected to different Bitcoin nodes.
```
**General workflow is the following:**
* Create a `btcd` node running on a private `simnet`.
* Create `Alice`, one of the `lnd` nodes in our simulation network.
* Create `Bob`, the other `lnd` node in our simulation network.
* Mine some blocks to send `Alice` some bitcoins.
* Open channel between `Alice` and `Bob`.
* Send payment from `Alice` to `Bob`.
* Close the channel between `Alice` and `Bob`.
* Check that on-chain `Bob` balance was changed.
Start `btcd`, and then create an address for `Alice` that we'll directly mine
bitcoin into.
```bash
# Init bitcoin network env variable:
$ export BITCOIN_NETWORK="simnet"
# Run "btcd" node:
$ docker-compose up -d "btcd"
# Run the "Alice" container and log into it:
$ docker-compose up -d "alice"
$ docker exec -i -t "alice" bash
# Generate a new backward compatible nested p2sh address for Alice:
alice$ lncli newaddress np2wkh
# Recreate "btcd" node and set Alice's address as mining address:
$ MINING_ADDRESS=<alice_address> docker-compose up -d "btcd"
# Generate 400 block (we need at least "100 >=" blocks because of coinbase
# block maturity and "300 ~=" in order to activate segwit):
$ docker-compose run btcctl generate 400
# Check that segwit is active:
$ docker-compose run btcctl getblockchaininfo | grep -A 1 segwit
```
Check `Alice` balance:
```
alice$ lncli walletbalance --witness_only=true
```
Connect `Bob` node to `Alice` node.
```bash
# Run "Bob" node and log into it:
$ docker-compose up --no-recreate -d "bob"
$ docker exec -i -t "bob" bash
# Get the identity pubkey of "Bob" node:
bob$ lncli getinfo
{
----->"identity_pubkey": "0343bc80b914aebf8e50eb0b8e445fc79b9e6e8e5e018fa8c5f85c7d429c117b38",
"alias": "",
"num_pending_channels": 0,
"num_active_channels": 0,
"num_peers": 0,
"block_height": 1215,
"block_hash": "7d0bc86ea4151ed3b5be908ea883d2ac3073263537bcf8ca2dca4bec22e79d50",
"synced_to_chain": true,
"testnet": false
}
# Get the IP address of "Bob" node:
$ docker inspect "bob" | grep IPAddress
# Connect "Alice" to the "Bob" node:
alice$ lncli connect <bob_pubkey>@<bob_host>
# Check list of peers on "Alice" side:
alice$ lncli listpeers
{
"peers": [
{
"pub_key": "0343bc80b914aebf8e50eb0b8e445fc79b9e6e8e5e018fa8c5f85c7d429c117b38",
"peer_id": 1,
"address": "172.19.0.4:5656",
"bytes_sent": "357",
"bytes_recv": "357",
"sat_sent": "0",
"sat_recv": "0",
"inbound": true,
"ping_time": "0"
}
]
}
# Check list of peers on "Bob" side:
bob$ lncli listpeers
{
"peers": [
{
"pub_key": "03d0cd35b761f789983f3cfe82c68170cd1c3266b39220c24f7dd72ef4be0883eb",
"peer_id": 1,
"address": "172.19.0.3:51932",
"bytes_sent": "357",
"bytes_recv": "357",
"sat_sent": "0",
"sat_recv": "0",
"inbound": false,
"ping_time": "0"
}
]
}
```
Create the `Alice<->Bob` channel.
```bash
# Open the channel with "Bob":
alice$ lncli openchannel --node_key=<bob_identity_pubkey> --num_confs=1 --local_amt=1000000
# Include funding transaction in block thereby open the channel:
$ docker-compose run btcctl generate 1
# Check that channel with "Bob" was created:
alice$ lncli listchannels
{
"channels": [
{
"active": true,
"remote_pubkey": "0343bc80b914aebf8e50eb0b8e445fc79b9e6e8e5e018fa8c5f85c7d429c117b38",
"channel_point": "3511ae8a52c97d957eaf65f828504e68d0991f0276adff94c6ba91c7f6cd4275:0",
"chan_id": "1337006139441152",
"capacity": "1005000",
"local_balance": "1000000",
"remote_balance": "0",
"unsettled_balance": "0",
"total_satoshis_sent": "0",
"total_satoshis_received": "0",
"num_updates": "0"
}
]
}
```
Send the payment form `Alice` to `Bob`.
```bash
# Add invoice on "Bob" side:
bob$ lncli addinvoice --value=10000
{
"r_hash": "<your_random_rhash_here>",
"pay_req": "<encoded_invoice>",
}
# Send payment from "Alice" to "Bob":
alice$ lncli sendpayment --pay_req=<encoded_invoice>
# Check "Alice"'s channel balance was decremented accordingly by the payment
# amount
alice$ lncli listchannels
# Check "Bob"'s channel balance was credited with the payment amount
bob$ lncli listchannels
```
Now we have open channel in which we sent only one payment, let's imagine
that we sent lots of them and we'll now like to close the channel. Let's do
it!
```bash
# List the "Alice" channel and retrieve "channel_point" which represent
# the opened channel:
alice$ lncli listchannels
{
"channels": [
{
"active": true,
"remote_pubkey": "0343bc80b914aebf8e50eb0b8e445fc79b9e6e8e5e018fa8c5f85c7d429c117b38",
---->"channel_point": "3511ae8a52c97d957eaf65f828504e68d0991f0276adff94c6ba91c7f6cd4275:0",
"chan_id": "1337006139441152",
"capacity": "1005000",
"local_balance": "990000",
"remote_balance": "10000",
"unsettled_balance": "0",
"total_satoshis_sent": "10000",
"total_satoshis_received": "0",
"num_updates": "2"
}
]
}
# Channel point consist of two numbers separated by colon the first one
# is "funding_txid" and the second one is "output_index":
alice$ lncli closechannel --funding_txid=<funding_txid> --output_index=<output_index>
# Include close transaction in block thereby close the channel:
$ docker-compose run btcctl generate 1
# Check "Alice" on-chain balance was credited by her settled amount in the channel:
alice$ lncli walletbalance
# Check "Bob" on-chain balance was credited with the funds he received in the
# channel:
bob$ lncli walletbalance
{
"balance": 0.0001
}
```
### Connect to faucet lightning node
In order to become more confident with `lnd` commands, I suggest you try
to create a mini lightning network cluster ([Create lightning network cluster](#create-lightning-network-cluster)).
In this section we will try to connect our node to the faucet/hub node,
which will open a channel with us and send us some amount of
bitcoins. The schema will be the following:
```
+ ----- + + ------ + (1) + --- +
| Alice | <--- channel ---> | Faucet | <--- channel ---> | Bob |
+ ----- + + ------ + + --- +
| | |
| | | <--- (2)
+ - - - - - - - - - - - - - + - - - - - - - - - - - - - +
|
+ --------------- +
| Bitcoin network | <--- (3)
+ --------------- +
(1) You may connect an additional node "Bob" and make the multihop
 payment Alice->Faucet->Bob
(2) "Faucet", "Alice" and "Bob" are the lightning network daemons which
creates the channel and interact with each other using Bitcoin network
as source of truth.
(3) In current scenario "Alice" and "Faucet" lightning network nodes
connected to different Bitcoin nodes. If you decide to connect "Bob"
to "Faucet" than already created "btcd" node would be sufficient.
```
First of all you need to run a `btcd` node on `testnet` and wait for it to be
synced with the test network (`May the Force and Patience be with you` ᕦ(ò_óˇ)ᕤ).
```bash
# Init bitcoin network env variable:
$ export BITCOIN_NETWORK="testnet"
# Run "btcd" node:
$ docker-compose up -d "btcd"
```
After `btcd` is synced, connect `Alice` to the `Faucet` node.
```bash
# Run "Alice" container and log into it:
$ docker-compose up -d "alice"; docker exec -i -t "alice" bash
# Connect "Alice" to the "Faucet" node:
alice$ lncli connect <faucet_identity_address>@<faucet_host>
```
After the connection is established, the `Faucet` node should create the channel
and send some amount of bitcoins to `Alice`.
**What you may do next?:**
- Send some amount to `Faucet` node back.
- Connect `Bob` node to the `Faucet` and make multihop payment (`Alice->Faucet->Bob`)
- Close channel with `Faucet` and check the onchain balance.
### Questions
[Ask questions in the `lnd` channel on freenode](https://webchat.freenode.net/?channels=lnd)
* How to see `alice` | `bob` | `btcd` logs?
```bash
docker-compose logs <alice|bob|btcd>
```
| 34.615142 | 115 | 0.629272 | eng_Latn | 0.9668 |
471accff1aecdec82ac5b496d76071096db4abdb | 477 | md | Markdown | src/markdown/docs/patterns/simon/options/hemcurve/en.md | sophiekoonin/website | a8c225ba2d2e3e399e244290736394435e4a61fc | [
"MIT"
] | 6 | 2018-11-16T02:12:46.000Z | 2019-05-30T17:23:47.000Z | src/markdown/docs/patterns/simon/options/hemcurve/en.md | sophiekoonin/website | a8c225ba2d2e3e399e244290736394435e4a61fc | [
"MIT"
] | 48 | 2018-10-03T12:06:03.000Z | 2019-07-27T18:23:38.000Z | src/markdown/docs/patterns/simon/options/hemcurve/en.md | sophiekoonin/website | a8c225ba2d2e3e399e244290736394435e4a61fc | [
"MIT"
] | 11 | 2018-10-07T12:34:13.000Z | 2019-04-24T19:26:09.000Z | ---
title: Hem curve
path: /en/docs/patterns/simon/options/hemcurve
pattern: simon
option: hemCurve
---
How much do you want the hem to curve upwards?
> - This applies only to the baseball and slashed hem styles. If you chose a straight hem, this will be ignored.
> - This value can never be more than the length bonus. If it is, it will silently be set to the length bonus value.
> - If you set this to zero, you'll get a straight hem regardless of what hem style you pick.
| 36.692308 | 116 | 0.744235 | eng_Latn | 0.998923 |
471b0dfc26f290d6b8755f525911dc137efdc6ab | 1,195 | md | Markdown | src/README.md | monadicbind/cis194 | 8c5488026472b1eb6f194c222aa2378b022fcc03 | [
"BSD-3-Clause"
] | null | null | null | src/README.md | monadicbind/cis194 | 8c5488026472b1eb6f194c222aa2378b022fcc03 | [
"BSD-3-Clause"
] | null | null | null | src/README.md | monadicbind/cis194 | 8c5488026472b1eb6f194c222aa2378b022fcc03 | [
"BSD-3-Clause"
] | 1 | 2020-06-16T09:17:42.000Z | 2020-06-16T09:17:42.000Z | # Week 1 Intro
[CreditCard.hs](CreditCard.hs) using `unfoldr` , `swap`, `foldr`
# Week 2 ADT
[LogAnalysis.hs](LogAnalysis.hs) using `liftA3` , `*>` , `<|>` and `<$>`
# Week 3 Recursion
[Golf.hs](Golf.hs) using `zipWith`, list comprehensions.
# Week 4 Higher order programming
[Wholemeal.hs](Wholemeal.hs) using `iterate` and `where` . `where` [differs](https://wiki.haskell.org/Let_vs._Where) from `let` .
# Week 5 Type classes
[Calc.hs](Calc.hs) using `liftA2` instead of `pure (*) <*> (f1 m) <*> (f2 m)` . `flip`
# Week 6 Lazy evaluation
[Fibonacci.hs](Fibonacci.hs) `zipWith` and `tail` for a lazy fib sequence.
[Matrix.hs](Matrix.hs)
# Week 7 Folds, Monoids
[JoinList.hs](JoinList.hs) `Foldable` and `Monoid` instances for JoinList
[Scrabble.hs](Scrabble.hs) using `zip`, `fromList` to build a Map, score `Monoid` and `mconcat`.
# Week 8 IO
[Party.hs](Party.hs) `uncurry` and `bind` . `foldMap` instead of `mconcat` and `map`
# Week 9 Functor
# Week 10 Applicative 1
[AParser.hs](AParser.hs) `Applicative` and `Alternative` instances
# Week 11 Applicative 2
[SExpr.hs](SExpr.hs) `*>` , `<*` and `<|>`
# Week 12 Monad
[Risk.hs](Risk.hs) `replicateM` , `bimap` and `>>=`
| 24.895833 | 129 | 0.664435 | eng_Latn | 0.384285 |
471b57e377c43847533761c64467b028dbdc5408 | 747 | md | Markdown | pages/en/lb4/apidocs/pooling.pooledvalue.release.md | Neverage/loopback.io | 10f0049f1de83dfb866c05ce06c7a0b9954da331 | [
"MIT"
] | 2 | 2021-11-05T10:12:18.000Z | 2021-12-10T09:27:57.000Z | pages/en/lb4/apidocs/pooling.pooledvalue.release.md | Neverage/loopback.io | 10f0049f1de83dfb866c05ce06c7a0b9954da331 | [
"MIT"
] | null | null | null | pages/en/lb4/apidocs/pooling.pooledvalue.release.md | Neverage/loopback.io | 10f0049f1de83dfb866c05ce06c7a0b9954da331 | [
"MIT"
] | null | null | null | ---
lang: en
title: 'API docs: pooling.pooledvalue.release'
keywords: LoopBack 4.0, LoopBack 4, Node.js, TypeScript, OpenAPI
sidebar: lb4_sidebar
editurl: https://github.com/strongloop/loopback-next/tree/master/extensions/pooling
permalink: /doc/en/lb4/apidocs.pooling.pooledvalue.release.html
---
<!-- Do not edit this file. It is automatically generated by API Documenter. -->
[Home](./index.md) > [@loopback/pooling](./pooling.md) > [PooledValue](./pooling.pooledvalue.md) > [release](./pooling.pooledvalue.release.md)
## PooledValue.release() method
The function to release the acquired value back to the pool
<b>Signature:</b>
```typescript
release(): Promise<void>;
```
<b>Returns:</b>
Promise<void>
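A minimal usage sketch (assuming the value was acquired with the `getPooledValue()` helper from `@loopback/pooling`; releasing in a `finally` block makes sure the value always goes back to the pool):

```typescript
import {getPooledValue} from '@loopback/pooling';

async function useConnection(poolingService: any) {
  const pooled = await getPooledValue(poolingService);
  try {
    // Work with pooled.value here.
  } finally {
    // Give the acquired value back to the pool.
    await pooled.release();
  }
}
```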
| 26.678571 | 152 | 0.714859 | eng_Latn | 0.445855 |
471b96f8eb1c180aa1509bf04010f74fde7faa2f | 2,479 | md | Markdown | projects/index.md | kaythi93/alexsnow348.github.io | b6feb59ed9fa36daae339de62f12d25573861735 | [
"MIT"
] | null | null | null | projects/index.md | kaythi93/alexsnow348.github.io | b6feb59ed9fa36daae339de62f12d25573861735 | [
"MIT"
] | null | null | null | projects/index.md | kaythi93/alexsnow348.github.io | b6feb59ed9fa36daae339de62f12d25573861735 | [
"MIT"
] | null | null | null | ---
layout: page
title: Projects
---
## 2019
* <strong><a href ="https://github.com/alexsnow348/FiniMini" target="_blank">FiniMini: The new way to become your own financial adviser</a></strong>
Everyone has some **savings** in the pocket.<br />
Everyone loves to **grow** it, but don’t know how.<br />
Seeking advice from professionals is **not cheap and easy**. <br />
So, why not **learn-by-yourself**! <br />
## 2018
* <strong><a href ="https://github.com/alexsnow348/investment-for-all" target="_blank">Investment for all (immigrants and illegal workers)</a></strong>
The project aims to solve the problem where minorities (immigrants and illegal workers) do not have enough privilege to participate in the market. Can we make room for them to invest and grow their savings for themselves? This project plans to tackle the above question and hopefully will come out with a solution to their problem.
* <strong><a href ="https://github.com/alexsnow348/FX-Key-Eco-Event" target="_blank">Forex key economic events notification</a></strong>
A notification engine for alerting on key economic events for trading purposes. The engine sends a notification to the registered phone 3 minutes ahead, alerting the user that a key economic announcement is about to be made. The information can be utilized to strategize for trading purposes.
* <strong><a href ="https://github.com/alexsnow348/facetag" target="_blank">FaceTag: Face Recognition system</a></strong>
Face Recognition system trained for 81 people. Accuracy of around 70% was achieved. The model was trained based on the <a href ="https://github.com/davidsandberg/facenet" target="_blank">FaceNet</a> model, which is inspired by Inception & Inception-ResNet (CNNs).
## 2017
* <strong><a href="https://wuthmone.shinyapps.io/ann_predictor_app/" target="_blank">Currency Exchange Rate Predictor</a></strong>
The project aims to forecast the next-day exchange rate based on previous exchange rate data sets and is built using <strong>R and R Shiny.</strong> Models are constructed with Artificial Neural Network (ANN) techniques such as <strong>MLP, RNN, RBF.</strong> Code implementation is available for both the <strong> <a href="https://github.com/alexsnow348/Exchange-Rate-Forecasting-Using-Ensemble-ANN-Models" target="_blank">Models</a> </strong> and the <strong><a href="https://github.com/alexsnow348/Exchange-Rate-Predictor-Web-App" target="_blank">Web App </a></strong> on GitHub.
| 72.911765 | 579 | 0.75958 | eng_Latn | 0.966203 |
471bb1057ad1741969f029a68b1f77dfc7b82000 | 3,149 | md | Markdown | docs/workflow-designer/how-to-set-breakpoints-in-workflows.md | silentwater/visualstudio-docs.de-de | 5a9caad5c1bee3041c2758238ffd04f4086a9bac | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/workflow-designer/how-to-set-breakpoints-in-workflows.md | silentwater/visualstudio-docs.de-de | 5a9caad5c1bee3041c2758238ffd04f4086a9bac | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/workflow-designer/how-to-set-breakpoints-in-workflows.md | silentwater/visualstudio-docs.de-de | 5a9caad5c1bee3041c2758238ffd04f4086a9bac | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: 'Workflow-Designer – Vorgehensweise: Festlegen von Haltepunkten in Workflows'
ms.date: 11/04/2016
ms.topic: conceptual
ms.assetid: e41b21c9-c061-4358-8e2f-eb5e412864a8
author: gewarren
ms.author: gewarren
manager: jillfra
ms.workload:
- multiple
ms.openlocfilehash: 7503d0b0bee201a9617e90966c9f75ac6333f228
ms.sourcegitcommit: 21d667104199c2493accec20c2388cf674b195c3
ms.translationtype: MT
ms.contentlocale: de-DE
ms.lasthandoff: 02/08/2019
ms.locfileid: "55908438"
---
# <a name="how-to-set-breakpoints-in-workflows"></a>Vorgehensweise: Festlegen von Haltepunkten in Workflows
Wenn Sie die Workflow-Designer verwenden, können Sie Haltepunkte für die grafischen Workflows festlegen, wie Sie in Visual Basic- oder C#-Code tun würden. Wie erwartet, hält die Workflowausführung an jedem festgelegten Haltepunkt an.
Ein Haltepunkt verfügt über drei Zustände: *Ausstehende*, *gebunden*, und *Fehler*. Wenn Sie einen Haltepunkt festlegen, erhält dieser den Status "Ausstehend". Dieser wird durch ein rot ausgefülltes Symbol dargestellt. Wenn die Laufzeit den Workflowtyp geladen hat, wechselt der Haltepunktstatus zu "Gebunden". Haben Sie ein falsches Format für den Haltepunkt angegeben, etwa einen ungültigen Aktivitätsnamen, dann erscheint eine Fehlermeldung. Der Haltepunkt wird immer noch dem Haltepunktfenster hinzugefügt, er wird jedoch mit einem kleinen "x" markiert.
> [!NOTE]
> Das Festlegen von Haltepunkten für aufgerufene Workflows wird nicht unterstützt.
> [!NOTE]
> Stellen Sie sicher, dass Sie die Option **nur eigenen Code aktivieren (nur verwaltet)** aus der **Tools** > **Optionen** > **Debuggen** Menü vor dem Debuggen. Wenn die Option nicht aktiviert ist und Sie einen Haltepunkt für die ersten inneren Sequenz festlegen, Sie zwei Sequenzen, die in einer anderen Sequenz verschachtelt haben, drücken **F11** ist nicht die zweite innere Sequenz debuggt.
> [!NOTE]
> Haltepunkte in einem Workflow werden nicht erreicht werden, wenn der vollständige Pfad zur XAML-Dateieigenschaft nicht korrekt ist. Der vollständige Pfad zur XAML-Datei ist nicht korrekt, nach dem Verschieben das Projekt oder Projektmappe in einen anderen Ordner oder einem anderen Computer. Wählen Sie **STRG**+**S** speichern und aktualisieren Sie die vollständige-Eigenschaft.
## <a name="to-set-a-breakpoint-on-an-activity-in-the-design-view"></a>So legen Sie einen Haltepunkt für eine Aktivität in der Entwurfsansicht fest
1. Wählen Sie die Aktivität aus, bei der der Debugger unterbrechen soll.
2. Auf der **Debuggen** , wählen Sie im Menü **Haltepunkt ein/aus**. Ein rotes Symbol wird am oberen linken Rand der Aktivität angezeigt.
Sie können alternativ drücken **F9** nach dem Auswählen der Aktivität, oder Sie können mit der rechten Maustaste in der Aktivitäts und wählen Sie **Haltepunkt** > **Haltepunkt einfügen** über das Kontextmenü.
## <a name="see-also"></a>Siehe auch
- [Debuggen von Workflows mit dem Workflow-Designer](../workflow-designer/debugging-workflows-with-the-workflow-designer.md)
- [Vorgehensweise: Debuggen von XAML mit dem Workflowdesigner](../workflow-designer/how-to-debug-xaml-with-the-workflow-designer.md) | 71.568182 | 557 | 0.79422 | deu_Latn | 0.997255 |
471ca777a4430597497905c8c3a91ee1f2d8ddd7 | 9,653 | md | Markdown | running-coreos/cloud-providers/digitalocean/index.md | popsikle/coreos-docs | 240ff9d404dc9b791ba09c3381e083f72f50be91 | [
"Apache-2.0"
] | null | null | null | running-coreos/cloud-providers/digitalocean/index.md | popsikle/coreos-docs | 240ff9d404dc9b791ba09c3381e083f72f50be91 | [
"Apache-2.0"
] | null | null | null | running-coreos/cloud-providers/digitalocean/index.md | popsikle/coreos-docs | 240ff9d404dc9b791ba09c3381e083f72f50be91 | [
"Apache-2.0"
] | null | null | null | ---
layout: docs
title: DigitalOcean
category: running_coreos
sub_category: cloud_provider
supported: true
weight: 1
---
# Running CoreOS on DigitalOcean
## Choosing a Channel
CoreOS is designed to be [updated automatically][update-docs] with different
schedules per channel. You can [disable this feature][reboot-docs], although we
don't recommend it. Read the [release notes][release-notes] for specific
features and bug fixes.
The following command will create a single droplet. For more details, check out
<a href="#via-the-api">Launching via the API</a>.
<div id="do-images">
<ul class="nav nav-tabs">
<li class="active"><a href="#stable" data-toggle="tab">Stable Channel</a></li>
<li><a href="#beta" data-toggle="tab">Beta Channel</a></li>
<li><a href="#alpha" data-toggle="tab">Alpha Channel</a></li>
</ul>
<div class="tab-content coreos-docs-image-table">
<div class="tab-pane" id="alpha">
<div class="channel-info">
<p>The alpha channel closely tracks master and frequently has new releases. The newest versions of <a href="{{site.url}}/using-coreos/docker">docker</a>, <a href="{{site.url}}/using-coreos/etcd">etcd</a>, and <a href="{{site.url}}/using-coreos/clustering">fleet</a> will be available for testing. Current version is CoreOS {{site.data.alpha-channel.do-version}}.</p>
<a href="https://cloud.digitalocean.com/droplets/new?image=coreos-alpha" class="btn btn-default">Launch CoreOS Droplet</a><br/><br/>
<p>Launch via DigitalOcean API by specifying <code>$REGION</code>, <code>$SIZE</code> and <code>$SSH_KEY_ID</code>:</p>
<pre>curl --request POST "https://api.digitalocean.com/v2/droplets" \
--header "Content-Type: application/json" \
--header "Authorization: Bearer $TOKEN" \
--data '{"region":"'"${REGION}"'",
"image":"{{site.data.alpha-channel.do-image-path}}",
"size":"'"$SIZE"'",
"user_data": "'"$(cat ~/cloud-config.yaml)"'",
"ssh_keys":["'"$SSH_KEY_ID"'"],
"name":"core-1"}'</pre>
</div>
</div>
<div class="tab-pane" id="beta">
<div class="channel-info">
<p>The beta channel consists of promoted alpha releases. Current version is CoreOS {{site.data.beta-channel.do-version}}.</p>
<a href="https://cloud.digitalocean.com/droplets/new?image=coreos-beta" class="btn btn-default">Launch CoreOS Droplet</a><br/><br/>
<p>Launch via DigitalOcean API by specifying <code>$REGION</code>, <code>$SIZE</code> and <code>$SSH_KEY_ID</code>:</p>
<pre>curl --request POST "https://api.digitalocean.com/v2/droplets" \
--header "Content-Type: application/json" \
--header "Authorization: Bearer $TOKEN" \
--data '{"region":"'"${REGION}"'",
"image":"{{site.data.beta-channel.do-image-path}}",
"size":"'"$SIZE"'",
"user_data": "'"$(cat ~/cloud-config.yaml)"'",
"ssh_keys":["'"$SSH_KEY_ID"'"],
"name":"core-1"}'</pre>
</div>
</div>
<div class="tab-pane active" id="stable">
<div class="channel-info">
<div class="channel-info">
<p>The Stable channel should be used by production clusters. Versions of CoreOS are battle-tested within the Beta and Alpha channels before being promoted. Current version is CoreOS {{site.data.stable-channel.do-version}}.</p>
<a href="https://cloud.digitalocean.com/droplets/new?image=coreos-stable" class="btn btn-default">Launch CoreOS Droplet</a><br/><br/>
<p>Launch via DigitalOcean API by specifying <code>$REGION</code>, <code>$SIZE</code> and <code>$SSH_KEY_ID</code>:</p>
<pre>curl --request POST "https://api.digitalocean.com/v2/droplets" \
--header "Content-Type: application/json" \
--header "Authorization: Bearer $TOKEN" \
--data '{"region":"'"${REGION}"'",
"image":"{{site.data.stable-channel.do-image-path}}",
"size":"'"$SIZE"'",
"user_data": "'"$(cat ~/cloud-config.yaml)"'",
"ssh_keys":["'"$SSH_KEY_ID"'"],
"name":"core-1"}'</pre>
</div>
</div>
</div>
</div>
</div>
[update-docs]: {{site.url}}/using-coreos/updates
[reboot-docs]: {{site.url}}/docs/cluster-management/debugging/prevent-reboot-after-update
[release-notes]: {{site.url}}/releases
## Cloud-Config
CoreOS allows you to configure machine parameters, launch systemd units on
startup, and more via cloud-config. Jump over to the [docs to learn about the
supported features][cloud-config-docs]. Cloud-config is intended to bring up a
cluster of machines into a minimal useful state and ideally shouldn't be used
to configure anything that isn't standard across many hosts. Once a droplet is
created on DigitalOcean, the cloud-config cannot be modified.
You can provide raw cloud-config data to CoreOS via the DigitalOcean web
console or <a href="#via-the-api">via the DigitalOcean API</a>.
The most common cloud-config for DigitalOcean looks like:
```yaml
#cloud-config
coreos:
etcd:
# generate a new token for each unique cluster from https://discovery.etcd.io/new?size=3
    # specify the initial size of your cluster with ?size=X
discovery: https://discovery.etcd.io/<token>
# multi-region deployments, multi-cloud deployments, and droplets without
# private networking need to use $public_ipv4
addr: $private_ipv4:4001
peer-addr: $private_ipv4:7001
units:
- name: etcd.service
command: start
- name: fleet.service
command: start
```
The `$private_ipv4` and `$public_ipv4` substitution variables are fully
supported in cloud-config on DigitalOcean. In order for `$private_ipv4` to be
populated, the droplet must have private networking enabled.
[do-cloud-config]: https://developers.digitalocean.com/#droplets
[cloud-config-docs]: {{site.url}}/docs/cluster-management/setup/cloudinit-cloud-config
### Adding More Machines
To add more instances to the cluster, just launch more with the same
cloud-config. New instances will join the cluster regardless of region.
## SSH to your Droplets
CoreOS is set up to be a little more secure than other DigitalOcean images. By default, it uses the core user instead of root and doesn't use a password for authentication. You'll need to add one or more SSH keys via the web console or add keys/passwords via your cloud-config in order to log in.
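For example, a minimal cloud-config fragment that authorizes an SSH key for the default `core` user looks like this (the key string below is just a placeholder):

```yaml
#cloud-config

ssh_authorized_keys:
  - ssh-rsa AAAAB3NzaC1yc2EAAAADAQAB... [email protected]
```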
To connect to a droplet after it's created, run:
```sh
ssh core@<ip address>
```
Optionally, you may want to [configure your ssh-agent]({{site.url}}/docs/launching-containers/launching/fleet-using-the-client/#remote-fleet-access) to more easily run [fleet commands]({{site.url}}/docs/launching-containers/launching/launching-containers-fleet/).
## Launching Droplets
### Via the API
For starters, generate a [Personal Access Token][do-token-settings] and save it
in an environment variable:
```sh
read TOKEN
# Enter your Personal Access Token
```
Upload your SSH key via [DigitalOcean's API][do-keys-docs] or the web console.
Retrieve the SSH key ID via the ["list all keys"][do-list-keys-docs] method:
```sh
curl --request GET "https://api.digitalocean.com/v2/account/keys" \
--header "Authorization: Bearer $TOKEN"
```
Save the key ID from the previous command in an environment variable:
```sh
read SSH_KEY_ID
# Enter your SSH key ID
```
Create a 512MB droplet with private networking in NYC3 from the CoreOS Alpha
image:
```sh
curl --request POST "https://api.digitalocean.com/v2/droplets" \
--header "Content-Type: application/json" \
--header "Authorization: Bearer $TOKEN" \
--data '{
"region":"nyc3",
"image":"{{site.data.alpha-channel.do-image-path}}",
"size":"512mb",
"name":"core-1",
"private_networking":true,
"ssh_keys":['$SSH_KEY_ID'],
"user_data": "'"$(cat cloud-config.yaml | sed 's/"/\\"/g')"'"
}'
```
For more details, check out [DigitalOcean's API documentation][do-api-docs].
[do-api-docs]: https://developers.digitalocean.com/#droplets
[do-keys-docs]: https://developers.digitalocean.com/#keys
[do-list-keys-docs]: https://developers.digitalocean.com/#list-all-keys
[do-token-settings]: https://cloud.digitalocean.com/settings/applications
### Via the Web Console
1. Open the "<a href="https://cloud.digitalocean.com/droplets/new?image=coreos-alpha">new droplet</a>"
page in the web console.
2. Give the machine a hostname, select the size, and choose a region.<br/><br/>
<div class="row">
<div class="col-lg-8 col-md-10 col-sm-8 col-xs-12">
<img src="size.png" class="screenshot" />
<div class="caption">Choosing a CoreOS channel</div>
</div>
</div>
3. Enable User Data and add your cloud-config in the text box.<br /><br />
<div class="row">
<div class="col-lg-8 col-md-10 col-sm-8 col-xs-12">
<img src="settings.png" class="screenshot" />
<div class="caption">Droplet settings for networking and cloud-config</div>
</div>
</div>
4. Choose your <a href="#choosing-a-channel">preferred channel</a> of CoreOS.<br/><br/>
<div class="row">
<div class="col-lg-8 col-md-10 col-sm-8 col-xs-12">
<img src="image.png" class="screenshot" />
<div class="caption">Choosing a CoreOS channel</div>
</div>
</div>
5. Select your SSH keys.
Note that DigitalOcean is not able to inject a root password into CoreOS images
like it does with other images. You'll need to add your keys via the web
console or add keys or passwords via your cloud-config in order to log in.
## Using CoreOS
Now that you have a machine booted, it is time to play around.
Check out the [CoreOS Quickstart][quick-start] guide or dig into
[more specific topics][docs].
[quick-start]: {{site.url}}/docs/quickstart
[docs]: {{site.url}}/docs
| 41.252137 | 374 | 0.686833 | eng_Latn | 0.739269 |
471ce6b6de372084ea996ffa2752c99e658a17ee | 15,489 | md | Markdown | articles/azure-arc/kubernetes/connect-cluster.md | PowershellScripts/azure-docs.pl-pl | 302fc726e5e83df2c5307a6253eee288007aff60 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/azure-arc/kubernetes/connect-cluster.md | PowershellScripts/azure-docs.pl-pl | 302fc726e5e83df2c5307a6253eee288007aff60 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/azure-arc/kubernetes/connect-cluster.md | PowershellScripts/azure-docs.pl-pl | 302fc726e5e83df2c5307a6253eee288007aff60 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: Nawiązywanie połączenia z klastrem Kubernetes z włączoną usługą Azure Arc (wersja zapoznawcza)
services: azure-arc
ms.service: azure-arc
ms.date: 05/19/2020
ms.topic: article
author: mlearned
ms.author: mlearned
description: Łączenie klastra Kubernetes z obsługą usługi Azure ARC przy użyciu usługi Azure Arc
keywords: Kubernetes, łuk, Azure, K8s, kontenery
ms.custom: references_regions, devx-track-azurecli
ms.openlocfilehash: 131ec014c9ac016a682bc4928f74910a3405a5da
ms.sourcegitcommit: 0aec60c088f1dcb0f89eaad5faf5f2c815e53bf8
ms.translationtype: MT
ms.contentlocale: pl-PL
ms.lasthandoff: 01/14/2021
ms.locfileid: "98186009"
---
# <a name="connect-an-azure-arc-enabled-kubernetes-cluster-preview"></a>Nawiązywanie połączenia z klastrem Kubernetes z włączoną usługą Azure Arc (wersja zapoznawcza)
W tym dokumencie opisano proces łączenia z klastrem Kubernetes, który jest certyfikowany w chmurze CNCF, na przykład AKS-Engine na platformie Azure, AKS-Engine on Azure Stack Hub, GKE, EKS i VMware vSphere do usługi Azure Arc.
## <a name="before-you-begin"></a>Przed rozpoczęciem
Sprawdź, czy masz gotowe do spełnienia następujące wymagania:
* Klaster Kubernetes, który jest uruchomiony. Jeśli nie masz istniejącego klastra Kubernetes, możesz użyć jednej z następujących przewodników, aby utworzyć klaster testowy:
* Tworzenie klastra Kubernetes przy użyciu [Kubernetes w Docker (rodzaj)](https://kind.sigs.k8s.io/)
* Tworzenie klastra Kubernetes przy użyciu platformy Docker dla [komputerów Mac](https://docs.docker.com/docker-for-mac/#kubernetes) lub [Windows](https://docs.docker.com/docker-for-windows/#kubernetes)
* Musisz mieć plik kubeconfig, aby uzyskać dostęp do roli klastra i klastra w klastrze w celu wdrożenia agentów Kubernetes z włączonym łukiem.
* Nazwa główna użytkownika lub usługi używana z `az login` `az connectedk8s connect` poleceniami i musi mieć uprawnienia "read" i "Write" dla typu zasobu "Microsoft. Kubernetes/connectedclusters". Rola "klaster Kubernetes — dołączanie do usługi Azure ARC" ma te uprawnienia i może służyć do przypisywania ról dla użytkownika lub nazwy głównej usługi.
* Do dołączania klastra przy użyciu rozszerzenia connectedk8s jest wymagany Helm 3. [Zainstaluj najnowszą wersję programu Helm 3](https://helm.sh/docs/intro/install) , aby spełnić to wymaganie.
* Interfejs wiersza polecenia platformy Azure w wersji 2.15 + jest wymagany do zainstalowania rozszerzeń interfejsu wiersza polecenia Kubernetes z funkcją Azure Arc. [Zainstaluj interfejs wiersza polecenia platformy Azure](/cli/azure/install-azure-cli?view=azure-cli-latest&preserve-view=true) lub zaktualizuj do najnowszej wersji, aby upewnić się, że masz interfejs wiersza polecenia platformy Azure w wersji 2.15 +.
* Zainstaluj rozszerzenia Kubernetes CLI z włączonym łukiem:
Zainstaluj `connectedk8s` rozszerzenie, które ułatwia łączenie klastrów Kubernetes z platformą Azure:
```azurecli
az extension add --name connectedk8s
```
Zainstaluj `k8sconfiguration` rozszerzenie:
```azurecli
az extension add --name k8sconfiguration
```
Jeśli chcesz zaktualizować te rozszerzenia później, uruchom następujące polecenia:
```azurecli
az extension update --name connectedk8s
az extension update --name k8sconfiguration
```
## <a name="supported-regions"></a>Obsługiwane regiony
* East US
* West Europe
## <a name="network-requirements"></a>Wymagania dotyczące sieci
Agenci usługi Azure Arc potrzebują następujących protokołów/portów/wychodzących adresów URL do działania.
* TCP na porcie 443--> `https://:443`
* TCP na porcie 9418--> `git://:9418`
| Punkt końcowy (DNS) | Opis |
| ------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------- |
| `https://management.azure.com` | Wymagane przez agenta do łączenia się z platformą Azure i rejestrowania klastra |
| `https://eastus.dp.kubernetesconfiguration.azure.com`, `https://westeurope.dp.kubernetesconfiguration.azure.com` | Punkt końcowy płaszczyzny danych dla agenta do wypychania stanu i pobrania informacji o konfiguracji |
| `https://login.microsoftonline.com` | Wymagane do pobierania i aktualizowania tokenów Azure Resource Manager |
| `https://mcr.microsoft.com` | Wymagane do ściągania obrazów kontenerów dla agentów usługi Azure Arc |
| `https://eus.his.arc.azure.com`, `https://weu.his.arc.azure.com` | Wymagany do ściągania certyfikatów tożsamości zarządzanych przypisanych przez system |
## <a name="register-the-two-providers-for-azure-arc-enabled-kubernetes"></a>Zarejestruj dwóch dostawców z włączoną funkcją Azure Arc Kubernetes:
```console
az provider register --namespace Microsoft.Kubernetes
az provider register --namespace Microsoft.KubernetesConfiguration
```
Rejestracja jest procesem asynchronicznym. Rejestracja może potrwać około 10 minut. Proces rejestracji można monitorować przy użyciu następujących poleceń:
```console
az provider show -n Microsoft.Kubernetes -o table
```
```console
az provider show -n Microsoft.KubernetesConfiguration -o table
```
## <a name="create-a-resource-group"></a>Tworzenie grupy zasobów
Użyj grupy zasobów do przechowywania metadanych klastra.
Najpierw utwórz grupę zasobów w celu przechowywania połączonego zasobu klastra.
```console
az group create --name AzureArcTest -l EastUS -o table
```
**Rozdzielczości**
```console
Location Name
---------- ------------
eastus AzureArcTest
```
## <a name="connect-a-cluster"></a>Łączenie klastra
Następnie będziemy łączyć nasz klaster Kubernetes z platformą Azure. Przepływ pracy dla programu `az connectedk8s connect` jest następujący:
1. Weryfikowanie łączności z klastrem Kubernetes: za pośrednictwem `KUBECONFIG` , `~/.kube/config` lub `--kube-config`
1. Wdróż agentów usługi Azure ARC dla Kubernetes przy użyciu Helm 3 w `azure-arc` przestrzeni nazw
```console
az connectedk8s connect --name AzureArcTest1 --resource-group AzureArcTest
```
**Rozdzielczości**
```console
Command group 'connectedk8s' is in preview. It may be changed/removed in a future release.
Helm release deployment succeeded
{
"aadProfile": {
"clientAppId": "",
"serverAppId": "",
"tenantId": ""
},
"agentPublicKeyCertificate": "...",
"agentVersion": "0.1.0",
"id": "/subscriptions/57ac26cf-a9f0-4908-b300-9a4e9a0fb205/resourceGroups/AzureArcTest/providers/Microsoft.Kubernetes/connectedClusters/AzureArcTest1",
"identity": {
"principalId": null,
"tenantId": null,
"type": "None"
},
"kubernetesVersion": "v1.15.0",
"location": "eastus",
"name": "AzureArcTest1",
"resourceGroup": "AzureArcTest",
"tags": {},
"totalNodeCount": 1,
"type": "Microsoft.Kubernetes/connectedClusters"
}
```
## <a name="verify-connected-cluster"></a>Weryfikuj podłączony klaster
Utwórz listę podłączonych klastrów:
```console
az connectedk8s list -g AzureArcTest -o table
```
**Rozdzielczości**
```console
Command group 'connectedk8s' is in preview. It may be changed/removed in a future release.
Name Location ResourceGroup
------------- ---------- ---------------
AzureArcTest1 eastus AzureArcTest
```
Możesz również wyświetlić ten zasób na [Azure Portal](https://portal.azure.com/). Gdy Portal zostanie otwarty w przeglądarce, przejdź do grupy zasobów i zasobu Kubernetes z włączoną funkcją Azure Arc na podstawie nazwy zasobu i nazwy grupy zasobów użytych wcześniej w `az connectedk8s connect` poleceniu.
> [!NOTE]
> Po dołączeniu klastra trwa około 5 – 10 minut, aby metadane klastra (wersja klastra, wersja agenta, liczba węzłów) były dostępne na stronie Przegląd zasobu Kubernetes w usłudze Azure Arc w Azure Portal.
## <a name="connect-using-an-outbound-proxy-server"></a>Nawiązywanie połączenia przy użyciu serwera proxy wychodzącego
Jeśli klaster znajduje się za wychodzącym serwerem proxy, interfejs wiersza polecenia platformy Azure i agenci Kubernetes muszą kierować żądania za pośrednictwem serwera proxy wychodzącego. Następująca konfiguracja umożliwia:
1. Sprawdź wersję `connectedk8s` rozszerzenia zainstalowanego na komputerze, uruchamiając następujące polecenie:
```console
az -v
```
Wymagana `connectedk8s` wersja rozszerzenia >= 0.2.5, aby skonfigurować agentów z serwerem proxy wychodzącego. Jeśli masz wersję < 0.2.3 na maszynie, wykonaj [kroki aktualizacji](#before-you-begin) , aby pobrać najnowszą wersję rozszerzenia na komputerze.
2. Ustaw zmienne środowiskowe, które są używane w interfejsie wiersza polecenia platformy Azure do korzystania z serwera proxy wychodzącego:
* Jeśli używasz bash, uruchom następujące polecenie z odpowiednimi wartościami:
```bash
export HTTP_PROXY=<proxy-server-ip-address>:<port>
export HTTPS_PROXY=<proxy-server-ip-address>:<port>
export NO_PROXY=<cluster-apiserver-ip-address>:<port>
```
* Jeśli używasz programu PowerShell, uruchom następujące polecenie z odpowiednimi wartościami:
```powershell
$Env:HTTP_PROXY = "<proxy-server-ip-address>:<port>"
$Env:HTTPS_PROXY = "<proxy-server-ip-address>:<port>"
$Env:NO_PROXY = "<cluster-apiserver-ip-address>:<port>"
```
3. Uruchom polecenie Connect z określonymi parametrami serwera proxy:
```console
az connectedk8s connect -n <cluster-name> -g <resource-group> --proxy-https https://<proxy-server-ip-address>:<port> --proxy-http http://<proxy-server-ip-address>:<port> --proxy-skip-range <excludedIP>,<excludedCIDR> --proxy-cert <path-to-cert-file>
```
> [!NOTE]
> 1. Określenie excludedCIDR w obszarze--proxy-Skip-Range jest ważne, aby zapewnić, że komunikacja w klastrze nie jest uszkodzona dla agentów.
> 2. While--proxy-HTTP,--proxy-HTTPS i--proxy-Range-zakres jest oczekiwany w przypadku większości środowisk serwera proxy wychodzącego.--certyfikat proxy jest wymagany tylko wtedy, gdy istnieją zaufane certyfikaty z serwera proxy, które muszą zostać wprowadzone do magazynu zaufanych certyfikatów.
> 3. Powyższa Specyfikacja serwera proxy jest obecnie stosowana tylko dla agentów ARC, a nie dla zasobników strumieni używanych w sourceControlConfiguration. Zespół z włączonym łukiem Kubernetes aktywnie pracuje nad tą funkcją i będzie dostępny wkrótce.
## <a name="azure-arc-agents-for-kubernetes"></a>Agenci Azure ARC dla Kubernetes
Usługa Azure ARC z włączonym Kubernetes wdraża kilka operatorów w `azure-arc` przestrzeni nazw. Te wdrożenia i zasobniki można wyświetlić tutaj:
```console
kubectl -n azure-arc get deployments,pods
```
**Rozdzielczości**
```console
NAME READY UP-TO-DATE AVAILABLE AGE
deployment.apps/cluster-metadata-operator 1/1 1 1 16h
deployment.apps/clusteridentityoperator 1/1 1 1 16h
deployment.apps/config-agent 1/1 1 1 16h
deployment.apps/controller-manager 1/1 1 1 16h
deployment.apps/flux-logs-agent 1/1 1 1 16h
deployment.apps/metrics-agent 1/1 1 1 16h
deployment.apps/resource-sync-agent 1/1 1 1 16h
NAME READY STATUS RESTART AGE
pod/cluster-metadata-operator-7fb54d9986-g785b 2/2 Running 0 16h
pod/clusteridentityoperator-6d6678ffd4-tx8hr 3/3 Running 0 16h
pod/config-agent-544c4669f9-4th92 3/3 Running 0 16h
pod/controller-manager-fddf5c766-ftd96 3/3 Running 0 16h
pod/flux-logs-agent-7c489f57f4-mwqqv 2/2 Running 0 16h
pod/metrics-agent-58b765c8db-n5l7k 2/2 Running 0 16h
pod/resource-sync-agent-5cf85976c7-522p5 3/3 Running 0 16h
```
Usługa Azure ARC z włączonym Kubernetes składa się z kilku agentów (operatorów) uruchomionych w klastrze wdrożonym w `azure-arc` przestrzeni nazw.
* `deployment.apps/config-agent`: Obserwujący połączony klaster dla zasobów konfiguracji kontroli źródła zastosowanych w klastrze i aktualizacji stanu zgodności
* `deployment.apps/controller-manager`: jest operatorem operatorów i organizuje interakcje między składnikami usługi Azure Arc
* `deployment.apps/metrics-agent`: zbiera metryki innych agentów ARC, aby upewnić się, że te agenci wykazują optymalną wydajność
* `deployment.apps/cluster-metadata-operator`: zbiera metadane klastra — wersję klastra, liczbę węzłów i wersję agenta usługi Azure Arc
* `deployment.apps/resource-sync-agent`: synchronizuje powyższe metadane klastra z platformą Azure
* `deployment.apps/clusteridentityoperator`: Usługa Azure ARC z włączonym Kubernetes obecnie obsługuje tożsamość przypisaną do systemu. clusteridentityoperator zachowuje certyfikat tożsamości usługi zarządzanej (MSI) używany przez innych agentów do komunikacji z platformą Azure.
* `deployment.apps/flux-logs-agent`: zbiera dzienniki z operatorów strumienia wdrożonych w ramach konfiguracji kontroli źródła
## <a name="delete-a-connected-cluster"></a>Usuwanie połączonego klastra
Zasób można usunąć `Microsoft.Kubernetes/connectedcluster` przy użyciu interfejsu wiersza polecenia platformy Azure lub Azure Portal.
* **Usuwanie przy użyciu interfejsu wiersza polecenia platformy Azure**: następujące polecenie interfejsu wiersza poleceń platformy Azure może służyć do inicjowania usuwania zasobu Kubernetes z włączoną funkcją Azure Arc.
```console
az connectedk8s delete --name AzureArcTest1 --resource-group AzureArcTest
```
To polecenie usuwa `Microsoft.Kubernetes/connectedCluster` zasób i wszystkie skojarzone `sourcecontrolconfiguration` zasoby na platformie Azure. W interfejsie wiersza polecenia platformy Azure jest używana Dezinstalacja Helm w celu usunięcia agentów uruchomionych w klastrze.
* **Usuwanie na Azure Portal**: usunięcie zasobu Kubernetes z obsługą usługi Azure Arc na Azure Portal powoduje usunięcie `Microsoft.Kubernetes/connectedcluster` zasobu i wszystkich skojarzonych `sourcecontrolconfiguration` zasobów na platformie Azure, ale nie spowoduje usunięcia agentów uruchomionych w klastrze. Aby usunąć agentów uruchomionych w klastrze, uruchom następujące polecenie.
```console
az connectedk8s delete --name AzureArcTest1 --resource-group AzureArcTest
```
## <a name="next-steps"></a>Następne kroki
* [Korzystanie z podejścia GitOps w połączonym klastrze](./use-gitops-connected-cluster.md)
* [Zarządzanie konfiguracją klastra przy użyciu Azure Policy](./use-azure-policy.md)
| 56.119565 | 417 | 0.701466 | pol_Latn | 0.998384 |
471d5474efaf395be8496269a8c1fbb64a6a0978 | 541 | md | Markdown | readme.md | dsbaars/beefy-app | 90ce3d693292c5493fc36a38d364f6ff1243ad7a | [
"MIT"
] | 1 | 2021-03-25T04:36:38.000Z | 2021-03-25T04:36:38.000Z | readme.md | dsbaars/beefy-app | 90ce3d693292c5493fc36a38d364f6ff1243ad7a | [
"MIT"
] | null | null | null | readme.md | dsbaars/beefy-app | 90ce3d693292c5493fc36a38d364f6ff1243ad7a | [
"MIT"
] | null | null | null |
# Beefy Finance (\$BIFI)
## Overview
Official frontend repo of [beefy.finance](https://app.beefy.finance). Multichain yield farming optimizer.
## Env
Required env vars:
```
REACT_APP_NETWORK_ID = 56 // Network ID of BSC
```
## To Run
```
yarn
yarn start
```
The app will be running at http://localhost:3000/
## Contribute
Beefy.Finance exists thanks to its contributors. There are many ways you can participate and help build high quality software. Check out the [contribution guide](CONTRIBUTING.md)!
## License
[MIT](LICENSE)
| 17.451613 | 179 | 0.726433 | eng_Latn | 0.80415 |
471e3e4f37a811d32e6d5089b7380203e8e32041 | 1,421 | md | Markdown | README.md | SamuelOtisi/create_analytical_dataset | c2f86dcaf738e597efdd60e089253321ec0af8ea | [
"Apache-2.0"
] | null | null | null | README.md | SamuelOtisi/create_analytical_dataset | c2f86dcaf738e597efdd60e089253321ec0af8ea | [
"Apache-2.0"
] | null | null | null | README.md | SamuelOtisi/create_analytical_dataset | c2f86dcaf738e597efdd60e089253321ec0af8ea | [
"Apache-2.0"
] | null | null | null |
# Create_analytical_dataset
## **Predictive Analytics for Business nanodegree**
## **Project Description**
Pawdacity is a leading pet store chain in Wyoming with 13 stores throughout the state. This year, Pawdacity would like to expand and open a 14th store. The manager has asked us to perform an analysis to recommend a city for Pawdacity’s newest store, based on predicted yearly sales.
The first step in predicting yearly sales is to format and blend together data from different datasets and deal with outliers.
The manager has given the following information to work with:
- The monthly sales data for all of the Pawdacity stores for the year 2010.
- NAICS data on the most current sales of all competitor stores where total sales is equal to 12 months of sales.
- A partially parsed data file that can be used for population numbers.
- Demographic data (Households with individuals under 18, Land Area, Population Density, and Total Families) for each city and county in the state of Wyoming. For people who are unfamiliar with the US city system, a state contains counties and counties contain one or more cities.
**Note**
All data and resources (including the Alteryx workflow) for this project have been included in the p2-files in this repository; feel free to clone and use them.
Don't forget to connect with me on [LinkedIn](https://www.linkedin.com/in/samuel-otisi/) for any help with this project.
| 78.944444 | 282 | 0.793807 | eng_Latn | 0.999704 |
471e48069ebdcf400b6ecbdd0192552237214d28 | 2,044 | md | Markdown | docs/relational-databases/system-functions/filestream-and-filetable-functions-transact-sql.md | zelanko/sql-docs.es-es | e8de33fb5b7b566192c5fd38f7d922aca7fa3840 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/relational-databases/system-functions/filestream-and-filetable-functions-transact-sql.md | zelanko/sql-docs.es-es | e8de33fb5b7b566192c5fd38f7d922aca7fa3840 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/relational-databases/system-functions/filestream-and-filetable-functions-transact-sql.md | zelanko/sql-docs.es-es | e8de33fb5b7b566192c5fd38f7d922aca7fa3840 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
description: Funciones FILESTREAM y FileTable (Transact-SQL)
title: Funciones FileStream y FileTable (Transact-SQL) | Microsoft Docs
ms.custom: ''
ms.date: 03/06/2017
ms.prod: sql
ms.prod_service: database-engine
ms.reviewer: ''
ms.technology: system-objects
ms.topic: language-reference
dev_langs:
- TSQL
helpviewer_keywords:
- FileTables [SQL Server], system functions
ms.assetid: 71f729df-d340-4cf9-9a6d-305fcb39b009
author: rothja
ms.author: jroth
ms.openlocfilehash: 0ba0d5777ad63262dda102f0f7be1f2269ba0be9
ms.sourcegitcommit: e700497f962e4c2274df16d9e651059b42ff1a10
ms.translationtype: MT
ms.contentlocale: es-ES
ms.lasthandoff: 08/17/2020
ms.locfileid: "88481812"
---
# <a name="filestream-and-filetable-functions-transact-sql"></a>Funciones FILESTREAM y FileTable (Transact-SQL)
[!INCLUDE [SQL Server](../../includes/applies-to-version/sqlserver.md)]
En esta sección se describen las funciones del sistema relacionadas con las características FILESTREAM y FileTable.
[FileTableRootPath (Transact-SQL)](../../relational-databases/system-functions/filetablerootpath-transact-sql.md)
Devuelve la ruta de acceso UNC del nivel raíz de un objeto FileTable específico o de la base de datos actual.
[GetFileNamespacePath (Transact-SQL)](../../relational-databases/system-functions/getfilenamespacepath-transact-sql.md)
Devuelve la ruta de acceso UNC para un archivo o directorio de un objeto FileTable.
[GetPathLocator (Transact-SQL)](../../relational-databases/system-functions/getpathlocator-transact-sql.md)
Devuelve el valor del identificador del localizador de ruta de acceso para el archivo o directorio especificado en un objeto FileTable.
[PathName (Transact-SQL)](../../relational-databases/system-functions/pathname-transact-sql.md)
Devuelve la ruta de acceso de un objeto binario grande (BLOB) FILESTREAM.
Para más información sobre FileTables, vea [FileTables (SQL Server)](../../relational-databases/blob/filetables-sql-server.md).
| 45.422222 | 138 | 0.776419 | spa_Latn | 0.435911 |
471e9897e88ded57c52fc9b5b92c4259e71df422 | 6,192 | md | Markdown | _posts/2019-06-30-Download-sister-aimee-the-life-of-aimee-semple-mcpherson.md | Jobby-Kjhy/27 | ea48bae2a083b6de2c3f665443f18b1c8f241440 | [
"MIT"
] | null | null | null | _posts/2019-06-30-Download-sister-aimee-the-life-of-aimee-semple-mcpherson.md | Jobby-Kjhy/27 | ea48bae2a083b6de2c3f665443f18b1c8f241440 | [
"MIT"
] | null | null | null | _posts/2019-06-30-Download-sister-aimee-the-life-of-aimee-semple-mcpherson.md | Jobby-Kjhy/27 | ea48bae2a083b6de2c3f665443f18b1c8f241440 | [
"MIT"
] | null | null | null | ---
layout: post
comments: true
categories: Other
---
## Download Sister Aimee: The Life of Aimee Semple McPherson book
" No meanness is evident in this tall, do not appear now to be found in any large numbers on the to see if the names were in alphabetical order, with the _Vega's_ weak steam-power. important and hitherto little known manuscript of it from the middle you can't move quickly. The thing man was walking slime, you would never by God, I have to get back in there, anyhow. sink, and he would know a lie for what it was, the more I know This didn't work for Junior, in European fashion. Thought something convenient for you?" suffered nothing, that by a mercantile porch-squatter, is harder than his mother warned him that it could be and much harder than ever it appears to Subject: Promotion to Fleet Captain on the Tschorna river, they wear, 364 old Sinsemilla would do in a similar situation, as well! Her whole body convulsed with the effort to tear loose. This was the final sieve, fell, he recognized his vulnerability. com? "Being lame, rocky masses. There was nobody but He wrote down the coordinates for the plane crash in which his daughter and her husband had died, I will loose thee, I remember. " finds it necessary that I accompany him back to the mouth sister aimee the life of aimee semple mcpherson YOUNG OF THE GREENLAND SEAL? Bridges and high ledges. "Being lame, not a supernatural hush. " "iLoco mocoso!" "You have a telephone call," it said confidingly. Shadows flourished. Unfortunately, Agnes realized that this was not a prayer for the soul of a my existence. " in contact with Samoyeds, which, delight moved her and she called to mind her lord Er Reshid and wept sore, but it should not take long once he was there; a half-month to go, poured tea, excessive for a woman. well, she might easily have broken her neck, who abode in the Mountain Caf. Her wrists were too tightly bound to allow her to hold a lighter in such a way On the 19th August we continued to sail and steam along the coast, claiming that development lad on Chiron had already been deemed up for grabs "by virtue of natural precedent"; they disagreed with each other about prices and tariffs. You're pretty safe up here. Most were overweight. She recognized the strategy. "You won't get such a bad headache. RABAUT, with thy permission, and could with the help of seen movies about serial killers? Hagg way past even state or local authorities. " "All wrong. Close to the so she reached across her body with her left hand, that's all it is, actually. profit, if thou slay her, it's an 5, surveyed, 79; ii, making in a few hours a catch which would be sufficient for their remained in the second container. From here, no colony has much of a chance. _Nrok_, and that. the inhabitants were often more or less in conflict with the "Make the light," she said. that no trace of it was left. "Baby, yet he was instantly certain that this was no coincidental look-alike. our course along the west coast of Novaya Zemlya towards Yugor water at an angle, I prefer some formality. In order to fall in with this landmark Johannesen sailed of foraging animals, and the Italian edition printed in 1550. More than that, in the presence of sister aimee the life of aimee semple mcpherson Majesty the Queen of Much greater inconvenience than from cold did we in the cabins suffer the word love with a straight face! A sponge, would be hailed with great delight both in England and in "How?" 
far as I know, battered young face, spit-in-the-eye, and generally lending Curtis quickly feels his way past the sink, but he wasn't able to relent, "Last Tuesday night, he knew not what they were; so he strained his sight and seeing horsemen coming and troops and footmen. Sister aimee the life of aimee semple mcpherson great fire in the tower Must've been bad shit. The likelihood of his being So they all arose and repaired to El Anca, and something in her face made him nervous! Selene, making in a few hours a catch which would be sufficient for their remained in the second container, 408; Thomas td, forming a circle with an inner diameter manufactured by Fleetwood. "Easy. It's a benefit that comes with my job Gaulitz nodded emphatically. not immediately relieved. heart only was eaten, seeing only the track before them in the dim silvery glow of extraordinary rarity. "So, she sister aimee the life of aimee semple mcpherson was having Another week of unrewarded job-hunting. Half the natural size. So sister aimee the life of aimee semple mcpherson she's not a that he will be wounded with its sharp beak? Maria, in it, but not limited to, thoroughly salting the seat of his pants, not from below. " isn't saved, she acknowledged that she would have preferred sister aimee the life of aimee semple mcpherson account it is incumbent on me to begin by giving a narrative of the bouncing off him. Information about the Project Gutenberg Literary Archive On second thought-no. " paramedic's hands tightly enough to make him wince. " To Micky, and hour by hour he blends better with the and had undergone subsequent tendon surgery, but from the killing harm. Wanting praise, but Birdie didn't mind, that the talk may be made an end of. " calculated the height of some of the sister aimee the life of aimee semple mcpherson at from 1200 "The baby's small but healthy. let me think. I don't know. This wasn't a religious program, _snoesparfven_ or _snoelaerkan_, Earl aimed the gadget at various points in the "Sounds good," Driscoll said? "The more I hear, the matter of necessary care is genetically irrelevant The fertilized egg is already a separate organism with its genetic characteristics fixed and unique. Following the tougher and of inferior quality; the eggs, another man of power, but all the responsibilities that mattered to him had been incriminating mistake. Meanwhile, and there met him another with a load of wood, pipes. Bathrooms. Being old, was not There was not much to be got from the people his men brought to him. Some prejudice, cranes his neck, actually, perhaps four, came sister aimee the life of aimee semple mcpherson contact with the population of the Chukch вBarry N. | 688 | 6,071 | 0.788437 | eng_Latn | 0.999928 |
471ecd5b59dd8ee882aad8add481433a0dd42479 | 2,684 | md | Markdown | controls/asyncupload/server-side-programming/onfileuploaded.md | mohdjuwaiser/ajax-docs | 56376e3c2d405a948ecd88828c75fa73d3602422 | [
"MIT"
] | null | null | null | controls/asyncupload/server-side-programming/onfileuploaded.md | mohdjuwaiser/ajax-docs | 56376e3c2d405a948ecd88828c75fa73d3602422 | [
"MIT"
] | null | null | null | controls/asyncupload/server-side-programming/onfileuploaded.md | mohdjuwaiser/ajax-docs | 56376e3c2d405a948ecd88828c75fa73d3602422 | [
"MIT"
] | null | null | null | ---
title: OnFileUploaded
page_title: OnFileUploaded | RadAsyncUpload for ASP.NET AJAX Documentation
description: OnFileUploaded
slug: asyncupload/server-side-programming/onfileuploaded
tags: onfileuploaded
published: True
position: 2
---
# OnFileUploaded
##
The server-side **FileUploaded** event occurs after a file is uploaded and a postback is triggered.
>note When the [PostbackTriggers]({%slug asyncupload/how-to/how-to-persist-uploaded-files%}) property of the **RadAsyncUpload** is set to a particular button, the **FileUploaded** event will fire only when that button is clicked.
>
The **FileUploaded** event handler receives two arguments:
* The **RadAsyncUpload** control that initiated the file upload. This argument is of type object, but can be cast to the **RadAsyncUpload** type.
* A **FileUploadedEventArgs** object that has three properties:
* **IsValid** - Allows you to specify whether the uploaded file is valid. If it is, **RadAsyncUpload** will automatically save it to the TargetFolder, if one is set.
* **File** - Provides a reference to the uploaded file.
* **UploadResult** - A container object containing information sent from the **RadAsyncUpload** file handler. For additional information, visit [How to extend the RadAsyncUpload handler.]({%slug asyncupload/how-to/how-to-extend-the-radasyncupload-handler%})
The example below demonstrates how to prepare the previously uploaded file to send as an e-mail attachment:
````C#
void RadAsyncUpload1_FileUploaded(object sender, FileUploadedEventArgs e)
{
e.IsValid = !CheckUploadedFileValidity();
if (e.IsValid)
{
byte[] buffer = new byte[e.File.ContentLength];
using (Stream str = e.File.InputStream)
{
str.Read(buffer, 0, e.File.ContentLength);
var attachment = createAttachment(buffer);
// more code
}
}
}
````
````VB.NET
Private Sub RadAsyncUpload1_FileUploaded(ByVal sender As Object, ByVal e As FileUploadedEventArgs)
e.IsValid = Not CheckUploadedFileValidity()
If e.IsValid Then
Dim buffer As Byte() = New Byte(e.File.ContentLength - 1) {}
Using str As Stream = e.File.InputStream
str.Read(buffer, 0, e.File.ContentLength)
' more code
Dim attachment = createAttachment(buffer)
End Using
End If
End Sub
````
## See Also
[Getting Started]({%slug asyncupload/getting-started%})
[How to extend the RadAsyncUpload handler]({%slug asyncupload/how-to/how-to-extend-the-radasyncupload-handler%})
[Server-Side Events]({%slug asyncupload/server-side-programming/events%})
| 37.277778 | 258 | 0.701937 | eng_Latn | 0.86037 |
471ed5d535899ef5bf914ebff6d2e89d0dadfc3c | 1,290 | md | Markdown | _posts/2017-06-01-first-post.md | samfcmc/kick-ass-dev | b6204eda9eb0bbb46590a13a23b74ef2c68169b8 | [
"MIT"
] | null | null | null | _posts/2017-06-01-first-post.md | samfcmc/kick-ass-dev | b6204eda9eb0bbb46590a13a23b74ef2c68169b8 | [
"MIT"
] | null | null | null | _posts/2017-06-01-first-post.md | samfcmc/kick-ass-dev | b6204eda9eb0bbb46590a13a23b74ef2c68169b8 | [
"MIT"
] | null | null | null | ---
layout: post
title: "The First Post"
date: 2017-06-01 19:30:10 +0100
categories: general
tags: [Random]
thumbnail: developers_developers_developers.png
---
Hi guys!
My name is Samuel and I am a Portuguese guy.
This is the first post in my new project, which is this blog.
I have a full-time job as a `software developer` and I have loved technology since I was a little kid.
When I grew up I decided to go to college to learn Software Engineering.
In the last semester in college I became a teaching assistant in Software Engineering classes.
Yes, I was a teacher and a student at the same time!
It was really great!
Now I have a full-time job and I miss those days when I was helping others achieve success.
I decided to start this blog to share what I know and hopefully `help you to become a kick ass developer`.
I am going to try to write about interesting stuff.
Maybe you already know about something I write about, but this blog is also intended to help beginners.
I am going to try to write small posts instead of big walls of text so you can read at least one post in a matter of minutes and learn something.
I hope you guys like it.
This is just the beginning.
All developers out there, or developer wannabes...
Stay tuned :)
{% include youtube.html video="Vhh_GeBPOhs" %}
| 41.612903 | 145 | 0.769767 | eng_Latn | 0.999955 |
471f02162aa84eb5d1a9d798c7160b840a9c9bb1 | 20,661 | md | Markdown | articles/vmware-cloudsimple/learn-private-cloud-permissions.md | Myhostings/azure-docs.tr-tr | 536eaf3b454f181f4948041d5c127e5d3c6c92cc | [
"CC-BY-4.0",
"MIT"
] | 16 | 2017-08-28T08:29:36.000Z | 2022-01-02T16:46:30.000Z | articles/vmware-cloudsimple/learn-private-cloud-permissions.md | Ahmetmaman/azure-docs.tr-tr | 536eaf3b454f181f4948041d5c127e5d3c6c92cc | [
"CC-BY-4.0",
"MIT"
] | 470 | 2017-11-11T20:59:16.000Z | 2021-04-10T17:06:28.000Z | articles/vmware-cloudsimple/learn-private-cloud-permissions.md | Ahmetmaman/azure-docs.tr-tr | 536eaf3b454f181f4948041d5c127e5d3c6c92cc | [
"CC-BY-4.0",
"MIT"
] | 25 | 2017-11-11T19:39:08.000Z | 2022-03-30T13:47:56.000Z | ---
title: CloudSimple-özel bulut izin modeli tarafından Azure VMware çözümü
description: CloudSimple özel bulut izin modelini, grupları ve kategorilerini açıklar
author: Ajayan1008
ms.author: v-hborys
ms.date: 08/16/2019
ms.topic: article
ms.service: azure-vmware-cloudsimple
ms.reviewer: cynthn
manager: dikamath
ms.openlocfilehash: 1c8cfeda008955006f2fbad1df58c8047bd36541
ms.sourcegitcommit: f28ebb95ae9aaaff3f87d8388a09b41e0b3445b5
ms.translationtype: MT
ms.contentlocale: tr-TR
ms.lasthandoff: 03/29/2021
ms.locfileid: "97898054"
---
# <a name="cloudsimple-private-cloud-permission-model-of-vmware-vcenter"></a>CloudSimple özel bulut izin modeli VMware vCenter
CloudSimple, özel bulut ortamına tam yönetici erişimini korur. Her CloudSimple müşterisine, ortamlarında sanal makineleri dağıtabilmek ve yönetmek için yeterli yönetim ayrıcalıklarına sahip olur. Gerekirse, yönetim işlevleri gerçekleştirmek için ayrıcalıklarınızın geçici olarak ilerletiden olabilirsiniz.
## <a name="cloud-owner"></a>Bulut sahibi
Özel bir bulut oluştururken, özel buluttaki nesneleri yönetmek için **bulut sahibi rol** erişimi Ile vCenter tek Sign-On etki alanında bir **cloudowner** kullanıcısı oluşturulur. Bu Kullanıcı ayrıca ek [vCenter kimlik kaynakları](set-vcenter-identity.md)ve diğer kullanıcıları özel bulut vCenter 'a da ayarlayabilir.
> [!NOTE]
> CloudSimple özel bulut vCenter 'niz için varsayılan kullanıcı, [email protected] özel bir bulut oluşturulduğunda oluşturulur.
## <a name="user-groups"></a>Kullanıcı Grupları
**Bulut sahibi grubu** adlı bir grup, özel bulutun dağıtımı sırasında oluşturulur. Bu gruptaki kullanıcılar, özel buluttaki vSphere ortamının çeşitli kısımlarını yönetebilir. Bu gruba **bulut sahibi rol** ayrıcalıkları otomatik olarak verilir ve **cloudowner** kullanıcısı bu grubun bir üyesi olarak eklenir. CloudSimple, yönetim kolaylığı için sınırlı ayrıcalıklara sahip ek gruplar oluşturur. Bu önceden oluşturulmuş gruplara herhangi bir kullanıcı ekleyebilirsiniz ve aşağıda tanımlanan ayrıcalıklar gruplardaki kullanıcılara otomatik olarak atanır.
### <a name="pre-created-groups"></a>Önceden oluşturulmuş gruplar
| Grup Adı | Amaç | Rol |
| -------- | ------- | ------ |
| Bulut sahibi grubu | Bu grubun üyeleri özel bulut vCenter üzerinde yönetici ayrıcalıklarına sahiptir | [Bulut sahibi-rol](#cloud-owner-role) |
| Bulut-genel-küme-Yönetici-Grup | Bu grubun üyeleri, özel bulut vCenter kümesinde yönetim ayrıcalıklarına sahiptir | [Bulut-küme-yönetici-rol](#cloud-cluster-admin-role) |
| Bulut-genel-depolama-Yönetici-Grup | Bu grubun üyeleri, özel bulut vCenter üzerinde depolamayı yönetebilir | [Bulut-depolama-yönetici-rol](#cloud-storage-admin-role) |
| Bulut-genel-ağ-yönetici-Grup | Bu grubun üyeleri, özel bulut vCenter üzerinde ağ ve dağıtılmış bağlantı noktası gruplarını yönetebilir | [Bulut-ağ-yönetici-rol](#cloud-network-admin-role) |
| Bulut-genel-VM-Yönetici-Grup | Bu grubun üyeleri, özel bulut vCenter üzerinde sanal makineleri yönetebilir | [Bulut-VM-yönetici-rol](#cloud-vm-admin-role) |
Bireysel kullanıcılara özel bulutu yönetme izinleri vermek için, Kullanıcı hesapları oluşturun ve uygun gruplara ekleyin.
> [!CAUTION]
> Yeni kullanıcılar yalnızca *bulut sahibi grubu*, *bulut-genel-küme-yönetici-grubu*, *bulut-genel-depolama-yönetici-grubu*, bulut-genel- *Ağ-Yönetici-Grup* veya *bulut-genel-VM-yönetici grubu* için eklenmelidir. *Yöneticiler* grubuna eklenen kullanıcılar otomatik olarak kaldırılacaktır. Yalnızca hizmet hesaplarının *Yöneticiler* grubuna eklenmesi gerekir ve hizmet hesapları vSphere Web Kullanıcı arabiriminde oturum açmak için kullanılmamalıdır.
## <a name="list-of-vcenter-privileges-for-default-roles"></a>Varsayılan roller için vCenter ayrıcalıklarının listesi
### <a name="cloud-owner-role"></a>Bulut sahibi-rol
| **Kategori** | **Privilege** |
|----------|-----------|
| **Alarmlar** | Alarmı kabul et <br> Alarm oluştur <br> Alarm eylemini devre dışı bırak <br> Uyarıyı Değiştir <br> Alarmı kaldır <br> Alarm durumunu ayarla |
| **İzinler** | Değiştirme izni |
| **İçerik Kitaplığı** | Kitaplık öğesi Ekle <br> Yerel Kitaplık oluştur <br> Abone olunan Kitaplık oluştur <br> Kitaplık öğesini Sil <br> Yerel kitaplığı Sil <br> Abone olunan kitaplığı Sil <br> Dosyaları indirme <br> Kitaplık öğesini çıkar <br> Abone olunan kitaplığı çıkar <br> Depolama alanını içeri aktar <br> Araştırma aboneliği bilgileri <br> Depolama alanını oku <br> Kitaplık öğesini Eşitle <br> Abone olunan kitaplığı Eşitle <br> İç denetim yazın <br> Yapılandırma ayarlarını Güncelleştir <br> Güncelleştirme dosyaları <br> Kitaplığı Güncelleştir <br> Kitaplık öğesini Güncelleştir <br> Yerel kitaplığı Güncelleştir <br> Abone olunan kitaplığı Güncelleştir <br> Yapılandırma ayarlarını görüntüle |
| **Şifreleme işlemleri** | Disk Ekle <br> Kopyalama <br> Şifre Çözme <br> Doğrudan Erişim <br> Şifreleme <br> Yeni şifreleme <br> KMS 'yi yönetme <br> Şifreleme ilkelerini yönetme <br> Anahtarları yönetme <br> Geçiş <br> Yeniden Crypto <br> VM 'yi kaydetme <br> Konağı Kaydet |
| **dvPort grubu** | Oluştur <br> Sil <br> Değiştir <br> İlke işlemi <br> Kapsam işlemi |
| **Datastore** | Allocate space <br> Browse datastore <br> Configure datastore <br> Low level file operations <br> Move datastore <br> Remove datastore <br> Remove file <br> Rename datastore <br> Update virtual machine files <br> Update virtual machine metadata |
| **ESX Agent Manager** | Config <br> Modify <br> View |
| **Extension** | Register extension <br> Unregister extension <br> Update extension |
| **External stats provider** | Register <br> Unregister <br> Update |
| **Folder** | Create folder <br> Delete folder <br> Move folder <br> Rename folder |
| **Global** | Cancel task <br> Capacity planning <br> Diagnostics <br> Disable methods <br> Enable methods <br> Global tag <br> Health <br> Licenses <br> Log event <br> Manage custom attributes <br> Proxy <br> Script action <br> Service managers <br> Set custom attribute <br> System tag |
| **Health update provider** | Register <br> Unregister <br> Update |
| **Host > Configuration** | Storage partition configuration |
| **Host > Inventory** | Modify cluster |
| **vSphere Tagging** | Assign or Unassign vSphere Tag <br> Create vSphere Tag <br> Create vSphere Tag Category <br> Delete vSphere Tag <br> Delete vSphere Tag Category <br> Edit vSphere Tag <br> Edit vSphere Tag Category <br> Modify UsedBy Field For Category <br> Modify UsedBy Field For Tag |
| **Network** | Assign network <br> Configure <br> Move network <br> Remove |
| **Performance** | Modify intervals |
| **Host profile** | View |
| **Resource** | Apply recommendation <br> Assign vApp to resource pool <br> Assign virtual machine to resource pool <br> Create resource pool <br> Migrate powered off virtual machine <br> Migrate powered on virtual machine <br> Modify resource pool <br> Move resource pool <br> Query vMotion <br> Remove resource pool <br> Rename resource pool |
| **Scheduled task** | Create tasks <br> Modify task <br> Remove task <br> Run task |
| **Sessions** | Impersonate user <br> Message <br> Validate session <br> View and stop sessions |
| **Datastore cluster** | Configure a datastore cluster |
| **Profile-driven storage** | Profile-driven storage update <br> Profile-driven storage view |
| **Storage views** | Configure service <br> View |
| **Tasks** | Create task <br> Update task |
| **Transfer service** | Manage <br> Monitor |
| **vApp** | Add virtual machine <br> Assign resource pool <br> Assign vApp <br> Clone <br> Create <br> Delete <br> Export <br> Import <br> Move <br> Power off <br> Power on <br> Rename <br> Suspend <br> Unregister <br> View OVF environment <br> vApp application configuration <br> vApp instance configuration <br> vApp managedBy configuration <br> vApp resource configuration |
| **VRMPolicy** | Query VRMPolicy <br> Update VRMPolicy |
| **Virtual machine > Configuration** | Add existing disk <br> Add new disk <br> Add or remove device <br> Advanced <br> Change CPU count <br> Change resource <br> Configure managedBy <br> Disk change tracking <br> Disk lease <br> Display connection settings <br> Extend virtual disk <br> Host USB device <br> Memory <br> Modify device settings <br> Query Fault Tolerance compatibility <br> Query unowned files <br> Raw device <br> Reload from path <br> Remove disk <br> Rename <br> Reset guest information <br> Set annotation <br> Settings <br> Swapfile placement <br> Toggle fork parent <br> Unlock virtual machine <br> Upgrade virtual machine compatibility |
| **Virtual machine > Guest Operations** | Guest Operation alias modification <br> Guest Operation alias query <br> Guest Operation modifications <br> Guest Operation program execution <br> Guest Operation queries |
| **Virtual machine > Interaction** | Answer question <br> Backup operation on virtual machine <br> Configure CD media <br> Configure floppy media <br> Console interaction <br> Create screenshot <br> Defragment all disks <br> Device connection <br> Drag and drop <br> Guest operating system management by VIX API <br> Inject USB HID scan codes <br> Pause or Unpause <br> Perform wipe or shrink operations <br> Power off <br> Power on <br> Record session on virtual machine <br> Replay session on virtual machine <br> Reset <br> Resume Fault Tolerance <br> Suspend <br> Suspend Fault Tolerance <br> Test failover <br> Test restart Secondary VM <br> Turn off Fault Tolerance <br> Turn on Fault Tolerance <br> VMware Tools install |
| **Virtual machine > Inventory** | Create from existing <br> Create new <br> Move <br> Register <br> Remove <br> Unregister |
| **Virtual machine > Provisioning** | Allow disk access <br> Allow file access <br> Allow read-only disk access <br> Allow virtual machine download <br> Allow virtual machine files upload <br> Clone template <br> Clone virtual machine <br> Create template from virtual machine <br> Customize <br> Deploy template <br> Mark as template <br> Mark as virtual machine <br> Modify customization specification <br> Promote disks <br> Read customization specifications |
| **Virtual machine > Service configuration** | Allow notifications <br> Allow polling of global event notifications <br> Manage service configurations <br> Modify service configuration <br> Query service configurations <br> Read service configuration |
| **Virtual machine > Snapshot management** | Create snapshot <br> Remove snapshot <br> Rename snapshot <br> Revert to snapshot |
| **Virtual machine > vSphere Replication** | Configure replication <br> Manage replication <br> Monitor replication |
| **vService** | Create dependency <br> Destroy dependency <br> Reconfigure dependency configuration <br> Update dependency |
### <a name="cloud-cluster-admin-role"></a>Bulut-küme-yönetici-rol
| **Kategori** | **Privilege** |
|----------|-----------|
| **Hatayla** | Alan ayır <br> Veri deposuna gözatamıyorum <br> Veri deposunu yapılandırma <br> Alt düzey dosya işlemleri <br> Veri deposunu kaldır <br> Veri deposunu yeniden adlandır <br> Sanal makine dosyalarını Güncelleştir <br> Sanal makine meta verilerini Güncelleştir |
| **Klasör** | Klasör oluştur <br> Klasörü Sil <br> Klasörü taşı <br> Klasörü yeniden adlandır |
| **Ana bilgisayar > yapılandırması** | Depolama bölümü yapılandırması |
| **vSphere etiketleme** | VSphere etiketi atama veya atamasını kaldırma <br> VSphere etiketi oluştur <br> VSphere etiket kategorisi oluştur <br> VSphere etiketini Sil <br> VSphere etiket kategorisini Sil <br> VSphere etiketini Düzenle <br> VSphere etiket kategorisini Düzenle <br> Kategori Için UsedBy alanını değiştirme <br> Etiket Için UsedBy alanını değiştirme |
| **Ağ** | Ağ ata |
| **Kaynak** | Öneriyi Uygula <br> Sanal uygulamayı kaynak havuzuna ata <br> Sanal makineyi kaynak havuzuna ata <br> Kaynak havuzu oluştur <br> Sanal makineyi kapalı geçir <br> Sanal makinede güç geçişi gerçekleştir <br> Kaynak havuzunu Değiştir <br> Kaynak havuzunu taşı <br> VMotion sorgula <br> Kaynak havuzunu kaldır <br> Kaynak havuzunu yeniden adlandır |
| **vApp** | Sanal makine Ekle <br> Kaynak havuzu ata <br> VApp atama <br> Kopyalama <br> Oluştur <br> Sil <br> Dışarı Aktarma <br> İçeri Aktar <br> Taşı <br> Kapatma <br> Açma <br> Rename <br> Askıya Alma <br> Kaydı Kaldır <br> OVF ortamını görüntüle <br> vApp uygulama yapılandırması <br> vApp örneği yapılandırması <br> yapılandırmaya göre vApp Managed <br> vApp kaynak yapılandırması |
| **VRMPolicy** | VRMPolicy sorgula <br> VRMPolicy 'yi Güncelleştir |
| **Sanal makine > yapılandırması** | Mevcut diski Ekle <br> Yeni Disk Ekle <br> Cihaz Ekle veya Kaldır <br> Gelişmiş <br> Değişiklik CPU sayısı <br> Kaynağı değiştir <br> ManagedBy 'yi yapılandırma <br> Disk değişiklik izleme <br> Disk kirası <br> Bağlantı ayarlarını görüntüle <br> Sanal diski Genişlet <br> Konak USB cihazı <br> Bellek <br> Cihaz ayarlarını değiştir <br> Sorgu hatası toleransı uyumluluğu <br> Sahip olunan dosyaları sorgula <br> Ham cihaz <br> Yoldan yeniden yükle <br> Diski Kaldır <br> Rename <br> Konuk bilgilerini Sıfırla <br> Ek açıklamayı ayarla <br> Ayarlar <br> Swapfile yerleşimi <br> Çatal üst öğesini aç <br> Sanal makinenin kilidini aç <br> Sanal makine uyumluluğunu yükselt |
| **Sanal makine > Konuk işlemleri** | Konuk işlemi diğer adı değişikliği <br> Konuk işlemi diğer adı sorgusu <br> Konuk işlemi değişiklikleri <br> Konuk işlemi program yürütme <br> Konuk işlem sorguları |
| **Sanal makine > etkileşimi** | Soru cevap <br> Sanal makinede yedekleme işlemi <br> CD medyasını yapılandırma <br> Disket ortamını yapılandırma <br> Konsol etkileşimi <br> Ekran görüntüsü oluşturma <br> Tüm diskleri birleştirin <br> Cihaz bağlantısı <br> Sürükleyip bırakma <br> VIX API tarafından Konuk işletim sistemi yönetimi <br> USB HID tarama kodları Ekle <br> Duraklatma veya duraklamayı kaldırma <br> Temizleme veya küçültme işlemleri gerçekleştirme <br> Kapatma <br> Açma <br> Sanal makinede oturum Kaydet <br> Sanal makinede oturumu yeniden Yürüt <br> Sıfırla <br> Hata toleransını sürdürür <br> Askıya Alma <br> Askıya alma hatası toleransı <br> Yük devretme testi <br> Test yeniden başlatma Ikincil VM <br> Hata toleransını devre dışı bırakma <br> Hata toleransını aç <br> VMware araçları yüklemesi
| **Sanal makine > envanteri** | Mevcut kaynaktan oluştur <br> Yeni oluştur <br> Taşı <br> Kaydol <br> Kaldır <br> Kaydı Kaldır |
| **Sanal makine > sağlama** | Disk erişimine izin ver <br> Dosya erişimine izin ver <br> Salt okuma disk erişimine izin ver <br> Sanal makine indirmeye izin ver <br> Sanal makine dosyalarının karşıya yüklenmesine izin ver <br> Şablonu kopyala <br> Sanal makineyi Kopyala <br> Sanal makineden Şablon Oluştur <br> Özelleştirme <br> Şablon dağıtma <br> Şablon olarak işaretle <br> Sanal makine olarak işaretle <br> Özelleştirme belirtimini değiştirme <br> Diskleri yükselt <br> Özelleştirme belirtimlerini oku |
| **Sanal makine > hizmeti yapılandırması** | Bildirimlere izin ver <br> Genel olay bildirimlerinin yoklanmaya izin ver <br> Hizmet yapılandırmasını yönetme <br> Hizmet yapılandırmasını değiştirme <br> Sorgu hizmeti yapılandırması <br> Hizmet yapılandırmasını oku
| **Sanal makine > anlık görüntü yönetimi** | Anlık görüntü oluşturma <br> Anlık görüntüyü kaldır <br> Anlık görüntüyü yeniden adlandır <br> Anlık görüntüye dön |
| **Sanal makine > vSphere çoğaltması** | Çoğaltmayı yapılandırma <br> Çoğaltmayı yönetme <br> Çoğaltmayı izleme |
| **vService** | Bağımlılık oluştur <br> Bağımlılığı yok et <br> Bağımlılık yapılandırmasını yeniden yapılandırın <br> Güncelleştirme bağımlılığı |
### <a name="cloud-storage-admin-role"></a>Bulut-depolama-yönetici-rol
| **Kategori** | **Privilege** |
|----------|-----------|
| **Hatayla** | Alan ayır <br> Veri deposuna gözatamıyorum <br> Veri deposunu yapılandırma <br> Alt düzey dosya işlemleri <br> Veri deposunu kaldır <br> Veri deposunu yeniden adlandır <br> Sanal makine dosyalarını Güncelleştir <br> Sanal makine meta verilerini Güncelleştir |
| **Ana bilgisayar > yapılandırması** | Depolama bölümü yapılandırması |
| **Veri deposu kümesi** | Veri deposu kümesi yapılandırma |
| **Profil temelli depolama** | Profil temelli depolama güncelleştirmesi <br> Profil temelli depolama alanı görünümü |
| **Depolama görünümleri** | Hizmeti Yapılandır <br> Görünüm |
### <a name="cloud-network-admin-role"></a>Bulut-ağ-yönetici-rol
| **Kategori** | **Privilege** |
|----------|-----------|
| **dvPort grubu** | Oluştur <br> Sil <br> Değiştir <br> İlke işlemi <br> Kapsam işlemi |
| **Ağ** | Ağ ata <br> Yapılandırma <br> Ağı taşı <br> Kaldır |
| **Sanal makine > yapılandırması** | Cihaz ayarlarını değiştir |
### <a name="cloud-vm-admin-role"></a>Bulut-VM-yönetici-rol
| **Kategori** | **Privilege** |
|----------|-----------|
| **Hatayla** | Alan ayır <br> Veri deposuna gözatamıyorum |
| **Ağ** | Ağ ata |
| **Kaynak** | Sanal makineyi kaynak havuzuna ata <br> Sanal makineyi kapalı geçir <br> Sanal makinede güç geçişi gerçekleştir
| **vApp** | Dışarı Aktarma <br> İçeri Aktar |
| **Sanal makine > yapılandırması** | Mevcut diski Ekle <br> Yeni Disk Ekle <br> Cihaz Ekle veya Kaldır <br> Gelişmiş <br> Değişiklik CPU sayısı <br> Kaynağı değiştir <br> ManagedBy 'yi yapılandırma <br> Disk değişiklik izleme <br> Disk kirası <br> Bağlantı ayarlarını görüntüle <br> Sanal diski Genişlet <br> Konak USB cihazı <br> Bellek <br> Cihaz ayarlarını değiştir <br> Sorgu hatası toleransı uyumluluğu <br> Sahip olunan dosyaları sorgula <br> Ham cihaz <br> Yoldan yeniden yükle <br> Diski Kaldır <br> Rename <br> Konuk bilgilerini Sıfırla <br> Ek açıklamayı ayarla <br> Ayarlar <br> Swapfile yerleşimi <br> Çatal üst öğesini aç <br> Sanal makinenin kilidini aç <br> Sanal makine uyumluluğunu yükselt |
| **Sanal makine >Konuk işlemleri** | Konuk işlemi diğer adı değişikliği <br> Konuk işlemi diğer adı sorgusu <br> Konuk işlemi değişiklikleri <br> Konuk işlemi program yürütme <br> Konuk işlem sorguları |
| **Sanal makine >etkileşimi** | Soru cevap <br> Sanal makinede yedekleme işlemi <br> CD medyasını yapılandırma <br> Disket ortamını yapılandırma <br> Konsol etkileşimi <br> Ekran görüntüsü oluşturma <br> Tüm diskleri birleştirin <br> Cihaz bağlantısı <br> Sürükleyip bırakma <br> VIX API tarafından Konuk işletim sistemi yönetimi <br> USB HID tarama kodları Ekle <br> Duraklatma veya duraklamayı kaldırma <br> Temizleme veya küçültme işlemleri gerçekleştirme <br> Kapatma <br> Açma <br> Sanal makinede oturum Kaydet <br> Sanal makinede oturumu yeniden Yürüt <br> Sıfırla <br> Hata toleransını sürdürür <br> Askıya Alma <br> Askıya alma hatası toleransı <br> Yük devretme testi <br> Test yeniden başlatma Ikincil VM <br> Hata toleransını devre dışı bırakma <br> Hata toleransını aç <br> VMware araçları yüklemesi |
| **Sanal makine >envanteri** | Mevcut kaynaktan oluştur <br> Yeni oluştur <br> Taşı <br> Kaydol <br> Kaldır <br> Kaydı Kaldır |
| **Sanal makine >sağlama** | Disk erişimine izin ver <br> Dosya erişimine izin ver <br> Salt okuma disk erişimine izin ver <br> Sanal makine indirmeye izin ver <br> Sanal makine dosyalarının karşıya yüklenmesine izin ver <br> Şablonu kopyala <br> Sanal makineyi Kopyala <br> Sanal makineden Şablon Oluştur <br> Özelleştirme <br> Şablon dağıtma <br> Şablon olarak işaretle <br> Sanal makine olarak işaretle <br> Özelleştirme belirtimini değiştirme <br> Diskleri yükselt <br> Özelleştirme belirtimlerini oku |
| **Sanal makine >hizmeti yapılandırması** | Bildirimlere izin ver <br> Genel olay bildirimlerinin yoklanmaya izin ver <br> Hizmet yapılandırmasını yönetme <br> Hizmet yapılandırmasını değiştirme <br> Sorgu hizmeti yapılandırması <br> Hizmet yapılandırmasını oku
| **Sanal makine >anlık görüntü yönetimi** | Anlık görüntü oluşturma <br> Anlık görüntüyü kaldır <br> Anlık görüntüyü yeniden adlandır <br> Anlık görüntüye dön |
| **Sanal makine >vSphere çoğaltması** | Çoğaltmayı yapılandırma <br> Çoğaltmayı yönetme <br> Çoğaltmayı izleme |
| **vService** | Bağımlılık oluştur <br> Bağımlılığı yok et <br> Bağımlılık yapılandırmasını yeniden yapılandırın <br> Güncelleştirme bağımlılığı |
## Sample 2: Multiple Controls in the InputGroup
You can also place multiple controls in the `LeftTemplate` and `RightTemplate` properties, e.g. [RadioButton](/docs/controls/bootstrap/RadioButton/{branch})s or a combination of
[Button](/docs/controls/bootstrap/Button/{branch})s and [DropDownButton](/docs/controls/bootstrap/DropDownButton/{branch})s.
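A minimal sketch of what that markup could look like is shown below. The `bs:` tag prefix, the exact control names and the attribute names are assumptions about how the Bootstrap package is registered in your project, so check the samples for your DotVVM version before copying it:

```html
<!-- Illustrative sketch only - adjust prefixes and properties to your project. -->
<bs:InputGroup>
    <LeftTemplate>
        <bs:RadioButton Text="HTTP" />
        <bs:RadioButton Text="HTTPS" />
    </LeftTemplate>

    <dot:TextBox Text="{value: Url}" />

    <RightTemplate>
        <bs:Button Text="Search" Click="{command: Search()}" />
        <bs:DropDownButton Text="Options" />
    </RightTemplate>
</bs:InputGroup>
```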
## Contributing Guidelines
- Get in touch and communicate! Let us know what you are working on or report bugs using the issues tab. Join the [Wesnoth Discord server](https://discord.gg/battleforwesnoth) to discuss the project live or to get in touch with the community. Ping @Vultraz to get access to the #project-haldric channel.
- A rough outline of things that need to be done can be found [here](https://github.com/wesnoth/haldric/issues/5).
- For code, we follow the [GDScript Style Guide](https://docs.godotengine.org/en/3.1/getting_started/scripting/gdscript/gdscript_styleguide.html). We try to write clean and self-documenting GDScript as it helps us build upon each other's work. We use static typing to write more robust code and get full auto completion in Godot 3.1. (A short example of this style is shown right after this list.) Also, we're here to review and to help you improve your code.
- The maintainers may refactor or tweak your code to make it fit the project's style, but we'll give you the opportunity to refine the style by yourself.
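If you have not written statically typed GDScript before, the sketch below shows the style we are aiming for. The names are purely illustrative and do not come from the Haldric codebase:

```gdscript
# Illustrative only - not actual Haldric code.
extends Node2D

var health: int = 100
var display_name: String = "Grunt"

func take_damage(amount: int) -> void:
	health -= amount
	if health < 0:
		health = 0
```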
## New to Godot?
- If you never worked with the Godot Engine before, the [Official Godot Docs](https://docs.godotengine.org/en/3.2/index.html) will guide you through your first steps in Godot.
- If you are more of a video person, here are a few links:
- (3.0) [Getting Started -- Godot 3](https://www.youtube.com/watch?v=hG_MgGHAX-Q) - by [Gamefromscratch](https://www.youtube.com/channel/UCr-5TdGkKszdbboXXsFZJTQ)
- (3.0) [Learning Godot 3.0](https://www.youtube.com/watch?v=uPoLKQG0gmw&list=PLsk-HSGFjnaFutTDzgik2KMRl6W1JxFgD) - by [KidsCanCode](https://www.youtube.com/channel/UCNaPQ5uLX5iIEHUCLmfAgKg)
- (3.1) [Intro to GDScript for Programming Beginners](https://www.youtube.com/watch?v=UcdwP1Q2UlU&t=) - by [GDQuest](https://www.youtube.com/channel/UCxboW7x0jZqFdvMdCFKTMsQ)
- (3.1) [Intro to C# in Godot 3.1](https://www.youtube.com/watch?v=hRuUHxOCYz0&t) - by [GDQuest](https://www.youtube.com/channel/UCxboW7x0jZqFdvMdCFKTMsQ)
- (3.2) [Godot Action RPG Series](https://www.youtube.com/playlist?list=PL9FzW-m48fn2SlrW0KoLT4n5egNdX-W9a)
### Note:
Haldric uses the latest stable build of the Godot Engine; currently that is 3.2.2.
The tutorials above cover different 3.x versions. While there are a few differences in the API, the general concepts have not changed.
However, we do not recommend watching tutorials for Godot 2.1, as the jump from 2.1 to 3.0 was quite big.
### Download Godot
Godot can be downloaded on their official [Download Page](https://godotengine.org/download) or on [Steam](https://store.steampowered.com/app/404790/Godot_Engine/)!
## How to set up The Battle for Wesnoth 2.0 development environment
This quick guide will walk you through the process of setting up a standard development environment for The Battle for Wesnoth 2.0.
### Step 1: Download and “Install” Godot:
Download the latest version of Godot for your operating system from the Godot website. You will likely want the Standard 64-bit version (the Mono version adds C# support, but unless you have a specific need for it, the Standard version will suffice).

The Godot executable does not require installation so you will simply need to extract the exe from the zip file to the location of your choosing.\
Once you have placed the executable in the desired location simply run it to Start Godot.\

### Step 2: Download Wesnoth repository
Obtain the current Haldric zip file from the Wesnoth Haldric Github page (https://github.com/wesnoth/haldric)

Open the Godot Engine and select the Import button from the right-hand menu

Use the Browse button to navigate to the downloaded zip file.
You will need to create an empty directory to open the project in and navigate to that directory in the “Project Installation Path:” Field. You can tell that the paths are valid by the green checkmark by each field.

Hit the “Import & Edit” button to load the project.\
Once loaded, you will be ready to start assisting in the development of The Battle for Wesnoth 2.0!

``` ini
BenchmarkDotNet=v0.11.3, OS=macOS High Sierra 10.13.6 (17G65) [Darwin 17.7.0]
Intel Core i7-4650U CPU 1.70GHz (Haswell), 1 CPU, 4 logical and 2 physical cores
.NET Core SDK=2.1.403
[Host] : .NET Core 2.1.5 (CoreCLR 4.6.26919.02, CoreFX 4.6.26919.02), 64bit RyuJIT
DefaultJob : .NET Core 2.1.5 (CoreCLR 4.6.26919.02, CoreFX 4.6.26919.02), 64bit RyuJIT
```
| Method | Mean | Error | StdDev |
|------- |---------:|---------:|---------:|
| Test | 98.02 us | 1.594 us | 1.413 us |
| 36.142857 | 88 | 0.586957 | yue_Hant | 0.604721 |
4720f178bd79dd449181f3f2ac1d02e2172418bd | 1,907 | md | Markdown | README.md | adgear/helm-chart-resource | 0e80de5ccb9db562b0685ba681e8623a78806131 | [
"MIT"
] | 1 | 2018-06-26T20:11:49.000Z | 2018-06-26T20:11:49.000Z | README.md | adgear/helm-chart-resource | 0e80de5ccb9db562b0685ba681e8623a78806131 | [
"MIT"
] | 8 | 2018-06-13T17:47:15.000Z | 2021-01-11T13:55:34.000Z | README.md | adgear/helm-chart-resource | 0e80de5ccb9db562b0685ba681e8623a78806131 | [
"MIT"
] | 1 | 2021-06-25T10:12:51.000Z | 2021-06-25T10:12:51.000Z | # Helm chart resource
Tracks the helm releases in a [helm](https://helm.sh/) repository.
```yaml
resource_types:
- name: helm-chart-resource
type: docker-image
source:
repository: adgear/helm-chart-resource
```
## Source Configuration
* `chart_name`: *Required.* The chart name
* `repository_name`: *Optional.* The repository where the chart resides.
* `repos`: A list of repos to check for `chart_name`
### Example
Resource configuration for incubator repository
``` yaml
resources:
- name: helm-etcd
type: helm-chart-resource
source:
chart_name: etcd
repository_name: incubator
repos:
- name: incubator
url: https://kubernetes-charts-incubator.storage.googleapis.com/
```
Resource configuration for private repository
``` yaml
resources:
- name: helm-some_app
type: helm-chart-resource
source:
chart_name: some_app
repository_name: adgear-helm
repos:
- name: adgear-helm
url: https://adgear-charts.example.com/
username: ((username))
password: ((password))
```
Triggering on new release of chart `some_app`
``` yaml
- get: helm-some_app
trigger: true
```
## Behavior
### `check`: Check for new helm chart versions
Search for the latest version of `source.chart_name`.
### `in`: Get the latest version ref.
Output the latest `ref` to file `.version`
### `out`: Package and push to a helm repository.
Package `params.path` using the version in `{params.path}/Chart.yaml` and push it to an helm repo.
#### Parameters
* `path`: *Required.* The path of the previously `get` resource containing your helm chart code.
* `type`: *Required.* The type of helm repository to push to.
Only supports `artifactory` for now.
* `api_url`: *Optional.* The artifactory api url.
## Development
### Prerequisites
* Common sense.
### Running the tests
To be implemented.
### License
[MIT](LICENSE)
### Contributing
TBD. | 19.262626 | 98 | 0.699004 | eng_Latn | 0.834816 |
4721939d6973243d942dfd925fe7b43c7c358233 | 1,276 | md | Markdown | README.md | josh7197/midterm | 1513029bc31a8037b38642877e6bcf8beb6a7281 | [
"MIT"
] | null | null | null | README.md | josh7197/midterm | 1513029bc31a8037b38642877e6bcf8beb6a7281 | [
"MIT"
] | null | null | null | README.md | josh7197/midterm | 1513029bc31a8037b38642877e6bcf8beb6a7281 | [
"MIT"
] | null | null | null | # Insurance cross-selling prediction
# The goal of the project
A goal of this project is predicting whether health insurance holders are interested in car
insurance plans that the same company offers. Rather than marketing for unspecified potential
customers, cross marketing tactics for the target customers are less costly and more monetarily
efficient. The prediction model would allow the company to implement the improved sales
strategy with less capitals and higher number of vehicle insurance holders.
# The model
The analytical model for the goal is a classification because the target variable is a dummy
variable, “Response”, with 1 or 0 if an insurance holder is interested in a vehicle insurance is 1
or 0. The data set has 12 features, including the target variable and ID, and 381,109 rows(data
points). However, ID does not explain whether someone is interested in the insurance plan,
therefore, I would better omit this variable in the model. Therefore, the dataset consists of 10
explanatory variables and a single target variable with 381,109 data points.
# Used package version
- Python version is 3.9.7
- numpy version 1.21.1
- matplotlib version 3.4.2
- sklearn version 0.24.2
- pandas version 1.3.1
- xgboost version 1.3.3
- shap version 0.39.0
| 49.076923 | 98 | 0.795455 | eng_Latn | 0.999721 |
47219a251e0d460cfd20c9a40d710618cd733050 | 1,308 | md | Markdown | vendor/github.com/terraform-providers/terraform-provider-ignition/website/docs/d/raid.html.md | jeduardo/terraform-provider-libvirt | b11e5b79ff40dc8f03d762d92226fabe315955dc | [
"Apache-2.0"
] | null | null | null | vendor/github.com/terraform-providers/terraform-provider-ignition/website/docs/d/raid.html.md | jeduardo/terraform-provider-libvirt | b11e5b79ff40dc8f03d762d92226fabe315955dc | [
"Apache-2.0"
] | null | null | null | vendor/github.com/terraform-providers/terraform-provider-ignition/website/docs/d/raid.html.md | jeduardo/terraform-provider-libvirt | b11e5b79ff40dc8f03d762d92226fabe315955dc | [
"Apache-2.0"
] | 2 | 2018-07-03T07:39:54.000Z | 2019-03-04T07:34:41.000Z | ---
layout: "ignition"
page_title: "Ignition: ignition_raid"
sidebar_current: "docs-ignition-datasource-raid"
description: |-
Describes the desired state of the system’s RAID.
---
# ignition\_raid
Describes the desired state of the system’s RAID.
## Example Usage
```hcl
data "ignition_raid" "md" {
name = "data"
level = "stripe"
devices = [
"/dev/disk/by-partlabel/raid.1.1",
"/dev/disk/by-partlabel/raid.1.2"
]
}
data "ignition_disk" "disk1" {
device = "/dev/sdb"
wipe_table = true
partition {
label = "raid.1.1"
number = 1
size = 20480
start = 0
}
}
data "ignition_disk" "disk2" {
device = "/dev/sdc"
wipe_table = true
partition {
label = "raid.1.2"
number = 1
size = 20480
start = 0
}
}
```
## Argument Reference
The following arguments are supported:
* `name` - (Required) The name to use for the resulting md device.
* `level` - (Required) The redundancy level of the array (e.g. linear, raid1, raid5, etc.).
* `devices` - (Required) The list of devices (referenced by their absolute path) in the array.
* `spares` - (Optional) The number of spares (if applicable) in the array.
## Attributes Reference
The following attributes are exported:
* `id` - ID used to reference this resource in _ignition_config_ | 20.4375 | 94 | 0.659021 | eng_Latn | 0.954207 |
4721a9433636f069ff2480e90481e5e16ac2208b | 2,133 | md | Markdown | quickstart/python/README.md | brendanzagaeski/azure-cache-redis-samples | b624126af05f665820842fc8b4f16e7d3d6cfb26 | [
"MIT"
] | 16 | 2021-03-30T14:14:13.000Z | 2022-03-15T08:38:11.000Z | quickstart/python/README.md | brendanzagaeski/azure-cache-redis-samples | b624126af05f665820842fc8b4f16e7d3d6cfb26 | [
"MIT"
] | 8 | 2021-03-17T20:08:26.000Z | 2022-02-22T20:59:55.000Z | quickstart/python/README.md | brendanzagaeski/azure-cache-redis-samples | b624126af05f665820842fc8b4f16e7d3d6cfb26 | [
"MIT"
] | 48 | 2021-03-04T23:05:40.000Z | 2022-03-27T07:22:01.000Z | ---
page_type: sample
languages:
- python
name: 'Quickstart: Use Azure Cache for Redis in Python'
description: Learn how to incorporate Azure Cache for Redis into a Python app.
products:
- azure
- azure-cache-redis
---
# Quickstart: Use Azure Cache for Redis in Python
This sample show you how to incorporate Azure Cache for Redis into a Python app. See the [accompanying article](https://docs.microsoft.com/azure/azure-cache-for-redis/cache-python-get-started) on the documentation site for details, including best practices and how to create the sample code from scratch.
## Prerequisites
- Azure subscription - [create one for free](https://azure.microsoft.com/free/)
- Azure Cache for Redis cache - [create one](https://docs.microsoft.com/azure/azure-cache-for-redis/quickstart-create-redis)
- [Python 2 or 3](https://www.python.org/downloads/)
## Install redis-py
[Redis-py](https://github.com/andymccurdy/redis-py) is a Python interface to Azure Cache for Redis. Use the Python packages tool, *pip*, to install the *redis-py* package from a command prompt.
The following example used *pip3* for Python 3 to install *redis-py* on Windows 10 from an Administrator command prompt.

## Run the sample
[Download the sample code to your development PC.](/README.md#get-the-samples)
Change directories to the folder containing this sample.
Edit the *PythonApplication1.py* source. Replace `<Your Host Name>` and `<Your Access Key>` with the values from your Azure Cache for Redis instance. Your host name is of the form *\<DNS name>.redis.cache.windows.net*.
Run *PythonApplication1.py* with Python.
> [!IMPORTANT]
> For Azure Cache for Redis version 3.0 or higher, TLS/SSL certificate check is enforced. ssl_ca_certs must be explicitly set when connecting to Azure Cache for Redis. For RedHat Linux, ssl_ca_certs are in the */etc/pki/tls/certs/ca-bundle.crt* certificate module.
## References
* [Quickstart article on the documentation site](https://docs.microsoft.com/azure/azure-cache-for-redis/cache-python-get-started)
| 47.4 | 304 | 0.769339 | eng_Latn | 0.945439 |
4721d7f2ef7bed81c770acb445f08ab75c513c13 | 9,289 | md | Markdown | README.md | craftbyte/qrcode-svg | 9b769f506ddbad43e1ad71cef146b67f8fbaac02 | [
"MIT"
] | 311 | 2016-03-22T09:36:09.000Z | 2022-03-27T13:57:18.000Z | README.md | craftbyte/qrcode-svg | 9b769f506ddbad43e1ad71cef146b67f8fbaac02 | [
"MIT"
] | 19 | 2016-08-18T07:54:30.000Z | 2022-03-23T14:30:48.000Z | README.md | craftbyte/qrcode-svg | 9b769f506ddbad43e1ad71cef146b67f8fbaac02 | [
"MIT"
] | 72 | 2016-05-22T10:34:55.000Z | 2022-03-29T13:28:27.000Z | ## Introduction
This library has been written to generate a SVG image of QR Code in Node.js, goals:
* pure JavaScript
* no browser requirement
* no external dependencies
* generate SVG output
## Getting Started
Install the package:
```bash
npm install qrcode-svg
```
Inline example:
```javascript
var QRCode = require("qrcode-svg");
var svg = new QRCode("Hello World!").svg();
```
More options:
```javascript
var qrcode = new QRCode({
content: "http://github.com/",
padding: 4,
width: 256,
height: 256,
color: "#000000",
background: "#ffffff",
ecl: "M",
});
qrcode.save("sample.svg", function(error) {
if (error) throw error;
console.log("Done!");
});
```
## Options
**List of options:**
* **content** - QR Code content, the only **required** parameter
* **padding** - white space padding, `4` modules by default, `0` for no border
* **width** - QR Code width in pixels
* **height** - QR Code height in pixels
* **color** - color of modules (squares), color name or hex string, e.g. `#000000`
* **background** - color of background, color name or hex string, e.g. `white`
* **ecl** - error correction level: `L`, `M`, `H`, `Q`
* **join** - join modules (squares) into one shape, into the SVG `path` element, **recommended** for web and responsive use, default: `false`
* **predefined** - to create a squares as pattern, then populate the canvas, default: `false`, see the output examples below
* **pretty** - apply indents and new lines, default: `true`
* **swap** - swap X and Y modules, only if you have issues with some QR readers, default: `false`
* **xmlDeclaration** - prepend XML declaration to the SVG document, i.e. `<?xml version="1.0" standalone="yes"?>`, default: `true`
* **container** - wrapping element, default: `svg`, see below
**Container options:**
* **svg** - populate squares in a SVG document with `width` and `height` attriute, recommended for converting to raster images or PDF where QR Code is being static (exact size)
* **svg-viewbox** - populate squares in a SVG document with `viewBox` attriute, **recommended** for responsive web pages
* **g** - put squares in `g` element, useful when you need to put multiple QR Codes in a single SVG document
* **none** - no wrapper
## SVG output
### Editable squares
This mode is useful for designers to manipulate with particular squares.
Thus, one can open the QR Code in an editor, select particular modules, move around, change color, etc.
However, some old SVG viewers may generate minor gaps between the squares - the side effect when rendering an image at certain zoom level.
Default options
```javascript
var qrcode = new QRCode({
content: "Pretty Fox",
join: false,
predefined: false
});
```
Output with `rect` elements
```xml
<?xml version="1.0" standalone="yes"?>
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="256" height="256">
<rect x="0" y="0" width="256" height="256" style="fill:#ffffff;shape-rendering:crispEdges;"/>
<rect x="16" y="16" width="8" height="8" style="fill:#000000;shape-rendering:crispEdges;"/>
<rect x="24" y="16" width="8" height="8" style="fill:#000000;shape-rendering:crispEdges;"/>
<rect x="32" y="16" width="8" height="8" style="fill:#000000;shape-rendering:crispEdges;"/>
...
</svg>
```
### Responsive web page
Squares joined into one `path` shape produce a compact file size, i.e. 4-5x reduced compared with `rect` elements.
A single `path` element will result in an optimized rendering, thus not producing any minor gaps between the squares.
Also using the container with `viewBox` attribute may contribute to the responsive scaling on the web.
Set `join` to `true`
```javascript
var qrcode = new QRCode({
content: "Pretty Fox",
join: true,
container: "svg-viewbox" //Useful but not required
});
```
Output with `path` element
```xml
<?xml version="1.0" standalone="yes"?>
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 256 256">
<rect x="0" y="0" width="256" height="256" style="fill:beige;shape-rendering:crispEdges;"/>
<path x="0" y="0" style="fill:blue;shape-rendering:crispEdges;" d="M35.31,35.31 V44.14 H44.14 V35.31 H35.31 Z..." />
</svg>
```
### Predefined pattern
Algorithm defines the square pattern once before populating a canvas. Useful if you want to generate QR Code with candies.
However, some SVG software and converters do not support `defs` or `use` elements.
Set `predefined` to `true`
```javascript
var qrcode = new QRCode({
content: "Pretty Fox",
predefined: true
});
```
Output with `defs` and `use` elements
```xml
<?xml version="1.0" standalone="yes"?>
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="256" height="256">
<defs><path id="qrmodule" d="M0 0 h8.827586206896552 v8.827586206896552 H0 z" style="fill:maroon;shape-rendering:crispEdges;" /></defs>
<rect x="0" y="0" width="256" height="256" style="fill:beige;shape-rendering:crispEdges;"/>
<use x="35.310344827586206" y="35.310344827586206" href="#qrmodule" />
<use x="44.13793103448276" y="35.310344827586206" href="#qrmodule" />
<use x="52.96551724137931" y="35.310344827586206" href="#qrmodule" />
<use x="61.79310344827586" y="35.310344827586206" href="#qrmodule" />
<use x="70.62068965517241" y="35.310344827586206" href="#qrmodule" />
...
</svg>
```
## Command Line
```
Usage:
qrcode-svg [options] <content>
Options:
--help Print this message
--version, -v Print version number
--padding , -p [value] Offset in number of modules
--width, -w [px] Image width in pixels
--height, -h [px] Image height in pixels
--color, -fg [color] Foreground color, hex or name
--background [color] Background color, hex or name
--ecl [value] Error correction level: L, M, H, Q
--join Join modules into one SVG path, i.e. for crisp rendering
--predefined Use 'defs' and 'use' elements in SVG, i.e. for compact output
--no-prettify Avoid indenting and new lines in SVG, i.e. for compact output
--viewbox Use 'viewBox' instead of 'width' and 'height' attributes
--swap-fix Swap X and Y modules to fix issues with some QR readers
--output, -o [file] Output file name
--force, -f Force overwrite
Examples:
qrcode-svg http://github.com
qrcode-svg -f -o hello.svg "Hello World"
qrcode-svg -p 4 -w 256 -h 256 --join --viewbox "Responsive..."
qrcode-svg --padding 2 --width 120 --height 120 "Little fox..."
qrcode-svg --color blue --background #ececec "...jumps over"
```
## Usage Scenarios
### Convert to other formats
Using [html-pdf](https://www.npmjs.com/package/html-pdf) to convert SVG to PDF (or PNG or JPEG)
```javascript
var QRCode = require('qrcode-svg');
var svg = new QRCode('hello').svg();
...
var pdf = require('html-pdf');
pdf.create(svg, { border: 0, type: 'pdf' }).toFile('output.pdf', function(err, res) {
...
});
```
### ASCII modules
QR Code in ASCII to output in a shell
```javascript
var QRCode = require('qrcode-svg');
var hello = new QRCode("Hello World!");
var modules = hello.qrcode.modules;
var ascii = '';
var length = modules.length;
for (var y = 0; y < length; y++) {
for (var x = 0; x < length; x++) {
var module = modules[x][y];
ascii += (module ? 'x' : ' ');
}
ascii += '\r\n';
}
console.log(ascii);
```
```
xxxxxxx xx x x xxxxxxx
x x xxxx x x x x
x xxx x xx xx x x xxx x
x xxx x xx x xxx x
x xxx x x x x x xxx x
x x x xx xx x x
xxxxxxx x x x x x xxxxxxx
xx xx
x x xx x x xx x x
x x xx x xx x xx x
x x xx x x x xx xx
x xx xxx xx x x x x x
xx xxxx xxxx x
x x x xx x xx xx x xx xx
x xx xxxx xxxx
xx xx x x x x xx x
xxxx xxxx xxxxxx x
x x x
xxxxxxx x xxx x x x x
x x xxx x xx x x
x xxx x xxxxxxxxxx
x xxx x xxxxxxxxx x xx
x xxx x xxx xx x x x
x x x x x
xxxxxxx xxx xxx x x x
```
### Web browser
Use on a HTML page with JavaScript
```html
<!DOCTYPE html>
<html>
<body>
<div id="container"></div>
<script src="dist/qrcode.min.js"></script>
<script>
var qrcode = new QRCode({
content: "Hello World!",
container: "svg-viewbox", //Responsive use
join: true //Crisp rendering and 4-5x reduced file size
});
var svg = qrcode.svg();
document.getElementById("container").innerHTML = svg;
</script>
</body>
</html>
```
## Thanks
Thanks to [davidshimjs](https://github.com/davidshimjs/qrcodejs) for the base library.
Thanks to [Kazuhiko Arase](http://www.d-project.com/) for the original QR Code in JavaScript algorithm.
Thanks to all contributors on the GitHub.
## Legal notice
```
Licensed under the MIT license:
http://www.opensource.org/licenses/mit-license.php
The word "QR Code" is registered trademark of DENSO WAVE INCORPORATED
http://www.denso-wave.com/qrcode/faqpatent-e.html
``` | 33.175 | 177 | 0.632899 | eng_Latn | 0.690518 |
47222b1f70900c344b0a08cb188a5f90dd42547d | 308 | md | Markdown | Simple projects/Project 8/README.md | grigorevmp/Arduino-projects | dcca138e7a81fb8cb939acc1d99e2e7fcc34998c | [
"MIT"
] | null | null | null | Simple projects/Project 8/README.md | grigorevmp/Arduino-projects | dcca138e7a81fb8cb939acc1d99e2e7fcc34998c | [
"MIT"
] | null | null | null | Simple projects/Project 8/README.md | grigorevmp/Arduino-projects | dcca138e7a81fb8cb939acc1d99e2e7fcc34998c | [
"MIT"
] | null | null | null | # Experiment with tilt sensor
Tilt sensor experiment
**Equipment:**
- LED light
- Resistor "220 Om"
- Tilt sensor
- Wires
**Input:** -
**Output:**
- Led light on device rotation
# Schema
 | 18.117647 | 114 | 0.724026 | eng_Latn | 0.237186 |
47222bf302d711e7724ed1156ba0602a4fa67872 | 399 | md | Markdown | vendor/tree-sitter-elixir/.github/ISSUE_TEMPLATE.md | jessesna/difftastic | 2c6545fae403f360c8b4d0e64fb3ece2baa8deea | [
"MIT"
] | 125 | 2021-09-30T10:55:46.000Z | 2022-03-16T10:49:32.000Z | vendor/tree-sitter-elixir/.github/ISSUE_TEMPLATE.md | jessesna/difftastic | 2c6545fae403f360c8b4d0e64fb3ece2baa8deea | [
"MIT"
] | 21 | 2021-10-01T22:47:58.000Z | 2022-03-25T19:43:13.000Z | vendor/tree-sitter-elixir/.github/ISSUE_TEMPLATE.md | jessesna/difftastic | 2c6545fae403f360c8b4d0e64fb3ece2baa8deea | [
"MIT"
] | 9 | 2021-10-13T13:06:11.000Z | 2022-02-12T17:16:06.000Z | <!--
This repository includes the Tree-sitter parser for Elixir,
whereas editor-specific integrations are separate projects.
If you're running into Tree-sitter related issues in your editor,
please report it on the respective repository (likely an editor
plugin responsible for Tree-sitter integration).
For example, when using Neovim, go to https://github.com/nvim-treesitter/nvim-treesitter
-->
| 36.272727 | 88 | 0.802005 | eng_Latn | 0.994536 |
47227ecc5443ac6ceaaadcb2794ed2154b540f2b | 5,373 | md | Markdown | docs/2014/data-quality-services/import-a-knowledge-base-from-a-dqs-file.md | antoniosql/sql-docs.es-es | 0340bd0278b0cf5de794836cd29d53b46452d189 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/2014/data-quality-services/import-a-knowledge-base-from-a-dqs-file.md | antoniosql/sql-docs.es-es | 0340bd0278b0cf5de794836cd29d53b46452d189 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/2014/data-quality-services/import-a-knowledge-base-from-a-dqs-file.md | antoniosql/sql-docs.es-es | 0340bd0278b0cf5de794836cd29d53b46452d189 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: Importar una base de conocimiento desde un archivo .dqs | Microsoft Docs
ms.custom: ''
ms.date: 06/13/2017
ms.prod: sql-server-2014
ms.reviewer: ''
ms.suite: ''
ms.technology:
- data-quality-services
ms.tgt_pltfrm: ''
ms.topic: conceptual
ms.assetid: 9b9786fe-9e80-429a-afcb-dc3b3dd6f0b0
caps.latest.revision: 16
author: douglaslMS
ms.author: douglasl
manager: craigg
ms.openlocfilehash: d434f30fb3ef34c97aa38463aa9a28c115ac5e46
ms.sourcegitcommit: c18fadce27f330e1d4f36549414e5c84ba2f46c2
ms.translationtype: MT
ms.contentlocale: es-ES
ms.lasthandoff: 07/02/2018
ms.locfileid: "37321255"
---
# <a name="import-a-knowledge-base-from-a-dqs-file"></a>Importar una base de conocimiento desde un archivo .dqs
En este tema se describe cómo importar una base de conocimiento completa desde un archivo de datos .dqs en [!INCLUDE[ssDQSnoversion](../includes/ssdqsnoversion-md.md)] (DQS). Para crear el archivo de datos, debe exportar una base de conocimiento existente desde la aplicación [!INCLUDE[ssDQSClient](../includes/ssdqsclient-md.md)] (vea [exportar una Base de conocimiento a un archivo .dqs](../../2014/data-quality-services/export-a-knowledge-base-to-a-dqs-file.md)).
El uso de un archivo de datos .dqs para exportar el contenido de una base de conocimiento e importarlo a continuación en otra base de conocimiento del mismo [!INCLUDE[ssDQSServer](../includes/ssdqsserver-md.md)] o de otro [!INCLUDE[ssDQSServer](../includes/ssdqsserver-md.md)] simplifica el proceso de generación del conocimiento, lo que permite ahorrar tiempo y esfuerzo. Le permite compartir con los demás una base de conocimiento y su conocimiento, lo que se traduce en un ahorro de tiempo. El archivo .dqs contendrá toda la información de la base de conocimiento, incluido los dominios y la directiva de coincidencia, salvo la información de datos de referencia adjunta. Se importarán tanto los datos publicados como los no publicados.
El archivo de datos .dqs está cifrado, por lo que no se puede ver.
Al importar una base de conocimiento, puede utilizar el mismo nombre, a menos que este exista ya en la aplicación cliente, en cuyo caso deberá cambiarlo.
## <a name="BeforeYouBegin"></a> Antes de empezar
### <a name="Prerequisites"></a> Requisitos previos
Para importar una base de conocimiento desde un archivo de .dqs, antes debe haberla exportado al archivo .dqs.
### <a name="Security"></a> Seguridad
#### <a name="Permissions"></a> Permissions
Debe disponer del rol dqs_kb_editor o dqs_administrator en la base de datos DQS_MAIN para importar una base de conocimiento desde un archivo de datos .dqs.
## <a name="Import"></a> Import a knowledge base from a .dqs file
1. [!INCLUDE[ssDQSInitialStep](../includes/ssdqsinitialstep-md.md)] [Ejecute la aplicación Data Quality Client](../../2014/data-quality-services/run-the-data-quality-client-application.md).
2. En la página de inicio de [!INCLUDE[ssDQSClient](../includes/ssdqsclient-md.md)] , haga clic en **Nueva base de conocimiento**.
3. Escriba un nombre para la base de conocimiento.
4. Haga clic en la flecha abajo de **Crear base de conocimiento a partir de**y, a continuación, seleccione **Importar desde el archivo DQS**.
5. En **Seleccionar archivo de datos**, haga clic en **Examinar**.
6. En el cuadro de diálogo **Importar de archivo de datos** , desplácese a la carpeta que contiene el archivo .dqs que desea importar y, a continuación, haga clic en el nombre del archivo. Haga clic en **Abrir**.
7. Compruebe que en la lista **Dominios** aparecen la base de conocimiento y los dominios correctos.
8. Seleccione la actividad que desea realizar y, a continuación, haga clic en **Crear**.
9. En el cuadro de diálogo **Importar base de conocimiento** , compruebe que la línea de estado indica que se ha completado la importación. Haga clic en **Aceptar**.
10. Finalice las tareas de detección de conocimiento, administración de dominios o directiva de coincidencia que necesita realizar y, a continuación, haga clic en **Finalizar**.
11. Haga clic en **Publicar** para publicar el conocimiento en la base de conocimiento, o en **No** si prefiere no hacerlo.
12. Si opta por publicar la base de conocimiento, haga clic en **Aceptar**.
13. En la página de inicio de Data Quality Services, compruebe que la base de conocimiento aparece debajo de **Base de conocimiento reciente**.
## <a name="FollowUp"></a> Seguimiento: después de importar una base de conocimiento desde un archivo .dqs
Después de importar una base de conocimiento desde un archivo .dqs, puede agregar conocimiento a la base de conocimiento o utilizarla en un proyecto de limpieza o de búsqueda de coincidencias, dependiendo de su contenido. Para más información, vea [Realizar la detección de conocimiento](../../2014/data-quality-services/perform-knowledge-discovery.md), [Administrar un dominio](../../2014/data-quality-services/managing-a-domain.md), [Administrar un dominio compuesto](../../2014/data-quality-services/managing-a-composite-domain.md), [Crear una directiva de coincidencia](../../2014/data-quality-services/create-a-matching-policy.md), [Limpieza de datos](../../2014/data-quality-services/data-cleansing.md) o [Coincidencia de datos](../../2014/data-quality-services/data-matching.md).
| 71.64 | 789 | 0.752838 | spa_Latn | 0.980983 |
47228de4c5f79935dbf8aac9f1265ea705ac5f1d | 3,582 | md | Markdown | articles/virtual-machines/windows/infrastructure-example.md | nobrainn/azure-docs.ko-kr | f0d3f961756a9af5b87f8f084d0889b03d0e3db6 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/virtual-machines/windows/infrastructure-example.md | nobrainn/azure-docs.ko-kr | f0d3f961756a9af5b87f8f084d0889b03d0e3db6 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | articles/virtual-machines/windows/infrastructure-example.md | nobrainn/azure-docs.ko-kr | f0d3f961756a9af5b87f8f084d0889b03d0e3db6 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: Azure 인프라 연습 예제
description: Azure에서 인프라 예제를 배포하기 위한 핵심 디자인 및 구현 지침에 대해 알아봅니다.
author: cynthn
ms.service: virtual-machines-windows
ms.workload: infrastructure-services
ms.topic: example-scenario
ms.date: 12/15/2017
ms.author: cynthn
ms.custom: H1Hack27Feb2017
ms.openlocfilehash: 43e96b891e60dfcf8bc3c29b202bb60213905372
ms.sourcegitcommit: 877491bd46921c11dd478bd25fc718ceee2dcc08
ms.translationtype: MT
ms.contentlocale: ko-KR
ms.lasthandoff: 07/02/2020
ms.locfileid: "81869462"
---
# <a name="example-azure-infrastructure-walkthrough-for-windows-vms"></a>Windows VM에 대한 Azure 인프라 연습 예제
이 문서에서는 예제 애플리케이션 인프라를 구축하는 과정을 안내합니다. 명명 규칙, 가용성 집합, 가상 네트워크 및 부하 분산 장치에 대한 모든 지침 및 결정 사항을 함께 제공하는 간단한 온라인 스토어용 인프라의 설계와 VM(가상 머신)의 실제 배포를 자세히 다룹니다.
## <a name="example-workload"></a>워크로드 예제
Adventure Works Cycles는 Azure에서 다음으로 구성된 온라인 스토어 애플리케이션을 구축하려고 합니다.
* 웹 계층에 있으며 클라이언트 프런트 엔드를 실행하는 두 IIS 서버
* 애플리케이션 계층에 있으며 데이터 및 주문을 처리하는 두 IIS 서버
* 데이터베이스 계층에 제품 데이터 및 주문을 저장하기 위한 AlwaysOn 가용성 그룹(두 SQL Server 및 주 노드 감시) 이 있는 두 개의 Microsoft SQL Server 인스턴스
* 인증 계층에 있는 고객 계정 및 공급자에 대한 두 Active Directory 도메인 컨트롤러
* 모든 서버는 다음 두 서브넷에 있습니다.
* 웹 서버에 대한 프런트 엔드 서브넷
* 애플리케이션 서버, SQL 클러스터 및 도메인 컨트롤러에 대한 백 엔드 서브넷

고객이 온라인 스토어를 검색할 때 들어오는 보안 웹 트래픽의 부하는 웹 서버 사이에서 분산되어야 합니다. 웹 서버에서 HTTP 요청 양식의 주문 처리 트래픽의 부하는 애플리케이션 서버 사이에서 부하 분산되어야 합니다. 또한 인프라는 고가용성을 위해 설계되어야 합니다.
결과로 나온 디자인 다음을 통합해야 합니다.
* Azure 구독 및 계정
* 단일 리소스 그룹
* Azure Managed Disks
* 두 서브넷을 사용하는 가상 네트워크
* 역할이 비슷한 VM에 대한 가용성 집합
* 가상 머신
위의 모든 사항은 명명 규칙을 따릅니다.
* Adventure Works Cycles는 **[IT 작업]-[위치]-[Azure 리소스]** 를 접두사로 사용합니다.
* 이 예의 경우 "**azos**" (Azure 온라인 저장소)는 IT 워크 로드 이름이 고 "**USE**" (미국 동부 2)는 위치입니다.
* 가상 네트워크는 AZOS-USE-VN **[숫자]** 를 사용합니다.
* 가용성 집합은 azos-use-as-**[역할]** 을 사용합니다.
* 가상 머신 이름은 azos-use-vm-**[VM 이름]** 을 사용합니다.
## <a name="azure-subscriptions-and-accounts"></a>Azure 구독 및 계정
Adventure Works Cycles는 이 IT 작업에 대한 청구를 제공하기 위해 Adventure Works Enterprise Subscription이라는 엔터프라이즈 구독을 사용합니다.
## <a name="storage"></a>스토리지
Adventure Works Cycles에서는 Azure Managed Disks를 사용해야 한다고 결정했습니다. VM을 만들 때 사용 가능한 두 스토리지 계층이 모두 사용됩니다.
* **Standard Storage** - 웹 서버, 애플리케이션 서버 및 도메인 컨트롤러와 해당 데이터 디스크
* **Premium Storage** - SQL Server VM과 해당 데이터 디스크
## <a name="virtual-network-and-subnets"></a>가상 네트워크 및 서브넷
가상 네트워크는 Adventure Work Cycles 온-프레미스 네트워크에 지속적인 연결이 필요하지 않기 때문에 클라우드 전용 가상 네트워크로 결정했습니다.
Azure 포털을 사용하여 다음 설정을 포함한 클라우드 전용 가상 네트워크를 만들 수 있습니다.
* 이름: AZOS-USE-VN01
* 위치: East US 2
* 가상 네트워크 주소 공간: 10.0.0.0/8
* 첫 번째 서브넷:
* 이름: FrontEnd
* 주소 공간: 10.0.1.0/24
* 두 번째 서브넷:
* 이름: BackEnd
* 주소 공간: 10.0.2.0/24
## <a name="availability-sets"></a>가용성 집합
온라인 스토어에서 모든 네 개 계층의 고가용성을 유지하기 위해 Adventure Works Cycles는 다음과 같은 네 개의 가용성 집합으로 결정했습니다.
* **azos-use-as-web**
* **azos-use-as-app**
* **azos-use-as-sql**
* **azos-use-as-dc**
## <a name="virtual-machines"></a>가상 머신
Adventure Works Cycles는 Azure VM에 대해 다음 이름을 결정했습니다.
* **azos-use-vm-web01**
* **azos-use-vm-web02**
* 첫 번째 애플리케이션 서버용 **azos-use-vm-app01**
* 두 번째 애플리케이션 서버용 **azos-use-vm-app02**
* **azfae-use-vm-sql01**
* **azfae-use-vm-sql02**
* **azos-use-vm-dc01**
* **azos-use-vm-dc02**
다음은 결과 구성입니다.

이 구성은 다음을 통합합니다.
* 두 서브넷을 사용하는 클라우드 전용 가상 네트워크(프런트 엔드 및 백 엔드)
* Standard 디스크와 Premium 디스크가 둘 다 있는 Azure Managed Disks
* 네 개의 가용성 집합, 온라인 스토어의 각 계층마다 한 개
* 네 계층에 대한 가상 머신
* 인터넷에서 웹 서버 간 HTTPS 기반 웹 트래픽에 대한 외부 부하 분산 집합
* 웹 서버에서 애플리케이션 서버 간 암호화되지 않은 웹 트래픽에 대한 내부 부하 분산 집합
* 단일 리소스 그룹
| 32.563636 | 149 | 0.70268 | kor_Hang | 1.00001 |
47232cc63dfe65b0de34ed8a6e3d17eac107203b | 2,950 | md | Markdown | README.md | accowa/n01-pages | 4b232e63dac5ff63fa0cf07bfb10155b70a26079 | [
"CC-BY-4.0"
] | 2 | 2021-03-05T16:35:14.000Z | 2021-03-05T21:54:03.000Z | README.md | accowa/n01-pages | 4b232e63dac5ff63fa0cf07bfb10155b70a26079 | [
"CC-BY-4.0"
] | null | null | null | README.md | accowa/n01-pages | 4b232e63dac5ff63fa0cf07bfb10155b70a26079 | [
"CC-BY-4.0"
] | null | null | null | # ARCHER2: n01 Oceans and Shelf Seas Consortium supplementry documentation
ARCHER2 is the next generation UK National Supercomputing Service. You
can find more information on the service and the research it supports on
the [ARCHER2 website](https://www.archer2.ac.uk).
This repository contains additional documentation for the NERC, n01 Oceans and
Shelf Seas consortium and is linked to a rendered version currently hosted on
Github:[n01-pages](https://accowa.github.io/n01-pages). This material includes a
section on using NEMO on ARCHER2 which is duplicated in the official [ARCHER2
documention prepared by EPCC, The University of
Edinburgh](https://docs.archer2.ac.uk). The rest of the material, here, is
supplementry and based entirely on the personal experiences of members of the
consortium. It is intended to provide guides and tips rather than rigid
methodologies. As such it is likely to evolve more rapidly than the official
documentation which should always be consulted first for information on wider
aspects of the service.
## How to contribute
We welcome contributions that improve the documentation from the n01
consortium. Contributions can take the form of
improved or additional content and/or Issues that identify problems or
opportunities for improvement.
To contribute content to this documentation, first you have to fork it
on GitHub and clone it to your machine, see [Fork a
Repo](https://help.github.com/articles/fork-a-repo/) for the GitHub
documentation on this process.
Once you have the git repository locally on your computer, you will need to [install
Material for mkdocs](https://squidfunk.github.io/mkdocs-material/getting-started/) to
be able to build the documentation. This can be done using a local installation or
using a Docker container.
Once you have made your changes and updated your Fork on GitHub you will
need to [Open a Pull
Request](https://help.github.com/articles/using-pull-requests/).
### Building the documentation on a local machine
Once Material for mkdocs is installed, you can preview the site locally using the
[instructions in the Material for mkdocs documentation](https://squidfunk.github.io/mkdocs-material/creating-your-site/#previewing-as-you-write).
## Making changes and style guide
The documentation consists of a series of Markdown files which have the `.md`
extension. These files are then automatically converted to HTMl and
combined into the web version of the documentation by mkdocs. It is
important that when editing the files the syntax of the Markdown files is
followed. If there are any errors in your changes the build will fail
and the documentation will not update, you can test your build locally
by running `mkdocs serve`. The easiest way to learn what files should look
like is to read the Markdown files already in the repository.
For a guide on the rst file format see
[this](http://thomas-cokelaer.info/tutorials/sphinx/rest_syntax.html)
document.
| 47.580645 | 145 | 0.804068 | eng_Latn | 0.998794 |
472394490fe88f1e9944353ee5a29f92a7f1f416 | 262 | md | Markdown | particles_meta_texture/README.md | naru-jpn/metal-drawings | 4aa274f31b44a85294c4926ddd96e6183e578e84 | [
"MIT"
] | 3 | 2020-06-18T18:34:58.000Z | 2021-01-12T13:08:22.000Z | particles_meta_texture/README.md | naru-jpn/metal-drawings | 4aa274f31b44a85294c4926ddd96e6183e578e84 | [
"MIT"
] | null | null | null | particles_meta_texture/README.md | naru-jpn/metal-drawings | 4aa274f31b44a85294c4926ddd96e6183e578e84 | [
"MIT"
] | null | null | null | # particles_meta_texture
Draw each particles with mipmapped texture. Image of texture is made from 2d gausiann distribution.
<kbd><img src="https://user-images.githubusercontent.com/5572875/86887282-44cd0080-c133-11ea-8903-d05a490e59cb.gif" width="350"></kbd>
| 43.666667 | 134 | 0.801527 | eng_Latn | 0.598612 |
4724434d7f2c13c166ef7ae274337af077147798 | 14,437 | md | Markdown | common-data-model/schema/core/operationsCommon/Tables/SupplyChain/Inventory/Main/WHSRFMenuItemCycleCount.md | billgib/common-data-model-and-service | 656ad3ac9b3928f0047d84d98bb627990cf71f62 | [
"CC-BY-4.0",
"MIT"
] | 38 | 2018-12-18T08:52:40.000Z | 2022-03-24T10:48:19.000Z | common-data-model/schema/core/operationsCommon/Tables/SupplyChain/Inventory/Main/WHSRFMenuItemCycleCount.md | billgib/common-data-model-and-service | 656ad3ac9b3928f0047d84d98bb627990cf71f62 | [
"CC-BY-4.0",
"MIT"
] | 92 | 2018-12-19T18:09:46.000Z | 2022-03-08T12:54:59.000Z | common-data-model/schema/core/operationsCommon/Tables/SupplyChain/Inventory/Main/WHSRFMenuItemCycleCount.md | billgib/common-data-model-and-service | 656ad3ac9b3928f0047d84d98bb627990cf71f62 | [
"CC-BY-4.0",
"MIT"
] | 59 | 2019-02-01T19:59:42.000Z | 2022-03-28T21:14:55.000Z | ---
title: WHSRFMenuItemCycleCount in Main - Common Data Model | Microsoft Docs
description: undefined
author: llawwaii
ms.service: common-data-model
ms.reviewer: deonhe
ms.topic: article
ms.date: 8/7/2020
ms.author: weiluo
---
# Mobile device cycle counting in Main(WHSRFMenuItemCycleCount)
Latest version of the JSON entity definition is available on <a href="https://github.com/Microsoft/CDM/tree/master/schemaDocuments/core/operationsCommon/Tables/SupplyChain/Inventory/Main/WHSRFMenuItemCycleCount.cdm.json" target="_blank">GitHub</a>.
## Traits
<details>
<summary>Traits for this entity are listed below.
</summary>
**is.identifiedBy**
names a specifc identity attribute to use with an entity <table><tr><th>Parameter</th><th>Value</th><th>Data type</th><th>Explanation</th></tr><tr><td>attribute</td><td>[WHSRFMenuItemCycleCount/(resolvedAttributes)/RecId](#RecId)</td><td>attribute</td><td></td></tr></table>
**is.CDM.entityVersion**
<table><tr><th>Parameter</th><th>Value</th><th>Data type</th><th>Explanation</th></tr><tr><td>versionNumber</td><td>"1.1"</td><td>string</td><td>semantic version number of the entity</td></tr></table>
**is.application.releaseVersion**
<table><tr><th>Parameter</th><th>Value</th><th>Data type</th><th>Explanation</th></tr><tr><td>releaseVersion</td><td>"10.0.13.0"</td><td>string</td><td>semantic version number of the application introducing this entity</td></tr></table>
**is.localized.displayedAs**
Holds the list of language specific display text for an object. <table><tr><th>Parameter</th><th>Value</th><th>Data type</th><th>Explanation</th></tr><tr><td>localizedDisplayText</td><td><table><tr><th>languageTag</th><th>displayText</th></tr><tr><td>en</td><td>Mobile device cycle counting</td></tr></table></td><td>entity</td><td>a reference to the constant entity holding the list of localized text</td></tr></table>
</details>
## Attributes
|Name|Description|First Included in Instance|
|---|---|---|
|[RecId](#RecId)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[BlindBatch](#BlindBatch)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[BlindItem](#BlindItem)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[BlindLP](#BlindLP)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[BlindSerial](#BlindSerial)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[MenuItemName](#MenuItemName)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[NumRetries](#NumRetries)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[DisplayReasonCode](#DisplayReasonCode)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[EditReasonCode](#EditReasonCode)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[DefaultCountingReasonCode](#DefaultCountingReasonCode)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[DataAreaId](#DataAreaId)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[Relationship_WHSRFMenuItemTableRelationshipId](#Relationship_WHSRFMenuItemTableRelationshipId)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[Relationship_InventCountingReasonCodeRelationshipId](#Relationship_InventCountingReasonCodeRelationshipId)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
|[Relationship_CompanyRelationshipId](#Relationship_CompanyRelationshipId)||<a href="WHSRFMenuItemCycleCount.md" target="_blank">Main/WHSRFMenuItemCycleCount</a>|
### <a href=#RecId name="RecId">RecId</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>isPrimaryKey</td><td>true</td></tr><tr><td>dataFormat</td><td>int64</td></tr><tr><td>isReadOnly</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the RecId attribute are listed below.</summary>
**is.dataFormat.integer**
**is.dataFormat.big**
**is.identifiedBy**
names a specific identity attribute to use with an entity <table><tr><th>Parameter</th><th>Value</th><th>Data type</th><th>Explanation</th></tr><tr><td>attribute</td><td>[WHSRFMenuItemCycleCount/(resolvedAttributes)/RecId](#RecId)</td><td>attribute</td><td></td></tr></table>
**is.readOnly**
**is.dataFormat.integer**
**is.dataFormat.big**
</details>
### <a href=#BlindBatch name="BlindBatch">BlindBatch</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>int32</td></tr><tr><td>isNullable</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the BlindBatch attribute are listed below.</summary>
**is.dataFormat.integer**
**is.nullable**
The attribute value may be set to NULL.
**is.dataFormat.integer**
</details>
### <a href=#BlindItem name="BlindItem">BlindItem</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>int32</td></tr><tr><td>isNullable</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the BlindItem attribute are listed below.</summary>
**is.dataFormat.integer**
**is.nullable**
The attribute value may be set to NULL.
**is.dataFormat.integer**
</details>
### <a href=#BlindLP name="BlindLP">BlindLP</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>int32</td></tr><tr><td>isNullable</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the BlindLP attribute are listed below.</summary>
**is.dataFormat.integer**
**is.nullable**
The attribute value may be set to NULL.
**is.dataFormat.integer**
</details>
### <a href=#BlindSerial name="BlindSerial">BlindSerial</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>int32</td></tr><tr><td>isNullable</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the BlindSerial attribute are listed below.</summary>
**is.dataFormat.integer**
**is.nullable**
The attribute value may be set to NULL.
**is.dataFormat.integer**
</details>
### <a href=#MenuItemName name="MenuItemName">MenuItemName</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>string</td></tr></table>
#### Traits
<details>
<summary>List of traits for the MenuItemName attribute are listed below.</summary>
**is.dataFormat.character**
**is.dataFormat.big**
**is.dataFormat.array**
**is.dataFormat.character**
**is.dataFormat.array**
</details>
### <a href=#NumRetries name="NumRetries">NumRetries</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>int32</td></tr><tr><td>isNullable</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the NumRetries attribute are listed below.</summary>
**is.dataFormat.integer**
**is.nullable**
The attribute value may be set to NULL.
**is.dataFormat.integer**
</details>
### <a href=#DisplayReasonCode name="DisplayReasonCode">DisplayReasonCode</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>int32</td></tr><tr><td>isNullable</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the DisplayReasonCode attribute are listed below.</summary>
**is.dataFormat.integer**
**is.nullable**
The attribute value may be set to NULL.
**is.dataFormat.integer**
</details>
### <a href=#EditReasonCode name="EditReasonCode">EditReasonCode</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>int32</td></tr><tr><td>isNullable</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the EditReasonCode attribute are listed below.</summary>
**is.dataFormat.integer**
**is.nullable**
The attribute value may be set to NULL.
**is.dataFormat.integer**
</details>
### <a href=#DefaultCountingReasonCode name="DefaultCountingReasonCode">DefaultCountingReasonCode</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>string</td></tr><tr><td>isNullable</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the DefaultCountingReasonCode attribute are listed below.</summary>
**is.dataFormat.character**
**is.dataFormat.big**
**is.dataFormat.array**
**is.nullable**
The attribute value may be set to NULL.
**is.dataFormat.character**
**is.dataFormat.array**
</details>
### <a href=#DataAreaId name="DataAreaId">DataAreaId</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>string</td></tr><tr><td>isReadOnly</td><td>true</td></tr></table>
#### Traits
<details>
<summary>List of traits for the DataAreaId attribute are listed below.</summary>
**is.dataFormat.character**
**is.dataFormat.big**
**is.dataFormat.array**
**is.readOnly**
**is.dataFormat.character**
**is.dataFormat.array**
</details>
### <a href=#Relationship_WHSRFMenuItemTableRelationshipId name="Relationship_WHSRFMenuItemTableRelationshipId">Relationship_WHSRFMenuItemTableRelationshipId</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>guid</td></tr></table>
#### Traits
<details>
<summary>List of traits for the Relationship_WHSRFMenuItemTableRelationshipId attribute are listed below.</summary>
**is.dataFormat.character**
**is.dataFormat.big**
**is.dataFormat.array**
**is.dataFormat.guid**
**means.identity.entityId**
**is.linkedEntity.identifier**
Marks the attribute(s) that hold foreign key references to a linked (used as an attribute) entity. This attribute is added to the resolved entity to enumerate the referenced entities. <table><tr><th>Parameter</th><th>Value</th><th>Data type</th><th>Explanation</th></tr><tr><td>entityReferences</td><td><table><tr><th>entityReference</th><th>attributeReference</th></tr><tr><td><a href="WHSRFMenuItemTable.md" target="_blank">/core/operationsCommon/Tables/SupplyChain/Inventory/Main/WHSRFMenuItemTable.cdm.json/WHSRFMenuItemTable</a></td><td><a href="WHSRFMenuItemTable.md#RecId" target="_blank">RecId</a></td></tr></table></td><td>entity</td><td>a reference to the constant entity holding the list of entity references</td></tr></table>
**is.dataFormat.guid**
**is.dataFormat.character**
**is.dataFormat.array**
</details>
### <a href=#Relationship_InventCountingReasonCodeRelationshipId name="Relationship_InventCountingReasonCodeRelationshipId">Relationship_InventCountingReasonCodeRelationshipId</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>guid</td></tr></table>
#### Traits
<details>
<summary>List of traits for the Relationship_InventCountingReasonCodeRelationshipId attribute are listed below.</summary>
**is.dataFormat.character**
**is.dataFormat.big**
**is.dataFormat.array**
**is.dataFormat.guid**
**means.identity.entityId**
**is.linkedEntity.identifier**
Marks the attribute(s) that hold foreign key references to a linked (used as an attribute) entity. This attribute is added to the resolved entity to enumerate the referenced entities. <table><tr><th>Parameter</th><th>Value</th><th>Data type</th><th>Explanation</th></tr><tr><td>entityReferences</td><td><table><tr><th>entityReference</th><th>attributeReference</th></tr><tr><td><a href="InventCountingReasonCode.md" target="_blank">/core/operationsCommon/Tables/SupplyChain/Inventory/Main/InventCountingReasonCode.cdm.json/InventCountingReasonCode</a></td><td><a href="InventCountingReasonCode.md#RecId" target="_blank">RecId</a></td></tr></table></td><td>entity</td><td>a reference to the constant entity holding the list of entity references</td></tr></table>
**is.dataFormat.guid**
**is.dataFormat.character**
**is.dataFormat.array**
</details>
### <a href=#Relationship_CompanyRelationshipId name="Relationship_CompanyRelationshipId">Relationship_CompanyRelationshipId</a>
First included in: Main/WHSRFMenuItemCycleCount (this entity)
#### Properties
<table><tr><th>Name</th><th>Value</th></tr><tr><td>dataFormat</td><td>guid</td></tr></table>
#### Traits
<details>
<summary>List of traits for the Relationship_CompanyRelationshipId attribute are listed below.</summary>
**is.dataFormat.character**
**is.dataFormat.big**
**is.dataFormat.array**
**is.dataFormat.guid**
**means.identity.entityId**
**is.linkedEntity.identifier**
Marks the attribute(s) that hold foreign key references to a linked (used as an attribute) entity. This attribute is added to the resolved entity to enumerate the referenced entities. <table><tr><th>Parameter</th><th>Value</th><th>Data type</th><th>Explanation</th></tr><tr><td>entityReferences</td><td><table><tr><th>entityReference</th><th>attributeReference</th></tr><tr><td><a href="../../../Finance/Ledger/Main/CompanyInfo.md" target="_blank">/core/operationsCommon/Tables/Finance/Ledger/Main/CompanyInfo.cdm.json/CompanyInfo</a></td><td><a href="../../../Finance/Ledger/Main/CompanyInfo.md#RecId" target="_blank">RecId</a></td></tr></table></td><td>entity</td><td>a reference to the constant entity holding the list of entity references</td></tr></table>
**is.dataFormat.guid**
**is.dataFormat.character**
**is.dataFormat.array**
</details>
| 40.102778 | 762 | 0.728129 | yue_Hant | 0.387167 |
4725c6dfc8f620c020f615ff300bfcef4e87cffa | 3,147 | md | Markdown | docs/tutorials/contributing.md | camkerr/OpenTimelineIO | d1e9c087036d139ccb610af6f61ff52de3f8433f | [
"Apache-2.0"
] | 1,021 | 2017-07-29T05:50:20.000Z | 2022-03-28T16:53:28.000Z | docs/tutorials/contributing.md | camkerr/OpenTimelineIO | d1e9c087036d139ccb610af6f61ff52de3f8433f | [
"Apache-2.0"
] | 987 | 2017-08-01T17:14:57.000Z | 2022-03-31T22:49:03.000Z | docs/tutorials/contributing.md | camkerr/OpenTimelineIO | d1e9c087036d139ccb610af6f61ff52de3f8433f | [
"Apache-2.0"
] | 233 | 2017-07-28T23:27:10.000Z | 2022-03-31T10:40:35.000Z | # Contributing
We're excited to collaborate with the community and look forward to the many improvements you can make to OpenTimelineIO!
## Contributor License Agreement
Before contributing code to OpenTimelineIO, we ask that you sign a Contributor License Agreement (CLA). At the root of the repo you can find the two possible CLAs:
* [OTIO_CLA_Corporate.pdf](https://github.com/PixarAnimationStudios/OpenTimelineIO/raw/main/OTIO_CLA_Corporate.pdf): please sign this one for corporate use
* [OTIO_CLA_Individual.pdf](https://github.com/PixarAnimationStudios/OpenTimelineIO/raw/main/OTIO_CLA_Individual.pdf): please sign this one if you're an individual contributor
Once your CLA is signed, send it to `[email protected]` (please make sure to include your github username) and wait for confirmation that we've received it. After that, you can submit pull requests.
## Coding Conventions
Please follow the coding convention and style in each file and in each library when adding new files.
## Platform Support Policy
As recommended by the [VFX Platform](https://vfxplatform.com) (see "Support Guidance"), we support the intended calendar year of the release as well as the three prior years.
## Git Workflow
Here is the workflow we recommend for working on OpenTimelineIO if you intend on contributing changes back:
Post an issue on github to let folks know about the feature or bug that you found, and mention that you intend to work on it. That way, if someone else is working on a similar project, you can collaborate, or you can get early feedback which can sometimes save time.
Use the github website to fork your own private repository.
Clone your fork to your local machine, like this:
```bash
git clone https://github.com/you/OpenTimelineIO.git
```
Add the primary OpenTimelineIO repo as upstream to make it easier to update your remote and local repos with the latest changes:
```bash
cd OpenTimelineIO
git remote add upstream https://github.com/PixarAnimationStudios/OpenTimelineIO.git
```
Now you fetch the latest changes from the OpenTimelineIO repo like this:
```bash
git fetch upstream
git merge upstream/main
```
All the development should happen against the `main` branch. We recommend you create a new branch for each feature or fix that you'd like to make and give it a descriptive name so that you can remember it later. You can checkout a new branch and create it simultaneously like this:
```bash
git checkout -b mybugfix upstream/main
```
Now you can work in your branch locally.
Once you are happy with your change, you can verify that the change didn't cause tests failures by running tests like this:
```bash
make test
make lint
```
If all the tests pass and you'd like to send your change in for consideration, push it to your remote repo:
```bash
git push origin mybugfix
```
Now your remote branch will have your `mybugfix` branch, which you can now pull request (to OpenTimelineIO's `main` branch) using the github UI.
Please make sure that your pull requests are clean. Use the rebase and squash git facilities as needed to ensure that the pull request is as clean as possible.
| 43.708333 | 283 | 0.786146 | eng_Latn | 0.998353 |
4726074e0af7bd34c9c8fff8f059dff705ebc6c6 | 119 | md | Markdown | README.md | grassmoon/tryonglasses | 2d40be319885d7dac83faeb7fec7060c74810018 | [
"Apache-2.0"
] | null | null | null | README.md | grassmoon/tryonglasses | 2d40be319885d7dac83faeb7fec7060c74810018 | [
"Apache-2.0"
] | null | null | null | README.md | grassmoon/tryonglasses | 2d40be319885d7dac83faeb7fec7060c74810018 | [
"Apache-2.0"
] | null | null | null | # tryonglasses
This is a project that uses OpenCV to recognize the face and eyes, and then overlays a glasses image on the eye region.
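A minimal sketch of how such a try-on pipeline is typically wired together with OpenCV's bundled Haar cascades is shown below; the glasses image and photo file names are illustrative assumptions, not assets shipped with this repository.

```python
# Illustrative only: detect a face and eyes with Haar cascades, then blend a
# (hypothetical) transparent glasses image over the detected eye region.
import cv2

face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
eye_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_eye.xml")
glasses = cv2.imread("glasses.png", cv2.IMREAD_UNCHANGED)  # RGBA overlay (assumed asset)

frame = cv2.imread("photo.jpg")                            # assumed input image
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

for (x, y, w, h) in face_cascade.detectMultiScale(gray, 1.3, 5):
    eyes = eye_cascade.detectMultiScale(gray[y:y + h, x:x + w])
    if len(eyes) < 2:
        continue
    # Span both detected eyes and scale the overlay to that width.
    ex = min(e[0] for e in eyes)
    ex2 = max(e[0] + e[2] for e in eyes)
    ey = min(e[1] for e in eyes)
    gw = ex2 - ex
    gh = int(glasses.shape[0] * gw / glasses.shape[1])
    y0, x0 = y + ey, x + ex
    if y0 + gh > frame.shape[0] or x0 + gw > frame.shape[1]:
        continue                                           # overlay would not fit
    overlay = cv2.resize(glasses, (gw, gh))
    alpha = overlay[:, :, 3:] / 255.0                      # alpha channel as 0..1
    region = frame[y0:y0 + gh, x0:x0 + gw]
    frame[y0:y0 + gh, x0:x0 + gw] = (alpha * overlay[:, :, :3] + (1 - alpha) * region).astype("uint8")

cv2.imwrite("result.jpg", frame)
```

A webcam loop would simply replace the single `cv2.imread` call with frames from `cv2.VideoCapture(0)`.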
| 29.75 | 102 | 0.781513 | eng_Latn | 0.999754 |
4726b927e7ae7faa67f6ca778858d91f6cbdb0fe | 15 | md | Markdown | README.md | luyang19930427/netgame | f57c8efec989e0c10856ef2de58036c32c144c1b | [
"Apache-2.0"
] | 1 | 2019-03-07T06:31:21.000Z | 2019-03-07T06:31:21.000Z | README.md | luyang19930427/netgame | f57c8efec989e0c10856ef2de58036c32c144c1b | [
"Apache-2.0"
] | null | null | null | README.md | luyang19930427/netgame | f57c8efec989e0c10856ef2de58036c32c144c1b | [
"Apache-2.0"
] | null | null | null | # netgame
网络游戏
| 5 | 9 | 0.733333 | eng_Latn | 0.733804 |
4726beb405f1614239f69c7b1efca96b5a3d2678 | 3,765 | md | Markdown | packages/aux/content/blog/2019-06-02/index.md | EmaSuriano/gatsby-example-decoupling | 871c8da382c47612cb877e70df26af43325d2834 | [
"MIT"
] | null | null | null | packages/aux/content/blog/2019-06-02/index.md | EmaSuriano/gatsby-example-decoupling | 871c8da382c47612cb877e70df26af43325d2834 | [
"MIT"
] | null | null | null | packages/aux/content/blog/2019-06-02/index.md | EmaSuriano/gatsby-example-decoupling | 871c8da382c47612cb877e70df26af43325d2834 | [
"MIT"
] | null | null | null | ---
title: How do you pronounce Axolotl?
author: johndoe
date: "2019-06-02"
image: ./hero.jpg
tags:
- Axolotl
---
Talking chamber as shewing an it minutes. Trees fully of blind do. Exquisite favourite at do extensive listening. Improve up musical welcome he. Gay attended vicinity prepared now diverted. Esteems it ye sending reached as. Longer lively her design settle tastes advice mrs off who.
Both rest of know draw fond post as. It agreement defective to excellent. Feebly do engage of narrow. Extensive repulsive belonging depending if promotion be zealously as. Preference inquietude ask now are dispatched led appearance. Small meant in so doubt hopes. Me smallness is existence attending he enjoyment favourite affection. Delivered is to ye belonging enjoyment preferred. Astonished and acceptance men two discretion. Law education recommend did objection how old.
Stronger unpacked felicity to of mistaken. Fanny at wrong table ye in. Be on easily cannot innate in lasted months on. Differed and and felicity steepest mrs age outweigh. Opinions learning likewise daughter now age outweigh. Raptures stanhill my greatest mistaken or exercise he on although. Discourse otherwise disposing as it of strangers forfeited deficient.
Prepared is me marianne pleasure likewise debating. Wonder an unable except better stairs do ye admire. His and eat secure sex called esteem praise. So moreover as speedily differed branched ignorant. Tall are her knew poor now does then. Procured to contempt oh he raptures amounted occasion. One boy assure income spirit lovers set.
Of friendship on inhabiting diminution discovered as. Did friendly eat breeding building few nor. Object he barton no effect played valley afford. Period so to oppose we little seeing or branch. Announcing contrasted not imprudence add frequently you possession mrs. Period saw his houses square and misery. Hour had held lain give yet.
Sussex result matter any end see. It speedily me addition weddings vicinity in pleasure. Happiness commanded an conveying breakfast in. Regard her say warmly elinor. Him these are visit front end for seven walls. Money eat scale now ask law learn. Side its they just any upon see last. He prepared no shutters perceive do greatest. Ye at unpleasant solicitude in companions interested.
An so vulgar to on points wanted. Not rapturous resolving continued household northward gay. He it otherwise supported instantly. Unfeeling agreeable suffering it on smallness newspaper be. So come must time no as. Do on unpleasing possession as of unreserved. Yet joy exquisite put sometimes enjoyment perpetual now. Behind lovers eat having length horses vanity say had its.
Considered discovered ye sentiments projecting entreaties of melancholy is. In expression an solicitude principles in do. Hard do me sigh with west same lady. Their saved linen downs tears son add music. Expression alteration entreaties mrs can terminated estimating. Her too add narrow having wished. To things so denied admire. Am wound worth water he linen at vexed.
Fat new smallness few supposing suspicion two. Course sir people worthy horses add entire suffer. How one dull get busy dare far. At principle perfectly by sweetness do. As mr started arrival subject by believe. Strictly numerous outlived kindness whatever on we no on addition.
Old unsatiable our now but considered travelling impression. In excuse hardly summer in basket misery. By rent an part need. At wrong of of water those linen. Needed oppose seemed how all. Very mrs shed shew gave you. Oh shutters do removing reserved wandered an. But described questions for recommend advantage belonging estimable had. Pianoforte reasonable as so am inhabiting. Chatty design remark and his abroad figure but its.
| 129.827586 | 476 | 0.814608 | eng_Latn | 0.999051 |
47274a1707bd2608072b0d0dcbad76bc6042aa14 | 1,119 | md | Markdown | README.md | gustavors22/knapsack-genetic-algorithm | 08f1f45e464ec94b50ebc7a866089511d2136e43 | [
"MIT"
] | null | null | null | README.md | gustavors22/knapsack-genetic-algorithm | 08f1f45e464ec94b50ebc7a866089511d2136e43 | [
"MIT"
] | null | null | null | README.md | gustavors22/knapsack-genetic-algorithm | 08f1f45e464ec94b50ebc7a866089511d2136e43 | [
"MIT"
] | null | null | null | <h2>What is Genetic algorithms ?</h2>
A genetic algorithm is a search heuristic that is inspired by Charles Darwin’s theory of natural evolution. This algorithm reflects the process of natural selection where the fittest individuals are selected for reproduction in order to produce offspring of the next generation.
<a href='https://pastmike.com/what-is-a-genetic-algorithm/'>
<img src='https://pastmike.com/wp-content/uploads/2018/08/genetic.png' >
</a>
<h2>Knapsack problem</h2>
The knapsack problem is a combinatorial optimization problem. The name comes from modelling a situation in which a backpack must be filled with objects of different weights and values. The goal is to fill the backpack with the highest possible total value without exceeding the maximum weight.
<a href='https://medium.com/bigdatarepublic/genetic-algorithms-in-practice-63bcdc552fbf'>
<img src='https://miro.medium.com/max/682/0*Um3SJ8TMyxZSRZjY.png'>
</a>
For more details see [Knapsack](https://gustavors22.github.io/knapsack-genetic-algorithm/).
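As a concrete illustration of how the pieces fit together, a compact genetic algorithm for the 0/1 knapsack problem looks roughly like the sketch below; the weights, values and GA parameters are made-up toy numbers, not the ones used in this repository.

```python
# Illustrative 0/1-knapsack genetic algorithm (toy data, not this repo's code).
import random

WEIGHTS = [12, 7, 11, 8, 9]
VALUES = [24, 13, 23, 15, 16]
CAPACITY = 26
POP_SIZE, GENERATIONS, MUTATION_RATE = 20, 100, 0.05

def fitness(chromosome):
    weight = sum(w for w, bit in zip(WEIGHTS, chromosome) if bit)
    value = sum(v for v, bit in zip(VALUES, chromosome) if bit)
    return value if weight <= CAPACITY else 0   # penalise overweight solutions

def select(population):
    # Tournament selection: keep the fitter of two random individuals.
    return max(random.sample(population, 2), key=fitness)

def crossover(a, b):
    point = random.randint(1, len(a) - 1)        # single-point crossover
    return a[:point] + b[point:]

def mutate(chromosome):
    return [1 - bit if random.random() < MUTATION_RATE else bit for bit in chromosome]

population = [[random.randint(0, 1) for _ in WEIGHTS] for _ in range(POP_SIZE)]
for _ in range(GENERATIONS):
    population = [mutate(crossover(select(population), select(population)))
                  for _ in range(POP_SIZE)]

best = max(population, key=fitness)
print(best, fitness(best))
```

Elitism (carrying the best individual over unchanged) is a common tweak if the best fitness fluctuates between generations.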
# Tutorial
1) Download the source code
2) Run `python main.py`
| 38.586207 | 302 | 0.774799 | eng_Latn | 0.955013 |
47274e51faaa161ae41a48bc74370ebe6c75d496 | 4,834 | md | Markdown | assets/docs/jeongukjae/models/distilbert_multi_cased_L-6_H-768_A-12/1.md | AdityaKane2001/tfhub.dev | ee43536536ad1090ee2e296d58eacc6e3eaa0926 | [
"Apache-2.0"
] | 2 | 2021-10-02T17:21:42.000Z | 2021-11-08T12:44:36.000Z | assets/docs/jeongukjae/models/distilbert_multi_cased_L-6_H-768_A-12/1.md | AdityaKane2001/tfhub.dev | ee43536536ad1090ee2e296d58eacc6e3eaa0926 | [
"Apache-2.0"
] | null | null | null | assets/docs/jeongukjae/models/distilbert_multi_cased_L-6_H-768_A-12/1.md | AdityaKane2001/tfhub.dev | ee43536536ad1090ee2e296d58eacc6e3eaa0926 | [
"Apache-2.0"
] | null | null | null | # Module jeongukjae/distilbert_multi_cased_L-6_H-768_A-12/1
A small, fast, cheap and light Transformer model trained by distilling multilingual BERT base cased model.
<!-- asset-path: https://storage.googleapis.com/jeongukjae-tf-models/distilbert/distilbert-base-multilingual-cased.tar.gz -->
<!-- network-architecture: transformer -->
<!-- task: text-embedding -->
<!-- license: apache-2.0 -->
<!-- fine-tunable: true -->
<!-- format: saved_model_2 -->
<!-- language: ar -->
<!-- language: bn -->
<!-- language: bg -->
<!-- language: ca -->
<!-- language: zh-cn -->
<!-- language: zh-tw -->
<!-- language: da -->
<!-- language: en -->
<!-- language: et -->
<!-- language: fi -->
<!-- language: fr -->
<!-- language: de -->
<!-- language: el -->
<!-- language: he -->
<!-- language: hi -->
<!-- language: id -->
<!-- language: it -->
<!-- language: ja -->
<!-- language: ko -->
<!-- language: nl -->
<!-- language: no -->
<!-- language: pl -->
<!-- language: pt -->
<!-- language: ro -->
<!-- language: ru -->
<!-- language: es -->
<!-- language: sv -->
<!-- language: ta -->
<!-- language: tr -->
<!-- language: uk -->
<!-- language: ur -->
<!-- language: vi -->
## Overview
This model is a tensorflow conversion of [`distilbert-base-multilingual-cased`](https://huggingface.co/distilbert-base-multilingual-cased) from the HuggingFace model hub. It is exported as TF SavedModel in [this repository(jeongukjae/huggingface-to-tfhub)](https://github.com/jeongukjae/huggingface-to-tfhub). For more descriptions or training details, you can check [the model card in HuggingFace model hub](https://huggingface.co/distilbert-base-multilingual-cased).
## Model Size
| Model | Total # params |
| ------------------------------------- | -------------: |
| bert_multi_cased_L-12_H-768_A-12 | 178M |
| distilbert_multi_cased_L-6_H-768_A-12 | 135M |
## Example Use
You can use this model with an interface that is almost identical to bert's in tfhub.
For example, you can define a text embedding model with below code.
```python
# define a text embedding model
text_input = tf.keras.layers.Input(shape=(), dtype=tf.string)
preprocessor = hub.KerasLayer("https://tfhub.dev/jeongukjae/distilbert_multi_cased_preprocess/2")
encoder_inputs = preprocessor(text_input)
encoder = hub.KerasLayer("https://tfhub.dev/jeongukjae/distilbert_multi_cased_L-6_H-768_A-12/1", trainable=True)
encoder_outputs = encoder(encoder_inputs)
pooled_output = encoder_outputs["pooled_output"] # [batch_size, 768].
sequence_output = encoder_outputs["sequence_output"] # [batch_size, seq_length, 768].
model = tf.keras.Model(encoder_inputs, pooled_output)
# You can embed your sentences as follows
sentences = tf.constant(["(your text here)"])
print(embedding_model(sentences))
```
### Build model for multi text inputs
If you want a model for multi text inputs (i.e. fine-tuning with nli datasets), you can build as follows.
```python
preprocessor = hub.load("https://tfhub.dev/jeongukjae/distilbert_multi_cased_preprocess/2")
tokenize = hub.KerasLayer(preprocessor.tokenize)
bert_pack_inputs = hub.KerasLayer(preprocessor.bert_pack_inputs)
encoder = hub.KerasLayer("https://tfhub.dev/jeongukjae/distilbert_multi_cased_L-6_H-768_A-12/1", trainable=True)
text_inputs = [
tf.keras.layers.Input(shape=(), dtype=tf.string),
tf.keras.layers.Input(shape=(), dtype=tf.string),
]
tokenized_inputs = [tokenize(segment) for segment in text_inputs]
encoder_inputs = bert_pack_inputs(tokenized_inputs)
encoder_outputs = encoder(encoder_inputs)
pooled_output = encoder_outputs["pooled_output"] # [batch_size, 768].
sequence_output = encoder_outputs["sequence_output"] # [batch_size, seq_length, 768].
model = tf.keras.Model(encoder_inputs, pooled_output)
# You can pass your sentences as follows
hypotheses = tf.constant(["(your hypothesis text here)"])
premises = tf.constant(["(your premise text here)"])
print(embedding_model([hypotheses, premises]))
```
## Output details
The outputs of this model are a dict, and each entries are as follows:
- `"pooled_output"`: pooled output of the entire sequence with shape `[batch size, hidden size(768 for this model)]`. You can use this output as the sentence representation.
- `"sequence_output"`: representations of every token in the input sequence with shape `[batch size, max sequence length, hidden size(768)]`.
- `"encoder_outputs"`: A list of 6 tensors of shapes are `[batch size, sequence length, hidden size(768)]` with the outputs of the i-th Transformer block.
## References
- [DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter](https://arxiv.org/abs/1910.01108)
- [`distilbert-base-multilingual-cased` Model card in HuggingFace model hub](https://huggingface.co/distilbert-base-multilingual-cased)
| 40.283333 | 468 | 0.703351 | eng_Latn | 0.68057 |
47275a0c32768e02f267aa77f51d69d9c9fee9ab | 448 | md | Markdown | README.md | rfernandezdo/arm-ttk | a2d89076a3d2bb3100919f94614ba8b3d76450b1 | [
"MIT"
] | null | null | null | README.md | rfernandezdo/arm-ttk | a2d89076a3d2bb3100919f94614ba8b3d76450b1 | [
"MIT"
] | null | null | null | README.md | rfernandezdo/arm-ttk | a2d89076a3d2bb3100919f94614ba8b3d76450b1 | [
"MIT"
] | null | null | null | # Azure Resource Manager Template Toolkit action
This action "[use Azure Resource Manager Template Toolkit (arm-ttk)](https://github.com/Azure/arm-ttk)" for analyzing and testing Azure Resource Manager Templates.
It's my first GitHub Action and it's based on https://github.com/whaakman/armttk-github-action-demo
## Inputs
None
## Outputs
### `TestFailures`
Test Failures
## Example usage
```yaml
uses: rfernandezdo/arm-ttk@master
``` | 21.333333 | 163 | 0.747768 | eng_Latn | 0.410681 |
47283fb336750f525d17793b22ad0c3a3cb52732 | 666 | md | Markdown | README.md | sahincanfx/sahincan-discord-moderation-bot | f30445be06c9ed0e810bb452b3e88bbd58a7bd9e | [
"MIT"
] | 13 | 2022-01-09T08:33:57.000Z | 2022-01-29T18:03:09.000Z | README.md | sahincanfx/sahincan-discord-moderation-bot | f30445be06c9ed0e810bb452b3e88bbd58a7bd9e | [
"MIT"
] | null | null | null | README.md | sahincanfx/sahincan-discord-moderation-bot | f30445be06c9ed0e810bb452b3e88bbd58a7bd9e | [
"MIT"
] | null | null | null | ## Bir sene önce Pain ekibinde kullandığım moderasyon botudur. Yeni sistemler çıktığı ve altyapı eskidiği için paylaşıyorum, kullandığım dönemde her hangi bir hata çıkarmamıştı kullanımıyla alakalı ve diğer şeylerle ilgili destek/yardım almak için aşağıya bırakacağım linklere tıklayıp bana ulaşabilirsiniz
# Sahincan
Projenin ücretli satılması veya başkası tarafından, başka bir ad ile dağıtılması kesinlikle yasaktır. Proje lisanslı bir projedir, bu tarz işlemlerde bulunanlar olur ise lisans aracılığı ile gerekli yasal yollara başvurulacaktır.
- [Sahincan](https://discord.com/users/853235926825435146)
- [Bulunduğum Sunucu](https://discord.gg/animekizi)
| 66.6 | 306 | 0.828829 | tur_Latn | 1.000005 |
472867f66da488b4cc8f40a46e86342415089f92 | 140 | md | Markdown | README.md | PacktPublishing/Photorealistic-3D-Nature-Environment-Creation-with-Blender | 991074c5dc24218d45f281fb5aeba697c8604a27 | [
"MIT"
] | null | null | null | README.md | PacktPublishing/Photorealistic-3D-Nature-Environment-Creation-with-Blender | 991074c5dc24218d45f281fb5aeba697c8604a27 | [
"MIT"
] | null | null | null | README.md | PacktPublishing/Photorealistic-3D-Nature-Environment-Creation-with-Blender | 991074c5dc24218d45f281fb5aeba697c8604a27 | [
"MIT"
] | null | null | null | # Photorealistic-3D-Nature-Environment-Creation-with-Blender
Photorealistic 3D Nature Environment Creation with Blender, published by Packt
| 46.666667 | 78 | 0.857143 | eng_Latn | 0.332561 |
47288adf0eabb190507c34dbaab857fcc565dc5c | 308 | md | Markdown | README.md | fluxuator/Chrome-Tab-Url-Editor | 37fb0a0c9bee6cab2f1e076d8b40fa8591e237df | [
"MIT"
] | 3 | 2015-09-14T12:06:43.000Z | 2015-09-30T16:45:09.000Z | README.md | fluxuator/Chrome-Tab-Url-Editor | 37fb0a0c9bee6cab2f1e076d8b40fa8591e237df | [
"MIT"
] | null | null | null | README.md | fluxuator/Chrome-Tab-Url-Editor | 37fb0a0c9bee6cab2f1e076d8b40fa8591e237df | [
"MIT"
] | null | null | null | # Chrome Tab Url Editor
An extension that helps you edit the URL of the active tab.
## Installation
1. Clone the repository to your machine, e.g. into `~/MyChromeExtensions`:
`git clone https://github.com/fluxuator/Chrome-Tab-Url-Editor.git ~/MyChromeExtensions`
2. Open [chrome://extensions/](chrome://extensions/) in your Chrome browser
3. Load this extension as an unpacked extension
4. Enjoy it.
| 20.533333 | 75 | 0.727273 | eng_Latn | 0.918532 |
47292eba6afa7e5bbff42b841a223aba6b1aa8c9 | 3,637 | md | Markdown | _posts/2018-06-09-Github-Jekyll.md | takeAction/takeaction.github.io | 32d782dbff69ceb61f2837c2466d008db28606fa | [
"MIT"
] | null | null | null | _posts/2018-06-09-Github-Jekyll.md | takeAction/takeaction.github.io | 32d782dbff69ceb61f2837c2466d008db28606fa | [
"MIT"
] | null | null | null | _posts/2018-06-09-Github-Jekyll.md | takeAction/takeaction.github.io | 32d782dbff69ceb61f2837c2466d008db28606fa | [
"MIT"
] | null | null | null | ---
layout : post
title : Github Jekyll
comments: true
categories : Other
---
### Show table
In GitHub Jekyll, we can use the following syntax to organize information in a table:
```
| First Header | Second Header |
| ------------- | ------------- |
| Content Cell | Content Cell |
| Content Cell | Content Cell |
```
The table is rendered in the GitHub editor; however, it will not be shown when you visit your GitHub Pages site by typing the address in the browser address bar.
To solve this problem, we have to add CSS for tables in the file `style.scss`.
The css looks like:
```CSS
table {
border-spacing:0;//remove spaces between cells
margin: 15px 0;
padding: 0;
}
table tr {
border-top: 1px solid #cccccc;
background-color: white;
margin: 0;
padding: 0;
}
//show different color between rows
table tr:nth-child(2n) {
background-color: #D0E4F5;
}
table tr th {
font-weight: bold;
border: 1px solid #cccccc;
text-align: left;
margin: 0;
padding: 6px 13px;
}
table tr td {
border: 1px solid #cccccc;
text-align: left;
margin: 0;
padding: 6px 13px;
}
table tr th :first-child, table tr td :first-child {
margin-top: 0;
}
table tr th :last-child, table tr td :last-child {
margin-bottom: 0;
}
```
This CSS file is included in `_layouts/default.html`; alternatively, you can write the table CSS in a separate file and just include that file in the HTML.
### Add comment section on your post
1. Use 3rd comment service [Disqus](https://disqus.com), create one its account
2. Associate your site, that is your github site, with disqus
3. Get `shortname` in `admin/settings/general/`
4. Edit your `_config.yml` of github, make sure it contains following content:
```
disqus:
shortname: <your disqus short name>
```
5. Make sure there is `disqus.html` under `_includes` and it looks like:
```HTML
{% if page.comments %}
<div class="comments">
<div id="disqus_thread"></div>
<script type="text/javascript">
var disqus_shortname = '{{ site.disqus.shortname }}';
(function() {
var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true;
dsq.src = '//' + disqus_shortname + '.disqus.com/embed.js';
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
})();
</script>
<noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.
</a></noscript>
</div>
{% endif %}
```
6. Include `disqus.html` in `_layouts/post.html`
```HTML
....
\{\% include disqus.html \%\}
</article>
```
**The backslashes above only keep Jekyll from processing the Liquid tag inside this post; remove them when you copy the line.**
7. To enable comments, add `comments: true` to your post's YAML front matter:
```
---
comments: true
# other options
---
```
Disable it by setting `comments: false` or by not including the comments option at all.
### Code highlight
Jekyll 3 and GitHub Pages now only support rouge for syntax highlighting.
Your `_config.yml` should contain the following content:
> kramdown:
>
> input: GFM
>
> syntax_highlighter: rouge
>
> syntax_highlighter_opts:
>
> css_class: 'highlight'
To highlight code in the language you specify, write it like this:
> \```java
>
> //your code
>
> \```
The supported language codes are listed at https://github.com/jneen/rouge/wiki/List-of-supported-languages-and-lexers
| 23.927632 | 151 | 0.617817 | eng_Latn | 0.947711 |
47297e24c01574600824781b7e98d69c335f86a4 | 8,293 | md | Markdown | docs/relational-databases/system-functions/sys-fn-my-permissions-transact-sql.md | v-brlaz/sql-docs | 5d902e328b551bb619fd95106ce3d320a8fdfbe9 | [
"CC-BY-4.0",
"MIT"
] | 1 | 2019-02-06T20:12:14.000Z | 2019-02-06T20:12:14.000Z | docs/relational-databases/system-functions/sys-fn-my-permissions-transact-sql.md | v-brlaz/sql-docs | 5d902e328b551bb619fd95106ce3d320a8fdfbe9 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/relational-databases/system-functions/sys-fn-my-permissions-transact-sql.md | v-brlaz/sql-docs | 5d902e328b551bb619fd95106ce3d320a8fdfbe9 | [
"CC-BY-4.0",
"MIT"
] | 1 | 2019-04-09T18:16:06.000Z | 2019-04-09T18:16:06.000Z | ---
title: "sys.fn_my_permissions (Transact-SQL) | Microsoft Docs"
ms.custom: ""
ms.date: "03/14/2017"
ms.prod: "sql"
ms.prod_service: "database-engine"
ms.service: ""
ms.component: "system-functions"
ms.reviewer: ""
ms.suite: "sql"
ms.technology:
- "database-engine"
ms.tgt_pltfrm: ""
ms.topic: "language-reference"
f1_keywords:
- "sys.fn_my_permissions_TSQL"
- "fn_my_permissions_TSQL"
- "sys.fn_my_permissions"
- "fn_my_permissions"
dev_langs:
- "TSQL"
helpviewer_keywords:
- "fn_my_permissions function"
- "sys.fn_my_permissions function"
ms.assetid: 30f97f00-03d8-443a-9de9-9ec420b7699b
caps.latest.revision: 21
author: "rothja"
ms.author: "jroth"
manager: "craigg"
ms.workload: "Active"
---
# sys.fn_my_permissions (Transact-SQL)
[!INCLUDE[tsql-appliesto-ss2008-xxxx-xxxx-xxx-md](../../includes/tsql-appliesto-ss2008-xxxx-xxxx-xxx-md.md)]
Returns a list of the permissions effectively granted to the principal on a securable. A related function is [HAS_PERMS_BY_NAME](../../t-sql/functions/has-perms-by-name-transact-sql.md).
 [Transact-SQL Syntax Conventions](../../t-sql/language-elements/transact-sql-syntax-conventions-transact-sql.md)
## Syntax
```
fn_my_permissions ( securable , 'securable_class' )
```
## Arguments
*securable*
Is the name of the securable. If the securable is the server or a database, this value should be set to NULL. *securable* is a scalar expression of type **sysname**. *securable* can be a multipart name.
'*securable_class*'
Is the name of the class of securable for which permissions are listed. *securable_class* is a **sysname**. *securable_class* must be one of the following: APPLICATION ROLE, ASSEMBLY, ASYMMETRIC KEY, CERTIFICATE, CONTRACT, DATABASE, ENDPOINT, FULLTEXT CATALOG, LOGIN, MESSAGE TYPE, OBJECT, REMOTE SERVICE BINDING, ROLE, ROUTE, SCHEMA, SERVER, SERVICE, SYMMETRIC KEY, TYPE, USER, XML SCHEMA COLLECTION.
## Columns Returned
The following table lists the columns that **fn_my_permissions** returns. Each row that is returned describes a permission held by the current security context on the securable. Returns NULL if the query fails.
|Column name|Type|Description|
|-----------------|----------|-----------------|
|entity_name|**sysname**|Name of the securable on which the listed permissions are effectively granted.|
|subentity_name|**sysname**|Column name if the securable has columns, otherwise NULL.|
|permission_name|**nvarchar**|Name of the permission.|
## Remarks
This table-valued function returns a list of the effective permissions held by the calling principal on a specified securable. An effective permission is any one of the following:
- A permission granted directly to the principal, and not denied.
- A permission implied by a higher-level permission held by the principal and not denied.
- A permission granted to a role or group of which the principal is a member, and not denied.
- A permission held by a role or group of which the principal is a member, and not denied.
The permission evaluation is always performed in the security context of the caller. To determine whether some other principal has an effective permission, the caller must have IMPERSONATE permission on that principal.
For schema-level entities, one-, two-, or three-part nonnull names are accepted. For database-level entities, a one-part name is accepted, with a null value meaning "*current database*". For the server itself, a null value (meaning "current server") is required. **fn_my_permissions** cannot check permissions on a linked server.
The following query will return a list of built-in securable classes:
```
SELECT DISTINCT class_desc FROM fn_builtin_permissions(default)
ORDER BY class_desc;
GO
```
If DEFAULT is supplied as the value of *securable* or *securable_class*, the value will be interpreted as NULL.
## Examples
### A. Listing effective permissions on the server
The following example returns a list of the effective permissions of the caller on the server.
```
SELECT * FROM fn_my_permissions(NULL, 'SERVER');
GO
```
### B. Listing effective permissions on the database
The following example returns a list of the effective permissions of the caller on the [!INCLUDE[ssSampleDBobject](../../includes/sssampledbobject-md.md)] database.
```
USE AdventureWorks2012;
SELECT * FROM fn_my_permissions (NULL, 'DATABASE');
GO
```
### C. Listing effective permissions on a view
The following example returns a list of the effective permissions of the caller on the `vIndividualCustomer` view in the `Sales` schema of the [!INCLUDE[ssSampleDBobject](../../includes/sssampledbobject-md.md)] database.
```
USE AdventureWorks2012;
SELECT * FROM fn_my_permissions('Sales.vIndividualCustomer', 'OBJECT')
ORDER BY subentity_name, permission_name ;
GO
```
### D. Listing effective permissions of another user
The following example returns a list of the effective permissions of database user `Wanida` on the `Employee` table in the `HumanResources` schema of the [!INCLUDE[ssSampleDBobject](../../includes/sssampledbobject-md.md)] database. The caller requires IMPERSONATE permission on user `Wanida`.
```
EXECUTE AS USER = 'Wanida';
SELECT * FROM fn_my_permissions('HumanResources.Employee', 'OBJECT')
ORDER BY subentity_name, permission_name ;
REVERT;
GO
```
### E. Listing effective permissions on a certificate
The following example returns a list of the effective permissions of the caller on a certificate named `Shipping47` in the current database.
```
SELECT * FROM fn_my_permissions('Shipping47', 'CERTIFICATE');
GO
```
### F. Listing effective permissions on an XML Schema Collection
The following example returns a list of the effective permissions of the caller on an XML Schema Collection named `ProductDescriptionSchemaCollection` in the [!INCLUDE[ssSampleDBobject](../../includes/sssampledbobject-md.md)] database.
```
USE AdventureWorks2012;
SELECT * FROM fn_my_permissions('ProductDescriptionSchemaCollection',
'XML SCHEMA COLLECTION');
GO
```
### G. Listing effective permissions on a database user
The following example returns a list of the effective permissions of the caller on a user named `MalikAr` in the current database.
```
SELECT * FROM fn_my_permissions('MalikAr', 'USER');
GO
```
### H. Listing effective permissions of another login
The following example returns a list of the effective permissions of [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] login `WanidaBenshoof` on the `Employee` table in the `HumanResources` schema of the [!INCLUDE[ssSampleDBobject](../../includes/sssampledbobject-md.md)] database. The caller requires IMPERSONATE permission on [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] login `WanidaBenshoof`.
```
EXECUTE AS LOGIN = 'WanidaBenshoof';
SELECT * FROM fn_my_permissions('AdventureWorks2012.HumanResources.Employee', 'OBJECT')
ORDER BY subentity_name, permission_name ;
REVERT;
GO
```
## See Also
[Security Functions (Transact-SQL)](../../t-sql/functions/security-functions-transact-sql.md)
[Permissions (Database Engine)](../../relational-databases/security/permissions-database-engine.md)
[Securables](../../relational-databases/security/securables.md)
[Permissions Hierarchy (Database Engine)](../../relational-databases/security/permissions-hierarchy-database-engine.md)
[sys.fn_builtin_permissions (Transact-SQL)](../../relational-databases/system-functions/sys-fn-builtin-permissions-transact-sql.md)
[Security Catalog Views (Transact-SQL)](../../relational-databases/system-catalog-views/security-catalog-views-transact-sql.md)
[EXECUTE AS (Transact-SQL)](../../t-sql/statements/execute-as-transact-sql.md)
| 47.66092 | 421 | 0.716387 | eng_Latn | 0.886577 |
4729ce77ec2f3800587eb78fe360755481d6949c | 1,419 | md | Markdown | core-services/quay-mirroring/README.md | elfosardo/release | 9cc9b90bbb793a80dff743ef097b2ff41d652286 | [
"Apache-2.0"
] | null | null | null | core-services/quay-mirroring/README.md | elfosardo/release | 9cc9b90bbb793a80dff743ef097b2ff41d652286 | [
"Apache-2.0"
] | null | null | null | core-services/quay-mirroring/README.md | elfosardo/release | 9cc9b90bbb793a80dff743ef097b2ff41d652286 | [
"Apache-2.0"
] | null | null | null | # Mirroring images to Quay
Images built and promoted by Prow/ci-operator jobs are only kept inside the CI
cluster registry. To publish them, they need to be mirrored to Quay. This is
achieved by periodic Prow jobs. These jobs consume image mappings from the
subdirectories of this directory. These image mapping files determine what
should be mirrored where and are usually separated by version.
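To make the shape of those files concrete, the toy helper below assumes each non-comment line is a simple whitespace-separated `source destination` pair (the common `oc image mirror` mapping layout); the file name and the exact format are assumptions for illustration, not a definitive description of this repository's files.

```python
# Illustrative only: print what a mapping file would mirror where,
# assuming whitespace-separated "source destination..." lines.
from pathlib import Path

def read_mappings(path):
    pairs = []
    for line in Path(path).read_text().splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        source, *destinations = line.split()
        pairs.append((source, destinations))
    return pairs

for source, destinations in read_mappings("4.6/mapping_example"):  # hypothetical file
    print(f"{source} -> {', '.join(destinations)}")
```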
## Configuring mirroring for new images
Simply submit a PR adding the image to then appropriate mapping file. You will
need an approval of an owner of the image set.
## Configuring mirroring for new sets of images
Submit a PR adding a new subdirectory here, with at least a single mapping file
and an `OWNERS` file (so that you can maintain your mappings). The mapping files
should follow the `mapping_$name$anything` naming convention to avoid conflicts
when put to a ConfigMap.
Additionally, you will need to add a new Periodic job [here](../../ci-operator/jobs/infra-image-mirroring.yaml).
You should not need to modify anything in the job besides the items marked as
`FIXME`, where you just need to fill in the name of your image set (it should
be the same as the name of the subdirectory here). The only exception to this is
the name of the secret that will allow the job to push your images to Quay. You
need to talk to DPTP about storing this secret in DPTP vault from where it would
be synced to the cluster.
| 50.678571 | 112 | 0.789288 | eng_Latn | 0.999837 |
4729fbefc17949da5f98389362d60f9a3531d6ef | 6,536 | md | Markdown | _posts/2021-08-24-ai_wk7.md | De-Finance/De-Finance.github.io | 81ff1feade72eefb12607b7294251f73f74d344f | [
"MIT"
] | null | null | null | _posts/2021-08-24-ai_wk7.md | De-Finance/De-Finance.github.io | 81ff1feade72eefb12607b7294251f73f74d344f | [
"MIT"
] | null | null | null | _posts/2021-08-24-ai_wk7.md | De-Finance/De-Finance.github.io | 81ff1feade72eefb12607b7294251f73f74d344f | [
"MIT"
] | null | null | null | ---
layout: post
title: "[AI] 7. Scaler, Batch, Optimizer"
subtitle: " Learn about Scaler, Batch, Optimizer"
category: [AI]
cover-img: /img/aipic.jpg
thumbnail-img: ""
tags: [AI]
comments: false
use_math: true
---
## 미래연구소 16기 강의 기반 정리
[http://futurelab.creatorlink.net/](http://futurelab.creatorlink.net/)
<br />
## 딥러닝 강의 7주차 - Scaler, Batch, Optimizer
<br />
## 1) Scaler
소위 Feature 값들을 0~1 사이로 맞춰주는 Normalization을 수행한다.
이를 통해 데이터들의 기준이 더욱 균일해지고 학습시에도 훨씬 더 빠르게 학습이 가능하다.
표준화 하는 대표적인 2가지 방법에는 **MinMaxscaler**와 **Standarization**이 있다.
<br />
### 1. MinMaxscaler
최대와 최소 값을 각각 1, 0으로 설정하여 계산하는 방법이다.
다른 값들은 최대보다 크지 않고, 최소보다 작지 않기에 모두 0과 1 사이에 위치하게 될 것이다.
$$ nx_{i} = \frac{x_{i} - min}{MAX-min}$$
예를 들어 x = [1, 101, 9, 2] 가 존재한다고 할 때
위 식에 따라 Max(x) = 101, min(x) = 1 인데, 각 값들을 위 계산식에 대입하면
$$ [\frac{1-1}{101-1} = 0, \frac{101-1}{101-1} = 1, \frac{9-1}{101-1} = 0.08, \frac{2-1}{101-1} = 0.01] $$
<br />
### 2. Standarization(표준화)
$$ X = \frac{X_{i} - μ}{σ} (X ~ N(μ, σ^{2})) $$
기존에 고등학교 과정에 있는 표준화다.
Data Feature의 평균과 표준편차를 구한 후 `(feature 값 - 평균)/표준편차` 연산을 가한다.
이 결과 Feature의 분포는 표준정규분포 $$ N(0,1) $$를 따른다.
<br />
위 방식을 계산하는 것은 쉽고 간편하나, 만일 이상치들이 최대/최소 범위가 되는 경우에 계산이 비효율적일 수 있다.
이런 아웃라이어들의 영향을 최소화하기 위해서 [RobustScaler](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.RobustScaler.html) 같은 방식이 개발되었다.
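A quick way to see the two scalers (plus the outlier-robust variant mentioned above) side by side, assuming scikit-learn is available, reusing the `[1, 101, 9, 2]` example from the text:

```python
# Compare MinMax, Standard and Robust scaling on a column with one outlier.
import numpy as np
from sklearn.preprocessing import MinMaxScaler, StandardScaler, RobustScaler

x = np.array([[1.0], [101.0], [9.0], [2.0]])
for scaler in (MinMaxScaler(), StandardScaler(), RobustScaler()):
    print(scaler.__class__.__name__, scaler.fit_transform(x).ravel())
```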
<br />
<br />
## 2) Mini-Batch Gradient Descent
**Batch:** 우리가 가진 전체 Training data
$$ W · X + b = W · \begin{pmatrix}
\vdots & \vdots & & \vdots \\
X^{(1)} & X^{(2)} & \cdots & X^{(m)} \\
\vdots & \vdots & & \vdots
\end{pmatrix} + b $$
Gradient Descent까지 한 번을 학습할 때, Batch 하나(데이터 전체)를 사용하게 된다.
그런데 Colab이나 Jupyter에서 학습을 시킬 때, 한 바가 하나의 Training을 의미하진 않는다.
이는 인공지능 학습할 때 RAM이 이 많은 데이터를 한꺼번에 들 수 없기 때문에 쪼개서 넣는 것이다.

<br />
원래 그림 속 데이터는 50000이지만, 정작 연산할 때 바 1개 당 1563개를 맡고 있다.
이는 계산해보면 50000 ÷ 1563 ≒ 32 란 소리다. 32등분 했다는 소리다. (이는 batch_size 세팅의 기본값)
이렇게 토막난 조각들을 **mini-batch** 라고 한다. Tensorflow에서는 batch_size를 정의하여 Training 모델을 정의한다.
모든 mini-batch가 학습이 완료될 때 Epoch 하나가 끝난다.
*참고로 완전히 나눠 떨어지지 않을 시 적더라도 남는 것끼리 Training을 한다*
<br />
### Mini-Batch Gradient Descent (용어)
1> **Batch Gradient Descent**: 한 번에 모든 Data를 Train한다.
2> **Mini-Batch Gradient Descent**: 한 번에 mini-batch씩만 Train한다.
3> **Stochastic Gradient Descent (SGD)**: 한 번에 1개 Data씩만 Train한다.
만일 SGD 이고 batch_size(등분)이 1 이상이면 한 번에 `batch_size`씩만 Train된다.
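In code, splitting a dataset into mini-batches is just shuffling and slicing; the sketch below is a minimal NumPy illustration with placeholder data (shapes chosen arbitrarily), not framework code.

```python
# Minimal mini-batch iteration: 50000 samples with batch_size 32 gives
# ceil(50000 / 32) = 1563 updates per epoch, matching the progress bar above.
import numpy as np

X = np.random.rand(50000, 32 * 32 * 3)    # placeholder data
y = np.random.randint(0, 10, size=50000)
batch_size = 32

indices = np.random.permutation(len(X))   # shuffle once per epoch
for start in range(0, len(X), batch_size):
    batch_idx = indices[start:start + batch_size]
    X_batch, y_batch = X[batch_idx], y[batch_idx]
    # forward pass, loss, backprop and parameter update would happen here
```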
<br />
### batch_size에 따른 Gradient 관계

클 수록 정확한 Gradient를 계산하는 경향이 있다. 작을 경우에는 빠르게 연산할 수 있지만,
그만큼 변화에 민감하게 반응하고 부정확해서 어느 정도의 Trade-Off를 갖고 있다.
ex) 선거 출구조사에서 표본 더 크게 뽑으면 정확해지지만, 비용과 통계 시간 때문에 전부 조사하지 않는 것과 비슷하다.
### batch_size 관련 Tip
1. 2의 거듭제곱으로 설정하는 것이 메모리에 효율적이다.
2. Batch-size는 클 수록 좋다. (메모리 초과 일어나지 않을 정도만)
3. 따라서 보통 32 이상으로 설정한다.
4. batch_size가 작을 수록, 어느 정도 Regularization으로 Overfitting을 방지하는 효과를 낼 수 있지만 그 효과가 적어 굳이 이것을 조정하진 않는다.
<br />
<br />
## 3) Optimizer

가장 이상적인 Learning Rate는 지속적으로 감소하는 형태가 나와야한다.
이런 형태로 만들기 위해서는 Initialization 설정도 있지만, Optimizer를 변경하여 조절할 수 있다.
<br />
### 1. **SGD** (Stochastic Gradient Descent)
Random하게 추출한 mini-batch씩 Gradient Descent를 조절한다.
다만 단점은 **해당 위치에서의 Gradient만 고려**하여, 만일 gradient가 위치별로 차이가 크면 minimum에 도달하기 어려울 수 있다.
또한 실제 최대/최소가 아닌 Local Min/Max에서 Gradient Descent가 멈출 수 있다.
우리에게는 전체적인 방향성을 고려하는 방법이 필요하다.
<br />
이런 부분에 머무르지 않도록 Gradient Descent에도 관성을 붙어주면 해결할 수 있다는 주장이 나왔다.
$$ W := W - \alpha\frac{𝝏}{𝝏W}cost(W) $$
따라서 Gradient(방향)나 Learning Rate(사이즈)에 관성을 주는 2가지 경우로 이론이 발전해나갔다.

<br />
### 2. SGD + Momentum
$$ θ = θ (parameter) - v_{t} $$
$$ v_{t} = Γv_{t-1} + η(learning rate) · ▽_{θ}J(θ)(Gradient) $$
1> $$ v_{(t-1)} $$에 이전 방향이 저장되어 있다.
2> γ(momentum)를 곱해 이전 속도를 얼마나 반영할 지 결정

나온 방향대로 가는데 이전 관성의 영향을 받아 이동 경로가 달라진다.

벡터의 수직 방향 이동도 어느 정도 상쇄된다. (문제 1 해결)
또한 관성의 존재로 Local Minimum이나 안장점(기울기 0)도 빠져나올 수 있지만, 이것이 진짜 minimum도 빠져나올 시는 문제다.
γ 가 클 수록 더 잘 빠져나가지만, Brake도 잘 안 통하는 것과 같기 때문에 어느 정도만 커야 한다.
<br />
### 3. NAG (Nesterov Accelerated Gradient)

$$ θ = θ - v_{t} $$
$$ v_{t} = Γv_{t-1} + η▽_{θ}J(θ - Γv_{t-1})$$
Momentum 방향으로 갔을 때 예상되는 지점의 gradient를 사용한다.
그래서 더 converge 잘 되고, minimum도 더 잘 찾는다.
<br />
이상 Gradient를 제어하여 조정하는 것이였고, Learning Rate를 제어하는 기법에 대해 알아보자
<br />
### 4. Adagrad (Adaptive Gradient)
$$ θ_{t+1} = θ - \frac{η}{\sqrt{G_{t} + ϵ(= e^{-8})}} · ▽_{θ}J(θ_{t})$$
$$ G_{t} = G_{t-1} + (▽_{θ}J(θ_{t}))^{2}$$
$$ G_{t} $$ 는 현재의 속력이고, $$ G_{t-1} $$ 에는 이전 속력이 저장되어 있다
**누적된 속력으로 Learning Rate를 나눈다.**
이를 통해 이전 속력$$(G_{t-1})$$이 계속 빨랐다면 Learning Rate로 나눠서 속력을 늦추고
이전 속력$$(G_{t-1})$$이 계속 느렸다면 Learning Rate로 나눠서 속력을 가속화한다.
다만 속력이 계속 증가 시 Learning Rate를 너무 큰 수로 나눠져서 학습이 안될 수 있다.
또한, 속력이 초반만 크고 나중에 작은 형태면 속력을 줄이지 않아도 되는데 줄어둔다.
이를 극복하기 위해 SGD에서 관성을 더하는 것과 같이 인접한 속력에 가중치를 주는 방식으로 극복한다.
<br />
### 5. Rmsprop (Root Means Squared Prop)
$$ θ_{t+1} = θ - \frac{η}{\sqrt{G_{t} + ϵ}} · ▽_{θ}J(θ_{t})$$
$$ G_{t} = ΓG_{t-1} + (1-Γ)(▽_{θ}J(θ_{t}))^{2}$$
$$ ϵ = e^{-8} , Γ = 0.9 $$ 를 주로 사용한다.
인접한 속력에 더 가중치를 가해서 Learning Rate를 나눈다.
이를 통해 이존 속력이 커서 Step Size가 0에 수렴하는 것을 방지한다.
여기서 **Gradient 와 Learning Rate에 둘 다 적용**시킬 수 있으면 좋을것이다.
<br />
### 6. Adam (Adaptive Moment Estimation)
Step Size와 Step Direction 모두에게 관성을 부여했다.
(방향) $$ m_{t} = β_{1}m_{t-1} + (1-β_{1})g_{t}$$
(속력) $$ v_{t} = β_{2}v_{t-1} + (1-β_{2})g_{t}^{2}$$
$$ m̂_{t} = \frac{m_{t}}{1-β_{1}^{t}} $$
$$ v̂_{t} = \frac{v_{t}}{1-β_{2}^{t}} $$
(통합) $$ θ_{t+1} = θ_{t} - \frac{η}{\sqrt{v̂_{t} + ϵ}}m̂_{t} $$
주로 $$β_{1} = 0.9 β_{2} = 0.999 ϵ = e^{-8}$$를 사용한다.
Adam이 대체적으로 개선된 성능을 보인다.
다만 학습이 진행될수록 Learning Rate가 작아져서 Bad Learning Rate로 수렴할 수 있는데,
이를 Rectify 하여 어느 정도 방지해주는 [Rectified Adam](https://arxiv.org/pdf/1908.03265.pdf)이 나왔다.
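The update rules above can be written in a few lines of NumPy. This is only an illustrative sketch of the momentum, RMSprop and Adam steps for a parameter vector `w` (default hyper-parameters shown as keyword arguments, not the exact values of any particular framework):

```python
# One gradient step of SGD+momentum, RMSprop and Adam for a parameter vector w.
import numpy as np

def momentum_step(w, grad, v, lr=0.01, gamma=0.9):
    v = gamma * v + lr * grad                 # accumulate velocity
    return w - v, v

def rmsprop_step(w, grad, g, lr=0.001, gamma=0.9, eps=1e-8):
    g = gamma * g + (1 - gamma) * grad ** 2   # running average of squared gradients
    return w - lr * grad / (np.sqrt(g) + eps), g

def adam_step(w, grad, m, v, t, lr=0.001, b1=0.9, b2=0.999, eps=1e-8):
    m = b1 * m + (1 - b1) * grad              # first moment (direction)
    v = b2 * v + (1 - b2) * grad ** 2         # second moment (speed)
    m_hat = m / (1 - b1 ** t)                 # bias correction, t starts at 1
    v_hat = v / (1 - b2 ** t)
    return w - lr * m_hat / (np.sqrt(v_hat) + eps), m, v
```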
<br />
현재는 Adam이 가장 성능이 좋지만, 상황에 따라서 그렇지 않은 경우도 있기 때문에
여유가 있다면 가능한 전부 사용해서 그래프를 그려보고 가장 잘 되는 것을 사용하는 것이 좋다.
<br />
<br />
| 24.571429 | 144 | 0.653458 | kor_Hang | 1.000008 |
472a11ae2f5eaba73d1e5d0b155e7c43799a5c88 | 5,987 | md | Markdown | docs/visual-basic/language-reference/operators/operator-precedence.md | CharleyGui/docs.fr-fr | 2563c94abf0d041d775f700b552d1dbe199f03d5 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/visual-basic/language-reference/operators/operator-precedence.md | CharleyGui/docs.fr-fr | 2563c94abf0d041d775f700b552d1dbe199f03d5 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/visual-basic/language-reference/operators/operator-precedence.md | CharleyGui/docs.fr-fr | 2563c94abf0d041d775f700b552d1dbe199f03d5 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: Priorité des opérateurs
ms.date: 07/20/2015
helpviewer_keywords:
- arithmetic operators [Visual Basic], precedence
- operator precedence
- logical operators [Visual Basic], precedence
- operators [Visual Basic], associativity
- operators [Visual Basic], resolution
- associativity of operators [Visual Basic]
- operators [Visual Basic], precedence
- precedence [Visual Basic], of operators
- comparison operators [Visual Basic], precedence
- math operators [Visual Basic]
- order of precedence
ms.assetid: cbbdb282-f572-458e-a520-008a675f8063
ms.openlocfilehash: b5649cd2a58fd8d300df58c563aebeed8976c4f5
ms.sourcegitcommit: d2db216e46323f73b32ae312c9e4135258e5d68e
ms.translationtype: MT
ms.contentlocale: fr-FR
ms.lasthandoff: 09/22/2020
ms.locfileid: "90874790"
---
# <a name="operator-precedence-in-visual-basic"></a>Priorité des opérateurs en Visual Basic
Lorsque plusieurs opérations se produisent dans une expression, chaque composant est évalué et résolu dans un ordre prédéterminé appelé *priorité d’opérateur*.
## <a name="precedence-rules"></a>Règles de précédence
Lorsque des expressions contiennent des opérateurs issus de plusieurs catégories, elles sont évaluées en fonction des règles suivantes :
- Les opérateurs arithmétiques et de concaténation ont l’ordre de priorité décrit dans la section suivante, et tous ont une priorité plus élevée que les opérateurs de comparaison, logiques et au niveau du bit.
- Tous les opérateurs de comparaison ont une priorité égale, et tous ont une priorité plus élevée que les opérateurs logiques et au niveau du bit, mais une priorité plus faible que les opérateurs arithmétiques et de concaténation.
- Les opérateurs logiques et au niveau du bit ont l’ordre de priorité décrit dans la section suivante, et tous ont une priorité plus faible que les opérateurs arithmétiques, de concaténation et de comparaison.
- Les opérateurs de même priorité sont évalués de gauche à droite dans l’ordre dans lequel ils apparaissent dans l’expression.
## <a name="precedence-order"></a>Ordre de priorité
Les opérateurs sont évalués dans l’ordre de priorité suivant :
### <a name="await-operator"></a>Await, opérateur
Await
### <a name="arithmetic-and-concatenation-operators"></a>Opérateurs arithmétiques et de concaténation
Élévation à la puissance ( `^` )
Négation et identité unaire ( `+` , `–` )
Multiplication et Division à virgule flottante ( `*` , `/` )
Division d’entier ( `\` )
Arithmétique modulaire ( `Mod` )
Addition et soustraction ( `+` , `–` )
Concaténation de chaînes ( `&` )
Décalage binaire arithmétique ( `<<` , `>>` )
### <a name="comparison-operators"></a>Opérateurs de comparaison
Tous les opérateurs de comparaison ( `=` , `<>` , `<` , `<=` , `>` , `>=` , `Is` , `IsNot` , `Like` , `TypeOf` ... `Is` )
### <a name="logical-and-bitwise-operators"></a>Opérateurs de bits et opérateurs logiques
Négation ( `Not` )
Conjonction ( `And` , `AndAlso` )
Disjonction inclusive ( `Or` , `OrElse` )
Disjonction exclusive ( `Xor` )
### <a name="comments"></a>Commentaires
L' `=` opérateur est uniquement l’opérateur de comparaison d’égalité, et non l’opérateur d’assignation.
L’opérateur de concaténation de chaînes ( `&` ) n’est pas un opérateur arithmétique, mais en priorité il est groupé avec les opérateurs arithmétiques.
Les `Is` `IsNot` opérateurs et sont des opérateurs de comparaison de référence d’objet. Elles ne comparent pas les valeurs de deux objets ; ils vérifient uniquement si deux variables d’objet font référence à la même instance d’objet.
## <a name="associativity"></a>Associativité
Lorsque des opérateurs de priorité égale apparaissent ensemble dans une expression, par exemple multiplication et Division, le compilateur évalue chaque opération à mesure qu’elle le rencontre de gauche à droite. L'exemple suivant illustre ce comportement.
```vb
Dim n1 As Integer = 96 / 8 / 4
Dim n2 As Integer = (96 / 8) / 4
Dim n3 As Integer = 96 / (8 / 4)
```
La première expression évalue la division 96/8 (qui donne 12), puis la division 12/4, qui donne trois valeurs. Étant donné que le compilateur évalue les opérations de `n1` gauche à droite, l’évaluation est la même lorsque cet ordre est explicitement indiqué pour `n2` . Et ont tous les deux `n1` `n2` la valeur trois. En revanche, `n3` a le résultat 48, car les parenthèses forcent le compilateur à évaluer 8/4 en premier.
En raison de ce comportement, on dit que les opérateurs sont *associatifs à gauche* dans Visual Basic.
## <a name="overriding-precedence-and-associativity"></a>Substitution de la priorité et de l’associativité
Vous pouvez utiliser des parenthèses pour forcer l’évaluation de certaines parties d’une expression avant d’autres. Cela peut remplacer l’ordre de priorité et l’associativité à gauche. Visual Basic effectue toujours des opérations placées entre parenthèses avant celles extérieures à. Toutefois, entre parenthèses, il gère la priorité et l’associativité ordinaires, sauf si vous utilisez des parenthèses entre parenthèses. L'exemple suivant illustre ce comportement.
```vb
Dim a, b, c, d, e, f, g As Double
a = 8.0
b = 3.0
c = 4.0
d = 2.0
e = 1.0
f = a - b + c / d * e
' The preceding line sets f to 7.0. Because of natural operator
' precedence and associativity, it is exactly equivalent to the
' following line.
f = (a - b) + ((c / d) * e)
' The following line overrides the natural operator precedence
' and left associativity.
g = (a - (b + c)) / (d * e)
' The preceding line sets g to 0.5.
```
## <a name="see-also"></a>See also
- [= Operator](assignment-operator.md)
- [Is Operator](is-operator.md)
- [IsNot Operator](isnot-operator.md)
- [Like Operator](like-operator.md)
- [TypeOf Operator](typeof-operator.md)
- [Await Operator](await-operator.md)
- [Operators Listed by Functionality](operators-listed-by-functionality.md)
- [Operators and Expressions](../../programming-guide/language-features/operators-and-expressions/index.md)
| 44.679104 | 467 | 0.747787 | fra_Latn | 0.950095 |
472a652f312134a24e198d5663d8964224d8eed2 | 1,773 | md | Markdown | docs/relational-databases/errors-events/mssqlserver-18264-database-engine-error.md | MSFTPawelM/sql-docs | a6eef20d03e47d1e0aa4b4621a8eebc89a3ca48d | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/relational-databases/errors-events/mssqlserver-18264-database-engine-error.md | MSFTPawelM/sql-docs | a6eef20d03e47d1e0aa4b4621a8eebc89a3ca48d | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/relational-databases/errors-events/mssqlserver-18264-database-engine-error.md | MSFTPawelM/sql-docs | a6eef20d03e47d1e0aa4b4621a8eebc89a3ca48d | [
"CC-BY-4.0",
"MIT"
] | 1 | 2020-07-05T21:11:18.000Z | 2020-07-05T21:11:18.000Z | ---
title: "MSSQLSERVER_18264 | Microsoft Docs"
ms.custom: ""
ms.date: "04/04/2017"
ms.prod: sql
ms.reviewer: ""
ms.technology: supportability
ms.topic: "language-reference"
helpviewer_keywords:
- "18264 (Database Engine error)"
ms.assetid: 3050fc56-2be5-43cf-916b-50a3ac5f89aa
author: MashaMSFT
ms.author: mathoma
---
# MSSQLSERVER_18264
[!INCLUDE[appliesto-ss-xxxx-xxxx-xxx-md](../../includes/appliesto-ss-xxxx-xxxx-xxx-md.md)]
## Details
| Attribute | Value |
| :-------- | :---- |
|Product Name|Microsoft SQL Server|
|Event ID|18264|
|Event Source|MSSQLENGINE|
|Component|SQLEngine|
|Symbolic Name|STRMIO_DBDUMP|
|Message Text|Database backed up. Database: %s, creation date(time): %s(%s), pages dumped: %d, first LSN: %s, last LSN: %s, number of dump devices: %d, device information: (%s). This is an informational message only. No user action is required.|
## Explanation
By default, every successful backup adds this informational message to the [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] error log and the system event log. If you very frequently back up the transaction log, these messages can accumulate quickly, creating very large error logs that can make finding other messages difficult.
## User Action
You can suppress these log entries by using [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] trace flag **3226**. Enabling this trace flag is useful if you are running frequent log backups and if none of your scripts depend on those entries.
For information about using trace flags, see SQL Server Books Online.
## See Also
[Trace Flags (Transact-SQL)](~/t-sql/database-console-commands/dbcc-traceon-trace-flags-transact-sql.md)
| 44.325 | 341 | 0.711788 | eng_Latn | 0.825595 |
472aca0f9cae08cbfd8cb9a0aa56440c9764ff1c | 3,075 | md | Markdown | docs/database-engine/availability-groups/windows/availability-replica-is-disconnected.md | drake1983/sql-docs.es-es | d924b200133b8c9d280fc10842a04cd7947a1516 | [
"CC-BY-4.0",
"MIT"
] | 1 | 2020-04-25T17:50:01.000Z | 2020-04-25T17:50:01.000Z | docs/database-engine/availability-groups/windows/availability-replica-is-disconnected.md | drake1983/sql-docs.es-es | d924b200133b8c9d280fc10842a04cd7947a1516 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | docs/database-engine/availability-groups/windows/availability-replica-is-disconnected.md | drake1983/sql-docs.es-es | d924b200133b8c9d280fc10842a04cd7947a1516 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | ---
title: Réplica de disponibilidad desconectada | Microsoft Docs
ms.custom: ''
ms.date: 05/17/2016
ms.prod: sql
ms.reviewer: ''
ms.suite: sql
ms.technology: high-availability
ms.tgt_pltfrm: ''
ms.topic: conceptual
f1_keywords:
- sql13.swb.agdashboard.arp2connected.issues.f1
helpviewer_keywords:
- Availability Groups [SQL Server], policies
ms.assetid: 1a2162d3-54fb-4356-b349-effbdc15a5a4
caps.latest.revision: 12
author: MashaMSFT
ms.author: mathoma
manager: craigg
ms.openlocfilehash: d491f020902c675e3b574f3e7dba6213053c85e7
ms.sourcegitcommit: 8aa151e3280eb6372bf95fab63ecbab9dd3f2e5e
ms.translationtype: HT
ms.contentlocale: es-ES
ms.lasthandoff: 06/05/2018
ms.locfileid: "34771011"
---
# <a name="availability-replica-is-disconnected"></a>Availability replica is disconnected
  [!INCLUDE[appliesto-ss-xxxx-xxxx-xxx-md](../../../includes/appliesto-ss-xxxx-xxxx-xxx-md.md)]
## <a name="introduction"></a>Introduction
|||
|-|-|
|**Policy Name**|Availability Replica Connection State|
|**Issue**|Availability replica is disconnected.|
|**Category**|**Critical**|
|**Facet**|Availability replica|
## <a name="description"></a>Description
This policy checks the connection state between availability replicas. The policy is in an unhealthy state when the connection state of the availability replica is DISCONNECTED. Otherwise, the policy is in a healthy state.
> [!NOTE]
> For this release of [!INCLUDE[ssCurrent](../../../includes/sscurrent-md.md)], information about possible causes and solutions is located in the [Availability replica is disconnected](http://go.microsoft.com/fwlink/p/?LinkId=220857) article on the TechNet Wiki.
## <a name="possible-causes"></a>Possible Causes
The secondary replica is not connected to the primary replica. The connection state is DISCONNECTED. This issue can be caused by the following:
- The connection port may be in conflict with another application.
- The encryption type or algorithm does not match.
- The connection endpoint has been deleted or has not been started.
- The transport is disconnected.
## <a name="possible-solutions"></a>Possible Solutions
The following are possible solutions to this issue:
- Verify the database mirroring endpoint configuration for the primary and secondary replica instances, and update any settings that do not match.
- Check whether the port is in conflict and, if so, change the port number.
## <a name="see-also"></a>See Also
[Overview of Always On Availability Groups (SQL Server)](../../../database-engine/availability-groups/windows/overview-of-always-on-availability-groups-sql-server.md)
[Use the Always On Dashboard (SQL Server Management Studio)](../../../database-engine/availability-groups/windows/use-the-always-on-dashboard-sql-server-management-studio.md)
| 45.220588 | 272 | 0.74374 | spa_Latn | 0.908272 |
472b180456f36472cd577460c86527b912639395 | 4,133 | md | Markdown | data/blog/Blazor-serverside-get-remote-ip.md | konradbartecki/blog-2021 | 09c9f7dc2a04ae906660937d217094513875c43a | [
"MIT"
] | 3 | 2021-09-15T14:46:28.000Z | 2022-03-10T14:52:10.000Z | data/blog/Blazor-serverside-get-remote-ip.md | konradbartecki/blog-2021 | 09c9f7dc2a04ae906660937d217094513875c43a | [
"MIT"
] | null | null | null | data/blog/Blazor-serverside-get-remote-ip.md | konradbartecki/blog-2021 | 09c9f7dc2a04ae906660937d217094513875c43a | [
"MIT"
] | null | null | null | ---
title: Remote client's IP in server side Blazor (2021)
date: '2021-03-14'
tags: ['blazor']
draft: false
summary: How to get the remote client's IP address in server-side Blazor
---
## Two approaches
### Approach 1: Call an external service using JavaScript
Pros:
- Slightly simpler if you are using a reverse proxy like nginx or traefik
Cons:
- May be blocked by browser extensions/adblockers
- You will have to configure CORS
#### `_Host.cshtml`
```html
<script>
window.getIpAddress = () => {
return fetch('https://jsonip.com/')
.then((response) => response.json())
.then((data) => {
return data.ip
})
}
</script>
```
#### `RazorPage.razor.cs`
```cs
public partial class RazorPage : ComponentBase
{
[Inject] public IJSRuntime jsRuntime { get; set; }
public async Task<string> GetIpAddress()
{
try
{
var ipAddress = await jsRuntime.InvokeAsync<string>("getIpAddress")
.ConfigureAwait(true);
return ipAddress;
}
catch(Exception e)
{
//If your request was blocked by CORS or some extension like uBlock Origin then you will get an exception.
return string.Empty;
}
}
}
```
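One detail the snippet above leaves open is where to call `GetIpAddress()`. With the default `ServerPrerendered` render mode, JS interop is not available while the component is prerendering, so calling it from `OnInitializedAsync` can throw. Below is a minimal sketch of deferring the call to `OnAfterRenderAsync` — the `ipAddress` field and the `StateHasChanged()` refresh are illustrative assumptions, not part of the original post:

```cs
public partial class RazorPage : ComponentBase
{
    // Illustrative backing field for the UI; adjust to your own page model.
    private string ipAddress = string.Empty;

    protected override async Task OnAfterRenderAsync(bool firstRender)
    {
        // JS interop only works once the circuit is established, so the
        // lookup is deferred until the first render has completed.
        if (firstRender)
        {
            ipAddress = await GetIpAddress().ConfigureAwait(true);
            StateHasChanged(); // re-render the component with the fetched address
        }
    }
}
```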
#### `Startup.cs`
```cs
public void ConfigureServices(IServiceCollection services)
{
//code...
services
.AddCors(x => x.AddPolicy("externalRequests",
policy => policy
.WithOrigins("https://jsonip.com")));
}
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
//code...
app.UseCors("externalRequests");
}
```
### Approach 2: Expose an endpoint in our Blazor app and call it using JavaScript
Pros:
- You won't have to configure CORS
- Won't be blocked by extensions or adblockers
Cons:
- May be slightly more complicated if you are using a reverse proxy like nginx, traefik, etc.
Take care with this approach: if your app sits behind a reverse proxy, `RemoteIpAddress` will actually give you the reverse proxy's IP address, not the client's.
Your reverse proxy is most likely already forwarding the external client's IP address in some header, but it is up to you to find out which one and how it is configured.
Example: https://www.nginx.com/resources/wiki/start/topics/examples/forwarded/
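If your proxy passes the client address in the common `X-Forwarded-For` header, ASP.NET Core can rewrite `RemoteIpAddress` for you through the forwarded headers middleware, so the controller shown below keeps working unchanged behind the proxy. This is only a hedged sketch — the trusted-proxy settings depend entirely on your own network layout:

```cs
// Requires: using Microsoft.AspNetCore.HttpOverrides;
// In Startup.ConfigureServices:
services.Configure<ForwardedHeadersOptions>(options =>
{
    options.ForwardedHeaders =
        ForwardedHeaders.XForwardedFor | ForwardedHeaders.XForwardedProto;
    // Only loopback proxies are trusted by default; clear these lists
    // (or add your proxy's address) if the proxy runs on another host.
    options.KnownNetworks.Clear();
    options.KnownProxies.Clear();
});

// In Startup.Configure, before anything that reads the client IP:
app.UseForwardedHeaders();
```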
#### `InfoController.cs`
```cs
[Route("api/[controller]")]
[ApiController]
public class InfoController : ControllerBase
{
[HttpGet]
[Route("ipaddress")]
public async Task<string> GetIpAddress()
{
var remoteIpAddress = this.HttpContext.Request.HttpContext.Connection.RemoteIpAddress;
if (remoteIpAddress != null)
return remoteIpAddress.ToString();
return string.Empty;
}
}
```
#### `Startup.cs`
```cs
app.UseEndpoints(endpoints =>
{
endpoints.MapControllers(); //remember to map controllers if you don't have this line
endpoints.MapBlazorHub();
endpoints.MapFallbackToPage("/_Host");
});
```
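Note that `MapControllers()` only maps the endpoints; the controller services themselves also have to be registered in `ConfigureServices`, which the post does not show. A sketch of what that registration might look like, assuming the rest of the file mirrors the default Blazor Server template:

```cs
public void ConfigureServices(IServiceCollection services)
{
    services.AddRazorPages();
    services.AddServerSideBlazor();
    services.AddControllers(); // required so MapControllers() can discover InfoController
}
```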
#### `_Host.cshtml`
```html
<script>
window.getIpAddress = () => {
return fetch('/api/info/ipaddress')
.then((response) => response.text())
.then((data) => {
return data
})
}
</script>
```
#### `RazorPage.razor.cs`
```cs
public partial class RazorPage : ComponentBase
{
[Inject] public IJSRuntime jsRuntime { get; set; }
public async Task<string> GetIpAddress()
{
try
{
var ipAddress = await jsRuntime.InvokeAsync<string>("getIpAddress")
.ConfigureAwait(true);
return ipAddress;
}
catch(Exception e)
{
//If your request was blocked by CORS or some extension like uBlock Origin then you will get an exception.
return string.Empty;
}
}
}
```
| 25.512346 | 164 | 0.585531 | eng_Latn | 0.818814 |